Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- llava/lib/python3.10/asyncore.py +649 -0
- llava/lib/python3.10/contextlib.py +745 -0
- llava/lib/python3.10/copy.py +304 -0
- llava/lib/python3.10/distutils/README +11 -0
- llava/lib/python3.10/distutils/__init__.py +20 -0
- llava/lib/python3.10/distutils/_msvccompiler.py +546 -0
- llava/lib/python3.10/distutils/archive_util.py +256 -0
- llava/lib/python3.10/distutils/bcppcompiler.py +393 -0
- llava/lib/python3.10/distutils/ccompiler.py +1116 -0
- llava/lib/python3.10/distutils/cmd.py +403 -0
- llava/lib/python3.10/distutils/config.py +130 -0
- llava/lib/python3.10/distutils/core.py +234 -0
- llava/lib/python3.10/distutils/cygwinccompiler.py +406 -0
- llava/lib/python3.10/distutils/debug.py +5 -0
- llava/lib/python3.10/distutils/dep_util.py +92 -0
- llava/lib/python3.10/distutils/dir_util.py +210 -0
- llava/lib/python3.10/distutils/dist.py +1256 -0
- llava/lib/python3.10/distutils/errors.py +97 -0
- llava/lib/python3.10/distutils/extension.py +241 -0
- llava/lib/python3.10/distutils/fancy_getopt.py +457 -0
- llava/lib/python3.10/distutils/file_util.py +238 -0
- llava/lib/python3.10/distutils/filelist.py +327 -0
- llava/lib/python3.10/distutils/log.py +77 -0
- llava/lib/python3.10/distutils/msvc9compiler.py +788 -0
- llava/lib/python3.10/distutils/msvccompiler.py +643 -0
- llava/lib/python3.10/distutils/spawn.py +129 -0
- llava/lib/python3.10/distutils/sysconfig.py +353 -0
- llava/lib/python3.10/distutils/tests/__init__.py +41 -0
- llava/lib/python3.10/distutils/tests/includetest.rst +1 -0
- llava/lib/python3.10/distutils/tests/support.py +209 -0
- llava/lib/python3.10/distutils/tests/test_archive_util.py +396 -0
- llava/lib/python3.10/distutils/tests/test_build.py +56 -0
- llava/lib/python3.10/distutils/tests/test_build_ext.py +553 -0
- llava/lib/python3.10/distutils/tests/test_build_py.py +179 -0
- llava/lib/python3.10/distutils/tests/test_cmd.py +126 -0
- llava/lib/python3.10/distutils/tests/test_core.py +140 -0
- llava/lib/python3.10/distutils/tests/test_cygwinccompiler.py +154 -0
- llava/lib/python3.10/distutils/tests/test_dep_util.py +80 -0
- llava/lib/python3.10/distutils/tests/test_extension.py +70 -0
- llava/lib/python3.10/distutils/tests/test_filelist.py +340 -0
- llava/lib/python3.10/distutils/tests/test_install.py +260 -0
- llava/lib/python3.10/distutils/tests/test_install_data.py +75 -0
- llava/lib/python3.10/distutils/tests/test_install_headers.py +39 -0
- llava/lib/python3.10/distutils/tests/test_msvccompiler.py +81 -0
- llava/lib/python3.10/distutils/tests/test_register.py +324 -0
- llava/lib/python3.10/distutils/tests/test_sysconfig.py +277 -0
- llava/lib/python3.10/distutils/tests/test_text_file.py +107 -0
- llava/lib/python3.10/distutils/tests/test_unixccompiler.py +145 -0
- llava/lib/python3.10/distutils/tests/test_upload.py +223 -0
- llava/lib/python3.10/distutils/tests/test_version.py +87 -0
llava/lib/python3.10/asyncore.py
ADDED
|
@@ -0,0 +1,649 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- Mode: Python -*-
|
| 2 |
+
# Id: asyncore.py,v 2.51 2000/09/07 22:29:26 rushing Exp
|
| 3 |
+
# Author: Sam Rushing <rushing@nightmare.com>
|
| 4 |
+
|
| 5 |
+
# ======================================================================
|
| 6 |
+
# Copyright 1996 by Sam Rushing
|
| 7 |
+
#
|
| 8 |
+
# All Rights Reserved
|
| 9 |
+
#
|
| 10 |
+
# Permission to use, copy, modify, and distribute this software and
|
| 11 |
+
# its documentation for any purpose and without fee is hereby
|
| 12 |
+
# granted, provided that the above copyright notice appear in all
|
| 13 |
+
# copies and that both that copyright notice and this permission
|
| 14 |
+
# notice appear in supporting documentation, and that the name of Sam
|
| 15 |
+
# Rushing not be used in advertising or publicity pertaining to
|
| 16 |
+
# distribution of the software without specific, written prior
|
| 17 |
+
# permission.
|
| 18 |
+
#
|
| 19 |
+
# SAM RUSHING DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
|
| 20 |
+
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
|
| 21 |
+
# NO EVENT SHALL SAM RUSHING BE LIABLE FOR ANY SPECIAL, INDIRECT OR
|
| 22 |
+
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
|
| 23 |
+
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
|
| 24 |
+
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
|
| 25 |
+
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
| 26 |
+
# ======================================================================
|
| 27 |
+
|
| 28 |
+
"""Basic infrastructure for asynchronous socket service clients and servers.
|
| 29 |
+
|
| 30 |
+
There are only two ways to have a program on a single processor do "more
|
| 31 |
+
than one thing at a time". Multi-threaded programming is the simplest and
|
| 32 |
+
most popular way to do it, but there is another very different technique,
|
| 33 |
+
that lets you have nearly all the advantages of multi-threading, without
|
| 34 |
+
actually using multiple threads. it's really only practical if your program
|
| 35 |
+
is largely I/O bound. If your program is CPU bound, then pre-emptive
|
| 36 |
+
scheduled threads are probably what you really need. Network servers are
|
| 37 |
+
rarely CPU-bound, however.
|
| 38 |
+
|
| 39 |
+
If your operating system supports the select() system call in its I/O
|
| 40 |
+
library (and nearly all do), then you can use it to juggle multiple
|
| 41 |
+
communication channels at once; doing other work while your I/O is taking
|
| 42 |
+
place in the "background." Although this strategy can seem strange and
|
| 43 |
+
complex, especially at first, it is in many ways easier to understand and
|
| 44 |
+
control than multi-threaded programming. The module documented here solves
|
| 45 |
+
many of the difficult problems for you, making the task of building
|
| 46 |
+
sophisticated high-performance network servers and clients a snap.
|
| 47 |
+
"""
|
| 48 |
+
|
| 49 |
+
import select
|
| 50 |
+
import socket
|
| 51 |
+
import sys
|
| 52 |
+
import time
|
| 53 |
+
import warnings
|
| 54 |
+
|
| 55 |
+
import os
|
| 56 |
+
from errno import EALREADY, EINPROGRESS, EWOULDBLOCK, ECONNRESET, EINVAL, \
|
| 57 |
+
ENOTCONN, ESHUTDOWN, EISCONN, EBADF, ECONNABORTED, EPIPE, EAGAIN, \
|
| 58 |
+
errorcode
|
| 59 |
+
|
| 60 |
+
warnings.warn(
|
| 61 |
+
'The asyncore module is deprecated and will be removed in Python 3.12. '
|
| 62 |
+
'The recommended replacement is asyncio',
|
| 63 |
+
DeprecationWarning,
|
| 64 |
+
stacklevel=2)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
_DISCONNECTED = frozenset({ECONNRESET, ENOTCONN, ESHUTDOWN, ECONNABORTED, EPIPE,
|
| 68 |
+
EBADF})
|
| 69 |
+
|
| 70 |
+
try:
|
| 71 |
+
socket_map
|
| 72 |
+
except NameError:
|
| 73 |
+
socket_map = {}
|
| 74 |
+
|
| 75 |
+
def _strerror(err):
|
| 76 |
+
try:
|
| 77 |
+
return os.strerror(err)
|
| 78 |
+
except (ValueError, OverflowError, NameError):
|
| 79 |
+
if err in errorcode:
|
| 80 |
+
return errorcode[err]
|
| 81 |
+
return "Unknown error %s" %err
|
| 82 |
+
|
| 83 |
+
class ExitNow(Exception):
|
| 84 |
+
pass
|
| 85 |
+
|
| 86 |
+
_reraised_exceptions = (ExitNow, KeyboardInterrupt, SystemExit)
|
| 87 |
+
|
| 88 |
+
def read(obj):
    """Deliver a readability event to *obj*, routing failures to its
    handle_error() hook (control-flow exceptions are re-raised)."""
    try:
        obj.handle_read_event()
    except _reraised_exceptions:
        raise
    except:
        # Deliberately bare: any other failure, including non-Exception
        # BaseExceptions, goes through the channel's error hook.
        obj.handle_error()
+
|
| 96 |
+
def write(obj):
    """Deliver a writability event to *obj*, routing failures to its
    handle_error() hook (control-flow exceptions are re-raised)."""
    try:
        obj.handle_write_event()
    except _reraised_exceptions:
        raise
    except:
        # Deliberately bare: see read() above.
        obj.handle_error()
+
|
| 104 |
+
def _exception(obj):
|
| 105 |
+
try:
|
| 106 |
+
obj.handle_expt_event()
|
| 107 |
+
except _reraised_exceptions:
|
| 108 |
+
raise
|
| 109 |
+
except:
|
| 110 |
+
obj.handle_error()
|
| 111 |
+
|
| 112 |
+
def readwrite(obj, flags):
    """Dispatch poll()-style event *flags* to the matching handlers of *obj*.

    POLLIN -> handle_read_event, POLLOUT -> handle_write_event,
    POLLPRI -> handle_expt_event; hangup/error/invalid-fd conditions
    close the channel.  Disconnect errnos are treated as a close, other
    errors go through handle_error().
    """
    try:
        if flags & select.POLLIN:
            obj.handle_read_event()
        if flags & select.POLLOUT:
            obj.handle_write_event()
        if flags & select.POLLPRI:
            obj.handle_expt_event()
        if flags & (select.POLLHUP | select.POLLERR | select.POLLNVAL):
            obj.handle_close()
    except OSError as exc:
        if exc.errno in _DISCONNECTED:
            obj.handle_close()
        else:
            obj.handle_error()
    except _reraised_exceptions:
        raise
    except:
        obj.handle_error()
+
|
| 132 |
+
def poll(timeout=0.0, map=None):
    """Run one iteration of the select()-based event loop.

    Each channel in *map* (default: the global socket map) is asked
    whether it is interested in read/write events; the interested file
    descriptors are passed to select() and the resulting events are
    dispatched.  With an empty interest set this simply sleeps for
    *timeout* seconds to keep the caller's loop from spinning.
    """
    if map is None:
        map = socket_map
    if not map:
        return
    rlist = []
    wlist = []
    xlist = []
    for fd, obj in list(map.items()):
        wants_read = obj.readable()
        wants_write = obj.writable()
        if wants_read:
            rlist.append(fd)
        # Accepting sockets should not be reported writable.
        if wants_write and not obj.accepting:
            wlist.append(fd)
        if wants_read or wants_write:
            xlist.append(fd)
    if [] == rlist == wlist == xlist:
        # Nothing to wait on: emulate select()'s timeout behaviour.
        time.sleep(timeout)
        return

    rlist, wlist, xlist = select.select(rlist, wlist, xlist, timeout)

    # Dispatch in the same order as before: reads, writes, exceptions.
    for ready, handler in ((rlist, read), (wlist, write), (xlist, _exception)):
        for fd in ready:
            obj = map.get(fd)
            if obj is None:
                continue
            handler(obj)
+
|
| 171 |
+
def poll2(timeout=0.0, map=None):
    """Run one iteration of the event loop using select.poll().

    Functionally equivalent to poll() above but backed by the poll()
    system call, which scales better with many descriptors.
    """
    if map is None:
        map = socket_map
    if timeout is not None:
        # select.poll() takes its timeout in milliseconds.
        timeout = int(timeout*1000)
    pollster = select.poll()
    if map:
        for fd, obj in list(map.items()):
            flags = 0
            if obj.readable():
                flags |= select.POLLIN | select.POLLPRI
            # Accepting sockets should not be reported writable.
            if obj.writable() and not obj.accepting:
                flags |= select.POLLOUT
            if flags:
                pollster.register(fd, flags)

        for fd, flags in pollster.poll(timeout):
            obj = map.get(fd)
            if obj is None:
                continue
            readwrite(obj, flags)

poll3 = poll2   # Kept as an alias for backward compatibility.
+
|
| 199 |
+
def loop(timeout=30.0, use_poll=False, map=None, count=None):
    """Drive the event loop until *map* is empty.

    If *count* is given, run at most that many polling iterations
    instead.  *use_poll* selects the poll()-based implementation when
    the platform provides it; otherwise select() is used.
    """
    if map is None:
        map = socket_map

    poll_fun = poll2 if use_poll and hasattr(select, 'poll') else poll

    if count is None:
        while map:
            poll_fun(timeout, map)
    else:
        while map and count > 0:
            poll_fun(timeout, map)
            count = count - 1
+
|
| 217 |
+
class dispatcher:
    """Event-driven wrapper around a non-blocking socket.

    Instances register themselves in a channel map (fd -> dispatcher) so
    the module-level poll loops can find them; subclasses override the
    handle_*() callbacks, which are invoked when select()/poll() reports
    activity on the wrapped socket.
    """

    debug = False
    connected = False
    accepting = False
    connecting = False
    closing = False
    addr = None
    # Message types that log_info() suppresses by default.
    ignore_log_types = frozenset({'warning'})

    def __init__(self, sock=None, map=None):
        self._map = socket_map if map is None else map

        self._fileno = None

        if sock:
            # Force non-blocking mode, in case the socket came from a
            # blocking source.
            sock.setblocking(False)
            self.set_socket(sock, map)
            self.connected = True
            # The socket passed in is not required to be connected yet.
            try:
                self.addr = sock.getpeername()
            except OSError as exc:
                if exc.errno in (ENOTCONN, EINVAL):
                    # Unconnected socket: usable, just not connected.
                    self.connected = False
                else:
                    # Broken in some unknown way: remove it from the map
                    # (so the loop stops polling it) and alert the caller.
                    self.del_channel(map)
                    raise
        else:
            self.socket = None

    def __repr__(self):
        parts = [self.__class__.__module__+"."+self.__class__.__qualname__]
        if self.accepting and self.addr:
            parts.append('listening')
        elif self.connected:
            parts.append('connected')
        if self.addr is not None:
            try:
                parts.append('%s:%d' % self.addr)
            except TypeError:
                # addr is not a (host, port) pair.
                parts.append(repr(self.addr))
        return '<%s at %#x>' % (' '.join(parts), id(self))

    def add_channel(self, map=None):
        # Register this dispatcher in the channel map under its fd.
        if map is None:
            map = self._map
        map[self._fileno] = self

    def del_channel(self, map=None):
        # Remove this dispatcher from the channel map and forget its fd.
        fileno = self._fileno
        if map is None:
            map = self._map
        if fileno in map:
            del map[fileno]
        self._fileno = None

    def create_socket(self, family=socket.AF_INET, type=socket.SOCK_STREAM):
        """Create a fresh non-blocking socket and adopt it as the channel."""
        self.family_and_type = family, type
        sock = socket.socket(family, type)
        sock.setblocking(False)
        self.set_socket(sock)

    def set_socket(self, sock, map=None):
        """Adopt *sock* as the wrapped socket and register the channel."""
        self.socket = sock
        self._fileno = sock.fileno()
        self.add_channel(map)

    def set_reuse_addr(self):
        """Best-effort request to allow re-binding the server port."""
        try:
            self.socket.setsockopt(
                socket.SOL_SOCKET, socket.SO_REUSEADDR,
                self.socket.getsockopt(socket.SOL_SOCKET,
                                       socket.SO_REUSEADDR) | 1
                )
        except OSError:
            pass

    # ==================================================
    # Predicates consulted by the poll loops: whether this
    # channel wants to be checked for read/write readiness.
    # ==================================================

    def readable(self):
        return True

    def writable(self):
        return True

    # ==================================================
    # Thin wrappers over the underlying socket methods.
    # ==================================================

    def listen(self, num):
        self.accepting = True
        if os.name == 'nt' and num > 5:
            # Historical Windows cap on the listen backlog.
            num = 5
        return self.socket.listen(num)

    def bind(self, addr):
        self.addr = addr
        return self.socket.bind(addr)

    def connect(self, address):
        """Begin a non-blocking connect to *address*."""
        self.connected = False
        self.connecting = True
        err = self.socket.connect_ex(address)
        if err in (EINPROGRESS, EALREADY, EWOULDBLOCK) \
        or err == EINVAL and os.name == 'nt':
            # Connect is in progress; completion shows up as a write event.
            self.addr = address
            return
        if err in (0, EISCONN):
            self.addr = address
            self.handle_connect_event()
        else:
            raise OSError(err, errorcode[err])

    def accept(self):
        """Accept one inbound connection; return (conn, addr) or None."""
        try:
            conn, addr = self.socket.accept()
        except TypeError:
            # Some platforms can return None from accept().
            return None
        except OSError as exc:
            if exc.errno in (EWOULDBLOCK, ECONNABORTED, EAGAIN):
                # Spurious wakeup or connection aborted: nothing to do.
                return None
            else:
                raise
        else:
            return conn, addr

    def send(self, data):
        """Send *data*; returns bytes written (0 if it would block)."""
        try:
            return self.socket.send(data)
        except OSError as exc:
            if exc.errno == EWOULDBLOCK:
                return 0
            elif exc.errno in _DISCONNECTED:
                self.handle_close()
                return 0
            else:
                raise

    def recv(self, buffer_size):
        """Receive up to *buffer_size* bytes; b'' means the peer closed."""
        try:
            data = self.socket.recv(buffer_size)
            if not data:
                # A closed connection is signalled by a read event whose
                # recv() returns no data.
                self.handle_close()
                return b''
            return data
        except OSError as exc:
            # winsock sometimes raises ENOTCONN here.
            if exc.errno in _DISCONNECTED:
                self.handle_close()
                return b''
            else:
                raise

    def close(self):
        """Unregister the channel and close the underlying socket."""
        self.connected = False
        self.accepting = False
        self.connecting = False
        self.del_channel()
        if self.socket is not None:
            try:
                self.socket.close()
            except OSError as exc:
                if exc.errno not in (ENOTCONN, EBADF):
                    raise

    # log and log_info may be overridden to provide more sophisticated
    # logging and warning methods.  In general, log is for 'hit' logging
    # and 'log_info' is for informational, warning and error logging.

    def log(self, message):
        sys.stderr.write('log: %s\n' % str(message))

    def log_info(self, message, type='info'):
        if type not in self.ignore_log_types:
            print('%s: %s' % (type, message))

    def handle_read_event(self):
        if self.accepting:
            # Accepting sockets never become 'connected'; readability
            # means a new inbound connection is waiting.
            self.handle_accept()
        elif not self.connected:
            if self.connecting:
                self.handle_connect_event()
            self.handle_read()
        else:
            self.handle_read()

    def handle_connect_event(self):
        # A pending connect finished; find out whether it succeeded.
        err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
        if err != 0:
            raise OSError(err, _strerror(err))
        self.handle_connect()
        self.connected = True
        self.connecting = False

    def handle_write_event(self):
        if self.accepting:
            # Accepting sockets shouldn't get a write event; pretend it
            # didn't happen.
            return

        if not self.connected:
            if self.connecting:
                self.handle_connect_event()
        self.handle_write()

    def handle_expt_event(self):
        # Called when there may be out-of-band data or an error condition
        # on the socket; check the error condition first.
        err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
        if err != 0:
            # select() can report an exceptional condition for a socket
            # error; close the channel just like a handle_read() that
            # received no data would.
            self.handle_close()
        else:
            self.handle_expt()

    def handle_error(self):
        _, exc_type, exc_value, tb_text = compact_traceback()

        # A user-supplied __repr__ may itself crash; fall back gracefully.
        try:
            self_repr = repr(self)
        except:
            self_repr = '<__repr__(self) failed for object at %0x>' % id(self)

        self.log_info(
            'uncaptured python exception, closing channel %s (%s:%s %s)' % (
                self_repr,
                exc_type,
                exc_value,
                tb_text
                ),
            'error'
            )
        self.handle_close()

    def handle_expt(self):
        self.log_info('unhandled incoming priority event', 'warning')

    def handle_read(self):
        self.log_info('unhandled read event', 'warning')

    def handle_write(self):
        self.log_info('unhandled write event', 'warning')

    def handle_connect(self):
        self.log_info('unhandled connect event', 'warning')

    def handle_accept(self):
        conn_addr = self.accept()
        if conn_addr is not None:
            self.handle_accepted(*conn_addr)

    def handle_accepted(self, sock, addr):
        sock.close()
        self.log_info('unhandled accepted event', 'warning')

    def handle_close(self):
        self.log_info('unhandled close event', 'warning')
        self.close()
+
|
| 508 |
+
# ---------------------------------------------------------------------------
|
| 509 |
+
# adds simple buffered output capability, useful for simple clients.
|
| 510 |
+
# [for more sophisticated usage use asynchat.async_chat]
|
| 511 |
+
# ---------------------------------------------------------------------------
|
| 512 |
+
|
| 513 |
+
class dispatcher_with_send(dispatcher):
    """A dispatcher with a simple buffered-output capability.

    send() appends to an internal byte buffer which is drained as the
    socket becomes writable; useful for simple clients.  For more
    sophisticated usage use asynchat.async_chat.
    """

    def __init__(self, sock=None, map=None):
        dispatcher.__init__(self, sock, map)
        # Bytes queued for transmission but not yet written to the socket.
        self.out_buffer = b''

    def initiate_send(self):
        """Try to push up to 64 KiB of the buffered data out the socket."""
        # NOTE: the original code assigned num_sent = 0 here only to
        # overwrite it on the next line; the dead store has been removed.
        num_sent = dispatcher.send(self, self.out_buffer[:65536])
        self.out_buffer = self.out_buffer[num_sent:]

    def handle_write(self):
        self.initiate_send()

    def writable(self):
        # Stay writable while connecting (to learn when the connect
        # completes) or while buffered data remains to be sent.
        return (not self.connected) or len(self.out_buffer)

    def send(self, data):
        """Queue *data* for output and attempt an immediate send."""
        if self.debug:
            self.log_info('sending %s' % repr(data))
        self.out_buffer = self.out_buffer + data
        self.initiate_send()
+
|
| 536 |
+
# ---------------------------------------------------------------------------
|
| 537 |
+
# used for debugging.
|
| 538 |
+
# ---------------------------------------------------------------------------
|
| 539 |
+
|
| 540 |
+
def compact_traceback():
    """Return a compact rendering of the active exception's traceback.

    Returns ((file, function, line), exc_type, exc_value, info) where
    *info* is a single string of '[file|function|line]' segments, one
    per traceback frame.  Must be called while an exception is being
    handled; raises AssertionError otherwise.
    """
    exc_type, exc_value, tb = sys.exc_info()
    if not tb:  # Must have a traceback
        raise AssertionError("traceback does not exist")
    frames = []
    while tb:
        code = tb.tb_frame.f_code
        frames.append((code.co_filename, code.co_name, str(tb.tb_lineno)))
        tb = tb.tb_next

    # Drop our traceback reference to avoid creating a reference cycle.
    del tb

    fname, func, line = frames[-1]
    info = ' '.join('[%s|%s|%s]' % frame for frame in frames)
    return (fname, func, line), exc_type, exc_value, info
+
|
| 560 |
+
def close_all(map=None, ignore_all=False):
    """Close every channel in *map* (default: the global socket map).

    OSError(EBADF) from an individual close is always ignored; other
    errors propagate unless *ignore_all* is true.  Loop-control
    exceptions are always re-raised.  The map is cleared afterwards.
    """
    if map is None:
        map = socket_map
    for channel in list(map.values()):
        try:
            channel.close()
        except OSError as exc:
            if exc.errno == EBADF:
                # Descriptor already gone: nothing left to close.
                pass
            elif not ignore_all:
                raise
        except _reraised_exceptions:
            raise
        except:
            if not ignore_all:
                raise
    map.clear()
+
|
| 578 |
+
# Asynchronous File I/O:
|
| 579 |
+
#
|
| 580 |
+
# After a little research (reading man pages on various unixen, and
|
| 581 |
+
# digging through the linux kernel), I've determined that select()
|
| 582 |
+
# isn't meant for doing asynchronous file i/o.
|
| 583 |
+
# Heartening, though - reading linux/mm/filemap.c shows that linux
|
| 584 |
+
# supports asynchronous read-ahead. So _MOST_ of the time, the data
|
| 585 |
+
# will be sitting in memory for us already when we go to read it.
|
| 586 |
+
#
|
| 587 |
+
# What other OS's (besides NT) support async file i/o? [VMS?]
|
| 588 |
+
#
|
| 589 |
+
# Regardless, this is useful for pipes, and stdin/stdout...
|
| 590 |
+
|
| 591 |
+
if os.name == 'posix':
    class file_wrapper:
        """Duck-type just enough of the socket API over a raw file
        descriptor that asyncore can treat a file like a socket.
        The fd passed in is os.dup()'d, so the caller keeps ownership
        of the original descriptor.
        """

        def __init__(self, fd):
            self.fd = os.dup(fd)

        def __del__(self):
            if self.fd >= 0:
                warnings.warn("unclosed file %r" % self, ResourceWarning,
                              source=self)
            self.close()

        def recv(self, *args):
            return os.read(self.fd, *args)

        def send(self, *args):
            return os.write(self.fd, *args)

        def getsockopt(self, level, optname, buflen=None):
            # Pretend the "socket" never has a pending error; any other
            # socket option query is unsupported on a plain fd.
            if (level == socket.SOL_SOCKET and
                optname == socket.SO_ERROR and
                not buflen):
                return 0
            raise NotImplementedError("Only asyncore specific behaviour "
                                      "implemented.")

        read = recv
        write = send

        def close(self):
            if self.fd < 0:
                # Already closed; close() is idempotent.
                return
            fd = self.fd
            self.fd = -1
            os.close(fd)

        def fileno(self):
            return self.fd
|
| 633 |
+
class file_dispatcher(dispatcher):
|
| 634 |
+
|
| 635 |
+
def __init__(self, fd, map=None):
|
| 636 |
+
dispatcher.__init__(self, None, map)
|
| 637 |
+
self.connected = True
|
| 638 |
+
try:
|
| 639 |
+
fd = fd.fileno()
|
| 640 |
+
except AttributeError:
|
| 641 |
+
pass
|
| 642 |
+
self.set_file(fd)
|
| 643 |
+
# set it to non-blocking mode
|
| 644 |
+
os.set_blocking(fd, False)
|
| 645 |
+
|
| 646 |
+
def set_file(self, fd):
|
| 647 |
+
self.socket = file_wrapper(fd)
|
| 648 |
+
self._fileno = self.socket.fileno()
|
| 649 |
+
self.add_channel()
|
llava/lib/python3.10/contextlib.py
ADDED
|
@@ -0,0 +1,745 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utilities for with-statement contexts. See PEP 343."""
|
| 2 |
+
import abc
|
| 3 |
+
import sys
|
| 4 |
+
import _collections_abc
|
| 5 |
+
from collections import deque
|
| 6 |
+
from functools import wraps
|
| 7 |
+
from types import MethodType, GenericAlias
|
| 8 |
+
|
| 9 |
+
__all__ = ["asynccontextmanager", "contextmanager", "closing", "nullcontext",
|
| 10 |
+
"AbstractContextManager", "AbstractAsyncContextManager",
|
| 11 |
+
"AsyncExitStack", "ContextDecorator", "ExitStack",
|
| 12 |
+
"redirect_stdout", "redirect_stderr", "suppress", "aclosing"]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class AbstractContextManager(abc.ABC):

    """An abstract base class for context managers."""

    # Support parameterized use in type hints, e.g.
    # AbstractContextManager[SomeType].
    __class_getitem__ = classmethod(GenericAlias)

    def __enter__(self):
        """Return `self` upon entering the runtime context."""
        return self

    @abc.abstractmethod
    def __exit__(self, exc_type, exc_value, traceback):
        """Raise any exception triggered within the runtime context."""
        # Returning None (falsy) means exceptions propagate by default.
        return None

    @classmethod
    def __subclasshook__(cls, C):
        # Structural check: any class defining both __enter__ and
        # __exit__ is considered a virtual subclass.
        if cls is AbstractContextManager:
            return _collections_abc._check_methods(C, "__enter__", "__exit__")
        return NotImplemented
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class AbstractAsyncContextManager(abc.ABC):

    """An abstract base class for asynchronous context managers."""

    # Support parameterized use in type hints, e.g.
    # AbstractAsyncContextManager[SomeType].
    __class_getitem__ = classmethod(GenericAlias)

    async def __aenter__(self):
        """Return `self` upon entering the runtime context."""
        return self

    @abc.abstractmethod
    async def __aexit__(self, exc_type, exc_value, traceback):
        """Raise any exception triggered within the runtime context."""
        # Returning None (falsy) means exceptions propagate by default.
        return None

    @classmethod
    def __subclasshook__(cls, C):
        # Structural check: any class defining both __aenter__ and
        # __aexit__ is considered a virtual subclass.
        if cls is AbstractAsyncContextManager:
            return _collections_abc._check_methods(C, "__aenter__",
                                                   "__aexit__")
        return NotImplemented
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class ContextDecorator(object):
    "A base class or mixin that enables context managers to work as decorators."

    def _recreate_cm(self):
        """Return a recreated instance of self.

        Allows an otherwise one-shot context manager like
        _GeneratorContextManager to support use as
        a decorator via implicit recreation.

        This is a private interface just for _GeneratorContextManager.
        See issue #11647 for details.
        """
        return self

    def __call__(self, func):
        # Decorating a function wraps each call in a (possibly freshly
        # recreated) instance of this context manager.
        @wraps(func)
        def wrapper(*call_args, **call_kwargs):
            with self._recreate_cm():
                return func(*call_args, **call_kwargs)
        return wrapper
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class AsyncContextDecorator(object):
    "A base class or mixin that enables async context managers to work as decorators."

    def _recreate_cm(self):
        """Return a recreated instance of self.
        """
        return self

    def __call__(self, func):
        # Decorating a coroutine function wraps each await in a
        # (possibly freshly recreated) instance of this async CM.
        @wraps(func)
        async def wrapper(*call_args, **call_kwargs):
            async with self._recreate_cm():
                return await func(*call_args, **call_kwargs)
        return wrapper
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class _GeneratorContextManagerBase:
    """Shared functionality for @contextmanager and @asynccontextmanager."""

    def __init__(self, func, args, kwds):
        # Start the (async) generator immediately; __enter__/__aenter__
        # will advance it to the first yield.
        self.gen = func(*args, **kwds)
        # Kept only so _recreate_cm() can build a fresh instance when the
        # CM is used as a decorator.
        self.func, self.args, self.kwds = func, args, kwds
        # Issue 19330: ensure context manager instances have good docstrings
        doc = getattr(func, "__doc__", None)
        if doc is None:
            doc = type(self).__doc__
        self.__doc__ = doc
        # Unfortunately, this still doesn't provide good help output when
        # inspecting the created context manager instances, since pydoc
        # currently bypasses the instance docstring and shows the docstring
        # for the class instead.
        # See http://bugs.python.org/issue19404 for more details.

    def _recreate_cm(self):
        # _GCMB instances are one-shot context managers, so the
        # CM must be recreated each time a decorated function is
        # called
        return self.__class__(self.func, self.args, self.kwds)
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
class _GeneratorContextManager(
    _GeneratorContextManagerBase,
    AbstractContextManager,
    ContextDecorator,
):
    """Helper for @contextmanager decorator."""

    def __enter__(self):
        # do not keep args and kwds alive unnecessarily
        # they are only needed for recreation, which is not possible anymore
        del self.args, self.kwds, self.func
        try:
            # Run the generator up to its first yield; the yielded value
            # becomes the "as" target of the with statement.
            return next(self.gen)
        except StopIteration:
            raise RuntimeError("generator didn't yield") from None

    def __exit__(self, typ, value, traceback):
        if typ is None:
            # No exception in the with-body: resume the generator, which
            # must now finish (a second yield is a protocol violation).
            try:
                next(self.gen)
            except StopIteration:
                return False
            else:
                raise RuntimeError("generator didn't stop")
        else:
            if value is None:
                # Need to force instantiation so we can reliably
                # tell if we get the same exception back
                value = typ()
            try:
                # Inject the with-body exception at the yield point so the
                # generator's try/finally (or except) can see it.
                self.gen.throw(typ, value, traceback)
            except StopIteration as exc:
                # Suppress StopIteration *unless* it's the same exception that
                # was passed to throw().  This prevents a StopIteration
                # raised inside the "with" statement from being suppressed.
                return exc is not value
            except RuntimeError as exc:
                # Don't re-raise the passed in exception. (issue27122)
                if exc is value:
                    return False
                # Avoid suppressing if a StopIteration exception
                # was passed to throw() and later wrapped into a RuntimeError
                # (see PEP 479 for sync generators; async generators also
                # have this behavior). But do this only if the exception wrapped
                # by the RuntimeError is actually Stop(Async)Iteration (see
                # issue29692).
                if (
                    isinstance(value, StopIteration)
                    and exc.__cause__ is value
                ):
                    return False
                raise
            except BaseException as exc:
                # only re-raise if it's *not* the exception that was
                # passed to throw(), because __exit__() must not raise
                # an exception unless __exit__() itself failed.  But throw()
                # has to raise the exception to signal propagation, so this
                # fixes the impedance mismatch between the throw() protocol
                # and the __exit__() protocol.
                if exc is not value:
                    raise
                return False
            # throw() returned normally, meaning the generator yielded
            # again instead of finishing -- a protocol violation.
            raise RuntimeError("generator didn't stop after throw()")
|
| 186 |
+
|
| 187 |
+
class _AsyncGeneratorContextManager(
    _GeneratorContextManagerBase,
    AbstractAsyncContextManager,
    AsyncContextDecorator,
):
    """Helper for @asynccontextmanager decorator."""

    async def __aenter__(self):
        # do not keep args and kwds alive unnecessarily
        # they are only needed for recreation, which is not possible anymore
        del self.args, self.kwds, self.func
        try:
            # Advance the async generator to its first yield; the yielded
            # value becomes the "as" target of the async with statement.
            return await anext(self.gen)
        except StopAsyncIteration:
            raise RuntimeError("generator didn't yield") from None

    async def __aexit__(self, typ, value, traceback):
        if typ is None:
            # No exception in the body: resume the generator, which must
            # now finish (a second yield is a protocol violation).
            try:
                await anext(self.gen)
            except StopAsyncIteration:
                return False
            else:
                raise RuntimeError("generator didn't stop")
        else:
            if value is None:
                # Need to force instantiation so we can reliably
                # tell if we get the same exception back
                value = typ()
            try:
                # Inject the body's exception at the yield point.
                await self.gen.athrow(typ, value, traceback)
            except StopAsyncIteration as exc:
                # Suppress StopIteration *unless* it's the same exception that
                # was passed to throw().  This prevents a StopIteration
                # raised inside the "with" statement from being suppressed.
                return exc is not value
            except RuntimeError as exc:
                # Don't re-raise the passed in exception. (issue27122)
                if exc is value:
                    return False
                # Avoid suppressing if a Stop(Async)Iteration exception
                # was passed to athrow() and later wrapped into a RuntimeError
                # (see PEP 479 for sync generators; async generators also
                # have this behavior). But do this only if the exception wrapped
                # by the RuntimeError is actually Stop(Async)Iteration (see
                # issue29692).
                if (
                    isinstance(value, (StopIteration, StopAsyncIteration))
                    and exc.__cause__ is value
                ):
                    return False
                raise
            except BaseException as exc:
                # only re-raise if it's *not* the exception that was
                # passed to throw(), because __exit__() must not raise
                # an exception unless __exit__() itself failed.  But throw()
                # has to raise the exception to signal propagation, so this
                # fixes the impedance mismatch between the throw() protocol
                # and the __exit__() protocol.
                if exc is not value:
                    raise
                return False
            # athrow() returned normally: the generator yielded again
            # instead of finishing -- a protocol violation.
            raise RuntimeError("generator didn't stop after athrow()")
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
def contextmanager(func):
    """@contextmanager decorator.

    Typical usage:

        @contextmanager
        def some_generator(<arguments>):
            <setup>
            try:
                yield <value>
            finally:
                <cleanup>

    This makes this:

        with some_generator(<arguments>) as <variable>:
            <body>

    equivalent to this:

        <setup>
        try:
            <variable> = <value>
            <body>
        finally:
            <cleanup>
    """
    # Each call to the decorated function produces a fresh one-shot
    # context manager wrapping a new generator instance.
    @wraps(func)
    def _make_cm(*cm_args, **cm_kwargs):
        return _GeneratorContextManager(func, cm_args, cm_kwargs)
    return _make_cm
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
def asynccontextmanager(func):
    """@asynccontextmanager decorator.

    Typical usage:

        @asynccontextmanager
        async def some_async_generator(<arguments>):
            <setup>
            try:
                yield <value>
            finally:
                <cleanup>

    This makes this:

        async with some_async_generator(<arguments>) as <variable>:
            <body>

    equivalent to this:

        <setup>
        try:
            <variable> = <value>
            <body>
        finally:
            <cleanup>
    """
    # Each call to the decorated coroutine function produces a fresh
    # one-shot async context manager wrapping a new async generator.
    @wraps(func)
    def _make_cm(*cm_args, **cm_kwargs):
        return _AsyncGeneratorContextManager(func, cm_args, cm_kwargs)
    return _make_cm
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
class closing(AbstractContextManager):
    """Context to automatically close something at the end of a block.

    Code like this:

        with closing(<module>.open(<arguments>)) as f:
            <block>

    is equivalent to this:

        f = <module>.open(<arguments>)
        try:
            <block>
        finally:
            f.close()

    """

    def __init__(self, thing):
        # Remember the object so __exit__ can close it.
        self.thing = thing

    def __enter__(self):
        # The wrapped object itself is bound to the "as" target.
        return self.thing

    def __exit__(self, *exc_info):
        # Always close, whether or not an exception occurred; exceptions
        # are never suppressed (implicit None return).
        self.thing.close()
|
| 341 |
+
|
| 342 |
+
|
| 343 |
+
class aclosing(AbstractAsyncContextManager):
    """Async context manager for safely finalizing an asynchronously cleaned-up
    resource such as an async generator, calling its ``aclose()`` method.

    Code like this:

        async with aclosing(<module>.fetch(<arguments>)) as agen:
            <block>

    is equivalent to this:

        agen = <module>.fetch(<arguments>)
        try:
            <block>
        finally:
            await agen.aclose()

    """

    def __init__(self, thing):
        # Remember the object so __aexit__ can finalize it.
        self.thing = thing

    async def __aenter__(self):
        # The wrapped object itself is bound to the "as" target.
        return self.thing

    async def __aexit__(self, *exc_info):
        # Always finalize; exceptions are never suppressed.
        await self.thing.aclose()
|
| 367 |
+
|
| 368 |
+
|
| 369 |
+
class _RedirectStream(AbstractContextManager):
    """Common machinery for redirect_stdout/redirect_stderr."""

    # Name of the sys attribute to swap out; set by subclasses.
    _stream = None

    def __init__(self, new_target):
        self._new_target = new_target
        # We use a list of old targets to make this CM re-entrant
        self._old_targets = []

    def __enter__(self):
        current = getattr(sys, self._stream)
        self._old_targets.append(current)
        setattr(sys, self._stream, self._new_target)
        return self._new_target

    def __exit__(self, exctype, excinst, exctb):
        previous = self._old_targets.pop()
        setattr(sys, self._stream, previous)
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
class redirect_stdout(_RedirectStream):
    """Context manager for temporarily redirecting stdout to another file.

    # How to send help() to stderr
    with redirect_stdout(sys.stderr):
        help(dir)

    # How to write help() to a file
    with open('help.txt', 'w') as f:
        with redirect_stdout(f):
            help(pow)
    """

    # All logic lives in _RedirectStream; this only selects sys.stdout.
    _stream = "stdout"
|
| 401 |
+
|
| 402 |
+
|
| 403 |
+
class redirect_stderr(_RedirectStream):
    """Context manager for temporarily redirecting stderr to another file."""

    # All logic lives in _RedirectStream; this only selects sys.stderr.
    _stream = "stderr"
|
| 407 |
+
|
| 408 |
+
|
| 409 |
+
class suppress(AbstractContextManager):
    """Context manager to suppress specified exceptions

    After the exception is suppressed, execution proceeds with the next
    statement following the with statement.

        with suppress(FileNotFoundError):
            os.remove(somefile)
        # Execution still resumes here if the file was already removed
    """

    def __init__(self, *exceptions):
        self._exceptions = exceptions

    def __enter__(self):
        pass

    def __exit__(self, exctype, excinst, exctb):
        # Unlike isinstance and issubclass, CPython exception handling
        # currently only looks at the concrete type hierarchy (ignoring
        # the instance and subclass checking hooks). While Guido considers
        # that a bug rather than a feature, it's a fairly hard one to fix
        # due to various internal implementation details. suppress provides
        # the simpler issubclass based semantics, rather than trying to
        # exactly reproduce the limitations of the CPython interpreter.
        #
        # See http://bugs.python.org/issue12029 for more details
        if exctype is None:
            # No exception occurred: nothing to suppress.
            return False
        return issubclass(exctype, self._exceptions)
|
| 437 |
+
|
| 438 |
+
|
| 439 |
+
class _BaseExitStack:
    """A base class for ExitStack and AsyncExitStack."""

    @staticmethod
    def _create_exit_wrapper(cm, cm_exit):
        # Bind the unbound __exit__ method to its context manager so it
        # can later be invoked as cb(exc_type, exc, tb).
        return MethodType(cm_exit, cm)

    @staticmethod
    def _create_cb_wrapper(callback, /, *args, **kwds):
        # Adapt an arbitrary callback to the __exit__ signature.  The
        # callback's return value is discarded, so plain callbacks can
        # never suppress exceptions.
        def _exit_wrapper(exc_type, exc, tb):
            callback(*args, **kwds)
        return _exit_wrapper

    def __init__(self):
        # LIFO stack of (is_sync, callback) pairs; is_sync distinguishes
        # synchronous callbacks from coroutine ones (used by AsyncExitStack).
        self._exit_callbacks = deque()

    def pop_all(self):
        """Preserve the context stack by transferring it to a new instance."""
        new_stack = type(self)()
        new_stack._exit_callbacks = self._exit_callbacks
        self._exit_callbacks = deque()
        return new_stack

    def push(self, exit):
        """Registers a callback with the standard __exit__ method signature.

        Can suppress exceptions the same way __exit__ method can.
        Also accepts any object with an __exit__ method (registering a call
        to the method instead of the object itself).
        """
        # We use an unbound method rather than a bound method to follow
        # the standard lookup behaviour for special methods.
        _cb_type = type(exit)

        try:
            exit_method = _cb_type.__exit__
        except AttributeError:
            # Not a context manager, so assume it's a callable.
            self._push_exit_callback(exit)
        else:
            self._push_cm_exit(exit, exit_method)
        return exit  # Allow use as a decorator.

    def enter_context(self, cm):
        """Enters the supplied context manager.

        If successful, also pushes its __exit__ method as a callback and
        returns the result of the __enter__ method.
        """
        # We look up the special methods on the type to match the with
        # statement.
        _cm_type = type(cm)
        # Look up __exit__ *before* calling __enter__ so a missing exit
        # method fails fast, without entering the context.
        _exit = _cm_type.__exit__
        result = _cm_type.__enter__(cm)
        self._push_cm_exit(cm, _exit)
        return result

    def callback(self, callback, /, *args, **kwds):
        """Registers an arbitrary callback and arguments.

        Cannot suppress exceptions.
        """
        _exit_wrapper = self._create_cb_wrapper(callback, *args, **kwds)

        # We changed the signature, so using @wraps is not appropriate, but
        # setting __wrapped__ may still help with introspection.
        _exit_wrapper.__wrapped__ = callback
        self._push_exit_callback(_exit_wrapper)
        return callback  # Allow use as a decorator

    def _push_cm_exit(self, cm, cm_exit):
        """Helper to correctly register callbacks to __exit__ methods."""
        _exit_wrapper = self._create_exit_wrapper(cm, cm_exit)
        self._push_exit_callback(_exit_wrapper, True)

    def _push_exit_callback(self, callback, is_sync=True):
        # Append to the deque; callbacks are popped (LIFO) during unwind.
        self._exit_callbacks.append((is_sync, callback))
|
| 516 |
+
|
| 517 |
+
|
| 518 |
+
# Inspired by discussions on http://bugs.python.org/issue13585
|
| 519 |
+
class ExitStack(_BaseExitStack, AbstractContextManager):
    """Context manager for dynamic management of a stack of exit callbacks.

    For example:
        with ExitStack() as stack:
            files = [stack.enter_context(open(fname)) for fname in filenames]
            # All opened files will automatically be closed at the end of
            # the with statement, even if attempts to open files later
            # in the list raise an exception.
    """

    def __enter__(self):
        return self

    def __exit__(self, *exc_details):
        received_exc = exc_details[0] is not None

        # We manipulate the exception state so it behaves as though
        # we were actually nesting multiple with statements
        frame_exc = sys.exc_info()[1]
        def _fix_exception_context(new_exc, old_exc):
            # Context may not be correct, so find the end of the chain
            while 1:
                exc_context = new_exc.__context__
                if exc_context is None or exc_context is old_exc:
                    # Context is already set correctly (see issue 20317)
                    return
                if exc_context is frame_exc:
                    break
                new_exc = exc_context
            # Change the end of the chain to point to the exception
            # we expect it to reference
            new_exc.__context__ = old_exc

        # Callbacks are invoked in LIFO order to match the behaviour of
        # nested context managers
        suppressed_exc = False
        pending_raise = False
        while self._exit_callbacks:
            is_sync, cb = self._exit_callbacks.pop()
            # The synchronous ExitStack only ever registers sync callbacks.
            assert is_sync
            try:
                if cb(*exc_details):
                    # Callback suppressed the current exception; clear it
                    # for the remaining callbacks.
                    suppressed_exc = True
                    pending_raise = False
                    exc_details = (None, None, None)
            except:
                new_exc_details = sys.exc_info()
                # simulate the stack of exceptions by setting the context
                _fix_exception_context(new_exc_details[1], exc_details[1])
                pending_raise = True
                exc_details = new_exc_details
        if pending_raise:
            try:
                # bare "raise exc_details[1]" replaces our carefully
                # set-up context
                fixed_ctx = exc_details[1].__context__
                raise exc_details[1]
            except BaseException:
                exc_details[1].__context__ = fixed_ctx
                raise
        # Only report suppression if an exception actually came in.
        return received_exc and suppressed_exc

    def close(self):
        """Immediately unwind the context stack."""
        self.__exit__(None, None, None)
|
| 585 |
+
|
| 586 |
+
|
| 587 |
+
# Inspired by discussions on https://bugs.python.org/issue29302
|
| 588 |
+
class AsyncExitStack(_BaseExitStack, AbstractAsyncContextManager):
    """Async context manager for dynamic management of a stack of exit
    callbacks.

    For example:
        async with AsyncExitStack() as stack:
            connections = [await stack.enter_async_context(get_connection())
                for i in range(5)]
            # All opened connections will automatically be released at the
            # end of the async with statement, even if attempts to open a
            # connection later in the list raise an exception.
    """

    @staticmethod
    def _create_async_exit_wrapper(cm, cm_exit):
        # Bind the unbound __aexit__ method to its context manager.
        return MethodType(cm_exit, cm)

    @staticmethod
    def _create_async_cb_wrapper(callback, /, *args, **kwds):
        # Adapt an arbitrary coroutine function to the __aexit__ signature;
        # its return value is discarded, so it can never suppress exceptions.
        async def _exit_wrapper(exc_type, exc, tb):
            await callback(*args, **kwds)
        return _exit_wrapper

    async def enter_async_context(self, cm):
        """Enters the supplied async context manager.

        If successful, also pushes its __aexit__ method as a callback and
        returns the result of the __aenter__ method.
        """
        _cm_type = type(cm)
        # Look up __aexit__ before awaiting __aenter__ so a missing exit
        # method fails fast, without entering the context.
        _exit = _cm_type.__aexit__
        result = await _cm_type.__aenter__(cm)
        self._push_async_cm_exit(cm, _exit)
        return result

    def push_async_exit(self, exit):
        """Registers a coroutine function with the standard __aexit__ method
        signature.

        Can suppress exceptions the same way __aexit__ method can.
        Also accepts any object with an __aexit__ method (registering a call
        to the method instead of the object itself).
        """
        _cb_type = type(exit)
        try:
            exit_method = _cb_type.__aexit__
        except AttributeError:
            # Not an async context manager, so assume it's a coroutine function
            self._push_exit_callback(exit, False)
        else:
            self._push_async_cm_exit(exit, exit_method)
        return exit  # Allow use as a decorator

    def push_async_callback(self, callback, /, *args, **kwds):
        """Registers an arbitrary coroutine function and arguments.

        Cannot suppress exceptions.
        """
        _exit_wrapper = self._create_async_cb_wrapper(callback, *args, **kwds)

        # We changed the signature, so using @wraps is not appropriate, but
        # setting __wrapped__ may still help with introspection.
        _exit_wrapper.__wrapped__ = callback
        self._push_exit_callback(_exit_wrapper, False)
        return callback  # Allow use as a decorator

    async def aclose(self):
        """Immediately unwind the context stack."""
        await self.__aexit__(None, None, None)

    def _push_async_cm_exit(self, cm, cm_exit):
        """Helper to correctly register coroutine function to __aexit__
        method."""
        _exit_wrapper = self._create_async_exit_wrapper(cm, cm_exit)
        self._push_exit_callback(_exit_wrapper, False)

    async def __aenter__(self):
        return self

    async def __aexit__(self, *exc_details):
        received_exc = exc_details[0] is not None

        # We manipulate the exception state so it behaves as though
        # we were actually nesting multiple with statements
        frame_exc = sys.exc_info()[1]
        def _fix_exception_context(new_exc, old_exc):
            # Context may not be correct, so find the end of the chain
            while 1:
                exc_context = new_exc.__context__
                if exc_context is None or exc_context is old_exc:
                    # Context is already set correctly (see issue 20317)
                    return
                if exc_context is frame_exc:
                    break
                new_exc = exc_context
            # Change the end of the chain to point to the exception
            # we expect it to reference
            new_exc.__context__ = old_exc

        # Callbacks are invoked in LIFO order to match the behaviour of
        # nested context managers
        suppressed_exc = False
        pending_raise = False
        while self._exit_callbacks:
            is_sync, cb = self._exit_callbacks.pop()
            try:
                # Sync callbacks are called directly; async ones are awaited.
                if is_sync:
                    cb_suppress = cb(*exc_details)
                else:
                    cb_suppress = await cb(*exc_details)

                if cb_suppress:
                    # Callback suppressed the current exception; clear it
                    # for the remaining callbacks.
                    suppressed_exc = True
                    pending_raise = False
                    exc_details = (None, None, None)
            except:
                new_exc_details = sys.exc_info()
                # simulate the stack of exceptions by setting the context
                _fix_exception_context(new_exc_details[1], exc_details[1])
                pending_raise = True
                exc_details = new_exc_details
        if pending_raise:
            try:
                # bare "raise exc_details[1]" replaces our carefully
                # set-up context
                fixed_ctx = exc_details[1].__context__
                raise exc_details[1]
            except BaseException:
                exc_details[1].__context__ = fixed_ctx
                raise
        # Only report suppression if an exception actually came in.
        return received_exc and suppressed_exc
|
| 719 |
+
|
| 720 |
+
|
| 721 |
+
class nullcontext(AbstractContextManager, AbstractAsyncContextManager):
    """A no-op context manager, usable in both sync and async ``with``.

    Handy when a block of code only sometimes needs a real context
    manager::

        cm = optional_cm if condition else nullcontext()
        with cm:
            # Perform operation, using optional_cm if condition is True

    The value passed as ``enter_result`` is handed back unchanged from
    ``__enter__`` / ``__aenter__``.
    """

    def __init__(self, enter_result=None):
        # Value to return from the (a)enter hooks; defaults to None.
        self.enter_result = enter_result

    def __enter__(self):
        return self.enter_result

    def __exit__(self, *excinfo):
        # Implicitly returns None (falsy): exceptions are never suppressed.
        pass

    async def __aenter__(self):
        return self.enter_result

    async def __aexit__(self, *excinfo):
        pass
|
llava/lib/python3.10/copy.py
ADDED
|
@@ -0,0 +1,304 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Generic (shallow and deep) copying operations.
|
| 2 |
+
|
| 3 |
+
Interface summary:
|
| 4 |
+
|
| 5 |
+
import copy
|
| 6 |
+
|
| 7 |
+
x = copy.copy(y) # make a shallow copy of y
|
| 8 |
+
x = copy.deepcopy(y) # make a deep copy of y
|
| 9 |
+
|
| 10 |
+
For module specific errors, copy.Error is raised.
|
| 11 |
+
|
| 12 |
+
The difference between shallow and deep copying is only relevant for
|
| 13 |
+
compound objects (objects that contain other objects, like lists or
|
| 14 |
+
class instances).
|
| 15 |
+
|
| 16 |
+
- A shallow copy constructs a new compound object and then (to the
|
| 17 |
+
extent possible) inserts *the same objects* into it that the
|
| 18 |
+
original contains.
|
| 19 |
+
|
| 20 |
+
- A deep copy constructs a new compound object and then, recursively,
|
| 21 |
+
inserts *copies* into it of the objects found in the original.
|
| 22 |
+
|
| 23 |
+
Two problems often exist with deep copy operations that don't exist
|
| 24 |
+
with shallow copy operations:
|
| 25 |
+
|
| 26 |
+
a) recursive objects (compound objects that, directly or indirectly,
|
| 27 |
+
contain a reference to themselves) may cause a recursive loop
|
| 28 |
+
|
| 29 |
+
b) because deep copy copies *everything* it may copy too much, e.g.
|
| 30 |
+
administrative data structures that should be shared even between
|
| 31 |
+
copies
|
| 32 |
+
|
| 33 |
+
Python's deep copy operation avoids these problems by:
|
| 34 |
+
|
| 35 |
+
a) keeping a table of objects already copied during the current
|
| 36 |
+
copying pass
|
| 37 |
+
|
| 38 |
+
b) letting user-defined classes override the copying operation or the
|
| 39 |
+
set of components copied
|
| 40 |
+
|
| 41 |
+
This version does not copy types like module, class, function, method,
|
| 42 |
+
nor stack trace, stack frame, nor file, socket, window, nor any
|
| 43 |
+
similar types.
|
| 44 |
+
|
| 45 |
+
Classes can use the same interfaces to control copying that they use
|
| 46 |
+
to control pickling: they can define methods called __getinitargs__(),
|
| 47 |
+
__getstate__() and __setstate__(). See the documentation for module
|
| 48 |
+
"pickle" for information on these methods.
|
| 49 |
+
"""
|
| 50 |
+
|
| 51 |
+
import types
|
| 52 |
+
import weakref
|
| 53 |
+
from copyreg import dispatch_table
|
| 54 |
+
|
| 55 |
+
class Error(Exception):
    """Raised when an object cannot be copied, shallowly or deeply."""

# Legacy alias kept for code written against the old lowercase name.
error = Error
|
| 58 |
+
|
| 59 |
+
try:
|
| 60 |
+
from org.python.core import PyStringMap
|
| 61 |
+
except ImportError:
|
| 62 |
+
PyStringMap = None
|
| 63 |
+
|
| 64 |
+
__all__ = ["Error", "copy", "deepcopy"]
|
| 65 |
+
|
| 66 |
+
def copy(x):
    """Return a shallow copy of x.

    See the module's __doc__ string for more info.
    """
    cls = type(x)

    # Fast path: types with a registered shallow-copy handler.
    handler = _copy_dispatch.get(cls)
    if handler:
        return handler(x)

    # Classes (instances of a metaclass) are copied by identity.
    if issubclass(cls, type):
        return _copy_immutable(x)

    # A user-supplied __copy__ hook wins over the pickle protocol.
    hook = getattr(cls, "__copy__", None)
    if hook is not None:
        return hook(x)

    # Fall back to the pickle reduction protocol: copyreg's dispatch
    # table first, then __reduce_ex__(4), then plain __reduce__.
    reductor = dispatch_table.get(cls)
    if reductor is not None:
        rv = reductor(x)
    else:
        reductor = getattr(x, "__reduce_ex__", None)
        if reductor is not None:
            rv = reductor(4)
        else:
            reductor = getattr(x, "__reduce__", None)
            if not reductor:
                raise Error("un(shallow)copyable object of type %s" % cls)
            rv = reductor()

    # A string reduction means "the object is its own copy".
    if isinstance(rv, str):
        return x
    return _reconstruct(x, None, *rv)
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
# Registry mapping a type to the function that shallow-copies it.
# (Bound to the short alias ``d`` only while it is populated below.)
_copy_dispatch = d = {}

def _copy_immutable(x):
    # Immutable objects are their own shallow copy.
    return x
for t in (type(None), int, float, bool, complex, str, tuple,
          bytes, frozenset, type, range, slice, property,
          types.BuiltinFunctionType, type(Ellipsis), type(NotImplemented),
          types.FunctionType, weakref.ref):
    d[t] = _copy_immutable
# CodeType may be absent on implementations without code objects.
t = getattr(types, "CodeType", None)
if t is not None:
    d[t] = _copy_immutable

# Mutable built-in containers expose a native .copy() used directly.
d[list] = list.copy
d[dict] = dict.copy
d[set] = set.copy
d[bytearray] = bytearray.copy

if PyStringMap is not None:
    # Jython's string-keyed map type copies like a dict.
    d[PyStringMap] = PyStringMap.copy

# Drop the temporary registration aliases from the module namespace.
del d, t
|
| 127 |
+
|
| 128 |
+
def deepcopy(x, memo=None, _nil=[]):
    """Deep copy operation on arbitrary Python objects.

    See the module's __doc__ string for more info.
    """
    # NOTE: _nil is a module-private sentinel (distinct from None, which
    # could be a legitimate memo value); callers should not pass it.
    if memo is None:
        memo = {}

    # Already copied during this pass?  Reuse the copy so shared and
    # cyclic structure is preserved.
    d = id(x)
    y = memo.get(d, _nil)
    if y is not _nil:
        return y

    cls = type(x)

    copier = _deepcopy_dispatch.get(cls)
    if copier is not None:
        y = copier(x, memo)
    else:
        if issubclass(cls, type):
            # Classes (metaclass instances) are treated as atomic.
            y = _deepcopy_atomic(x, memo)
        else:
            # A user-defined __deepcopy__ hook takes precedence.
            copier = getattr(x, "__deepcopy__", None)
            if copier is not None:
                y = copier(memo)
            else:
                # Fall back to the pickle reduction protocol: copyreg's
                # dispatch table, then __reduce_ex__(4), then __reduce__.
                reductor = dispatch_table.get(cls)
                if reductor:
                    rv = reductor(x)
                else:
                    reductor = getattr(x, "__reduce_ex__", None)
                    if reductor is not None:
                        rv = reductor(4)
                    else:
                        reductor = getattr(x, "__reduce__", None)
                        if reductor:
                            rv = reductor()
                        else:
                            raise Error(
                                "un(deep)copyable object of type %s" % cls)
                # A string reduction means "object is its own copy".
                if isinstance(rv, str):
                    y = x
                else:
                    y = _reconstruct(x, memo, *rv)

    # If is its own copy, don't memoize.
    if y is not x:
        memo[d] = y
        _keep_alive(x, memo) # Make sure x lives at least as long as d
    return y
|
| 179 |
+
|
| 180 |
+
# Registry mapping a type to the function that deep-copies it.
# (Bound to the short alias ``d`` only while it is being populated;
# the alias is deleted after the last registration below.)
_deepcopy_dispatch = d = {}

def _deepcopy_atomic(x, memo):
    # Atomic/immutable objects are their own deep copy.
    return x
d[type(None)] = _deepcopy_atomic
d[type(Ellipsis)] = _deepcopy_atomic
d[type(NotImplemented)] = _deepcopy_atomic
d[int] = _deepcopy_atomic
d[float] = _deepcopy_atomic
d[bool] = _deepcopy_atomic
d[complex] = _deepcopy_atomic
d[bytes] = _deepcopy_atomic
d[str] = _deepcopy_atomic
d[types.CodeType] = _deepcopy_atomic
d[type] = _deepcopy_atomic
d[range] = _deepcopy_atomic
d[types.BuiltinFunctionType] = _deepcopy_atomic
d[types.FunctionType] = _deepcopy_atomic
d[weakref.ref] = _deepcopy_atomic
d[property] = _deepcopy_atomic
|
| 200 |
+
|
| 201 |
+
def _deepcopy_list(x, memo, deepcopy=deepcopy):
    # Publish the (still empty) copy in the memo *before* copying the
    # elements so that self-referential lists terminate rather than
    # recursing forever.
    y = []
    memo[id(x)] = y
    for item in x:
        y.append(deepcopy(item, memo))
    return y
d[list] = _deepcopy_list
|
| 209 |
+
|
| 210 |
+
def _deepcopy_tuple(x, memo, deepcopy=deepcopy):
    # Copy elements first; a tuple cannot directly contain itself, so
    # this recursion cannot loop on x itself.
    y = [deepcopy(a, memo) for a in x]
    # We're not going to put the tuple in the memo, but it's still important we
    # check for it, in case the tuple contains recursive mutable structures.
    try:
        return memo[id(x)]
    except KeyError:
        pass
    # If every element copied to itself, reuse the original tuple;
    # otherwise freeze the copied elements into a new tuple.
    for k, j in zip(x, y):
        if k is not j:
            y = tuple(y)
            break
    else:
        y = x
    return y
d[tuple] = _deepcopy_tuple
|
| 226 |
+
|
| 227 |
+
def _deepcopy_dict(x, memo, deepcopy=deepcopy):
    # Memoize the empty dict first so cycles through keys/values
    # terminate instead of recursing.
    y = {}
    memo[id(x)] = y
    for k, v in x.items():
        y[deepcopy(k, memo)] = deepcopy(v, memo)
    return y
d[dict] = _deepcopy_dict
if PyStringMap is not None:
    # Jython's PyStringMap behaves like a dict for copying purposes.
    d[PyStringMap] = _deepcopy_dict
|
| 236 |
+
|
| 237 |
+
def _deepcopy_method(x, memo): # Copy instance methods
    # Rebuild the bound method from its function (shared, immutable)
    # and a deep copy of the instance it is bound to.
    return type(x)(x.__func__, deepcopy(x.__self__, memo))
d[types.MethodType] = _deepcopy_method

# Registration is finished; drop the short-lived alias.
del d
|
| 242 |
+
|
| 243 |
+
def _keep_alive(x, memo):
    """Keep a reference to the object x in the memo.

    Objects are memoized by id(), so a temporary object whose id is
    reused after collection would corrupt the memo.  Every source
    object is therefore stashed in a list stored under the memo's own
    id -- a key that can only clash if someone deepcopies the memo
    itself.
    """
    memo.setdefault(id(memo), []).append(x)
|
| 258 |
+
|
| 259 |
+
def _reconstruct(x, memo, func, args,
                 state=None, listiter=None, dictiter=None,
                 *, deepcopy=deepcopy):
    """Rebuild an object from a pickle-style reduction tuple.

    A non-None memo means a deep copy is in progress, in which case
    every component (args, state, items) is itself deep-copied.
    """
    deep = memo is not None
    if deep and args:
        args = (deepcopy(arg, memo) for arg in args)
    y = func(*args)
    if deep:
        # Memoize before filling in state so cycles back to x resolve
        # to the new object.
        memo[id(x)] = y

    if state is not None:
        if deep:
            state = deepcopy(state, memo)
        if hasattr(y, '__setstate__'):
            y.__setstate__(state)
        else:
            # A 2-tuple state carries (instance __dict__, slots dict).
            if isinstance(state, tuple) and len(state) == 2:
                state, slotstate = state
            else:
                slotstate = None
            if state is not None:
                y.__dict__.update(state)
            if slotstate is not None:
                for key, value in slotstate.items():
                    setattr(y, key, value)

    # Sequence-like objects replay their items through append().
    if listiter is not None:
        if deep:
            for item in listiter:
                item = deepcopy(item, memo)
                y.append(item)
        else:
            for item in listiter:
                y.append(item)
    # Mapping-like objects replay their items through __setitem__.
    if dictiter is not None:
        if deep:
            for key, value in dictiter:
                key = deepcopy(key, memo)
                value = deepcopy(value, memo)
                y[key] = value
        else:
            for key, value in dictiter:
                y[key] = value
    return y
|
| 303 |
+
|
| 304 |
+
# Remove helper modules/names so they do not leak into this module's
# public namespace (callers should use copy()/deepcopy() only).
del types, weakref, PyStringMap
|
llava/lib/python3.10/distutils/README
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
This directory contains the Distutils package.
|
| 2 |
+
|
| 3 |
+
Full documentation is available at:
|
| 4 |
+
|
| 5 |
+
https://docs.python.org/distutils/
|
| 6 |
+
|
| 7 |
+
The Distutils-SIG web page is also a good starting point:
|
| 8 |
+
|
| 9 |
+
https://www.python.org/sigs/distutils-sig/
|
| 10 |
+
|
| 11 |
+
$Id$
|
llava/lib/python3.10/distutils/__init__.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils
|
| 2 |
+
|
| 3 |
+
The main package for the Python Module Distribution Utilities. Normally
|
| 4 |
+
used from a setup script as
|
| 5 |
+
|
| 6 |
+
from distutils.core import setup
|
| 7 |
+
|
| 8 |
+
setup (...)
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import sys
|
| 12 |
+
import warnings
|
| 13 |
+
|
| 14 |
+
# Version string, e.g. '3.10.4': everything in sys.version up to the
# first space (the interpreter's own version).
__version__ = sys.version[:sys.version.index(' ')]

_DEPRECATION_MESSAGE = ("The distutils package is deprecated and slated for "
                        "removal in Python 3.12. Use setuptools or check "
                        "PEP 632 for potential alternatives")
# stacklevel=2 attributes the warning to whoever imports distutils.
warnings.warn(_DEPRECATION_MESSAGE,
              DeprecationWarning, 2)
|
llava/lib/python3.10/distutils/_msvccompiler.py
ADDED
|
@@ -0,0 +1,546 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils._msvccompiler
|
| 2 |
+
|
| 3 |
+
Contains MSVCCompiler, an implementation of the abstract CCompiler class
|
| 4 |
+
for Microsoft Visual Studio 2015.
|
| 5 |
+
|
| 6 |
+
The module is compatible with VS 2015 and later. You can find legacy support
|
| 7 |
+
for older versions in distutils.msvc9compiler and distutils.msvccompiler.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
# Written by Perry Stoll
|
| 11 |
+
# hacked by Robin Becker and Thomas Heller to do a better job of
|
| 12 |
+
# finding DevStudio (through the registry)
|
| 13 |
+
# ported to VS 2005 and VS 2008 by Christian Heimes
|
| 14 |
+
# ported to VS 2015 by Steve Dower
|
| 15 |
+
|
| 16 |
+
import os
|
| 17 |
+
import subprocess
|
| 18 |
+
import winreg
|
| 19 |
+
|
| 20 |
+
from distutils.errors import DistutilsExecError, DistutilsPlatformError, \
|
| 21 |
+
CompileError, LibError, LinkError
|
| 22 |
+
from distutils.ccompiler import CCompiler, gen_lib_options
|
| 23 |
+
from distutils import log
|
| 24 |
+
from distutils.util import get_platform
|
| 25 |
+
|
| 26 |
+
from itertools import count
|
| 27 |
+
|
| 28 |
+
def _find_vc2015():
    """Locate a VS 2015+ Visual C++ install via the registry.

    Returns (best_version, best_dir).  Yields (None, None) when the
    VC7 key is missing entirely, and (0, None) when the key exists but
    contains no usable entries.
    """
    try:
        key = winreg.OpenKeyEx(
            winreg.HKEY_LOCAL_MACHINE,
            r"Software\Microsoft\VisualStudio\SxS\VC7",
            access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY
        )
    except OSError:
        log.debug("Visual C++ is not registered")
        return None, None

    best_version = 0
    best_dir = None
    with key:
        # The registry offers no count, so enumerate values until
        # EnumValue raises.
        for i in count():
            try:
                v, vc_dir, vt = winreg.EnumValue(key, i)
            except OSError:
                break
            if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir):
                try:
                    version = int(float(v))
                except (ValueError, TypeError):
                    continue
                # Only VS 2015 (v14) or later qualifies.
                if version >= 14 and version > best_version:
                    best_version, best_dir = version, vc_dir
    return best_version, best_dir
|
| 55 |
+
|
| 56 |
+
def _find_vc2017():
    """Returns "15, path" based on the result of invoking vswhere.exe
    If no install is found, returns "None, None"

    The version is returned to avoid unnecessarily changing the function
    result. It may be ignored when the path is not None.

    If vswhere.exe is not available, by definition, VS 2017 is not
    installed.
    """
    root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
    if not root:
        return None, None

    try:
        # vswhere ships with the VS installer; ask it for the newest
        # install (prereleases included) that has the C++ x86/x64 toolset.
        path = subprocess.check_output([
            os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
            "-latest",
            "-prerelease",
            "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
            "-property", "installationPath",
            "-products", "*",
        ], encoding="mbcs", errors="strict").strip()
    except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
        return None, None

    # vcvarsall.bat lives under VC\Auxiliary\Build of the install root.
    path = os.path.join(path, "VC", "Auxiliary", "Build")
    if os.path.isdir(path):
        return 15, path

    return None, None
|
| 87 |
+
|
| 88 |
+
# Map a vcvarsall.bat platform spec to the matching CRT runtime tag.
# NOTE(review): appears unused in this module since the bpo-38597
# vcruntime removal -- confirm against the rest of the file before
# deleting.
PLAT_SPEC_TO_RUNTIME = {
    'x86' : 'x86',
    'x86_amd64' : 'x64',
    'x86_arm' : 'arm',
    'x86_arm64' : 'arm64'
}
|
| 94 |
+
|
| 95 |
+
def _find_vcvarsall(plat_spec):
    """Return (path to vcvarsall.bat, None), or (None, None) if absent.

    VS 2017+ (via vswhere) is preferred; VS 2015 (via the registry) is
    the fallback.  The second tuple element exists only for backward
    compatibility.
    """
    # bpo-38597: Removed vcruntime return value
    _, best_dir = _find_vc2017()

    if not best_dir:
        best_version, best_dir = _find_vc2015()

    if not best_dir:
        log.debug("No suitable Visual C++ version found")
        return None, None

    vcvarsall = os.path.join(best_dir, "vcvarsall.bat")
    if not os.path.isfile(vcvarsall):
        log.debug("%s cannot be found", vcvarsall)
        return None, None

    return vcvarsall, None
|
| 112 |
+
|
| 113 |
+
def _get_vc_env(plat_spec):
    """Return the environment produced by vcvarsall, lower-cased keys.

    With DISTUTILS_USE_SDK set, the current process environment is
    trusted as-is instead of running vcvarsall.bat.

    Raises DistutilsPlatformError when vcvarsall.bat is missing or
    fails to run.
    """
    if os.getenv("DISTUTILS_USE_SDK"):
        return {
            key.lower(): value
            for key, value in os.environ.items()
        }

    vcvarsall, _ = _find_vcvarsall(plat_spec)
    if not vcvarsall:
        raise DistutilsPlatformError("Unable to find vcvarsall.bat")

    try:
        # /u makes cmd emit UTF-16LE output; run vcvarsall, then dump
        # the resulting environment with `set`.
        out = subprocess.check_output(
            'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
            stderr=subprocess.STDOUT,
        ).decode('utf-16le', errors='replace')
    except subprocess.CalledProcessError as exc:
        log.error(exc.output)
        raise DistutilsPlatformError("Error executing {}"
                                     .format(exc.cmd))

    # Parse `name=value` lines into a dict with lower-cased names,
    # skipping blank names/values.
    env = {
        key.lower(): value
        for key, _, value in
        (line.partition('=') for line in out.splitlines())
        if key and value
    }

    return env
|
| 142 |
+
|
| 143 |
+
def _find_exe(exe, paths=None):
    """Return path to an MSVC executable program.

    Tries to find the program in several places: first, one of the
    MSVC program search paths from the registry; next, the directories
    in the PATH environment variable.  If any of those work, return an
    absolute path that is known to exist.  If none of them work, just
    return the original program name, 'exe'.
    """
    if not paths:
        # Environment lookup is case-insensitive on Windows, so 'path'
        # finds PATH.  Default to '' so a missing variable degrades to
        # "not found" instead of raising AttributeError on None.split.
        paths = os.getenv('path', '').split(os.pathsep)
    for p in paths:
        fn = os.path.join(os.path.abspath(p), exe)
        if os.path.isfile(fn):
            return fn
    return exe
|
| 159 |
+
|
| 160 |
+
# A map keyed by get_platform() return values to values accepted by
# 'vcvarsall.bat'. Always cross-compile from x86 to work with the
# lighter-weight MSVC installs that do not include native 64-bit tools.
# Unknown platforms are rejected by MSVCCompiler.initialize().
PLAT_TO_VCVARS = {
    'win32' : 'x86',
    'win-amd64' : 'x86_amd64',
    'win-arm32' : 'x86_arm',
    'win-arm64' : 'x86_arm64'
}
|
| 169 |
+
|
| 170 |
+
class MSVCCompiler(CCompiler) :
    """Concrete class that implements an interface to Microsoft Visual C++,
       as defined by the CCompiler abstract class."""

    compiler_type = 'msvc'

    # Just set this so CCompiler's constructor doesn't barf. We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = (_c_extensions + _cpp_extensions +
                      _rc_extensions + _mc_extensions)
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    # Both static and shared libraries are named "<base><ext>" verbatim.
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
    def __init__(self, verbose=0, dry_run=0, force=0):
        """Create the compiler object; toolchain setup is deferred to
        initialize(), which is called lazily on first compile/link."""
        CCompiler.__init__ (self, verbose, dry_run, force)
        # target platform (.plat_name is consistent with 'bdist')
        self.plat_name = None
        # Becomes True once initialize() has located the toolchain.
        self.initialized = False
|
| 206 |
+
|
| 207 |
+
    def initialize(self, plat_name=None):
        """Locate the MSVC toolchain for *plat_name* and populate the
        compile/link option sets.  Must be called exactly once."""
        # multi-init means we would need to check platform same each time...
        assert not self.initialized, "don't init multiple times"
        if plat_name is None:
            plat_name = get_platform()
        # sanity check for platforms to prevent obscure errors later.
        if plat_name not in PLAT_TO_VCVARS:
            raise DistutilsPlatformError("--plat-name must be one of {}"
                                         .format(tuple(PLAT_TO_VCVARS)))

        # Get the vcvarsall.bat spec for the requested platform.
        plat_spec = PLAT_TO_VCVARS[plat_name]

        vc_env = _get_vc_env(plat_spec)
        if not vc_env:
            raise DistutilsPlatformError("Unable to find a compatible "
                                         "Visual Studio installation.")

        # Resolve each tool against the PATH that vcvarsall produced.
        self._paths = vc_env.get('path', '')
        paths = self._paths.split(os.pathsep)
        self.cc = _find_exe("cl.exe", paths)
        self.linker = _find_exe("link.exe", paths)
        self.lib = _find_exe("lib.exe", paths)
        self.rc = _find_exe("rc.exe", paths)   # resource compiler
        self.mc = _find_exe("mc.exe", paths)   # message compiler
        self.mt = _find_exe("mt.exe", paths)   # manifest tool

        for dir in vc_env.get('include', '').split(os.pathsep):
            if dir:
                self.add_include_dir(dir.rstrip(os.sep))

        for dir in vc_env.get('lib', '').split(os.pathsep):
            if dir:
                self.add_library_dir(dir.rstrip(os.sep))

        self.preprocess_options = None
        # bpo-38597: Always compile with dynamic linking
        # Future releases of Python 3.x will include all past
        # versions of vcruntime*.dll for compatibility.
        self.compile_options = [
            '/nologo', '/Ox', '/W3', '/GL', '/DNDEBUG', '/MD'
        ]

        self.compile_options_debug = [
            '/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG'
        ]

        ldflags = [
            '/nologo', '/INCREMENTAL:NO', '/LTCG'
        ]

        ldflags_debug = [
            '/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL'
        ]

        self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1']
        self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1']
        self.ldflags_shared = [*ldflags, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO']
        self.ldflags_shared_debug = [*ldflags_debug, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO']
        self.ldflags_static = [*ldflags]
        self.ldflags_static_debug = [*ldflags_debug]

        # (target type, debug flag) -> linker flags; a debug value of
        # None is treated the same as False.
        self._ldflags = {
            (CCompiler.EXECUTABLE, None): self.ldflags_exe,
            (CCompiler.EXECUTABLE, False): self.ldflags_exe,
            (CCompiler.EXECUTABLE, True): self.ldflags_exe_debug,
            (CCompiler.SHARED_OBJECT, None): self.ldflags_shared,
            (CCompiler.SHARED_OBJECT, False): self.ldflags_shared,
            (CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug,
            (CCompiler.SHARED_LIBRARY, None): self.ldflags_static,
            (CCompiler.SHARED_LIBRARY, False): self.ldflags_static,
            (CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug,
        }

        self.initialized = True
|
| 282 |
+
|
| 283 |
+
# -- Worker methods ------------------------------------------------
|
| 284 |
+
|
| 285 |
+
    def object_filenames(self,
                         source_filenames,
                         strip_dir=0,
                         output_dir=''):
        """Map each source filename to the object file it produces."""
        # .rc/.mc sources produce .res files; all other known sources
        # produce .obj.  (The second dict deliberately overrides the
        # .rc/.mc entries inserted by the first.)
        ext_map = {
            **{ext: self.obj_extension for ext in self.src_extensions},
            **{ext: self.res_extension for ext in self._rc_extensions + self._mc_extensions},
        }

        output_dir = output_dir or ''

        def make_out_path(p):
            base, ext = os.path.splitext(p)
            if strip_dir:
                base = os.path.basename(base)
            else:
                # Make the path relative: drop the drive and a leading
                # separator so os.path.join keeps output_dir as root.
                _, base = os.path.splitdrive(base)
                if base.startswith((os.path.sep, os.path.altsep)):
                    base = base[1:]
            try:
                # XXX: This may produce absurdly long paths. We should check
                # the length of the result and trim base until we fit within
                # 260 characters.
                return os.path.join(output_dir, base + ext_map[ext])
            except LookupError:
                # Better to raise an exception instead of silently continuing
                # and later complain about sources and targets having
                # different lengths
                raise CompileError("Don't know how to compile {}".format(p))

        return list(map(make_out_path, source_filenames))
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def compile(self, sources,
            output_dir=None, macros=None, include_dirs=None, debug=0,
            extra_preargs=None, extra_postargs=None, depends=None):
    """Compile *sources*, returning the list of object file names.

    Dispatches on file extension: C/C++ sources go through self.cc,
    .rc files through the resource compiler and .mc files through the
    message compiler.  Raises CompileError when a tool fails or a
    source has an unrecognised extension.
    """
    if not self.initialized:
        self.initialize()
    (macros, objects, extra_postargs,
     pp_opts, build) = self._setup_compile(output_dir, macros, include_dirs,
                                           sources, depends, extra_postargs)

    cc_opts = extra_preargs or []
    cc_opts.append('/c')
    cc_opts.extend(self.compile_options_debug if debug
                   else self.compile_options)

    need_cpp_opts = False

    for obj in objects:
        try:
            src, ext = build[obj]
        except KeyError:
            continue
        if debug:
            # Pass the full pathname to MSVC in debug mode so the
            # debugger can find the source file without asking the
            # user to browse for it.
            src = os.path.abspath(src)

        # Anaconda/conda-forge customisation: make the pdbs relocatable
        # by trimming the build prefix from embedded paths.
        # https://developercommunity.visualstudio.com/comments/623156/view.html
        trim_opts = []
        if 'SRC_DIR' in os.environ and os.path.basename(self.cc) == "cl.exe":
            trim_opts.append("/d1trimfile:" + os.environ['SRC_DIR'])

        if ext in self._c_extensions:
            input_opt = "/Tc" + src
        elif ext in self._cpp_extensions:
            input_opt = "/Tp" + src
            need_cpp_opts = True
        elif ext in self._rc_extensions:
            # Compile .RC straight to a .RES file.
            try:
                self.spawn([self.rc] + pp_opts + ["/fo" + obj, src])
            except DistutilsExecError as msg:
                raise CompileError(msg)
            continue
        elif ext in self._mc_extensions:
            # Compile .MC to .RC file to .RES file.
            #   * '-h dir' names the directory for the generated include
            #     file
            #   * '-r dir' names the target directory for the generated
            #     RC file and the binary message resource it includes
            # For now (there are no options to change this) we use the
            # source directory for the include file and the build
            # directory for the RC file and message resources.  This
            # works at least for win32all.
            h_dir = os.path.dirname(src)
            rc_dir = os.path.dirname(obj)
            try:
                # First compile .MC to .RC and .H ...
                self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src])
                base, _ = os.path.splitext(os.path.basename(src))
                rc_file = os.path.join(rc_dir, base + '.rc')
                # ... then compile .RC to .RES.
                self.spawn([self.rc, "/fo" + obj, rc_file])
            except DistutilsExecError as msg:
                raise CompileError(msg)
            continue
        else:
            # How to handle this file?
            raise CompileError("Don't know how to compile {} to {}"
                               .format(src, obj))

        args = [self.cc] + cc_opts + pp_opts + trim_opts
        if need_cpp_opts:
            args.append('/EHsc')
        args.append(input_opt)
        args.append("/Fo" + obj)
        args.extend(extra_postargs)

        try:
            self.spawn(args)
        except DistutilsExecError as msg:
            raise CompileError(msg)

    return objects
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
def create_static_lib(self,
                      objects,
                      output_libname,
                      output_dir=None,
                      debug=0,
                      target_lang=None):
    """Bundle *objects* into a static library via the MSVC librarian.

    Skips the step entirely when the existing library is newer than
    all of its inputs; raises LibError when the librarian fails.
    """
    if not self.initialized:
        self.initialize()
    objects, output_dir = self._fix_object_args(objects, output_dir)
    output_filename = self.library_filename(output_libname,
                                            output_dir=output_dir)

    if not self._need_link(objects, output_filename):
        log.debug("skipping %s (up-to-date)", output_filename)
        return

    lib_args = objects + ['/OUT:' + output_filename]
    if debug:
        pass  # XXX what goes here?
    try:
        log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args))
        self.spawn([self.lib] + lib_args)
    except DistutilsExecError as msg:
        raise LibError(msg)
|
| 439 |
+
|
| 440 |
+
|
| 441 |
+
def link(self,
         target_desc,
         objects,
         output_filename,
         output_dir=None,
         libraries=None,
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
         debug=0,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
         target_lang=None):
    """Link *objects* into an executable or shared object/library.

    Flags are chosen from self._ldflags by (target_desc, debug).
    Skips the link when the output is newer than all inputs; raises
    LinkError when the linker fails.
    """
    if not self.initialized:
        self.initialize()
    objects, output_dir = self._fix_object_args(objects, output_dir)
    libraries, library_dirs, runtime_library_dirs = self._fix_lib_args(
        libraries, library_dirs, runtime_library_dirs)

    if runtime_library_dirs:
        self.warn("I don't know what to do with 'runtime_library_dirs': "
                  + str(runtime_library_dirs))

    lib_opts = gen_lib_options(self,
                               library_dirs, runtime_library_dirs,
                               libraries)
    if output_dir is not None:
        output_filename = os.path.join(output_dir, output_filename)

    if not self._need_link(objects, output_filename):
        log.debug("skipping %s (up-to-date)", output_filename)
        return

    ldflags = self._ldflags[target_desc, debug]
    export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])]

    ld_args = (ldflags + lib_opts + export_opts +
               objects + ['/OUT:' + output_filename])

    # The MSVC linker generates .lib and .exp files, which cannot be
    # suppressed by any linker switches.  The .lib files may even be
    # needed!  Make sure they are generated in the temporary build
    # directory.  Since they have different names for debug and release
    # builds, they can go into the same directory.
    build_temp = os.path.dirname(objects[0])
    if export_symbols is not None:
        dll_name, dll_ext = os.path.splitext(
            os.path.basename(output_filename))
        implib_file = os.path.join(build_temp,
                                   self.library_filename(dll_name))
        ld_args.append('/IMPLIB:' + implib_file)

    if extra_preargs:
        ld_args[:0] = extra_preargs
    if extra_postargs:
        ld_args.extend(extra_postargs)

    output_dir = os.path.dirname(os.path.abspath(output_filename))
    self.mkpath(output_dir)
    try:
        log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args))
        self.spawn([self.linker] + ld_args)
    except DistutilsExecError as msg:
        raise LinkError(msg)
|
| 509 |
+
|
| 510 |
+
def spawn(self, cmd):
    """Run *cmd* with the MSVC tool directories on the 'path' variable.

    Temporarily replaces the 'path' environment variable with the paths
    computed during initialize(), restoring the previous state afterwards.
    If 'path' was not set at all, the variable is removed again rather
    than being assigned None: ``os.environ[...] = None`` raises
    TypeError, which the original code did in that (unlikely) case.
    """
    old_path = os.getenv('path')
    try:
        os.environ['path'] = self._paths
        return super().spawn(cmd)
    finally:
        if old_path is None:
            # 'path' was unset before; don't leave our value behind.
            os.environ.pop('path', None)
        else:
            os.environ['path'] = old_path
|
| 517 |
+
|
| 518 |
+
# -- Miscellaneous methods -----------------------------------------
|
| 519 |
+
# These are all used by the 'gen_lib_options() function, in
|
| 520 |
+
# ccompiler.py.
|
| 521 |
+
|
| 522 |
+
def library_dir_option(self, dir):
    """Return the linker switch that adds *dir* to the library search path."""
    return "/LIBPATH:" + dir
|
| 524 |
+
|
| 525 |
+
def runtime_library_dir_option(self, dir):
    """Always raise: MSVC has no runtime library search path concept."""
    raise DistutilsPlatformError(
        "don't know how to set runtime library search path for MSVC")
|
| 528 |
+
|
| 529 |
+
def library_option(self, lib):
    """Return the linker argument naming library *lib* (its filename)."""
    return self.library_filename(lib)
|
| 531 |
+
|
| 532 |
+
def find_library_file(self, dirs, lib, debug=0):
    """Search *dirs* for library *lib*; return its path or None.

    When *debug* is true, a '<lib>_d' debugging variant is preferred,
    falling back to the plain name if no such file exists.
    """
    candidates = [lib + "_d", lib] if debug else [lib]
    for directory in dirs:
        for candidate in candidates:
            full_path = os.path.join(directory,
                                     self.library_filename(candidate))
            if os.path.isfile(full_path):
                return full_path
    # Didn't find it in *any* of 'dirs'.
    return None
|
llava/lib/python3.10/distutils/archive_util.py
ADDED
|
@@ -0,0 +1,256 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.archive_util
|
| 2 |
+
|
| 3 |
+
Utility functions for creating archive files (tarballs, zip files,
|
| 4 |
+
that sort of thing)."""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
from warnings import warn
|
| 8 |
+
import sys
|
| 9 |
+
|
| 10 |
+
try:
|
| 11 |
+
import zipfile
|
| 12 |
+
except ImportError:
|
| 13 |
+
zipfile = None
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
from distutils.errors import DistutilsExecError
|
| 17 |
+
from distutils.spawn import spawn
|
| 18 |
+
from distutils.dir_util import mkpath
|
| 19 |
+
from distutils import log
|
| 20 |
+
|
| 21 |
+
try:
|
| 22 |
+
from pwd import getpwnam
|
| 23 |
+
except ImportError:
|
| 24 |
+
getpwnam = None
|
| 25 |
+
|
| 26 |
+
try:
|
| 27 |
+
from grp import getgrnam
|
| 28 |
+
except ImportError:
|
| 29 |
+
getgrnam = None
|
| 30 |
+
|
| 31 |
+
def _get_gid(name):
|
| 32 |
+
"""Returns a gid, given a group name."""
|
| 33 |
+
if getgrnam is None or name is None:
|
| 34 |
+
return None
|
| 35 |
+
try:
|
| 36 |
+
result = getgrnam(name)
|
| 37 |
+
except KeyError:
|
| 38 |
+
result = None
|
| 39 |
+
if result is not None:
|
| 40 |
+
return result[2]
|
| 41 |
+
return None
|
| 42 |
+
|
| 43 |
+
def _get_uid(name):
|
| 44 |
+
"""Returns an uid, given a user name."""
|
| 45 |
+
if getpwnam is None or name is None:
|
| 46 |
+
return None
|
| 47 |
+
try:
|
| 48 |
+
result = getpwnam(name)
|
| 49 |
+
except KeyError:
|
| 50 |
+
result = None
|
| 51 |
+
if result is not None:
|
| 52 |
+
return result[2]
|
| 53 |
+
return None
|
| 54 |
+
|
| 55 |
+
def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                 owner=None, group=None):
    """Create a (possibly compressed) tar file from all files under *base_dir*.

    'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or
    None.  ("compress" will be deprecated in Python 3.2.)

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built.  If not provided, the current owner and
    group will be used.

    The output tar file will be named 'base_dir' + ".tar", possibly plus
    the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z").

    Returns the output filename.
    """
    tar_mode = {'gzip': 'gz', 'bzip2': 'bz2', 'xz': 'xz', None: '',
                'compress': ''}
    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz',
                    'compress': '.Z'}

    # Validate up front; each element of the flag list becomes an argument.
    if compress is not None and compress not in compress_ext.keys():
        raise ValueError(
            "bad value for 'compress': must be None, 'gzip', 'bzip2', "
            "'xz' or 'compress'")

    archive_name = base_name + '.tar'
    if compress != 'compress':
        # The external 'compress' program appends its own extension later.
        archive_name += compress_ext.get(compress, '')

    mkpath(os.path.dirname(archive_name), dry_run=dry_run)

    import tarfile  # late import so Python build itself doesn't break

    log.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        # Rewrite ownership on each archive member as requested.
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % tar_mode[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    # Compression using the external `compress` utility.
    if compress == 'compress':
        warn("'compress' will be deprecated.", PendingDeprecationWarning)
        compressed_name = archive_name + compress_ext[compress]
        # The invocation varies depending on the platform.
        if sys.platform == 'win32':
            cmd = [compress, archive_name, compressed_name]
        else:
            cmd = [compress, '-f', archive_name]
        spawn(cmd, dry_run=dry_run)
        return compressed_name

    return archive_name
|
| 126 |
+
|
| 127 |
+
def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):
    """Create a zip file from all the files under *base_dir*.

    The output zip file will be named 'base_name' + ".zip".  Uses either
    the "zipfile" Python module (if available) or the InfoZIP "zip"
    utility (if installed and found on the default search path).  If
    neither tool is available, raises DistutilsExecError.  Returns the
    name of the output zip file.
    """
    zip_filename = base_name + ".zip"
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)

    if zipfile is None:
        # No zipfile module: spawn an external 'zip' command instead.
        zipoptions = "-r" if verbose else "-rq"
        try:
            spawn(["zip", zipoptions, zip_filename, base_dir],
                  dry_run=dry_run)
        except DistutilsExecError:
            # XXX really should distinguish between "couldn't find
            # external 'zip' command" and "zip failed".
            raise DistutilsExecError(("unable to create zip file '%s': "
                   "could neither import the 'zipfile' module nor "
                   "find a standalone zip utility") % zip_filename)
        return zip_filename

    log.info("creating '%s' and adding '%s' to it",
             zip_filename, base_dir)

    if not dry_run:
        try:
            zip = zipfile.ZipFile(zip_filename, "w",
                                  compression=zipfile.ZIP_DEFLATED)
        except RuntimeError:
            # zlib is missing; fall back to an uncompressed archive.
            zip = zipfile.ZipFile(zip_filename, "w",
                                  compression=zipfile.ZIP_STORED)

        with zip:
            if base_dir != os.curdir:
                path = os.path.normpath(os.path.join(base_dir, ''))
                zip.write(path, path)
                log.info("adding '%s'", path)
            for dirpath, dirnames, filenames in os.walk(base_dir):
                for name in dirnames:
                    path = os.path.normpath(os.path.join(dirpath, name, ''))
                    zip.write(path, path)
                    log.info("adding '%s'", path)
                for name in filenames:
                    path = os.path.normpath(os.path.join(dirpath, name))
                    if os.path.isfile(path):
                        zip.write(path, path)
                        log.info("adding '%s'", path)

    return zip_filename
|
| 186 |
+
|
| 187 |
+
# Registry of supported archive formats:
#   name -> (creator function, extra (arg, value) pairs, description)
ARCHIVE_FORMATS = {
    'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"),
    'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"),
    'tar': (make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip': (make_zipfile, [], "ZIP file"),
}
|
| 195 |
+
|
| 196 |
+
def check_archive_formats(formats):
    """Return the first format from *formats* that is unknown.

    If all formats are known, returns None.
    """
    for fmt in formats:
        if fmt not in ARCHIVE_FORMATS:
            return fmt
    return None
|
| 205 |
+
|
| 206 |
+
def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
                 dry_run=0, owner=None, group=None):
    """Create an archive file (eg. zip or tar) and return its name.

    'base_name' is the name of the file to create, minus any
    format-specific extension; 'format' is the archive format: one of
    "zip", "tar", "gztar", "bztar", "xztar", or "ztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and directories
    in the archive.  'root_dir' and 'base_dir' both default to the
    current directory.

    'owner' and 'group' are used when creating a tar archive.  By
    default, uses the current owner and group.
    """
    save_cwd = os.getcwd()
    if root_dir is not None:
        log.debug("changing into '%s'", root_dir)
        # Resolve the output name before we move away from the cwd.
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = {'dry_run': dry_run}

    try:
        format_info = ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError("unknown archive format '%s'" % format)

    func = format_info[0]
    # Fold the format's fixed keyword arguments in.
    kwargs.update(format_info[1])

    if format != 'zip':
        # Ownership overrides only apply to tar-based formats.
        kwargs['owner'] = owner
        kwargs['group'] = group

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        if root_dir is not None:
            log.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename
|
llava/lib/python3.10/distutils/bcppcompiler.py
ADDED
|
@@ -0,0 +1,393 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.bcppcompiler
|
| 2 |
+
|
| 3 |
+
Contains BorlandCCompiler, an implementation of the abstract CCompiler class
|
| 4 |
+
for the Borland C++ compiler.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
# This implementation by Lyle Johnson, based on the original msvccompiler.py
|
| 8 |
+
# module and using the directions originally published by Gordon Williams.
|
| 9 |
+
|
| 10 |
+
# XXX looks like there's a LOT of overlap between these two classes:
|
| 11 |
+
# someone should sit down and factor out the common code as
|
| 12 |
+
# WindowsCCompiler! --GPW
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
import os
|
| 16 |
+
from distutils.errors import \
|
| 17 |
+
DistutilsExecError, \
|
| 18 |
+
CompileError, LibError, LinkError, UnknownFileError
|
| 19 |
+
from distutils.ccompiler import \
|
| 20 |
+
CCompiler, gen_preprocess_options
|
| 21 |
+
from distutils.file_util import write_file
|
| 22 |
+
from distutils.dep_util import newer
|
| 23 |
+
from distutils import log
|
| 24 |
+
|
| 25 |
+
class BCPPCompiler(CCompiler) :
|
| 26 |
+
"""Concrete class that implements an interface to the Borland C/C++
|
| 27 |
+
compiler, as defined by the CCompiler abstract class.
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
compiler_type = 'bcpp'
|
| 31 |
+
|
| 32 |
+
# Just set this so CCompiler's constructor doesn't barf. We currently
|
| 33 |
+
# don't use the 'set_executables()' bureaucracy provided by CCompiler,
|
| 34 |
+
# as it really isn't necessary for this sort of single-compiler class.
|
| 35 |
+
# Would be nice to have a consistent interface with UnixCCompiler,
|
| 36 |
+
# though, so it's worth thinking about.
|
| 37 |
+
executables = {}
|
| 38 |
+
|
| 39 |
+
# Private class data (need to distinguish C from C++ source for compiler)
|
| 40 |
+
_c_extensions = ['.c']
|
| 41 |
+
_cpp_extensions = ['.cc', '.cpp', '.cxx']
|
| 42 |
+
|
| 43 |
+
# Needed for the filename generation methods provided by the
|
| 44 |
+
# base class, CCompiler.
|
| 45 |
+
src_extensions = _c_extensions + _cpp_extensions
|
| 46 |
+
obj_extension = '.obj'
|
| 47 |
+
static_lib_extension = '.lib'
|
| 48 |
+
shared_lib_extension = '.dll'
|
| 49 |
+
static_lib_format = shared_lib_format = '%s%s'
|
| 50 |
+
exe_extension = '.exe'
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def __init__ (self,
              verbose=0,
              dry_run=0,
              force=0):
    """Set up tool names and default option sets for Borland C/C++."""
    CCompiler.__init__ (self, verbose, dry_run, force)

    # These executables are assumed to all be in the path; Borland
    # doesn't seem to use any special registry settings to indicate
    # their installation locations.
    self.cc = "bcc32.exe"
    self.linker = "ilink32.exe"
    self.lib = "tlib.exe"

    self.preprocess_options = None
    self.compile_options = ['/tWM', '/O2', '/q', '/g0']
    self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']

    self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
    self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
    self.ldflags_static = []
    self.ldflags_exe = ['/Gn', '/q', '/x']
    self.ldflags_exe_debug = ['/Gn', '/q', '/x', '/r']
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
# -- Worker methods ------------------------------------------------
|
| 80 |
+
|
| 81 |
+
def compile(self, sources,
            output_dir=None, macros=None, include_dirs=None, debug=0,
            extra_preargs=None, extra_postargs=None, depends=None):
    """Compile *sources* with bcc32 (or brcc32 for .rc), returning objects."""
    (macros, objects, extra_postargs,
     pp_opts, build) = self._setup_compile(output_dir, macros, include_dirs,
                                           sources, depends, extra_postargs)
    compile_opts = extra_preargs or []
    compile_opts.append('-c')
    compile_opts.extend(self.compile_options_debug if debug
                        else self.compile_options)

    for obj in objects:
        try:
            src, ext = build[obj]
        except KeyError:
            continue
        # XXX why do the normpath here?
        src = os.path.normpath(src)
        obj = os.path.normpath(obj)
        # XXX _setup_compile() did a mkpath() too but before the normpath.
        # Is it possible to skip the normpath?
        self.mkpath(os.path.dirname(obj))

        if ext == '.res':
            # This is already a binary file -- skip it.
            continue
        if ext == '.rc':
            # A resource script needs compiling to a .res file -- do it now.
            try:
                self.spawn(["brcc32", "-fo", obj, src])
            except DistutilsExecError as msg:
                raise CompileError(msg)
            continue

        # Everything else goes through the real compiler.
        if ext in self._cpp_extensions:
            input_opt = "-P"  # force C++ compilation
        else:
            # C sources need no extra option; for unknown file types no
            # extra options either -- the compiler will probably fail,
            # but maybe it recognizes a file we don't.
            input_opt = ""

        output_opt = "-o" + obj

        # Compiler command line syntax is: "bcc32 [options] file(s)".
        # Note that the source file names must appear at the end of
        # the command line.
        try:
            self.spawn([self.cc] + compile_opts + pp_opts +
                       [input_opt, output_opt] +
                       extra_postargs + [src])
        except DistutilsExecError as msg:
            raise CompileError(msg)

    return objects
|
| 142 |
+
|
| 143 |
+
# compile ()
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def create_static_lib (self,
                       objects,
                       output_libname,
                       output_dir=None,
                       debug=0,
                       target_lang=None):
    """Bundle *objects* into a static library with tlib.

    Skips the step when the existing library is newer than its inputs;
    raises LibError when tlib fails.
    """
    objects, output_dir = self._fix_object_args (objects, output_dir)
    output_filename = self.library_filename (output_libname,
                                             output_dir=output_dir)

    if not self._need_link (objects, output_filename):
        log.debug("skipping %s (up-to-date)", output_filename)
        return

    lib_args = [output_filename, '/u'] + objects
    if debug:
        pass  # XXX what goes here?
    try:
        self.spawn ([self.lib] + lib_args)
    except DistutilsExecError as msg:
        raise LibError(msg)
|
| 167 |
+
|
| 168 |
+
# create_static_lib ()
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
def link (self,
          target_desc,
          objects,
          output_filename,
          output_dir=None,
          libraries=None,
          library_dirs=None,
          runtime_library_dirs=None,
          export_symbols=None,
          debug=0,
          extra_preargs=None,
          extra_postargs=None,
          build_temp=None,
          target_lang=None):
    """Link 'objects' into an executable or shared object with the
    Borland linker.

    'target_desc' selects the target kind (CCompiler.EXECUTABLE vs. a
    shared object); the remaining parameters follow the generic
    CCompiler.link() contract.  The link is skipped when
    'self._need_link()' reports the output is already up to date.
    Note the peculiar comma-separated argument groups the Borland
    linker expects (objects, output name, map file, libraries, def
    file, resources).
    """
    # XXX this ignores 'build_temp'!  should follow the lead of
    # msvccompiler.py

    (objects, output_dir) = self._fix_object_args (objects, output_dir)
    (libraries, library_dirs, runtime_library_dirs) = \
        self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)

    if runtime_library_dirs:
        log.warn("I don't know what to do with 'runtime_library_dirs': %s",
                 str(runtime_library_dirs))

    if output_dir is not None:
        output_filename = os.path.join (output_dir, output_filename)

    if self._need_link (objects, output_filename):

        # Figure out linker args based on type of target.
        if target_desc == CCompiler.EXECUTABLE:
            # c0w32/c0d32 look like Borland's startup objects for
            # executables vs. DLLs -- TODO confirm exact semantics.
            startup_obj = 'c0w32'
            if debug:
                ld_args = self.ldflags_exe_debug[:]
            else:
                ld_args = self.ldflags_exe[:]
        else:
            startup_obj = 'c0d32'
            if debug:
                ld_args = self.ldflags_shared_debug[:]
            else:
                ld_args = self.ldflags_shared[:]


        # Create a temporary exports file for use by the linker
        if export_symbols is None:
            def_file = ''
        else:
            head, tail = os.path.split (output_filename)
            modname, ext = os.path.splitext (tail)
            temp_dir = os.path.dirname(objects[0]) # preserve tree structure
            def_file = os.path.join (temp_dir, '%s.def' % modname)
            contents = ['EXPORTS']
            for sym in (export_symbols or []):
                contents.append(' %s=_%s' % (sym, sym))
            self.execute(write_file, (def_file, contents),
                         "writing %s" % def_file)

        # Borland C++ has problems with '/' in paths
        objects2 = map(os.path.normpath, objects)
        # split objects in .obj and .res files
        # Borland C++ needs them at different positions in the command line
        objects = [startup_obj]
        resources = []
        for file in objects2:
            (base, ext) = os.path.splitext(os.path.normcase(file))
            if ext == '.res':
                resources.append(file)
            else:
                objects.append(file)


        for l in library_dirs:
            ld_args.append("/L%s" % os.path.normpath(l))
        ld_args.append("/L.") # we sometimes use relative paths

        # list of object files
        ld_args.extend(objects)

        # XXX the command-line syntax for Borland C++ is a bit wonky;
        # certain filenames are jammed together in one big string, but
        # comma-delimited.  This doesn't mesh too well with the
        # Unix-centric attitude (with a DOS/Windows quoting hack) of
        # 'spawn()', so constructing the argument list is a bit
        # awkward.  Note that doing the obvious thing and jamming all
        # the filenames and commas into one argument would be wrong,
        # because 'spawn()' would quote any filenames with spaces in
        # them.  Arghghh!.  Apparently it works fine as coded...

        # name of dll/exe file
        ld_args.extend([',',output_filename])
        # no map file and start libraries
        ld_args.append(',,')

        for lib in libraries:
            # see if we find it and if there is a bcpp specific lib
            # (xxx_bcpp.lib)
            libfile = self.find_library_file(library_dirs, lib, debug)
            if libfile is None:
                ld_args.append(lib)
                # probably a BCPP internal library -- don't warn
            else:
                # full name which prefers bcpp_xxx.lib over xxx.lib
                ld_args.append(libfile)

        # some default libraries
        ld_args.append ('import32')
        ld_args.append ('cw32mt')

        # def file for export symbols
        ld_args.extend([',',def_file])
        # add resource files
        ld_args.append(',')
        ld_args.extend(resources)


        if extra_preargs:
            ld_args[:0] = extra_preargs
        if extra_postargs:
            ld_args.extend(extra_postargs)

        self.mkpath (os.path.dirname (output_filename))
        try:
            self.spawn ([self.linker] + ld_args)
        except DistutilsExecError as msg:
            raise LinkError(msg)

    else:
        log.debug("skipping %s (up-to-date)", output_filename)

# link ()
|
| 304 |
+
|
| 305 |
+
# -- Miscellaneous methods -----------------------------------------
|
| 306 |
+
|
| 307 |
+
|
| 308 |
+
def find_library_file (self, dirs, lib, debug=0):
    """Search 'dirs' for a library named 'lib' and return its full path,
    or None when no candidate file exists.

    Candidate names are tried in order of preference: the "_bcpp"
    suffixed variant beats the plain name (so a Python installation can
    carry one static library per compiler), and when 'debug' is true a
    "_d" suffixed variant beats the non-debug one.
    """
    if debug:
        debug_lib = lib + "_d"
        candidates = (debug_lib + "_bcpp", lib + "_bcpp", debug_lib, lib)
    else:
        candidates = (lib + "_bcpp", lib)

    for directory in dirs:
        for candidate in candidates:
            path = os.path.join(directory, self.library_filename(candidate))
            if os.path.exists(path):
                return path

    # Not found in any of 'dirs'.
    return None
|
| 332 |
+
|
| 333 |
+
# overwrite the one from CCompiler to support rc and res-files
|
| 334 |
+
def object_filenames (self,
                      source_filenames,
                      strip_dir=0,
                      output_dir=''):
    """Map source filenames to the object filenames this compiler will
    produce, overriding CCompiler to also accept '.rc' and '.res'
    resource files: '.res' files pass through unchanged, '.rc' files
    map to '.res' outputs, and everything else gets self.obj_extension.
    Raises UnknownFileError for unrecognized extensions.
    """
    if output_dir is None:
        output_dir = ''
    obj_names = []
    for src_name in source_filenames:
        # normcase so '.RC' is recognized the same as '.rc' on Windows.
        base, ext = os.path.splitext (os.path.normcase(src_name))
        if ext not in (self.src_extensions + ['.rc','.res']):
            raise UnknownFileError("unknown file type '%s' (from '%s')" % \
                  (ext, src_name))
        if strip_dir:
            base = os.path.basename (base)
        if ext == '.res':
            suffix = ext                    # already a compiled resource
        elif ext == '.rc':
            suffix = '.res'                 # must be compiled to a .res file
        else:
            suffix = self.obj_extension
        obj_names.append (os.path.join (output_dir, base + suffix))
    return obj_names

# object_filenames ()
|
| 360 |
+
|
| 361 |
+
def preprocess (self,
                source,
                output_file=None,
                macros=None,
                include_dirs=None,
                extra_preargs=None,
                extra_postargs=None):
    """Run the 'cpp32.exe' preprocessor over 'source'.

    Writes to 'output_file' (via '-o') when given.  'macros' and
    'include_dirs' are normalized through '_fix_compile_args()' and
    converted to command-line options.  The preprocessor is only run
    when forced, when no output file is named, or when the source is
    newer than the output.  Raises CompileError if the spawned
    preprocessor fails.
    """
    (_, macros, include_dirs) = \
        self._fix_compile_args(None, macros, include_dirs)
    pp_opts = gen_preprocess_options(macros, include_dirs)
    pp_args = ['cpp32.exe'] + pp_opts
    if output_file is not None:
        pp_args.append('-o' + output_file)
    if extra_preargs:
        pp_args[:0] = extra_preargs
    if extra_postargs:
        pp_args.extend(extra_postargs)
    pp_args.append(source)

    # We need to preprocess: either we're being forced to, or the
    # source file is newer than the target (or the target doesn't
    # exist).
    if self.force or output_file is None or newer(source, output_file):
        if output_file:
            self.mkpath(os.path.dirname(output_file))
        try:
            self.spawn(pp_args)
        except DistutilsExecError as msg:
            # NOTE(review): printing as well as raising looks redundant,
            # but it is the long-standing behavior of this method.
            print(msg)
            raise CompileError(msg)

# preprocess()
|
llava/lib/python3.10/distutils/ccompiler.py
ADDED
|
@@ -0,0 +1,1116 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.ccompiler
|
| 2 |
+
|
| 3 |
+
Contains CCompiler, an abstract base class that defines the interface
|
| 4 |
+
for the Distutils compiler abstraction model."""
|
| 5 |
+
|
| 6 |
+
import sys, os, re
|
| 7 |
+
from distutils.errors import *
|
| 8 |
+
from distutils.spawn import spawn
|
| 9 |
+
from distutils.file_util import move_file
|
| 10 |
+
from distutils.dir_util import mkpath
|
| 11 |
+
from distutils.dep_util import newer_group
|
| 12 |
+
from distutils.util import split_quoted, execute
|
| 13 |
+
from distutils import log
|
| 14 |
+
|
| 15 |
+
class CCompiler:
    """Abstract base class to define the interface that must be implemented
    by real compiler classes.  Also has some utility methods used by
    several compiler classes.

    The basic idea behind a compiler abstraction class is that each
    instance can be used for all the compile/link steps in building a
    single project.  Thus, attributes common to all of those compile and
    link steps -- include directories, macros to define, libraries to link
    against, etc. -- are attributes of the compiler instance.  To allow for
    variability in how individual files are treated, most of those
    attributes may be varied on a per-compilation or per-link basis.
    """

    # 'compiler_type' is a class attribute that identifies this class.  It
    # keeps code that wants to know what kind of compiler it's dealing with
    # from having to import all possible compiler classes just to do an
    # 'isinstance'.  In concrete CCompiler subclasses, 'compiler_type'
    # should really, really be one of the keys of the 'compiler_class'
    # dictionary (see below -- used by the 'new_compiler()' factory
    # function) -- authors of new compiler interface classes are
    # responsible for updating 'compiler_class'!
    compiler_type = None

    # XXX things not handled by this compiler abstraction model:
    #   * client can't provide additional options for a compiler,
    #     e.g. warning, optimization, debugging flags.  Perhaps this
    #     should be the domain of concrete compiler abstraction classes
    #     (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base
    #     class should have methods for the common ones.
    #   * can't completely override the include or library search
    #     path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2".
    #     I'm not sure how widely supported this is even by Unix
    #     compilers, much less on other platforms.  And I'm even less
    #     sure how useful it is; maybe for cross-compiling, but
    #     support for that is a ways off.  (And anyways, cross
    #     compilers probably have a dedicated binary with the
    #     right paths compiled in.  I hope.)
    #   * can't do really freaky things with the library list/library
    #     dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against
    #     different versions of libfoo.a in different locations.  I
    #     think this is useless without the ability to null out the
    #     library search path anyways.

    # Subclasses that rely on the standard filename generation methods
    # implemented below should override these; see the comment near
    # those methods ('object_filenames()' et. al.) for details:
    src_extensions = None               # list of strings
    obj_extension = None                # string
    static_lib_extension = None
    shared_lib_extension = None         # string
    static_lib_format = None            # format string
    shared_lib_format = None            # prob. same as static_lib_format
    exe_extension = None                # string

    # Default language settings.  language_map is used to detect a source
    # file or Extension target language, checking source filenames.
    # language_order is used to detect the language precedence, when deciding
    # what language to use when mixing source types.  For example, if some
    # extension has two files with ".c" extension, and one with ".cpp", it
    # is still linked as c++.
    language_map = {".c"   : "c",
                    ".cc"  : "c++",
                    ".cpp" : "c++",
                    ".cxx" : "c++",
                    ".m"   : "objc",
                   }
    language_order = ["c++", "objc", "c"]
|
| 84 |
+
|
| 85 |
+
def __init__(self, verbose=0, dry_run=0, force=0):
    """Initialize the shared compiler state.

    'dry_run' suppresses actual command execution, 'force' requests
    rebuilding regardless of timestamps, and 'verbose' controls
    chattiness.
    """
    self.dry_run = dry_run
    self.force = force
    self.verbose = verbose

    # Common output directory for object, library, shared object and
    # shared library files (None until a caller sets one).
    self.output_dir = None

    # Macro definitions/undefinitions applied to every compilation:
    # a (name, value) 2-tuple defines (value may be None for "no
    # explicit value"); a (name,) 1-tuple undefines.
    self.macros = []

    # Directories searched for header files.
    self.include_dirs = []

    # Library *names* linked into every link (eg. "foo", never
    # "libfoo.a" -- the filename is inferred per platform).
    self.libraries = []

    # Directories searched for those libraries at link time.
    self.library_dirs = []

    # Directories searched for shared libraries/objects at run time.
    self.runtime_library_dirs = []

    # Extra object files (or similarly linkable files) added to every
    # link.
    self.objects = []

    # Seed the per-instance executable settings from the subclass's
    # class-level 'executables' table.
    for name, command in self.executables.items():
        self.set_executable(name, command)
|
| 120 |
+
|
| 121 |
+
def set_executables(self, **kwargs):
    """Define the executables (and options for them) that will be run
    to perform the various stages of compilation.

    Which keys are legal depends on the compiler class's 'executables'
    class attribute; most classes accept at least:
      compiler     the C/C++ compiler
      linker_so    linker used to create shared objects and libraries
      linker_exe   linker used to create binary executables
      archiver     static library creator

    On platforms with a command line (Unix, DOS/Windows) each value is
    a string split into program name plus arguments the way a Unix
    shell would (see 'distutils.util.split_quoted()').  Raises
    ValueError for keys the compiler class does not know about.
    """
    # Some CCompiler subclasses hard-code executable names as class
    # attributes ('cpp', 'cc', ...) when they target exactly one
    # compiler/OS combination; others (UnixCCompiler) are driven by
    # information discovered at run time.  Either way, only keys
    # declared in 'executables' may be overridden here.
    for name, command in kwargs.items():
        if name not in self.executables:
            raise ValueError("unknown executable '%s' for class %s" %
                             (name, self.__class__.__name__))
        self.set_executable(name, command)
|
| 152 |
+
|
| 153 |
+
def set_executable(self, key, value):
    """Store one executable setting as attribute 'key'.

    A string value is split into a command list the way a shell would
    (see 'split_quoted()'); any other value is stored unchanged.
    """
    parsed = split_quoted(value) if isinstance(value, str) else value
    setattr(self, key, parsed)
|
| 158 |
+
|
| 159 |
+
def _find_macro(self, name):
|
| 160 |
+
i = 0
|
| 161 |
+
for defn in self.macros:
|
| 162 |
+
if defn[0] == name:
|
| 163 |
+
return i
|
| 164 |
+
i += 1
|
| 165 |
+
return None
|
| 166 |
+
|
| 167 |
+
def _check_macro_definitions(self, definitions):
|
| 168 |
+
"""Ensures that every element of 'definitions' is a valid macro
|
| 169 |
+
definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do
|
| 170 |
+
nothing if all definitions are OK, raise TypeError otherwise.
|
| 171 |
+
"""
|
| 172 |
+
for defn in definitions:
|
| 173 |
+
if not (isinstance(defn, tuple) and
|
| 174 |
+
(len(defn) in (1, 2) and
|
| 175 |
+
(isinstance (defn[1], str) or defn[1] is None)) and
|
| 176 |
+
isinstance (defn[0], str)):
|
| 177 |
+
raise TypeError(("invalid macro definition '%s': " % defn) + \
|
| 178 |
+
"must be tuple (string,), (string, string), or " + \
|
| 179 |
+
"(string, None)")
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
# -- Bookkeeping methods -------------------------------------------
|
| 183 |
+
|
| 184 |
+
def define_macro(self, name, value=None):
    """Define preprocessor macro 'name' for all compilations driven by
    this compiler object.

    'value', if supplied, should be a string; when omitted the macro is
    defined without an explicit value and the exact outcome depends on
    the compiler used.
    """
    # Drop any earlier definition/undefinition of the same name so
    # that this call takes precedence.
    previous = self._find_macro (name)
    if previous is not None:
        del self.macros[previous]

    self.macros.append((name, value))
|
| 198 |
+
|
| 199 |
+
def undefine_macro(self, name):
    """Undefine preprocessor macro 'name' for all compilations driven
    by this compiler object.

    When the same macro is both defined via 'define_macro()' and
    undefined via 'undefine_macro()', the last call wins (including
    repeated redefinitions/undefinitions).  A per-compilation
    definition (passed to 'compile()') still takes precedence over
    either.
    """
    # Drop any earlier definition/undefinition of the same name so
    # that this call takes precedence.
    previous = self._find_macro (name)
    if previous is not None:
        del self.macros[previous]

    # A 1-tuple marks an undefinition.
    self.macros.append((name,))
|
| 216 |
+
|
| 217 |
+
def add_include_dir(self, dir):
    """Add 'dir' to the list of directories that will be searched for
    header files.  The compiler is instructed to search directories in
    the order in which they are supplied by successive calls to
    'add_include_dir()'.
    """
    self.include_dirs.append(dir)
|
| 224 |
+
|
| 225 |
+
def set_include_dirs(self, dirs):
    """Set the list of directories that will be searched to 'dirs' (a
    list of strings).  Overrides any preceding calls to
    'add_include_dir()'; subsequent calls to 'add_include_dir()' add
    to the list passed to 'set_include_dirs()'.  This does not affect
    any list of standard include directories that the compiler may
    search by default.
    """
    # Copy so later mutation of the caller's list cannot leak in.
    self.include_dirs = dirs[:]
|
| 234 |
+
|
| 235 |
+
def add_library(self, libname):
    """Add 'libname' to the list of libraries that will be included in
    all links driven by this compiler object.  Note that 'libname'
    should *not* be the name of a file containing a library, but the
    name of the library itself: the actual filename will be inferred by
    the linker, the compiler, or the compiler class (depending on the
    platform).

    The linker will be instructed to link against libraries in the
    order they were supplied to 'add_library()' and/or
    'set_libraries()'.  It is perfectly valid to duplicate library
    names; the linker will be instructed to link against libraries as
    many times as they are mentioned.
    """
    self.libraries.append(libname)
|
| 250 |
+
|
| 251 |
+
def set_libraries(self, libnames):
    """Set the list of libraries to be included in all links driven by
    this compiler object to 'libnames' (a list of strings).  This does
    not affect any standard system libraries that the linker may
    include by default.
    """
    # Copy so later mutation of the caller's list cannot leak in.
    self.libraries = libnames[:]
|
| 258 |
+
|
| 259 |
+
def add_library_dir(self, dir):
    """Add 'dir' to the list of directories that will be searched for
    libraries specified to 'add_library()' and 'set_libraries()'.  The
    linker will be instructed to search for libraries in the order they
    are supplied to 'add_library_dir()' and/or 'set_library_dirs()'.
    """
    self.library_dirs.append(dir)
|
| 266 |
+
|
| 267 |
+
def set_library_dirs(self, dirs):
    """Set the list of library search directories to 'dirs' (a list of
    strings).  This does not affect any standard library search path
    that the linker may search by default.
    """
    # Copy so later mutation of the caller's list cannot leak in.
    self.library_dirs = dirs[:]
|
| 273 |
+
|
| 274 |
+
def add_runtime_library_dir(self, dir):
    """Add 'dir' to the list of directories that will be searched for
    shared libraries at runtime.
    """
    self.runtime_library_dirs.append(dir)
|
| 279 |
+
|
| 280 |
+
def set_runtime_library_dirs(self, dirs):
    """Set the list of directories to search for shared libraries at
    runtime to 'dirs' (a list of strings).  This does not affect any
    standard search path that the runtime linker may search by
    default.
    """
    # Copy so later mutation of the caller's list cannot leak in.
    self.runtime_library_dirs = dirs[:]
|
| 287 |
+
|
| 288 |
+
def add_link_object(self, object):
    """Add 'object' to the list of object files (or analogues, such as
    explicitly named library files or the output of "resource
    compilers") to be included in every link driven by this compiler
    object.
    """
    self.objects.append(object)
|
| 295 |
+
|
| 296 |
+
def set_link_objects(self, objects):
    """Set the list of object files (or analogues) to be included in
    every link to 'objects'.  This does not affect any standard object
    files that the linker may include by default (such as system
    libraries).
    """
    # Copy so later mutation of the caller's list cannot leak in.
    self.objects = objects[:]
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
# -- Private utility methods --------------------------------------
|
| 306 |
+
# (here for the convenience of subclasses)
|
| 307 |
+
|
| 308 |
+
# Helper method to prep compiler in subclass compile() methods
|
| 309 |
+
|
| 310 |
+
def _setup_compile(self, outdir, macros, incdirs, sources, depends,
                   extra):
    """Process arguments and decide which source files to compile.

    Normalizes 'outdir', 'macros', 'incdirs' and 'extra' (merging the
    caller's values with the instance-level defaults), computes the
    expected object filenames, creates their output directories, and
    returns (macros, objects, extra, pp_opts, build) where 'build'
    maps each object filename to its (source, extension) pair.
    """
    # NOTE(review): 'depends' is accepted but never used below.
    if outdir is None:
        outdir = self.output_dir
    elif not isinstance(outdir, str):
        raise TypeError("'output_dir' must be a string or None")

    if macros is None:
        macros = self.macros
    elif isinstance(macros, list):
        # Caller-supplied macros take precedence, so they come first.
        macros = macros + (self.macros or [])
    else:
        raise TypeError("'macros' (if supplied) must be a list of tuples")

    if incdirs is None:
        incdirs = self.include_dirs
    elif isinstance(incdirs, (list, tuple)):
        incdirs = list(incdirs) + (self.include_dirs or [])
    else:
        raise TypeError(
              "'include_dirs' (if supplied) must be a list of strings")

    if extra is None:
        extra = []

    # Get the list of expected output (object) files
    objects = self.object_filenames(sources, strip_dir=0,
                                    output_dir=outdir)
    assert len(objects) == len(sources)

    pp_opts = gen_preprocess_options(macros, incdirs)

    build = {}
    for i in range(len(sources)):
        src = sources[i]
        obj = objects[i]
        ext = os.path.splitext(src)[1]
        # Make sure each object file's output directory exists.
        self.mkpath(os.path.dirname(obj))
        build[obj] = (src, ext)

    return macros, objects, extra, pp_opts, build
|
| 352 |
+
|
| 353 |
+
def _get_cc_args(self, pp_opts, debug, before):
|
| 354 |
+
# works for unixccompiler, cygwinccompiler
|
| 355 |
+
cc_args = pp_opts + ['-c']
|
| 356 |
+
if debug:
|
| 357 |
+
cc_args[:0] = ['-g']
|
| 358 |
+
if before:
|
| 359 |
+
cc_args[:0] = before
|
| 360 |
+
return cc_args
|
| 361 |
+
|
| 362 |
+
def _fix_compile_args(self, output_dir, macros, include_dirs):
|
| 363 |
+
"""Typecheck and fix-up some of the arguments to the 'compile()'
|
| 364 |
+
method, and return fixed-up values. Specifically: if 'output_dir'
|
| 365 |
+
is None, replaces it with 'self.output_dir'; ensures that 'macros'
|
| 366 |
+
is a list, and augments it with 'self.macros'; ensures that
|
| 367 |
+
'include_dirs' is a list, and augments it with 'self.include_dirs'.
|
| 368 |
+
Guarantees that the returned values are of the correct type,
|
| 369 |
+
i.e. for 'output_dir' either string or None, and for 'macros' and
|
| 370 |
+
'include_dirs' either list or None.
|
| 371 |
+
"""
|
| 372 |
+
if output_dir is None:
|
| 373 |
+
output_dir = self.output_dir
|
| 374 |
+
elif not isinstance(output_dir, str):
|
| 375 |
+
raise TypeError("'output_dir' must be a string or None")
|
| 376 |
+
|
| 377 |
+
if macros is None:
|
| 378 |
+
macros = self.macros
|
| 379 |
+
elif isinstance(macros, list):
|
| 380 |
+
macros = macros + (self.macros or [])
|
| 381 |
+
else:
|
| 382 |
+
raise TypeError("'macros' (if supplied) must be a list of tuples")
|
| 383 |
+
|
| 384 |
+
if include_dirs is None:
|
| 385 |
+
include_dirs = self.include_dirs
|
| 386 |
+
elif isinstance(include_dirs, (list, tuple)):
|
| 387 |
+
include_dirs = list(include_dirs) + (self.include_dirs or [])
|
| 388 |
+
else:
|
| 389 |
+
raise TypeError(
|
| 390 |
+
"'include_dirs' (if supplied) must be a list of strings")
|
| 391 |
+
|
| 392 |
+
return output_dir, macros, include_dirs
|
| 393 |
+
|
| 394 |
+
def _prep_compile(self, sources, output_dir, depends=None):
|
| 395 |
+
"""Decide which source files must be recompiled.
|
| 396 |
+
|
| 397 |
+
Determine the list of object files corresponding to 'sources',
|
| 398 |
+
and figure out which ones really need to be recompiled.
|
| 399 |
+
Return a list of all object files and a dictionary telling
|
| 400 |
+
which source files can be skipped.
|
| 401 |
+
"""
|
| 402 |
+
# Get the list of expected output (object) files
|
| 403 |
+
objects = self.object_filenames(sources, output_dir=output_dir)
|
| 404 |
+
assert len(objects) == len(sources)
|
| 405 |
+
|
| 406 |
+
# Return an empty dict for the "which source files can be skipped"
|
| 407 |
+
# return value to preserve API compatibility.
|
| 408 |
+
return objects, {}
|
| 409 |
+
|
| 410 |
+
def _fix_object_args(self, objects, output_dir):
|
| 411 |
+
"""Typecheck and fix up some arguments supplied to various methods.
|
| 412 |
+
Specifically: ensure that 'objects' is a list; if output_dir is
|
| 413 |
+
None, replace with self.output_dir. Return fixed versions of
|
| 414 |
+
'objects' and 'output_dir'.
|
| 415 |
+
"""
|
| 416 |
+
if not isinstance(objects, (list, tuple)):
|
| 417 |
+
raise TypeError("'objects' must be a list or tuple of strings")
|
| 418 |
+
objects = list(objects)
|
| 419 |
+
|
| 420 |
+
if output_dir is None:
|
| 421 |
+
output_dir = self.output_dir
|
| 422 |
+
elif not isinstance(output_dir, str):
|
| 423 |
+
raise TypeError("'output_dir' must be a string or None")
|
| 424 |
+
|
| 425 |
+
return (objects, output_dir)
|
| 426 |
+
|
| 427 |
+
def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
|
| 428 |
+
"""Typecheck and fix up some of the arguments supplied to the
|
| 429 |
+
'link_*' methods. Specifically: ensure that all arguments are
|
| 430 |
+
lists, and augment them with their permanent versions
|
| 431 |
+
(eg. 'self.libraries' augments 'libraries'). Return a tuple with
|
| 432 |
+
fixed versions of all arguments.
|
| 433 |
+
"""
|
| 434 |
+
if libraries is None:
|
| 435 |
+
libraries = self.libraries
|
| 436 |
+
elif isinstance(libraries, (list, tuple)):
|
| 437 |
+
libraries = list (libraries) + (self.libraries or [])
|
| 438 |
+
else:
|
| 439 |
+
raise TypeError(
|
| 440 |
+
"'libraries' (if supplied) must be a list of strings")
|
| 441 |
+
|
| 442 |
+
if library_dirs is None:
|
| 443 |
+
library_dirs = self.library_dirs
|
| 444 |
+
elif isinstance(library_dirs, (list, tuple)):
|
| 445 |
+
library_dirs = list (library_dirs) + (self.library_dirs or [])
|
| 446 |
+
else:
|
| 447 |
+
raise TypeError(
|
| 448 |
+
"'library_dirs' (if supplied) must be a list of strings")
|
| 449 |
+
|
| 450 |
+
if runtime_library_dirs is None:
|
| 451 |
+
runtime_library_dirs = self.runtime_library_dirs
|
| 452 |
+
elif isinstance(runtime_library_dirs, (list, tuple)):
|
| 453 |
+
runtime_library_dirs = (list(runtime_library_dirs) +
|
| 454 |
+
(self.runtime_library_dirs or []))
|
| 455 |
+
else:
|
| 456 |
+
raise TypeError("'runtime_library_dirs' (if supplied) "
|
| 457 |
+
"must be a list of strings")
|
| 458 |
+
|
| 459 |
+
return (libraries, library_dirs, runtime_library_dirs)
|
| 460 |
+
|
| 461 |
+
def _need_link(self, objects, output_file):
|
| 462 |
+
"""Return true if we need to relink the files listed in 'objects'
|
| 463 |
+
to recreate 'output_file'.
|
| 464 |
+
"""
|
| 465 |
+
if self.force:
|
| 466 |
+
return True
|
| 467 |
+
else:
|
| 468 |
+
if self.dry_run:
|
| 469 |
+
newer = newer_group (objects, output_file, missing='newer')
|
| 470 |
+
else:
|
| 471 |
+
newer = newer_group (objects, output_file)
|
| 472 |
+
return newer
|
| 473 |
+
|
| 474 |
+
def detect_language(self, sources):
|
| 475 |
+
"""Detect the language of a given file, or list of files. Uses
|
| 476 |
+
language_map, and language_order to do the job.
|
| 477 |
+
"""
|
| 478 |
+
if not isinstance(sources, list):
|
| 479 |
+
sources = [sources]
|
| 480 |
+
lang = None
|
| 481 |
+
index = len(self.language_order)
|
| 482 |
+
for source in sources:
|
| 483 |
+
base, ext = os.path.splitext(source)
|
| 484 |
+
extlang = self.language_map.get(ext)
|
| 485 |
+
try:
|
| 486 |
+
extindex = self.language_order.index(extlang)
|
| 487 |
+
if extindex < index:
|
| 488 |
+
lang = extlang
|
| 489 |
+
index = extindex
|
| 490 |
+
except ValueError:
|
| 491 |
+
pass
|
| 492 |
+
return lang
|
| 493 |
+
|
| 494 |
+
|
| 495 |
+
# -- Worker methods ------------------------------------------------
|
| 496 |
+
# (must be implemented by subclasses)
|
| 497 |
+
|
| 498 |
+
    def preprocess(self, source, output_file=None, macros=None,
                   include_dirs=None, extra_preargs=None, extra_postargs=None):
        """Preprocess a single C/C++ source file, named in 'source'.
        Output will be written to file named 'output_file', or stdout if
        'output_file' not supplied.  'macros' is a list of macro
        definitions as for 'compile()', which will augment the macros set
        with 'define_macro()' and 'undefine_macro()'.  'include_dirs' is a
        list of directory names that will be added to the default list.

        Raises PreprocessError on failure.
        """
        # Base implementation is a no-op; concrete compiler classes
        # override this with a real preprocessor invocation.
        pass
|
| 510 |
+
|
| 511 |
+
def compile(self, sources, output_dir=None, macros=None,
|
| 512 |
+
include_dirs=None, debug=0, extra_preargs=None,
|
| 513 |
+
extra_postargs=None, depends=None):
|
| 514 |
+
"""Compile one or more source files.
|
| 515 |
+
|
| 516 |
+
'sources' must be a list of filenames, most likely C/C++
|
| 517 |
+
files, but in reality anything that can be handled by a
|
| 518 |
+
particular compiler and compiler class (eg. MSVCCompiler can
|
| 519 |
+
handle resource files in 'sources'). Return a list of object
|
| 520 |
+
filenames, one per source filename in 'sources'. Depending on
|
| 521 |
+
the implementation, not all source files will necessarily be
|
| 522 |
+
compiled, but all corresponding object filenames will be
|
| 523 |
+
returned.
|
| 524 |
+
|
| 525 |
+
If 'output_dir' is given, object files will be put under it, while
|
| 526 |
+
retaining their original path component. That is, "foo/bar.c"
|
| 527 |
+
normally compiles to "foo/bar.o" (for a Unix implementation); if
|
| 528 |
+
'output_dir' is "build", then it would compile to
|
| 529 |
+
"build/foo/bar.o".
|
| 530 |
+
|
| 531 |
+
'macros', if given, must be a list of macro definitions. A macro
|
| 532 |
+
definition is either a (name, value) 2-tuple or a (name,) 1-tuple.
|
| 533 |
+
The former defines a macro; if the value is None, the macro is
|
| 534 |
+
defined without an explicit value. The 1-tuple case undefines a
|
| 535 |
+
macro. Later definitions/redefinitions/ undefinitions take
|
| 536 |
+
precedence.
|
| 537 |
+
|
| 538 |
+
'include_dirs', if given, must be a list of strings, the
|
| 539 |
+
directories to add to the default include file search path for this
|
| 540 |
+
compilation only.
|
| 541 |
+
|
| 542 |
+
'debug' is a boolean; if true, the compiler will be instructed to
|
| 543 |
+
output debug symbols in (or alongside) the object file(s).
|
| 544 |
+
|
| 545 |
+
'extra_preargs' and 'extra_postargs' are implementation- dependent.
|
| 546 |
+
On platforms that have the notion of a command-line (e.g. Unix,
|
| 547 |
+
DOS/Windows), they are most likely lists of strings: extra
|
| 548 |
+
command-line arguments to prepend/append to the compiler command
|
| 549 |
+
line. On other platforms, consult the implementation class
|
| 550 |
+
documentation. In any event, they are intended as an escape hatch
|
| 551 |
+
for those occasions when the abstract compiler framework doesn't
|
| 552 |
+
cut the mustard.
|
| 553 |
+
|
| 554 |
+
'depends', if given, is a list of filenames that all targets
|
| 555 |
+
depend on. If a source file is older than any file in
|
| 556 |
+
depends, then the source file will be recompiled. This
|
| 557 |
+
supports dependency tracking, but only at a coarse
|
| 558 |
+
granularity.
|
| 559 |
+
|
| 560 |
+
Raises CompileError on failure.
|
| 561 |
+
"""
|
| 562 |
+
# A concrete compiler class can either override this method
|
| 563 |
+
# entirely or implement _compile().
|
| 564 |
+
macros, objects, extra_postargs, pp_opts, build = \
|
| 565 |
+
self._setup_compile(output_dir, macros, include_dirs, sources,
|
| 566 |
+
depends, extra_postargs)
|
| 567 |
+
cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
|
| 568 |
+
|
| 569 |
+
for obj in objects:
|
| 570 |
+
try:
|
| 571 |
+
src, ext = build[obj]
|
| 572 |
+
except KeyError:
|
| 573 |
+
continue
|
| 574 |
+
self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
|
| 575 |
+
|
| 576 |
+
# Return *all* object filenames, not just the ones we just built.
|
| 577 |
+
return objects
|
| 578 |
+
|
| 579 |
+
    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile 'src' to product 'obj'.

        'ext' is the source file's extension, 'cc_args' the argument
        list built by _get_cc_args(), and 'pp_opts' the preprocessor
        options.  The base implementation is a no-op.
        """
        # A concrete compiler class that does not override compile()
        # should implement _compile().
        pass
|
| 584 |
+
|
| 585 |
+
    def create_static_lib(self, objects, output_libname, output_dir=None,
                          debug=0, target_lang=None):
        """Link a bunch of stuff together to create a static library file.
        The "bunch of stuff" consists of the list of object files supplied
        as 'objects', the extra object files supplied to
        'add_link_object()' and/or 'set_link_objects()', the libraries
        supplied to 'add_library()' and/or 'set_libraries()', and the
        libraries supplied as 'libraries' (if any).

        'output_libname' should be a library name, not a filename; the
        filename will be inferred from the library name.  'output_dir' is
        the directory where the library file will be put.

        'debug' is a boolean; if true, debugging information will be
        included in the library (note that on most platforms, it is the
        compile step where this matters: the 'debug' flag is included here
        just for consistency).

        'target_lang' is the target language for which the given objects
        are being compiled. This allows specific linkage time treatment of
        certain languages.

        Raises LibError on failure.
        """
        # Base implementation is a no-op; concrete compiler classes
        # override this with a real archiver invocation.
        pass
|
| 610 |
+
|
| 611 |
+
|
| 612 |
+
    # values for target_desc parameter in link() -- markers passed as the
    # first argument by the link_*() convenience wrappers below
    SHARED_OBJECT = "shared_object"
    SHARED_LIBRARY = "shared_library"
    EXECUTABLE = "executable"
|
| 616 |
+
|
| 617 |
+
    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir=None,
             libraries=None,
             library_dirs=None,
             runtime_library_dirs=None,
             export_symbols=None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):
        """Link a bunch of stuff together to create an executable or
        shared library file.

        The "bunch of stuff" consists of the list of object files supplied
        as 'objects'.  'output_filename' should be a filename.  If
        'output_dir' is supplied, 'output_filename' is relative to it
        (i.e. 'output_filename' can provide directory components if
        needed).

        'libraries' is a list of libraries to link against.  These are
        library names, not filenames, since they're translated into
        filenames in a platform-specific way (eg. "foo" becomes "libfoo.a"
        on Unix and "foo.lib" on DOS/Windows).  However, they can include a
        directory component, which means the linker will look in that
        specific directory rather than searching all the normal locations.

        'library_dirs', if supplied, should be a list of directories to
        search for libraries that were specified as bare library names
        (ie. no directory component).  These are on top of the system
        default and those supplied to 'add_library_dir()' and/or
        'set_library_dirs()'.  'runtime_library_dirs' is a list of
        directories that will be embedded into the shared library and used
        to search for other shared libraries that *it* depends on at
        run-time.  (This may only be relevant on Unix.)

        'export_symbols' is a list of symbols that the shared library will
        export.  (This appears to be relevant only on Windows.)

        'debug' is as for 'compile()' and 'create_static_lib()', with the
        slight distinction that it actually matters on most platforms (as
        opposed to 'create_static_lib()', which includes a 'debug' flag
        mostly for form's sake).

        'extra_preargs' and 'extra_postargs' are as for 'compile()' (except
        of course that they supply command-line arguments for the
        particular linker being used).

        'target_lang' is the target language for which the given objects
        are being compiled. This allows specific linkage time treatment of
        certain languages.

        Raises LinkError on failure.
        """
        # Abstract: every concrete compiler class must override link().
        raise NotImplementedError
|
| 675 |
+
|
| 676 |
+
|
| 677 |
+
# Old 'link_*()' methods, rewritten to use the new 'link()' method.
|
| 678 |
+
|
| 679 |
+
def link_shared_lib(self,
|
| 680 |
+
objects,
|
| 681 |
+
output_libname,
|
| 682 |
+
output_dir=None,
|
| 683 |
+
libraries=None,
|
| 684 |
+
library_dirs=None,
|
| 685 |
+
runtime_library_dirs=None,
|
| 686 |
+
export_symbols=None,
|
| 687 |
+
debug=0,
|
| 688 |
+
extra_preargs=None,
|
| 689 |
+
extra_postargs=None,
|
| 690 |
+
build_temp=None,
|
| 691 |
+
target_lang=None):
|
| 692 |
+
self.link(CCompiler.SHARED_LIBRARY, objects,
|
| 693 |
+
self.library_filename(output_libname, lib_type='shared'),
|
| 694 |
+
output_dir,
|
| 695 |
+
libraries, library_dirs, runtime_library_dirs,
|
| 696 |
+
export_symbols, debug,
|
| 697 |
+
extra_preargs, extra_postargs, build_temp, target_lang)
|
| 698 |
+
|
| 699 |
+
|
| 700 |
+
def link_shared_object(self,
|
| 701 |
+
objects,
|
| 702 |
+
output_filename,
|
| 703 |
+
output_dir=None,
|
| 704 |
+
libraries=None,
|
| 705 |
+
library_dirs=None,
|
| 706 |
+
runtime_library_dirs=None,
|
| 707 |
+
export_symbols=None,
|
| 708 |
+
debug=0,
|
| 709 |
+
extra_preargs=None,
|
| 710 |
+
extra_postargs=None,
|
| 711 |
+
build_temp=None,
|
| 712 |
+
target_lang=None):
|
| 713 |
+
self.link(CCompiler.SHARED_OBJECT, objects,
|
| 714 |
+
output_filename, output_dir,
|
| 715 |
+
libraries, library_dirs, runtime_library_dirs,
|
| 716 |
+
export_symbols, debug,
|
| 717 |
+
extra_preargs, extra_postargs, build_temp, target_lang)
|
| 718 |
+
|
| 719 |
+
|
| 720 |
+
def link_executable(self,
|
| 721 |
+
objects,
|
| 722 |
+
output_progname,
|
| 723 |
+
output_dir=None,
|
| 724 |
+
libraries=None,
|
| 725 |
+
library_dirs=None,
|
| 726 |
+
runtime_library_dirs=None,
|
| 727 |
+
debug=0,
|
| 728 |
+
extra_preargs=None,
|
| 729 |
+
extra_postargs=None,
|
| 730 |
+
target_lang=None):
|
| 731 |
+
self.link(CCompiler.EXECUTABLE, objects,
|
| 732 |
+
self.executable_filename(output_progname), output_dir,
|
| 733 |
+
libraries, library_dirs, runtime_library_dirs, None,
|
| 734 |
+
debug, extra_preargs, extra_postargs, None, target_lang)
|
| 735 |
+
|
| 736 |
+
|
| 737 |
+
# -- Miscellaneous methods -----------------------------------------
|
| 738 |
+
# These are all used by the 'gen_lib_options() function; there is
|
| 739 |
+
# no appropriate default implementation so subclasses should
|
| 740 |
+
# implement all of these.
|
| 741 |
+
|
| 742 |
+
    def library_dir_option(self, dir):
        """Return the compiler option to add 'dir' to the list of
        directories searched for libraries.
        """
        # Abstract: used by gen_lib_options(); subclasses must implement.
        raise NotImplementedError
|
| 747 |
+
|
| 748 |
+
    def runtime_library_dir_option(self, dir):
        """Return the compiler option to add 'dir' to the list of
        directories searched for runtime libraries.
        """
        # Abstract: used by gen_lib_options(); subclasses must implement.
        raise NotImplementedError
|
| 753 |
+
|
| 754 |
+
    def library_option(self, lib):
        """Return the compiler option to add 'lib' to the list of libraries
        linked into the shared library or executable.
        """
        # Abstract: used by gen_lib_options(); subclasses must implement.
        raise NotImplementedError
|
| 759 |
+
|
| 760 |
+
def has_function(self, funcname, includes=None, include_dirs=None,
|
| 761 |
+
libraries=None, library_dirs=None):
|
| 762 |
+
"""Return a boolean indicating whether funcname is supported on
|
| 763 |
+
the current platform. The optional arguments can be used to
|
| 764 |
+
augment the compilation environment.
|
| 765 |
+
"""
|
| 766 |
+
# this can't be included at module scope because it tries to
|
| 767 |
+
# import math which might not be available at that point - maybe
|
| 768 |
+
# the necessary logic should just be inlined?
|
| 769 |
+
import tempfile
|
| 770 |
+
if includes is None:
|
| 771 |
+
includes = []
|
| 772 |
+
if include_dirs is None:
|
| 773 |
+
include_dirs = []
|
| 774 |
+
if libraries is None:
|
| 775 |
+
libraries = []
|
| 776 |
+
if library_dirs is None:
|
| 777 |
+
library_dirs = []
|
| 778 |
+
fd, fname = tempfile.mkstemp(".c", funcname, text=True)
|
| 779 |
+
f = os.fdopen(fd, "w")
|
| 780 |
+
try:
|
| 781 |
+
for incl in includes:
|
| 782 |
+
f.write("""#include "%s"\n""" % incl)
|
| 783 |
+
f.write("""\
|
| 784 |
+
int main (int argc, char **argv) {
|
| 785 |
+
%s();
|
| 786 |
+
return 0;
|
| 787 |
+
}
|
| 788 |
+
""" % funcname)
|
| 789 |
+
finally:
|
| 790 |
+
f.close()
|
| 791 |
+
try:
|
| 792 |
+
objects = self.compile([fname], include_dirs=include_dirs)
|
| 793 |
+
except CompileError:
|
| 794 |
+
return False
|
| 795 |
+
|
| 796 |
+
try:
|
| 797 |
+
self.link_executable(objects, "a.out",
|
| 798 |
+
libraries=libraries,
|
| 799 |
+
library_dirs=library_dirs)
|
| 800 |
+
except (LinkError, TypeError):
|
| 801 |
+
return False
|
| 802 |
+
return True
|
| 803 |
+
|
| 804 |
+
    def find_library_file (self, dirs, lib, debug=0):
        """Search the specified list of directories for a static or shared
        library file 'lib' and return the full path to that file.  If
        'debug' true, look for a debugging version (if that makes sense on
        the current platform).  Return None if 'lib' wasn't found in any of
        the specified directories.
        """
        # Abstract: filename conventions are platform-specific, so
        # subclasses must implement this.
        raise NotImplementedError
|
| 812 |
+
|
| 813 |
+
# -- Filename generation methods -----------------------------------
|
| 814 |
+
|
| 815 |
+
# The default implementation of the filename generating methods are
|
| 816 |
+
# prejudiced towards the Unix/DOS/Windows view of the world:
|
| 817 |
+
# * object files are named by replacing the source file extension
|
| 818 |
+
# (eg. .c/.cpp -> .o/.obj)
|
| 819 |
+
# * library files (shared or static) are named by plugging the
|
| 820 |
+
# library name and extension into a format string, eg.
|
| 821 |
+
# "lib%s.%s" % (lib_name, ".a") for Unix static libraries
|
| 822 |
+
# * executables are named by appending an extension (possibly
|
| 823 |
+
# empty) to the program name: eg. progname + ".exe" for
|
| 824 |
+
# Windows
|
| 825 |
+
#
|
| 826 |
+
# To reduce redundant code, these methods expect to find
|
| 827 |
+
# several attributes in the current object (presumably defined
|
| 828 |
+
# as class attributes):
|
| 829 |
+
# * src_extensions -
|
| 830 |
+
# list of C/C++ source file extensions, eg. ['.c', '.cpp']
|
| 831 |
+
# * obj_extension -
|
| 832 |
+
# object file extension, eg. '.o' or '.obj'
|
| 833 |
+
# * static_lib_extension -
|
| 834 |
+
# extension for static library files, eg. '.a' or '.lib'
|
| 835 |
+
# * shared_lib_extension -
|
| 836 |
+
# extension for shared library/object files, eg. '.so', '.dll'
|
| 837 |
+
# * static_lib_format -
|
| 838 |
+
# format string for generating static library filenames,
|
| 839 |
+
# eg. 'lib%s.%s' or '%s.%s'
|
| 840 |
+
# * shared_lib_format
|
| 841 |
+
# format string for generating shared library filenames
|
| 842 |
+
# (probably same as static_lib_format, since the extension
|
| 843 |
+
# is one of the intended parameters to the format string)
|
| 844 |
+
# * exe_extension -
|
| 845 |
+
# extension for executable files, eg. '' or '.exe'
|
| 846 |
+
|
| 847 |
+
def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
|
| 848 |
+
if output_dir is None:
|
| 849 |
+
output_dir = ''
|
| 850 |
+
obj_names = []
|
| 851 |
+
for src_name in source_filenames:
|
| 852 |
+
base, ext = os.path.splitext(src_name)
|
| 853 |
+
base = os.path.splitdrive(base)[1] # Chop off the drive
|
| 854 |
+
base = base[os.path.isabs(base):] # If abs, chop off leading /
|
| 855 |
+
if ext not in self.src_extensions:
|
| 856 |
+
raise UnknownFileError(
|
| 857 |
+
"unknown file type '%s' (from '%s')" % (ext, src_name))
|
| 858 |
+
if strip_dir:
|
| 859 |
+
base = os.path.basename(base)
|
| 860 |
+
obj_names.append(os.path.join(output_dir,
|
| 861 |
+
base + self.obj_extension))
|
| 862 |
+
return obj_names
|
| 863 |
+
|
| 864 |
+
def shared_object_filename(self, basename, strip_dir=0, output_dir=''):
|
| 865 |
+
assert output_dir is not None
|
| 866 |
+
if strip_dir:
|
| 867 |
+
basename = os.path.basename(basename)
|
| 868 |
+
return os.path.join(output_dir, basename + self.shared_lib_extension)
|
| 869 |
+
|
| 870 |
+
def executable_filename(self, basename, strip_dir=0, output_dir=''):
|
| 871 |
+
assert output_dir is not None
|
| 872 |
+
if strip_dir:
|
| 873 |
+
basename = os.path.basename(basename)
|
| 874 |
+
return os.path.join(output_dir, basename + (self.exe_extension or ''))
|
| 875 |
+
|
| 876 |
+
def library_filename(self, libname, lib_type='static', # or 'shared'
|
| 877 |
+
strip_dir=0, output_dir=''):
|
| 878 |
+
assert output_dir is not None
|
| 879 |
+
if lib_type not in ("static", "shared", "dylib", "xcode_stub"):
|
| 880 |
+
raise ValueError(
|
| 881 |
+
"'lib_type' must be \"static\", \"shared\", \"dylib\", or \"xcode_stub\"")
|
| 882 |
+
fmt = getattr(self, lib_type + "_lib_format")
|
| 883 |
+
ext = getattr(self, lib_type + "_lib_extension")
|
| 884 |
+
|
| 885 |
+
dir, base = os.path.split(libname)
|
| 886 |
+
filename = fmt % (base, ext)
|
| 887 |
+
if strip_dir:
|
| 888 |
+
dir = ''
|
| 889 |
+
|
| 890 |
+
return os.path.join(output_dir, dir, filename)
|
| 891 |
+
|
| 892 |
+
|
| 893 |
+
# -- Utility methods -----------------------------------------------
|
| 894 |
+
|
| 895 |
+
    def announce(self, msg, level=1):
        # 'level' is accepted for interface compatibility with distutils
        # commands but not used; everything goes to the debug log.
        log.debug(msg)
|
| 897 |
+
|
| 898 |
+
    def debug_print(self, msg):
        # Print 'msg' only when the distutils DEBUG flag is set; the
        # import is deferred so the flag is read at call time.
        from distutils.debug import DEBUG
        if DEBUG:
            print(msg)
|
| 902 |
+
|
| 903 |
+
def warn(self, msg):
|
| 904 |
+
sys.stderr.write("warning: %s\n" % msg)
|
| 905 |
+
|
| 906 |
+
    def execute(self, func, args, msg=None, level=1):
        # Delegate to the module-level execute() helper, honouring this
        # compiler's dry-run setting.  'level' is accepted for interface
        # compatibility but not passed through.
        execute(func, args, msg, self.dry_run)
|
| 908 |
+
|
| 909 |
+
    def spawn(self, cmd):
        # Run the external command 'cmd' via the module-level spawn()
        # helper, honouring this compiler's dry-run setting.
        spawn(cmd, dry_run=self.dry_run)
|
| 911 |
+
|
| 912 |
+
    def move_file(self, src, dst):
        # Move 'src' to 'dst' via the module-level move_file() helper,
        # honouring dry-run mode; returns whatever that helper returns.
        return move_file(src, dst, dry_run=self.dry_run)
|
| 914 |
+
|
| 915 |
+
    def mkpath (self, name, mode=0o777):
        # Create directory 'name' (and any missing ancestors) via the
        # module-level mkpath() helper, honouring dry-run mode.
        mkpath(name, mode, dry_run=self.dry_run)
|
| 917 |
+
|
| 918 |
+
|
| 919 |
+
# Map a sys.platform/os.name ('posix', 'nt') to the default compiler
# type for that platform.  Keys are interpreted as re match
# patterns.  Order is important; platform mappings are preferred over
# OS names.  Consulted by get_default_compiler() below.
_default_compilers = (

    # Platform string mappings

    # on a cygwin built python we can use gcc like an ordinary UNIXish
    # compiler
    ('cygwin.*', 'unix'),

    # OS name mappings
    ('posix', 'unix'),
    ('nt', 'msvc'),

    )
|
| 936 |
+
|
| 937 |
+
def get_default_compiler(osname=None, platform=None):
    """Determine the default compiler to use for the given platform.

    osname should be one of the standard Python OS names (i.e. the
    ones returned by os.name) and platform the common value
    returned by sys.platform for the platform in question.

    The default values are os.name and sys.platform in case the
    parameters are not given.
    """
    osname = os.name if osname is None else osname
    platform = sys.platform if platform is None else platform
    # Table order decides precedence; a pattern match on either the
    # platform string or the OS name selects that compiler type.
    for pattern, compiler in _default_compilers:
        if re.match(pattern, platform) or re.match(pattern, osname):
            return compiler
    # Default to Unix compiler
    return 'unix'
|
| 957 |
+
|
| 958 |
+
# Map compiler types to (module_name, class_name) pairs -- ie. where to
# find the code that implements an interface to this compiler.  (The module
# is assumed to be in the 'distutils' package.)  The third element of each
# value is a human-readable description used by show_compilers().
compiler_class = { 'unix':    ('unixccompiler', 'UnixCCompiler',
                               "standard UNIX-style compiler"),
                   'msvc':    ('_msvccompiler', 'MSVCCompiler',
                               "Microsoft Visual C++"),
                   'cygwin':  ('cygwinccompiler', 'CygwinCCompiler',
                               "Cygwin port of GNU C Compiler for Win32"),
                   'mingw32': ('cygwinccompiler', 'Mingw32CCompiler',
                               "Mingw32 port of GNU C Compiler for Win32"),
                   'bcpp':    ('bcppcompiler', 'BCPPCompiler',
                               "Borland C++ Compiler"),
                 }
|
| 972 |
+
|
| 973 |
+
def show_compilers():
    """Print list of available compilers (used by the "--help-compiler"
    options to "build", "build_ext", "build_clib").
    """
    # XXX this "knows" that the compiler option it's describing is
    # "--compiler", which just happens to be the case for the three
    # commands that use it.
    from distutils.fancy_getopt import FancyGetopt
    entries = sorted(("compiler=" + name, None, info[2])
                     for name, info in compiler_class.items())
    FancyGetopt(entries).print_help("List of available compilers:")
|
| 988 |
+
|
| 989 |
+
|
| 990 |
+
def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
    """Generate an instance of some CCompiler subclass for the supplied
    platform/compiler combination.

    'plat' defaults to 'os.name' (eg. 'posix', 'nt'), and 'compiler'
    defaults to the default compiler for that platform.  Currently only
    'posix' and 'nt' are supported, and the default compilers are
    "traditional Unix interface" (UnixCCompiler class) and Visual C++
    (MSVCCompiler class).  Note that it's perfectly possible to ask for
    a Unix compiler object under Windows, and a Microsoft compiler
    object under Unix -- if you supply a value for 'compiler', 'plat'
    is ignored.
    """
    if plat is None:
        plat = os.name

    # Resolve the compiler type to its implementation module and class.
    try:
        if compiler is None:
            compiler = get_default_compiler(plat)
        (module_name, class_name, long_description) = compiler_class[compiler]
    except KeyError:
        msg = "don't know how to compile C/C++ code on platform '%s'" % plat
        if compiler is not None:
            msg = msg + " with '%s' compiler" % compiler
        raise DistutilsPlatformError(msg)

    # Import the implementation module and fetch the compiler class
    # from it; translate failures into DistutilsModuleError.
    module_name = "distutils." + module_name
    try:
        __import__ (module_name)
        klass = vars(sys.modules[module_name])[class_name]
    except ImportError:
        raise DistutilsModuleError(
              "can't compile C/C++ code: unable to load module '%s'" % \
              module_name)
    except KeyError:
        raise DistutilsModuleError(
               "can't compile C/C++ code: unable to find class '%s' "
               "in module '%s'" % (class_name, module_name))

    # XXX The None is necessary to preserve backwards compatibility
    # with classes that expect verbose to be the first positional
    # argument.
    return klass(None, dry_run, force)
|
| 1033 |
+
|
| 1034 |
+
|
| 1035 |
+
def gen_preprocess_options(macros, include_dirs):
    """Generate C pre-processor options (-D, -U, -I) as used by at least
    two types of compilers: the typical Unix compiler and Visual C++.

    'macros' is a list of 1- or 2-tuples: (name,) means undefine (-U)
    macro 'name', and (name, value) means define (-D) macro 'name' to
    'value' (a value of None defines the macro with no explicit value).
    'include_dirs' is a list of directory names to be added to the header
    file search path (-I).  Returns a list of command-line options
    suitable for either Unix compilers or Visual C++.

    Raises TypeError if any element of 'macros' is not a 1- or 2-tuple.
    """
    # NOTE: redundant -D/-U mentions of the same macro are deliberately
    # kept; compilers honour the last mention, so weeding them out is a
    # purely cosmetic concern (and would belong in CCompiler anyway).
    opts = []
    for macro in macros:
        if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2):
            raise TypeError(
                "bad macro definition '%s': "
                "each element of 'macros' list must be a 1- or 2-tuple"
                % macro)

        if len(macro) == 1:
            # 1-tuple: undefine this macro
            opts.append("-U%s" % macro[0])
        else:
            name, value = macro
            if value is None:
                # define with no explicit value
                opts.append("-D%s" % name)
            else:
                # no quoting of the value needed: the command is spawned
                # without going through a shell
                opts.append("-D%s=%s" % (name, value))

    opts.extend("-I%s" % d for d in include_dirs)
    return opts
|
| 1078 |
+
|
| 1079 |
+
|
| 1080 |
+
def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
    """Generate linker options for searching library directories and
    linking with specific libraries.

    'libraries' and 'library_dirs' are, respectively, lists of library
    names (not filenames!) and search directories.  Formatting of each
    option is delegated to 'compiler' (library_dir_option(),
    runtime_library_dir_option(), library_option(), find_library_file()),
    so the result is suitable for whatever compiler object is passed in.
    Returns a list of command-line options.
    """
    opts = []

    for d in library_dirs:
        opts.append(compiler.library_dir_option(d))

    for d in runtime_library_dirs:
        rt_opt = compiler.runtime_library_dir_option(d)
        # some compilers express one runtime dir as several arguments
        if isinstance(rt_opt, list):
            opts.extend(rt_opt)
        else:
            opts.append(rt_opt)

    # NOTE: redundant library mentions are deliberately *not* removed --
    # "-lfoo -lbar -lfoo" is sometimes genuinely required to resolve all
    # symbols with single-pass linkers.

    for lib in libraries:
        lib_dir, lib_name = os.path.split(lib)
        if not lib_dir:
            opts.append(compiler.library_option(lib))
            continue
        # a directory component means "link this exact library file"
        lib_file = compiler.find_library_file([lib_dir], lib_name)
        if lib_file:
            opts.append(lib_file)
        else:
            compiler.warn("no library file corresponding to "
                          "'%s' found (skipping)" % lib)

    return opts
|
llava/lib/python3.10/distutils/cmd.py
ADDED
|
@@ -0,0 +1,403 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.cmd
|
| 2 |
+
|
| 3 |
+
Provides the Command class, the base class for the command classes
|
| 4 |
+
in the distutils.command package.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import sys, os, re
|
| 8 |
+
from distutils.errors import DistutilsOptionError
|
| 9 |
+
from distutils import util, dir_util, file_util, archive_util, dep_util
|
| 10 |
+
from distutils import log
|
| 11 |
+
|
| 12 |
+
class Command:
    """Abstract base class for defining command classes, the "worker bees"
    of the Distutils.  A useful analogy for command classes is to think of
    them as subroutines with local variables called "options".  The options
    are "declared" in 'initialize_options()' and "defined" (given their
    final values, aka "finalized") in 'finalize_options()', both of which
    must be defined by every command class.  The distinction between the
    two is necessary because option values might come from the outside
    world (command line, config file, ...), and any options dependent on
    other options must be computed *after* these outside influences have
    been processed -- hence 'finalize_options()'.  The "body" of the
    subroutine, where it does all its work based on the values of its
    options, is the 'run()' method, which must also be implemented by every
    command class.
    """

    # 'sub_commands' formalizes the notion of a "family" of commands,
    # eg. "install" as the parent with sub-commands "install_lib",
    # "install_headers", etc.  The parent of a family of commands
    # defines 'sub_commands' as a class attribute; it's a list of
    #   (command_name : string, predicate : unbound_method | string | None)
    # tuples, where 'predicate' is a method of the parent command that
    # determines whether the corresponding command is applicable in the
    # current situation.  (Eg. "install_headers" is only applicable if
    # we have any C header files to install.)  If 'predicate' is None,
    # that command is always applicable.
    #
    # 'sub_commands' is usually defined at the *end* of a class, because
    # predicates can be unbound methods, so they must already have been
    # defined.  The canonical example is the "install" command.
    sub_commands = []


    # -- Creation/initialization methods -------------------------------

    def __init__(self, dist):
        """Create and initialize a new Command object.  Most importantly,
        invokes the 'initialize_options()' method, which is the real
        initializer and depends on the actual command being
        instantiated.

        'dist' must be a distutils.dist.Distribution instance; raises
        TypeError otherwise, and RuntimeError if instantiated directly
        (Command is abstract).
        """
        # late import because of mutual dependence between these classes
        from distutils.dist import Distribution

        if not isinstance(dist, Distribution):
            raise TypeError("dist must be a Distribution instance")
        if self.__class__ is Command:
            raise RuntimeError("Command is an abstract class")

        self.distribution = dist
        self.initialize_options()

        # Per-command versions of the global flags, so that the user can
        # customize Distutils' behaviour command-by-command and let some
        # commands fall back on the Distribution's behaviour.  None means
        # "not defined, check self.distribution's copy", while 0 or 1 mean
        # false and true (duh).  Note that this means figuring out the real
        # value of each flag is a touch complicated -- hence "self._dry_run"
        # will be handled by __getattr__, below.
        # XXX This needs to be fixed.
        self._dry_run = None

        # verbose is largely ignored, but needs to be set for
        # backwards compatibility (I think)?
        self.verbose = dist.verbose

        # Some commands define a 'self.force' option to ignore file
        # timestamps, but methods defined *here* assume that
        # 'self.force' exists for all commands.  So define it here
        # just to be safe.
        self.force = None

        # The 'help' flag is just used for command-line parsing, so
        # none of that complicated bureaucracy is needed.
        self.help = 0

        # 'finalized' records whether or not 'finalize_options()' has been
        # called.  'finalize_options()' itself should not pay attention to
        # this flag: it is the business of 'ensure_finalized()', which
        # always calls 'finalize_options()', to respect/update it.
        self.finalized = 0

    # XXX A more explicit way to customize dry_run would be better.
    def __getattr__(self, attr):
        # Only 'dry_run' gets the fall-back-to-Distribution treatment;
        # any other missing attribute is a genuine AttributeError.
        if attr == 'dry_run':
            myval = getattr(self, "_" + attr)
            if myval is None:
                return getattr(self.distribution, attr)
            else:
                return myval
        else:
            raise AttributeError(attr)

    def ensure_finalized(self):
        """Call 'finalize_options()' exactly once, recording that it ran."""
        if not self.finalized:
            self.finalize_options()
        self.finalized = 1

    # Subclasses must define:
    #   initialize_options()
    #     provide default values for all options; may be customized by
    #     setup script, by options from config file(s), or by command-line
    #     options
    #   finalize_options()
    #     decide on the final values for all options; this is called
    #     after all possible intervention from the outside world
    #     (command-line, option file, etc.) has been processed
    #   run()
    #     run the command: do whatever it is we're here to do,
    #     controlled by the command's various option values

    def initialize_options(self):
        """Set default values for all the options that this command
        supports.  Note that these defaults may be overridden by other
        commands, by the setup script, by config files, or by the
        command-line.  Thus, this is not the place to code dependencies
        between options; generally, 'initialize_options()' implementations
        are just a bunch of "self.foo = None" assignments.

        This method must be implemented by all command classes.
        """
        raise RuntimeError("abstract method -- subclass %s must override"
                           % self.__class__)

    def finalize_options(self):
        """Set final values for all the options that this command supports.
        This is always called as late as possible, ie.  after any option
        assignments from the command-line or from other commands have been
        done.  Thus, this is the place to code option dependencies: if
        'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
        long as 'foo' still has the same value it was assigned in
        'initialize_options()'.

        This method must be implemented by all command classes.
        """
        raise RuntimeError("abstract method -- subclass %s must override"
                           % self.__class__)


    def dump_options(self, header=None, indent=""):
        """Announce (at INFO level) the current value of every user option,
        one per line, under an optional 'header' line."""
        from distutils.fancy_getopt import longopt_xlate
        if header is None:
            header = "command options for '%s':" % self.get_command_name()
        self.announce(indent + header, level=log.INFO)
        indent = indent + "  "
        for (option, _, _) in self.user_options:
            # map the long-option spelling back to the attribute name
            option = option.translate(longopt_xlate)
            if option[-1] == "=":
                option = option[:-1]
            value = getattr(self, option)
            self.announce(indent + "%s = %s" % (option, value),
                          level=log.INFO)

    def run(self):
        """A command's raison d'etre: carry out the action it exists to
        perform, controlled by the options initialized in
        'initialize_options()', customized by other commands, the setup
        script, the command-line, and config files, and finalized in
        'finalize_options()'.  All terminal output and filesystem
        interaction should be done by 'run()'.

        This method must be implemented by all command classes.
        """
        raise RuntimeError("abstract method -- subclass %s must override"
                           % self.__class__)

    def announce(self, msg, level=1):
        """If the current verbosity level is of greater than or equal to
        'level' print 'msg' to stdout.
        """
        log.log(level, msg)

    def debug_print(self, msg):
        """Print 'msg' to stdout if the global DEBUG (taken from the
        DISTUTILS_DEBUG environment variable) flag is true.
        """
        from distutils.debug import DEBUG
        if DEBUG:
            print(msg)
            sys.stdout.flush()


    # -- Option validation methods -------------------------------------
    # (these are very handy in writing the 'finalize_options()' method)
    #
    # NB. the general philosophy here is to ensure that a particular option
    # value meets certain type and value constraints.  If not, we try to
    # force it into conformance (eg. if we expect a list but have a string,
    # split the string on comma and/or whitespace).  If we can't force the
    # option into conformance, raise DistutilsOptionError.  Thus, command
    # classes need do nothing more than (eg.)
    #   self.ensure_string_list('foo')
    # and they can be guaranteed that thereafter, self.foo will be
    # a list of strings.

    def _ensure_stringlike(self, option, what, default=None):
        """Return the value of 'option', defaulting it to 'default' when
        unset; raise DistutilsOptionError if it is set but not a str."""
        val = getattr(self, option)
        if val is None:
            setattr(self, option, default)
            return default
        elif not isinstance(val, str):
            raise DistutilsOptionError("'%s' must be a %s (got `%s`)"
                                       % (option, what, val))
        return val

    def ensure_string(self, option, default=None):
        """Ensure that 'option' is a string; if not defined, set it to
        'default'.
        """
        self._ensure_stringlike(option, "string", default)

    def ensure_string_list(self, option):
        r"""Ensure that 'option' is a list of strings.  If 'option' is
        currently a string, we split it either on /,\s*/ or /\s+/, so
        "foo bar baz", "foo,bar,baz", and "foo,   bar baz" all become
        ["foo", "bar", "baz"].
        """
        val = getattr(self, option)
        if val is None:
            return
        elif isinstance(val, str):
            setattr(self, option, re.split(r',\s*|\s+', val))
        else:
            if isinstance(val, list):
                ok = all(isinstance(v, str) for v in val)
            else:
                ok = False
            if not ok:
                raise DistutilsOptionError(
                    "'%s' must be a list of strings (got %r)"
                    % (option, val))

    def _ensure_tested_string(self, option, tester, what, error_fmt,
                              default=None):
        # like _ensure_stringlike, but additionally validates a non-None
        # value with 'tester' (eg. os.path.isfile)
        val = self._ensure_stringlike(option, what, default)
        if val is not None and not tester(val):
            raise DistutilsOptionError(("error in '%s' option: " + error_fmt)
                                       % (option, val))

    def ensure_filename(self, option):
        """Ensure that 'option' is the name of an existing file."""
        self._ensure_tested_string(option, os.path.isfile,
                                   "filename",
                                   "'%s' does not exist or is not a file")

    def ensure_dirname(self, option):
        """Ensure that 'option' is the name of an existing directory."""
        self._ensure_tested_string(option, os.path.isdir,
                                   "directory name",
                                   "'%s' does not exist or is not a directory")


    # -- Convenience methods for commands ------------------------------

    def get_command_name(self):
        """Return 'command_name' if set, else the class name."""
        if hasattr(self, 'command_name'):
            return self.command_name
        else:
            return self.__class__.__name__

    def set_undefined_options(self, src_cmd, *option_pairs):
        """Set the values of any "undefined" options from corresponding
        option values in some other command object.  "Undefined" here means
        "is None", which is the convention used to indicate that an option
        has not been changed between 'initialize_options()' and
        'finalize_options()'.  Usually called from 'finalize_options()' for
        options that depend on some other command rather than another
        option of the same command.  'src_cmd' is the other command from
        which option values will be taken (a command object will be created
        for it if necessary); the remaining arguments are
        '(src_option,dst_option)' tuples which mean "take the value of
        'src_option' in the 'src_cmd' command object, and copy it to
        'dst_option' in the current command object".
        """
        # Option_pairs: list of (src_option, dst_option) tuples
        src_cmd_obj = self.distribution.get_command_obj(src_cmd)
        src_cmd_obj.ensure_finalized()
        for (src_option, dst_option) in option_pairs:
            if getattr(self, dst_option) is None:
                setattr(self, dst_option, getattr(src_cmd_obj, src_option))

    def get_finalized_command(self, command, create=1):
        """Wrapper around Distribution's 'get_command_obj()' method: find
        (create if necessary and 'create' is true) the command object for
        'command', call its 'ensure_finalized()' method, and return the
        finalized command object.
        """
        cmd_obj = self.distribution.get_command_obj(command, create)
        cmd_obj.ensure_finalized()
        return cmd_obj

    # XXX rename to 'get_reinitialized_command()'? (should do the
    # same in dist.py, if so)
    def reinitialize_command(self, command, reinit_subcommands=0):
        """Delegate to Distribution.reinitialize_command()."""
        return self.distribution.reinitialize_command(command,
                                                      reinit_subcommands)

    def run_command(self, command):
        """Run some other command: uses the 'run_command()' method of
        Distribution, which creates and finalizes the command object if
        necessary and then invokes its 'run()' method.
        """
        self.distribution.run_command(command)

    def get_sub_commands(self):
        """Determine the sub-commands that are relevant in the current
        distribution (ie., that need to be run).  This is based on the
        'sub_commands' class attribute: each tuple in that list may include
        a method that we call to determine if the subcommand needs to be
        run for the current distribution.  Return a list of command names.
        """
        commands = []
        for (cmd_name, method) in self.sub_commands:
            if method is None or method(self):
                commands.append(cmd_name)
        return commands


    # -- External world manipulation -----------------------------------

    def warn(self, msg):
        """Log 'msg' as a warning, prefixed with this command's name."""
        log.warn("warning: %s: %s\n", self.get_command_name(), msg)

    def execute(self, func, args, msg=None, level=1):
        """Run 'func(*args)' respecting the dry-run flag."""
        util.execute(func, args, msg, dry_run=self.dry_run)

    def mkpath(self, name, mode=0o777):
        """Create directory 'name' (and missing ancestors) respecting
        the dry-run flag."""
        dir_util.mkpath(name, mode, dry_run=self.dry_run)

    def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1,
                  link=None, level=1):
        """Copy a file respecting verbose, dry-run and force flags.  (The
        former two default to whatever is in the Distribution object, and
        the latter defaults to false for commands that don't define it.)"""
        return file_util.copy_file(infile, outfile, preserve_mode,
                                   preserve_times, not self.force, link,
                                   dry_run=self.dry_run)

    def copy_tree(self, infile, outfile, preserve_mode=1, preserve_times=1,
                  preserve_symlinks=0, level=1):
        """Copy an entire directory tree respecting verbose, dry-run,
        and force flags.
        """
        return dir_util.copy_tree(infile, outfile, preserve_mode,
                                  preserve_times, preserve_symlinks,
                                  not self.force, dry_run=self.dry_run)

    def move_file (self, src, dst, level=1):
        """Move a file respecting dry-run flag."""
        return file_util.move_file(src, dst, dry_run=self.dry_run)

    def spawn(self, cmd, search_path=1, level=1):
        """Spawn an external command respecting dry-run flag."""
        from distutils.spawn import spawn
        spawn(cmd, search_path, dry_run=self.dry_run)

    def make_archive(self, base_name, format, root_dir=None, base_dir=None,
                     owner=None, group=None):
        """Create an archive (zip/tar/...) respecting the dry-run flag."""
        return archive_util.make_archive(base_name, format, root_dir, base_dir,
                                         dry_run=self.dry_run,
                                         owner=owner, group=group)

    def make_file(self, infiles, outfile, func, args,
                  exec_msg=None, skip_msg=None, level=1):
        """Special case of 'execute()' for operations that process one or
        more input files and generate one output file.  Works just like
        'execute()', except the operation is skipped and a different
        message printed if 'outfile' already exists and is newer than all
        files listed in 'infiles'.  If the command defined 'self.force',
        and it is true, then the command is unconditionally run -- does no
        timestamp checks.
        """
        if skip_msg is None:
            skip_msg = "skipping %s (inputs unchanged)" % outfile

        # Allow 'infiles' to be a single string
        if isinstance(infiles, str):
            infiles = (infiles,)
        elif not isinstance(infiles, (list, tuple)):
            raise TypeError(
                  "'infiles' must be a string, or a list or tuple of strings")

        if exec_msg is None:
            exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles))

        # If 'outfile' must be regenerated (either because it doesn't
        # exist, is out-of-date, or the 'force' flag is true) then
        # perform the action that presumably regenerates it
        if self.force or dep_util.newer_group(infiles, outfile):
            self.execute(func, args, exec_msg, level)
        # Otherwise, print the "skip" message
        else:
            log.debug(skip_msg)
|
llava/lib/python3.10/distutils/config.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.pypirc
|
| 2 |
+
|
| 3 |
+
Provides the PyPIRCCommand class, the base class for the command classes
|
| 4 |
+
that uses .pypirc in the distutils.command package.
|
| 5 |
+
"""
|
| 6 |
+
import os
|
| 7 |
+
from configparser import RawConfigParser
|
| 8 |
+
|
| 9 |
+
from distutils.cmd import Command
|
| 10 |
+
|
| 11 |
+
DEFAULT_PYPIRC = """\
|
| 12 |
+
[distutils]
|
| 13 |
+
index-servers =
|
| 14 |
+
pypi
|
| 15 |
+
|
| 16 |
+
[pypi]
|
| 17 |
+
username:%s
|
| 18 |
+
password:%s
|
| 19 |
+
"""
|
| 20 |
+
|
| 21 |
+
class PyPIRCCommand(Command):
    """Base command that knows how to handle the .pypirc file
    """
    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
    DEFAULT_REALM = 'pypi'
    # class-level defaults; instances override them in initialize_options()
    repository = None
    realm = None

    user_options = [
        ('repository=', 'r',
         "url of repository [default: %s]" % \
            DEFAULT_REPOSITORY),
        ('show-response', None,
         'display full response text from server')]

    boolean_options = ['show-response']

    def _get_rc_file(self):
        """Returns rc file path."""
        return os.path.join(os.path.expanduser('~'), '.pypirc')

    def _store_pypirc(self, username, password):
        """Creates a default .pypirc file."""
        rc = self._get_rc_file()
        # 0o600: the file holds a password, keep it owner-readable only
        with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f:
            f.write(DEFAULT_PYPIRC % (username, password))

    def _read_pypirc(self):
        """Reads the .pypirc file.

        Returns a dict with 'username', 'password', 'repository', 'realm'
        and 'server' keys for the server matching self.repository (both the
        current and the legacy 'server-login' file formats are understood),
        or an empty dict if no usable configuration is found.
        """
        rc = self._get_rc_file()
        if os.path.exists(rc):
            self.announce('Using PyPI login from %s' % rc)
            repository = self.repository or self.DEFAULT_REPOSITORY

            config = RawConfigParser()
            config.read(rc)
            sections = config.sections()
            if 'distutils' in sections:
                # let's get the list of servers
                index_servers = config.get('distutils', 'index-servers')
                _servers = [server.strip() for server in
                            index_servers.split('\n')
                            if server.strip() != '']
                if _servers == []:
                    # nothing set, let's try to get the default pypi
                    if 'pypi' in sections:
                        _servers = ['pypi']
                    else:
                        # the file is not properly defined, returning
                        # an empty dict
                        return {}
                for server in _servers:
                    current = {'server': server}
                    current['username'] = config.get(server, 'username')

                    # optional params
                    for key, default in (('repository',
                                          self.DEFAULT_REPOSITORY),
                                         ('realm', self.DEFAULT_REALM),
                                         ('password', None)):
                        if config.has_option(server, key):
                            current[key] = config.get(server, key)
                        else:
                            current[key] = default

                    # work around people having "repository" for the "pypi"
                    # section of their config set to the HTTP (rather than
                    # HTTPS) URL
                    if (server == 'pypi' and
                        repository in (self.DEFAULT_REPOSITORY, 'pypi')):
                        current['repository'] = self.DEFAULT_REPOSITORY
                        return current

                    if (current['server'] == repository or
                        current['repository'] == repository):
                        return current
            elif 'server-login' in sections:
                # old format
                server = 'server-login'
                if config.has_option(server, 'repository'):
                    repository = config.get(server, 'repository')
                else:
                    repository = self.DEFAULT_REPOSITORY
                return {'username': config.get(server, 'username'),
                        'password': config.get(server, 'password'),
                        'repository': repository,
                        'server': server,
                        'realm': self.DEFAULT_REALM}

        return {}

    def _read_pypi_response(self, response):
        """Read and decode a PyPI HTTP response."""
        import cgi
        content_type = response.getheader('content-type', 'text/plain')
        # default to ascii when the server sends no charset parameter
        encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii')
        return response.read().decode(encoding)

    def initialize_options(self):
        """Initialize options."""
        self.repository = None
        self.realm = None
        self.show_response = 0

    def finalize_options(self):
        """Finalizes options."""
        if self.repository is None:
            self.repository = self.DEFAULT_REPOSITORY
        if self.realm is None:
            self.realm = self.DEFAULT_REALM
|
llava/lib/python3.10/distutils/core.py
ADDED
|
@@ -0,0 +1,234 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.core
|
| 2 |
+
|
| 3 |
+
The only module that needs to be imported to use the Distutils; provides
|
| 4 |
+
the 'setup' function (which is to be called from the setup script). Also
|
| 5 |
+
indirectly provides the Distribution and Command classes, although they are
|
| 6 |
+
really defined in distutils.dist and distutils.cmd.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import os
|
| 10 |
+
import sys
|
| 11 |
+
|
| 12 |
+
from distutils.debug import DEBUG
|
| 13 |
+
from distutils.errors import *
|
| 14 |
+
|
| 15 |
+
# Mainly import these so setup scripts can "from distutils.core import" them.
|
| 16 |
+
from distutils.dist import Distribution
|
| 17 |
+
from distutils.cmd import Command
|
| 18 |
+
from distutils.config import PyPIRCCommand
|
| 19 |
+
from distutils.extension import Extension
|
| 20 |
+
|
| 21 |
+
# This is a barebones help message generated displayed when the user
|
| 22 |
+
# runs the setup script with no arguments at all. More useful help
|
| 23 |
+
# is generated with various --help options: global help, list commands,
|
| 24 |
+
# and per-command help.
|
| 25 |
+
USAGE = """\
usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
   or: %(script)s --help [cmd1 cmd2 ...]
   or: %(script)s --help-commands
   or: %(script)s cmd --help
"""

def gen_usage (script_name):
    """Return the generic usage text with *script_name*'s basename
    substituted for the '%(script)s' placeholders."""
    return USAGE % {'script': os.path.basename(script_name)}
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'.
# run_setup() sets _setup_stop_after before exec'ing the setup script so
# that the embedded setup() call can stop early; setup() stores the created
# Distribution in _setup_distribution for run_setup() to return.
_setup_stop_after = None
_setup_distribution = None

# Legal keyword arguments for the setup() function
setup_keywords = ('distclass', 'script_name', 'script_args', 'options',
                  'name', 'version', 'author', 'author_email',
                  'maintainer', 'maintainer_email', 'url', 'license',
                  'description', 'long_description', 'keywords',
                  'platforms', 'classifiers', 'download_url',
                  'requires', 'provides', 'obsoletes',
                  )

# Legal keyword arguments for the Extension constructor
extension_keywords = ('name', 'sources', 'include_dirs',
                      'define_macros', 'undef_macros',
                      'library_dirs', 'libraries', 'runtime_library_dirs',
                      'extra_objects', 'extra_compile_args', 'extra_link_args',
                      'swig_opts', 'export_symbols', 'depends', 'language')
|
| 56 |
+
|
| 57 |
+
def setup (**attrs):
    """The gateway to the Distutils: do everything your setup script needs
    to do, in a highly flexible and user-driven way.  Briefly: create a
    Distribution instance; find and parse config files; parse the command
    line; run each Distutils command found there, customized by the options
    supplied to 'setup()' (as keyword arguments), in config files, and on
    the command line.

    The Distribution instance might be an instance of a class supplied via
    the 'distclass' keyword argument to 'setup'; if no such class is
    supplied, then the Distribution class (in dist.py) is instantiated.
    All other arguments to 'setup' (except for 'cmdclass') are used to set
    attributes of the Distribution instance.

    The 'cmdclass' argument, if supplied, is a dictionary mapping command
    names to command classes.  Each command encountered on the command line
    will be turned into a command class, which is in turn instantiated; any
    class found in 'cmdclass' is used in place of the default, which is
    (for command 'foo_bar') class 'foo_bar' in module
    'distutils.command.foo_bar'.  The command class must provide a
    'user_options' attribute which is a list of option specifiers for
    'distutils.fancy_getopt'.  Any command-line options between the current
    and the next command are used to set attributes of the current command
    object.

    When the entire command-line has been successfully parsed, calls the
    'run()' method on each command object in turn.  This method will be
    driven entirely by the Distribution object (which each command object
    has a reference to, thanks to its constructor), and the
    command-specific options that became attributes of each command
    object.
    """

    # These module globals are read back by run_setup() (see below).
    global _setup_stop_after, _setup_distribution

    # Determine the distribution class -- either caller-supplied or
    # our Distribution (see below).
    klass = attrs.get('distclass')
    if klass:
        del attrs['distclass']
    else:
        klass = Distribution

    if 'script_name' not in attrs:
        attrs['script_name'] = os.path.basename(sys.argv[0])
    if 'script_args' not in attrs:
        attrs['script_args'] = sys.argv[1:]

    # Create the Distribution instance, using the remaining arguments
    # (ie. everything except distclass) to initialize it
    try:
        _setup_distribution = dist = klass(attrs)
    except DistutilsSetupError as msg:
        # Include the project name in the error when we know it.
        if 'name' not in attrs:
            raise SystemExit("error in setup command: %s" % msg)
        else:
            raise SystemExit("error in %s setup command: %s" % \
                  (attrs['name'], msg))

    # _setup_stop_after is set by run_setup() to abort processing early.
    if _setup_stop_after == "init":
        return dist

    # Find and parse the config file(s): they will override options from
    # the setup script, but be overridden by the command line.
    dist.parse_config_files()

    if DEBUG:
        print("options (after parsing config files):")
        dist.dump_option_dicts()

    if _setup_stop_after == "config":
        return dist

    # Parse the command line and override config files; any
    # command-line errors are the end user's fault, so turn them into
    # SystemExit to suppress tracebacks.
    try:
        ok = dist.parse_command_line()
    except DistutilsArgError as msg:
        raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg)

    if DEBUG:
        print("options (after parsing command line):")
        dist.dump_option_dicts()

    if _setup_stop_after == "commandline":
        return dist

    # And finally, run all the commands found on the command line.
    if ok:
        try:
            dist.run_commands()
        except KeyboardInterrupt:
            raise SystemExit("interrupted")
        except OSError as exc:
            # In debug mode re-raise with the original traceback after
            # logging; otherwise translate to a clean SystemExit.
            if DEBUG:
                sys.stderr.write("error: %s\n" % (exc,))
                raise
            else:
                raise SystemExit("error: %s" % (exc,))

        except (DistutilsError,
                CCompilerError) as msg:
            if DEBUG:
                raise
            else:
                raise SystemExit("error: " + str(msg))

    return dist

# setup ()
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
def run_setup (script_name, script_args=None, stop_after="run"):
    """Run a setup script in a somewhat controlled environment, and
    return the Distribution instance that drives things.  This is useful
    if you need to find out the distribution meta-data (passed as
    keyword args from 'script' to 'setup()', or the contents of the
    config files or command-line.

    'script_name' is a file that will be read and run with 'exec()';
    'sys.argv[0]' will be replaced with 'script' for the duration of the
    call.  'script_args' is a list of strings; if supplied,
    'sys.argv[1:]' will be replaced by 'script_args' for the duration of
    the call.

    'stop_after' tells 'setup()' when to stop processing; possible
    values:
      init
        stop after the Distribution instance has been created and
        populated with the keyword arguments to 'setup()'
      config
        stop after config files have been parsed (and their data
        stored in the Distribution instance)
      commandline
        stop after the command-line ('sys.argv[1:]' or 'script_args')
        have been parsed (and the data stored in the Distribution)
      run [default]
        stop after all commands have been run (the same as if 'setup()'
        had been called in the usual way

    Returns the Distribution instance, which provides all information
    used to drive the Distutils.
    """
    if stop_after not in ('init', 'config', 'commandline', 'run'):
        raise ValueError("invalid value for 'stop_after': %r" % (stop_after,))

    # Communicate with the embedded setup() call through module globals.
    global _setup_stop_after, _setup_distribution
    _setup_stop_after = stop_after

    # Save argv so we can restore it even if the script raises.
    save_argv = sys.argv.copy()
    g = {'__file__': script_name}
    try:
        try:
            sys.argv[0] = script_name
            if script_args is not None:
                sys.argv[1:] = script_args
            # Read as bytes: exec() handles the source encoding itself.
            with open(script_name, 'rb') as f:
                exec(f.read(), g)
        finally:
            sys.argv = save_argv
            _setup_stop_after = None
    except SystemExit:
        # Hmm, should we do something if exiting with a non-zero code
        # (ie. error)?
        pass

    if _setup_distribution is None:
        raise RuntimeError(("'distutils.core.setup()' was never called -- "
                           "perhaps '%s' is not a Distutils setup script?") % \
                           script_name)

    # I wonder if the setup script's namespace -- g and l -- would be of
    # any interest to callers?
    #print "_setup_distribution:", _setup_distribution
    return _setup_distribution

# run_setup ()
|
llava/lib/python3.10/distutils/cygwinccompiler.py
ADDED
|
@@ -0,0 +1,406 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.cygwinccompiler
|
| 2 |
+
|
| 3 |
+
Provides the CygwinCCompiler class, a subclass of UnixCCompiler that
|
| 4 |
+
handles the Cygwin port of the GNU C compiler to Windows. It also contains
|
| 5 |
+
the Mingw32CCompiler class which handles the mingw32 port of GCC (same as
|
| 6 |
+
cygwin in no-cygwin mode).
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
# problems:
|
| 10 |
+
#
|
| 11 |
+
# * if you use a msvc compiled python version (1.5.2)
|
| 12 |
+
# 1. you have to insert a __GNUC__ section in its config.h
|
| 13 |
+
# 2. you have to generate an import library for its dll
|
| 14 |
+
# - create a def-file for python??.dll
|
| 15 |
+
# - create an import library using
|
| 16 |
+
# dlltool --dllname python15.dll --def python15.def \
|
| 17 |
+
# --output-lib libpython15.a
|
| 18 |
+
#
|
| 19 |
+
# see also http://starship.python.net/crew/kernr/mingw32/Notes.html
|
| 20 |
+
#
|
| 21 |
+
# * We put export_symbols in a def-file, and don't use
|
| 22 |
+
# --export-all-symbols because it doesn't worked reliable in some
|
| 23 |
+
# tested configurations. And because other windows compilers also
|
| 24 |
+
# need their symbols specified this no serious problem.
|
| 25 |
+
#
|
| 26 |
+
# tested configurations:
|
| 27 |
+
#
|
| 28 |
+
# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works
|
| 29 |
+
# (after patching python's config.h and for C++ some other include files)
|
| 30 |
+
# see also http://starship.python.net/crew/kernr/mingw32/Notes.html
|
| 31 |
+
# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works
|
| 32 |
+
# (ld doesn't support -shared, so we use dllwrap)
|
| 33 |
+
# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now
|
| 34 |
+
# - its dllwrap doesn't work, there is a bug in binutils 2.10.90
|
| 35 |
+
# see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html
|
| 36 |
+
# - using gcc -mdll instead dllwrap doesn't work without -static because
|
| 37 |
+
# it tries to link against dlls instead their import libraries. (If
|
| 38 |
+
# it finds the dll first.)
|
| 39 |
+
# By specifying -static we force ld to link against the import libraries,
|
| 40 |
+
# this is windows standard and there are normally not the necessary symbols
|
| 41 |
+
# in the dlls.
|
| 42 |
+
# *** only the version of June 2000 shows these problems
|
| 43 |
+
# * cygwin gcc 3.2/ld 2.13.90 works
|
| 44 |
+
# (ld supports -shared)
|
| 45 |
+
# * mingw gcc 3.2/ld 2.13 works
|
| 46 |
+
# (ld supports -shared)
|
| 47 |
+
|
| 48 |
+
import os
|
| 49 |
+
import sys
|
| 50 |
+
import copy
|
| 51 |
+
from subprocess import Popen, PIPE, check_output
|
| 52 |
+
import re
|
| 53 |
+
|
| 54 |
+
from distutils.unixccompiler import UnixCCompiler
|
| 55 |
+
from distutils.file_util import write_file
|
| 56 |
+
from distutils.errors import (DistutilsExecError, CCompilerError,
|
| 57 |
+
CompileError, UnknownFileError)
|
| 58 |
+
from distutils.version import LooseVersion
|
| 59 |
+
from distutils.spawn import find_executable
|
| 60 |
+
|
| 61 |
+
def get_msvcr():
    """Include the appropriate MSVC runtime library if Python was built
    with MSVC 7.0 or later.

    Returns a one-element list of library base names, or None when
    sys.version carries no 'MSC v.' marker (i.e. not an MSVC build).
    """
    pos = sys.version.find('MSC v.')
    if pos == -1:
        # Not built with MSVC: nothing to link against.
        return None
    msc_ver = sys.version[pos + 6:pos + 10]
    # Fixed mapping for the compiler versions distutils knows about.
    runtime_by_ver = {
        '1300': 'msvcr70',    # MSVC 7.0
        '1310': 'msvcr71',    # MSVC 7.1
        '1400': 'msvcr80',    # VS2005 / MSVC 8.0
        '1500': 'msvcr90',    # VS2008 / MSVC 9.0
        '1600': 'msvcr100',   # VS2010 / MSVC 10.0
    }
    if msc_ver in runtime_by_ver:
        return [runtime_by_ver[msc_ver]]
    if int(msc_ver) >= 1900:
        # VS2015 / MSVC 14.0
        return ['msvcr140']
    raise ValueError("Unknown MS Compiler version %s " % msc_ver)
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
class CygwinCCompiler(UnixCCompiler):
    """ Handles the Cygwin port of the GNU C compiler to Windows.
    """
    compiler_type = 'cygwin'
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".dll"
    # Static libraries are named libX.a; shared libraries keep a bare X.dll.
    static_lib_format = "lib%s%s"
    shared_lib_format = "%s%s"
    exe_extension = ".exe"

    def __init__(self, verbose=0, dry_run=0, force=0):
        # Probes pyconfig.h and tool versions, then configures the gcc
        # command lines.  A non-GCC pyconfig.h only produces a warning
        # here; the build may still fail later with macro errors.

        UnixCCompiler.__init__(self, verbose, dry_run, force)

        status, details = check_config_h()
        self.debug_print("Python's GCC status: %s (details: %s)" %
                         (status, details))
        if status is not CONFIG_H_OK:
            self.warn(
                "Python's pyconfig.h doesn't seem to support your compiler. "
                "Reason: %s. "
                "Compiling may fail because of undefined preprocessor macros."
                % details)

        # Each of these may be None when the tool is not on PATH.
        self.gcc_version, self.ld_version, self.dllwrap_version = \
            get_versions()
        self.debug_print(self.compiler_type + ": gcc %s, ld %s, dllwrap %s\n" %
                         (self.gcc_version,
                          self.ld_version,
                          self.dllwrap_version) )

        # ld_version >= "2.10.90" and < "2.13" should also be able to use
        # gcc -mdll instead of dllwrap
        # Older dllwraps had own version numbers, newer ones use the
        # same as the rest of binutils ( also ld )
        # dllwrap 2.10.90 is buggy
        if self.ld_version >= "2.10.90":
            self.linker_dll = "gcc"
        else:
            self.linker_dll = "dllwrap"

        # ld_version >= "2.13" support -shared so use it instead of
        # -mdll -static
        if self.ld_version >= "2.13":
            shared_option = "-shared"
        else:
            shared_option = "-mdll -static"

        # Hard-code GCC because that's what this is all about.
        # XXX optimization, warnings etc. should be customizable.
        self.set_executables(compiler='gcc -mcygwin -O -Wall',
                             compiler_so='gcc -mcygwin -mdll -O -Wall',
                             compiler_cxx='g++ -mcygwin -O -Wall',
                             linker_exe='gcc -mcygwin',
                             linker_so=('%s -mcygwin %s' %
                                        (self.linker_dll, shared_option)))

        # cygwin and mingw32 need different sets of libraries
        if self.gcc_version == "2.91.57":
            # cygwin shouldn't need msvcrt, but without the dlls will crash
            # (gcc version 2.91.57) -- perhaps something about initialization
            self.dll_libraries=["msvcrt"]
            self.warn(
                "Consider upgrading to a newer version of gcc")
        else:
            # Include the appropriate MSVC runtime library if Python was built
            # with MSVC 7.0 or later.
            self.dll_libraries = get_msvcr()

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compiles the source by spawning GCC and windres if needed."""
        if ext == '.rc' or ext == '.res':
            # gcc needs '.res' and '.rc' compiled to object files !!!
            try:
                self.spawn(["windres", "-i", src, "-o", obj])
            except DistutilsExecError as msg:
                raise CompileError(msg)
        else: # for other files use the C-compiler
            try:
                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
                           extra_postargs)
            except DistutilsExecError as msg:
                raise CompileError(msg)

    def link(self, target_desc, objects, output_filename, output_dir=None,
             libraries=None, library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=0, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link the objects.

        Extends UnixCCompiler.link() by writing the export symbols to a
        .def file and adding the MSVC runtime libraries.
        """
        # use separate copies, so we can modify the lists
        extra_preargs = copy.copy(extra_preargs or [])
        libraries = copy.copy(libraries or [])
        objects = copy.copy(objects or [])

        # Additional libraries
        libraries.extend(self.dll_libraries)

        # handle export symbols by creating a def-file
        # with executables this only works with gcc/ld as linker
        if ((export_symbols is not None) and
            (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
            # (The linker doesn't do anything if output is up-to-date.
            # So it would probably better to check if we really need this,
            # but for this we had to insert some unchanged parts of
            # UnixCCompiler, and this is not what we want.)

            # we want to put some files in the same directory as the
            # object files are, build_temp doesn't help much
            # where are the object files
            temp_dir = os.path.dirname(objects[0])
            # name of dll to give the helper files the same base name
            (dll_name, dll_extension) = os.path.splitext(
                os.path.basename(output_filename))

            # generate the filenames for these files
            def_file = os.path.join(temp_dir, dll_name + ".def")
            lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a")

            # Generate .def file
            contents = [
                "LIBRARY %s" % os.path.basename(output_filename),
                "EXPORTS"]
            for sym in export_symbols:
                contents.append(sym)
            self.execute(write_file, (def_file, contents),
                         "writing %s" % def_file)

            # next add options for def-file and to creating import libraries

            # dllwrap uses different options than gcc/ld
            if self.linker_dll == "dllwrap":
                extra_preargs.extend(["--output-lib", lib_file])
                # for dllwrap we have to use a special option
                extra_preargs.extend(["--def", def_file])
            # we use gcc/ld here and can be sure ld is >= 2.9.10
            else:
                # doesn't work: bfd_close build\...\libfoo.a: Invalid operation
                #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file])
                # for gcc/ld the def-file is specified as any object files
                objects.append(def_file)

        #end: if ((export_symbols is not None) and
        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):

        # who wants symbols and a many times larger output file
        # should explicitly switch the debug mode on
        # otherwise we let dllwrap/ld strip the output file
        # (On my machine: 10KiB < stripped_file < ??100KiB
        #   unstripped_file = stripped_file + XXX KiB
        #  ( XXX=254 for a typical python extension))
        if not debug:
            extra_preargs.append("-s")

        UnixCCompiler.link(self, target_desc, objects, output_filename,
                           output_dir, libraries, library_dirs,
                           runtime_library_dirs,
                           None, # export_symbols, we do this in our def-file
                           debug, extra_preargs, extra_postargs, build_temp,
                           target_lang)

    # -- Miscellaneous methods -----------------------------------------

    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
        """Adds supports for rc and res files."""
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            base, ext = os.path.splitext(os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc','.res']):
                raise UnknownFileError("unknown file type '%s' (from '%s')" % \
                      (ext, src_name))
            if strip_dir:
                base = os.path.basename (base)
            if ext in ('.res', '.rc'):
                # these need to be compiled to object files
                # keep the source extension in the object name so .rc and
                # .c files with the same base don't collide
                obj_names.append (os.path.join(output_dir,
                                              base + ext + self.obj_extension))
            else:
                obj_names.append (os.path.join(output_dir,
                                              base + self.obj_extension))
        return obj_names
|
| 274 |
+
|
| 275 |
+
# the same as cygwin plus some additional parameters
class Mingw32CCompiler(CygwinCCompiler):
    """ Handles the Mingw32 port of the GNU C compiler to Windows.
    """
    compiler_type = 'mingw32'

    def __init__(self, verbose=0, dry_run=0, force=0):
        # Reuses the Cygwin probing, then overrides the command lines
        # (no -mcygwin flags) for a native mingw32 build.

        CygwinCCompiler.__init__ (self, verbose, dry_run, force)

        # ld_version >= "2.13" support -shared so use it instead of
        # -mdll -static
        if self.ld_version >= "2.13":
            shared_option = "-shared"
        else:
            shared_option = "-mdll -static"

        # A real mingw32 doesn't need to specify a different entry point,
        # but cygwin 2.91.57 in no-cygwin-mode needs it.
        if self.gcc_version <= "2.91.57":
            entry_point = '--entry _DllMain@12'
        else:
            entry_point = ''

        if is_cygwingcc():
            raise CCompilerError(
                'Cygwin gcc cannot be used with --compiler=mingw32')

        self.set_executables(compiler='gcc -O -Wall',
                             compiler_so='gcc -mdll -O -Wall',
                             compiler_cxx='g++ -O -Wall',
                             linker_exe='gcc',
                             linker_so='%s %s %s'
                                        % (self.linker_dll, shared_option,
                                           entry_point))
        # Maybe we should also append -mthreads, but then the finished
        # dlls need another dll (mingwm10.dll see Mingw32 docs)
        # (-mthreads: Support thread-safe exception handling on `Mingw32')

        # no additional libraries needed
        # NOTE: this assignment is immediately overwritten below; it is
        # kept for historical reasons.
        self.dll_libraries=[]

        # Include the appropriate MSVC runtime library if Python was built
        # with MSVC 7.0 or later.
        self.dll_libraries = get_msvcr()
|
| 320 |
+
|
| 321 |
+
# Because these compilers aren't configured in Python's pyconfig.h file by
# default, we should at least warn the user if he is using an unmodified
# version.

CONFIG_H_OK = "ok"
CONFIG_H_NOTOK = "not ok"
CONFIG_H_UNCERTAIN = "uncertain"

def check_config_h():
    """Check if the current Python installation appears amenable to building
    extensions with GCC.

    Returns a tuple (status, details), where 'status' is one of the following
    constants:

    - CONFIG_H_OK: all is well, go ahead and compile
    - CONFIG_H_NOTOK: doesn't look good
    - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h

    'details' is a human-readable string explaining the situation.

    Note there are two ways to conclude "OK": either 'sys.version' contains
    the string "GCC" (implying that this Python was built with GCC), or the
    installed "pyconfig.h" contains the string "__GNUC__".
    """

    # XXX since this function also checks sys.version, it's not strictly a
    # "pyconfig.h" check -- should probably be renamed...

    from distutils import sysconfig

    # if sys.version contains GCC then python was compiled with GCC, and the
    # pyconfig.h file should be OK
    if "GCC" in sys.version:
        return CONFIG_H_OK, "sys.version mentions 'GCC'"

    # let's see if __GNUC__ is mentioned in python.h
    fn = sysconfig.get_config_h_filename()
    try:
        # 'with' guarantees the file is closed even if read() raises
        # (replaces the original nested try/finally close).
        with open(fn) as config_h:
            if "__GNUC__" in config_h.read():
                return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn
            else:
                return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn
    except OSError as exc:
        return (CONFIG_H_UNCERTAIN,
                "couldn't read '%s': %s" % (fn, exc.strerror))
|
| 371 |
+
|
| 372 |
+
RE_VERSION = re.compile(br'(\d+\.\d+(\.\d+)*)')

def _find_exe_version(cmd):
    """Find the version of an executable by running `cmd` in the shell.

    If the command is not found, or the output does not match
    `RE_VERSION`, returns None.
    """
    executable = cmd.split()[0]
    if find_executable(executable) is None:
        return None
    # Use Popen as a context manager so the pipe is closed *and* the
    # child is waited on -- reading and closing the pipe alone (as the
    # original did) leaves a zombie process behind on POSIX systems.
    with Popen(cmd, shell=True, stdout=PIPE) as proc:
        out_string = proc.stdout.read()
    result = RE_VERSION.search(out_string)
    if result is None:
        return None
    # LooseVersion works with strings
    # so we need to decode our bytes
    return LooseVersion(result.group(1).decode())
|
| 394 |
+
|
| 395 |
+
def get_versions():
    """ Try to find out the versions of gcc, ld and dllwrap.

    If not possible it returns None for it.
    """
    # Each probe yields a LooseVersion, or None when the tool is missing
    # or prints nothing version-like.
    probes = ('gcc -dumpversion', 'ld -v', 'dllwrap --version')
    return tuple(_find_exe_version(probe) for probe in probes)
|
| 402 |
+
|
| 403 |
+
def is_cygwingcc():
    '''Try to determine if the gcc that would be used is from cygwin.'''
    machine = check_output(['gcc', '-dumpmachine'])
    # gcc reports its target triple; cygwin builds end in "...-cygwin"
    return machine.strip().endswith(b'cygwin')
|
llava/lib/python3.10/distutils/debug.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
|
| 3 |
+
# If DISTUTILS_DEBUG is anything other than the empty string, we run in
|
| 4 |
+
# debug mode.
|
| 5 |
+
DEBUG = os.environ.get('DISTUTILS_DEBUG')
|
llava/lib/python3.10/distutils/dep_util.py
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.dep_util
|
| 2 |
+
|
| 3 |
+
Utility functions for simple, timestamp-based dependency of files
|
| 4 |
+
and groups of files; also, function based entirely on such
|
| 5 |
+
timestamp dependency analysis."""
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
from distutils.errors import DistutilsFileError
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def newer (source, target):
    """Tell whether 'source' is more recently modified than 'target'.

    Return true if 'source' exists and was modified more recently than
    'target', or if 'source' exists and 'target' does not.  Return false
    when both exist and 'target' is at least as new as 'source'.
    Raise DistutilsFileError if 'source' does not exist.
    """
    if not os.path.exists(source):
        raise DistutilsFileError("file '%s' does not exist" %
                                 os.path.abspath(source))
    if not os.path.exists(target):
        return 1

    # ST_MTIME yields whole-second timestamps; sub-second differences
    # therefore compare as "same age".
    from stat import ST_MTIME
    return os.stat(source)[ST_MTIME] > os.stat(target)[ST_MTIME]
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def newer_pairwise (sources, targets):
    """Walk two filename lists in parallel, testing if each source is newer
    than its corresponding target.  Return a pair of lists (sources,
    targets) where source is newer than target, according to the semantics
    of 'newer()'.
    """
    if len(sources) != len(targets):
        raise ValueError("'sources' and 'targets' must be same length")

    # Keep only the (source, target) pairs where the source is out of date.
    fresh_sources = []
    fresh_targets = []
    for src, tgt in zip(sources, targets):
        if newer(src, tgt):
            fresh_sources.append(src)
            fresh_targets.append(tgt)

    return (fresh_sources, fresh_targets)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def newer_group (sources, target, missing='error'):
    """Return true if 'target' is out-of-date with respect to any file
    listed in 'sources'.  In other words, if 'target' exists and is newer
    than every file in 'sources', return false; otherwise return true.
    'missing' controls what we do when a source file is missing; the
    default ("error") is to blow up with an OSError from inside 'stat()';
    if it is "ignore", we silently drop any missing source files; if it is
    "newer", any missing source files make us assume that 'target' is
    out-of-date (this is handy in "dry-run" mode: it'll make you pretend to
    carry out commands that wouldn't work because inputs are missing, but
    that doesn't matter because you're not actually going to run the
    commands).
    """
    # A non-existent target is trivially out-of-date.
    if not os.path.exists(target):
        return 1

    # Compare every source's mtime against the target's; the first source
    # that is strictly newer proves the target stale.
    from stat import ST_MTIME
    target_mtime = os.stat(target)[ST_MTIME]
    for source in sources:
        if not os.path.exists(source):
            if missing == 'ignore':
                # drop missing source from the dependency list
                continue
            if missing == 'newer':
                # missing source means the target is out-of-date
                return 1
            # missing == 'error' (or anything else): fall through and let
            # os.stat() below raise OSError for the absent file
        if os.stat(source)[ST_MTIME] > target_mtime:
            return 1
    return 0
|
llava/lib/python3.10/distutils/dir_util.py
ADDED
|
@@ -0,0 +1,210 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.dir_util
|
| 2 |
+
|
| 3 |
+
Utility functions for manipulating directories and directory trees."""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import errno
|
| 7 |
+
from distutils.errors import DistutilsFileError, DistutilsInternalError
|
| 8 |
+
from distutils import log
|
| 9 |
+
|
| 10 |
+
# cache for by mkpath() -- in addition to cheapening redundant calls,
# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
_path_created = {}

# I don't use os.makedirs because a) it's new to Python 1.5.2, and
# b) it blows up if the directory already exists (I want to silently
# succeed in that case).
def mkpath(name, mode=0o777, verbose=1, dry_run=0):
    """Create a directory and any missing ancestor directories.

    If the directory already exists (or if 'name' is the empty string, which
    means the current directory, which of course exists), then do nothing.
    Raise DistutilsFileError if unable to create some directory along the way
    (eg. some sub-path exists, but is a file rather than a directory).
    If 'verbose' is true, print a one-line summary of each mkdir to stdout.
    Return the list of directories actually created.
    """

    global _path_created

    # Detect a common bug -- name is None
    if not isinstance(name, str):
        raise DistutilsInternalError(
              "mkpath: 'name' must be a string (got %r)" % (name,))

    name = os.path.normpath(name)
    created_dirs = []
    if name == '' or os.path.isdir(name):
        return created_dirs
    if _path_created.get(os.path.abspath(name)):
        return created_dirs

    # Walk upward until we reach a directory that already exists, pushing
    # each missing component onto a stack so they can be created top-down.
    head, tail = os.path.split(name)
    pending = [tail]
    while head and tail and not os.path.isdir(head):
        head, tail = os.path.split(head)
        pending.insert(0, tail)

    # 'head' is now the deepest existing ancestor; rebuild the path one
    # component at a time, creating each missing directory.
    for part in pending:
        head = os.path.join(head, part)
        abs_head = os.path.abspath(head)

        if _path_created.get(abs_head):
            continue

        if verbose >= 1:
            log.info("creating %s", head)

        if not dry_run:
            try:
                os.mkdir(head, mode)
            except OSError as exc:
                # a concurrent creator is fine; anything else is fatal
                if not (exc.errno == errno.EEXIST and os.path.isdir(head)):
                    raise DistutilsFileError(
                          "could not create '%s': %s" % (head, exc.args[-1]))
            created_dirs.append(head)

        _path_created[abs_head] = 1
    return created_dirs
|
| 79 |
+
|
| 80 |
+
def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
    """Create all the empty directories under 'base_dir' needed to put 'files'
    there.

    'base_dir' is just the name of a directory which doesn't necessarily
    exist yet; 'files' is a list of filenames to be interpreted relative to
    'base_dir'.  'base_dir' + the directory portion of every file in 'files'
    will be created if it doesn't already exist.  'mode', 'verbose' and
    'dry_run' flags are as for 'mkpath()'.
    """
    # Collect the distinct directories, then create them in sorted order so
    # parents come before children.
    wanted = {os.path.join(base_dir, os.path.dirname(f)) for f in files}
    for directory in sorted(wanted):
        mkpath(directory, mode, verbose=verbose, dry_run=dry_run)
|
| 98 |
+
|
| 99 |
+
def copy_tree(src, dst, preserve_mode=1, preserve_times=1,
              preserve_symlinks=0, update=0, verbose=1, dry_run=0):
    """Copy an entire directory tree 'src' to a new location 'dst'.

    Both 'src' and 'dst' must be directory names.  If 'src' is not a
    directory, raise DistutilsFileError.  If 'dst' does not exist, it is
    created with 'mkpath()'.  The end result of the copy is that every
    file in 'src' is copied to 'dst', and directories under 'src' are
    recursively copied to 'dst'.  Return the list of files that were
    copied or might have been copied, using their output name.  The
    return value is unaffected by 'update' or 'dry_run': it is simply
    the list of all files under 'src', with the names changed to be
    under 'dst'.

    'preserve_mode' and 'preserve_times' are the same as for
    'copy_file'; note that they only apply to regular files, not to
    directories.  If 'preserve_symlinks' is true, symlinks will be
    copied as symlinks (on platforms that support them!); otherwise
    (the default), the destination of the symlink will be copied.
    'update' and 'verbose' are the same as for 'copy_file'.
    """
    from distutils.file_util import copy_file

    if not dry_run and not os.path.isdir(src):
        raise DistutilsFileError(
              "cannot copy tree '%s': not a directory" % src)
    try:
        names = os.listdir(src)
    except OSError as e:
        # in dry-run mode a missing source tree is tolerated
        if not dry_run:
            raise DistutilsFileError(
                  "error listing files in '%s': %s" % (src, e.strerror))
        names = []

    if not dry_run:
        mkpath(dst, verbose=verbose)

    outputs = []

    for name in names:
        if name.startswith('.nfs'):
            # skip NFS rename files
            continue

        src_name = os.path.join(src, name)
        dst_name = os.path.join(dst, name)

        if preserve_symlinks and os.path.islink(src_name):
            # reproduce the symlink instead of copying its referent
            link_dest = os.readlink(src_name)
            if verbose >= 1:
                log.info("linking %s -> %s", dst_name, link_dest)
            if not dry_run:
                os.symlink(link_dest, dst_name)
            outputs.append(dst_name)

        elif os.path.isdir(src_name):
            # recurse into subdirectories, accumulating their outputs
            outputs.extend(
                copy_tree(src_name, dst_name, preserve_mode,
                          preserve_times, preserve_symlinks, update,
                          verbose=verbose, dry_run=dry_run))
        else:
            copy_file(src_name, dst_name, preserve_mode,
                      preserve_times, update, verbose=verbose,
                      dry_run=dry_run)
            outputs.append(dst_name)

    return outputs
|
| 167 |
+
|
| 168 |
+
def _build_cmdtuple(path, cmdtuples):
|
| 169 |
+
"""Helper for remove_tree()."""
|
| 170 |
+
for f in os.listdir(path):
|
| 171 |
+
real_f = os.path.join(path,f)
|
| 172 |
+
if os.path.isdir(real_f) and not os.path.islink(real_f):
|
| 173 |
+
_build_cmdtuple(real_f, cmdtuples)
|
| 174 |
+
else:
|
| 175 |
+
cmdtuples.append((os.remove, real_f))
|
| 176 |
+
cmdtuples.append((os.rmdir, path))
|
| 177 |
+
|
| 178 |
+
def remove_tree(directory, verbose=1, dry_run=0):
    """Recursively remove an entire directory tree.

    Any errors are ignored (apart from being reported to stdout if 'verbose'
    is true).
    """
    global _path_created

    if verbose >= 1:
        log.info("removing '%s' (and everything under it)", directory)
    if dry_run:
        return
    cmdtuples = []
    _build_cmdtuple(directory, cmdtuples)
    for func, arg in cmdtuples:
        try:
            func(arg)
            # drop the removed dir from mkpath()'s cache, if present
            _path_created.pop(os.path.abspath(arg), None)
        except OSError as exc:
            log.warn("error removing %s: %s", directory, exc)
|
| 201 |
+
|
| 202 |
+
def ensure_relative(path):
    """Take the full path 'path', and make it a relative path.

    This is useful to make 'path' the second argument to os.path.join().
    """
    drive, rest = os.path.splitdrive(path)
    # strip a single leading separator, keeping any drive letter in front
    if rest.startswith(os.sep):
        rest = drive + rest[1:]
    return rest
|
llava/lib/python3.10/distutils/dist.py
ADDED
|
@@ -0,0 +1,1256 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.dist
|
| 2 |
+
|
| 3 |
+
Provides the Distribution class, which represents the module distribution
|
| 4 |
+
being built/installed/distributed.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import sys
|
| 8 |
+
import os
|
| 9 |
+
import re
|
| 10 |
+
from email import message_from_file
|
| 11 |
+
|
| 12 |
+
try:
|
| 13 |
+
import warnings
|
| 14 |
+
except ImportError:
|
| 15 |
+
warnings = None
|
| 16 |
+
|
| 17 |
+
from distutils.errors import *
|
| 18 |
+
from distutils.fancy_getopt import FancyGetopt, translate_longopt
|
| 19 |
+
from distutils.util import check_environ, strtobool, rfc822_escape
|
| 20 |
+
from distutils import log
|
| 21 |
+
from distutils.debug import DEBUG
|
| 22 |
+
|
| 23 |
+
# Regex to define acceptable Distutils command names. This is not *quite*
|
| 24 |
+
# the same as a Python NAME -- I don't allow leading underscores. The fact
|
| 25 |
+
# that they're very similar is no coincidence; the default naming scheme is
|
| 26 |
+
# to look for a Python module named after the command.
|
| 27 |
+
command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def _ensure_list(value, fieldname):
|
| 31 |
+
if isinstance(value, str):
|
| 32 |
+
# a string containing comma separated values is okay. It will
|
| 33 |
+
# be converted to a list by Distribution.finalize_options().
|
| 34 |
+
pass
|
| 35 |
+
elif not isinstance(value, list):
|
| 36 |
+
# passing a tuple or an iterator perhaps, warn and convert
|
| 37 |
+
typename = type(value).__name__
|
| 38 |
+
msg = f"Warning: '{fieldname}' should be a list, got type '{typename}'"
|
| 39 |
+
log.log(log.WARN, msg)
|
| 40 |
+
value = list(value)
|
| 41 |
+
return value
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class Distribution:
|
| 45 |
+
"""The core of the Distutils. Most of the work hiding behind 'setup'
|
| 46 |
+
is really done within a Distribution instance, which farms the work out
|
| 47 |
+
to the Distutils commands specified on the command line.
|
| 48 |
+
|
| 49 |
+
Setup scripts will almost never instantiate Distribution directly,
|
| 50 |
+
unless the 'setup()' function is totally inadequate to their needs.
|
| 51 |
+
However, it is conceivable that a setup script might wish to subclass
|
| 52 |
+
Distribution for some specialized purpose, and then pass the subclass
|
| 53 |
+
to 'setup()' as the 'distclass' keyword argument. If so, it is
|
| 54 |
+
necessary to respect the expectations that 'setup' has of Distribution.
|
| 55 |
+
See the code for 'setup()', in core.py, for details.
|
| 56 |
+
"""
|
| 57 |
+
|
| 58 |
+
# 'global_options' describes the command-line options that may be
|
| 59 |
+
# supplied to the setup script prior to any actual commands.
|
| 60 |
+
# Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of
|
| 61 |
+
# these global options. This list should be kept to a bare minimum,
|
| 62 |
+
# since every global option is also valid as a command option -- and we
|
| 63 |
+
# don't want to pollute the commands with too many options that they
|
| 64 |
+
# have minimal control over.
|
| 65 |
+
# The fourth entry for verbose means that it can be repeated.
|
| 66 |
+
global_options = [
|
| 67 |
+
('verbose', 'v', "run verbosely (default)", 1),
|
| 68 |
+
('quiet', 'q', "run quietly (turns verbosity off)"),
|
| 69 |
+
('dry-run', 'n', "don't actually do anything"),
|
| 70 |
+
('help', 'h', "show detailed help message"),
|
| 71 |
+
('no-user-cfg', None,
|
| 72 |
+
'ignore pydistutils.cfg in your home directory'),
|
| 73 |
+
]
|
| 74 |
+
|
| 75 |
+
# 'common_usage' is a short (2-3 line) string describing the common
|
| 76 |
+
# usage of the setup script.
|
| 77 |
+
common_usage = """\
|
| 78 |
+
Common commands: (see '--help-commands' for more)
|
| 79 |
+
|
| 80 |
+
setup.py build will build the package underneath 'build/'
|
| 81 |
+
setup.py install will install the package
|
| 82 |
+
"""
|
| 83 |
+
|
| 84 |
+
# options that are not propagated to the commands
|
| 85 |
+
display_options = [
|
| 86 |
+
('help-commands', None,
|
| 87 |
+
"list all available commands"),
|
| 88 |
+
('name', None,
|
| 89 |
+
"print package name"),
|
| 90 |
+
('version', 'V',
|
| 91 |
+
"print package version"),
|
| 92 |
+
('fullname', None,
|
| 93 |
+
"print <package name>-<version>"),
|
| 94 |
+
('author', None,
|
| 95 |
+
"print the author's name"),
|
| 96 |
+
('author-email', None,
|
| 97 |
+
"print the author's email address"),
|
| 98 |
+
('maintainer', None,
|
| 99 |
+
"print the maintainer's name"),
|
| 100 |
+
('maintainer-email', None,
|
| 101 |
+
"print the maintainer's email address"),
|
| 102 |
+
('contact', None,
|
| 103 |
+
"print the maintainer's name if known, else the author's"),
|
| 104 |
+
('contact-email', None,
|
| 105 |
+
"print the maintainer's email address if known, else the author's"),
|
| 106 |
+
('url', None,
|
| 107 |
+
"print the URL for this package"),
|
| 108 |
+
('license', None,
|
| 109 |
+
"print the license of the package"),
|
| 110 |
+
('licence', None,
|
| 111 |
+
"alias for --license"),
|
| 112 |
+
('description', None,
|
| 113 |
+
"print the package description"),
|
| 114 |
+
('long-description', None,
|
| 115 |
+
"print the long package description"),
|
| 116 |
+
('platforms', None,
|
| 117 |
+
"print the list of platforms"),
|
| 118 |
+
('classifiers', None,
|
| 119 |
+
"print the list of classifiers"),
|
| 120 |
+
('keywords', None,
|
| 121 |
+
"print the list of keywords"),
|
| 122 |
+
('provides', None,
|
| 123 |
+
"print the list of packages/modules provided"),
|
| 124 |
+
('requires', None,
|
| 125 |
+
"print the list of packages/modules required"),
|
| 126 |
+
('obsoletes', None,
|
| 127 |
+
"print the list of packages/modules made obsolete")
|
| 128 |
+
]
|
| 129 |
+
display_option_names = [translate_longopt(x[0]) for x in display_options]
|
| 130 |
+
|
| 131 |
+
# negative options are options that exclude other options
|
| 132 |
+
negative_opt = {'quiet': 'verbose'}
|
| 133 |
+
|
| 134 |
+
# -- Creation/initialization methods -------------------------------
|
| 135 |
+
|
| 136 |
+
def __init__(self, attrs=None):
|
| 137 |
+
"""Construct a new Distribution instance: initialize all the
|
| 138 |
+
attributes of a Distribution, and then use 'attrs' (a dictionary
|
| 139 |
+
mapping attribute names to values) to assign some of those
|
| 140 |
+
attributes their "real" values. (Any attributes not mentioned in
|
| 141 |
+
'attrs' will be assigned to some null value: 0, None, an empty list
|
| 142 |
+
or dictionary, etc.) Most importantly, initialize the
|
| 143 |
+
'command_obj' attribute to the empty dictionary; this will be
|
| 144 |
+
filled in with real command objects by 'parse_command_line()'.
|
| 145 |
+
"""
|
| 146 |
+
|
| 147 |
+
# Default values for our command-line options
|
| 148 |
+
self.verbose = 1
|
| 149 |
+
self.dry_run = 0
|
| 150 |
+
self.help = 0
|
| 151 |
+
for attr in self.display_option_names:
|
| 152 |
+
setattr(self, attr, 0)
|
| 153 |
+
|
| 154 |
+
# Store the distribution meta-data (name, version, author, and so
|
| 155 |
+
# forth) in a separate object -- we're getting to have enough
|
| 156 |
+
# information here (and enough command-line options) that it's
|
| 157 |
+
# worth it. Also delegate 'get_XXX()' methods to the 'metadata'
|
| 158 |
+
# object in a sneaky and underhanded (but efficient!) way.
|
| 159 |
+
self.metadata = DistributionMetadata()
|
| 160 |
+
for basename in self.metadata._METHOD_BASENAMES:
|
| 161 |
+
method_name = "get_" + basename
|
| 162 |
+
setattr(self, method_name, getattr(self.metadata, method_name))
|
| 163 |
+
|
| 164 |
+
# 'cmdclass' maps command names to class objects, so we
|
| 165 |
+
# can 1) quickly figure out which class to instantiate when
|
| 166 |
+
# we need to create a new command object, and 2) have a way
|
| 167 |
+
# for the setup script to override command classes
|
| 168 |
+
self.cmdclass = {}
|
| 169 |
+
|
| 170 |
+
# 'command_packages' is a list of packages in which commands
|
| 171 |
+
# are searched for. The factory for command 'foo' is expected
|
| 172 |
+
# to be named 'foo' in the module 'foo' in one of the packages
|
| 173 |
+
# named here. This list is searched from the left; an error
|
| 174 |
+
# is raised if no named package provides the command being
|
| 175 |
+
# searched for. (Always access using get_command_packages().)
|
| 176 |
+
self.command_packages = None
|
| 177 |
+
|
| 178 |
+
# 'script_name' and 'script_args' are usually set to sys.argv[0]
|
| 179 |
+
# and sys.argv[1:], but they can be overridden when the caller is
|
| 180 |
+
# not necessarily a setup script run from the command-line.
|
| 181 |
+
self.script_name = None
|
| 182 |
+
self.script_args = None
|
| 183 |
+
|
| 184 |
+
# 'command_options' is where we store command options between
|
| 185 |
+
# parsing them (from config files, the command-line, etc.) and when
|
| 186 |
+
# they are actually needed -- ie. when the command in question is
|
| 187 |
+
# instantiated. It is a dictionary of dictionaries of 2-tuples:
|
| 188 |
+
# command_options = { command_name : { option : (source, value) } }
|
| 189 |
+
self.command_options = {}
|
| 190 |
+
|
| 191 |
+
# 'dist_files' is the list of (command, pyversion, file) that
|
| 192 |
+
# have been created by any dist commands run so far. This is
|
| 193 |
+
# filled regardless of whether the run is dry or not. pyversion
|
| 194 |
+
# gives sysconfig.get_python_version() if the dist file is
|
| 195 |
+
# specific to a Python version, 'any' if it is good for all
|
| 196 |
+
# Python versions on the target platform, and '' for a source
|
| 197 |
+
# file. pyversion should not be used to specify minimum or
|
| 198 |
+
# maximum required Python versions; use the metainfo for that
|
| 199 |
+
# instead.
|
| 200 |
+
self.dist_files = []
|
| 201 |
+
|
| 202 |
+
# These options are really the business of various commands, rather
|
| 203 |
+
# than of the Distribution itself. We provide aliases for them in
|
| 204 |
+
# Distribution as a convenience to the developer.
|
| 205 |
+
self.packages = None
|
| 206 |
+
self.package_data = {}
|
| 207 |
+
self.package_dir = None
|
| 208 |
+
self.py_modules = None
|
| 209 |
+
self.libraries = None
|
| 210 |
+
self.headers = None
|
| 211 |
+
self.ext_modules = None
|
| 212 |
+
self.ext_package = None
|
| 213 |
+
self.include_dirs = None
|
| 214 |
+
self.extra_path = None
|
| 215 |
+
self.scripts = None
|
| 216 |
+
self.data_files = None
|
| 217 |
+
self.password = ''
|
| 218 |
+
|
| 219 |
+
# And now initialize bookkeeping stuff that can't be supplied by
|
| 220 |
+
# the caller at all. 'command_obj' maps command names to
|
| 221 |
+
# Command instances -- that's how we enforce that every command
|
| 222 |
+
# class is a singleton.
|
| 223 |
+
self.command_obj = {}
|
| 224 |
+
|
| 225 |
+
# 'have_run' maps command names to boolean values; it keeps track
|
| 226 |
+
# of whether we have actually run a particular command, to make it
|
| 227 |
+
# cheap to "run" a command whenever we think we might need to -- if
|
| 228 |
+
# it's already been done, no need for expensive filesystem
|
| 229 |
+
# operations, we just check the 'have_run' dictionary and carry on.
|
| 230 |
+
# It's only safe to query 'have_run' for a command class that has
|
| 231 |
+
# been instantiated -- a false value will be inserted when the
|
| 232 |
+
# command object is created, and replaced with a true value when
|
| 233 |
+
# the command is successfully run. Thus it's probably best to use
|
| 234 |
+
# '.get()' rather than a straight lookup.
|
| 235 |
+
self.have_run = {}
|
| 236 |
+
|
| 237 |
+
# Now we'll use the attrs dictionary (ultimately, keyword args from
|
| 238 |
+
# the setup script) to possibly override any or all of these
|
| 239 |
+
# distribution options.
|
| 240 |
+
|
| 241 |
+
if attrs:
|
| 242 |
+
# Pull out the set of command options and work on them
|
| 243 |
+
# specifically. Note that this order guarantees that aliased
|
| 244 |
+
# command options will override any supplied redundantly
|
| 245 |
+
# through the general options dictionary.
|
| 246 |
+
options = attrs.get('options')
|
| 247 |
+
if options is not None:
|
| 248 |
+
del attrs['options']
|
| 249 |
+
for (command, cmd_options) in options.items():
|
| 250 |
+
opt_dict = self.get_option_dict(command)
|
| 251 |
+
for (opt, val) in cmd_options.items():
|
| 252 |
+
opt_dict[opt] = ("setup script", val)
|
| 253 |
+
|
| 254 |
+
if 'licence' in attrs:
|
| 255 |
+
attrs['license'] = attrs['licence']
|
| 256 |
+
del attrs['licence']
|
| 257 |
+
msg = "'licence' distribution option is deprecated; use 'license'"
|
| 258 |
+
if warnings is not None:
|
| 259 |
+
warnings.warn(msg)
|
| 260 |
+
else:
|
| 261 |
+
sys.stderr.write(msg + "\n")
|
| 262 |
+
|
| 263 |
+
# Now work on the rest of the attributes. Any attribute that's
|
| 264 |
+
# not already defined is invalid!
|
| 265 |
+
for (key, val) in attrs.items():
|
| 266 |
+
if hasattr(self.metadata, "set_" + key):
|
| 267 |
+
getattr(self.metadata, "set_" + key)(val)
|
| 268 |
+
elif hasattr(self.metadata, key):
|
| 269 |
+
setattr(self.metadata, key, val)
|
| 270 |
+
elif hasattr(self, key):
|
| 271 |
+
setattr(self, key, val)
|
| 272 |
+
else:
|
| 273 |
+
msg = "Unknown distribution option: %s" % repr(key)
|
| 274 |
+
warnings.warn(msg)
|
| 275 |
+
|
| 276 |
+
# no-user-cfg is handled before other command line args
|
| 277 |
+
# because other args override the config files, and this
|
| 278 |
+
# one is needed before we can load the config files.
|
| 279 |
+
# If attrs['script_args'] wasn't passed, assume false.
|
| 280 |
+
#
|
| 281 |
+
# This also make sure we just look at the global options
|
| 282 |
+
self.want_user_cfg = True
|
| 283 |
+
|
| 284 |
+
if self.script_args is not None:
|
| 285 |
+
for arg in self.script_args:
|
| 286 |
+
if not arg.startswith('-'):
|
| 287 |
+
break
|
| 288 |
+
if arg == '--no-user-cfg':
|
| 289 |
+
self.want_user_cfg = False
|
| 290 |
+
break
|
| 291 |
+
|
| 292 |
+
self.finalize_options()
|
| 293 |
+
|
| 294 |
+
def get_option_dict(self, command):
|
| 295 |
+
"""Get the option dictionary for a given command. If that
|
| 296 |
+
command's option dictionary hasn't been created yet, then create it
|
| 297 |
+
and return the new dictionary; otherwise, return the existing
|
| 298 |
+
option dictionary.
|
| 299 |
+
"""
|
| 300 |
+
dict = self.command_options.get(command)
|
| 301 |
+
if dict is None:
|
| 302 |
+
dict = self.command_options[command] = {}
|
| 303 |
+
return dict
|
| 304 |
+
|
| 305 |
+
def dump_option_dicts(self, header=None, commands=None, indent=""):
|
| 306 |
+
from pprint import pformat
|
| 307 |
+
|
| 308 |
+
if commands is None: # dump all command option dicts
|
| 309 |
+
commands = sorted(self.command_options.keys())
|
| 310 |
+
|
| 311 |
+
if header is not None:
|
| 312 |
+
self.announce(indent + header)
|
| 313 |
+
indent = indent + " "
|
| 314 |
+
|
| 315 |
+
if not commands:
|
| 316 |
+
self.announce(indent + "no commands known yet")
|
| 317 |
+
return
|
| 318 |
+
|
| 319 |
+
for cmd_name in commands:
|
| 320 |
+
opt_dict = self.command_options.get(cmd_name)
|
| 321 |
+
if opt_dict is None:
|
| 322 |
+
self.announce(indent +
|
| 323 |
+
"no option dict for '%s' command" % cmd_name)
|
| 324 |
+
else:
|
| 325 |
+
self.announce(indent +
|
| 326 |
+
"option dict for '%s' command:" % cmd_name)
|
| 327 |
+
out = pformat(opt_dict)
|
| 328 |
+
for line in out.split('\n'):
|
| 329 |
+
self.announce(indent + " " + line)
|
| 330 |
+
|
| 331 |
+
# -- Config file finding/parsing methods ---------------------------
|
| 332 |
+
|
| 333 |
+
def find_config_files(self):
|
| 334 |
+
"""Find as many configuration files as should be processed for this
|
| 335 |
+
platform, and return a list of filenames in the order in which they
|
| 336 |
+
should be parsed. The filenames returned are guaranteed to exist
|
| 337 |
+
(modulo nasty race conditions).
|
| 338 |
+
|
| 339 |
+
There are three possible config files: distutils.cfg in the
|
| 340 |
+
Distutils installation directory (ie. where the top-level
|
| 341 |
+
Distutils __inst__.py file lives), a file in the user's home
|
| 342 |
+
directory named .pydistutils.cfg on Unix and pydistutils.cfg
|
| 343 |
+
on Windows/Mac; and setup.cfg in the current directory.
|
| 344 |
+
|
| 345 |
+
The file in the user's home directory can be disabled with the
|
| 346 |
+
--no-user-cfg option.
|
| 347 |
+
"""
|
| 348 |
+
files = []
|
| 349 |
+
check_environ()
|
| 350 |
+
|
| 351 |
+
# Where to look for the system-wide Distutils config file
|
| 352 |
+
sys_dir = os.path.dirname(sys.modules['distutils'].__file__)
|
| 353 |
+
|
| 354 |
+
# Look for the system config file
|
| 355 |
+
sys_file = os.path.join(sys_dir, "distutils.cfg")
|
| 356 |
+
if os.path.isfile(sys_file):
|
| 357 |
+
files.append(sys_file)
|
| 358 |
+
|
| 359 |
+
# What to call the per-user config file
|
| 360 |
+
if os.name == 'posix':
|
| 361 |
+
user_filename = ".pydistutils.cfg"
|
| 362 |
+
else:
|
| 363 |
+
user_filename = "pydistutils.cfg"
|
| 364 |
+
|
| 365 |
+
# And look for the user config file
|
| 366 |
+
if self.want_user_cfg:
|
| 367 |
+
user_file = os.path.join(os.path.expanduser('~'), user_filename)
|
| 368 |
+
if os.path.isfile(user_file):
|
| 369 |
+
files.append(user_file)
|
| 370 |
+
|
| 371 |
+
# All platforms support local setup.cfg
|
| 372 |
+
local_file = "setup.cfg"
|
| 373 |
+
if os.path.isfile(local_file):
|
| 374 |
+
files.append(local_file)
|
| 375 |
+
|
| 376 |
+
if DEBUG:
|
| 377 |
+
self.announce("using config files: %s" % ', '.join(files))
|
| 378 |
+
|
| 379 |
+
return files
|
| 380 |
+
|
| 381 |
+
def parse_config_files(self, filenames=None):
|
| 382 |
+
from configparser import ConfigParser
|
| 383 |
+
|
| 384 |
+
# Ignore install directory options if we have a venv
|
| 385 |
+
if sys.prefix != sys.base_prefix:
|
| 386 |
+
ignore_options = [
|
| 387 |
+
'install-base', 'install-platbase', 'install-lib',
|
| 388 |
+
'install-platlib', 'install-purelib', 'install-headers',
|
| 389 |
+
'install-scripts', 'install-data', 'prefix', 'exec-prefix',
|
| 390 |
+
'home', 'user', 'root']
|
| 391 |
+
else:
|
| 392 |
+
ignore_options = []
|
| 393 |
+
|
| 394 |
+
ignore_options = frozenset(ignore_options)
|
| 395 |
+
|
| 396 |
+
if filenames is None:
|
| 397 |
+
filenames = self.find_config_files()
|
| 398 |
+
|
| 399 |
+
if DEBUG:
|
| 400 |
+
self.announce("Distribution.parse_config_files():")
|
| 401 |
+
|
| 402 |
+
parser = ConfigParser()
|
| 403 |
+
for filename in filenames:
|
| 404 |
+
if DEBUG:
|
| 405 |
+
self.announce(" reading %s" % filename)
|
| 406 |
+
parser.read(filename)
|
| 407 |
+
for section in parser.sections():
|
| 408 |
+
options = parser.options(section)
|
| 409 |
+
opt_dict = self.get_option_dict(section)
|
| 410 |
+
|
| 411 |
+
for opt in options:
|
| 412 |
+
if opt != '__name__' and opt not in ignore_options:
|
| 413 |
+
val = parser.get(section,opt)
|
| 414 |
+
opt = opt.replace('-', '_')
|
| 415 |
+
opt_dict[opt] = (filename, val)
|
| 416 |
+
|
| 417 |
+
# Make the ConfigParser forget everything (so we retain
|
| 418 |
+
# the original filenames that options come from)
|
| 419 |
+
parser.__init__()
|
| 420 |
+
|
| 421 |
+
# If there was a "global" section in the config file, use it
|
| 422 |
+
# to set Distribution options.
|
| 423 |
+
|
| 424 |
+
if 'global' in self.command_options:
|
| 425 |
+
for (opt, (src, val)) in self.command_options['global'].items():
|
| 426 |
+
alias = self.negative_opt.get(opt)
|
| 427 |
+
try:
|
| 428 |
+
if alias:
|
| 429 |
+
setattr(self, alias, not strtobool(val))
|
| 430 |
+
elif opt in ('verbose', 'dry_run'): # ugh!
|
| 431 |
+
setattr(self, opt, strtobool(val))
|
| 432 |
+
else:
|
| 433 |
+
setattr(self, opt, val)
|
| 434 |
+
except ValueError as msg:
|
| 435 |
+
raise DistutilsOptionError(msg)
|
| 436 |
+
|
| 437 |
+
# -- Command-line parsing methods ----------------------------------
|
| 438 |
+
|
| 439 |
+
def parse_command_line(self):
|
| 440 |
+
"""Parse the setup script's command line, taken from the
|
| 441 |
+
'script_args' instance attribute (which defaults to 'sys.argv[1:]'
|
| 442 |
+
-- see 'setup()' in core.py). This list is first processed for
|
| 443 |
+
"global options" -- options that set attributes of the Distribution
|
| 444 |
+
instance. Then, it is alternately scanned for Distutils commands
|
| 445 |
+
and options for that command. Each new command terminates the
|
| 446 |
+
options for the previous command. The allowed options for a
|
| 447 |
+
command are determined by the 'user_options' attribute of the
|
| 448 |
+
command class -- thus, we have to be able to load command classes
|
| 449 |
+
in order to parse the command line. Any error in that 'options'
|
| 450 |
+
attribute raises DistutilsGetoptError; any error on the
|
| 451 |
+
command-line raises DistutilsArgError. If no Distutils commands
|
| 452 |
+
were found on the command line, raises DistutilsArgError. Return
|
| 453 |
+
true if command-line was successfully parsed and we should carry
|
| 454 |
+
on with executing commands; false if no errors but we shouldn't
|
| 455 |
+
execute commands (currently, this only happens if user asks for
|
| 456 |
+
help).
|
| 457 |
+
"""
|
| 458 |
+
#
|
| 459 |
+
# We now have enough information to show the Macintosh dialog
|
| 460 |
+
# that allows the user to interactively specify the "command line".
|
| 461 |
+
#
|
| 462 |
+
toplevel_options = self._get_toplevel_options()
|
| 463 |
+
|
| 464 |
+
# We have to parse the command line a bit at a time -- global
|
| 465 |
+
# options, then the first command, then its options, and so on --
|
| 466 |
+
# because each command will be handled by a different class, and
|
| 467 |
+
# the options that are valid for a particular class aren't known
|
| 468 |
+
# until we have loaded the command class, which doesn't happen
|
| 469 |
+
# until we know what the command is.
|
| 470 |
+
|
| 471 |
+
self.commands = []
|
| 472 |
+
parser = FancyGetopt(toplevel_options + self.display_options)
|
| 473 |
+
parser.set_negative_aliases(self.negative_opt)
|
| 474 |
+
parser.set_aliases({'licence': 'license'})
|
| 475 |
+
args = parser.getopt(args=self.script_args, object=self)
|
| 476 |
+
option_order = parser.get_option_order()
|
| 477 |
+
log.set_verbosity(self.verbose)
|
| 478 |
+
|
| 479 |
+
# for display options we return immediately
|
| 480 |
+
if self.handle_display_options(option_order):
|
| 481 |
+
return
|
| 482 |
+
while args:
|
| 483 |
+
args = self._parse_command_opts(parser, args)
|
| 484 |
+
if args is None: # user asked for help (and got it)
|
| 485 |
+
return
|
| 486 |
+
|
| 487 |
+
# Handle the cases of --help as a "global" option, ie.
|
| 488 |
+
# "setup.py --help" and "setup.py --help command ...". For the
|
| 489 |
+
# former, we show global options (--verbose, --dry-run, etc.)
|
| 490 |
+
# and display-only options (--name, --version, etc.); for the
|
| 491 |
+
# latter, we omit the display-only options and show help for
|
| 492 |
+
# each command listed on the command line.
|
| 493 |
+
if self.help:
|
| 494 |
+
self._show_help(parser,
|
| 495 |
+
display_options=len(self.commands) == 0,
|
| 496 |
+
commands=self.commands)
|
| 497 |
+
return
|
| 498 |
+
|
| 499 |
+
# Oops, no commands found -- an end-user error
|
| 500 |
+
if not self.commands:
|
| 501 |
+
raise DistutilsArgError("no commands supplied")
|
| 502 |
+
|
| 503 |
+
# All is well: return true
|
| 504 |
+
return True
|
| 505 |
+
|
| 506 |
+
def _get_toplevel_options(self):
|
| 507 |
+
"""Return the non-display options recognized at the top level.
|
| 508 |
+
|
| 509 |
+
This includes options that are recognized *only* at the top
|
| 510 |
+
level as well as options recognized for commands.
|
| 511 |
+
"""
|
| 512 |
+
return self.global_options + [
|
| 513 |
+
("command-packages=", None,
|
| 514 |
+
"list of packages that provide distutils commands"),
|
| 515 |
+
]
|
| 516 |
+
|
| 517 |
+
def _parse_command_opts(self, parser, args):
|
| 518 |
+
"""Parse the command-line options for a single command.
|
| 519 |
+
'parser' must be a FancyGetopt instance; 'args' must be the list
|
| 520 |
+
of arguments, starting with the current command (whose options
|
| 521 |
+
we are about to parse). Returns a new version of 'args' with
|
| 522 |
+
the next command at the front of the list; will be the empty
|
| 523 |
+
list if there are no more commands on the command line. Returns
|
| 524 |
+
None if the user asked for help on this command.
|
| 525 |
+
"""
|
| 526 |
+
# late import because of mutual dependence between these modules
|
| 527 |
+
from distutils.cmd import Command
|
| 528 |
+
|
| 529 |
+
# Pull the current command from the head of the command line
|
| 530 |
+
command = args[0]
|
| 531 |
+
if not command_re.match(command):
|
| 532 |
+
raise SystemExit("invalid command name '%s'" % command)
|
| 533 |
+
self.commands.append(command)
|
| 534 |
+
|
| 535 |
+
# Dig up the command class that implements this command, so we
|
| 536 |
+
# 1) know that it's a valid command, and 2) know which options
|
| 537 |
+
# it takes.
|
| 538 |
+
try:
|
| 539 |
+
cmd_class = self.get_command_class(command)
|
| 540 |
+
except DistutilsModuleError as msg:
|
| 541 |
+
raise DistutilsArgError(msg)
|
| 542 |
+
|
| 543 |
+
# Require that the command class be derived from Command -- want
|
| 544 |
+
# to be sure that the basic "command" interface is implemented.
|
| 545 |
+
if not issubclass(cmd_class, Command):
|
| 546 |
+
raise DistutilsClassError(
|
| 547 |
+
"command class %s must subclass Command" % cmd_class)
|
| 548 |
+
|
| 549 |
+
# Also make sure that the command object provides a list of its
|
| 550 |
+
# known options.
|
| 551 |
+
if not (hasattr(cmd_class, 'user_options') and
|
| 552 |
+
isinstance(cmd_class.user_options, list)):
|
| 553 |
+
msg = ("command class %s must provide "
|
| 554 |
+
"'user_options' attribute (a list of tuples)")
|
| 555 |
+
raise DistutilsClassError(msg % cmd_class)
|
| 556 |
+
|
| 557 |
+
# If the command class has a list of negative alias options,
|
| 558 |
+
# merge it in with the global negative aliases.
|
| 559 |
+
negative_opt = self.negative_opt
|
| 560 |
+
if hasattr(cmd_class, 'negative_opt'):
|
| 561 |
+
negative_opt = negative_opt.copy()
|
| 562 |
+
negative_opt.update(cmd_class.negative_opt)
|
| 563 |
+
|
| 564 |
+
# Check for help_options in command class. They have a different
|
| 565 |
+
# format (tuple of four) so we need to preprocess them here.
|
| 566 |
+
if (hasattr(cmd_class, 'help_options') and
|
| 567 |
+
isinstance(cmd_class.help_options, list)):
|
| 568 |
+
help_options = fix_help_options(cmd_class.help_options)
|
| 569 |
+
else:
|
| 570 |
+
help_options = []
|
| 571 |
+
|
| 572 |
+
# All commands support the global options too, just by adding
|
| 573 |
+
# in 'global_options'.
|
| 574 |
+
parser.set_option_table(self.global_options +
|
| 575 |
+
cmd_class.user_options +
|
| 576 |
+
help_options)
|
| 577 |
+
parser.set_negative_aliases(negative_opt)
|
| 578 |
+
(args, opts) = parser.getopt(args[1:])
|
| 579 |
+
if hasattr(opts, 'help') and opts.help:
|
| 580 |
+
self._show_help(parser, display_options=0, commands=[cmd_class])
|
| 581 |
+
return
|
| 582 |
+
|
| 583 |
+
if (hasattr(cmd_class, 'help_options') and
|
| 584 |
+
isinstance(cmd_class.help_options, list)):
|
| 585 |
+
help_option_found=0
|
| 586 |
+
for (help_option, short, desc, func) in cmd_class.help_options:
|
| 587 |
+
if hasattr(opts, parser.get_attr_name(help_option)):
|
| 588 |
+
help_option_found=1
|
| 589 |
+
if callable(func):
|
| 590 |
+
func()
|
| 591 |
+
else:
|
| 592 |
+
raise DistutilsClassError(
|
| 593 |
+
"invalid help function %r for help option '%s': "
|
| 594 |
+
"must be a callable object (function, etc.)"
|
| 595 |
+
% (func, help_option))
|
| 596 |
+
|
| 597 |
+
if help_option_found:
|
| 598 |
+
return
|
| 599 |
+
|
| 600 |
+
# Put the options from the command-line into their official
|
| 601 |
+
# holding pen, the 'command_options' dictionary.
|
| 602 |
+
opt_dict = self.get_option_dict(command)
|
| 603 |
+
for (name, value) in vars(opts).items():
|
| 604 |
+
opt_dict[name] = ("command line", value)
|
| 605 |
+
|
| 606 |
+
return args
|
| 607 |
+
|
| 608 |
+
def finalize_options(self):
|
| 609 |
+
"""Set final values for all the options on the Distribution
|
| 610 |
+
instance, analogous to the .finalize_options() method of Command
|
| 611 |
+
objects.
|
| 612 |
+
"""
|
| 613 |
+
for attr in ('keywords', 'platforms'):
|
| 614 |
+
value = getattr(self.metadata, attr)
|
| 615 |
+
if value is None:
|
| 616 |
+
continue
|
| 617 |
+
if isinstance(value, str):
|
| 618 |
+
value = [elm.strip() for elm in value.split(',')]
|
| 619 |
+
setattr(self.metadata, attr, value)
|
| 620 |
+
|
| 621 |
+
def _show_help(self, parser, global_options=1, display_options=1,
|
| 622 |
+
commands=[]):
|
| 623 |
+
"""Show help for the setup script command-line in the form of
|
| 624 |
+
several lists of command-line options. 'parser' should be a
|
| 625 |
+
FancyGetopt instance; do not expect it to be returned in the
|
| 626 |
+
same state, as its option table will be reset to make it
|
| 627 |
+
generate the correct help text.
|
| 628 |
+
|
| 629 |
+
If 'global_options' is true, lists the global options:
|
| 630 |
+
--verbose, --dry-run, etc. If 'display_options' is true, lists
|
| 631 |
+
the "display-only" options: --name, --version, etc. Finally,
|
| 632 |
+
lists per-command help for every command name or command class
|
| 633 |
+
in 'commands'.
|
| 634 |
+
"""
|
| 635 |
+
# late import because of mutual dependence between these modules
|
| 636 |
+
from distutils.core import gen_usage
|
| 637 |
+
from distutils.cmd import Command
|
| 638 |
+
|
| 639 |
+
if global_options:
|
| 640 |
+
if display_options:
|
| 641 |
+
options = self._get_toplevel_options()
|
| 642 |
+
else:
|
| 643 |
+
options = self.global_options
|
| 644 |
+
parser.set_option_table(options)
|
| 645 |
+
parser.print_help(self.common_usage + "\nGlobal options:")
|
| 646 |
+
print('')
|
| 647 |
+
|
| 648 |
+
if display_options:
|
| 649 |
+
parser.set_option_table(self.display_options)
|
| 650 |
+
parser.print_help(
|
| 651 |
+
"Information display options (just display " +
|
| 652 |
+
"information, ignore any commands)")
|
| 653 |
+
print('')
|
| 654 |
+
|
| 655 |
+
for command in self.commands:
|
| 656 |
+
if isinstance(command, type) and issubclass(command, Command):
|
| 657 |
+
klass = command
|
| 658 |
+
else:
|
| 659 |
+
klass = self.get_command_class(command)
|
| 660 |
+
if (hasattr(klass, 'help_options') and
|
| 661 |
+
isinstance(klass.help_options, list)):
|
| 662 |
+
parser.set_option_table(klass.user_options +
|
| 663 |
+
fix_help_options(klass.help_options))
|
| 664 |
+
else:
|
| 665 |
+
parser.set_option_table(klass.user_options)
|
| 666 |
+
parser.print_help("Options for '%s' command:" % klass.__name__)
|
| 667 |
+
print('')
|
| 668 |
+
|
| 669 |
+
print(gen_usage(self.script_name))
|
| 670 |
+
|
| 671 |
+
def handle_display_options(self, option_order):
|
| 672 |
+
"""If there were any non-global "display-only" options
|
| 673 |
+
(--help-commands or the metadata display options) on the command
|
| 674 |
+
line, display the requested info and return true; else return
|
| 675 |
+
false.
|
| 676 |
+
"""
|
| 677 |
+
from distutils.core import gen_usage
|
| 678 |
+
|
| 679 |
+
# User just wants a list of commands -- we'll print it out and stop
|
| 680 |
+
# processing now (ie. if they ran "setup --help-commands foo bar",
|
| 681 |
+
# we ignore "foo bar").
|
| 682 |
+
if self.help_commands:
|
| 683 |
+
self.print_commands()
|
| 684 |
+
print('')
|
| 685 |
+
print(gen_usage(self.script_name))
|
| 686 |
+
return 1
|
| 687 |
+
|
| 688 |
+
# If user supplied any of the "display metadata" options, then
|
| 689 |
+
# display that metadata in the order in which the user supplied the
|
| 690 |
+
# metadata options.
|
| 691 |
+
any_display_options = 0
|
| 692 |
+
is_display_option = {}
|
| 693 |
+
for option in self.display_options:
|
| 694 |
+
is_display_option[option[0]] = 1
|
| 695 |
+
|
| 696 |
+
for (opt, val) in option_order:
|
| 697 |
+
if val and is_display_option.get(opt):
|
| 698 |
+
opt = translate_longopt(opt)
|
| 699 |
+
value = getattr(self.metadata, "get_"+opt)()
|
| 700 |
+
if opt in ['keywords', 'platforms']:
|
| 701 |
+
print(','.join(value))
|
| 702 |
+
elif opt in ('classifiers', 'provides', 'requires',
|
| 703 |
+
'obsoletes'):
|
| 704 |
+
print('\n'.join(value))
|
| 705 |
+
else:
|
| 706 |
+
print(value)
|
| 707 |
+
any_display_options = 1
|
| 708 |
+
|
| 709 |
+
return any_display_options
|
| 710 |
+
|
| 711 |
+
def print_command_list(self, commands, header, max_length):
|
| 712 |
+
"""Print a subset of the list of all commands -- used by
|
| 713 |
+
'print_commands()'.
|
| 714 |
+
"""
|
| 715 |
+
print(header + ":")
|
| 716 |
+
|
| 717 |
+
for cmd in commands:
|
| 718 |
+
klass = self.cmdclass.get(cmd)
|
| 719 |
+
if not klass:
|
| 720 |
+
klass = self.get_command_class(cmd)
|
| 721 |
+
try:
|
| 722 |
+
description = klass.description
|
| 723 |
+
except AttributeError:
|
| 724 |
+
description = "(no description available)"
|
| 725 |
+
|
| 726 |
+
print(" %-*s %s" % (max_length, cmd, description))
|
| 727 |
+
|
| 728 |
+
def print_commands(self):
|
| 729 |
+
"""Print out a help message listing all available commands with a
|
| 730 |
+
description of each. The list is divided into "standard commands"
|
| 731 |
+
(listed in distutils.command.__all__) and "extra commands"
|
| 732 |
+
(mentioned in self.cmdclass, but not a standard command). The
|
| 733 |
+
descriptions come from the command class attribute
|
| 734 |
+
'description'.
|
| 735 |
+
"""
|
| 736 |
+
import distutils.command
|
| 737 |
+
std_commands = distutils.command.__all__
|
| 738 |
+
is_std = {}
|
| 739 |
+
for cmd in std_commands:
|
| 740 |
+
is_std[cmd] = 1
|
| 741 |
+
|
| 742 |
+
extra_commands = []
|
| 743 |
+
for cmd in self.cmdclass.keys():
|
| 744 |
+
if not is_std.get(cmd):
|
| 745 |
+
extra_commands.append(cmd)
|
| 746 |
+
|
| 747 |
+
max_length = 0
|
| 748 |
+
for cmd in (std_commands + extra_commands):
|
| 749 |
+
if len(cmd) > max_length:
|
| 750 |
+
max_length = len(cmd)
|
| 751 |
+
|
| 752 |
+
self.print_command_list(std_commands,
|
| 753 |
+
"Standard commands",
|
| 754 |
+
max_length)
|
| 755 |
+
if extra_commands:
|
| 756 |
+
print()
|
| 757 |
+
self.print_command_list(extra_commands,
|
| 758 |
+
"Extra commands",
|
| 759 |
+
max_length)
|
| 760 |
+
|
| 761 |
+
def get_command_list(self):
|
| 762 |
+
"""Get a list of (command, description) tuples.
|
| 763 |
+
The list is divided into "standard commands" (listed in
|
| 764 |
+
distutils.command.__all__) and "extra commands" (mentioned in
|
| 765 |
+
self.cmdclass, but not a standard command). The descriptions come
|
| 766 |
+
from the command class attribute 'description'.
|
| 767 |
+
"""
|
| 768 |
+
# Currently this is only used on Mac OS, for the Mac-only GUI
|
| 769 |
+
# Distutils interface (by Jack Jansen)
|
| 770 |
+
import distutils.command
|
| 771 |
+
std_commands = distutils.command.__all__
|
| 772 |
+
is_std = {}
|
| 773 |
+
for cmd in std_commands:
|
| 774 |
+
is_std[cmd] = 1
|
| 775 |
+
|
| 776 |
+
extra_commands = []
|
| 777 |
+
for cmd in self.cmdclass.keys():
|
| 778 |
+
if not is_std.get(cmd):
|
| 779 |
+
extra_commands.append(cmd)
|
| 780 |
+
|
| 781 |
+
rv = []
|
| 782 |
+
for cmd in (std_commands + extra_commands):
|
| 783 |
+
klass = self.cmdclass.get(cmd)
|
| 784 |
+
if not klass:
|
| 785 |
+
klass = self.get_command_class(cmd)
|
| 786 |
+
try:
|
| 787 |
+
description = klass.description
|
| 788 |
+
except AttributeError:
|
| 789 |
+
description = "(no description available)"
|
| 790 |
+
rv.append((cmd, description))
|
| 791 |
+
return rv
|
| 792 |
+
|
| 793 |
+
# -- Command class/object methods ----------------------------------
|
| 794 |
+
|
| 795 |
+
def get_command_packages(self):
|
| 796 |
+
"""Return a list of packages from which commands are loaded."""
|
| 797 |
+
pkgs = self.command_packages
|
| 798 |
+
if not isinstance(pkgs, list):
|
| 799 |
+
if pkgs is None:
|
| 800 |
+
pkgs = ''
|
| 801 |
+
pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != '']
|
| 802 |
+
if "distutils.command" not in pkgs:
|
| 803 |
+
pkgs.insert(0, "distutils.command")
|
| 804 |
+
self.command_packages = pkgs
|
| 805 |
+
return pkgs
|
| 806 |
+
|
| 807 |
+
def get_command_class(self, command):
|
| 808 |
+
"""Return the class that implements the Distutils command named by
|
| 809 |
+
'command'. First we check the 'cmdclass' dictionary; if the
|
| 810 |
+
command is mentioned there, we fetch the class object from the
|
| 811 |
+
dictionary and return it. Otherwise we load the command module
|
| 812 |
+
("distutils.command." + command) and fetch the command class from
|
| 813 |
+
the module. The loaded class is also stored in 'cmdclass'
|
| 814 |
+
to speed future calls to 'get_command_class()'.
|
| 815 |
+
|
| 816 |
+
Raises DistutilsModuleError if the expected module could not be
|
| 817 |
+
found, or if that module does not define the expected class.
|
| 818 |
+
"""
|
| 819 |
+
klass = self.cmdclass.get(command)
|
| 820 |
+
if klass:
|
| 821 |
+
return klass
|
| 822 |
+
|
| 823 |
+
for pkgname in self.get_command_packages():
|
| 824 |
+
module_name = "%s.%s" % (pkgname, command)
|
| 825 |
+
klass_name = command
|
| 826 |
+
|
| 827 |
+
try:
|
| 828 |
+
__import__(module_name)
|
| 829 |
+
module = sys.modules[module_name]
|
| 830 |
+
except ImportError:
|
| 831 |
+
continue
|
| 832 |
+
|
| 833 |
+
try:
|
| 834 |
+
klass = getattr(module, klass_name)
|
| 835 |
+
except AttributeError:
|
| 836 |
+
raise DistutilsModuleError(
|
| 837 |
+
"invalid command '%s' (no class '%s' in module '%s')"
|
| 838 |
+
% (command, klass_name, module_name))
|
| 839 |
+
|
| 840 |
+
self.cmdclass[command] = klass
|
| 841 |
+
return klass
|
| 842 |
+
|
| 843 |
+
raise DistutilsModuleError("invalid command '%s'" % command)
|
| 844 |
+
|
| 845 |
+
def get_command_obj(self, command, create=1):
    """Return the command object for 'command'.

    A cached object from a previous call is returned when available.
    On a cache miss the object is created (and its options applied
    from config files / command line) when 'create' is true; otherwise
    None is returned.
    """
    cmd_obj = self.command_obj.get(command)
    if cmd_obj or not create:
        return cmd_obj

    if DEBUG:
        self.announce("Distribution.get_command_obj(): "
                      "creating '%s' command object" % command)

    klass = self.get_command_class(command)
    cmd_obj = self.command_obj[command] = klass(self)
    self.have_run[command] = 0

    # Apply any options supplied in config files or on the command
    # line.  (NB. error reporting is lame here: problems surface only
    # when 'finalize_options()' runs, losing the option's source.)
    options = self.command_options.get(command)
    if options:
        self._set_command_options(cmd_obj, options)

    return cmd_obj
|
| 871 |
+
|
| 872 |
+
def _set_command_options(self, command_obj, option_dict=None):
    """Set the options for 'command_obj' from 'option_dict'.

    'option_dict' maps option names to (source, value) pairs; each
    value is copied onto the matching attribute of 'command_obj' (a
    Command instance).  String values for options listed in the
    command's 'boolean_options' are converted with strtobool(), and
    options in 'negative_opt' store the inverted boolean under their
    positive counterpart.

    If 'option_dict' is not supplied, the standard option dictionary
    for this command (from 'self.command_options') is used.

    Raises DistutilsOptionError for an unknown option or a bad
    boolean string.
    """
    command_name = command_obj.get_command_name()
    if option_dict is None:
        option_dict = self.get_option_dict(command_name)

    if DEBUG:
        self.announce("  setting options for '%s' command:" % command_name)

    if not option_dict:
        return

    # These depend only on 'command_obj', so compute them once instead
    # of on every loop iteration (hoisted loop invariants; the original
    # rebuilt both inside the loop).
    try:
        bool_opts = [translate_longopt(o)
                     for o in command_obj.boolean_options]
    except AttributeError:
        bool_opts = []
    try:
        neg_opt = command_obj.negative_opt
    except AttributeError:
        neg_opt = {}

    for option, (source, value) in option_dict.items():
        if DEBUG:
            self.announce("    %s = %s (from %s)" % (option, value,
                                                     source))
        try:
            is_string = isinstance(value, str)
            if option in neg_opt and is_string:
                # Negative options invert the flag and store it under
                # the positive option they alias.
                setattr(command_obj, neg_opt[option], not strtobool(value))
            elif option in bool_opts and is_string:
                setattr(command_obj, option, strtobool(value))
            elif hasattr(command_obj, option):
                setattr(command_obj, option, value)
            else:
                raise DistutilsOptionError(
                    "error in %s: command '%s' has no such option '%s'"
                    % (source, command_name, option))
        except ValueError as msg:
            raise DistutilsOptionError(msg)
|
| 915 |
+
|
| 916 |
+
def reinitialize_command(self, command, reinit_subcommands=0):
    """Reset a command object to the state it had when first returned
    by 'get_command_obj()': initialized but not yet finalized.

    This provides the opportunity to sneak option values in
    programmatically, overriding or supplementing user-supplied values
    from the config files and command line.  The command must be
    re-finalized (via 'finalize_options()' or 'ensure_finalized()')
    before being used for real.

    'command' may be a command name (string) or a Command instance.
    When 'reinit_subcommands' is true, the command's applicable
    sub-commands (per its 'sub_commands' class attribute) are
    reinitialized recursively.

    Returns the reinitialized command object.
    """
    from distutils.cmd import Command

    if isinstance(command, Command):
        command_name = command.get_command_name()
    else:
        command_name = command
        command = self.get_command_obj(command_name)

    if not command.finalized:
        # Still pristine -- nothing to undo.
        return command

    command.initialize_options()
    command.finalized = 0
    self.have_run[command_name] = 0
    self._set_command_options(command)

    if reinit_subcommands:
        for sub_name in command.get_sub_commands():
            self.reinitialize_command(sub_name, reinit_subcommands)

    return command
|
| 954 |
+
|
| 955 |
+
# -- Methods that operate on the Distribution ----------------------
|
| 956 |
+
|
| 957 |
+
def announce(self, msg, level=log.INFO):
    """Forward 'msg' to the distutils logger at 'level' (default INFO)."""
    log.log(level, msg)
|
| 959 |
+
|
| 960 |
+
def run_commands(self):
    """Run each command seen on the setup script command line, in
    order, via 'run_command()' (which uses the command-object cache
    maintained by 'get_command_obj()').
    """
    for command_name in self.commands:
        self.run_command(command_name)
|
| 967 |
+
|
| 968 |
+
# -- Methods that operate on its Commands --------------------------
|
| 969 |
+
|
| 970 |
+
def run_command(self, command):
    """Run 'command', doing nothing if it has already been run.

    Creates the command object on demand via 'get_command_obj()',
    finalizes it, invokes its 'run()' method, and records it in
    'have_run' so repeat calls return silently.
    """
    # Already been here, done that?  Then return silently.
    if self.have_run.get(command):
        return

    log.info("running %s", command)
    cmd = self.get_command_obj(command)
    cmd.ensure_finalized()
    cmd.run()
    self.have_run[command] = 1
|
| 987 |
+
|
| 988 |
+
# -- Distribution query methods ------------------------------------
|
| 989 |
+
|
| 990 |
+
def has_pure_modules(self):
    # True when any pure-Python packages or modules are declared.
    return bool(self.packages or self.py_modules)

def has_ext_modules(self):
    # NB. returns the falsy container itself (None/[]) when unset,
    # matching the historical truthiness-only contract.
    exts = self.ext_modules
    return exts and len(exts) > 0

def has_c_libraries(self):
    libs = self.libraries
    return libs and len(libs) > 0

def has_modules(self):
    # Anything importable at all: pure-Python or extension modules.
    if self.has_pure_modules():
        return True
    return self.has_ext_modules()

def has_headers(self):
    hdrs = self.headers
    return hdrs and len(hdrs) > 0

def has_scripts(self):
    scripts = self.scripts
    return scripts and len(scripts) > 0

def has_data_files(self):
    data = self.data_files
    return data and len(data) > 0

def is_pure(self):
    # A pure distribution ships Python modules only: no extension
    # modules and no C libraries.
    if not self.has_pure_modules():
        return False
    if self.has_ext_modules():
        return False
    return not self.has_c_libraries()
|
| 1015 |
+
|
| 1016 |
+
# -- Metadata query methods ----------------------------------------
|
| 1017 |
+
|
| 1018 |
+
# If you're looking for 'get_name()', 'get_version()', and so forth,
|
| 1019 |
+
# they are defined in a sneaky way: the constructor binds self.get_XXX
|
| 1020 |
+
# to self.metadata.get_XXX. The actual code is in the
|
| 1021 |
+
# DistributionMetadata class, below.
|
| 1022 |
+
|
| 1023 |
+
class DistributionMetadata:
    """Container for the distribution meta-data: name, version, author,
    and so forth -- roughly the fields of a PKG-INFO file (PEP 241,
    extended by PEP 314).
    """

    _METHOD_BASENAMES = ("name", "version", "author", "author_email",
                         "maintainer", "maintainer_email", "url",
                         "license", "description", "long_description",
                         "keywords", "platforms", "fullname", "contact",
                         "contact_email", "classifiers", "download_url",
                         # PEP 314
                         "provides", "requires", "obsoletes",
                         )

    def __init__(self, path=None):
        """Initialize every field to None, or populate them from the
        PKG-INFO file at 'path' if one is given.
        """
        if path is not None:
            # Fix: close the file deterministically instead of leaking
            # the handle until garbage collection (the original passed
            # a bare open() result to read_pkg_file()).
            # NOTE(review): this reads with the locale's default
            # encoding while write_pkg_info() writes UTF-8 -- confirm
            # whether an explicit encoding belongs here too.
            with open(path) as fp:
                self.read_pkg_file(fp)
        else:
            self.name = None
            self.version = None
            self.author = None
            self.author_email = None
            self.maintainer = None
            self.maintainer_email = None
            self.url = None
            self.license = None
            self.description = None
            self.long_description = None
            self.keywords = None
            self.platforms = None
            self.classifiers = None
            self.download_url = None
            # PEP 314
            self.provides = None
            self.requires = None
            self.obsoletes = None

    def read_pkg_file(self, file):
        """Reads the metadata values from a file object."""
        msg = message_from_file(file)

        def _read_field(name):
            # 'UNKNOWN' is the marker written for unset fields.
            value = msg[name]
            if value == 'UNKNOWN':
                return None
            return value

        def _read_list(name):
            values = msg.get_all(name, None)
            if values == []:
                return None
            return values

        metadata_version = msg['metadata-version']
        self.name = _read_field('name')
        self.version = _read_field('version')
        self.description = _read_field('summary')
        # we are filling author only.
        self.author = _read_field('author')
        self.maintainer = None
        self.author_email = _read_field('author-email')
        self.maintainer_email = None
        self.url = _read_field('home-page')
        self.license = _read_field('license')

        if 'download-url' in msg:
            self.download_url = _read_field('download-url')
        else:
            self.download_url = None

        self.long_description = _read_field('description')
        self.description = _read_field('summary')

        if 'keywords' in msg:
            self.keywords = _read_field('keywords').split(',')

        self.platforms = _read_list('platform')
        self.classifiers = _read_list('classifier')

        # PEP 314 - these fields only exist in 1.1
        if metadata_version == '1.1':
            self.requires = _read_list('requires')
            self.provides = _read_list('provides')
            self.obsoletes = _read_list('obsoletes')
        else:
            self.requires = None
            self.provides = None
            self.obsoletes = None

    def write_pkg_info(self, base_dir):
        """Write the PKG-INFO file into the release tree."""
        with open(os.path.join(base_dir, 'PKG-INFO'), 'w',
                  encoding='UTF-8') as pkg_info:
            self.write_pkg_file(pkg_info)

    def write_pkg_file(self, file):
        """Write the PKG-INFO format data to a file object."""
        # Any PEP 314 field forces metadata version 1.1.
        version = '1.0'
        if (self.provides or self.requires or self.obsoletes or
                self.classifiers or self.download_url):
            version = '1.1'

        file.write('Metadata-Version: %s\n' % version)
        file.write('Name: %s\n' % self.get_name())
        file.write('Version: %s\n' % self.get_version())
        file.write('Summary: %s\n' % self.get_description())
        file.write('Home-page: %s\n' % self.get_url())
        file.write('Author: %s\n' % self.get_contact())
        file.write('Author-email: %s\n' % self.get_contact_email())
        file.write('License: %s\n' % self.get_license())
        if self.download_url:
            file.write('Download-URL: %s\n' % self.download_url)

        long_desc = rfc822_escape(self.get_long_description())
        file.write('Description: %s\n' % long_desc)

        keywords = ','.join(self.get_keywords())
        if keywords:
            file.write('Keywords: %s\n' % keywords)

        self._write_list(file, 'Platform', self.get_platforms())
        self._write_list(file, 'Classifier', self.get_classifiers())

        # PEP 314
        self._write_list(file, 'Requires', self.get_requires())
        self._write_list(file, 'Provides', self.get_provides())
        self._write_list(file, 'Obsoletes', self.get_obsoletes())

    def _write_list(self, file, name, values):
        # Emit one "Name: value" header line per element.
        for value in values:
            file.write('%s: %s\n' % (name, value))

    # -- Metadata query methods ----------------------------------------
    # Unset string fields are reported as "UNKNOWN", the PKG-INFO
    # placeholder value.

    def get_name(self):
        return self.name or "UNKNOWN"

    def get_version(self):
        return self.version or "0.0.0"

    def get_fullname(self):
        return "%s-%s" % (self.get_name(), self.get_version())

    def get_author(self):
        return self.author or "UNKNOWN"

    def get_author_email(self):
        return self.author_email or "UNKNOWN"

    def get_maintainer(self):
        return self.maintainer or "UNKNOWN"

    def get_maintainer_email(self):
        return self.maintainer_email or "UNKNOWN"

    def get_contact(self):
        # Prefer the maintainer over the author when both are set.
        return self.maintainer or self.author or "UNKNOWN"

    def get_contact_email(self):
        return self.maintainer_email or self.author_email or "UNKNOWN"

    def get_url(self):
        return self.url or "UNKNOWN"

    def get_license(self):
        return self.license or "UNKNOWN"
    get_licence = get_license  # historical British-spelling alias

    def get_description(self):
        return self.description or "UNKNOWN"

    def get_long_description(self):
        return self.long_description or "UNKNOWN"

    def get_keywords(self):
        return self.keywords or []

    def set_keywords(self, value):
        self.keywords = _ensure_list(value, 'keywords')

    def get_platforms(self):
        return self.platforms or ["UNKNOWN"]

    def set_platforms(self, value):
        self.platforms = _ensure_list(value, 'platforms')

    def get_classifiers(self):
        return self.classifiers or []

    def set_classifiers(self, value):
        self.classifiers = _ensure_list(value, 'classifiers')

    def get_download_url(self):
        return self.download_url or "UNKNOWN"

    # PEP 314
    def get_requires(self):
        return self.requires or []

    def set_requires(self, value):
        # Validate each entry as a version predicate before storing.
        import distutils.versionpredicate
        for v in value:
            distutils.versionpredicate.VersionPredicate(v)
        self.requires = list(value)

    def get_provides(self):
        return self.provides or []

    def set_provides(self, value):
        value = [v.strip() for v in value]
        for v in value:
            import distutils.versionpredicate
            distutils.versionpredicate.split_provision(v)
        self.provides = value

    def get_obsoletes(self):
        return self.obsoletes or []

    def set_obsoletes(self, value):
        import distutils.versionpredicate
        for v in value:
            distutils.versionpredicate.VersionPredicate(v)
        self.obsoletes = list(value)
|
| 1248 |
+
|
| 1249 |
+
def fix_help_options(options):
    """Convert a 4-tuple 'help_options' list as found in various command
    classes to the 3-tuple form required by FancyGetopt.
    """
    return [help_tuple[0:3] for help_tuple in options]
|
llava/lib/python3.10/distutils/errors.py
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.errors
|
| 2 |
+
|
| 3 |
+
Provides exceptions used by the Distutils modules. Note that Distutils
|
| 4 |
+
modules may raise standard exceptions; in particular, SystemExit is
|
| 5 |
+
usually raised for errors that are obviously the end-user's fault
|
| 6 |
+
(eg. bad command-line arguments).
|
| 7 |
+
|
| 8 |
+
This module is safe to use in "from ... import *" mode; it only exports
|
| 9 |
+
symbols whose names start with "Distutils" and end with "Error"."""
|
| 10 |
+
|
| 11 |
+
class DistutilsError (Exception):
|
| 12 |
+
"""The root of all Distutils evil."""
|
| 13 |
+
pass
|
| 14 |
+
|
| 15 |
+
class DistutilsModuleError (DistutilsError):
|
| 16 |
+
"""Unable to load an expected module, or to find an expected class
|
| 17 |
+
within some module (in particular, command modules and classes)."""
|
| 18 |
+
pass
|
| 19 |
+
|
| 20 |
+
class DistutilsClassError (DistutilsError):
|
| 21 |
+
"""Some command class (or possibly distribution class, if anyone
|
| 22 |
+
feels a need to subclass Distribution) is found not to be holding
|
| 23 |
+
up its end of the bargain, ie. implementing some part of the
|
| 24 |
+
"command "interface."""
|
| 25 |
+
pass
|
| 26 |
+
|
| 27 |
+
class DistutilsGetoptError (DistutilsError):
|
| 28 |
+
"""The option table provided to 'fancy_getopt()' is bogus."""
|
| 29 |
+
pass
|
| 30 |
+
|
| 31 |
+
class DistutilsArgError (DistutilsError):
|
| 32 |
+
"""Raised by fancy_getopt in response to getopt.error -- ie. an
|
| 33 |
+
error in the command line usage."""
|
| 34 |
+
pass
|
| 35 |
+
|
| 36 |
+
class DistutilsFileError (DistutilsError):
|
| 37 |
+
"""Any problems in the filesystem: expected file not found, etc.
|
| 38 |
+
Typically this is for problems that we detect before OSError
|
| 39 |
+
could be raised."""
|
| 40 |
+
pass
|
| 41 |
+
|
| 42 |
+
class DistutilsOptionError (DistutilsError):
|
| 43 |
+
"""Syntactic/semantic errors in command options, such as use of
|
| 44 |
+
mutually conflicting options, or inconsistent options,
|
| 45 |
+
badly-spelled values, etc. No distinction is made between option
|
| 46 |
+
values originating in the setup script, the command line, config
|
| 47 |
+
files, or what-have-you -- but if we *know* something originated in
|
| 48 |
+
the setup script, we'll raise DistutilsSetupError instead."""
|
| 49 |
+
pass
|
| 50 |
+
|
| 51 |
+
class DistutilsSetupError (DistutilsError):
|
| 52 |
+
"""For errors that can be definitely blamed on the setup script,
|
| 53 |
+
such as invalid keyword arguments to 'setup()'."""
|
| 54 |
+
pass
|
| 55 |
+
|
| 56 |
+
class DistutilsPlatformError (DistutilsError):
|
| 57 |
+
"""We don't know how to do something on the current platform (but
|
| 58 |
+
we do know how to do it on some platform) -- eg. trying to compile
|
| 59 |
+
C files on a platform not supported by a CCompiler subclass."""
|
| 60 |
+
pass
|
| 61 |
+
|
| 62 |
+
class DistutilsExecError (DistutilsError):
|
| 63 |
+
"""Any problems executing an external program (such as the C
|
| 64 |
+
compiler, when compiling C files)."""
|
| 65 |
+
pass
|
| 66 |
+
|
| 67 |
+
class DistutilsInternalError (DistutilsError):
|
| 68 |
+
"""Internal inconsistencies or impossibilities (obviously, this
|
| 69 |
+
should never be seen if the code is working!)."""
|
| 70 |
+
pass
|
| 71 |
+
|
| 72 |
+
class DistutilsTemplateError (DistutilsError):
|
| 73 |
+
"""Syntax error in a file list template."""
|
| 74 |
+
|
| 75 |
+
class DistutilsByteCompileError(DistutilsError):
|
| 76 |
+
"""Byte compile error."""
|
| 77 |
+
|
| 78 |
+
# Exception classes used by the CCompiler implementation classes
|
| 79 |
+
class CCompilerError (Exception):
|
| 80 |
+
"""Some compile/link operation failed."""
|
| 81 |
+
|
| 82 |
+
class PreprocessError (CCompilerError):
|
| 83 |
+
"""Failure to preprocess one or more C/C++ files."""
|
| 84 |
+
|
| 85 |
+
class CompileError (CCompilerError):
|
| 86 |
+
"""Failure to compile one or more C/C++ source files."""
|
| 87 |
+
|
| 88 |
+
class LibError (CCompilerError):
|
| 89 |
+
"""Failure to create a static library from one or more C/C++ object
|
| 90 |
+
files."""
|
| 91 |
+
|
| 92 |
+
class LinkError (CCompilerError):
|
| 93 |
+
"""Failure to link one or more C/C++ object files into an executable
|
| 94 |
+
or shared library file."""
|
| 95 |
+
|
| 96 |
+
class UnknownFileError (CCompilerError):
|
| 97 |
+
"""Attempt to process an unknown file type."""
|
llava/lib/python3.10/distutils/extension.py
ADDED
|
@@ -0,0 +1,241 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.extension
|
| 2 |
+
|
| 3 |
+
Provides the Extension class, used to describe C/C++ extension
|
| 4 |
+
modules in setup scripts."""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import re
|
| 8 |
+
import warnings
|
| 9 |
+
|
| 10 |
+
# This class is really only used by the "build_ext" command, so it might
|
| 11 |
+
# make sense to put it in distutils.command.build_ext. However, that
|
| 12 |
+
# module is already big enough, and I want to make this class a bit more
|
| 13 |
+
# complex to simplify some common cases ("foo" module in "foo.c") and do
|
| 14 |
+
# better error-checking ("foo.c" actually exists).
|
| 15 |
+
#
|
| 16 |
+
# Also, putting this in build_ext.py means every setup script would have to
|
| 17 |
+
# import that large-ish module (indirectly, through distutils.core) in
|
| 18 |
+
# order to do anything.
|
| 19 |
+
|
| 20 |
+
class Extension:
    """Describe a C/C++ extension module: its dotted name plus everything
    the "build_ext" command needs to build it (hopefully portably, with
    hooks for the unportable cases).

    Instance attributes (list attributes default to empty lists):
      name                 -- full dotted module name, *not* a file/path name
      sources              -- source filenames relative to the distribution
                              root, in Unix (slash-separated) form; may be C,
                              C++, SWIG (.i), platform resource files, or
                              anything else "build_ext" recognizes
      include_dirs         -- directories searched for C/C++ headers
      define_macros        -- (name, value) pairs; value None means a bare
                              "#define NAME" (-DNAME on Unix compilers)
      undef_macros         -- macro names to undefine explicitly
      library_dirs         -- directories searched for libraries at link time
      libraries            -- library names (not filenames/paths) to link with
      runtime_library_dirs -- directories searched for shared libraries at
                              run time (when the extension is loaded)
      extra_objects        -- extra files to link with (object files not
                              implied by 'sources', static libs, resources)
      extra_compile_args   -- extra platform/compiler-specific compile
                              information (typically command-line arguments)
      extra_link_args      -- extra platform/compiler-specific link
                              information, interpreted like the above
      export_symbols       -- symbols exported from a shared extension; not
                              needed on all platforms, and usually just
                              "init" + extension_name for Python extensions
      swig_opts            -- extra options passed to SWIG for .i sources
      depends              -- files the extension depends on
      language             -- "c", "c++", "objc", ...; detected from the
                              source suffixes when not given
      optional             -- if true, a build failure merely skips the
                              extension instead of aborting the build
    """

    # When adding constructor arguments, be sure to update
    # setup_keywords in core.py.
    def __init__(self, name, sources,
                 include_dirs=None,
                 define_macros=None,
                 undef_macros=None,
                 library_dirs=None,
                 libraries=None,
                 runtime_library_dirs=None,
                 extra_objects=None,
                 extra_compile_args=None,
                 extra_link_args=None,
                 export_symbols=None,
                 swig_opts=None,
                 depends=None,
                 language=None,
                 optional=None,
                 **kw  # To catch unknown keywords
                 ):
        if not isinstance(name, str):
            raise AssertionError("'name' must be a string")
        sources_ok = (isinstance(sources, list)
                      and all(isinstance(v, str) for v in sources))
        if not sources_ok:
            raise AssertionError("'sources' must be a list of strings")

        self.name = name
        self.sources = sources
        self.include_dirs = include_dirs or []
        self.define_macros = define_macros or []
        self.undef_macros = undef_macros or []
        self.library_dirs = library_dirs or []
        self.libraries = libraries or []
        self.runtime_library_dirs = runtime_library_dirs or []
        self.extra_objects = extra_objects or []
        self.extra_compile_args = extra_compile_args or []
        self.extra_link_args = extra_link_args or []
        self.export_symbols = export_symbols or []
        self.swig_opts = swig_opts or []
        self.depends = depends or []
        self.language = language
        self.optional = optional

        # Warn (don't fail) about any unrecognized keyword arguments.
        if kw:
            unknown = ', '.join(sorted(repr(option) for option in kw))
            warnings.warn("Unknown Extension options: %s" % unknown)

    def __repr__(self):
        return '<%s.%s(%r) at %#x>' % (
            self.__class__.__module__,
            self.__class__.__qualname__,
            self.name,
            id(self))
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def read_setup_file(filename):
    """Reads a Setup file and returns Extension instances.

    'filename' names a makesetup-style Setup file.  Two passes are made:
    the first gathers "VAR = VALUE" assignments (via 'parse_makefile');
    the second parses each module line of the form
        <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    into an Extension instance.  Returns the list of Extensions.
    """
    from distutils.sysconfig import (parse_makefile, expand_makefile_vars,
                                     _variable_rx)

    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # First pass over the file to gather "VAR = VALUE" assignments.
    vars = parse_makefile(filename)

    # Second pass to gobble up the real content: lines of the form
    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    file = TextFile(filename,
                    strip_comments=1, skip_blanks=1, join_lines=1,
                    lstrip_ws=1, rstrip_ws=1)
    try:
        extensions = []

        while True:
            line = file.readline()
            if line is None:                # eof
                break
            if re.match(_variable_rx, line):  # VAR=VALUE, handled in first pass
                continue

            if line[0] == line[-1] == "*":
                file.warn("'%s' lines not handled yet" % line)
                continue

            line = expand_makefile_vars(line, vars)
            words = split_quoted(line)

            # NB. this parses a slightly different syntax than the old
            # makesetup script: here, there must be exactly one extension per
            # line, and it must be the first word of the line.  I have no idea
            # why the old syntax supported multiple extensions per line, as
            # they all wind up being the same.

            module = words[0]
            ext = Extension(module, [])
            append_next_word = None

            for word in words[1:]:
                if append_next_word is not None:
                    # Previous word (-rpath/-Xlinker/-Xcompiler/-u) promised
                    # that this word is its argument.
                    append_next_word.append(word)
                    append_next_word = None
                    continue

                suffix = os.path.splitext(word)[1]
                switch = word[0:2] ; value = word[2:]

                if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"):
                    # hmm, should we do something about C vs. C++ sources?
                    # or leave it up to the CCompiler implementation to
                    # worry about?
                    ext.sources.append(word)
                elif switch == "-I":
                    ext.include_dirs.append(value)
                elif switch == "-D":
                    equals = value.find("=")
                    if equals == -1:        # bare "-DFOO" -- no value
                        ext.define_macros.append((value, None))
                    else:                   # "-DFOO=blah"
                        # BUGFIX: was 'value[equals+2:]', an off-by-one that
                        # silently dropped the first character of the macro
                        # value ("-DFOO=blah" became ("FOO", "lah")).
                        ext.define_macros.append((value[0:equals],
                                                  value[equals+1:]))
                elif switch == "-U":
                    ext.undef_macros.append(value)
                elif switch == "-C":        # only here 'cause makesetup has it!
                    ext.extra_compile_args.append(word)
                elif switch == "-l":
                    ext.libraries.append(value)
                elif switch == "-L":
                    ext.library_dirs.append(value)
                elif switch == "-R":
                    ext.runtime_library_dirs.append(value)
                elif word == "-rpath":
                    append_next_word = ext.runtime_library_dirs
                elif word == "-Xlinker":
                    append_next_word = ext.extra_link_args
                elif word == "-Xcompiler":
                    append_next_word = ext.extra_compile_args
                elif switch == "-u":
                    ext.extra_link_args.append(word)
                    if not value:
                        # "-u" with no attached symbol: the symbol is the
                        # next word on the line.
                        append_next_word = ext.extra_link_args
                elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
                    # NB. a really faithful emulation of makesetup would
                    # append a .o file to extra_objects only if it
                    # had a slash in it; otherwise, it would s/.o/.c/
                    # and append it to sources.  Hmmmm.
                    ext.extra_objects.append(word)
                else:
                    file.warn("unrecognized argument '%s'" % word)

            extensions.append(ext)
    finally:
        file.close()

    return extensions
|
llava/lib/python3.10/distutils/fancy_getopt.py
ADDED
|
@@ -0,0 +1,457 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.fancy_getopt
|
| 2 |
+
|
| 3 |
+
Wrapper around the standard getopt module that provides the following
|
| 4 |
+
additional features:
|
| 5 |
+
* short and long options are tied together
|
| 6 |
+
* options have help strings, so fancy_getopt could potentially
|
| 7 |
+
create a complete usage summary
|
| 8 |
+
* options set attributes of a passed-in object
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import sys, string, re
|
| 12 |
+
import getopt
|
| 13 |
+
from distutils.errors import *
|
| 14 |
+
|
| 15 |
+
# Much like command_re in distutils.core, this is close to but not quite
# the same as a Python NAME -- except, in the spirit of most GNU
# utilities, we use '-' in place of '_'.  (The spirit of LISP lives on!)
# The similarities to NAME are again not a coincidence...
# Pattern text for a long option name: one letter, then any mix of
# letters, digits, and hyphens.
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
# Anchored form used to validate a complete long option name.
longopt_re = re.compile(r'^%s$' % longopt_pat)

# For recognizing "negative alias" options, eg. "quiet=!verbose"
neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat))

# This is used to translate long options to legitimate Python identifiers
# (for use as attributes of some object): maps '-' to '_'.
longopt_xlate = str.maketrans('-', '_')
|
| 28 |
+
|
| 29 |
+
class FancyGetopt:
    """Wrapper around the standard 'getopt()' module that provides some
    handy extra functionality:
      * short and long options are tied together
      * options have help strings, and help text can be assembled
        from them
      * options set attributes of a passed-in object
      * boolean options can have "negative aliases" -- eg. if
        --quiet is the "negative alias" of --verbose, then "--quiet"
        on the command line sets 'verbose' to false
    """

    def __init__(self, option_table=None):
        # The option table is (currently) a list of tuples.  The
        # tuples may have 3 or four values:
        #   (long_option, short_option, help_string [, repeatable])
        # if an option takes an argument, its long_option should have '='
        # appended; short_option should just be a single character, no ':'
        # in any case.  If a long_option doesn't have a corresponding
        # short_option, short_option should be None.  All option tuples
        # must have long options.
        self.option_table = option_table

        # 'option_index' maps long option names to entries in the option
        # table (ie. those 3-tuples).
        self.option_index = {}
        if self.option_table:
            self._build_index()

        # 'alias' records (duh) alias options; {'foo': 'bar'} means
        # --foo is an alias for --bar
        self.alias = {}

        # 'negative_alias' keeps track of options that are the boolean
        # opposite of some other option
        self.negative_alias = {}

        # These keep track of the information in the option table.  We
        # don't actually populate these structures until we're ready to
        # parse the command-line, since the 'option_table' passed in here
        # isn't necessarily the final word.
        self.short_opts = []
        self.long_opts = []
        self.short2long = {}
        self.attr_name = {}
        self.takes_arg = {}

        # And 'option_order' is filled up in 'getopt()'; it records the
        # original order of options (and their values) on the command-line,
        # but expands short options, converts aliases, etc.
        self.option_order = []

    def _build_index(self):
        """Rebuild 'option_index' from the current option table, keyed by
        long option name."""
        self.option_index.clear()
        for option in self.option_table:
            self.option_index[option[0]] = option

    def set_option_table(self, option_table):
        """Replace the option table wholesale and re-index it."""
        self.option_table = option_table
        self._build_index()

    def add_option(self, long_option, short_option=None, help_string=None):
        """Append one option tuple to the table; raises
        DistutilsGetoptError if 'long_option' is already defined."""
        if long_option in self.option_index:
            raise DistutilsGetoptError(
                "option conflict: already an option '%s'" % long_option)
        else:
            option = (long_option, short_option, help_string)
            self.option_table.append(option)
            self.option_index[long_option] = option

    def has_option(self, long_option):
        """Return true if the option table for this parser has an
        option with long name 'long_option'."""
        return long_option in self.option_index

    def get_attr_name(self, long_option):
        """Translate long option name 'long_option' to the form it
        has as an attribute of some object: ie., translate hyphens
        to underscores."""
        return long_option.translate(longopt_xlate)

    def _check_alias_dict(self, aliases, what):
        """Validate that every key and value in an alias mapping names a
        defined option; 'what' is used in the error message."""
        assert isinstance(aliases, dict)
        for (alias, opt) in aliases.items():
            if alias not in self.option_index:
                raise DistutilsGetoptError(("invalid %s '%s': "
                       "option '%s' not defined") % (what, alias, alias))
            if opt not in self.option_index:
                raise DistutilsGetoptError(("invalid %s '%s': "
                       "aliased option '%s' not defined") % (what, alias, opt))

    def set_aliases(self, alias):
        """Set the aliases for this option parser."""
        self._check_alias_dict(alias, "alias")
        self.alias = alias

    def set_negative_aliases(self, negative_alias):
        """Set the negative aliases for this option parser.
        'negative_alias' should be a dictionary mapping option names to
        option names, both the key and value must already be defined
        in the option table."""
        self._check_alias_dict(negative_alias, "negative alias")
        self.negative_alias = negative_alias

    def _grok_option_table(self):
        """Populate the various data structures that keep tabs on the
        option table.  Called by 'getopt()' before it can do anything
        worthwhile.
        """
        self.long_opts = []
        self.short_opts = []
        self.short2long.clear()
        # 'repeat' maps long option name -> repeatable flag (0/1); note it
        # is created here, not in __init__.
        self.repeat = {}

        for option in self.option_table:
            if len(option) == 3:
                long, short, help = option
                repeat = 0
            elif len(option) == 4:
                long, short, help, repeat = option
            else:
                # the option table is part of the code, so simply
                # assert that it is correct
                raise ValueError("invalid option tuple: %r" % (option,))

            # Type- and value-check the option names
            if not isinstance(long, str) or len(long) < 2:
                raise DistutilsGetoptError(("invalid long option '%s': "
                       "must be a string of length >= 2") % long)

            if (not ((short is None) or
                     (isinstance(short, str) and len(short) == 1))):
                raise DistutilsGetoptError("invalid short option '%s': "
                       "must a single character or None" % short)

            self.repeat[long] = repeat
            self.long_opts.append(long)

            if long[-1] == '=':             # option takes an argument?
                if short: short = short + ':'
                long = long[0:-1]
                self.takes_arg[long] = 1
            else:
                # Is option is a "negative alias" for some other option (eg.
                # "quiet" == "!verbose")?
                alias_to = self.negative_alias.get(long)
                if alias_to is not None:
                    if self.takes_arg[alias_to]:
                        raise DistutilsGetoptError(
                            "invalid negative alias '%s': "
                            "aliased option '%s' takes a value"
                            % (long, alias_to))

                    self.long_opts[-1] = long # XXX redundant?!
                self.takes_arg[long] = 0

            # If this is an alias option, make sure its "takes arg" flag is
            # the same as the option it's aliased to.
            alias_to = self.alias.get(long)
            if alias_to is not None:
                if self.takes_arg[long] != self.takes_arg[alias_to]:
                    raise DistutilsGetoptError(
                        "invalid alias '%s': inconsistent with "
                        "aliased option '%s' (one of them takes a value, "
                        "the other doesn't"
                        % (long, alias_to))

            # Now enforce some bondage on the long option name, so we can
            # later translate it to an attribute name on some object.  Have
            # to do this a bit late to make sure we've removed any trailing
            # '='.
            if not longopt_re.match(long):
                raise DistutilsGetoptError(
                    "invalid long option name '%s' "
                    "(must be letters, numbers, hyphens only" % long)

            self.attr_name[long] = self.get_attr_name(long)
            if short:
                self.short_opts.append(short)
                self.short2long[short[0]] = long

    def getopt(self, args=None, object=None):
        """Parse command-line options in args. Store as attributes on object.

        If 'args' is None or not supplied, uses 'sys.argv[1:]'.  If
        'object' is None or not supplied, creates a new OptionDummy
        object, stores option values there, and returns a tuple (args,
        object).  If 'object' is supplied, it is modified in place and
        'getopt()' just returns 'args'; in both cases, the returned
        'args' is a modified copy of the passed-in 'args' list, which
        is left untouched.
        """
        if args is None:
            args = sys.argv[1:]
        if object is None:
            object = OptionDummy()
            created_object = True
        else:
            created_object = False

        self._grok_option_table()

        short_opts = ' '.join(self.short_opts)
        try:
            opts, args = getopt.getopt(args, short_opts, self.long_opts)
        except getopt.error as msg:
            raise DistutilsArgError(msg)

        for opt, val in opts:
            if len(opt) == 2 and opt[0] == '-': # it's a short option
                opt = self.short2long[opt[1]]
            else:
                assert len(opt) > 2 and opt[:2] == '--'
                opt = opt[2:]

            alias = self.alias.get(opt)
            if alias:
                opt = alias

            if not self.takes_arg[opt]:     # boolean option?
                assert val == '', "boolean option can't have value"
                alias = self.negative_alias.get(opt)
                if alias:
                    opt = alias
                    val = 0
                else:
                    val = 1

            attr = self.attr_name[opt]
            # The only repeating option at the moment is 'verbose'.
            # It has a negative option -q quiet, which should set verbose = 0.
            # NOTE(review): 'self.repeat' is keyed by long option name, but
            # looked up here by attribute name (hyphens translated to
            # underscores); the two agree only for options without hyphens.
            # Confirm before relying on repeatable hyphenated options.
            if val and self.repeat.get(attr) is not None:
                val = getattr(object, attr, 0) + 1
            setattr(object, attr, val)
            self.option_order.append((opt, val))

        # for opts
        if created_object:
            return args, object
        else:
            return args

    def get_option_order(self):
        """Returns the list of (option, value) tuples processed by the
        previous run of 'getopt()'.  Raises RuntimeError if
        'getopt()' hasn't been called yet.
        """
        if self.option_order is None:
            raise RuntimeError("'getopt()' hasn't been called yet")
        else:
            return self.option_order

    def generate_help(self, header=None):
        """Generate help text (a list of strings, one per suggested line of
        output) from the option table for this FancyGetopt object.
        """
        # Blithely assume the option table is good: probably wouldn't call
        # 'generate_help()' unless you've already called 'getopt()'.

        # First pass: determine maximum length of long option names
        max_opt = 0
        for option in self.option_table:
            long = option[0]
            short = option[1]
            l = len(long)
            if long[-1] == '=':
                l = l - 1
            if short is not None:
                l = l + 5                   # " (-x)" where short == 'x'
            if l > max_opt:
                max_opt = l

        opt_width = max_opt + 2 + 2 + 2     # room for indent + dashes + gutter

        # Typical help block looks like this:
        #   --foo       controls foonabulation
        # Help block for longest option looks like this:
        #   --flimflam  set the flim-flam level
        # and with wrapped text:
        #   --flimflam  set the flim-flam level (must be between
        #               0 and 100, except on Tuesdays)
        # Options with short names will have the short name shown (but
        # it doesn't contribute to max_opt):
        #   --foo (-f)  controls foonabulation
        # If adding the short option would make the left column too wide,
        # we push the explanation off to the next line
        #   --flimflam (-l)
        #               set the flim-flam level
        # Important parameters:
        #   - 2 spaces before option block start lines
        #   - 2 dashes for each long option name
        #   - min. 2 spaces between option and explanation (gutter)
        #   - 5 characters (incl. space) for short option name

        # Now generate lines of help text.  (If 80 columns were good enough
        # for Jesus, then 78 columns are good enough for me!)
        line_width = 78
        text_width = line_width - opt_width
        big_indent = ' ' * opt_width
        if header:
            lines = [header]
        else:
            lines = ['Option summary:']

        for option in self.option_table:
            long, short, help = option[:3]
            text = wrap_text(help, text_width)
            if long[-1] == '=':
                long = long[0:-1]

            # Case 1: no short option at all (makes life easy)
            if short is None:
                if text:
                    lines.append("  --%-*s  %s" % (max_opt, long, text[0]))
                else:
                    lines.append("  --%-*s  " % (max_opt, long))

            # Case 2: we have a short option, so we have to include it
            # just after the long option
            else:
                opt_names = "%s (-%s)" % (long, short)
                if text:
                    lines.append("  --%-*s  %s" %
                                 (max_opt, opt_names, text[0]))
                else:
                    # NOTE(review): this format has two conversion specifiers
                    # ('*' and 's') but is fed a single string; it would raise
                    # TypeError if reached (short option with no help text).
                    # Confirm before relying on this branch.
                    lines.append("  --%-*s" % opt_names)

            for l in text[1:]:
                lines.append(big_indent + l)
        return lines

    def print_help(self, header=None, file=None):
        """Write the generated help text to 'file' (default sys.stdout),
        one line per write."""
        if file is None:
            file = sys.stdout
        for line in self.generate_help(header):
            file.write(line + "\n")
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
def fancy_getopt(options, negative_opt, object, args):
    """Convenience wrapper: build a FancyGetopt parser from 'options',
    install 'negative_opt' as its negative aliases, and parse 'args'
    into attributes of 'object' (see FancyGetopt.getopt for the return
    value)."""
    getopt_parser = FancyGetopt(options)
    getopt_parser.set_negative_aliases(negative_opt)
    return getopt_parser.getopt(args, object)
|
| 371 |
+
|
| 372 |
+
|
| 373 |
+
# Translation table mapping every whitespace character to a plain space,
# so wrap_text() can treat all whitespace uniformly.
WS_TRANS = {ord(_wschar) : ' ' for _wschar in string.whitespace}

def wrap_text(text, width):
    """wrap_text(text : string, width : int) -> [string]

    Split 'text' into multiple lines of no more than 'width' characters
    each, and return the list of strings that results.
    """
    if text is None:
        return []
    if len(text) <= width:
        return [text]

    # Normalize whitespace, then split into alternating word/separator
    # chunks (runs of spaces and runs of hyphens are break points).
    normalized = text.expandtabs().translate(WS_TRANS)
    pieces = [piece for piece in re.split(r'( +|-+)', normalized) if piece]
    wrapped = []

    while pieces:
        line_parts = []     # chunks collected for the current output line
        line_len = 0        # running length of the current output line

        while pieces:
            chunk_len = len(pieces[0])
            if line_len + chunk_len > width:
                # Line is full; a trailing all-space chunk is dropped.
                if line_parts and line_parts[-1][0] == ' ':
                    del line_parts[-1]
                break
            line_parts.append(pieces.pop(0))
            line_len += chunk_len

        if pieces:
            # A single chunk longer than 'width' gets broken hard at the
            # line width.
            if line_len == 0:
                line_parts.append(pieces[0][:width])
                pieces[0] = pieces[0][width:]

            # Whitespace at the start of the next line is discarded (chunks
            # from the split are either all-space or space-free).
            if pieces[0][0] == ' ':
                del pieces[0]

        wrapped.append(''.join(line_parts))

    return wrapped
|
| 427 |
+
|
| 428 |
+
|
| 429 |
+
def translate_longopt(opt):
    """Convert a long option name to a valid Python identifier by
    changing "-" to "_".
    """
    # Equivalent to opt.translate(longopt_xlate): the table maps only
    # '-' to '_', so a plain replace does the same work.
    return opt.replace('-', '_')
|
| 434 |
+
|
| 435 |
+
|
| 436 |
+
class OptionDummy:
    """Dummy class just used as a place to hold command-line option
    values as instance attributes."""

    def __init__(self, options=[]):
        """Create a new OptionDummy instance.  The attributes listed in
        'options' will be initialized to None."""
        # 'options' is only iterated, never mutated, so the shared
        # mutable default is harmless here.
        for attr in options:
            setattr(self, attr, None)
|
| 445 |
+
|
| 446 |
+
|
| 447 |
+
if __name__ == "__main__":
    # Demo: exercise wrap_text() at several widths on a fixed paragraph.
    text = """\
Tra-la-la, supercalifragilisticexpialidocious.
How *do* you spell that odd word, anyways?
(Someone ask Mary -- she'll know [or she'll
say, "How should I know?"].)"""

    for demo_width in (10, 20, 30, 40):
        print("width: %d" % demo_width)
        print("\n".join(wrap_text(text, demo_width)))
        print()
|
llava/lib/python3.10/distutils/file_util.py
ADDED
|
@@ -0,0 +1,238 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.file_util
|
| 2 |
+
|
| 3 |
+
Utility functions for operating on single files.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
from distutils.errors import DistutilsFileError
|
| 8 |
+
from distutils import log
|
| 9 |
+
|
| 10 |
+
# for generating verbose output in 'copy_file()'
# Maps the 'link' argument of copy_file() (None, 'hard', or 'sym') to the
# verb used in the log message; an unknown key means an invalid 'link'.
_copy_action = { None:   'copying',
                 'hard': 'hard linking',
                 'sym':  'symbolically linking' }
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def _copy_file_contents(src, dst, buffer_size=16*1024):
|
| 17 |
+
"""Copy the file 'src' to 'dst'; both must be filenames. Any error
|
| 18 |
+
opening either file, reading from 'src', or writing to 'dst', raises
|
| 19 |
+
DistutilsFileError. Data is read/written in chunks of 'buffer_size'
|
| 20 |
+
bytes (default 16k). No attempt is made to handle anything apart from
|
| 21 |
+
regular files.
|
| 22 |
+
"""
|
| 23 |
+
# Stolen from shutil module in the standard library, but with
|
| 24 |
+
# custom error-handling added.
|
| 25 |
+
fsrc = None
|
| 26 |
+
fdst = None
|
| 27 |
+
try:
|
| 28 |
+
try:
|
| 29 |
+
fsrc = open(src, 'rb')
|
| 30 |
+
except OSError as e:
|
| 31 |
+
raise DistutilsFileError("could not open '%s': %s" % (src, e.strerror))
|
| 32 |
+
|
| 33 |
+
if os.path.exists(dst):
|
| 34 |
+
try:
|
| 35 |
+
os.unlink(dst)
|
| 36 |
+
except OSError as e:
|
| 37 |
+
raise DistutilsFileError(
|
| 38 |
+
"could not delete '%s': %s" % (dst, e.strerror))
|
| 39 |
+
|
| 40 |
+
try:
|
| 41 |
+
fdst = open(dst, 'wb')
|
| 42 |
+
except OSError as e:
|
| 43 |
+
raise DistutilsFileError(
|
| 44 |
+
"could not create '%s': %s" % (dst, e.strerror))
|
| 45 |
+
|
| 46 |
+
while True:
|
| 47 |
+
try:
|
| 48 |
+
buf = fsrc.read(buffer_size)
|
| 49 |
+
except OSError as e:
|
| 50 |
+
raise DistutilsFileError(
|
| 51 |
+
"could not read from '%s': %s" % (src, e.strerror))
|
| 52 |
+
|
| 53 |
+
if not buf:
|
| 54 |
+
break
|
| 55 |
+
|
| 56 |
+
try:
|
| 57 |
+
fdst.write(buf)
|
| 58 |
+
except OSError as e:
|
| 59 |
+
raise DistutilsFileError(
|
| 60 |
+
"could not write to '%s': %s" % (dst, e.strerror))
|
| 61 |
+
finally:
|
| 62 |
+
if fdst:
|
| 63 |
+
fdst.close()
|
| 64 |
+
if fsrc:
|
| 65 |
+
fsrc.close()
|
| 66 |
+
|
| 67 |
+
def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0,
              link=None, verbose=1, dry_run=0):
    """Copy a file 'src' to 'dst'.  If 'dst' is a directory, then 'src' is
    copied there with the same name; otherwise, it must be a filename.  (If
    the file exists, it will be ruthlessly clobbered.)  If 'preserve_mode'
    is true (the default), the file's mode (type and permission bits, or
    whatever is analogous on the current platform) is copied.  If
    'preserve_times' is true (the default), the last-modified and
    last-access times are copied as well.  If 'update' is true, 'src' will
    only be copied if 'dst' does not exist, or if 'dst' does exist but is
    older than 'src'.

    'link' allows you to make hard links (os.link) or symbolic links
    (os.symlink) instead of copying: set it to "hard" or "sym"; if it is
    None (the default), files are copied.  Don't set 'link' on systems that
    don't support it: 'copy_file()' doesn't check if hard or symbolic
    linking is available. If hardlink fails, falls back to
    _copy_file_contents().

    Under Mac OS, uses the native file copy function in macostools; on
    other systems, uses '_copy_file_contents()' to copy file contents.

    Return a tuple (dest_name, copied): 'dest_name' is the actual name of
    the output file, and 'copied' is true if the file was copied (or would
    have been copied, if 'dry_run' true).
    """
    # XXX if the destination file already exists, we clobber it if
    # copying, but blow up if linking.  Hmmm.  And I don't know what
    # macostools.copyfile() does.  Should definitely be consistent, and
    # should probably blow up if destination exists and we would be
    # changing it (ie. it's not already a hard/soft link to src OR
    # (not update) and (src newer than dst).

    from distutils.dep_util import newer
    from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE

    if not os.path.isfile(src):
        raise DistutilsFileError(
              "can't copy '%s': doesn't exist or not a regular file" % src)

    # Resolve a directory destination to a full file path; 'dir' is kept
    # only for the short form of the log message below.
    if os.path.isdir(dst):
        dir = dst
        dst = os.path.join(dst, os.path.basename(src))
    else:
        dir = os.path.dirname(dst)

    if update and not newer(src, dst):
        if verbose >= 1:
            log.debug("not copying %s (output up-to-date)", src)
        return (dst, 0)

    # Validate 'link' by looking up its log verb; any key other than
    # None/'hard'/'sym' is rejected here.
    try:
        action = _copy_action[link]
    except KeyError:
        raise ValueError("invalid value '%s' for 'link' argument" % link)

    if verbose >= 1:
        # When the basename is unchanged, log only the target directory.
        if os.path.basename(dst) == os.path.basename(src):
            log.info("%s %s -> %s", action, src, dir)
        else:
            log.info("%s %s -> %s", action, src, dst)

    if dry_run:
        return (dst, 1)

    # If linking (hard or symbolic), use the appropriate system call
    # (Unix only, of course, but that's the caller's responsibility)
    elif link == 'hard':
        # Skip linking when dst already is the same file as src.
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            try:
                os.link(src, dst)
                return (dst, 1)
            except OSError:
                # If hard linking fails, fall back on copying file
                # (some special filesystems don't support hard linking
                #  even under Unix, see issue #8876).
                pass
    elif link == 'sym':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.symlink(src, dst)
            return (dst, 1)

    # Otherwise (non-Mac, not linking), copy the file contents and
    # (optionally) copy the times and mode.
    _copy_file_contents(src, dst)
    if preserve_mode or preserve_times:
        st = os.stat(src)

        # According to David Ascher <da@ski.org>, utime() should be done
        # before chmod() (at least under NT).
        if preserve_times:
            os.utime(dst, (st[ST_ATIME], st[ST_MTIME]))
        if preserve_mode:
            os.chmod(dst, S_IMODE(st[ST_MODE]))

    return (dst, 1)
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
# XXX I suspect this is Unix-specific -- need porting help!
def move_file (src, dst,
               verbose=1,
               dry_run=0):

    """Move a file 'src' to 'dst'. If 'dst' is a directory, the file will
    be moved into it with the same name; otherwise, 'src' is just renamed
    to 'dst'. Return the new full name of the file.

    Handles cross-device moves on Unix using 'copy_file()'. What about
    other systems???
    """
    from os.path import exists, isfile, isdir, basename, dirname
    import errno

    if verbose >= 1:
        log.info("moving %s -> %s", src, dst)

    # In dry-run mode report the destination but touch nothing.
    if dry_run:
        return dst

    if not isfile(src):
        raise DistutilsFileError("can't move '%s': not a regular file" % src)

    # Moving into a directory keeps the source's basename; moving onto
    # an existing non-directory path is refused rather than overwritten.
    if isdir(dst):
        dst = os.path.join(dst, basename(src))
    elif exists(dst):
        raise DistutilsFileError(
              "can't move '%s': destination '%s' already exists" %
              (src, dst))

    if not isdir(dirname(dst)):
        raise DistutilsFileError(
              "can't move '%s': destination '%s' not a valid path" %
              (src, dst))

    copy_it = False
    try:
        os.rename(src, dst)
    except OSError as e:
        (num, msg) = e.args
        # EXDEV: src and dst are on different filesystems, where
        # rename() cannot work -- fall back to copy-then-delete.
        if num == errno.EXDEV:
            copy_it = True
        else:
            raise DistutilsFileError(
                  "couldn't move '%s' to '%s': %s" % (src, dst, msg))

    if copy_it:
        copy_file(src, dst, verbose=verbose)
        try:
            os.unlink(src)
        except OSError as e:
            (num, msg) = e.args
            # Deleting the source failed: undo the copy (best-effort)
            # so we don't leave two live copies of the file around.
            try:
                os.unlink(dst)
            except OSError:
                pass
            raise DistutilsFileError(
                  "couldn't move '%s' to '%s' by copy/delete: "
                  "delete '%s' failed: %s"
                  % (src, dst, src, msg))
    return dst
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
def write_file (filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.

    Each element of 'contents' is written followed by a newline; the
    file is opened in text mode, so the platform's line-ending
    convention applies.
    """
    # A 'with' block guarantees the file is closed even if a write
    # raises, replacing the original try/finally bookkeeping.
    with open(filename, "w") as f:
        for line in contents:
            f.write(line + "\n")
|
llava/lib/python3.10/distutils/filelist.py
ADDED
|
@@ -0,0 +1,327 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.filelist
|
| 2 |
+
|
| 3 |
+
Provides the FileList class, used for poking about the filesystem
|
| 4 |
+
and building lists of files.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import os, re
|
| 8 |
+
import fnmatch
|
| 9 |
+
import functools
|
| 10 |
+
from distutils.util import convert_path
|
| 11 |
+
from distutils.errors import DistutilsTemplateError, DistutilsInternalError
|
| 12 |
+
from distutils import log
|
| 13 |
+
|
| 14 |
+
class FileList:
    """A list of files built by on exploring the filesystem and filtered by
    applying various patterns to what we find there.

    Instance attributes:
      dir
        directory from which files will be taken -- only used if
        'allfiles' not supplied to constructor
      files
        list of filenames currently being built/filtered/manipulated
      allfiles
        complete list of files under consideration (ie. without any
        filtering applied)
    """

    def __init__(self, warn=None, debug_print=None):
        # ignore argument to FileList, but keep them for backwards
        # compatibility
        # 'allfiles' is populated lazily -- see include_pattern().
        self.allfiles = None
        self.files = []

    def set_allfiles(self, allfiles):
        # Install an externally built candidate list, bypassing findall().
        self.allfiles = allfiles

    def findall(self, dir=os.curdir):
        # Populate 'allfiles' by walking 'dir' (module-level findall()).
        self.allfiles = findall(dir)

    def debug_print(self, msg):
        """Print 'msg' to stdout if the global DEBUG (taken from the
        DISTUTILS_DEBUG environment variable) flag is true.
        """
        from distutils.debug import DEBUG
        if DEBUG:
            print(msg)

    # -- List-like methods ---------------------------------------------

    def append(self, item):
        self.files.append(item)

    def extend(self, items):
        self.files.extend(items)

    def sort(self):
        # Not a strict lexical sort!
        # Sorting on (dirname, basename) tuples, then rejoining, keeps
        # each directory's entries grouped together.
        sortable_files = sorted(map(os.path.split, self.files))
        self.files = []
        for sort_tuple in sortable_files:
            self.files.append(os.path.join(*sort_tuple))


    # -- Other miscellaneous utility methods ---------------------------

    def remove_duplicates(self):
        # Assumes list has been sorted!
        # Walk backwards so deleting an element never shifts an index
        # we have yet to visit.
        for i in range(len(self.files) - 1, 0, -1):
            if self.files[i] == self.files[i - 1]:
                del self.files[i]


    # -- "File template" methods ---------------------------------------

    def _parse_template_line(self, line):
        """Split one template (MANIFEST.in-style) line into its parts.

        Returns (action, patterns, dir, dir_pattern); which of the last
        three is non-None depends on 'action'.  Raises
        DistutilsTemplateError for an unknown action or a wrong number
        of arguments.
        """
        words = line.split()
        action = words[0]

        patterns = dir = dir_pattern = None

        if action in ('include', 'exclude',
                      'global-include', 'global-exclude'):
            if len(words) < 2:
                raise DistutilsTemplateError(
                      "'%s' expects <pattern1> <pattern2> ..." % action)
            patterns = [convert_path(w) for w in words[1:]]
        elif action in ('recursive-include', 'recursive-exclude'):
            if len(words) < 3:
                raise DistutilsTemplateError(
                      "'%s' expects <dir> <pattern1> <pattern2> ..." % action)
            dir = convert_path(words[1])
            patterns = [convert_path(w) for w in words[2:]]
        elif action in ('graft', 'prune'):
            if len(words) != 2:
                raise DistutilsTemplateError(
                      "'%s' expects a single <dir_pattern>" % action)
            dir_pattern = convert_path(words[1])
        else:
            raise DistutilsTemplateError("unknown action '%s'" % action)

        return (action, patterns, dir, dir_pattern)

    def process_template_line(self, line):
        """Apply one template line to 'self.files', logging a warning
        whenever a pattern selects or removes nothing.
        """
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            self.debug_print("include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=1):
                    log.warn("warning: no files found matching '%s'",
                             pattern)

        elif action == 'exclude':
            self.debug_print("exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=1):
                    log.warn(("warning: no previously-included files "
                              "found matching '%s'"), pattern)

        elif action == 'global-include':
            self.debug_print("global-include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=0):
                    log.warn(("warning: no files found matching '%s' "
                              "anywhere in distribution"), pattern)

        elif action == 'global-exclude':
            self.debug_print("global-exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=0):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found anywhere in distribution"),
                             pattern)

        elif action == 'recursive-include':
            self.debug_print("recursive-include %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.include_pattern(pattern, prefix=dir):
                    log.warn(("warning: no files found matching '%s' "
                              "under directory '%s'"),
                             pattern, dir)

        elif action == 'recursive-exclude':
            self.debug_print("recursive-exclude %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, prefix=dir):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found under directory '%s'"),
                             pattern, dir)

        elif action == 'graft':
            self.debug_print("graft " + dir_pattern)
            # pattern=None + prefix selects everything under the directory.
            if not self.include_pattern(None, prefix=dir_pattern):
                log.warn("warning: no directories found matching '%s'",
                         dir_pattern)

        elif action == 'prune':
            self.debug_print("prune " + dir_pattern)
            if not self.exclude_pattern(None, prefix=dir_pattern):
                log.warn(("no previously-included directories found "
                          "matching '%s'"), dir_pattern)
        else:
            # _parse_template_line() already rejected unknown actions.
            raise DistutilsInternalError(
                  "this cannot happen: invalid action '%s'" % action)


    # -- Filtering/selection methods -----------------------------------

    def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
        """Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern. Patterns
        are not quite the same as implemented by the 'fnmatch' module: '*'
        and '?' match non-special characters, where "special" is platform-
        dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match. 'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found, False otherwise.
        """
        # XXX docstring lying about what the special chars are?
        files_found = False
        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
        self.debug_print("include_pattern: applying regex r'%s'" %
                         pattern_re.pattern)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.debug_print(" adding " + name)
                self.files.append(name)
                files_found = True
        return files_found


    def exclude_pattern (self, pattern,
                         anchor=1, prefix=None, is_regex=0):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'. Other parameters are the same as for
        'include_pattern()', above.
        The list 'self.files' is modified in place.
        Return True if files are found, False otherwise.
        """
        files_found = False
        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
        self.debug_print("exclude_pattern: applying regex r'%s'" %
                         pattern_re.pattern)
        # Walk backwards so in-place deletion is index-safe.
        for i in range(len(self.files)-1, -1, -1):
            if pattern_re.search(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                files_found = True
        return files_found
|
| 241 |
+
|
| 242 |
+
|
| 243 |
+
# ----------------------------------------------------------------------
|
| 244 |
+
# Utility functions
|
| 245 |
+
|
| 246 |
+
def _find_all_simple(path):
|
| 247 |
+
"""
|
| 248 |
+
Find all files under 'path'
|
| 249 |
+
"""
|
| 250 |
+
results = (
|
| 251 |
+
os.path.join(base, file)
|
| 252 |
+
for base, dirs, files in os.walk(path, followlinks=True)
|
| 253 |
+
for file in files
|
| 254 |
+
)
|
| 255 |
+
return filter(os.path.isfile, results)
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
def findall(dir=os.curdir):
    """
    Find all files under 'dir' and return the list of full filenames.
    Unless dir is '.', return full filenames with dir prepended.
    """
    found = _find_all_simple(dir)
    if dir == os.curdir:
        # Strip the leading './' so names are relative to the cwd.
        return [os.path.relpath(name, start=dir) for name in found]
    return list(found)
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
def glob_to_re(pattern):
    """Translate a shell-like glob pattern to a regular expression; return
    a string containing the regex.

    Differs from 'fnmatch.translate()' in that the '.' produced for '?'
    (and the '.*' for '*') is tightened so it cannot match the platform's
    path separator -- wildcards should not cross directory boundaries.
    """
    translated = fnmatch.translate(pattern)
    # We are rewriting a regex with a regex, so a backslash separator
    # (Windows) must be escaped twice to survive both layers.
    sep_class = r'\\\\' if os.sep == '\\' else os.sep
    # Replace each '.' that is not itself escaped -- i.e. preceded by an
    # even number of backslashes -- with a class excluding the separator.
    return re.sub(r'((?<!\\)(\\\\)*)\.', r'\1[^%s]' % sep_class, translated)
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0):
    """Translate a shell-like wildcard pattern to a compiled regular
    expression. Return the compiled regex. If 'is_regex' true,
    then 'pattern' is directly compiled to a regex (if it's a string)
    or just returned as-is (assumes it's a regex object).
    """
    if is_regex:
        if isinstance(pattern, str):
            return re.compile(pattern)
        else:
            return pattern

    # ditch start and end characters
    # Translating a single plain character and splitting on it recovers
    # whatever framing glob_to_re()/fnmatch.translate() wraps around the
    # pattern body, without hard-coding that framing here.
    start, _, end = glob_to_re('_').partition('_')

    if pattern:
        pattern_re = glob_to_re(pattern)
        assert pattern_re.startswith(start) and pattern_re.endswith(end)
    else:
        # pattern=None (used by graft/prune) means "match anything".
        pattern_re = ''

    if prefix is not None:
        prefix_re = glob_to_re(prefix)
        assert prefix_re.startswith(start) and prefix_re.endswith(end)
        # Strip the framing from both parts so they can be spliced into
        # one anchored regex below.
        prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
        sep = os.sep
        if os.sep == '\\':
            sep = r'\\'
        pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
        # Anchor at the start: <prefix><sep><anything><pattern>.
        pattern_re = r'%s\A%s%s.*%s%s' % (start, prefix_re, sep, pattern_re, end)
    else: # no prefix -- respect anchor flag
        if anchor:
            pattern_re = r'%s\A%s' % (start, pattern_re[len(start):])

    return re.compile(pattern_re)
|
llava/lib/python3.10/distutils/log.py
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A simple log mechanism styled after PEP 282."""

# The class here is styled after PEP 282 so that it could later be
# replaced with a standard Python logging implementation.

DEBUG = 1
INFO = 2
WARN = 3
ERROR = 4
FATAL = 5

import sys

class Log:
    """Emit formatted messages whose level meets a threshold.

    WARN/ERROR/FATAL messages go to stderr; DEBUG/INFO go to stdout.
    """

    def __init__(self, threshold=WARN):
        self.threshold = threshold

    def _log(self, level, msg, args):
        # Unknown levels are always an error, even below the threshold.
        if level not in (DEBUG, INFO, WARN, ERROR, FATAL):
            raise ValueError('%s wrong log level' % str(level))
        if level < self.threshold:
            return
        if args:
            msg = msg % args
        stream = sys.stderr if level in (WARN, ERROR, FATAL) else sys.stdout
        try:
            stream.write('%s\n' % msg)
        except UnicodeEncodeError:
            # emulate backslashreplace error handler for streams whose
            # encoding cannot represent the message
            encoding = stream.encoding
            msg = msg.encode(encoding, "backslashreplace").decode(encoding)
            stream.write('%s\n' % msg)
        stream.flush()

    def log(self, level, msg, *args):
        self._log(level, msg, args)

    def debug(self, msg, *args):
        self._log(DEBUG, msg, args)

    def info(self, msg, *args):
        self._log(INFO, msg, args)

    def warn(self, msg, *args):
        self._log(WARN, msg, args)

    def error(self, msg, *args):
        self._log(ERROR, msg, args)

    def fatal(self, msg, *args):
        self._log(FATAL, msg, args)

# Module-level convenience API bound to one shared logger instance.
_global_log = Log()
log = _global_log.log
debug = _global_log.debug
info = _global_log.info
warn = _global_log.warn
error = _global_log.error
fatal = _global_log.fatal

def set_threshold(level):
    """Install a new global threshold; return the previous one
    (the return value is used from tests)."""
    previous = _global_log.threshold
    _global_log.threshold = level
    return previous

def set_verbosity(v):
    """Map a verbosity count onto a threshold:
    0 (or less) -> WARN, 1 -> INFO, 2 (or more) -> DEBUG."""
    if v >= 2:
        set_threshold(DEBUG)
    elif v == 1:
        set_threshold(INFO)
    else:
        set_threshold(WARN)
|
llava/lib/python3.10/distutils/msvc9compiler.py
ADDED
|
@@ -0,0 +1,788 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.msvc9compiler
|
| 2 |
+
|
| 3 |
+
Contains MSVCCompiler, an implementation of the abstract CCompiler class
|
| 4 |
+
for the Microsoft Visual Studio 2008.
|
| 5 |
+
|
| 6 |
+
The module is compatible with VS 2005 and VS 2008. You can find legacy support
|
| 7 |
+
for older versions of VS in distutils.msvccompiler.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
# Written by Perry Stoll
|
| 11 |
+
# hacked by Robin Becker and Thomas Heller to do a better job of
|
| 12 |
+
# finding DevStudio (through the registry)
|
| 13 |
+
# ported to VS2005 and VS 2008 by Christian Heimes
|
| 14 |
+
|
| 15 |
+
import os
|
| 16 |
+
import subprocess
|
| 17 |
+
import sys
|
| 18 |
+
import re
|
| 19 |
+
|
| 20 |
+
from distutils.errors import DistutilsExecError, DistutilsPlatformError, \
|
| 21 |
+
CompileError, LibError, LinkError
|
| 22 |
+
from distutils.ccompiler import CCompiler, gen_lib_options
|
| 23 |
+
from distutils import log
|
| 24 |
+
from distutils.util import get_platform
|
| 25 |
+
|
| 26 |
+
import winreg
|
| 27 |
+
|
| 28 |
+
RegOpenKeyEx = winreg.OpenKeyEx
|
| 29 |
+
RegEnumKey = winreg.EnumKey
|
| 30 |
+
RegEnumValue = winreg.EnumValue
|
| 31 |
+
RegError = winreg.error
|
| 32 |
+
|
| 33 |
+
HKEYS = (winreg.HKEY_USERS,
|
| 34 |
+
winreg.HKEY_CURRENT_USER,
|
| 35 |
+
winreg.HKEY_LOCAL_MACHINE,
|
| 36 |
+
winreg.HKEY_CLASSES_ROOT)
|
| 37 |
+
|
| 38 |
+
NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32)
|
| 39 |
+
if NATIVE_WIN64:
|
| 40 |
+
# Visual C++ is a 32-bit application, so we need to look in
|
| 41 |
+
# the corresponding registry branch, if we're running a
|
| 42 |
+
# 64-bit Python on Win64
|
| 43 |
+
VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f"
|
| 44 |
+
WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows"
|
| 45 |
+
NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework"
|
| 46 |
+
else:
|
| 47 |
+
VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f"
|
| 48 |
+
WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows"
|
| 49 |
+
NET_BASE = r"Software\Microsoft\.NETFramework"
|
| 50 |
+
|
| 51 |
+
# A map keyed by get_platform() return values to values accepted by
|
| 52 |
+
# 'vcvarsall.bat'. Note a cross-compile may combine these (eg, 'x86_amd64' is
|
| 53 |
+
# the param to cross-compile on x86 targeting amd64.)
|
| 54 |
+
PLAT_TO_VCVARS = {
|
| 55 |
+
'win32' : 'x86',
|
| 56 |
+
'win-amd64' : 'amd64',
|
| 57 |
+
}
|
| 58 |
+
|
| 59 |
+
class Reg:
    """Helper class to read values from the registry
    """

    def get_value(cls, path, key):
        # Probe every registry root in HKEYS until one holds 'key'
        # under 'path'; raises KeyError if no root does.
        for base in HKEYS:
            d = cls.read_values(base, path)
            if d and key in d:
                return d[key]
        raise KeyError(key)
    # Pre-decorator classmethod spelling, kept from the original code.
    get_value = classmethod(get_value)

    def read_keys(cls, base, key):
        """Return list of registry keys."""
        try:
            handle = RegOpenKeyEx(base, key)
        except RegError:
            # The key does not exist under this root.
            return None
        L = []
        i = 0
        while True:
            try:
                k = RegEnumKey(handle, i)
            except RegError:
                # RegEnumKey raises once the index passes the last subkey.
                break
            L.append(k)
            i += 1
        return L
    read_keys = classmethod(read_keys)

    def read_values(cls, base, key):
        """Return dict of registry keys and values.

        All names are converted to lowercase.
        """
        try:
            handle = RegOpenKeyEx(base, key)
        except RegError:
            return None
        d = {}
        i = 0
        while True:
            try:
                name, value, type = RegEnumValue(handle, i)
            except RegError:
                # End of the value list for this key.
                break
            name = name.lower()
            d[cls.convert_mbcs(name)] = cls.convert_mbcs(value)
            i += 1
        return d
    read_values = classmethod(read_values)

    def convert_mbcs(s):
        # Decode byte strings from the registry using the Windows ANSI
        # ("mbcs") codec; objects without a .decode (already str) and
        # undecodable values pass through unchanged.
        dec = getattr(s, "decode", None)
        if dec is not None:
            try:
                s = dec("mbcs")
            except UnicodeError:
                pass
        return s
    convert_mbcs = staticmethod(convert_mbcs)
|
| 120 |
+
|
| 121 |
+
class MacroExpander:
    # Expands "$(Name)" placeholders using values read from the Visual
    # Studio registry keys for the given VS version.

    def __init__(self, version):
        self.macros = {}
        self.vsbase = VS_BASE % version
        self.load_macros(version)

    def set_macro(self, macro, path, key):
        # Store the registry value under the "$(macro)" spelling used in
        # VS configuration files; Reg.get_value raises KeyError if absent.
        self.macros["$(%s)" % macro] = Reg.get_value(path, key)

    def load_macros(self, version):
        """Populate self.macros from the registry; raises
        DistutilsPlatformError when the required VS installation
        information is missing.
        """
        self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir")
        self.set_macro("FrameworkDir", NET_BASE, "installroot")
        try:
            if version >= 8.0:
                self.set_macro("FrameworkSDKDir", NET_BASE,
                               "sdkinstallrootv2.0")
            else:
                # Versions below 8.0 are unsupported here; force the
                # same missing-key error path as a failed lookup.
                raise KeyError("sdkinstallrootv2.0")
        except KeyError:
            raise DistutilsPlatformError(
            """Python was built with Visual Studio 2008;
extensions must be built with a compiler than can generate compatible binaries.
Visual Studio 2008 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")

        if version >= 9.0:
            self.set_macro("FrameworkVersion", self.vsbase, "clr version")
            self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder")
        else:
            # NOTE(review): legacy (<9.0) branch -- this call passes an
            # HKEY as Reg.get_value's 'path' argument, which does not
            # match that method's (path, key) signature; presumably dead
            # code given the KeyError raised above for version < 8.0.
            # Verify before relying on this path.
            p = r"Software\Microsoft\NET Framework Setup\Product"
            for base in HKEYS:
                try:
                    h = RegOpenKeyEx(base, p)
                except RegError:
                    continue
                key = RegEnumKey(h, 0)
                d = Reg.get_value(base, r"%s\%s" % (p, key))
                self.macros["$(FrameworkVersion)"] = d["version"]

    def sub(self, s):
        # Replace every known "$(Name)" occurrence in 's'.
        for k, v in self.macros.items():
            s = s.replace(k, v)
        return s
|
| 166 |
+
|
| 167 |
+
def get_build_version():
    """Return the version of MSVC that was used to build Python.

    For Python 2.3 and up, the version number is included in
    sys.version.  For earlier versions, assume the compiler is MSVC 6.
    """
    marker = "MSC v."
    pos = sys.version.find(marker)
    if pos == -1:
        # No MSC banner at all -> pre-2.3 build, assume MSVC 6.
        return 6
    digits, _ = sys.version[pos + len(marker):].split(" ", 1)
    major = int(digits[:-2]) - 6
    if major >= 13:
        # v13 was skipped and should be v14
        major += 1
    # Minor version is irrelevant for paths with MSVC 6.
    minor = 0 if major == 6 else int(digits[2:3]) / 10.0
    if major >= 6:
        return major + minor
    # Anything older is a compiler we don't recognize.
    return None
|
| 191 |
+
|
| 192 |
+
def normalize_and_reduce_paths(paths):
    """Return a list of normalized paths with duplicates removed.

    The current order of paths is maintained.
    """
    # Normalizing first makes e.g. /a and /a/ collapse to one entry;
    # dict.fromkeys keeps only the first occurrence, in insertion order.
    return list(dict.fromkeys(os.path.normpath(p) for p in paths))
|
| 205 |
+
|
| 206 |
+
def removeDuplicates(variable):
    """Remove duplicate values of an environment variable.

    *variable* is an os.pathsep-joined string; the first occurrence of
    each entry wins and relative order is preserved.
    """
    # dict.fromkeys de-duplicates while keeping insertion order.
    return os.pathsep.join(dict.fromkeys(variable.split(os.pathsep)))
|
| 216 |
+
|
| 217 |
+
def find_vcvarsall(version):
    """Find the vcvarsall.bat file

    At first it tries to find the productdir of VS 2008 in the registry. If
    that fails it falls back to the VS90COMNTOOLS env var.

    Returns the absolute path of vcvarsall.bat, or None when no valid
    install can be located.
    """
    vsbase = VS_BASE % version
    try:
        productdir = Reg.get_value(r"%s\Setup\VC" % vsbase,
                                   "productdir")
    except KeyError:
        log.debug("Unable to find productdir in registry")
        productdir = None

    if not productdir or not os.path.isdir(productdir):
        # "VS%0.f0COMNTOOLS": e.g. version 9.0 -> "VS90COMNTOOLS"
        toolskey = "VS%0.f0COMNTOOLS" % version
        toolsdir = os.environ.get(toolskey, None)

        if toolsdir and os.path.isdir(toolsdir):
            # The tools dir is <VS root>\Common7\Tools; VC lives two
            # levels up from there.
            productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
            productdir = os.path.abspath(productdir)
            if not os.path.isdir(productdir):
                log.debug("%s is not a valid directory" % productdir)
                return None
        else:
            log.debug("Env var %s is not set or invalid" % toolskey)
    if not productdir:
        log.debug("No productdir found")
        return None
    vcvarsall = os.path.join(productdir, "vcvarsall.bat")
    if os.path.isfile(vcvarsall):
        return vcvarsall
    log.debug("Unable to find vcvarsall.bat")
    return None
|
| 251 |
+
|
| 252 |
+
def query_vcvarsall(version, arch="x86"):
    """Launch vcvarsall.bat and read the settings from its environment

    Runs "vcvarsall.bat <arch> & set" in a subprocess and scrapes the
    resulting environment.  Returns a dict with exactly the keys
    'include', 'lib', 'libpath' and 'path'; raises
    DistutilsPlatformError when vcvarsall.bat is missing or fails, and
    ValueError when not all expected variables were produced.
    """
    vcvarsall = find_vcvarsall(version)
    interesting = {"include", "lib", "libpath", "path"}
    result = {}

    if vcvarsall is None:
        raise DistutilsPlatformError("Unable to find vcvarsall.bat")
    log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version)
    # "& set" dumps the environment after vcvarsall.bat has modified it.
    popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch),
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    try:
        stdout, stderr = popen.communicate()
        if popen.wait() != 0:
            raise DistutilsPlatformError(stderr.decode("mbcs"))

        stdout = stdout.decode("mbcs")
        for line in stdout.split("\n"):
            line = Reg.convert_mbcs(line)
            if '=' not in line:
                # Skip vcvarsall's banner output; only VAR=VALUE lines matter.
                continue
            line = line.strip()
            key, value = line.split('=', 1)
            key = key.lower()
            if key in interesting:
                if value.endswith(os.pathsep):
                    # Drop a trailing path separator so joins stay clean.
                    value = value[:-1]
                result[key] = removeDuplicates(value)

    finally:
        popen.stdout.close()
        popen.stderr.close()

    if len(result) != len(interesting):
        raise ValueError(str(list(result.keys())))

    return result
|
| 291 |
+
|
| 292 |
+
# More globals
# Compiler version used to build this Python; fail fast at import time
# if it is older than what this module supports (VS 2005 / VC 8.0).
VERSION = get_build_version()
if VERSION < 8.0:
    raise DistutilsPlatformError("VC %0.1f is not supported by this module" % VERSION)
# MACROS = MacroExpander(VERSION)
|
| 297 |
+
|
| 298 |
+
class MSVCCompiler(CCompiler) :
    """Concrete class that implements an interface to Microsoft Visual C++,
       as defined by the CCompiler abstract class."""

    compiler_type = 'msvc'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = (_c_extensions + _cpp_extensions +
                      _rc_extensions + _mc_extensions)
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=0, force=0):
        CCompiler.__init__ (self, verbose, dry_run, force)
        # Toolchain discovery is deferred to initialize(); only cheap
        # bookkeeping happens here.
        self.__version = VERSION
        self.__root = r"Software\Microsoft\VisualStudio"
        # self.__macros = MACROS
        self.__paths = []
        # target platform (.plat_name is consistent with 'bdist')
        self.plat_name = None
        self.__arch = None # deprecated name
        self.initialized = False

    def initialize(self, plat_name=None):
        """Locate the MSVC toolchain and set up compile/link options.

        *plat_name* must be 'win32' or 'win-amd64' (defaults to the
        current platform).  Mutates os.environ ('lib', 'include',
        'path') as a side effect.  Must be called exactly once.
        """
        # multi-init means we would need to check platform same each time...
        assert not self.initialized, "don't init multiple times"
        if plat_name is None:
            plat_name = get_platform()
        # sanity check for platforms to prevent obscure errors later.
        ok_plats = 'win32', 'win-amd64'
        if plat_name not in ok_plats:
            raise DistutilsPlatformError("--plat-name must be one of %s" %
                                         (ok_plats,))

        if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
            # Assume that the SDK set up everything alright; don't try to be
            # smarter
            self.cc = "cl.exe"
            self.linker = "link.exe"
            self.lib = "lib.exe"
            self.rc = "rc.exe"
            self.mc = "mc.exe"
        else:
            # On x86, 'vcvars32.bat amd64' creates an env that doesn't work;
            # to cross compile, you use 'x86_amd64'.
            # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross
            # compile use 'x86' (ie, it runs the x86 compiler directly)
            if plat_name == get_platform() or plat_name == 'win32':
                # native build or cross-compile to win32
                plat_spec = PLAT_TO_VCVARS[plat_name]
            else:
                # cross compile from win32 -> some 64bit
                plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \
                            PLAT_TO_VCVARS[plat_name]

            vc_env = query_vcvarsall(VERSION, plat_spec)

            self.__paths = vc_env['path'].split(os.pathsep)
            os.environ['lib'] = vc_env['lib']
            os.environ['include'] = vc_env['include']

            if len(self.__paths) == 0:
                raise DistutilsPlatformError("Python was built with %s, "
                       "and extensions need to be built with the same "
                       "version of the compiler, but it isn't installed."
                       % self.__product)

            self.cc = self.find_exe("cl.exe")
            self.linker = self.find_exe("link.exe")
            self.lib = self.find_exe("lib.exe")
            self.rc = self.find_exe("rc.exe")   # resource compiler
            self.mc = self.find_exe("mc.exe")   # message compiler
            #self.set_path_env_var('lib')
            #self.set_path_env_var('include')

        # extend the MSVC path with the current path
        try:
            for p in os.environ['path'].split(';'):
                self.__paths.append(p)
        except KeyError:
            pass
        self.__paths = normalize_and_reduce_paths(self.__paths)
        os.environ['path'] = ";".join(self.__paths)

        self.preprocess_options = None
        if self.__arch == "x86":
            self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3',
                                     '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
                                          '/Z7', '/D_DEBUG']
        else:
            # Win64
            self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' ,
                                     '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
                                          '/Z7', '/D_DEBUG']

        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
        if self.__version >= 7:
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
                ]
        self.ldflags_static = [ '/nologo']

        self.initialized = True

    # -- Worker methods ------------------------------------------------

    def object_filenames(self,
                         source_filenames,
                         strip_dir=0,
                         output_dir=''):
        """Map source filenames to the object filenames they produce.

        Raises CompileError for unknown source extensions.
        """
        # Copied from ccompiler.py, extended to return .res as 'object'-file
        # for .rc input file
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            (base, ext) = os.path.splitext (src_name)
            base = os.path.splitdrive(base)[1] # Chop off the drive
            base = base[os.path.isabs(base):]  # If abs, chop off leading /
            if ext not in self.src_extensions:
                # Better to raise an exception instead of silently continuing
                # and later complain about sources and targets having
                # different lengths
                raise CompileError ("Don't know how to compile %s" % src_name)
            if strip_dir:
                base = os.path.basename (base)
            if ext in self._rc_extensions:
                obj_names.append (os.path.join (output_dir,
                                                base + self.res_extension))
            elif ext in self._mc_extensions:
                obj_names.append (os.path.join (output_dir,
                                                base + self.res_extension))
            else:
                obj_names.append (os.path.join (output_dir,
                                                base + self.obj_extension))
        return obj_names


    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):
        """Compile *sources* to object files, dispatching on extension.

        .c/.cpp go through cl.exe; .rc through rc.exe; .mc through
        mc.exe then rc.exe.  Returns the list of object filenames.
        Raises CompileError when any tool fails.
        """
        if not self.initialized:
            self.initialize()
        compile_info = self._setup_compile(output_dir, macros, include_dirs,
                                           sources, depends, extra_postargs)
        macros, objects, extra_postargs, pp_opts, build = compile_info

        compile_opts = extra_preargs or []
        compile_opts.append ('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                # Object is already up to date; nothing to build for it.
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts +
                               [output_opt] + [input_opt])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc] +
                               ['-h', h_dir, '-r', rc_dir] + [src])
                    base, _ = os.path.splitext (os.path.basename (src))
                    rc_file = os.path.join (rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc] +
                               ["/fo" + obj] + [rc_file])

                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            else:
                # how to handle this file?
                raise CompileError("Don't know how to compile %s to %s"
                                   % (src, obj))

            output_opt = "/Fo" + obj
            try:
                self.spawn([self.cc] + compile_opts + pp_opts +
                           [input_opt, output_opt] +
                           extra_postargs)
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects


    def create_static_lib(self,
                          objects,
                          output_libname,
                          output_dir=None,
                          debug=0,
                          target_lang=None):
        """Archive *objects* into a static .lib with lib.exe.

        Raises LibError when lib.exe fails; skips the build when the
        output is already up to date.
        """
        if not self.initialized:
            self.initialize()
        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        output_filename = self.library_filename(output_libname,
                                                output_dir=output_dir)

        if self._need_link(objects, output_filename):
            lib_args = objects + ['/OUT:' + output_filename]
            if debug:
                pass # XXX what goes here?
            try:
                self.spawn([self.lib] + lib_args)
            except DistutilsExecError as msg:
                raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)


    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir=None,
             libraries=None,
             library_dirs=None,
             runtime_library_dirs=None,
             export_symbols=None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):
        """Link *objects* into an executable or DLL with link.exe.

        Also embeds the side-by-side manifest via mt.exe when one is
        required.  Raises LinkError when the linker or mt.exe fails.
        """
        if not self.initialized:
            self.initialize()
        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs,
                                        runtime_library_dirs)
        (libraries, library_dirs, runtime_library_dirs) = fixed_args

        if runtime_library_dirs:
            self.warn ("I don't know what to do with 'runtime_library_dirs': "
                       + str (runtime_library_dirs))

        lib_opts = gen_lib_options(self,
                                   library_dirs, runtime_library_dirs,
                                   libraries)
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            if target_desc == CCompiler.EXECUTABLE:
                # Executables drop the leading '/DLL' flag.
                if debug:
                    ldflags = self.ldflags_shared_debug[1:]
                else:
                    ldflags = self.ldflags_shared[1:]
            else:
                if debug:
                    ldflags = self.ldflags_shared_debug
                else:
                    ldflags = self.ldflags_shared

            export_opts = []
            for sym in (export_symbols or []):
                export_opts.append("/EXPORT:" + sym)

            ld_args = (ldflags + lib_opts + export_opts +
                       objects + ['/OUT:' + output_filename])

            # The MSVC linker generates .lib and .exp files, which cannot be
            # suppressed by any linker switches. The .lib files may even be
            # needed! Make sure they are generated in the temporary build
            # directory. Since they have different names for debug and release
            # builds, they can go into the same directory.
            build_temp = os.path.dirname(objects[0])
            if export_symbols is not None:
                (dll_name, dll_ext) = os.path.splitext(
                    os.path.basename(output_filename))
                implib_file = os.path.join(
                    build_temp,
                    self.library_filename(dll_name))
                ld_args.append ('/IMPLIB:' + implib_file)

            self.manifest_setup_ldargs(output_filename, build_temp, ld_args)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath(os.path.dirname(output_filename))
            try:
                self.spawn([self.linker] + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)

            # embed the manifest
            # XXX - this is somewhat fragile - if mt.exe fails, distutils
            # will still consider the DLL up-to-date, but it will not have a
            # manifest.  Maybe we should link to a temp file?  OTOH, that
            # implies a build environment error that shouldn't go undetected.
            mfinfo = self.manifest_get_embed_info(target_desc, ld_args)
            if mfinfo is not None:
                mffilename, mfid = mfinfo
                out_arg = '-outputresource:%s;%s' % (output_filename, mfid)
                try:
                    self.spawn(['mt.exe', '-nologo', '-manifest',
                                mffilename, out_arg])
                except DistutilsExecError as msg:
                    raise LinkError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def manifest_setup_ldargs(self, output_filename, build_temp, ld_args):
        """Append the /MANIFESTFILE linker flag so the manifest lands
        in *build_temp*, where manifest_get_embed_info() can find it."""
        # If we need a manifest at all, an embedded manifest is recommended.
        # See MSDN article titled
        # "How to: Embed a Manifest Inside a C/C++ Application"
        # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
        # Ask the linker to generate the manifest in the temp dir, so
        # we can check it, and possibly embed it, later.
        temp_manifest = os.path.join(
                build_temp,
                os.path.basename(output_filename) + ".manifest")
        ld_args.append('/MANIFESTFILE:' + temp_manifest)

    def manifest_get_embed_info(self, target_desc, ld_args):
        """Return (manifest_filename, resource_id) or None.

        Scans *ld_args* for the /MANIFESTFILE flag added by
        manifest_setup_ldargs().
        """
        # If a manifest should be embedded, return a tuple of
        # (manifest_filename, resource_id).  Returns None if no manifest
        # should be embedded.  See http://bugs.python.org/issue7833 for why
        # we want to avoid any manifest for extension modules if we can.
        for arg in ld_args:
            if arg.startswith("/MANIFESTFILE:"):
                temp_manifest = arg.split(":", 1)[1]
                break
        else:
            # no /MANIFESTFILE so nothing to do.
            return None
        if target_desc == CCompiler.EXECUTABLE:
            # by default, executables always get the manifest with the
            # CRT referenced.
            mfid = 1
        else:
            # Extension modules try and avoid any manifest if possible.
            mfid = 2
        temp_manifest = self._remove_visual_c_ref(temp_manifest)
        if temp_manifest is None:
            return None
        return temp_manifest, mfid

    def _remove_visual_c_ref(self, manifest_file):
        """Strip Visual C runtime references from *manifest_file*.

        Rewrites the file in place; returns its name, or None when the
        stripped manifest references no other assemblies (i.e. nothing
        is left worth embedding).  I/O errors are deliberately ignored.
        """
        try:
            # Remove references to the Visual C runtime, so they will
            # fall through to the Visual C dependency of Python.exe.
            # This way, when installed for a restricted user (e.g.
            # runtimes are not in WinSxS folder, but in Python's own
            # folder), the runtimes do not need to be in every folder
            # with .pyd's.
            # Returns either the filename of the modified manifest or
            # None if no manifest should be embedded.
            manifest_f = open(manifest_file)
            try:
                manifest_buf = manifest_f.read()
            finally:
                manifest_f.close()
            pattern = re.compile(
                r"""<assemblyIdentity.*?name=("|')Microsoft\."""\
                r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
                re.DOTALL)
            manifest_buf = re.sub(pattern, "", manifest_buf)
            pattern = r"<dependentAssembly>\s*</dependentAssembly>"
            manifest_buf = re.sub(pattern, "", manifest_buf)
            # Now see if any other assemblies are referenced - if not, we
            # don't want a manifest embedded.
            pattern = re.compile(
                r"""<assemblyIdentity.*?name=(?:"|')(.+?)(?:"|')"""
                r""".*?(?:/>|</assemblyIdentity>)""", re.DOTALL)
            if re.search(pattern, manifest_buf) is None:
                return None

            manifest_f = open(manifest_file, 'w')
            try:
                manifest_f.write(manifest_buf)
                return manifest_file
            finally:
                manifest_f.close()
        except OSError:
            pass

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options() function, in
    # ccompiler.py.

    def library_dir_option(self, dir):
        """Return the linker flag that adds *dir* to the library search path."""
        return "/LIBPATH:" + dir

    def runtime_library_dir_option(self, dir):
        # MSVC has no rpath equivalent; always an error.
        raise DistutilsPlatformError(
              "don't know how to set runtime library search path for MSVC++")

    def library_option(self, lib):
        """Return the linker argument naming library *lib*."""
        return self.library_filename(lib)


    def find_library_file(self, dirs, lib, debug=0):
        """Search *dirs* for library *lib*; return its path or None."""
        # Prefer a debugging library if found (and requested), but deal
        # with it if we don't have one.
        if debug:
            try_names = [lib + "_d", lib]
        else:
            try_names = [lib]
        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename (name))
                if os.path.exists(libfile):
                    return libfile
        else:
            # Oops, didn't find it in *any* of 'dirs'
            return None

    # Helper methods for using the MSVC registry settings

    def find_exe(self, exe):
        """Return path to an MSVC executable program.

        Tries to find the program in several places: first, one of the
        MSVC program search paths from the registry; next, the directories
        in the PATH environment variable.  If any of those work, return an
        absolute path that is known to exist.  If none of them work, just
        return the original program name, 'exe'.
        """
        for p in self.__paths:
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        # didn't find it; try existing path
        for p in os.environ['Path'].split(';'):
            fn = os.path.join(os.path.abspath(p),exe)
            if os.path.isfile(fn):
                return fn

        return exe
|
llava/lib/python3.10/distutils/msvccompiler.py
ADDED
|
@@ -0,0 +1,643 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.msvccompiler
|
| 2 |
+
|
| 3 |
+
Contains MSVCCompiler, an implementation of the abstract CCompiler class
|
| 4 |
+
for the Microsoft Visual Studio.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
# Written by Perry Stoll
|
| 8 |
+
# hacked by Robin Becker and Thomas Heller to do a better job of
|
| 9 |
+
# finding DevStudio (through the registry)
|
| 10 |
+
|
| 11 |
+
import sys, os
|
| 12 |
+
from distutils.errors import \
|
| 13 |
+
DistutilsExecError, DistutilsPlatformError, \
|
| 14 |
+
CompileError, LibError, LinkError
|
| 15 |
+
from distutils.ccompiler import \
|
| 16 |
+
CCompiler, gen_lib_options
|
| 17 |
+
from distutils import log
|
| 18 |
+
|
| 19 |
+
_can_read_reg = False
|
| 20 |
+
try:
|
| 21 |
+
import winreg
|
| 22 |
+
|
| 23 |
+
_can_read_reg = True
|
| 24 |
+
hkey_mod = winreg
|
| 25 |
+
|
| 26 |
+
RegOpenKeyEx = winreg.OpenKeyEx
|
| 27 |
+
RegEnumKey = winreg.EnumKey
|
| 28 |
+
RegEnumValue = winreg.EnumValue
|
| 29 |
+
RegError = winreg.error
|
| 30 |
+
|
| 31 |
+
except ImportError:
|
| 32 |
+
try:
|
| 33 |
+
import win32api
|
| 34 |
+
import win32con
|
| 35 |
+
_can_read_reg = True
|
| 36 |
+
hkey_mod = win32con
|
| 37 |
+
|
| 38 |
+
RegOpenKeyEx = win32api.RegOpenKeyEx
|
| 39 |
+
RegEnumKey = win32api.RegEnumKey
|
| 40 |
+
RegEnumValue = win32api.RegEnumValue
|
| 41 |
+
RegError = win32api.error
|
| 42 |
+
except ImportError:
|
| 43 |
+
log.info("Warning: Can't read registry to find the "
|
| 44 |
+
"necessary compiler setting\n"
|
| 45 |
+
"Make sure that Python modules winreg, "
|
| 46 |
+
"win32api or win32con are installed.")
|
| 47 |
+
pass
|
| 48 |
+
|
| 49 |
+
if _can_read_reg:
|
| 50 |
+
HKEYS = (hkey_mod.HKEY_USERS,
|
| 51 |
+
hkey_mod.HKEY_CURRENT_USER,
|
| 52 |
+
hkey_mod.HKEY_LOCAL_MACHINE,
|
| 53 |
+
hkey_mod.HKEY_CLASSES_ROOT)
|
| 54 |
+
|
| 55 |
+
def read_keys(base, key):
    """Return the list of subkey names under *key*, or None when the key
    cannot be opened."""
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    subkeys = []
    index = 0
    while True:
        try:
            subkeys.append(RegEnumKey(handle, index))
        except RegError:
            # Enumeration past the last subkey raises RegError: done.
            return subkeys
        index += 1
|
| 71 |
+
|
| 72 |
+
def read_values(base, key):
    """Return a dict of registry value names (lowercased) to values under
    *key*, or None when the key cannot be opened.

    Both names and values are passed through convert_mbcs().
    """
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    values = {}
    index = 0
    while True:
        try:
            name, value, type = RegEnumValue(handle, index)
        except RegError:
            # Enumeration past the last value raises RegError: done.
            return values
        values[convert_mbcs(name.lower())] = convert_mbcs(value)
        index += 1
|
| 92 |
+
|
| 93 |
+
def convert_mbcs(s):
    """Decode *s* with the Windows "mbcs" codec when it has a decode method.

    Objects without a decode method (e.g. str) are returned unchanged,
    as are byte strings that fail to decode.
    """
    decode = getattr(s, "decode", None)
    if decode is None:
        return s
    try:
        return decode("mbcs")
    except UnicodeError:
        return s
|
| 101 |
+
|
| 102 |
+
class MacroExpander:
    """Expand "$(Name)" macros using values read from the Visual Studio
    registry keys for a given VS *version*."""

    def __init__(self, version):
        # Mapping of "$(MacroName)" -> replacement text.
        self.macros = {}
        self.load_macros(version)

    def set_macro(self, macro, path, key):
        # Search each registry root for *path*; the first hit wins.
        for base in HKEYS:
            d = read_values(base, path)
            if d:
                self.macros["$(%s)" % macro] = d[key]
                break

    def load_macros(self, version):
        """Populate self.macros with the install directories for VS *version*."""
        vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
        net = r"Software\Microsoft\.NETFramework"
        self.set_macro("FrameworkDir", net, "installroot")
        try:
            # The SDK-root value name differs between .NET 1.0 and 1.1.
            if version > 7.0:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
            else:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
        except KeyError as exc: #
            raise DistutilsPlatformError(
            """Python was built with Visual Studio 2003;
extensions must be built with a compiler than can generate compatible binaries.
Visual Studio 2003 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")

        # Record the installed .NET framework version as a macro too.
        p = r"Software\Microsoft\NET Framework Setup\Product"
        for base in HKEYS:
            try:
                h = RegOpenKeyEx(base, p)
            except RegError:
                continue
            key = RegEnumKey(h, 0)
            d = read_values(base, r"%s\%s" % (p, key))
            self.macros["$(FrameworkVersion)"] = d["version"]

    def sub(self, s):
        """Return *s* with every known "$(Macro)" replaced by its value."""
        for k, v in self.macros.items():
            s = s.replace(k, v)
        return s
|
| 146 |
+
|
| 147 |
+
def get_build_version():
    """Return the version of MSVC that was used to build Python.

    For Python 2.3 and up, the version number is included in
    sys.version.  For earlier versions, assume the compiler is MSVC 6.
    Returns None when the embedded MSC version maps to no known release.
    """
    marker = "MSC v."
    pos = sys.version.find(marker)
    if pos == -1:
        # No MSC tag at all (e.g. a GCC build, or Python < 2.3): assume 6.
        return 6
    msc_ver = sys.version[pos + len(marker):].split(" ", 1)[0]
    major = int(msc_ver[:-2]) - 6
    if major >= 13:
        # v13 was skipped and should be v14
        major += 1
    minor = int(msc_ver[2:3]) / 10.0
    if major == 6:
        # paths are not affected by the minor version for MSVC 6
        minor = 0
    if major >= 6:
        return major + minor
    # otherwise we don't know what version of the compiler this is
    return None
|
| 171 |
+
|
| 172 |
+
def get_build_architecture():
    """Return the processor architecture Python was built for.

    Possible results are "Intel" or "AMD64".
    """
    marker = " bit ("
    start = sys.version.find(marker)
    if start == -1:
        # No architecture tag in sys.version: assume a 32-bit x86 build.
        return "Intel"
    end = sys.version.find(")", start)
    return sys.version[start + len(marker):end]
|
| 184 |
+
|
| 185 |
+
def normalize_and_reduce_paths(paths):
    """Return a list of normalized paths with duplicates removed.

    The current order of paths is maintained.  Normalization ensures
    that e.g. '/a' and '/a/' collapse to a single entry.
    """
    seen = set()
    result = []
    for path in paths:
        normalized = os.path.normpath(path)
        if normalized not in seen:
            seen.add(normalized)
            result.append(normalized)
    return result
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
class MSVCCompiler(CCompiler) :
    """Concrete class that implements an interface to Microsoft Visual C++,
       as defined by the CCompiler abstract class."""

    compiler_type = 'msvc'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = (_c_extensions + _cpp_extensions +
                      _rc_extensions + _mc_extensions)
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=0, force=0):
        """Record the compiler version/architecture Python was built with;
        actual toolchain discovery is deferred to initialize()."""
        CCompiler.__init__ (self, verbose, dry_run, force)
        self.__version = get_build_version()
        self.__arch = get_build_architecture()
        if self.__arch == "Intel":
            # x86
            if self.__version >= 7:
                self.__root = r"Software\Microsoft\VisualStudio"
                self.__macros = MacroExpander(self.__version)
            else:
                self.__root = r"Software\Microsoft\Devstudio"
                self.__product = "Visual Studio version %s" % self.__version
        else:
            # Win64. Assume this was built with the platform SDK
            self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)

        self.initialized = False

    def initialize(self):
        """Locate the MSVC executables and set up compile/link options.

        Called lazily from compile()/link() so that merely instantiating
        the compiler never requires a working toolchain.
        """
        self.__paths = []
        if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
            # Assume that the SDK set up everything alright; don't try to be
            # smarter
            self.cc = "cl.exe"
            self.linker = "link.exe"
            self.lib = "lib.exe"
            self.rc = "rc.exe"
            self.mc = "mc.exe"
        else:
            self.__paths = self.get_msvc_paths("path")

            if len(self.__paths) == 0:
                raise DistutilsPlatformError("Python was built with %s, "
                       "and extensions need to be built with the same "
                       "version of the compiler, but it isn't installed."
                       % self.__product)

            self.cc = self.find_exe("cl.exe")
            self.linker = self.find_exe("link.exe")
            self.lib = self.find_exe("lib.exe")
            self.rc = self.find_exe("rc.exe")   # resource compiler
            self.mc = self.find_exe("mc.exe")   # message compiler
            self.set_path_env_var('lib')
            self.set_path_env_var('include')

            # extend the MSVC path with the current path
            try:
                for p in os.environ['path'].split(';'):
                    self.__paths.append(p)
            except KeyError:
                pass
            self.__paths = normalize_and_reduce_paths(self.__paths)
            os.environ['path'] = ";".join(self.__paths)

        self.preprocess_options = None
        if self.__arch == "Intel":
            self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GX' ,
                                     '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
                                          '/Z7', '/D_DEBUG']
        else:
            # Win64
            self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' ,
                                     '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
                                          '/Z7', '/D_DEBUG']

        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
        if self.__version >= 7:
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
                ]
        else:
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG'
                ]
        self.ldflags_static = [ '/nologo']

        self.initialized = True

    # -- Worker methods ------------------------------------------------

    def object_filenames(self,
                         source_filenames,
                         strip_dir=0,
                         output_dir=''):
        """Map each source filename to the object-file name it produces
        (.res for resource/message sources, .obj otherwise)."""
        # Copied from ccompiler.py, extended to return .res as 'object'-file
        # for .rc input file
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            (base, ext) = os.path.splitext (src_name)
            base = os.path.splitdrive(base)[1] # Chop off the drive
            base = base[os.path.isabs(base):]  # If abs, chop off leading /
            if ext not in self.src_extensions:
                # Better to raise an exception instead of silently continuing
                # and later complain about sources and targets having
                # different lengths
                raise CompileError ("Don't know how to compile %s" % src_name)
            if strip_dir:
                base = os.path.basename (base)
            if ext in self._rc_extensions:
                obj_names.append (os.path.join (output_dir,
                                                base + self.res_extension))
            elif ext in self._mc_extensions:
                obj_names.append (os.path.join (output_dir,
                                                base + self.res_extension))
            else:
                obj_names.append (os.path.join (output_dir,
                                                base + self.obj_extension))
        return obj_names


    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):
        """Compile each source file to an object file, dispatching on the
        source extension: C via /Tc, C++ via /Tp, .rc via the resource
        compiler, and .mc via the message compiler followed by rc.
        Returns the list of object filenames."""

        if not self.initialized:
            self.initialize()
        compile_info = self._setup_compile(output_dir, macros, include_dirs,
                                           sources, depends, extra_postargs)
        macros, objects, extra_postargs, pp_opts, build = compile_info

        compile_opts = extra_preargs or []
        compile_opts.append ('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                # Object is already up to date (not scheduled in 'build').
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts +
                               [output_opt] + [input_opt])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc] +
                               ['-h', h_dir, '-r', rc_dir] + [src])
                    base, _ = os.path.splitext (os.path.basename (src))
                    rc_file = os.path.join (rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc] +
                               ["/fo" + obj] + [rc_file])

                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            else:
                # how to handle this file?
                raise CompileError("Don't know how to compile %s to %s"
                                   % (src, obj))

            output_opt = "/Fo" + obj
            try:
                self.spawn([self.cc] + compile_opts + pp_opts +
                           [input_opt, output_opt] +
                           extra_postargs)
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects


    def create_static_lib(self,
                          objects,
                          output_libname,
                          output_dir=None,
                          debug=0,
                          target_lang=None):
        """Bundle *objects* into a static library with lib.exe, skipping
        the step when the output is already up to date."""

        if not self.initialized:
            self.initialize()
        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        output_filename = self.library_filename(output_libname,
                                                output_dir=output_dir)

        if self._need_link(objects, output_filename):
            lib_args = objects + ['/OUT:' + output_filename]
            if debug:
                pass # XXX what goes here?
            try:
                self.spawn([self.lib] + lib_args)
            except DistutilsExecError as msg:
                raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)


    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir=None,
             libraries=None,
             library_dirs=None,
             runtime_library_dirs=None,
             export_symbols=None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):
        """Link *objects* into an executable or shared library with
        link.exe, skipping the step when the output is up to date."""

        if not self.initialized:
            self.initialize()
        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs,
                                        runtime_library_dirs)
        (libraries, library_dirs, runtime_library_dirs) = fixed_args

        if runtime_library_dirs:
            self.warn ("I don't know what to do with 'runtime_library_dirs': "
                       + str (runtime_library_dirs))

        lib_opts = gen_lib_options(self,
                                   library_dirs, runtime_library_dirs,
                                   libraries)
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            # Executables drop the leading '/DLL' flag.
            if target_desc == CCompiler.EXECUTABLE:
                if debug:
                    ldflags = self.ldflags_shared_debug[1:]
                else:
                    ldflags = self.ldflags_shared[1:]
            else:
                if debug:
                    ldflags = self.ldflags_shared_debug
                else:
                    ldflags = self.ldflags_shared

            export_opts = []
            for sym in (export_symbols or []):
                export_opts.append("/EXPORT:" + sym)

            ld_args = (ldflags + lib_opts + export_opts +
                       objects + ['/OUT:' + output_filename])

            # The MSVC linker generates .lib and .exp files, which cannot be
            # suppressed by any linker switches. The .lib files may even be
            # needed! Make sure they are generated in the temporary build
            # directory. Since they have different names for debug and release
            # builds, they can go into the same directory.
            if export_symbols is not None:
                (dll_name, dll_ext) = os.path.splitext(
                    os.path.basename(output_filename))
                implib_file = os.path.join(
                    os.path.dirname(objects[0]),
                    self.library_filename(dll_name))
                ld_args.append ('/IMPLIB:' + implib_file)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath(os.path.dirname(output_filename))
            try:
                self.spawn([self.linker] + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)

        else:
            log.debug("skipping %s (up-to-date)", output_filename)


    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options() function, in
    # ccompiler.py.

    def library_dir_option(self, dir):
        return "/LIBPATH:" + dir

    def runtime_library_dir_option(self, dir):
        raise DistutilsPlatformError(
              "don't know how to set runtime library search path for MSVC++")

    def library_option(self, lib):
        return self.library_filename(lib)


    def find_library_file(self, dirs, lib, debug=0):
        """Search *dirs* for library *lib*; return its path or None."""
        # Prefer a debugging library if found (and requested), but deal
        # with it if we don't have one.
        if debug:
            try_names = [lib + "_d", lib]
        else:
            try_names = [lib]
        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename (name))
                if os.path.exists(libfile):
                    return libfile
        else:
            # Oops, didn't find it in *any* of 'dirs'
            return None

    # Helper methods for using the MSVC registry settings

    def find_exe(self, exe):
        """Return path to an MSVC executable program.

        Tries to find the program in several places: first, one of the
        MSVC program search paths from the registry; next, the directories
        in the PATH environment variable.  If any of those work, return an
        absolute path that is known to exist.  If none of them work, just
        return the original program name, 'exe'.
        """
        for p in self.__paths:
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        # didn't find it; try existing path
        for p in os.environ['Path'].split(';'):
            fn = os.path.join(os.path.abspath(p),exe)
            if os.path.isfile(fn):
                return fn

        return exe

    def get_msvc_paths(self, path, platform='x86'):
        """Get a list of devstudio directories (include, lib or path).

        Return a list of strings.  The list will be empty if unable to
        access the registry or appropriate registry keys not found.
        """
        if not _can_read_reg:
            return []

        path = path + " dirs"
        if self.__version >= 7:
            key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
                   % (self.__root, self.__version))
        else:
            key = (r"%s\6.0\Build System\Components\Platforms"
                   r"\Win32 (%s)\Directories" % (self.__root, platform))

        for base in HKEYS:
            d = read_values(base, key)
            if d:
                if self.__version >= 7:
                    # VS7+ values may contain $(Macro) references.
                    return self.__macros.sub(d[path]).split(";")
                else:
                    return d[path].split(";")
        # MSVC 6 seems to create the registry entries we need only when
        # the GUI is run.
        if self.__version == 6:
            for base in HKEYS:
                if read_values(base, r"%s\6.0" % self.__root) is not None:
                    self.warn("It seems you have Visual Studio 6 installed, "
                        "but the expected registry settings are not present.\n"
                        "You must at least run the Visual Studio GUI once "
                        "so that these entries are created.")
                    break
        return []

    def set_path_env_var(self, name):
        """Set environment variable 'name' to an MSVC path type value.

        This is equivalent to a SET command prior to execution of spawned
        commands.
        """

        if name == "lib":
            p = self.get_msvc_paths("library")
        else:
            p = self.get_msvc_paths(name)
        if p:
            os.environ[name] = ';'.join(p)
|
| 636 |
+
|
| 637 |
+
|
| 638 |
+
if get_build_version() >= 8.0:
    # Python itself was built with MSVC 8.0 (VS 2005) or later: the registry
    # layout this module understands no longer applies, so swap in the
    # msvc9compiler implementations while keeping the old class reachable.
    # NOTE(review): get_build_version() can return None for an unrecognized
    # MSC version, which would make this comparison raise TypeError --
    # presumably unreachable for real builds; confirm.
    log.debug("Importing new compiler from distutils.msvc9compiler")
    OldMSVCCompiler = MSVCCompiler
    from distutils.msvc9compiler import MSVCCompiler
    # get_build_architecture not really relevant now we support cross-compile
    from distutils.msvc9compiler import MacroExpander
|
llava/lib/python3.10/distutils/spawn.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.spawn
|
| 2 |
+
|
| 3 |
+
Provides the 'spawn()' function, a front-end to various platform-
|
| 4 |
+
specific functions for launching another program in a sub-process.
|
| 5 |
+
Also provides the 'find_executable()' to search the path for a given
|
| 6 |
+
executable name.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import sys
|
| 10 |
+
import os
|
| 11 |
+
import subprocess
|
| 12 |
+
|
| 13 |
+
from distutils.errors import DistutilsPlatformError, DistutilsExecError
|
| 14 |
+
from distutils.debug import DEBUG
|
| 15 |
+
from distutils import log
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
if sys.platform == 'darwin':
    # Cached macOS deployment-target info, lazily filled in by spawn():
    # _cfg_target holds the configure-time MACOSX_DEPLOYMENT_TARGET (or '')
    # and _cfg_target_split its [major, minor, ...] integer form.
    _cfg_target = None
    _cfg_target_split = None
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def spawn(cmd, search_path=1, verbose=0, dry_run=0):
    """Run another program, specified as a command list 'cmd', in a new process.

    'cmd' is just the argument list for the new process, ie.
    cmd[0] is the program to run and cmd[1:] are the rest of its arguments.
    There is no way to run a program with a name different from that of its
    executable.

    If 'search_path' is true (the default), the system's executable
    search path will be used to find the program; otherwise, cmd[0]
    must be the exact path to the executable.  If 'dry_run' is true,
    the command will not actually be run.

    Raise DistutilsExecError if running the program fails in any way; just
    return on success.
    """
    # cmd is documented as a list, but just in case some code passes a tuple
    # in, protect our %-formatting code against horrible death
    cmd = list(cmd)

    log.info(' '.join(cmd))
    if dry_run:
        return

    if search_path:
        executable = find_executable(cmd[0])
        if executable is not None:
            cmd[0] = executable

    env = None
    if sys.platform == 'darwin':
        global _cfg_target, _cfg_target_split
        if _cfg_target is None:
            # First call: cache the configure-time deployment target.
            from distutils import sysconfig
            _cfg_target = sysconfig.get_config_var(
                                  'MACOSX_DEPLOYMENT_TARGET') or ''
            if _cfg_target:
                _cfg_target_split = [int(x) for x in _cfg_target.split('.')]
        if _cfg_target:
            # Ensure that the deployment target of the build process is not
            # less than 10.3 if the interpreter was built for 10.3 or later.
            # This ensures extension modules are built with correct
            # compatibility values, specifically LDSHARED which can use
            # '-undefined dynamic_lookup' which only works on >= 10.3.
            cur_target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', _cfg_target)
            cur_target_split = [int(x) for x in cur_target.split('.')]
            if _cfg_target_split[:2] >= [10, 3] and cur_target_split[:2] < [10, 3]:
                my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: '
                          'now "%s" but "%s" during configure;'
                          'must use 10.3 or later'
                          % (cur_target, _cfg_target))
                raise DistutilsPlatformError(my_msg)
            env = dict(os.environ,
                       MACOSX_DEPLOYMENT_TARGET=cur_target)

    try:
        proc = subprocess.Popen(cmd, env=env)
        proc.wait()
        exitcode = proc.returncode
    except OSError as exc:
        # Outside DEBUG mode, report only the program name, not the
        # full argument list.
        if not DEBUG:
            cmd = cmd[0]
        raise DistutilsExecError(
            "command %r failed: %s" % (cmd, exc.args[-1])) from exc

    if exitcode:
        if not DEBUG:
            cmd = cmd[0]
        raise DistutilsExecError(
              "command %r failed with exit code %s" % (cmd, exitcode))
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
def find_executable(executable, path=None):
    """Tries to find 'executable' in the directories listed in 'path'.

    'path' is a string of directories separated by os.pathsep; it defaults
    to os.environ['PATH'].  Returns the complete filename, or None if the
    executable cannot be found.
    """
    _, ext = os.path.splitext(executable)
    if sys.platform == 'win32' and ext != '.exe':
        executable += '.exe'

    # An existing (possibly relative) path wins outright.
    if os.path.isfile(executable):
        return executable

    if path is None:
        path = os.environ.get('PATH', None)
        if path is None:
            # PATH is unset: fall back to the system default search path.
            try:
                path = os.confstr("CS_PATH")
            except (AttributeError, ValueError):
                # os.confstr() or CS_PATH is not available
                path = os.defpath
        # bpo-35755: Don't use os.defpath if the PATH environment variable is
        # set to an empty string

    # PATH='' doesn't match, whereas PATH=':' looks in the current directory
    if not path:
        return None

    for directory in path.split(os.pathsep):
        candidate = os.path.join(directory, executable)
        if os.path.isfile(candidate):
            # the file exists, we have a shot at spawn working
            return candidate
    return None
|
llava/lib/python3.10/distutils/sysconfig.py
ADDED
|
@@ -0,0 +1,353 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Provide access to Python's configuration information. The specific
|
| 2 |
+
configuration variables available depend heavily on the platform and
|
| 3 |
+
configuration. The values may be retrieved using
|
| 4 |
+
get_config_var(name), and the list of variables is available via
|
| 5 |
+
get_config_vars().keys(). Additional convenience functions are also
|
| 6 |
+
available.
|
| 7 |
+
|
| 8 |
+
Written by: Fred L. Drake, Jr.
|
| 9 |
+
Email: <fdrake@acm.org>
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
import _imp
|
| 13 |
+
import os
|
| 14 |
+
import re
|
| 15 |
+
import sys
|
| 16 |
+
import warnings
|
| 17 |
+
|
| 18 |
+
from functools import partial
|
| 19 |
+
|
| 20 |
+
from .errors import DistutilsPlatformError
|
| 21 |
+
|
| 22 |
+
from sysconfig import (
|
| 23 |
+
_PREFIX as PREFIX,
|
| 24 |
+
_BASE_PREFIX as BASE_PREFIX,
|
| 25 |
+
_EXEC_PREFIX as EXEC_PREFIX,
|
| 26 |
+
_BASE_EXEC_PREFIX as BASE_EXEC_PREFIX,
|
| 27 |
+
_PROJECT_BASE as project_base,
|
| 28 |
+
_PYTHON_BUILD as python_build,
|
| 29 |
+
_init_posix as sysconfig_init_posix,
|
| 30 |
+
parse_config_h as sysconfig_parse_config_h,
|
| 31 |
+
|
| 32 |
+
_init_non_posix,
|
| 33 |
+
_is_python_source_dir,
|
| 34 |
+
_sys_home,
|
| 35 |
+
|
| 36 |
+
_variable_rx,
|
| 37 |
+
_findvar1_rx,
|
| 38 |
+
_findvar2_rx,
|
| 39 |
+
|
| 40 |
+
expand_makefile_vars,
|
| 41 |
+
is_python_build,
|
| 42 |
+
get_config_h_filename,
|
| 43 |
+
get_config_var,
|
| 44 |
+
get_config_vars,
|
| 45 |
+
get_makefile_filename,
|
| 46 |
+
get_python_version,
|
| 47 |
+
)
|
| 48 |
+
|
| 49 |
+
# Pull the configuration dictionary through the public accessor rather than
# importing sysconfig._CONFIG_VARS directly: the accessor guarantees the
# global dictionary has been initialized before we take a reference to it.
_config_vars = get_config_vars()

if os.name == "nt":
    # Windows-only helper needed when running from a PCbuild layout.
    from sysconfig import _fix_pcbuild

warnings.warn(
    'The distutils.sysconfig module is deprecated, use sysconfig instead',
    DeprecationWarning,
    stacklevel=2
)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
# Following functions are the same as in sysconfig but with different API
|
| 66 |
+
def parse_config_h(fp, g=None):
    """Parse a config.h-style file and return name/value pairs.

    Thin compatibility wrapper around sysconfig.parse_config_h() that keeps
    the historical distutils argument name ``g`` for the target dictionary.
    """
    return sysconfig_parse_config_h(fp, vars=g)
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
# Historical distutils entry points, expressed as curried versions of the
# sysconfig implementations so they operate on this module's config dict.
_python_build = partial(is_python_build, check_home=True)
_init_posix = partial(sysconfig_init_posix, _config_vars)
_init_nt = partial(_init_non_posix, _config_vars)
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
# Similar function is also implemented in sysconfig as _parse_makefile
|
| 76 |
+
# but without the parsing capabilities of distutils.text_file.TextFile.
|
| 77 |
+
def parse_makefile(fn, g=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    from distutils.text_file import TextFile
    reader = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1,
                      errors="surrogateescape")

    if g is None:
        g = {}
    resolved = {}   # variables whose values are fully expanded
    pending = {}    # variables still containing $(...) references

    # First pass: split assignments into immediately-usable values and
    # ones that still need variable expansion.
    while True:
        line = reader.readline()
        if line is None:  # eof
            break
        match = re.match(_variable_rx, line)
        if match:
            var, raw = match.group(1, 2)
            raw = raw.strip()
            # `$$' is a literal `$' in make; ignore it when scanning for refs
            if "$" in raw.replace('$$', ''):
                pending[var] = raw
            else:
                try:
                    raw = int(raw)
                except ValueError:
                    # insert literal `$'
                    resolved[var] = raw.replace('$$', '$')
                else:
                    resolved[var] = raw

    # Variables with a 'PY_' prefix in the makefile.  These need to
    # be made available without that prefix through sysconfig.
    # Special care is needed to ensure that variable expansion works, even
    # if the expansion uses the name without a prefix.
    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')

    # Second pass: repeatedly substitute references until nothing is pending.
    while pending:
        for name in list(pending):
            text = pending[name]
            match = (re.search(_findvar1_rx, text)
                     or re.search(_findvar2_rx, text))
            if match:
                ref = match.group(1)
                expandable = True
                if ref in resolved:
                    replacement = str(resolved[ref])
                elif ref in pending:
                    # get it on a subsequent round
                    expandable = False
                elif ref in os.environ:
                    # do it like make: fall back to environment
                    replacement = os.environ[ref]
                elif ref in renamed_variables:
                    if name.startswith('PY_') and \
                            name[3:] in renamed_variables:
                        replacement = ""
                    elif 'PY_' + ref in pending:
                        expandable = False
                    else:
                        replacement = str(resolved['PY_' + ref])
                else:
                    resolved[ref] = replacement = ""
                if expandable:
                    tail = text[match.end():]
                    text = text[:match.start()] + replacement + tail
                    if "$" in tail:
                        pending[name] = text
                    else:
                        try:
                            text = int(text)
                        except ValueError:
                            resolved[name] = text.strip()
                        else:
                            resolved[name] = text
                        del pending[name]

                        if name.startswith('PY_') \
                                and name[3:] in renamed_variables:
                            name = name[3:]
                            if name not in resolved:
                                resolved[name] = text
            else:
                # bogus variable reference; just drop it since we can't deal
                del pending[name]

    reader.close()

    # strip spurious spaces
    for k, v in resolved.items():
        if isinstance(v, str):
            resolved[k] = v.strip()

    # save the results in the global dictionary
    g.update(resolved)
    return g
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
# Following functions are deprecated together with this module and they
|
| 183 |
+
# have no direct replacement
|
| 184 |
+
|
| 185 |
+
# Calculate the build qualifier flags if they are defined.  Adding the flags
# to the include and lib directories only makes sense for an installation,
# not an in-source build.  sys.abiflags only exists on configure-based
# builds, so fall back to '' when the attribute is absent.
build_flags = '' if python_build else getattr(sys, 'abiflags', '')
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def customize_compiler(compiler):
    """Do any platform-specific customization of a CCompiler instance.

    Mainly needed on Unix, so we can plug in the information that
    varies across Unices and is stored in Python's Makefile.
    """
    if compiler.compiler_type != "unix":
        return

    if sys.platform == "darwin" and \
            not _config_vars.get('CUSTOMIZED_OSX_COMPILER'):
        # Perform first-time customization of compiler-related config vars
        # on OS X now that we know we need a compiler.  This is primarily
        # to support Pythons from binary installers: the kind and paths to
        # build tools on the user system may vary significantly from the
        # system that Python itself was built on, and the user OS version
        # and build tools may not support the same set of CPU
        # architectures for universal builds.
        import _osx_support
        _osx_support.customize_compiler(_config_vars)
        _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'

    (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \
        get_config_vars('CC', 'CXX', 'CFLAGS', 'CCSHARED', 'LDSHARED',
                        'SHLIB_SUFFIX', 'AR', 'ARFLAGS')

    env = os.environ
    if 'CC' in env:
        newcc = env['CC']
        if 'LDSHARED' not in env and ldshared.startswith(cc):
            # If CC is overridden, use that as the default command for
            # LDSHARED as well.
            ldshared = newcc + ldshared[len(cc):]
        cc = newcc
    cxx = env.get('CXX', cxx)
    ldshared = env.get('LDSHARED', ldshared)
    # CPP defaults to the C compiler in preprocess-only mode (not always
    # correct, but matches historical behavior).
    cpp = env.get('CPP', cc + " -E")
    if 'LDFLAGS' in env:
        ldshared = ldshared + ' ' + env['LDFLAGS']
    if 'CFLAGS' in env:
        cflags = cflags + ' ' + env['CFLAGS']
        ldshared = ldshared + ' ' + env['CFLAGS']
    if 'CPPFLAGS' in env:
        cpp = cpp + ' ' + env['CPPFLAGS']
        cflags = cflags + ' ' + env['CPPFLAGS']
        ldshared = ldshared + ' ' + env['CPPFLAGS']
    ar = env.get('AR', ar)
    archiver = ar + ' ' + env.get('ARFLAGS', ar_flags)

    cc_cmd = cc + ' ' + cflags
    compiler.set_executables(
        preprocessor=cpp,
        compiler=cc_cmd,
        compiler_so=cc_cmd + ' ' + ccshared,
        compiler_cxx=cxx,
        linker_so=ldshared,
        linker_exe=cc,
        archiver=archiver)

    if 'RANLIB' in env and 'ranlib' in compiler.executables:
        compiler.set_executables(ranlib=env['RANLIB'])

    compiler.shared_lib_extension = shlib_suffix
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
def get_python_inc(plat_specific=0, prefix=None):
    """Return the directory containing installed Python header files.

    If 'plat_specific' is false (the default), this is the path to the
    non-platform-specific header files, i.e. Python.h and so on;
    otherwise, this is the path to platform-specific header files
    (namely pyconfig.h).

    If 'prefix' is supplied, use it instead of sys.base_prefix or
    sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
    """
    if prefix is None:
        # Fixed: use a real conditional expression instead of the fragile
        # `cond and a or b` idiom, which silently picks the fallback
        # whenever the first value is falsy.
        prefix = BASE_EXEC_PREFIX if plat_specific else BASE_PREFIX
    if os.name == "posix":
        if python_build:
            # Assume the executable is in the build directory.  The
            # pyconfig.h file should be in the same directory.  Since
            # the build directory may not be the source directory, we
            # must use "srcdir" from the makefile to find the "Include"
            # directory.
            if plat_specific:
                return _sys_home or project_base
            incdir = os.path.join(get_config_var('srcdir'), 'Include')
            return os.path.normpath(incdir)
        python_dir = 'python' + get_python_version() + build_flags
        return os.path.join(prefix, "include", python_dir)
    if os.name == "nt":
        if python_build:
            # Include both the include and PC dir to ensure we can find
            # pyconfig.h
            return (os.path.join(prefix, "include") + os.path.pathsep +
                    os.path.join(prefix, "PC"))
        return os.path.join(prefix, "include")
    raise DistutilsPlatformError(
        "I don't know where Python installs its C header files "
        "on platform '%s'" % os.name)
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
    """Return the directory containing the Python library (standard or
    site additions).

    If 'plat_specific' is true, return the directory containing
    platform-specific modules, i.e. any module from a non-pure-Python
    module distribution; otherwise, return the platform-shared library
    directory.  If 'standard_lib' is true, return the directory
    containing standard Python library modules; otherwise, return the
    directory for site-specific modules.

    If 'prefix' is supplied, use it instead of sys.base_prefix or
    sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
    """
    if prefix is None:
        # Fixed: conditional expressions instead of the fragile
        # `cond and a or b` idiom, which silently picks the fallback
        # whenever the first value is falsy.
        if standard_lib:
            prefix = BASE_EXEC_PREFIX if plat_specific else BASE_PREFIX
        else:
            prefix = EXEC_PREFIX if plat_specific else PREFIX

    if os.name == "posix":
        # Platform-specific modules (any module from a non-pure-Python
        # module distribution) and the standard library live under
        # sys.platlibdir (e.g. "lib64" on some distros); pure-Python
        # packages always go under "lib".
        libdir = sys.platlibdir if plat_specific or standard_lib else "lib"
        libpython = os.path.join(prefix, libdir,
                                 "python" + get_python_version())
        if standard_lib:
            return libpython
        return os.path.join(libpython, "site-packages")
    if os.name == "nt":
        if standard_lib:
            return os.path.join(prefix, "Lib")
        return os.path.join(prefix, "Lib", "site-packages")
    raise DistutilsPlatformError(
        "I don't know where Python installs its library "
        "on platform '%s'" % os.name)
|
llava/lib/python3.10/distutils/tests/__init__.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Test suite for distutils.
|
| 2 |
+
|
| 3 |
+
This test suite consists of a collection of test modules in the
|
| 4 |
+
distutils.tests package. Each test module has a name starting with
|
| 5 |
+
'test' and contains a function test_suite(). The function is expected
|
| 6 |
+
to return an initialized unittest.TestSuite instance.
|
| 7 |
+
|
| 8 |
+
Tests for the command classes in the distutils.command package are
|
| 9 |
+
included in distutils.tests as well, instead of using a separate
|
| 10 |
+
distutils.command.tests package, since command identification is done
|
| 11 |
+
by import rather than matching pre-defined names.
|
| 12 |
+
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
import os
|
| 16 |
+
import sys
|
| 17 |
+
import unittest
|
| 18 |
+
from test.support import run_unittest
|
| 19 |
+
from test.support.warnings_helper import save_restore_warnings_filters
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
here = os.path.dirname(__file__) or os.curdir
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def test_suite():
    """Collect the suites from every test module in this package."""
    suite = unittest.TestSuite()
    for entry in os.listdir(here):
        if not (entry.startswith("test") and entry.endswith(".py")):
            continue
        modname = "distutils.tests." + entry[:-3]
        # bpo-40055: Save/restore warnings filters to leave them unchanged.
        # Importing tests imports docutils which imports pkg_resources
        # which adds a warnings filter.
        with save_restore_warnings_filters():
            __import__(modname)
        suite.addTest(sys.modules[modname].test_suite())
    return suite
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
if __name__ == "__main__":
|
| 41 |
+
run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/includetest.rst
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
This should be included.
|
llava/lib/python3.10/distutils/tests/support.py
ADDED
|
@@ -0,0 +1,209 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Support code for distutils test cases."""
|
| 2 |
+
import os
|
| 3 |
+
import sys
|
| 4 |
+
import shutil
|
| 5 |
+
import tempfile
|
| 6 |
+
import unittest
|
| 7 |
+
import sysconfig
|
| 8 |
+
from copy import deepcopy
|
| 9 |
+
from test.support import os_helper
|
| 10 |
+
|
| 11 |
+
from distutils import log
|
| 12 |
+
from distutils.log import DEBUG, INFO, WARN, ERROR, FATAL
|
| 13 |
+
from distutils.core import Distribution
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class LoggingSilencer(object):
    """Mix-in that silences distutils logging and records messages.

    Intended to be combined with unittest.TestCase; recorded entries can
    be inspected with get_logs() and discarded with clear_logs().
    """

    def setUp(self):
        super().setUp()
        self.logs = []
        self.threshold = log.set_threshold(log.FATAL)
        # Monkey-patch the low-level logger so every message is captured;
        # when distutils.log is replaced by logging this patch goes away.
        self._old_log = log.Log._log
        log.Log._log = self._log

    def tearDown(self):
        log.set_threshold(self.threshold)
        log.Log._log = self._old_log
        super().tearDown()

    def _log(self, level, msg, args):
        # Validate before recording so bad calls surface in tests.
        if level not in (DEBUG, INFO, WARN, ERROR, FATAL):
            raise ValueError('%s wrong log level' % str(level))
        if not isinstance(msg, str):
            raise TypeError("msg should be str, not '%.200s'"
                            % (type(msg).__name__))
        self.logs.append((level, msg, args))

    def get_logs(self, *levels):
        """Return the formatted messages recorded at the given levels."""
        return [msg % args
                for level, msg, args in self.logs
                if level in levels]

    def clear_logs(self):
        """Forget all recorded messages."""
        self.logs = []
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class TempdirManager(object):
    """Mix-in class that handles temporary directories for test cases.

    This is intended to be used with unittest.TestCase.
    """

    def setUp(self):
        super().setUp()
        self.old_cwd = os.getcwd()
        self.tempdirs = []

    def tearDown(self):
        # Restore working dir, for Solaris and derivatives, where rmdir()
        # on the current directory fails.
        os.chdir(self.old_cwd)
        super().tearDown()
        while self.tempdirs:
            os_helper.rmtree(self.tempdirs.pop())

    def mkdtemp(self):
        """Create a temporary directory that will be cleaned up.

        Returns the path of the directory.
        """
        path = tempfile.mkdtemp()
        self.tempdirs.append(path)
        return path

    def write_file(self, path, content='xxx'):
        """Write *content* to a file at *path*.

        path can be a string or a sequence of path segments.
        """
        if isinstance(path, (list, tuple)):
            path = os.path.join(*path)
        with open(path, 'w') as stream:
            stream.write(content)

    def create_dist(self, pkg_name='foo', **kw):
        """Will generate a test environment.

        This function creates:
         - a Distribution instance using keywords
         - a temporary directory with a package structure

        It returns the package directory and the distribution
        instance.
        """
        base_dir = self.mkdtemp()
        pkg_dir = os.path.join(base_dir, pkg_name)
        os.mkdir(pkg_dir)
        return pkg_dir, Distribution(attrs=kw)
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
class DummyCommand:
    """Class to store options for retrieval via set_undefined_options()."""

    def __init__(self, **kwargs):
        # Each keyword becomes an attribute of the same name.
        self.__dict__.update(kwargs)

    def ensure_finalized(self):
        # Deliberate no-op: real commands finalize options here.
        pass
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
class EnvironGuard(object):
    """Mix-in that snapshots os.environ in setUp and restores it later."""

    def setUp(self):
        super().setUp()
        self.old_environ = deepcopy(os.environ)

    def tearDown(self):
        # Put back any variable that was changed or removed by the test.
        for key, value in self.old_environ.items():
            if os.environ.get(key) != value:
                os.environ[key] = value

        # Drop any variable the test added.
        for key in tuple(os.environ.keys()):
            if key not in self.old_environ:
                del os.environ[key]

        super().tearDown()
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def copy_xxmodule_c(directory):
    """Helper for tests that need the xxmodule.c source file.

    Example use:

        def test_compile(self):
            copy_xxmodule_c(self.tmpdir)
            self.assertIn('xxmodule.c', os.listdir(self.tmpdir))

    If the source file can be found, it will be copied to *directory*.
    If not, the test will be skipped.  Errors during copy are not caught.
    """
    source = _get_xxmodule_path()
    if source is None:
        raise unittest.SkipTest('cannot find xxmodule.c (test must run in '
                                'the python build dir)')
    shutil.copy(source, directory)
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
def _get_xxmodule_path():
|
| 159 |
+
srcdir = sysconfig.get_config_var('srcdir')
|
| 160 |
+
candidates = [
|
| 161 |
+
# use installed copy if available
|
| 162 |
+
os.path.join(os.path.dirname(__file__), 'xxmodule.c'),
|
| 163 |
+
# otherwise try using copy from build directory
|
| 164 |
+
os.path.join(srcdir, 'Modules', 'xxmodule.c'),
|
| 165 |
+
# srcdir mysteriously can be $srcdir/Lib/distutils/tests when
|
| 166 |
+
# this file is run from its parent directory, so walk up the
|
| 167 |
+
# tree to find the real srcdir
|
| 168 |
+
os.path.join(srcdir, '..', '..', '..', 'Modules', 'xxmodule.c'),
|
| 169 |
+
]
|
| 170 |
+
for path in candidates:
|
| 171 |
+
if os.path.exists(path):
|
| 172 |
+
return path
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def fixup_build_ext(cmd):
    """Function needed to make build_ext tests pass.

    When Python was built with --enable-shared on Unix, -L. is not enough to
    find libpython<blah>.so, because regrtest runs in a tempdir, not in the
    source directory where the .so lives.

    When Python was built with in debug mode on Windows, build_ext commands
    need their debug attribute set, and it is not done automatically for
    some reason.

    This function handles both of these things.  Example use:

        cmd = build_ext(dist)
        support.fixup_build_ext(cmd)
        cmd.ensure_finalized()

    Unlike most other Unix platforms, Mac OS X embeds absolute paths
    to shared libraries into executables, so the fixup is not needed there.
    """
    if os.name == 'nt':
        cmd.debug = sys.executable.endswith('_d.exe')
    elif sysconfig.get_config_var('Py_ENABLE_SHARED'):
        # To further add to the shared builds fun on Unix, we can't just add
        # library_dirs to the Extension() instance because that doesn't get
        # plumbed through to the final compiler command.
        runshared = sysconfig.get_config_var('RUNSHARED')
        if runshared is None:
            cmd.library_dirs = ['.']
        elif sys.platform == 'darwin':
            cmd.library_dirs = []
        else:
            # RUNSHARED looks like "LD_LIBRARY_PATH=dir1:dir2"; keep the
            # non-empty directories from the value part.
            _, _, value = runshared.partition('=')
            cmd.library_dirs = [d for d in value.split(os.pathsep) if d]
|
llava/lib/python3.10/distutils/tests/test_archive_util.py
ADDED
|
@@ -0,0 +1,396 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""Tests for distutils.archive_util."""
|
| 3 |
+
import unittest
|
| 4 |
+
import os
|
| 5 |
+
import sys
|
| 6 |
+
import tarfile
|
| 7 |
+
from os.path import splitdrive
|
| 8 |
+
import warnings
|
| 9 |
+
|
| 10 |
+
from distutils import archive_util
|
| 11 |
+
from distutils.archive_util import (check_archive_formats, make_tarball,
|
| 12 |
+
make_zipfile, make_archive,
|
| 13 |
+
ARCHIVE_FORMATS)
|
| 14 |
+
from distutils.spawn import find_executable, spawn
|
| 15 |
+
from distutils.tests import support
|
| 16 |
+
from test.support import run_unittest, patch
|
| 17 |
+
from test.support.os_helper import change_cwd
|
| 18 |
+
from test.support.warnings_helper import check_warnings
|
| 19 |
+
|
| 20 |
+
try:
|
| 21 |
+
import grp
|
| 22 |
+
import pwd
|
| 23 |
+
UID_GID_SUPPORT = True
|
| 24 |
+
except ImportError:
|
| 25 |
+
UID_GID_SUPPORT = False
|
| 26 |
+
|
| 27 |
+
try:
|
| 28 |
+
import zipfile
|
| 29 |
+
ZIP_SUPPORT = True
|
| 30 |
+
except ImportError:
|
| 31 |
+
ZIP_SUPPORT = find_executable('zip')
|
| 32 |
+
|
| 33 |
+
try:
|
| 34 |
+
import zlib
|
| 35 |
+
ZLIB_SUPPORT = True
|
| 36 |
+
except ImportError:
|
| 37 |
+
ZLIB_SUPPORT = False
|
| 38 |
+
|
| 39 |
+
try:
|
| 40 |
+
import bz2
|
| 41 |
+
except ImportError:
|
| 42 |
+
bz2 = None
|
| 43 |
+
|
| 44 |
+
try:
|
| 45 |
+
import lzma
|
| 46 |
+
except ImportError:
|
| 47 |
+
lzma = None
|
| 48 |
+
|
| 49 |
+
def can_fs_encode(filename):
    """
    Return True if the filename can be saved in the file system.
    """
    if os.path.supports_unicode_filenames:
        return True
    encoding = sys.getfilesystemencoding()
    try:
        filename.encode(encoding)
    except UnicodeEncodeError:
        return False
    else:
        return True
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class ArchiveUtilTestCase(support.TempdirManager,
|
| 63 |
+
support.LoggingSilencer,
|
| 64 |
+
unittest.TestCase):
|
| 65 |
+
|
| 66 |
+
@unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
|
| 67 |
+
def test_make_tarball(self, name='archive'):
|
| 68 |
+
# creating something to tar
|
| 69 |
+
tmpdir = self._create_files()
|
| 70 |
+
self._make_tarball(tmpdir, name, '.tar.gz')
|
| 71 |
+
# trying an uncompressed one
|
| 72 |
+
self._make_tarball(tmpdir, name, '.tar', compress=None)
|
| 73 |
+
|
| 74 |
+
@unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
|
| 75 |
+
def test_make_tarball_gzip(self):
|
| 76 |
+
tmpdir = self._create_files()
|
| 77 |
+
self._make_tarball(tmpdir, 'archive', '.tar.gz', compress='gzip')
|
| 78 |
+
|
| 79 |
+
@unittest.skipUnless(bz2, 'Need bz2 support to run')
|
| 80 |
+
def test_make_tarball_bzip2(self):
|
| 81 |
+
tmpdir = self._create_files()
|
| 82 |
+
self._make_tarball(tmpdir, 'archive', '.tar.bz2', compress='bzip2')
|
| 83 |
+
|
| 84 |
+
@unittest.skipUnless(lzma, 'Need lzma support to run')
|
| 85 |
+
def test_make_tarball_xz(self):
|
| 86 |
+
tmpdir = self._create_files()
|
| 87 |
+
self._make_tarball(tmpdir, 'archive', '.tar.xz', compress='xz')
|
| 88 |
+
|
| 89 |
+
@unittest.skipUnless(can_fs_encode('årchiv'),
|
| 90 |
+
'File system cannot handle this filename')
|
| 91 |
+
def test_make_tarball_latin1(self):
|
| 92 |
+
"""
|
| 93 |
+
Mirror test_make_tarball, except filename contains latin characters.
|
| 94 |
+
"""
|
| 95 |
+
self.test_make_tarball('årchiv') # note this isn't a real word
|
| 96 |
+
|
| 97 |
+
@unittest.skipUnless(can_fs_encode('のアーカイブ'),
|
| 98 |
+
'File system cannot handle this filename')
|
| 99 |
+
def test_make_tarball_extended(self):
|
| 100 |
+
"""
|
| 101 |
+
Mirror test_make_tarball, except filename contains extended
|
| 102 |
+
characters outside the latin charset.
|
| 103 |
+
"""
|
| 104 |
+
self.test_make_tarball('のアーカイブ') # japanese for archive
|
| 105 |
+
|
| 106 |
+
def _make_tarball(self, tmpdir, target_name, suffix, **kwargs):
|
| 107 |
+
tmpdir2 = self.mkdtemp()
|
| 108 |
+
unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0],
|
| 109 |
+
"source and target should be on same drive")
|
| 110 |
+
|
| 111 |
+
base_name = os.path.join(tmpdir2, target_name)
|
| 112 |
+
|
| 113 |
+
# working with relative paths to avoid tar warnings
|
| 114 |
+
with change_cwd(tmpdir):
|
| 115 |
+
make_tarball(splitdrive(base_name)[1], 'dist', **kwargs)
|
| 116 |
+
|
| 117 |
+
# check if the compressed tarball was created
|
| 118 |
+
tarball = base_name + suffix
|
| 119 |
+
self.assertTrue(os.path.exists(tarball))
|
| 120 |
+
self.assertEqual(self._tarinfo(tarball), self._created_files)
|
| 121 |
+
|
| 122 |
+
def _tarinfo(self, path):
|
| 123 |
+
tar = tarfile.open(path)
|
| 124 |
+
try:
|
| 125 |
+
names = tar.getnames()
|
| 126 |
+
names.sort()
|
| 127 |
+
return names
|
| 128 |
+
finally:
|
| 129 |
+
tar.close()
|
| 130 |
+
|
| 131 |
+
_zip_created_files = ['dist/', 'dist/file1', 'dist/file2',
|
| 132 |
+
'dist/sub/', 'dist/sub/file3', 'dist/sub2/']
|
| 133 |
+
_created_files = [p.rstrip('/') for p in _zip_created_files]
|
| 134 |
+
|
| 135 |
+
def _create_files(self):
    """Populate a fresh temp directory with a small 'dist' tree; return its root."""
    root = self.mkdtemp()
    dist = os.path.join(root, 'dist')
    os.mkdir(dist)
    # two files at the top level, one in a subdir, plus an empty subdir
    self.write_file([dist, 'file1'], 'xxx')
    self.write_file([dist, 'file2'], 'xxx')
    os.mkdir(os.path.join(dist, 'sub'))
    self.write_file([dist, 'sub', 'file3'], 'xxx')
    os.mkdir(os.path.join(dist, 'sub2'))
    return root
|
| 146 |
+
|
| 147 |
+
@unittest.skipUnless(find_executable('tar') and find_executable('gzip')
                     and ZLIB_SUPPORT,
                     'Need the tar, gzip and zlib command to run')
def test_tarfile_vs_tar(self):
    """make_tarball output must list the same members as system tar/gzip."""
    tmpdir = self._create_files()
    tmpdir2 = self.mkdtemp()
    base_name = os.path.join(tmpdir2, 'archive')

    with change_cwd(tmpdir):
        make_tarball(base_name, 'dist')

    # the compressed tarball must exist
    tarball = base_name + '.tar.gz'
    self.assertTrue(os.path.exists(tarball))

    # build a reference archive with the external tools
    tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
    tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
    gzip_cmd = ['gzip', '-f', '-9', 'archive2.tar']
    with change_cwd(tmpdir):
        spawn(tar_cmd)
        spawn(gzip_cmd)

    self.assertTrue(os.path.exists(tarball2))
    # both archives must contain exactly the expected members
    self.assertEqual(self._tarinfo(tarball), self._created_files)
    self.assertEqual(self._tarinfo(tarball2), self._created_files)

    # an uncompressed archive works too
    base_name = os.path.join(tmpdir2, 'archive')
    with change_cwd(tmpdir):
        make_tarball(base_name, 'dist', compress=None)
    tarball = base_name + '.tar'
    self.assertTrue(os.path.exists(tarball))

    # dry_run must not fail (the .tar created above still exists)
    base_name = os.path.join(tmpdir2, 'archive')
    with change_cwd(tmpdir):
        make_tarball(base_name, 'dist', compress=None, dry_run=True)
    tarball = base_name + '.tar'
    self.assertTrue(os.path.exists(tarball))
|
| 203 |
+
|
| 204 |
+
@unittest.skipUnless(find_executable('compress'),
                     'The compress program is required')
def test_compress_deprecated(self):
    """compress='compress' still works but emits exactly one warning."""
    tmpdir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')

    # using compress and testing the PendingDeprecationWarning
    with change_cwd(tmpdir):
        with check_warnings() as w:
            warnings.simplefilter("always")
            make_tarball(base_name, 'dist', compress='compress')
    tarball = base_name + '.tar.Z'
    self.assertTrue(os.path.exists(tarball))
    self.assertEqual(len(w.warnings), 1)

    # same check under dry_run: nothing written, warning still raised
    os.remove(tarball)
    with change_cwd(tmpdir):
        with check_warnings() as w:
            warnings.simplefilter("always")
            make_tarball(base_name, 'dist', compress='compress',
                         dry_run=True)
    self.assertFalse(os.path.exists(tarball))
    self.assertEqual(len(w.warnings), 1)
|
| 236 |
+
|
| 237 |
+
@unittest.skipUnless(ZIP_SUPPORT and ZLIB_SUPPORT,
                     'Need zip and zlib support to run')
def test_make_zipfile(self):
    """make_zipfile produces a .zip holding exactly the expected entries."""
    tmpdir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    with change_cwd(tmpdir):
        make_zipfile(base_name, 'dist')

    # the zip archive must exist and list the expected names
    generated = base_name + '.zip'
    self.assertTrue(os.path.exists(generated))
    with zipfile.ZipFile(generated) as zf:
        self.assertEqual(sorted(zf.namelist()), self._zip_created_files)
|
| 251 |
+
|
| 252 |
+
@unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
def test_make_zipfile_no_zlib(self):
    """Without zlib, make_zipfile must fall back to ZIP_STORED archives."""
    patch(self, archive_util.zipfile, 'zlib', None)  # force zlib ImportError

    recorded = []
    real_zipfile = zipfile.ZipFile
    def spying_zipfile(*args, **kwargs):
        # record only the uncompressed (ZIP_STORED) instantiations
        if kwargs.get('compression', None) == zipfile.ZIP_STORED:
            recorded.append((args, kwargs))
        return real_zipfile(*args, **kwargs)

    patch(self, archive_util.zipfile, 'ZipFile', spying_zipfile)

    # create something to tar and compress
    tmpdir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    with change_cwd(tmpdir):
        make_zipfile(base_name, 'dist')

    generated = base_name + '.zip'
    # exactly one ZIP_STORED ZipFile must have been opened for writing
    self.assertEqual(recorded,
                     [((generated, "w"), {'compression': zipfile.ZIP_STORED})])
    self.assertTrue(os.path.exists(generated))
    with zipfile.ZipFile(generated) as zf:
        self.assertEqual(sorted(zf.namelist()), self._zip_created_files)
|
| 277 |
+
|
| 278 |
+
def test_check_archive_formats(self):
    """check_archive_formats returns the first unknown format, else None."""
    self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']),
                     'xxx')
    self.assertIsNone(check_archive_formats(['gztar', 'bztar', 'xztar',
                                             'ztar', 'tar', 'zip']))
|
| 283 |
+
|
| 284 |
+
def test_make_archive(self):
    """An unknown format name makes make_archive raise ValueError."""
    staging = self.mkdtemp()
    base_name = os.path.join(staging, 'archive')
    self.assertRaises(ValueError, make_archive, base_name, 'xxx')
|
| 288 |
+
|
| 289 |
+
def test_make_archive_cwd(self):
    """make_archive must restore the cwd even when the archiver fails."""
    current_dir = os.getcwd()
    def _explode(*args, **kwargs):
        raise RuntimeError()
    # register a format whose handler always raises
    ARCHIVE_FORMATS['xxx'] = (_explode, [], 'xxx file')
    try:
        try:
            make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
        except:
            pass
        # the failure must not leave us in root_dir
        self.assertEqual(os.getcwd(), current_dir)
    finally:
        del ARCHIVE_FORMATS['xxx']
|
| 302 |
+
|
| 303 |
+
def test_make_archive_tar(self):
    """make_archive('tar') yields archive.tar with the expected members."""
    base_dir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    archive_path = make_archive(base_name, 'tar', base_dir, 'dist')
    self.assertTrue(os.path.exists(archive_path))
    self.assertEqual(os.path.basename(archive_path), 'archive.tar')
    self.assertEqual(self._tarinfo(archive_path), self._created_files)
|
| 310 |
+
|
| 311 |
+
@unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
def test_make_archive_gztar(self):
    """make_archive('gztar') yields archive.tar.gz with the expected members."""
    base_dir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    archive_path = make_archive(base_name, 'gztar', base_dir, 'dist')
    self.assertTrue(os.path.exists(archive_path))
    self.assertEqual(os.path.basename(archive_path), 'archive.tar.gz')
    self.assertEqual(self._tarinfo(archive_path), self._created_files)
|
| 319 |
+
|
| 320 |
+
@unittest.skipUnless(bz2, 'Need bz2 support to run')
def test_make_archive_bztar(self):
    """make_archive('bztar') yields archive.tar.bz2 with the expected members."""
    base_dir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    archive_path = make_archive(base_name, 'bztar', base_dir, 'dist')
    self.assertTrue(os.path.exists(archive_path))
    self.assertEqual(os.path.basename(archive_path), 'archive.tar.bz2')
    self.assertEqual(self._tarinfo(archive_path), self._created_files)
|
| 328 |
+
|
| 329 |
+
@unittest.skipUnless(lzma, 'Need xz support to run')
def test_make_archive_xztar(self):
    """make_archive('xztar') yields archive.tar.xz with the expected members."""
    base_dir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    archive_path = make_archive(base_name, 'xztar', base_dir, 'dist')
    self.assertTrue(os.path.exists(archive_path))
    self.assertEqual(os.path.basename(archive_path), 'archive.tar.xz')
    self.assertEqual(self._tarinfo(archive_path), self._created_files)
|
| 337 |
+
|
| 338 |
+
def test_make_archive_owner_group(self):
    """owner/group arguments are accepted for both zip and tar formats."""
    # use real root names when uid/gid lookups exist; the fallback
    # names work too because the zip writer ignores ownership
    if UID_GID_SUPPORT:
        group = grp.getgrgid(0)[0]
        owner = pwd.getpwuid(0)[0]
    else:
        group = owner = 'root'

    base_dir = self._create_files()
    root_dir = self.mkdtemp()
    base_name = os.path.join(self.mkdtemp(), 'archive')

    archive = make_archive(base_name, 'zip', root_dir, base_dir,
                           owner=owner, group=group)
    self.assertTrue(os.path.exists(archive))

    archive = make_archive(base_name, 'zip', root_dir, base_dir)
    self.assertTrue(os.path.exists(archive))

    archive = make_archive(base_name, 'tar', root_dir, base_dir,
                           owner=owner, group=group)
    self.assertTrue(os.path.exists(archive))

    # unknown names must not break archive creation either
    archive = make_archive(base_name, 'tar', root_dir, base_dir,
                           owner='kjhkjhkjg', group='oihohoh')
    self.assertTrue(os.path.exists(archive))
|
| 364 |
+
|
| 365 |
+
@unittest.skipUnless(ZLIB_SUPPORT, "Requires zlib")
@unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
def test_tarfile_root_owner(self):
    """Archiving with root's owner/group stores uid 0 / gid 0 for members."""
    tmpdir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    group = grp.getgrgid(0)[0]
    owner = pwd.getpwuid(0)[0]
    with change_cwd(tmpdir):
        archive_name = make_tarball(base_name, 'dist', compress=None,
                                    owner=owner, group=group)

    # the uncompressed tarball must exist
    self.assertTrue(os.path.exists(archive_name))

    # every member must be recorded as root-owned
    with tarfile.open(archive_name) as archive:
        for member in archive.getmembers():
            self.assertEqual(member.uid, 0)
            self.assertEqual(member.gid, 0)
|
| 391 |
+
|
| 392 |
+
def test_suite():
    """Build the unittest suite for this module."""
    return unittest.makeSuite(ArchiveUtilTestCase)
|
| 394 |
+
|
| 395 |
+
# Allow running this test module directly.
if __name__ == "__main__":
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_build.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.build."""
|
| 2 |
+
import unittest
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
from test.support import run_unittest
|
| 6 |
+
|
| 7 |
+
from distutils.command.build import build
|
| 8 |
+
from distutils.tests import support
|
| 9 |
+
from sysconfig import get_platform
|
| 10 |
+
|
| 11 |
+
class BuildTestCase(support.TempdirManager,
                    support.LoggingSilencer,
                    unittest.TestCase):
    """Tests for the 'build' command's option finalization."""

    def test_finalize_options(self):
        """finalize_options must derive every build path from build_base."""
        pkg_dir, dist = self.create_dist()
        cmd = build(dist)
        cmd.finalize_options()

        # plat_name defaults to the running platform
        self.assertEqual(cmd.plat_name, get_platform())

        # pure-Python modules go to build/lib
        self.assertEqual(cmd.build_purelib,
                         os.path.join(cmd.build_base, 'lib'))

        # platform-specific modules go to build/lib.<plat>-<x>.<y>[-pydebug],
        # e.g. build/lib.macosx-10.3-i386-2.7
        plat_spec = '.%s-%d.%d' % (cmd.plat_name, *sys.version_info[:2])
        if hasattr(sys, 'gettotalrefcount'):
            # debug interpreters add a '-pydebug' suffix
            self.assertTrue(cmd.build_platlib.endswith('-pydebug'))
            plat_spec += '-pydebug'
        self.assertEqual(cmd.build_platlib,
                         os.path.join(cmd.build_base, 'lib' + plat_spec))

        # build_lib defaults to the pure-lib location
        self.assertEqual(cmd.build_lib, cmd.build_purelib)

        # temporary artefacts live in build/temp.<plat>
        self.assertEqual(cmd.build_temp,
                         os.path.join(cmd.build_base, 'temp' + plat_spec))

        # scripts land in build/scripts-x.y
        self.assertEqual(cmd.build_scripts,
                         os.path.join(cmd.build_base,
                                      'scripts-%d.%d' % sys.version_info[:2]))

        # the configured executable is the normalized interpreter path
        self.assertEqual(cmd.executable, os.path.normpath(sys.executable))
|
| 51 |
+
|
| 52 |
+
def test_suite():
    """Build the unittest suite for this module."""
    return unittest.makeSuite(BuildTestCase)
|
| 54 |
+
|
| 55 |
+
# Allow running this test module directly.
if __name__ == "__main__":
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_build_ext.py
ADDED
|
@@ -0,0 +1,553 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
import os
|
| 3 |
+
from io import StringIO
|
| 4 |
+
import textwrap
|
| 5 |
+
|
| 6 |
+
from distutils.core import Distribution
|
| 7 |
+
from distutils.command.build_ext import build_ext
|
| 8 |
+
from distutils import sysconfig
|
| 9 |
+
from distutils.tests.support import (TempdirManager, LoggingSilencer,
|
| 10 |
+
copy_xxmodule_c, fixup_build_ext)
|
| 11 |
+
from distutils.extension import Extension
|
| 12 |
+
from distutils.errors import (
|
| 13 |
+
CompileError, DistutilsPlatformError, DistutilsSetupError,
|
| 14 |
+
UnknownFileError)
|
| 15 |
+
|
| 16 |
+
import unittest
|
| 17 |
+
from test import support
|
| 18 |
+
from test.support import os_helper
|
| 19 |
+
from test.support.script_helper import assert_python_ok
|
| 20 |
+
|
| 21 |
+
# http://bugs.python.org/issue4373
# Guard so the compiled 'xx' module is built/tested at most once per run;
# test_build_ext sets this to the test class name after its first pass.
ALREADY_TESTED = False
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class BuildExtTestCase(TempdirManager,
|
| 27 |
+
LoggingSilencer,
|
| 28 |
+
unittest.TestCase):
|
| 29 |
+
def setUp(self):
    """Create an isolated environment: temp dir, private USER_BASE, saved config."""
    super(BuildExtTestCase, self).setUp()
    self.tmp_dir = self.mkdtemp()
    import site
    self.old_user_base = site.USER_BASE
    site.USER_BASE = self.mkdtemp()
    from distutils.command import build_ext
    build_ext.USER_BASE = site.USER_BASE
    # snapshot sysconfig state so tearDown can restore it exactly
    self.old_config_vars = dict(sysconfig._config_vars)

    # bpo-30132: On Windows, a .pdb file may be created in the current
    # working directory.  Run the whole test from inside the temp dir
    # and leave it again during cleanup so everything gets removed.
    cwd_ctx = os_helper.change_cwd(self.tmp_dir)
    cwd_ctx.__enter__()
    self.addCleanup(cwd_ctx.__exit__, None, None, None)
|
| 46 |
+
|
| 47 |
+
def tearDown(self):
    """Undo everything setUp changed, then run the base-class teardown."""
    import site
    site.USER_BASE = self.old_user_base
    from distutils.command import build_ext
    build_ext.USER_BASE = self.old_user_base
    # restore the sysconfig snapshot taken in setUp
    sysconfig._config_vars.clear()
    sysconfig._config_vars.update(self.old_config_vars)
    super(BuildExtTestCase, self).tearDown()
|
| 55 |
+
|
| 56 |
+
def build_ext(self, *args, **kwargs):
    """Factory hook: subclasses may substitute another build_ext command."""
    return build_ext(*args, **kwargs)
|
| 58 |
+
|
| 59 |
+
def test_build_ext(self):
    """Compile xxmodule.c, then exercise the extension in a child interpreter."""
    cmd = support.missing_compiler_executable()
    if cmd is not None:
        self.skipTest('The %r command is not found' % cmd)
    global ALREADY_TESTED
    copy_xxmodule_c(self.tmp_dir)
    xx_c = os.path.join(self.tmp_dir, 'xxmodule.c')
    xx_ext = Extension('xx', [xx_c])
    dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
    dist.package_dir = self.tmp_dir
    cmd = self.build_ext(dist)
    fixup_build_ext(cmd)
    cmd.build_lib = self.tmp_dir
    cmd.build_temp = self.tmp_dir

    saved_stdout = sys.stdout
    if not support.verbose:
        # silence compiler output
        sys.stdout = StringIO()
    try:
        cmd.ensure_finalized()
        cmd.run()
    finally:
        sys.stdout = saved_stdout

    if ALREADY_TESTED:
        self.skipTest('Already tested in %s' % ALREADY_TESTED)
    else:
        ALREADY_TESTED = type(self).__name__

    # run the functional checks in a fresh interpreter so 'xx' is
    # imported at most once per run (see bpo-4373)
    code = textwrap.dedent(f"""
        tmp_dir = {self.tmp_dir!r}

        import sys
        import unittest
        from test import support

        sys.path.insert(0, tmp_dir)
        import xx

        class Tests(unittest.TestCase):
            def test_xx(self):
                for attr in ('error', 'foo', 'new', 'roj'):
                    self.assertTrue(hasattr(xx, attr))

                self.assertEqual(xx.foo(2, 5), 7)
                self.assertEqual(xx.foo(13,15), 28)
                self.assertEqual(xx.new().demo(), None)
                if support.HAVE_DOCSTRINGS:
                    doc = 'This is a template module just for instruction.'
                    self.assertEqual(xx.__doc__, doc)
                self.assertIsInstance(xx.Null(), xx.Null)
                self.assertIsInstance(xx.Str(), xx.Str)


        unittest.main()
    """)
    assert_python_ok('-c', code)
|
| 117 |
+
|
| 118 |
+
def test_solaris_enable_shared(self):
    """On Solaris with a shared libpython, library_dirs must be populated."""
    dist = Distribution({'name': 'xx'})
    cmd = self.build_ext(dist)
    saved_platform = sys.platform

    sys.platform = 'sunos'  # fooling finalize_options
    from distutils.sysconfig import _config_vars
    saved_var = _config_vars.get('Py_ENABLE_SHARED')
    _config_vars['Py_ENABLE_SHARED'] = 1
    try:
        cmd.ensure_finalized()
    finally:
        # restore both the platform string and the config variable
        sys.platform = saved_platform
        if saved_var is None:
            del _config_vars['Py_ENABLE_SHARED']
        else:
            _config_vars['Py_ENABLE_SHARED'] = saved_var

    # make sure we get some library dirs under solaris
    self.assertGreater(len(cmd.library_dirs), 0)
|
| 138 |
+
|
| 139 |
+
def test_user_site(self):
    """--user adds USER_BASE lib/include directories to the search paths."""
    import site
    dist = Distribution({'name': 'xx'})
    cmd = self.build_ext(dist)

    # the 'user' option must be advertised
    option_names = [name for name, short, label in cmd.user_options]
    self.assertIn('user', option_names)

    # enable user mode
    cmd.user = 1

    # create the user-base layout finalize_options looks for
    lib = os.path.join(site.USER_BASE, 'lib')
    incl = os.path.join(site.USER_BASE, 'include')
    os.mkdir(lib)
    os.mkdir(incl)

    cmd.ensure_finalized()

    # both directories must now be on the search paths
    self.assertIn(lib, cmd.library_dirs)
    self.assertIn(lib, cmd.rpath)
    self.assertIn(incl, cmd.include_dirs)
|
| 166 |
+
|
| 167 |
+
def test_optional_extension(self):
    """optional=True turns a build failure into a non-fatal event."""
    # a bogus source file: building must raise...
    broken = [Extension('foo', ['xxx'], optional=False)]
    dist = Distribution({'name': 'xx', 'ext_modules': broken})
    cmd = self.build_ext(dist)
    cmd.ensure_finalized()
    self.assertRaises((UnknownFileError, CompileError),
                      cmd.run)  # should raise an error

    # ...unless the extension is marked optional
    tolerated = [Extension('foo', ['xxx'], optional=True)]
    dist = Distribution({'name': 'xx', 'ext_modules': tolerated})
    cmd = self.build_ext(dist)
    cmd.ensure_finalized()
    cmd.run()  # should pass
|
| 183 |
+
|
| 184 |
+
def test_finalize_options(self):
    """finalize_options must normalize include/lib/define/undef/swig options."""
    # Make sure Python's include directories (for Python.h, pyconfig.h,
    # etc.) are in the include search path.
    modules = [Extension('foo', ['xxx'], optional=False)]
    dist = Distribution({'name': 'xx', 'ext_modules': modules})
    cmd = self.build_ext(dist)
    cmd.finalize_options()

    py_include = sysconfig.get_python_inc()
    for p in py_include.split(os.path.pathsep):
        self.assertIn(p, cmd.include_dirs)

    plat_py_include = sysconfig.get_python_inc(plat_specific=1)
    for p in plat_py_include.split(os.path.pathsep):
        self.assertIn(p, cmd.include_dirs)

    # make sure cmd.libraries is turned into a list
    # if it's a string
    cmd = self.build_ext(dist)
    cmd.libraries = 'my_lib, other_lib lastlib'
    cmd.finalize_options()
    self.assertEqual(cmd.libraries, ['my_lib', 'other_lib', 'lastlib'])

    # make sure cmd.library_dirs is turned into a list
    # if it's a string
    cmd = self.build_ext(dist)
    cmd.library_dirs = 'my_lib_dir%sother_lib_dir' % os.pathsep
    cmd.finalize_options()
    self.assertIn('my_lib_dir', cmd.library_dirs)
    self.assertIn('other_lib_dir', cmd.library_dirs)

    # make sure rpath is turned into a list
    # if it's a string
    cmd = self.build_ext(dist)
    cmd.rpath = 'one%stwo' % os.pathsep
    cmd.finalize_options()
    self.assertEqual(cmd.rpath, ['one', 'two'])

    # make sure cmd.link_objects is turned into a list
    # if it's a string
    # CONSISTENCY FIX: this case used build_ext(dist) directly, bypassing
    # the self.build_ext factory every other case goes through (which
    # subclasses may override); use the factory here as well.
    cmd = self.build_ext(dist)
    cmd.link_objects = 'one two,three'
    cmd.finalize_options()
    self.assertEqual(cmd.link_objects, ['one', 'two', 'three'])

    # XXX more tests to perform for win32

    # make sure define is turned into 2-tuples
    # strings if they are ','-separated strings
    cmd = self.build_ext(dist)
    cmd.define = 'one,two'
    cmd.finalize_options()
    self.assertEqual(cmd.define, [('one', '1'), ('two', '1')])

    # make sure undef is turned into a list of
    # strings if they are ','-separated strings
    cmd = self.build_ext(dist)
    cmd.undef = 'one,two'
    cmd.finalize_options()
    self.assertEqual(cmd.undef, ['one', 'two'])

    # make sure swig_opts is turned into a list
    cmd = self.build_ext(dist)
    cmd.swig_opts = None
    cmd.finalize_options()
    self.assertEqual(cmd.swig_opts, [])

    cmd = self.build_ext(dist)
    cmd.swig_opts = '1 2'
    cmd.finalize_options()
    self.assertEqual(cmd.swig_opts, ['1', '2'])
|
| 255 |
+
|
| 256 |
+
def test_check_extensions_list(self):
    """check_extensions_list validates and converts ext_modules entries."""
    dist = Distribution()
    cmd = self.build_ext(dist)
    cmd.finalize_options()

    # 'extensions' option must be a list of Extension instances
    self.assertRaises(DistutilsSetupError,
                      cmd.check_extensions_list, 'foo')

    # each element must be an Extension instance or a 2-tuple
    specs = [('bar', 'foo', 'bar'), 'foo']
    self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, specs)

    # the first tuple element must be a valid dotted Python name
    specs = [('foo-bar', '')]
    self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, specs)

    # the second tuple element must be a build-info dictionary
    specs = [('foo.bar', '')]
    self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, specs)

    # a well-formed 2-tuple is converted into an Extension in place
    specs = [('foo.bar', {'sources': [''], 'libraries': 'foo',
                          'some': 'bar'})]
    cmd.check_extensions_list(specs)
    converted = specs[0]
    self.assertIsInstance(converted, Extension)

    # recognized build-info keys ('include_dirs', 'library_dirs',
    # 'libraries', 'extra_objects', 'extra_compile_args',
    # 'extra_link_args') are copied onto the Extension; others dropped
    self.assertEqual(converted.libraries, 'foo')
    self.assertFalse(hasattr(converted, 'some'))

    # 'macros' entries must be 1- or 2-tuples
    specs = [('foo.bar', {'sources': [''], 'libraries': 'foo',
                          'some': 'bar', 'macros': [('1', '2', '3'), 'foo']})]
    self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, specs)

    # 2-tuples define macros, 1-tuples undefine them
    specs[0][1]['macros'] = [('1', '2'), ('3',)]
    cmd.check_extensions_list(specs)
    self.assertEqual(specs[0].undef_macros, ['3'])
    self.assertEqual(specs[0].define_macros, [('1', '2')])
|
| 303 |
+
|
| 304 |
+
def test_get_source_files(self):
    """get_source_files reports each extension's source list."""
    ext_modules = [Extension('foo', ['xxx'], optional=False)]
    dist = Distribution({'name': 'xx', 'ext_modules': ext_modules})
    cmd = self.build_ext(dist)
    cmd.ensure_finalized()
    self.assertEqual(cmd.get_source_files(), ['xxx'])
|
| 310 |
+
|
| 311 |
+
def test_unicode_module_names(self):
    """Non-ASCII extension names get punycode-encoded init symbols."""
    modules = [
        Extension('foo', ['aaa'], optional=False),
        Extension('föö', ['uuu'], optional=False),
    ]
    dist = Distribution({'name': 'xx', 'ext_modules': modules})
    cmd = self.build_ext(dist)
    cmd.ensure_finalized()
    # filenames keep the original (possibly non-ASCII) module name
    self.assertRegex(cmd.get_ext_filename(modules[0].name), r'foo(_d)?\..*')
    self.assertRegex(cmd.get_ext_filename(modules[1].name), r'föö(_d)?\..*')
    # export symbols use PyInit_ for ASCII and PyInitU_<punycode> otherwise
    self.assertEqual(cmd.get_export_symbols(modules[0]), ['PyInit_foo'])
    self.assertEqual(cmd.get_export_symbols(modules[1]), ['PyInitU_f_gkaa'])
|
| 323 |
+
|
| 324 |
+
def test_compiler_option(self):
    """An explicit cmd.compiler string must survive a full run()."""
    # cmd.compiler is an option and should not be replaced by the
    # compiler instance created while the command runs
    dist = Distribution()
    cmd = self.build_ext(dist)
    cmd.compiler = 'unix'
    cmd.ensure_finalized()
    cmd.run()
    self.assertEqual(cmd.compiler, 'unix')
|
| 334 |
+
|
| 335 |
+
def test_get_outputs(self):
    """get_outputs()/get_ext_fullpath() must track inplace, build_lib
    and the package layout."""
    missing = support.missing_compiler_executable()
    if missing is not None:
        self.skipTest('The %r command is not found' % missing)

    # One tiny C extension to build.
    src_dir = self.mkdtemp()
    c_file = os.path.join(src_dir, 'foo.c')
    self.write_file(c_file, 'void PyInit_foo(void) {}\n')
    ext = Extension('foo', [c_file], optional=False)
    dist = Distribution({'name': 'xx',
                         'ext_modules': [ext]})
    cmd = self.build_ext(dist)
    fixup_build_ext(cmd)
    cmd.ensure_finalized()
    self.assertEqual(len(cmd.get_outputs()), 1)

    cmd.build_lib = os.path.join(self.tmp_dir, 'build')
    cmd.build_temp = os.path.join(self.tmp_dir, 'tempt')

    # issue #5977: with --inplace, get_outputs() used to report the
    # wrong location.  Build inplace from a different cwd.
    scratch_dir = os.path.realpath(self.mkdtemp())
    saved_cwd = os.getcwd()
    os.chdir(scratch_dir)
    try:
        cmd.inplace = 1
        cmd.run()
        built = cmd.get_outputs()[0]
    finally:
        os.chdir(saved_cwd)
    self.assertTrue(os.path.exists(built))
    ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
    self.assertTrue(built.endswith(ext_suffix))
    self.assertEqual(os.path.dirname(built), scratch_dir)

    # Out-of-place: the output must land under build_lib.
    cmd.inplace = 0
    cmd.compiler = None
    cmd.run()
    built = cmd.get_outputs()[0]
    self.assertTrue(os.path.exists(built))
    self.assertTrue(built.endswith(ext_suffix))
    self.assertEqual(os.path.dirname(built), cmd.build_lib)

    # inplace = 0, cmd.package = 'bar'
    build_py = cmd.get_finalized_command('build_py')
    build_py.package_dir = {'': 'bar'}
    path = cmd.get_ext_fullpath('foo')
    # the parent of the full path is the build dir
    path = os.path.split(path)[0]
    self.assertEqual(path, cmd.build_lib)

    # inplace = 1, cmd.package = 'bar'
    cmd.inplace = 1
    scratch_dir = os.path.realpath(self.mkdtemp())
    saved_cwd = os.getcwd()
    os.chdir(scratch_dir)
    try:
        path = cmd.get_ext_fullpath('foo')
    finally:
        os.chdir(saved_cwd)
    # the last directory component must be the package dir 'bar'
    path = os.path.split(path)[0]
    lastdir = os.path.split(path)[-1]
    self.assertEqual(lastdir, 'bar')
|
| 400 |
+
|
| 401 |
+
def test_ext_fullpath(self):
    """get_ext_fullpath() must honour inplace, build_lib and
    package_dir for dotted extension names."""
    suffix = sysconfig.get_config_var('EXT_SUFFIX')

    # building lxml.etree inplace, with a 'src' package root
    dist = Distribution()
    cmd = self.build_ext(dist)
    cmd.inplace = 1
    cmd.distribution.package_dir = {'': 'src'}
    cmd.distribution.packages = ['lxml', 'lxml.html']
    base = os.getcwd()
    expected = os.path.join(base, 'src', 'lxml', 'etree' + suffix)
    path = cmd.get_ext_fullpath('lxml.etree')
    self.assertEqual(expected, path)

    # the same extension, built out of place
    cmd.inplace = 0
    cmd.build_lib = os.path.join(base, 'tmpdir')
    expected = os.path.join(base, 'tmpdir', 'lxml', 'etree' + suffix)
    path = cmd.get_ext_fullpath('lxml.etree')
    self.assertEqual(expected, path)

    # building twisted.runner.portmap not inplace
    build_py = cmd.get_finalized_command('build_py')
    build_py.package_dir = {}
    cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
    path = cmd.get_ext_fullpath('twisted.runner.portmap')
    expected = os.path.join(base, 'tmpdir', 'twisted', 'runner',
                            'portmap' + suffix)
    self.assertEqual(expected, path)

    # building twisted.runner.portmap inplace
    cmd.inplace = 1
    path = cmd.get_ext_fullpath('twisted.runner.portmap')
    expected = os.path.join(base, 'twisted', 'runner', 'portmap' + suffix)
    self.assertEqual(expected, path)
|
| 438 |
+
|
| 439 |
+
|
| 440 |
+
@unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX')
def test_deployment_target_default(self):
    # Issue 9516: with no environment override, an extension module
    # must be compiled for the interpreter's own deployment target.
    self._try_compile_deployment_target('==', None)
|
| 446 |
+
|
| 447 |
+
@unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX')
def test_deployment_target_too_low(self):
    # Issue 9516: building for a deployment target OLDER than the
    # interpreter's must be rejected.
    self.assertRaises(DistutilsPlatformError,
                      self._try_compile_deployment_target, '>', '10.1')
|
| 453 |
+
|
| 454 |
+
@unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX')
def test_deployment_target_higher_ok(self):
    # Issue 9516: building for a NEWER deployment target than the
    # interpreter's must be allowed — the extension may rely on newer
    # OS features.
    deptarget = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
    if deptarget:
        # bump the minor version number (e.g. 10.6 -> 10.7)
        pieces = [int(piece) for piece in deptarget.split('.')]
        pieces[-1] += 1
        deptarget = '.'.join(str(piece) for piece in pieces)
        self._try_compile_deployment_target('<', deptarget)
|
| 466 |
+
|
| 467 |
+
def _try_compile_deployment_target(self, operator, target):
    """Build a module whose preprocessor asserts
    ``TARGET <operator> MAC_OS_X_VERSION_MIN_REQUIRED``.

    *target* is exported as MACOSX_DEPLOYMENT_TARGET (None removes any
    existing override).  A CompileError becomes a test failure.
    """
    # Work on a private copy of os.environ; restore the original later.
    orig_environ = os.environ
    os.environ = orig_environ.copy()
    self.addCleanup(setattr, os, 'environ', orig_environ)

    if target is None:
        if os.environ.get('MACOSX_DEPLOYMENT_TARGET'):
            del os.environ['MACOSX_DEPLOYMENT_TARGET']
    else:
        os.environ['MACOSX_DEPLOYMENT_TARGET'] = target

    deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c')

    with open(deptarget_c, 'w') as fp:
        fp.write(textwrap.dedent('''\
            #include <AvailabilityMacros.h>

            int dummy;

            #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED
            #else
            #error "Unexpected target"
            #endif

        ''' % operator))

    # Get the deployment target the interpreter was built with and spell
    # it the way the Apple availability macros do; the macro *names*
    # can't be used since some versions we test against don't exist yet.
    target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
    target = tuple(map(int, target.split('.')[0:2]))
    if target[:2] < (10, 10):
        # for 10.1 through 10.9.x -> "10n0"
        target = '%02d%01d0' % target
    else:
        # for 10.10 and beyond -> "10nn00"
        if len(target) >= 2:
            target = '%02d%02d00' % target
        else:
            # 11 and later can have no minor version (11 instead of 11.0)
            target = '%02d0000' % target
    deptarget_ext = Extension(
        'deptarget',
        [deptarget_c],
        extra_compile_args=['-DTARGET=%s'%(target,)],
    )
    dist = Distribution({
        'name': 'deptarget',
        'ext_modules': [deptarget_ext]
    })
    dist.package_dir = self.tmp_dir
    cmd = self.build_ext(dist)
    cmd.build_lib = self.tmp_dir
    cmd.build_temp = self.tmp_dir

    try:
        saved_stdout = sys.stdout
        if not support.verbose:
            # silence compiler output
            sys.stdout = StringIO()
        try:
            cmd.ensure_finalized()
            cmd.run()
        finally:
            sys.stdout = saved_stdout

    except CompileError:
        self.fail("Wrong deployment target during compilation")
|
| 536 |
+
|
| 537 |
+
|
| 538 |
+
class ParallelBuildExtTestCase(BuildExtTestCase):
    """Re-run the whole build_ext suite with parallel builds enabled."""

    def build_ext(self, *args, **kwargs):
        # Same command object as the base class, just with parallelism on.
        cmd = super().build_ext(*args, **kwargs)
        cmd.parallel = True
        return cmd
|
| 544 |
+
|
| 545 |
+
|
| 546 |
+
def test_suite():
    """Return a suite of the serial and parallel build_ext test cases."""
    # unittest.makeSuite() is deprecated and removed in Python 3.13; the
    # TestLoader equivalent builds the same suite in the same order.
    loader = unittest.TestLoader()
    suite = unittest.TestSuite()
    for case in (BuildExtTestCase, ParallelBuildExtTestCase):
        suite.addTest(loader.loadTestsFromTestCase(case))
    return suite

if __name__ == '__main__':
    support.run_unittest(__name__)
|
llava/lib/python3.10/distutils/tests/test_build_py.py
ADDED
|
@@ -0,0 +1,179 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.build_py."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
import unittest
|
| 6 |
+
|
| 7 |
+
from distutils.command.build_py import build_py
|
| 8 |
+
from distutils.core import Distribution
|
| 9 |
+
from distutils.errors import DistutilsFileError
|
| 10 |
+
|
| 11 |
+
from distutils.tests import support
|
| 12 |
+
from test.support import run_unittest
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class BuildPyTestCase(support.TempdirManager,
                      support.LoggingSilencer,
                      unittest.TestCase):
    """Exercise the build_py command."""

    def test_package_data(self):
        # Lay out a tiny package: one module plus one data file.
        src = self.mkdtemp()
        with open(os.path.join(src, "__init__.py"), "w") as fh:
            fh.write("# Pretend this is a package.")
        with open(os.path.join(src, "README.txt"), "w") as fh:
            fh.write("Info about this package")

        target = self.mkdtemp()

        dist = Distribution({"packages": ["pkg"],
                             "package_dir": {"pkg": src}})
        # script_name need not exist, it only has to be initialized
        dist.script_name = os.path.join(src, "setup.py")
        dist.command_obj["build"] = support.DummyCommand(
            force=0,
            build_lib=target)
        dist.packages = ["pkg"]
        dist.package_data = {"pkg": ["README.txt"]}
        dist.package_dir = {"pkg": src}

        cmd = build_py(dist)
        cmd.compile = 1
        cmd.ensure_finalized()
        self.assertEqual(cmd.package_data, dist.package_data)

        cmd.run()

        # The outputs must include byte-code for the module but none
        # for the data file (data files are never byte-compiled).
        self.assertEqual(len(cmd.get_outputs()), 3)
        pkgdest = os.path.join(target, "pkg")
        built = os.listdir(pkgdest)
        pycache_dir = os.path.join(pkgdest, "__pycache__")
        self.assertIn("__init__.py", built)
        self.assertIn("README.txt", built)
        if sys.dont_write_bytecode:
            self.assertFalse(os.path.exists(pycache_dir))
        else:
            pyc_files = os.listdir(pycache_dir)
            self.assertIn("__init__.%s.pyc" % sys.implementation.cache_tag,
                          pyc_files)

    def test_empty_package_dir(self):
        # See bugs #1668596/#1720897
        src = self.mkdtemp()
        open(os.path.join(src, "__init__.py"), "w").close()

        docdir = os.path.join(src, "doc")
        os.mkdir(docdir)
        open(os.path.join(docdir, "testfile"), "w").close()

        os.chdir(src)
        dist = Distribution({"packages": ["pkg"],
                             "package_dir": {"pkg": ""},
                             "package_data": {"pkg": ["doc/*"]}})
        # script_name need not exist, it only has to be initialized
        dist.script_name = os.path.join(src, "setup.py")
        dist.script_args = ["build"]
        dist.parse_command_line()

        try:
            dist.run_commands()
        except DistutilsFileError:
            self.fail("failed package_data test when package_dir is ''")

    @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled')
    def test_byte_compile(self):
        project_dir, dist = self.create_dist(py_modules=['boiledeggs'])
        os.chdir(project_dir)
        self.write_file('boiledeggs.py', 'import antigravity')
        cmd = build_py(dist)
        cmd.compile = 1
        cmd.build_lib = 'here'
        cmd.finalize_options()
        cmd.run()

        # source is copied, byte-code lands in __pycache__
        built = os.listdir(cmd.build_lib)
        self.assertEqual(sorted(built), ['__pycache__', 'boiledeggs.py'])
        cached = os.listdir(os.path.join(cmd.build_lib, '__pycache__'))
        self.assertEqual(cached,
                         ['boiledeggs.%s.pyc' % sys.implementation.cache_tag])

    @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled')
    def test_byte_compile_optimized(self):
        project_dir, dist = self.create_dist(py_modules=['boiledeggs'])
        os.chdir(project_dir)
        self.write_file('boiledeggs.py', 'import antigravity')
        cmd = build_py(dist)
        cmd.compile = 0
        cmd.optimize = 1
        cmd.build_lib = 'here'
        cmd.finalize_options()
        cmd.run()

        # only the optimized (.opt-1) byte-code must be produced
        built = os.listdir(cmd.build_lib)
        self.assertEqual(sorted(built), ['__pycache__', 'boiledeggs.py'])
        cached = os.listdir(os.path.join(cmd.build_lib, '__pycache__'))
        expect = 'boiledeggs.{}.opt-1.pyc'.format(sys.implementation.cache_tag)
        self.assertEqual(sorted(cached), [expect])

    def test_dir_in_package_data(self):
        """
        A directory in package_data should not be added to the filelist.
        """
        # See bug 19286
        src = self.mkdtemp()
        pkg_dir = os.path.join(src, "pkg")

        os.mkdir(pkg_dir)
        open(os.path.join(pkg_dir, "__init__.py"), "w").close()

        docdir = os.path.join(pkg_dir, "doc")
        os.mkdir(docdir)
        open(os.path.join(docdir, "testfile"), "w").close()

        # this directory could once be mistaken for a data file
        os.mkdir(os.path.join(docdir, 'otherdir'))

        os.chdir(src)
        dist = Distribution({"packages": ["pkg"],
                             "package_data": {"pkg": ["doc/*"]}})
        # script_name need not exist, it only has to be initialized
        dist.script_name = os.path.join(src, "setup.py")
        dist.script_args = ["build"]
        dist.parse_command_line()

        try:
            dist.run_commands()
        except DistutilsFileError:
            self.fail("failed package_data when data dir includes a dir")

    def test_dont_write_bytecode(self):
        # byte_compile() must be a no-op under sys.dont_write_bytecode
        dist = self.create_dist()[1]
        cmd = build_py(dist)
        cmd.compile = 1
        cmd.optimize = 1

        saved = sys.dont_write_bytecode
        sys.dont_write_bytecode = True
        try:
            cmd.byte_compile([])
        finally:
            sys.dont_write_bytecode = saved

        self.assertIn('byte-compiling is disabled',
                      self.logs[0][1] % self.logs[0][2])
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def test_suite():
    """Return the build_py test suite."""
    # unittest.makeSuite() is deprecated and removed in Python 3.13;
    # TestLoader.loadTestsFromTestCase() builds the identical suite.
    return unittest.TestLoader().loadTestsFromTestCase(BuildPyTestCase)

if __name__ == "__main__":
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_cmd.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.cmd."""
|
| 2 |
+
import unittest
|
| 3 |
+
import os
|
| 4 |
+
from test.support import captured_stdout, run_unittest
|
| 5 |
+
|
| 6 |
+
from distutils.cmd import Command
|
| 7 |
+
from distutils.dist import Distribution
|
| 8 |
+
from distutils.errors import DistutilsOptionError
|
| 9 |
+
from distutils import debug
|
| 10 |
+
|
| 11 |
+
class MyCmd(Command):
    """Minimal concrete Command: only initialize_options() is supplied."""

    def initialize_options(self):
        # nothing to set up for these tests
        pass
|
| 14 |
+
|
| 15 |
+
class CommandTestCase(unittest.TestCase):
    """Unit tests for the option helpers on distutils.cmd.Command."""

    def setUp(self):
        self.cmd = MyCmd(Distribution())

    def test_ensure_string_list(self):
        cmd = self.cmd
        cmd.not_string_list = ['one', 2, 'three']
        cmd.yes_string_list = ['one', 'two', 'three']
        cmd.not_string_list2 = object()
        cmd.yes_string_list2 = 'ok'
        cmd.ensure_string_list('yes_string_list')
        cmd.ensure_string_list('yes_string_list2')

        # non-string members and non-list values must be rejected
        self.assertRaises(DistutilsOptionError,
                          cmd.ensure_string_list, 'not_string_list')
        self.assertRaises(DistutilsOptionError,
                          cmd.ensure_string_list, 'not_string_list2')

        # a comma-separated string is split into a list in place
        cmd.option1 = 'ok,dok'
        cmd.ensure_string_list('option1')
        self.assertEqual(cmd.option1, ['ok', 'dok'])

        cmd.option2 = ['xxx', 'www']
        cmd.ensure_string_list('option2')

        cmd.option3 = ['ok', 2]
        self.assertRaises(DistutilsOptionError, cmd.ensure_string_list,
                          'option3')

    def test_make_file(self):
        cmd = self.cmd

        # infiles must be a string or a list/tuple
        self.assertRaises(TypeError, cmd.make_file,
                          infiles=1, outfile='', func='func', args=())

        # make_file must delegate to execute with a sensible message
        def fake_execute(func, args, exec_msg, level):
            self.assertEqual(exec_msg, 'generating out from in')
        cmd.force = True
        cmd.execute = fake_execute
        cmd.make_file(infiles='in', outfile='out', func='func', args=())

    def test_dump_options(self):
        recorded = []
        def record(msg, level):
            recorded.append(msg)
        cmd = self.cmd
        cmd.announce = record
        cmd.option1 = 1
        cmd.option2 = 1
        cmd.user_options = [('option1', '', ''), ('option2', '', '')]
        cmd.dump_options()

        wanted = ["command options for 'MyCmd':", '  option1 = 1',
                  '  option2 = 1']
        self.assertEqual(recorded, wanted)

    def test_ensure_string(self):
        cmd = self.cmd
        cmd.option1 = 'ok'
        cmd.ensure_string('option1')

        # a default is installed when the option is None
        cmd.option2 = None
        cmd.ensure_string('option2', 'xxx')
        self.assertTrue(hasattr(cmd, 'option2'))

        cmd.option3 = 1
        self.assertRaises(DistutilsOptionError, cmd.ensure_string, 'option3')

    def test_ensure_filename(self):
        cmd = self.cmd
        cmd.option1 = __file__
        cmd.ensure_filename('option1')
        cmd.option2 = 'xxx'
        self.assertRaises(DistutilsOptionError, cmd.ensure_filename, 'option2')

    def test_ensure_dirname(self):
        cmd = self.cmd
        cmd.option1 = os.path.dirname(__file__) or os.curdir
        cmd.ensure_dirname('option1')
        cmd.option2 = 'xxx'
        self.assertRaises(DistutilsOptionError, cmd.ensure_dirname, 'option2')

    def test_debug_print(self):
        cmd = self.cmd
        # silent unless distutils.debug.DEBUG is set
        with captured_stdout() as stdout:
            cmd.debug_print('xxx')
        stdout.seek(0)
        self.assertEqual(stdout.read(), '')

        debug.DEBUG = True
        try:
            with captured_stdout() as stdout:
                cmd.debug_print('xxx')
            stdout.seek(0)
            self.assertEqual(stdout.read(), 'xxx\n')
        finally:
            debug.DEBUG = False
|
| 121 |
+
|
| 122 |
+
def test_suite():
    """Return the Command test suite."""
    # unittest.makeSuite() is deprecated and removed in Python 3.13;
    # TestLoader.loadTestsFromTestCase() builds the identical suite.
    return unittest.TestLoader().loadTestsFromTestCase(CommandTestCase)

if __name__ == '__main__':
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_core.py
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.core."""
|
| 2 |
+
|
| 3 |
+
import io
|
| 4 |
+
import distutils.core
|
| 5 |
+
import os
|
| 6 |
+
import shutil
|
| 7 |
+
import sys
|
| 8 |
+
from test.support import captured_stdout, run_unittest
|
| 9 |
+
from test.support import os_helper
|
| 10 |
+
import unittest
|
| 11 |
+
from distutils.tests import support
|
| 12 |
+
from distutils import log
|
| 13 |
+
|
| 14 |
+
# Canned setup.py scripts fed to distutils.core.run_setup() below.

# references __file__, so run_setup must define it for the script
setup_using___file__ = """\

__file__

from distutils.core import setup
setup()
"""

# prints the cwd the script runs in
setup_prints_cwd = """\

import os
print(os.getcwd())

from distutils.core import setup
setup()
"""

# the minimal no-op setup script
setup_does_nothing = """\
from distutils.core import setup
setup()
"""


# registers a custom install command via cmdclass
setup_defines_subclass = """\
from distutils.core import setup
from distutils.command.install import install as _install

class install(_install):
    sub_commands = _install.sub_commands + ['cmd']

setup(cmdclass={'install': install})
"""
|
| 47 |
+
|
| 48 |
+
class CoreTestCase(support.EnvironGuard, unittest.TestCase):
    """Tests for distutils.core.run_setup() and setup()."""

    def setUp(self):
        super(CoreTestCase, self).setUp()
        self.old_stdout = sys.stdout
        self.cleanup_testfn()
        # keep both the binding and the contents of sys.argv
        self.old_argv = sys.argv, sys.argv[:]
        self.addCleanup(log.set_threshold, log._global_log.threshold)

    def tearDown(self):
        sys.stdout = self.old_stdout
        self.cleanup_testfn()
        sys.argv = self.old_argv[0]
        sys.argv[:] = self.old_argv[1]
        super(CoreTestCase, self).tearDown()

    def cleanup_testfn(self):
        # remove TESTFN whether it ended up as a file or a directory
        path = os_helper.TESTFN
        if os.path.isfile(path):
            os.remove(path)
        elif os.path.isdir(path):
            shutil.rmtree(path)

    def write_setup(self, text, path=os_helper.TESTFN):
        # write a throwaway setup script and return its path
        with open(path, "w") as fh:
            fh.write(text)
        return path

    def test_run_setup_provides_file(self):
        # The script must be able to refer to __file__; if run_setup did
        # not provide it, the test setup.py would raise NameError.
        distutils.core.run_setup(
            self.write_setup(setup_using___file__))

    def test_run_setup_preserves_sys_argv(self):
        # run_setup must not clobber sys.argv
        before = sys.argv.copy()
        distutils.core.run_setup(
            self.write_setup(setup_does_nothing))
        self.assertEqual(sys.argv, before)

    def test_run_setup_defines_subclass(self):
        # a cmdclass override declared in the script must take effect
        dist = distutils.core.run_setup(
            self.write_setup(setup_defines_subclass))
        install = dist.get_command_obj('install')
        self.assertIn('cmd', install.sub_commands)

    def test_run_setup_uses_current_dir(self):
        # The setup script runs with the caller's working directory as
        # its own; this was temporarily broken when TESTFN did not use
        # the current directory.
        sys.stdout = io.StringIO()
        cwd = os.getcwd()

        # drop the setup.py into a fresh directory
        os.mkdir(os_helper.TESTFN)
        setup_py = os.path.join(os_helper.TESTFN, "setup.py")
        distutils.core.run_setup(
            self.write_setup(setup_prints_cwd, path=setup_py))

        output = sys.stdout.getvalue()
        if output.endswith("\n"):
            output = output[:-1]
        self.assertEqual(cwd, output)

    def test_debug_mode(self):
        # exercise the extra output produced when DEBUG is set
        sys.argv = ['setup.py', '--name']
        with captured_stdout() as stdout:
            distutils.core.setup(name='bar')
        stdout.seek(0)
        self.assertEqual(stdout.read(), 'bar\n')

        distutils.core.DEBUG = True
        try:
            with captured_stdout() as stdout:
                distutils.core.setup(name='bar')
        finally:
            distutils.core.DEBUG = False
        stdout.seek(0)
        wanted = "options (after parsing config files):\n"
        self.assertEqual(stdout.readlines()[0], wanted)
|
| 135 |
+
|
| 136 |
+
def test_suite():
    """Return the distutils.core test suite."""
    # unittest.makeSuite() is deprecated and removed in Python 3.13;
    # TestLoader.loadTestsFromTestCase() builds the identical suite.
    return unittest.TestLoader().loadTestsFromTestCase(CoreTestCase)

if __name__ == "__main__":
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_cygwinccompiler.py
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.cygwinccompiler."""
|
| 2 |
+
import unittest
|
| 3 |
+
import sys
|
| 4 |
+
import os
|
| 5 |
+
from io import BytesIO
|
| 6 |
+
from test.support import run_unittest
|
| 7 |
+
|
| 8 |
+
from distutils import cygwinccompiler
|
| 9 |
+
from distutils.cygwinccompiler import (check_config_h,
|
| 10 |
+
CONFIG_H_OK, CONFIG_H_NOTOK,
|
| 11 |
+
CONFIG_H_UNCERTAIN, get_versions,
|
| 12 |
+
get_msvcr)
|
| 13 |
+
from distutils.tests import support
|
| 14 |
+
|
| 15 |
+
class FakePopen(object):
    """Stand-in for Popen that serves canned output for known tools."""

    # set by the test case installing the fake; must expose an _exes
    # mapping of executable name -> bytes output
    test_class = None

    def __init__(self, cmd, shell, stdout):
        self.cmd = cmd.split()[0]
        exe_table = self.test_class._exes
        if self.cmd in exe_table:
            # mirror Popen on Python 3, whose stdout yields bytes
            # (issue #6438)
            self.stdout = BytesIO(exe_table[self.cmd])
        else:
            # unknown executable: fall back to really running it
            self.stdout = os.popen(cmd, 'r')
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class CygwinCCompilerTestCase(support.TempdirManager,
                              unittest.TestCase):
    """Tests for distutils.cygwinccompiler helper functions.

    setUp patches the hooks cygwinccompiler relies on (config header
    lookup, executable discovery, Popen) so no real compiler, pyconfig.h
    or subprocess is needed; tearDown restores them.
    """

    def setUp(self):
        super(CygwinCCompilerTestCase, self).setUp()
        # sys.version is mutated by several tests below; keep the original.
        self.version = sys.version
        self.python_h = os.path.join(self.mkdtemp(), 'python.h')
        from distutils import sysconfig
        self.old_get_config_h_filename = sysconfig.get_config_h_filename
        sysconfig.get_config_h_filename = self._get_config_h_filename
        self.old_find_executable = cygwinccompiler.find_executable
        cygwinccompiler.find_executable = self._find_executable
        # maps executable name -> canned bytes output served by FakePopen
        self._exes = {}
        self.old_popen = cygwinccompiler.Popen
        FakePopen.test_class = self
        cygwinccompiler.Popen = FakePopen

    def tearDown(self):
        # Restore everything patched in setUp, in reverse.
        sys.version = self.version
        from distutils import sysconfig
        sysconfig.get_config_h_filename = self.old_get_config_h_filename
        cygwinccompiler.find_executable = self.old_find_executable
        cygwinccompiler.Popen = self.old_popen
        super(CygwinCCompilerTestCase, self).tearDown()

    def _get_config_h_filename(self):
        # Replacement for sysconfig.get_config_h_filename: points at a
        # temp file this test controls.
        return self.python_h

    def _find_executable(self, name):
        # Replacement for find_executable: only "finds" faked commands.
        if name in self._exes:
            return name
        return None

    def test_check_config_h(self):

        # check_config_h looks for "GCC" in sys.version first
        # returns CONFIG_H_OK if found
        sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC '
                       '4.0.1 (Apple Computer, Inc. build 5370)]')

        self.assertEqual(check_config_h()[0], CONFIG_H_OK)

        # then it tries to see if it can find "__GNUC__" in pyconfig.h
        sys.version = 'something without the *CC word'

        # if the file doesn't exist it returns CONFIG_H_UNCERTAIN
        self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN)

        # if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK
        self.write_file(self.python_h, 'xxx')
        self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK)

        # and CONFIG_H_OK if __GNUC__ is found
        self.write_file(self.python_h, 'xxx __GNUC__ xxx')
        self.assertEqual(check_config_h()[0], CONFIG_H_OK)

    def test_get_versions(self):

        # get_versions calls distutils.spawn.find_executable on
        # 'gcc', 'ld' and 'dllwrap'
        self.assertEqual(get_versions(), (None, None, None))

        # Let's fake we have 'gcc' and it returns '3.4.5'
        self._exes['gcc'] = b'gcc (GCC) 3.4.5 (mingw special)\nFSF'
        res = get_versions()
        self.assertEqual(str(res[0]), '3.4.5')

        # and let's see what happens when the version
        # doesn't match the regular expression
        # (\d+\.\d+(\.\d+)*)
        self._exes['gcc'] = b'very strange output'
        res = get_versions()
        self.assertEqual(res[0], None)

        # same thing for ld
        self._exes['ld'] = b'GNU ld version 2.17.50 20060824'
        res = get_versions()
        self.assertEqual(str(res[1]), '2.17.50')
        self._exes['ld'] = b'@(#)PROGRAM:ld PROJECT:ld64-77'
        res = get_versions()
        self.assertEqual(res[1], None)

        # and dllwrap
        self._exes['dllwrap'] = b'GNU dllwrap 2.17.50 20060824\nFSF'
        res = get_versions()
        self.assertEqual(str(res[2]), '2.17.50')
        self._exes['dllwrap'] = b'Cheese Wrap'
        res = get_versions()
        self.assertEqual(res[2], None)

    def test_get_msvcr(self):

        # none
        sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) '
                       '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]')
        self.assertEqual(get_msvcr(), None)

        # MSVC 7.0
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1300 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr70'])

        # MSVC 7.1
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1310 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr71'])

        # VS2005 / MSVC 8.0
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1400 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr80'])

        # VS2008 / MSVC 9.0
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1500 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr90'])

        # unknown
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1999 32 bits (Intel)]')
        self.assertRaises(ValueError, get_msvcr)
|
| 149 |
+
|
| 150 |
+
def test_suite():
    """Return a suite of every CygwinCCompilerTestCase test."""
    return unittest.makeSuite(CygwinCCompilerTestCase)

if __name__ == '__main__':
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_dep_util.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.dep_util."""
|
| 2 |
+
import unittest
|
| 3 |
+
import os
|
| 4 |
+
|
| 5 |
+
from distutils.dep_util import newer, newer_pairwise, newer_group
|
| 6 |
+
from distutils.errors import DistutilsFileError
|
| 7 |
+
from distutils.tests import support
|
| 8 |
+
from test.support import run_unittest
|
| 9 |
+
|
| 10 |
+
class DepUtilTestCase(support.TempdirManager, unittest.TestCase):
    """Tests for the newer/newer_pairwise/newer_group mtime helpers."""

    def test_newer(self):

        tmpdir = self.mkdtemp()
        new_file = os.path.join(tmpdir, 'new')
        # this source file is older than anything written during the test
        old_file = os.path.abspath(__file__)

        # Raise DistutilsFileError if 'new_file' does not exist.
        self.assertRaises(DistutilsFileError, newer, new_file, old_file)

        # Return true if 'new_file' exists and is more recently modified than
        # 'old_file', or if 'new_file' exists and 'old_file' doesn't.
        self.write_file(new_file)
        self.assertTrue(newer(new_file, 'I_dont_exist'))
        self.assertTrue(newer(new_file, old_file))

        # Return false if both exist and 'old_file' is the same age or younger
        # than 'new_file'.
        self.assertFalse(newer(old_file, new_file))

    def test_newer_pairwise(self):
        tmpdir = self.mkdtemp()
        sources = os.path.join(tmpdir, 'sources')
        targets = os.path.join(tmpdir, 'targets')
        os.mkdir(sources)
        os.mkdir(targets)
        one = os.path.join(sources, 'one')
        two = os.path.join(sources, 'two')
        three = os.path.abspath(__file__)  # I am the old file
        four = os.path.join(targets, 'four')
        self.write_file(one)
        self.write_file(two)
        self.write_file(four)

        # only the (one, three) pair is out of date
        self.assertEqual(newer_pairwise([one, two], [three, four]),
                         ([one],[three]))

    def test_newer_group(self):
        tmpdir = self.mkdtemp()
        sources = os.path.join(tmpdir, 'sources')
        os.mkdir(sources)
        one = os.path.join(sources, 'one')
        two = os.path.join(sources, 'two')
        three = os.path.join(sources, 'three')
        old_file = os.path.abspath(__file__)

        # return true if 'old_file' is out-of-date with respect to any file
        # listed in 'sources'.
        self.write_file(one)
        self.write_file(two)
        self.write_file(three)
        self.assertTrue(newer_group([one, two, three], old_file))
        self.assertFalse(newer_group([one, two, old_file], three))

        # missing handling
        os.remove(one)
        self.assertRaises(OSError, newer_group, [one, two, old_file], three)

        # 'ignore' drops missing sources; 'newer' treats them as newer
        self.assertFalse(newer_group([one, two, old_file], three,
                                     missing='ignore'))

        self.assertTrue(newer_group([one, two, old_file], three,
                                    missing='newer'))
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def test_suite():
    """Return a suite of every DepUtilTestCase test."""
    return unittest.makeSuite(DepUtilTestCase)

if __name__ == "__main__":
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_extension.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.extension."""
|
| 2 |
+
import unittest
|
| 3 |
+
import os
|
| 4 |
+
import warnings
|
| 5 |
+
|
| 6 |
+
from test.support import run_unittest
|
| 7 |
+
from test.support.warnings_helper import check_warnings
|
| 8 |
+
from distutils.extension import read_setup_file, Extension
|
| 9 |
+
|
| 10 |
+
class ExtensionTestCase(unittest.TestCase):
    """Tests for the Extension class and read_setup_file()."""

    def test_read_setup_file(self):
        # trying to read a Setup file
        # (sample extracted from the PyGame project)
        setup = os.path.join(os.path.dirname(__file__), 'Setup.sample')

        exts = read_setup_file(setup)
        names = [ext.name for ext in exts]
        names.sort()

        # here are the extensions read_setup_file should have created
        # out of the file
        wanted = ['_arraysurfarray', '_camera', '_numericsndarray',
                  '_numericsurfarray', 'base', 'bufferproxy', 'cdrom',
                  'color', 'constants', 'display', 'draw', 'event',
                  'fastevent', 'font', 'gfxdraw', 'image', 'imageext',
                  'joystick', 'key', 'mask', 'mixer', 'mixer_music',
                  'mouse', 'movie', 'overlay', 'pixelarray', 'pypm',
                  'rect', 'rwobject', 'scrap', 'surface', 'surflock',
                  'time', 'transform']

        self.assertEqual(names, wanted)

    def test_extension_init(self):
        # the first argument, which is the name, must be a string
        self.assertRaises(AssertionError, Extension, 1, [])
        ext = Extension('name', [])
        self.assertEqual(ext.name, 'name')

        # the second argument, which is the list of files, must
        # be a list of strings
        self.assertRaises(AssertionError, Extension, 'name', 'file')
        self.assertRaises(AssertionError, Extension, 'name', ['file', 1])
        ext = Extension('name', ['file1', 'file2'])
        self.assertEqual(ext.sources, ['file1', 'file2'])

        # others arguments have defaults
        for attr in ('include_dirs', 'define_macros', 'undef_macros',
                     'library_dirs', 'libraries', 'runtime_library_dirs',
                     'extra_objects', 'extra_compile_args', 'extra_link_args',
                     'export_symbols', 'swig_opts', 'depends'):
            self.assertEqual(getattr(ext, attr), [])

        self.assertEqual(ext.language, None)
        self.assertEqual(ext.optional, None)

        # if there are unknown keyword options, warn about them
        with check_warnings() as w:
            warnings.simplefilter('always')
            ext = Extension('name', ['file1', 'file2'], chic=True)

        self.assertEqual(len(w.warnings), 1)
        self.assertEqual(str(w.warnings[0].message),
                         "Unknown Extension options: 'chic'")
|
| 65 |
+
|
| 66 |
+
def test_suite():
    """Return a suite of every ExtensionTestCase test."""
    return unittest.makeSuite(ExtensionTestCase)

if __name__ == "__main__":
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_filelist.py
ADDED
|
@@ -0,0 +1,340 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.filelist."""
|
| 2 |
+
import os
|
| 3 |
+
import re
|
| 4 |
+
import unittest
|
| 5 |
+
from distutils import debug
|
| 6 |
+
from distutils.log import WARN
|
| 7 |
+
from distutils.errors import DistutilsTemplateError
|
| 8 |
+
from distutils.filelist import glob_to_re, translate_pattern, FileList
|
| 9 |
+
from distutils import filelist
|
| 10 |
+
|
| 11 |
+
from test.support import os_helper
|
| 12 |
+
from test.support import captured_stdout, run_unittest
|
| 13 |
+
from distutils.tests import support
|
| 14 |
+
|
| 15 |
+
MANIFEST_IN = """\
|
| 16 |
+
include ok
|
| 17 |
+
include xo
|
| 18 |
+
exclude xo
|
| 19 |
+
include foo.tmp
|
| 20 |
+
include buildout.cfg
|
| 21 |
+
global-include *.x
|
| 22 |
+
global-include *.txt
|
| 23 |
+
global-exclude *.tmp
|
| 24 |
+
recursive-include f *.oo
|
| 25 |
+
recursive-exclude global *.x
|
| 26 |
+
graft dir
|
| 27 |
+
prune dir3
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def make_local_path(s):
    """Return *s* with every '/' swapped for the platform separator."""
    return os.sep.join(s.split('/'))
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class FileListTestCase(support.LoggingSilencer,
                       unittest.TestCase):
    """Tests for distutils.filelist.FileList and its pattern helpers."""

    def assertNoWarnings(self):
        # No WARN-level log entries were emitted; reset for the next check.
        self.assertEqual(self.get_logs(WARN), [])
        self.clear_logs()

    def assertWarnings(self):
        # At least one WARN-level log entry was emitted; reset afterwards.
        self.assertGreater(len(self.get_logs(WARN)), 0)
        self.clear_logs()

    def test_glob_to_re(self):
        sep = os.sep
        if os.sep == '\\':
            sep = re.escape(os.sep)

        for glob, regex in (
            # simple cases
            ('foo*', r'(?s:foo[^%(sep)s]*)\Z'),
            ('foo?', r'(?s:foo[^%(sep)s])\Z'),
            ('foo??', r'(?s:foo[^%(sep)s][^%(sep)s])\Z'),
            # special cases
            (r'foo\\*', r'(?s:foo\\\\[^%(sep)s]*)\Z'),
            (r'foo\\\*', r'(?s:foo\\\\\\[^%(sep)s]*)\Z'),
            ('foo????', r'(?s:foo[^%(sep)s][^%(sep)s][^%(sep)s][^%(sep)s])\Z'),
            (r'foo\\??', r'(?s:foo\\\\[^%(sep)s][^%(sep)s])\Z')):
            regex = regex % {'sep': sep}
            self.assertEqual(glob_to_re(glob), regex)

    def test_process_template_line(self):
        # testing all MANIFEST.in template patterns
        file_list = FileList()
        l = make_local_path

        # simulated file list
        file_list.allfiles = ['foo.tmp', 'ok', 'xo', 'four.txt',
                              'buildout.cfg',
                              # filelist does not filter out VCS directories,
                              # it's sdist that does
                              l('.hg/last-message.txt'),
                              l('global/one.txt'),
                              l('global/two.txt'),
                              l('global/files.x'),
                              l('global/here.tmp'),
                              l('f/o/f.oo'),
                              l('dir/graft-one'),
                              l('dir/dir2/graft2'),
                              l('dir3/ok'),
                              l('dir3/sub/ok.txt'),
                              ]

        for line in MANIFEST_IN.split('\n'):
            if line.strip() == '':
                continue
            file_list.process_template_line(line)

        wanted = ['ok',
                  'buildout.cfg',
                  'four.txt',
                  l('.hg/last-message.txt'),
                  l('global/one.txt'),
                  l('global/two.txt'),
                  l('f/o/f.oo'),
                  l('dir/graft-one'),
                  l('dir/dir2/graft2'),
                  ]

        self.assertEqual(file_list.files, wanted)

    def test_debug_print(self):
        # debug_print is silent unless distutils.debug.DEBUG is set
        file_list = FileList()
        with captured_stdout() as stdout:
            file_list.debug_print('xxx')
        self.assertEqual(stdout.getvalue(), '')

        debug.DEBUG = True
        try:
            with captured_stdout() as stdout:
                file_list.debug_print('xxx')
            self.assertEqual(stdout.getvalue(), 'xxx\n')
        finally:
            debug.DEBUG = False

    def test_set_allfiles(self):
        file_list = FileList()
        files = ['a', 'b', 'c']
        file_list.set_allfiles(files)
        self.assertEqual(file_list.allfiles, files)

    def test_remove_duplicates(self):
        file_list = FileList()
        file_list.files = ['a', 'b', 'a', 'g', 'c', 'g']
        # files must be sorted beforehand (sdist does it)
        file_list.sort()
        file_list.remove_duplicates()
        self.assertEqual(file_list.files, ['a', 'b', 'c', 'g'])

    def test_translate_pattern(self):
        # not regex
        self.assertTrue(hasattr(
            translate_pattern('a', anchor=True, is_regex=False),
            'search'))

        # is a regex
        regex = re.compile('a')
        self.assertEqual(
            translate_pattern(regex, anchor=True, is_regex=True),
            regex)

        # plain string flagged as regex
        self.assertTrue(hasattr(
            translate_pattern('a', anchor=True, is_regex=True),
            'search'))

        # glob support
        self.assertTrue(translate_pattern(
            '*.py', anchor=True, is_regex=False).search('filelist.py'))

    def test_exclude_pattern(self):
        # return False if no match
        file_list = FileList()
        self.assertFalse(file_list.exclude_pattern('*.py'))

        # return True if files match
        file_list = FileList()
        file_list.files = ['a.py', 'b.py']
        self.assertTrue(file_list.exclude_pattern('*.py'))

        # test excludes
        file_list = FileList()
        file_list.files = ['a.py', 'a.txt']
        file_list.exclude_pattern('*.py')
        self.assertEqual(file_list.files, ['a.txt'])

    def test_include_pattern(self):
        # return False if no match
        file_list = FileList()
        file_list.set_allfiles([])
        self.assertFalse(file_list.include_pattern('*.py'))

        # return True if files match
        file_list = FileList()
        file_list.set_allfiles(['a.py', 'b.txt'])
        self.assertTrue(file_list.include_pattern('*.py'))

        # test * matches all files
        file_list = FileList()
        self.assertIsNone(file_list.allfiles)
        file_list.set_allfiles(['a.py', 'b.txt'])
        file_list.include_pattern('*')
        self.assertEqual(file_list.allfiles, ['a.py', 'b.txt'])

    def test_process_template(self):
        l = make_local_path
        # invalid lines
        file_list = FileList()
        for action in ('include', 'exclude', 'global-include',
                       'global-exclude', 'recursive-include',
                       'recursive-exclude', 'graft', 'prune', 'blarg'):
            self.assertRaises(DistutilsTemplateError,
                              file_list.process_template_line, action)

        # include
        file_list = FileList()
        file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')])

        file_list.process_template_line('include *.py')
        self.assertEqual(file_list.files, ['a.py'])
        self.assertNoWarnings()

        file_list.process_template_line('include *.rb')
        self.assertEqual(file_list.files, ['a.py'])
        self.assertWarnings()

        # exclude
        file_list = FileList()
        file_list.files = ['a.py', 'b.txt', l('d/c.py')]

        file_list.process_template_line('exclude *.py')
        self.assertEqual(file_list.files, ['b.txt', l('d/c.py')])
        self.assertNoWarnings()

        file_list.process_template_line('exclude *.rb')
        self.assertEqual(file_list.files, ['b.txt', l('d/c.py')])
        self.assertWarnings()

        # global-include
        file_list = FileList()
        file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')])

        file_list.process_template_line('global-include *.py')
        self.assertEqual(file_list.files, ['a.py', l('d/c.py')])
        self.assertNoWarnings()

        file_list.process_template_line('global-include *.rb')
        self.assertEqual(file_list.files, ['a.py', l('d/c.py')])
        self.assertWarnings()

        # global-exclude
        file_list = FileList()
        file_list.files = ['a.py', 'b.txt', l('d/c.py')]

        file_list.process_template_line('global-exclude *.py')
        self.assertEqual(file_list.files, ['b.txt'])
        self.assertNoWarnings()

        file_list.process_template_line('global-exclude *.rb')
        self.assertEqual(file_list.files, ['b.txt'])
        self.assertWarnings()

        # recursive-include
        file_list = FileList()
        file_list.set_allfiles(['a.py', l('d/b.py'), l('d/c.txt'),
                                l('d/d/e.py')])

        file_list.process_template_line('recursive-include d *.py')
        self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
        self.assertNoWarnings()

        file_list.process_template_line('recursive-include e *.py')
        self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
        self.assertWarnings()

        # recursive-exclude
        file_list = FileList()
        file_list.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')]

        file_list.process_template_line('recursive-exclude d *.py')
        self.assertEqual(file_list.files, ['a.py', l('d/c.txt')])
        self.assertNoWarnings()

        file_list.process_template_line('recursive-exclude e *.py')
        self.assertEqual(file_list.files, ['a.py', l('d/c.txt')])
        self.assertWarnings()

        # graft
        file_list = FileList()
        file_list.set_allfiles(['a.py', l('d/b.py'), l('d/d/e.py'),
                                l('f/f.py')])

        file_list.process_template_line('graft d')
        self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
        self.assertNoWarnings()

        file_list.process_template_line('graft e')
        self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
        self.assertWarnings()

        # prune
        file_list = FileList()
        file_list.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')]

        file_list.process_template_line('prune d')
        self.assertEqual(file_list.files, ['a.py', l('f/f.py')])
        self.assertNoWarnings()

        file_list.process_template_line('prune e')
        self.assertEqual(file_list.files, ['a.py', l('f/f.py')])
        self.assertWarnings()
|
| 295 |
+
|
| 296 |
+
|
| 297 |
+
class FindAllTestCase(unittest.TestCase):
    """Tests for distutils.filelist.findall."""

    @os_helper.skip_unless_symlink
    def test_missing_symlink(self):
        # a dangling symlink must not appear in the results
        with os_helper.temp_cwd():
            os.symlink('foo', 'bar')
            self.assertEqual(filelist.findall(), [])

    def test_basic_discovery(self):
        """
        When findall is called with no parameters or with
        '.' as the parameter, the dot should be omitted from
        the results.
        """
        with os_helper.temp_cwd():
            os.mkdir('foo')
            file1 = os.path.join('foo', 'file1.txt')
            os_helper.create_empty_file(file1)
            os.mkdir('bar')
            file2 = os.path.join('bar', 'file2.txt')
            os_helper.create_empty_file(file2)
            expected = [file2, file1]
            self.assertEqual(sorted(filelist.findall()), expected)

    def test_non_local_discovery(self):
        """
        When findall is called with another path, the full
        path name should be returned.
        """
        with os_helper.temp_dir() as temp_dir:
            file1 = os.path.join(temp_dir, 'file1.txt')
            os_helper.create_empty_file(file1)
            expected = [file1]
            self.assertEqual(filelist.findall(temp_dir), expected)
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
def test_suite():
    """Return a combined suite of the FileList and findall tests."""
    return unittest.TestSuite([
        unittest.makeSuite(FileListTestCase),
        unittest.makeSuite(FindAllTestCase),
    ])


if __name__ == "__main__":
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_install.py
ADDED
|
@@ -0,0 +1,260 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.install."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
import unittest
|
| 6 |
+
import site
|
| 7 |
+
|
| 8 |
+
from test.support import captured_stdout, run_unittest
|
| 9 |
+
|
| 10 |
+
from distutils import sysconfig
|
| 11 |
+
from distutils.command.install import install, HAS_USER_SITE
|
| 12 |
+
from distutils.command import install as install_module
|
| 13 |
+
from distutils.command.build_ext import build_ext
|
| 14 |
+
from distutils.command.install import INSTALL_SCHEMES
|
| 15 |
+
from distutils.core import Distribution
|
| 16 |
+
from distutils.errors import DistutilsOptionError
|
| 17 |
+
from distutils.extension import Extension
|
| 18 |
+
|
| 19 |
+
from distutils.tests import support
|
| 20 |
+
from test import support as test_support
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def _make_ext_name(modname):
    """Return the platform filename for extension module *modname*.

    Appends sysconfig's EXT_SUFFIX (e.g. '.cpython-310-x86_64-linux-gnu.so').
    """
    return modname + sysconfig.get_config_var('EXT_SUFFIX')
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class InstallTestCase(support.TempdirManager,
|
| 28 |
+
support.EnvironGuard,
|
| 29 |
+
support.LoggingSilencer,
|
| 30 |
+
unittest.TestCase):
|
| 31 |
+
|
| 32 |
+
def setUp(self):
|
| 33 |
+
super().setUp()
|
| 34 |
+
self._backup_config_vars = dict(sysconfig._config_vars)
|
| 35 |
+
|
| 36 |
+
def tearDown(self):
|
| 37 |
+
super().tearDown()
|
| 38 |
+
sysconfig._config_vars.clear()
|
| 39 |
+
sysconfig._config_vars.update(self._backup_config_vars)
|
| 40 |
+
|
| 41 |
+
def test_home_installation_scheme(self):
|
| 42 |
+
# This ensure two things:
|
| 43 |
+
# - that --home generates the desired set of directory names
|
| 44 |
+
# - test --home is supported on all platforms
|
| 45 |
+
builddir = self.mkdtemp()
|
| 46 |
+
destination = os.path.join(builddir, "installation")
|
| 47 |
+
|
| 48 |
+
dist = Distribution({"name": "foopkg"})
|
| 49 |
+
# script_name need not exist, it just need to be initialized
|
| 50 |
+
dist.script_name = os.path.join(builddir, "setup.py")
|
| 51 |
+
dist.command_obj["build"] = support.DummyCommand(
|
| 52 |
+
build_base=builddir,
|
| 53 |
+
build_lib=os.path.join(builddir, "lib"),
|
| 54 |
+
)
|
| 55 |
+
|
| 56 |
+
cmd = install(dist)
|
| 57 |
+
cmd.home = destination
|
| 58 |
+
cmd.ensure_finalized()
|
| 59 |
+
|
| 60 |
+
self.assertEqual(cmd.install_base, destination)
|
| 61 |
+
self.assertEqual(cmd.install_platbase, destination)
|
| 62 |
+
|
| 63 |
+
def check_path(got, expected):
|
| 64 |
+
got = os.path.normpath(got)
|
| 65 |
+
expected = os.path.normpath(expected)
|
| 66 |
+
self.assertEqual(got, expected)
|
| 67 |
+
|
| 68 |
+
libdir = os.path.join(destination, "lib", "python")
|
| 69 |
+
check_path(cmd.install_lib, libdir)
|
| 70 |
+
platlibdir = os.path.join(destination, sys.platlibdir, "python")
|
| 71 |
+
check_path(cmd.install_platlib, platlibdir)
|
| 72 |
+
check_path(cmd.install_purelib, libdir)
|
| 73 |
+
check_path(cmd.install_headers,
|
| 74 |
+
os.path.join(destination, "include", "python", "foopkg"))
|
| 75 |
+
check_path(cmd.install_scripts, os.path.join(destination, "bin"))
|
| 76 |
+
check_path(cmd.install_data, destination)
|
| 77 |
+
|
| 78 |
+
    @unittest.skipUnless(HAS_USER_SITE, 'need user site')
    def test_user_site(self):
        """Check the ``--user`` install scheme.

        Redirects ``site.USER_BASE``/``site.USER_SITE`` (and the copies
        held by the install module) into a temp dir, then verifies that
        finalizing the command creates both directories and publishes
        ``userbase``/``usersite`` in ``cmd.config_vars``.
        """
        # test install with --user
        # preparing the environment for the test: save the real values,
        # then point both site and install_module at temp locations.
        self.old_user_base = site.USER_BASE
        self.old_user_site = site.USER_SITE
        self.tmpdir = self.mkdtemp()
        self.user_base = os.path.join(self.tmpdir, 'B')
        self.user_site = os.path.join(self.tmpdir, 'S')
        site.USER_BASE = self.user_base
        site.USER_SITE = self.user_site
        install_module.USER_BASE = self.user_base
        install_module.USER_SITE = self.user_site

        # expanduser is patched so any '~' lookup resolves to the temp dir.
        def _expanduser(path):
            return self.tmpdir
        self.old_expand = os.path.expanduser
        os.path.expanduser = _expanduser

        def cleanup():
            # Restore every global mutated above, even if the test fails.
            site.USER_BASE = self.old_user_base
            site.USER_SITE = self.old_user_site
            install_module.USER_BASE = self.old_user_base
            install_module.USER_SITE = self.old_user_site
            os.path.expanduser = self.old_expand

        self.addCleanup(cleanup)

        if HAS_USER_SITE:
            # Both per-user schemes must be registered.
            for key in ('nt_user', 'unix_user'):
                self.assertIn(key, INSTALL_SCHEMES)

        dist = Distribution({'name': 'xx'})
        cmd = install(dist)

        # making sure the user option is there
        # (NOTE: 'lable' is the original's typo for 'label' — kept as-is)
        options = [name for name, short, lable in
                   cmd.user_options]
        self.assertIn('user', options)

        # setting a value
        cmd.user = 1

        # user base and site shouldn't be created yet
        self.assertFalse(os.path.exists(self.user_base))
        self.assertFalse(os.path.exists(self.user_site))

        # let's run finalize
        cmd.ensure_finalized()

        # now they should
        self.assertTrue(os.path.exists(self.user_base))
        self.assertTrue(os.path.exists(self.user_site))

        self.assertIn('userbase', cmd.config_vars)
        self.assertIn('usersite', cmd.config_vars)
|
| 134 |
+
|
| 135 |
+
def test_handle_extra_path(self):
|
| 136 |
+
dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'})
|
| 137 |
+
cmd = install(dist)
|
| 138 |
+
|
| 139 |
+
# two elements
|
| 140 |
+
cmd.handle_extra_path()
|
| 141 |
+
self.assertEqual(cmd.extra_path, ['path', 'dirs'])
|
| 142 |
+
self.assertEqual(cmd.extra_dirs, 'dirs')
|
| 143 |
+
self.assertEqual(cmd.path_file, 'path')
|
| 144 |
+
|
| 145 |
+
# one element
|
| 146 |
+
cmd.extra_path = ['path']
|
| 147 |
+
cmd.handle_extra_path()
|
| 148 |
+
self.assertEqual(cmd.extra_path, ['path'])
|
| 149 |
+
self.assertEqual(cmd.extra_dirs, 'path')
|
| 150 |
+
self.assertEqual(cmd.path_file, 'path')
|
| 151 |
+
|
| 152 |
+
# none
|
| 153 |
+
dist.extra_path = cmd.extra_path = None
|
| 154 |
+
cmd.handle_extra_path()
|
| 155 |
+
self.assertEqual(cmd.extra_path, None)
|
| 156 |
+
self.assertEqual(cmd.extra_dirs, '')
|
| 157 |
+
self.assertEqual(cmd.path_file, None)
|
| 158 |
+
|
| 159 |
+
# three elements (no way !)
|
| 160 |
+
cmd.extra_path = 'path,dirs,again'
|
| 161 |
+
self.assertRaises(DistutilsOptionError, cmd.handle_extra_path)
|
| 162 |
+
|
| 163 |
+
def test_finalize_options(self):
|
| 164 |
+
dist = Distribution({'name': 'xx'})
|
| 165 |
+
cmd = install(dist)
|
| 166 |
+
|
| 167 |
+
# must supply either prefix/exec-prefix/home or
|
| 168 |
+
# install-base/install-platbase -- not both
|
| 169 |
+
cmd.prefix = 'prefix'
|
| 170 |
+
cmd.install_base = 'base'
|
| 171 |
+
self.assertRaises(DistutilsOptionError, cmd.finalize_options)
|
| 172 |
+
|
| 173 |
+
# must supply either home or prefix/exec-prefix -- not both
|
| 174 |
+
cmd.install_base = None
|
| 175 |
+
cmd.home = 'home'
|
| 176 |
+
self.assertRaises(DistutilsOptionError, cmd.finalize_options)
|
| 177 |
+
|
| 178 |
+
# can't combine user with prefix/exec_prefix/home or
|
| 179 |
+
# install_(plat)base
|
| 180 |
+
cmd.prefix = None
|
| 181 |
+
cmd.user = 'user'
|
| 182 |
+
self.assertRaises(DistutilsOptionError, cmd.finalize_options)
|
| 183 |
+
|
| 184 |
+
def test_record(self):
|
| 185 |
+
install_dir = self.mkdtemp()
|
| 186 |
+
project_dir, dist = self.create_dist(py_modules=['hello'],
|
| 187 |
+
scripts=['sayhi'])
|
| 188 |
+
os.chdir(project_dir)
|
| 189 |
+
self.write_file('hello.py', "def main(): print('o hai')")
|
| 190 |
+
self.write_file('sayhi', 'from hello import main; main()')
|
| 191 |
+
|
| 192 |
+
cmd = install(dist)
|
| 193 |
+
dist.command_obj['install'] = cmd
|
| 194 |
+
cmd.root = install_dir
|
| 195 |
+
cmd.record = os.path.join(project_dir, 'filelist')
|
| 196 |
+
cmd.ensure_finalized()
|
| 197 |
+
cmd.run()
|
| 198 |
+
|
| 199 |
+
f = open(cmd.record)
|
| 200 |
+
try:
|
| 201 |
+
content = f.read()
|
| 202 |
+
finally:
|
| 203 |
+
f.close()
|
| 204 |
+
|
| 205 |
+
found = [os.path.basename(line) for line in content.splitlines()]
|
| 206 |
+
expected = ['hello.py', 'hello.%s.pyc' % sys.implementation.cache_tag,
|
| 207 |
+
'sayhi',
|
| 208 |
+
'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]]
|
| 209 |
+
self.assertEqual(found, expected)
|
| 210 |
+
|
| 211 |
+
def test_record_extensions(self):
|
| 212 |
+
cmd = test_support.missing_compiler_executable()
|
| 213 |
+
if cmd is not None:
|
| 214 |
+
self.skipTest('The %r command is not found' % cmd)
|
| 215 |
+
install_dir = self.mkdtemp()
|
| 216 |
+
project_dir, dist = self.create_dist(ext_modules=[
|
| 217 |
+
Extension('xx', ['xxmodule.c'])])
|
| 218 |
+
os.chdir(project_dir)
|
| 219 |
+
support.copy_xxmodule_c(project_dir)
|
| 220 |
+
|
| 221 |
+
buildextcmd = build_ext(dist)
|
| 222 |
+
support.fixup_build_ext(buildextcmd)
|
| 223 |
+
buildextcmd.ensure_finalized()
|
| 224 |
+
|
| 225 |
+
cmd = install(dist)
|
| 226 |
+
dist.command_obj['install'] = cmd
|
| 227 |
+
dist.command_obj['build_ext'] = buildextcmd
|
| 228 |
+
cmd.root = install_dir
|
| 229 |
+
cmd.record = os.path.join(project_dir, 'filelist')
|
| 230 |
+
cmd.ensure_finalized()
|
| 231 |
+
cmd.run()
|
| 232 |
+
|
| 233 |
+
f = open(cmd.record)
|
| 234 |
+
try:
|
| 235 |
+
content = f.read()
|
| 236 |
+
finally:
|
| 237 |
+
f.close()
|
| 238 |
+
|
| 239 |
+
found = [os.path.basename(line) for line in content.splitlines()]
|
| 240 |
+
expected = [_make_ext_name('xx'),
|
| 241 |
+
'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]]
|
| 242 |
+
self.assertEqual(found, expected)
|
| 243 |
+
|
| 244 |
+
    def test_debug_mode(self):
        """Re-run test_record with install_module.DEBUG enabled and
        check that extra log records are produced."""
        # this covers the code called when DEBUG is set
        old_logs_len = len(self.logs)
        install_module.DEBUG = True
        try:
            # DEBUG mode also prints to stdout; capture it so the test
            # run stays quiet.
            with captured_stdout():
                self.test_record()
        finally:
            # Always restore the module-level flag.
            install_module.DEBUG = False
        self.assertGreater(len(self.logs), old_logs_len)
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
def test_suite():
    """Return this module's tests as a suite (regrtest entry point)."""
    return unittest.makeSuite(InstallTestCase)

if __name__ == "__main__":
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_install_data.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.install_data."""
|
| 2 |
+
import os
|
| 3 |
+
import unittest
|
| 4 |
+
|
| 5 |
+
from distutils.command.install_data import install_data
|
| 6 |
+
from distutils.tests import support
|
| 7 |
+
from test.support import run_unittest
|
| 8 |
+
|
| 9 |
+
class InstallDataTestCase(support.TempdirManager,
                          support.LoggingSilencer,
                          support.EnvironGuard,
                          unittest.TestCase):
    """End-to-end checks for the ``install_data`` command."""

    def test_simple_run(self):
        """Run install_data over plain files, (dir, [files]) tuples,
        the warn_dir flag, and a root + empty-dir configuration,
        checking get_outputs() and the files on disk each time."""
        pkg_dir, dist = self.create_dist()
        cmd = install_data(dist)
        cmd.install_dir = inst = os.path.join(pkg_dir, 'inst')

        # data_files can contain
        # - simple files
        # - a tuple with a path, and a list of file
        one = os.path.join(pkg_dir, 'one')
        self.write_file(one, 'xxx')
        inst2 = os.path.join(pkg_dir, 'inst2')
        two = os.path.join(pkg_dir, 'two')
        self.write_file(two, 'xxx')

        cmd.data_files = [one, (inst2, [two])]
        self.assertEqual(cmd.get_inputs(), [one, (inst2, [two])])

        # let's run the command
        cmd.ensure_finalized()
        cmd.run()

        # let's check the result: both files installed, each at its
        # respective destination.
        self.assertEqual(len(cmd.get_outputs()), 2)
        rtwo = os.path.split(two)[-1]
        self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
        rone = os.path.split(one)[-1]
        self.assertTrue(os.path.exists(os.path.join(inst, rone)))
        cmd.outfiles = []

        # let's try with warn_dir one (warn when a target dir is not
        # under the installation prefix) — results must be unchanged.
        cmd.warn_dir = 1
        cmd.ensure_finalized()
        cmd.run()

        # let's check the result
        self.assertEqual(len(cmd.get_outputs()), 2)
        self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
        self.assertTrue(os.path.exists(os.path.join(inst, rone)))
        cmd.outfiles = []

        # now using root and empty dir: a relative target ('inst3') and
        # a directory with no files (inst4) must still be handled.
        cmd.root = os.path.join(pkg_dir, 'root')
        inst3 = os.path.join(cmd.install_dir, 'inst3')
        inst4 = os.path.join(pkg_dir, 'inst4')
        three = os.path.join(cmd.install_dir, 'three')
        self.write_file(three, 'xx')
        cmd.data_files = [one, (inst2, [two]),
                          ('inst3', [three]),
                          (inst4, [])]
        cmd.ensure_finalized()
        cmd.run()

        # let's check the result
        self.assertEqual(len(cmd.get_outputs()), 4)
        self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
        self.assertTrue(os.path.exists(os.path.join(inst, rone)))
|
| 70 |
+
|
| 71 |
+
def test_suite():
    """Return this module's tests as a suite (regrtest entry point)."""
    return unittest.makeSuite(InstallDataTestCase)

if __name__ == "__main__":
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_install_headers.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.install_headers."""
|
| 2 |
+
import os
|
| 3 |
+
import unittest
|
| 4 |
+
|
| 5 |
+
from distutils.command.install_headers import install_headers
|
| 6 |
+
from distutils.tests import support
|
| 7 |
+
from test.support import run_unittest
|
| 8 |
+
|
| 9 |
+
class InstallHeadersTestCase(support.TempdirManager,
                             support.LoggingSilencer,
                             support.EnvironGuard,
                             unittest.TestCase):
    """Smoke test for the ``install_headers`` command."""

    def test_simple_run(self):
        """Install two header files and verify both are reported as
        inputs and outputs."""
        # Create two (empty) header files in a scratch directory.
        src_dir = self.mkdtemp()
        headers = [os.path.join(src_dir, name)
                   for name in ('header1', 'header2')]
        for header in headers:
            self.write_file(header)

        pkg_dir, dist = self.create_dist(headers=headers)
        cmd = install_headers(dist)
        self.assertEqual(cmd.get_inputs(), headers)

        # Run the command against a fresh install directory.
        cmd.install_dir = os.path.join(pkg_dir, 'inst')
        cmd.ensure_finalized()
        cmd.run()

        # Both headers must show up in the command's outputs.
        self.assertEqual(len(cmd.get_outputs()), 2)
|
| 34 |
+
|
| 35 |
+
def test_suite():
    """Return this module's tests as a suite (regrtest entry point)."""
    return unittest.makeSuite(InstallHeadersTestCase)

if __name__ == "__main__":
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_msvccompiler.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils._msvccompiler."""
|
| 2 |
+
import sys
|
| 3 |
+
import unittest
|
| 4 |
+
import os
|
| 5 |
+
|
| 6 |
+
from distutils.errors import DistutilsPlatformError
|
| 7 |
+
from distutils.tests import support
|
| 8 |
+
from test.support import run_unittest
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
# None on Windows (tests run), otherwise the skip reason.
SKIP_MESSAGE = (None if sys.platform == "win32" else
                "These tests are only for win32")

@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE)
class msvccompilerTestCase(support.TempdirManager,
                            unittest.TestCase):
    """Windows-only checks for distutils._msvccompiler."""

    def test_no_compiler(self):
        """_get_vc_env must raise DistutilsPlatformError when no
        vcvarsall.bat can be located."""
        import distutils._msvccompiler as _msvccompiler
        # makes sure query_vcvarsall raises
        # a DistutilsPlatformError if the compiler
        # is not found
        def _find_vcvarsall(plat_spec):
            # Simulate "not found" regardless of the requested platform.
            return None, None

        old_find_vcvarsall = _msvccompiler._find_vcvarsall
        _msvccompiler._find_vcvarsall = _find_vcvarsall
        try:
            self.assertRaises(DistutilsPlatformError,
                              _msvccompiler._get_vc_env,
                              'wont find this version')
        finally:
            # Restore the real lookup even if the assertion fails.
            _msvccompiler._find_vcvarsall = old_find_vcvarsall

    def test_get_vc_env_unicode(self):
        """Non-ASCII environment variables must survive the round trip
        through _get_vc_env (which lower-cases the names)."""
        import distutils._msvccompiler as _msvccompiler

        test_var = 'ṰḖṤṪ┅ṼẨṜ'
        test_value = '₃⁴₅'

        # Ensure we don't early exit from _get_vc_env
        old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None)
        os.environ[test_var] = test_value
        try:
            env = _msvccompiler._get_vc_env('x86')
            self.assertIn(test_var.lower(), env)
            self.assertEqual(test_value, env[test_var.lower()])
        finally:
            os.environ.pop(test_var)
            if old_distutils_use_sdk:
                os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk

    def test_get_vc2017(self):
        """If VS 2017+ is installed, _find_vc2017 must report a version
        >= 15 and an existing directory; otherwise skip."""
        import distutils._msvccompiler as _msvccompiler

        # This function cannot be mocked, so pass it if we find VS 2017
        # and mark it skipped if we do not.
        version, path = _msvccompiler._find_vc2017()
        if version:
            self.assertGreaterEqual(version, 15)
            self.assertTrue(os.path.isdir(path))
        else:
            raise unittest.SkipTest("VS 2017 is not installed")

    def test_get_vc2015(self):
        """If VS 2015+ is installed, _find_vc2015 must report a version
        >= 14 and an existing directory; otherwise skip."""
        import distutils._msvccompiler as _msvccompiler

        # This function cannot be mocked, so pass it if we find VS 2015
        # and mark it skipped if we do not.
        version, path = _msvccompiler._find_vc2015()
        if version:
            self.assertGreaterEqual(version, 14)
            self.assertTrue(os.path.isdir(path))
        else:
            raise unittest.SkipTest("VS 2015 is not installed")
|
| 76 |
+
|
| 77 |
+
def test_suite():
    """Return this module's tests as a suite (regrtest entry point)."""
    return unittest.makeSuite(msvccompilerTestCase)

if __name__ == "__main__":
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_register.py
ADDED
|
@@ -0,0 +1,324 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.register."""
|
| 2 |
+
import os
|
| 3 |
+
import unittest
|
| 4 |
+
import getpass
|
| 5 |
+
import urllib
|
| 6 |
+
import warnings
|
| 7 |
+
|
| 8 |
+
from test.support import run_unittest
|
| 9 |
+
from test.support.warnings_helper import check_warnings
|
| 10 |
+
|
| 11 |
+
from distutils.command import register as register_module
|
| 12 |
+
from distutils.command.register import register
|
| 13 |
+
from distutils.errors import DistutilsSetupError
|
| 14 |
+
from distutils.log import INFO
|
| 15 |
+
|
| 16 |
+
from distutils.tests.test_config import BasePyPIRCCommandTestCase
|
| 17 |
+
|
| 18 |
+
try:
|
| 19 |
+
import docutils
|
| 20 |
+
except ImportError:
|
| 21 |
+
docutils = None
|
| 22 |
+
|
| 23 |
+
# A .pypirc whose server section omits the password; register must then
# prompt for one (patched getpass returns 'password' in these tests).
PYPIRC_NOPASSWORD = """\
[distutils]

index-servers =
    server1

[server1]
username:me
"""

# Exact file content test_create_pypirc expects register to write.
WANTED_PYPIRC = """\
[distutils]
index-servers =
    pypi

[pypi]
username:tarek
password:password
"""
|
| 42 |
+
|
| 43 |
+
class Inputs(object):
    """Fakes user inputs.

    Each call returns the next canned answer in order; calling past the
    end raises IndexError (the cursor still advances).
    """

    def __init__(self, *answers):
        # Canned answers, consumed one per call.
        self.answers = answers
        self.index = 0

    def __call__(self, prompt=''):
        # The prompt is accepted for input()-compatibility but ignored.
        position = self.index
        self.index = position + 1
        return self.answers[position]
|
| 54 |
+
|
| 55 |
+
class FakeOpener(object):
    """Fakes a PyPI server.

    Doubles as the opener factory and the opener/response itself:
    calling the instance (as build_opener) returns it, open() records
    the request and returns it, and read()/getheader() play the
    response.
    """

    def __init__(self):
        # Every request passed to open(), in order.
        self.reqs = []

    def __call__(self, *args):
        # Stand-in for urllib.request.build_opener.
        return self

    def open(self, req, data=None, timeout=None):
        self.reqs.append(req)
        return self

    def read(self):
        # Fixed response body.
        return b'xxx'

    def getheader(self, name, default=None):
        # Only Content-Type is ever served; lookup is case-insensitive.
        if name.lower() == 'content-type':
            return 'text/plain; charset=utf-8'
        return default
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class RegisterTestCase(BasePyPIRCCommandTestCase):
    """Tests for the ``register`` command, run against a FakeOpener so
    no network traffic ever happens."""

    def setUp(self):
        super(RegisterTestCase, self).setUp()
        # patching the password prompt
        self._old_getpass = getpass.getpass
        def _getpass(prompt):
            return 'password'
        getpass.getpass = _getpass
        # Reset urllib's cached opener and route build_opener to the fake.
        urllib.request._opener = None
        self.old_opener = urllib.request.build_opener
        self.conn = urllib.request.build_opener = FakeOpener()

    def tearDown(self):
        # Undo every patch from setUp.
        getpass.getpass = self._old_getpass
        urllib.request._opener = None
        urllib.request.build_opener = self.old_opener
        super(RegisterTestCase, self).tearDown()

    def _get_cmd(self, metadata=None):
        """Return a register command over a dist with the given (or a
        minimal complete) metadata dict."""
        if metadata is None:
            metadata = {'url': 'xxx', 'author': 'xxx',
                        'author_email': 'xxx',
                        'name': 'xxx', 'version': 'xxx'}
        pkg_info, dist = self.create_dist(**metadata)
        return register(dist)

    def test_create_pypirc(self):
        """register must write a .pypirc when the user asks to save the
        login, and reuse it silently on the next run."""
        # this test makes sure a .pypirc file
        # is created when requested.

        # let's create a register instance
        cmd = self._get_cmd()

        # we shouldn't have a .pypirc file yet
        self.assertFalse(os.path.exists(self.rc))

        # patching input and getpass.getpass
        # so register gets happy
        #
        # Here's what we are faking :
        # use your existing login (choice 1.)
        # Username : 'tarek'
        # Password : 'password'
        # Save your login (y/N)? : 'y'
        inputs = Inputs('1', 'tarek', 'y')
        register_module.input = inputs.__call__
        # let's run the command
        try:
            cmd.run()
        finally:
            del register_module.input

        # we should have a brand new .pypirc file
        self.assertTrue(os.path.exists(self.rc))

        # with the content similar to WANTED_PYPIRC
        f = open(self.rc)
        try:
            content = f.read()
            self.assertEqual(content, WANTED_PYPIRC)
        finally:
            f.close()

        # now let's make sure the .pypirc file generated
        # really works : we shouldn't be asked anything
        # if we run the command again
        def _no_way(prompt=''):
            # Any prompt at this point is a test failure.
            raise AssertionError(prompt)
        register_module.input = _no_way

        cmd.show_response = 1
        cmd.run()

        # let's see what the server received : we should
        # have 2 similar requests
        self.assertEqual(len(self.conn.reqs), 2)
        req1 = dict(self.conn.reqs[0].headers)
        req2 = dict(self.conn.reqs[1].headers)

        self.assertEqual(req1['Content-length'], '1374')
        self.assertEqual(req2['Content-length'], '1374')
        self.assertIn(b'xxx', self.conn.reqs[1].data)

    def test_password_not_in_file(self):
        """A .pypirc without a password must trigger the (patched)
        getpass prompt and store the result on the distribution."""
        self.write_file(self.rc, PYPIRC_NOPASSWORD)
        cmd = self._get_cmd()
        cmd._set_config()
        cmd.finalize_options()
        cmd.send_metadata()

        # dist.password should be set
        # therefore used afterwards by other commands
        self.assertEqual(cmd.distribution.password, 'password')

    def test_registering(self):
        """Choice 2 (register a new user) sends exactly one request."""
        # this test runs choice 2
        cmd = self._get_cmd()
        inputs = Inputs('2', 'tarek', 'tarek@ziade.org')
        register_module.input = inputs.__call__
        try:
            # let's run the command
            cmd.run()
        finally:
            del register_module.input

        # we should have send a request
        self.assertEqual(len(self.conn.reqs), 1)
        req = self.conn.reqs[0]
        headers = dict(req.headers)
        self.assertEqual(headers['Content-length'], '608')
        self.assertIn(b'tarek', req.data)

    def test_password_reset(self):
        """Choice 3 (password reset) sends exactly one request."""
        # this test runs choice 3
        cmd = self._get_cmd()
        inputs = Inputs('3', 'tarek@ziade.org')
        register_module.input = inputs.__call__
        try:
            # let's run the command
            cmd.run()
        finally:
            del register_module.input

        # we should have send a request
        self.assertEqual(len(self.conn.reqs), 1)
        req = self.conn.reqs[0]
        headers = dict(req.headers)
        self.assertEqual(headers['Content-length'], '290')
        self.assertIn(b'tarek', req.data)

    @unittest.skipUnless(docutils is not None, 'needs docutils')
    def test_strict(self):
        """With --strict, register refuses incomplete metadata or a
        long_description that is not valid reST; without it, both pass."""
        # testing the script option
        # when on, the register command stops if
        # the metadata is incomplete or if
        # long_description is not reSt compliant

        # empty metadata
        cmd = self._get_cmd({})
        cmd.ensure_finalized()
        cmd.strict = 1
        self.assertRaises(DistutilsSetupError, cmd.run)

        # metadata are OK but long_description is broken
        # (the '==' underline is shorter than the title)
        metadata = {'url': 'xxx', 'author': 'xxx',
                    'author_email': 'éxéxé',
                    'name': 'xxx', 'version': 'xxx',
                    'long_description': 'title\n==\n\ntext'}

        cmd = self._get_cmd(metadata)
        cmd.ensure_finalized()
        cmd.strict = 1
        self.assertRaises(DistutilsSetupError, cmd.run)

        # now something that works
        metadata['long_description'] = 'title\n=====\n\ntext'
        cmd = self._get_cmd(metadata)
        cmd.ensure_finalized()
        cmd.strict = 1
        inputs = Inputs('1', 'tarek', 'y')
        register_module.input = inputs.__call__
        # let's run the command
        try:
            cmd.run()
        finally:
            del register_module.input

        # strict is not by default
        cmd = self._get_cmd()
        cmd.ensure_finalized()
        inputs = Inputs('1', 'tarek', 'y')
        register_module.input = inputs.__call__
        # let's run the command
        try:
            cmd.run()
        finally:
            del register_module.input

        # and finally a Unicode test (bug #12114)
        metadata = {'url': 'xxx', 'author': '\u00c9ric',
                    'author_email': 'xxx', 'name': 'xxx',
                    'version': 'xxx',
                    'description': 'Something about esszet \u00df',
                    'long_description': 'More things about esszet \u00df'}

        cmd = self._get_cmd(metadata)
        cmd.ensure_finalized()
        cmd.strict = 1
        inputs = Inputs('1', 'tarek', 'y')
        register_module.input = inputs.__call__
        # let's run the command
        try:
            cmd.run()
        finally:
            del register_module.input

    @unittest.skipUnless(docutils is not None, 'needs docutils')
    def test_register_invalid_long_description(self):
        """Sphinx-only roles are not plain reST; --strict must reject
        them."""
        description = ':funkie:`str`'  # mimic Sphinx-specific markup
        metadata = {'url': 'xxx', 'author': 'xxx',
                    'author_email': 'xxx',
                    'name': 'xxx', 'version': 'xxx',
                    'long_description': description}
        cmd = self._get_cmd(metadata)
        cmd.ensure_finalized()
        cmd.strict = True
        inputs = Inputs('2', 'tarek', 'tarek@ziade.org')
        register_module.input = inputs
        self.addCleanup(delattr, register_module, 'input')

        self.assertRaises(DistutilsSetupError, cmd.run)

    def test_check_metadata_deprecated(self):
        """check_metadata must emit exactly one deprecation warning."""
        # makes sure make_metadata is deprecated
        cmd = self._get_cmd()
        with check_warnings() as w:
            warnings.simplefilter("always")
            cmd.check_metadata()
            self.assertEqual(len(w.warnings), 1)

    def test_list_classifiers(self):
        """--list-classifiers fetches and logs the classifier list
        (the fake server answers 'xxx')."""
        cmd = self._get_cmd()
        cmd.list_classifiers = 1
        cmd.run()
        results = self.get_logs(INFO)
        self.assertEqual(results, ['running check', 'xxx'])

    def test_show_response(self):
        """--show-response logs the server reply framed by dashes."""
        # test that the --show-response option return a well formatted response
        cmd = self._get_cmd()
        inputs = Inputs('1', 'tarek', 'y')
        register_module.input = inputs.__call__
        cmd.show_response = 1
        try:
            cmd.run()
        finally:
            del register_module.input

        results = self.get_logs(INFO)
        self.assertEqual(results[3], 75 * '-' + '\nxxx\n' + 75 * '-')
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def test_suite():
    """Return this module's tests as a suite (regrtest entry point)."""
    return unittest.makeSuite(RegisterTestCase)

if __name__ == "__main__":
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_sysconfig.py
ADDED
|
@@ -0,0 +1,277 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.sysconfig."""
|
| 2 |
+
import contextlib
|
| 3 |
+
import os
|
| 4 |
+
import shutil
|
| 5 |
+
import subprocess
|
| 6 |
+
import sys
|
| 7 |
+
import textwrap
|
| 8 |
+
import unittest
|
| 9 |
+
|
| 10 |
+
from distutils import sysconfig
|
| 11 |
+
from distutils.ccompiler import get_default_compiler
|
| 12 |
+
from distutils.tests import support
|
| 13 |
+
from test.support import run_unittest, swap_item
|
| 14 |
+
from test.support.os_helper import TESTFN
|
| 15 |
+
from test.support.warnings_helper import check_warnings
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class SysconfigTestCase(support.EnvironGuard, unittest.TestCase):
|
| 19 |
+
    def setUp(self):
        """Record that no generated Makefile exists yet; tearDown
        unlinks it if a test sets self.makefile."""
        super(SysconfigTestCase, self).setUp()
        self.makefile = None
|
| 22 |
+
|
| 23 |
+
    def tearDown(self):
        """Remove any Makefile created by a test, then TESTFN."""
        if self.makefile is not None:
            os.unlink(self.makefile)
        self.cleanup_testfn()
        super(SysconfigTestCase, self).tearDown()
|
| 28 |
+
|
| 29 |
+
def cleanup_testfn(self):
|
| 30 |
+
if os.path.isfile(TESTFN):
|
| 31 |
+
os.remove(TESTFN)
|
| 32 |
+
elif os.path.isdir(TESTFN):
|
| 33 |
+
shutil.rmtree(TESTFN)
|
| 34 |
+
|
| 35 |
+
    def test_get_config_h_filename(self):
        """The reported pyconfig.h path must be an existing file."""
        config_h = sysconfig.get_config_h_filename()
        self.assertTrue(os.path.isfile(config_h), config_h)
|
| 38 |
+
|
| 39 |
+
    def test_get_python_lib(self):
        """Passing a different prefix must yield a different lib dir."""
        # XXX doesn't work on Linux when Python was never installed before
        #self.assertTrue(os.path.isdir(lib_dir), lib_dir)
        # test for pythonxx.lib?
        self.assertNotEqual(sysconfig.get_python_lib(),
                            sysconfig.get_python_lib(prefix=TESTFN))
|
| 45 |
+
|
| 46 |
+
def test_get_config_vars(self):
|
| 47 |
+
cvars = sysconfig.get_config_vars()
|
| 48 |
+
self.assertIsInstance(cvars, dict)
|
| 49 |
+
self.assertTrue(cvars)
|
| 50 |
+
|
| 51 |
+
def test_srcdir(self):
|
| 52 |
+
# See Issues #15322, #15364.
|
| 53 |
+
srcdir = sysconfig.get_config_var('srcdir')
|
| 54 |
+
|
| 55 |
+
self.assertTrue(os.path.isabs(srcdir), srcdir)
|
| 56 |
+
self.assertTrue(os.path.isdir(srcdir), srcdir)
|
| 57 |
+
|
| 58 |
+
if sysconfig.python_build:
|
| 59 |
+
# The python executable has not been installed so srcdir
|
| 60 |
+
# should be a full source checkout.
|
| 61 |
+
Python_h = os.path.join(srcdir, 'Include', 'Python.h')
|
| 62 |
+
self.assertTrue(os.path.exists(Python_h), Python_h)
|
| 63 |
+
self.assertTrue(sysconfig._is_python_source_dir(srcdir))
|
| 64 |
+
elif os.name == 'posix':
|
| 65 |
+
self.assertEqual(
|
| 66 |
+
os.path.dirname(sysconfig.get_makefile_filename()),
|
| 67 |
+
srcdir)
|
| 68 |
+
|
| 69 |
+
def test_srcdir_independent_of_cwd(self):
|
| 70 |
+
# srcdir should be independent of the current working directory
|
| 71 |
+
# See Issues #15322, #15364.
|
| 72 |
+
srcdir = sysconfig.get_config_var('srcdir')
|
| 73 |
+
cwd = os.getcwd()
|
| 74 |
+
try:
|
| 75 |
+
os.chdir('..')
|
| 76 |
+
srcdir2 = sysconfig.get_config_var('srcdir')
|
| 77 |
+
finally:
|
| 78 |
+
os.chdir(cwd)
|
| 79 |
+
self.assertEqual(srcdir, srcdir2)
|
| 80 |
+
|
| 81 |
+
def customize_compiler(self):
|
| 82 |
+
# make sure AR gets caught
|
| 83 |
+
class compiler:
|
| 84 |
+
compiler_type = 'unix'
|
| 85 |
+
|
| 86 |
+
def set_executables(self, **kw):
|
| 87 |
+
self.exes = kw
|
| 88 |
+
|
| 89 |
+
sysconfig_vars = {
|
| 90 |
+
'AR': 'sc_ar',
|
| 91 |
+
'CC': 'sc_cc',
|
| 92 |
+
'CXX': 'sc_cxx',
|
| 93 |
+
'ARFLAGS': '--sc-arflags',
|
| 94 |
+
'CFLAGS': '--sc-cflags',
|
| 95 |
+
'CCSHARED': '--sc-ccshared',
|
| 96 |
+
'LDSHARED': 'sc_ldshared',
|
| 97 |
+
'SHLIB_SUFFIX': 'sc_shutil_suffix',
|
| 98 |
+
|
| 99 |
+
# On macOS, disable _osx_support.customize_compiler()
|
| 100 |
+
'CUSTOMIZED_OSX_COMPILER': 'True',
|
| 101 |
+
}
|
| 102 |
+
|
| 103 |
+
comp = compiler()
|
| 104 |
+
with contextlib.ExitStack() as cm:
|
| 105 |
+
for key, value in sysconfig_vars.items():
|
| 106 |
+
cm.enter_context(swap_item(sysconfig._config_vars, key, value))
|
| 107 |
+
sysconfig.customize_compiler(comp)
|
| 108 |
+
|
| 109 |
+
return comp
|
| 110 |
+
|
| 111 |
+
@unittest.skipUnless(get_default_compiler() == 'unix',
|
| 112 |
+
'not testing if default compiler is not unix')
|
| 113 |
+
def test_customize_compiler(self):
|
| 114 |
+
# Make sure that sysconfig._config_vars is initialized
|
| 115 |
+
sysconfig.get_config_vars()
|
| 116 |
+
|
| 117 |
+
os.environ['AR'] = 'env_ar'
|
| 118 |
+
os.environ['CC'] = 'env_cc'
|
| 119 |
+
os.environ['CPP'] = 'env_cpp'
|
| 120 |
+
os.environ['CXX'] = 'env_cxx --env-cxx-flags'
|
| 121 |
+
os.environ['LDSHARED'] = 'env_ldshared'
|
| 122 |
+
os.environ['LDFLAGS'] = '--env-ldflags'
|
| 123 |
+
os.environ['ARFLAGS'] = '--env-arflags'
|
| 124 |
+
os.environ['CFLAGS'] = '--env-cflags'
|
| 125 |
+
os.environ['CPPFLAGS'] = '--env-cppflags'
|
| 126 |
+
|
| 127 |
+
comp = self.customize_compiler()
|
| 128 |
+
self.assertEqual(comp.exes['archiver'],
|
| 129 |
+
'env_ar --env-arflags')
|
| 130 |
+
self.assertEqual(comp.exes['preprocessor'],
|
| 131 |
+
'env_cpp --env-cppflags')
|
| 132 |
+
self.assertEqual(comp.exes['compiler'],
|
| 133 |
+
'env_cc --sc-cflags --env-cflags --env-cppflags')
|
| 134 |
+
self.assertEqual(comp.exes['compiler_so'],
|
| 135 |
+
('env_cc --sc-cflags '
|
| 136 |
+
'--env-cflags ''--env-cppflags --sc-ccshared'))
|
| 137 |
+
self.assertEqual(comp.exes['compiler_cxx'],
|
| 138 |
+
'env_cxx --env-cxx-flags')
|
| 139 |
+
self.assertEqual(comp.exes['linker_exe'],
|
| 140 |
+
'env_cc')
|
| 141 |
+
self.assertEqual(comp.exes['linker_so'],
|
| 142 |
+
('env_ldshared --env-ldflags --env-cflags'
|
| 143 |
+
' --env-cppflags'))
|
| 144 |
+
self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix')
|
| 145 |
+
|
| 146 |
+
del os.environ['AR']
|
| 147 |
+
del os.environ['CC']
|
| 148 |
+
del os.environ['CPP']
|
| 149 |
+
del os.environ['CXX']
|
| 150 |
+
del os.environ['LDSHARED']
|
| 151 |
+
del os.environ['LDFLAGS']
|
| 152 |
+
del os.environ['ARFLAGS']
|
| 153 |
+
del os.environ['CFLAGS']
|
| 154 |
+
del os.environ['CPPFLAGS']
|
| 155 |
+
|
| 156 |
+
comp = self.customize_compiler()
|
| 157 |
+
self.assertEqual(comp.exes['archiver'],
|
| 158 |
+
'sc_ar --sc-arflags')
|
| 159 |
+
self.assertEqual(comp.exes['preprocessor'],
|
| 160 |
+
'sc_cc -E')
|
| 161 |
+
self.assertEqual(comp.exes['compiler'],
|
| 162 |
+
'sc_cc --sc-cflags')
|
| 163 |
+
self.assertEqual(comp.exes['compiler_so'],
|
| 164 |
+
'sc_cc --sc-cflags --sc-ccshared')
|
| 165 |
+
self.assertEqual(comp.exes['compiler_cxx'],
|
| 166 |
+
'sc_cxx')
|
| 167 |
+
self.assertEqual(comp.exes['linker_exe'],
|
| 168 |
+
'sc_cc')
|
| 169 |
+
self.assertEqual(comp.exes['linker_so'],
|
| 170 |
+
'sc_ldshared')
|
| 171 |
+
self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix')
|
| 172 |
+
|
| 173 |
+
def test_parse_makefile_base(self):
|
| 174 |
+
self.makefile = TESTFN
|
| 175 |
+
fd = open(self.makefile, 'w')
|
| 176 |
+
try:
|
| 177 |
+
fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=LIB'" '\n')
|
| 178 |
+
fd.write('VAR=$OTHER\nOTHER=foo')
|
| 179 |
+
finally:
|
| 180 |
+
fd.close()
|
| 181 |
+
d = sysconfig.parse_makefile(self.makefile)
|
| 182 |
+
self.assertEqual(d, {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'",
|
| 183 |
+
'OTHER': 'foo'})
|
| 184 |
+
|
| 185 |
+
def test_parse_makefile_literal_dollar(self):
|
| 186 |
+
self.makefile = TESTFN
|
| 187 |
+
fd = open(self.makefile, 'w')
|
| 188 |
+
try:
|
| 189 |
+
fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=\$$LIB'" '\n')
|
| 190 |
+
fd.write('VAR=$OTHER\nOTHER=foo')
|
| 191 |
+
finally:
|
| 192 |
+
fd.close()
|
| 193 |
+
d = sysconfig.parse_makefile(self.makefile)
|
| 194 |
+
self.assertEqual(d, {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'",
|
| 195 |
+
'OTHER': 'foo'})
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def test_sysconfig_module(self):
|
| 199 |
+
import sysconfig as global_sysconfig
|
| 200 |
+
self.assertEqual(global_sysconfig.get_config_var('CFLAGS'),
|
| 201 |
+
sysconfig.get_config_var('CFLAGS'))
|
| 202 |
+
self.assertEqual(global_sysconfig.get_config_var('LDFLAGS'),
|
| 203 |
+
sysconfig.get_config_var('LDFLAGS'))
|
| 204 |
+
|
| 205 |
+
@unittest.skipIf(sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'),
|
| 206 |
+
'compiler flags customized')
|
| 207 |
+
def test_sysconfig_compiler_vars(self):
|
| 208 |
+
# On OS X, binary installers support extension module building on
|
| 209 |
+
# various levels of the operating system with differing Xcode
|
| 210 |
+
# configurations. This requires customization of some of the
|
| 211 |
+
# compiler configuration directives to suit the environment on
|
| 212 |
+
# the installed machine. Some of these customizations may require
|
| 213 |
+
# running external programs and, so, are deferred until needed by
|
| 214 |
+
# the first extension module build. With Python 3.3, only
|
| 215 |
+
# the Distutils version of sysconfig is used for extension module
|
| 216 |
+
# builds, which happens earlier in the Distutils tests. This may
|
| 217 |
+
# cause the following tests to fail since no tests have caused
|
| 218 |
+
# the global version of sysconfig to call the customization yet.
|
| 219 |
+
# The solution for now is to simply skip this test in this case.
|
| 220 |
+
# The longer-term solution is to only have one version of sysconfig.
|
| 221 |
+
|
| 222 |
+
import sysconfig as global_sysconfig
|
| 223 |
+
if sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'):
|
| 224 |
+
self.skipTest('compiler flags customized')
|
| 225 |
+
self.assertEqual(global_sysconfig.get_config_var('LDSHARED'),
|
| 226 |
+
sysconfig.get_config_var('LDSHARED'))
|
| 227 |
+
self.assertEqual(global_sysconfig.get_config_var('CC'),
|
| 228 |
+
sysconfig.get_config_var('CC'))
|
| 229 |
+
|
| 230 |
+
@unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None,
|
| 231 |
+
'EXT_SUFFIX required for this test')
|
| 232 |
+
def test_SO_deprecation(self):
|
| 233 |
+
self.assertWarns(DeprecationWarning,
|
| 234 |
+
sysconfig.get_config_var, 'SO')
|
| 235 |
+
|
| 236 |
+
@unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None,
|
| 237 |
+
'EXT_SUFFIX required for this test')
|
| 238 |
+
def test_SO_value(self):
|
| 239 |
+
with check_warnings(('', DeprecationWarning)):
|
| 240 |
+
self.assertEqual(sysconfig.get_config_var('SO'),
|
| 241 |
+
sysconfig.get_config_var('EXT_SUFFIX'))
|
| 242 |
+
|
| 243 |
+
@unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None,
|
| 244 |
+
'EXT_SUFFIX required for this test')
|
| 245 |
+
def test_SO_in_vars(self):
|
| 246 |
+
vars = sysconfig.get_config_vars()
|
| 247 |
+
self.assertIsNotNone(vars['SO'])
|
| 248 |
+
self.assertEqual(vars['SO'], vars['EXT_SUFFIX'])
|
| 249 |
+
|
| 250 |
+
def test_customize_compiler_before_get_config_vars(self):
|
| 251 |
+
# Issue #21923: test that a Distribution compiler
|
| 252 |
+
# instance can be called without an explicit call to
|
| 253 |
+
# get_config_vars().
|
| 254 |
+
with open(TESTFN, 'w') as f:
|
| 255 |
+
f.writelines(textwrap.dedent('''\
|
| 256 |
+
from distutils.core import Distribution
|
| 257 |
+
config = Distribution().get_command_obj('config')
|
| 258 |
+
# try_compile may pass or it may fail if no compiler
|
| 259 |
+
# is found but it should not raise an exception.
|
| 260 |
+
rc = config.try_compile('int x;')
|
| 261 |
+
'''))
|
| 262 |
+
p = subprocess.Popen([str(sys.executable), TESTFN],
|
| 263 |
+
stdout=subprocess.PIPE,
|
| 264 |
+
stderr=subprocess.STDOUT,
|
| 265 |
+
universal_newlines=True)
|
| 266 |
+
outs, errs = p.communicate()
|
| 267 |
+
self.assertEqual(0, p.returncode, "Subprocess failed: " + outs)
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
def test_suite():
    """Return a TestSuite wrapping all SysconfigTestCase tests."""
    return unittest.TestSuite([unittest.makeSuite(SysconfigTestCase)])
| 274 |
+
|
| 275 |
+
|
| 276 |
+
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_text_file.py
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.text_file."""
|
| 2 |
+
import os
|
| 3 |
+
import unittest
|
| 4 |
+
from distutils.text_file import TextFile
|
| 5 |
+
from distutils.tests import support
|
| 6 |
+
from test.support import run_unittest
|
| 7 |
+
|
| 8 |
+
TEST_DATA = """# test file
|
| 9 |
+
|
| 10 |
+
line 3 \\
|
| 11 |
+
# intervening comment
|
| 12 |
+
continues on next line
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
class TextFileTestCase(support.TempdirManager, unittest.TestCase):
    """Check every combination of TextFile's line-processing options."""

    def test_class(self):
        # old tests moved from text_file.__main__
        # so they are really called by the buildbots

        # result 1: no fancy options
        result1 = ['# test file\n', '\n', 'line 3 \\\n',
                   '# intervening comment\n',
                   '  continues on next line\n']

        # result 2: just strip comments
        result2 = ["\n",
                   "line 3 \\\n",
                   "  continues on next line\n"]

        # result 3: just strip blank lines
        result3 = ["# test file\n",
                   "line 3 \\\n",
                   "# intervening comment\n",
                   "  continues on next line\n"]

        # result 4: default, strip comments, blank lines,
        # and trailing whitespace
        result4 = ["line 3 \\",
                   "  continues on next line"]

        # result 5: strip comments and blanks, plus join lines (but don't
        # "collapse" joined lines
        result5 = ["line 3   continues on next line"]

        # result 6: strip comments and blanks, plus join lines (and
        # "collapse" joined lines
        result6 = ["line 3 continues on next line"]

        # Helper: read the whole file and compare against the expectation.
        def test_input(count, description, file, expected_result):
            result = file.readlines()
            self.assertEqual(result, expected_result)

        tmpdir = self.mkdtemp()
        filename = os.path.join(tmpdir, "test.txt")
        out_file = open(filename, "w")
        try:
            out_file.write(TEST_DATA)
        finally:
            out_file.close()

        in_file = TextFile(filename, strip_comments=0, skip_blanks=0,
                           lstrip_ws=0, rstrip_ws=0)
        try:
            test_input(1, "no processing", in_file, result1)
        finally:
            in_file.close()

        in_file = TextFile(filename, strip_comments=1, skip_blanks=0,
                           lstrip_ws=0, rstrip_ws=0)
        try:
            test_input(2, "strip comments", in_file, result2)
        finally:
            in_file.close()

        in_file = TextFile(filename, strip_comments=0, skip_blanks=1,
                           lstrip_ws=0, rstrip_ws=0)
        try:
            test_input(3, "strip blanks", in_file, result3)
        finally:
            in_file.close()

        in_file = TextFile(filename)
        try:
            test_input(4, "default processing", in_file, result4)
        finally:
            in_file.close()

        in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
                           join_lines=1, rstrip_ws=1)
        try:
            test_input(5, "join lines without collapsing", in_file, result5)
        finally:
            in_file.close()

        in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
                           join_lines=1, rstrip_ws=1, collapse_join=1)
        try:
            test_input(6, "join lines with collapsing", in_file, result6)
        finally:
            in_file.close()
| 102 |
+
|
| 103 |
+
def test_suite():
    """Build the suite of TextFileTestCase tests."""
    suite = unittest.makeSuite(TextFileTestCase)
    return suite
|
| 105 |
+
|
| 106 |
+
if __name__ == "__main__":
    # Allow running this test module directly from the command line.
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_unixccompiler.py
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.unixccompiler."""
|
| 2 |
+
import sys
|
| 3 |
+
import unittest
|
| 4 |
+
from test.support import run_unittest
|
| 5 |
+
from test.support.os_helper import EnvironmentVarGuard
|
| 6 |
+
|
| 7 |
+
from distutils import sysconfig
|
| 8 |
+
from distutils.unixccompiler import UnixCCompiler
|
| 9 |
+
|
| 10 |
+
class UnixCCompilerTestCase(unittest.TestCase):
    """Tests for UnixCCompiler's runtime-library-dir (rpath) handling.

    The tests patch sys.platform and sysconfig.get_config_var in place,
    so setUp/tearDown save and restore both, plus sysconfig._config_vars.
    """

    def setUp(self):
        # Save the globals the tests patch so tearDown can restore them.
        self._backup_platform = sys.platform
        self._backup_get_config_var = sysconfig.get_config_var
        self._backup_config_vars = dict(sysconfig._config_vars)
        # Subclass exposing runtime_library_dir_option() with a fixed path.
        class CompilerWrapper(UnixCCompiler):
            def rpath_foo(self):
                return self.runtime_library_dir_option('/foo')
        self.cc = CompilerWrapper()

    def tearDown(self):
        # Undo every patch made by setUp or a test body.
        sys.platform = self._backup_platform
        sysconfig.get_config_var = self._backup_get_config_var
        sysconfig._config_vars.clear()
        sysconfig._config_vars.update(self._backup_config_vars)

    @unittest.skipIf(sys.platform == 'win32', "can't test on Windows")
    def test_runtime_libdir_option(self):
        # Issue#5900
        #
        # Ensure RUNPATH is added to extension modules with RPATH if
        # GNU ld is used

        # darwin
        sys.platform = 'darwin'
        self.assertEqual(self.cc.rpath_foo(), '-L/foo')

        # hp-ux
        sys.platform = 'hp-ux'
        old_gcv = sysconfig.get_config_var
        def gcv(v):
            return 'xxx'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), ['+s', '-L/foo'])

        def gcv(v):
            return 'gcc'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo'])

        def gcv(v):
            return 'g++'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo'])

        sysconfig.get_config_var = old_gcv

        # GCC GNULD
        sys.platform = 'bar'
        def gcv(v):
            if v == 'CC':
                return 'gcc'
            elif v == 'GNULD':
                return 'yes'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')

        # GCC non-GNULD
        sys.platform = 'bar'
        def gcv(v):
            if v == 'CC':
                return 'gcc'
            elif v == 'GNULD':
                return 'no'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo')

        # GCC GNULD with fully qualified configuration prefix
        # see #7617
        sys.platform = 'bar'
        def gcv(v):
            if v == 'CC':
                return 'x86_64-pc-linux-gnu-gcc-4.4.2'
            elif v == 'GNULD':
                return 'yes'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')

        # non-GCC GNULD
        sys.platform = 'bar'
        def gcv(v):
            if v == 'CC':
                return 'cc'
            elif v == 'GNULD':
                return 'yes'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), '-R/foo')

        # non-GCC non-GNULD
        sys.platform = 'bar'
        def gcv(v):
            if v == 'CC':
                return 'cc'
            elif v == 'GNULD':
                return 'no'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), '-R/foo')

    @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for OS X')
    def test_osx_cc_overrides_ldshared(self):
        # Issue #18080:
        # ensure that setting CC env variable also changes default linker
        def gcv(v):
            if v == 'LDSHARED':
                return 'gcc-4.2 -bundle -undefined dynamic_lookup '
            return 'gcc-4.2'
        sysconfig.get_config_var = gcv
        with EnvironmentVarGuard() as env:
            env['CC'] = 'my_cc'
            del env['LDSHARED']
            sysconfig.customize_compiler(self.cc)
        self.assertEqual(self.cc.linker_so[0], 'my_cc')

    @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for OS X')
    def test_osx_explicit_ldshared(self):
        # Issue #18080:
        # ensure that setting CC env variable does not change
        # explicit LDSHARED setting for linker
        def gcv(v):
            if v == 'LDSHARED':
                return 'gcc-4.2 -bundle -undefined dynamic_lookup '
            return 'gcc-4.2'
        sysconfig.get_config_var = gcv
        with EnvironmentVarGuard() as env:
            env['CC'] = 'my_cc'
            env['LDSHARED'] = 'my_ld -bundle -dynamic'
            sysconfig.customize_compiler(self.cc)
        self.assertEqual(self.cc.linker_so[0], 'my_ld')
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def test_suite():
    """Build the suite of UnixCCompilerTestCase tests."""
    suite = unittest.makeSuite(UnixCCompilerTestCase)
    return suite
|
| 143 |
+
|
| 144 |
+
if __name__ == "__main__":
    # Allow running this test module directly from the command line.
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_upload.py
ADDED
|
@@ -0,0 +1,223 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.upload."""
|
| 2 |
+
import os
|
| 3 |
+
import unittest
|
| 4 |
+
import unittest.mock as mock
|
| 5 |
+
from urllib.error import HTTPError
|
| 6 |
+
|
| 7 |
+
from test.support import run_unittest
|
| 8 |
+
|
| 9 |
+
from distutils.command import upload as upload_mod
|
| 10 |
+
from distutils.command.upload import upload
|
| 11 |
+
from distutils.core import Distribution
|
| 12 |
+
from distutils.errors import DistutilsError
|
| 13 |
+
from distutils.log import ERROR, INFO
|
| 14 |
+
|
| 15 |
+
from distutils.tests.test_config import PYPIRC, BasePyPIRCCommandTestCase
|
| 16 |
+
|
| 17 |
+
# Sample .pypirc with two index servers; server1 uses a deliberately
# long password (see test_upload's Content-length expectations).
PYPIRC_LONG_PASSWORD = """\
[distutils]

index-servers =
    server1
    server2

[server1]
username:me
password:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa

[server2]
username:meagain
password: secret
realm:acme
repository:http://another.pypi/
"""


# Sample .pypirc entry with a username but no password, used by
# test_saved_password.
PYPIRC_NOPASSWORD = """\
[distutils]

index-servers =
    server1

[server1]
username:me
"""
|
| 45 |
+
|
| 46 |
+
class FakeOpen(object):
    """Stand-in for the response object returned by ``urlopen``.

    Records the request (when ``url`` is a Request object rather than a
    string) and serves a fixed body, status code and content-type header.
    """

    def __init__(self, url, msg=None, code=None):
        self.url = url
        # A non-str "url" is really a Request instance: keep it so the
        # tests can inspect what was sent.
        self.req = None if isinstance(url, str) else url
        self.msg = msg if msg else 'OK'
        self.code = code if code else 200

    def getheader(self, name, default=None):
        """Case-insensitive header lookup; only content-type is served."""
        known = {'content-type': 'text/plain; charset=utf-8'}
        return known.get(name.lower(), default)

    def read(self):
        """Return the canned response body."""
        return b'xyzzy'

    def getcode(self):
        """Return the HTTP status code configured at construction."""
        return self.code
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class uploadTestCase(BasePyPIRCCommandTestCase):
    """Tests for the ``upload`` command, with urlopen replaced by a stub."""

    def setUp(self):
        """Swap upload's urlopen for _urlopen; remember it for tearDown."""
        super(uploadTestCase, self).setUp()
        self.old_open = upload_mod.urlopen
        upload_mod.urlopen = self._urlopen
        # The FakeOpen used by the next request, and its canned reply.
        self.last_open = None
        self.next_msg = None
        self.next_code = None

    def tearDown(self):
        """Restore the real urlopen."""
        upload_mod.urlopen = self.old_open
        super(uploadTestCase, self).tearDown()

    def _urlopen(self, url):
        # Record the request so the tests can inspect it afterwards.
        self.last_open = FakeOpen(url, msg=self.next_msg, code=self.next_code)
        return self.last_open

    def test_finalize_options(self):
        """finalize_options() reads credentials and repository from .pypirc."""

        # new format
        self.write_file(self.rc, PYPIRC)
        dist = Distribution()
        cmd = upload(dist)
        cmd.finalize_options()
        for attr, waited in (('username', 'me'), ('password', 'secret'),
                             ('realm', 'pypi'),
                             ('repository', 'https://upload.pypi.org/legacy/')):
            self.assertEqual(getattr(cmd, attr), waited)

    def test_saved_password(self):
        """A password already stored on the Distribution is reused."""
        # file with no password
        self.write_file(self.rc, PYPIRC_NOPASSWORD)

        # make sure it passes
        dist = Distribution()
        cmd = upload(dist)
        cmd.finalize_options()
        self.assertEqual(cmd.password, None)

        # make sure we get it as well, if another command
        # initialized it at the dist level
        dist.password = 'xxx'
        cmd = upload(dist)
        cmd.finalize_options()
        self.assertEqual(cmd.password, 'xxx')

    def test_upload(self):
        """A full upload run posts a multipart form with the file digests."""
        tmp = self.mkdtemp()
        path = os.path.join(tmp, 'xxx')
        self.write_file(path)
        command, pyversion, filename = 'xxx', '2.6', path
        dist_files = [(command, pyversion, filename)]
        self.write_file(self.rc, PYPIRC_LONG_PASSWORD)

        # lets run it
        pkg_dir, dist = self.create_dist(dist_files=dist_files)
        cmd = upload(dist)
        cmd.show_response = 1
        cmd.ensure_finalized()
        cmd.run()

        # what did we send ?
        headers = dict(self.last_open.req.headers)
        self.assertGreaterEqual(int(headers['Content-length']), 2162)
        content_type = headers['Content-type']
        self.assertTrue(content_type.startswith('multipart/form-data'))
        self.assertEqual(self.last_open.req.get_method(), 'POST')
        expected_url = 'https://upload.pypi.org/legacy/'
        self.assertEqual(self.last_open.req.get_full_url(), expected_url)
        data = self.last_open.req.data
        self.assertIn(b'xxx',data)
        self.assertIn(b'protocol_version', data)
        self.assertIn(b'sha256_digest', data)
        # sha256 of the empty payload written above.
        self.assertIn(
            b'cd2eb0837c9b4c962c22d2ff8b5441b7b45805887f051d39bf133b583baf'
            b'6860',
            data
        )
        # md5/blake2 are only sent when the hashes are available on this
        # build, so assert their values conditionally.
        if b'md5_digest' in data:
            self.assertIn(b'f561aaf6ef0bf14d4208bb46a4ccb3ad', data)
        if b'blake2_256_digest' in data:
            self.assertIn(
                b'b6f289a27d4fe90da63c503bfe0a9b761a8f76bb86148565065f040be'
                b'6d1c3044cf7ded78ef800509bccb4b648e507d88dc6383d67642aadcc'
                b'ce443f1534330a',
                data
            )

        # The PyPI response body was echoed
        results = self.get_logs(INFO)
        self.assertEqual(results[-1], 75 * '-' + '\nxyzzy\n' + 75 * '-')

    # bpo-32304: archives whose last byte was b'\r' were corrupted due to
    # normalization intended for Mac OS 9.
    def test_upload_correct_cr(self):
        # content that ends with \r should not be modified.
        tmp = self.mkdtemp()
        path = os.path.join(tmp, 'xxx')
        self.write_file(path, content='yy\r')
        command, pyversion, filename = 'xxx', '2.6', path
        dist_files = [(command, pyversion, filename)]
        self.write_file(self.rc, PYPIRC_LONG_PASSWORD)

        # other fields that ended with \r used to be modified, now are
        # preserved.
        pkg_dir, dist = self.create_dist(
            dist_files=dist_files,
            description='long description\r'
        )
        cmd = upload(dist)
        cmd.show_response = 1
        cmd.ensure_finalized()
        cmd.run()

        headers = dict(self.last_open.req.headers)
        self.assertGreaterEqual(int(headers['Content-length']), 2172)
        self.assertIn(b'long description\r', self.last_open.req.data)

    def test_upload_fails(self):
        """A non-200 server response is surfaced as DistutilsError."""
        self.next_msg = "Not Found"
        self.next_code = 404
        self.assertRaises(DistutilsError, self.test_upload)

    def test_wrong_exception_order(self):
        """OSError must not be masked by the HTTPError handler (bpo order)."""
        tmp = self.mkdtemp()
        path = os.path.join(tmp, 'xxx')
        self.write_file(path)
        dist_files = [('xxx', '2.6', path)]  # command, pyversion, filename
        self.write_file(self.rc, PYPIRC_LONG_PASSWORD)

        pkg_dir, dist = self.create_dist(dist_files=dist_files)
        tests = [
            (OSError('oserror'), 'oserror', OSError),
            (HTTPError('url', 400, 'httperror', {}, None),
             'Upload failed (400): httperror', DistutilsError),
        ]
        for exception, expected, raised_exception in tests:
            with self.subTest(exception=type(exception).__name__):
                with mock.patch('distutils.command.upload.urlopen',
                                new=mock.Mock(side_effect=exception)):
                    with self.assertRaises(raised_exception):
                        cmd = upload(dist)
                        cmd.ensure_finalized()
                        cmd.run()
                    results = self.get_logs(ERROR)
                    self.assertIn(expected, results[-1])
                    self.clear_logs()
| 217 |
+
|
| 218 |
+
|
| 219 |
+
def test_suite():
    """Build the suite of uploadTestCase tests."""
    suite = unittest.makeSuite(uploadTestCase)
    return suite
|
| 221 |
+
|
| 222 |
+
if __name__ == "__main__":
    # Allow running this test module directly from the command line.
    run_unittest(test_suite())
|
llava/lib/python3.10/distutils/tests/test_version.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.version."""
|
| 2 |
+
import unittest
|
| 3 |
+
from distutils.version import LooseVersion
|
| 4 |
+
from distutils.version import StrictVersion
|
| 5 |
+
from test.support import run_unittest
|
| 6 |
+
|
| 7 |
+
class VersionTestCase(unittest.TestCase):
|
| 8 |
+
|
| 9 |
+
def test_prerelease(self):
|
| 10 |
+
version = StrictVersion('1.2.3a1')
|
| 11 |
+
self.assertEqual(version.version, (1, 2, 3))
|
| 12 |
+
self.assertEqual(version.prerelease, ('a', 1))
|
| 13 |
+
self.assertEqual(str(version), '1.2.3a1')
|
| 14 |
+
|
| 15 |
+
version = StrictVersion('1.2.0')
|
| 16 |
+
self.assertEqual(str(version), '1.2')
|
| 17 |
+
|
| 18 |
+
def test_cmp_strict(self):
|
| 19 |
+
versions = (('1.5.1', '1.5.2b2', -1),
|
| 20 |
+
('161', '3.10a', ValueError),
|
| 21 |
+
('8.02', '8.02', 0),
|
| 22 |
+
('3.4j', '1996.07.12', ValueError),
|
| 23 |
+
('3.2.pl0', '3.1.1.6', ValueError),
|
| 24 |
+
('2g6', '11g', ValueError),
|
| 25 |
+
('0.9', '2.2', -1),
|
| 26 |
+
('1.2.1', '1.2', 1),
|
| 27 |
+
('1.1', '1.2.2', -1),
|
| 28 |
+
('1.2', '1.1', 1),
|
| 29 |
+
('1.2.1', '1.2.2', -1),
|
| 30 |
+
('1.2.2', '1.2', 1),
|
| 31 |
+
('1.2', '1.2.2', -1),
|
| 32 |
+
('0.4.0', '0.4', 0),
|
| 33 |
+
('1.13++', '5.5.kw', ValueError))
|
| 34 |
+
|
| 35 |
+
for v1, v2, wanted in versions:
|
| 36 |
+
try:
|
| 37 |
+
res = StrictVersion(v1)._cmp(StrictVersion(v2))
|
| 38 |
+
except ValueError:
|
| 39 |
+
if wanted is ValueError:
|
| 40 |
+
continue
|
| 41 |
+
else:
|
| 42 |
+
raise AssertionError(("cmp(%s, %s) "
|
| 43 |
+
"shouldn't raise ValueError")
|
| 44 |
+
% (v1, v2))
|
| 45 |
+
self.assertEqual(res, wanted,
|
| 46 |
+
'cmp(%s, %s) should be %s, got %s' %
|
| 47 |
+
(v1, v2, wanted, res))
|
| 48 |
+
res = StrictVersion(v1)._cmp(v2)
|
| 49 |
+
self.assertEqual(res, wanted,
|
| 50 |
+
'cmp(%s, %s) should be %s, got %s' %
|
| 51 |
+
(v1, v2, wanted, res))
|
| 52 |
+
res = StrictVersion(v1)._cmp(object())
|
| 53 |
+
self.assertIs(res, NotImplemented,
|
| 54 |
+
'cmp(%s, %s) should be NotImplemented, got %s' %
|
| 55 |
+
(v1, v2, res))
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def test_cmp(self):
|
| 59 |
+
versions = (('1.5.1', '1.5.2b2', -1),
|
| 60 |
+
('161', '3.10a', 1),
|
| 61 |
+
('8.02', '8.02', 0),
|
| 62 |
+
('3.4j', '1996.07.12', -1),
|
| 63 |
+
('3.2.pl0', '3.1.1.6', 1),
|
| 64 |
+
('2g6', '11g', -1),
|
| 65 |
+
('0.960923', '2.2beta29', -1),
|
| 66 |
+
('1.13++', '5.5.kw', -1))
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
for v1, v2, wanted in versions:
|
| 70 |
+
res = LooseVersion(v1)._cmp(LooseVersion(v2))
|
| 71 |
+
self.assertEqual(res, wanted,
|
| 72 |
+
'cmp(%s, %s) should be %s, got %s' %
|
| 73 |
+
(v1, v2, wanted, res))
|
| 74 |
+
res = LooseVersion(v1)._cmp(v2)
|
| 75 |
+
self.assertEqual(res, wanted,
|
| 76 |
+
'cmp(%s, %s) should be %s, got %s' %
|
| 77 |
+
(v1, v2, wanted, res))
|
| 78 |
+
res = LooseVersion(v1)._cmp(object())
|
| 79 |
+
self.assertIs(res, NotImplemented,
|
| 80 |
+
'cmp(%s, %s) should be NotImplemented, got %s' %
|
| 81 |
+
(v1, v2, res))
|
| 82 |
+
|
| 83 |
+
def test_suite():
|
| 84 |
+
return unittest.makeSuite(VersionTestCase)
|
| 85 |
+
|
| 86 |
+
if __name__ == "__main__":
|
| 87 |
+
run_unittest(test_suite())
|