Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because the commit contains too many changes.
See raw diff
- llava/lib/python3.10/site-packages/setuptools/_distutils/extension.py +255 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/fancy_getopt.py +471 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/__init__.py +42 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/support.py +134 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_archive_util.py +353 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_bdist_dumb.py +78 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_bdist_rpm.py +127 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_build.py +49 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_build_ext.py +560 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_build_py.py +196 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_build_scripts.py +96 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_ccompiler.py +93 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_cmd.py +107 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_core.py +130 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_cygwinccompiler.py +81 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_dist.py +552 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_extension.py +117 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_file_util.py +95 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_filelist.py +336 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_install.py +245 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_install_data.py +74 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_install_headers.py +33 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_install_lib.py +110 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_modified.py +126 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_msvccompiler.py +137 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_sdist.py +470 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_sysconfig.py +319 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_text_file.py +127 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_unixccompiler.py +350 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_util.py +243 -0
- llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_version.py +80 -0
- minigpt2/lib/python3.10/site-packages/tzdata/zoneinfo/Africa/Addis_Ababa +0 -0
- minigpt2/lib/python3.10/site-packages/tzdata/zoneinfo/Africa/Dar_es_Salaam +0 -0
- minigpt2/lib/python3.10/site-packages/tzdata/zoneinfo/Africa/Mbabane +0 -0
- minigpt2/lib/python3.10/site-packages/tzdata/zoneinfo/Africa/Nouakchott +0 -0
- minigpt2/lib/python3.10/site-packages/tzdata/zoneinfo/Africa/Porto-Novo +0 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_batch_norm_impl_index_compositeimplicitautograd_dispatch.h +23 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_asin_compositeexplicitautograd_dispatch.h +26 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_indices_copy.h +39 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_jagged_to_padded_dense_forward.h +47 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_lstm_mps_compositeexplicitautograd_dispatch.h +24 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_pad_enum_native.h +21 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_sample_dirichlet_native.h +23 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_softmax_backward_data_cuda_dispatch.h +25 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_sparse_softmax_backward_data.h +39 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_sparse_softmax_compositeexplicitautograd_dispatch.h +24 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_upsample_nearest_exact2d_backward.h +91 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_weight_norm_interface_backward_compositeexplicitautograd_dispatch.h +24 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/affine_grid_generator_ops.h +39 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/bincount.h +39 -0
llava/lib/python3.10/site-packages/setuptools/_distutils/extension.py
ADDED
|
@@ -0,0 +1,255 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.extension
|
| 2 |
+
|
| 3 |
+
Provides the Extension class, used to describe C/C++ extension
|
| 4 |
+
modules in setup scripts."""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import warnings
|
| 8 |
+
|
| 9 |
+
# This class is really only used by the "build_ext" command, so it might
|
| 10 |
+
# make sense to put it in distutils.command.build_ext. However, that
|
| 11 |
+
# module is already big enough, and I want to make this class a bit more
|
| 12 |
+
# complex to simplify some common cases ("foo" module in "foo.c") and do
|
| 13 |
+
# better error-checking ("foo.c" actually exists).
|
| 14 |
+
#
|
| 15 |
+
# Also, putting this in build_ext.py means every setup script would have to
|
| 16 |
+
# import that large-ish module (indirectly, through distutils.core) in
|
| 17 |
+
# order to do anything.
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class Extension:
    """Container describing one C/C++ extension module and everything the
    "build_ext" command needs in order to compile and link it (hopefully
    portably, with hooks for the unportable cases).

    Instance attributes:
      name : string
        fully-dotted Python name of the module -- *not* a filename or path
      sources : list[str]
        source filenames, normalized through os.fspath() from whatever
        non-string iterable of str/PathLike objects the caller supplied;
        paths are relative to the distribution root, in Unix (slash) form
      include_dirs : [string]
        directories searched for C/C++ headers
      define_macros : [(name, value-or-None)]
        macros to define; value None means "#define NAME" with no value
      undef_macros : [string]
        macros to undefine explicitly
      library_dirs : [string]
        link-time library search directories
      libraries : [string]
        library names (not filenames or paths) to link against
      runtime_library_dirs : [string]
        run-time library search directories (for shared extensions)
      extra_objects : [string]
        extra files to link with (object files, static libs, resources)
      extra_compile_args : [string]
        extra platform/compiler-specific arguments for compilation
      extra_link_args : [string]
        extra platform/compiler-specific arguments for linking
      export_symbols : [string]
        symbols exported from the shared extension; rarely needed since
        Python extensions export a single "init" + name symbol
      swig_opts : [string]
        extra options passed to SWIG for .i sources
      depends : [string]
        files the extension depends on
      language : string or None
        "c", "c++", "objc", ...; auto-detected from sources when None
      optional : bool or None
        when true, a build failure skips this extension instead of
        aborting the whole build
    """

    # When adding constructor arguments, be sure to update setup_keywords
    # in core.py as well.
    def __init__(
        self,
        name,
        sources,
        include_dirs=None,
        define_macros=None,
        undef_macros=None,
        library_dirs=None,
        libraries=None,
        runtime_library_dirs=None,
        extra_objects=None,
        extra_compile_args=None,
        extra_link_args=None,
        export_symbols=None,
        swig_opts=None,
        depends=None,
        language=None,
        optional=None,
        **kw,  # tolerate (but warn about) unknown keywords
    ):
        if not isinstance(name, str):
            raise TypeError("'name' must be a string")

        # A plain string is itself iterable, so it must be rejected
        # explicitly before the generic iterable handling below.
        if isinstance(sources, str):
            raise TypeError(
                "'sources' must be an iterable of strings or PathLike objects, not a string"
            )

        # Normalize every entry through os.fspath(); a non-iterable
        # 'sources' or an entry of the wrong type both surface here.
        try:
            self.sources = list(map(os.fspath, sources))
        except TypeError:
            raise TypeError(
                "'sources' must be an iterable of strings or PathLike objects"
            )

        self.name = name
        self.language = language
        self.optional = optional
        # Every remaining option is list-valued and defaults to empty.
        self.include_dirs = include_dirs or []
        self.define_macros = define_macros or []
        self.undef_macros = undef_macros or []
        self.library_dirs = library_dirs or []
        self.libraries = libraries or []
        self.runtime_library_dirs = runtime_library_dirs or []
        self.extra_objects = extra_objects or []
        self.extra_compile_args = extra_compile_args or []
        self.extra_link_args = extra_link_args or []
        self.export_symbols = export_symbols or []
        self.swig_opts = swig_opts or []
        self.depends = depends or []

        # Anything left over in **kw was not recognized -- warn, don't fail.
        if kw:
            unknown = ', '.join(sorted(repr(option) for option in kw))
            warnings.warn(f"Unknown Extension options: {unknown}")

    def __repr__(self):
        cls = self.__class__
        return f'<{cls.__module__}.{cls.__qualname__}({self.name!r}) at {id(self):#x}>'
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
def read_setup_file(filename):  # noqa: C901
    """Parse a makesetup-style "Setup" file and return a list of
    Extension instances, one per module line.
    """
    from distutils.sysconfig import _variable_rx, expand_makefile_vars, parse_makefile
    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # Pass 1: collect all "VAR = VALUE" assignments for later expansion.
    vars = parse_makefile(filename)

    # Pass 2: the real content -- lines of the form
    #   <module> [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    file = TextFile(
        filename,
        strip_comments=True,
        skip_blanks=True,
        join_lines=True,
        lstrip_ws=True,
        rstrip_ws=True,
    )
    extensions = []
    try:
        while True:
            line = file.readline()
            if line is None:  # end of file
                break
            if _variable_rx.match(line):  # VAR=VALUE -- handled in pass 1
                continue

            # "*shared*" / "*static*" style directive lines are unsupported.
            if line[0] == line[-1] == "*":
                file.warn(f"'{line}' lines not handled yet")
                continue

            line = expand_makefile_vars(line, vars)
            words = split_quoted(line)

            # Unlike the old makesetup syntax, exactly one extension per
            # line, and its dotted module name is always the first word.
            ext = Extension(words[0], [])
            append_next_word = None  # target list for a flag's argument

            for word in words[1:]:
                if append_next_word is not None:
                    # the previous flag (-rpath / -Xlinker / -Xcompiler /
                    # bare -u) takes this word as its argument
                    append_next_word.append(word)
                    append_next_word = None
                    continue

                suffix = os.path.splitext(word)[1]
                switch = word[0:2]
                value = word[2:]

                if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"):
                    # a source file; the CCompiler implementation sorts
                    # out C vs. C++ vs. ObjC
                    ext.sources.append(word)
                elif switch == "-I":
                    ext.include_dirs.append(value)
                elif switch == "-D":
                    equals = value.find("=")
                    if equals == -1:  # bare "-DFOO" -- no value
                        ext.define_macros.append((value, None))
                    else:  # "-DFOO=blah"
                        # NOTE(review): 'equals + 2' skips the character
                        # immediately after '='; this matches long-standing
                        # upstream behavior -- confirm intent before "fixing".
                        ext.define_macros.append((value[0:equals], value[equals + 2 :]))
                elif switch == "-U":
                    ext.undef_macros.append(value)
                elif switch == "-C":  # only here 'cause makesetup has it!
                    ext.extra_compile_args.append(word)
                elif switch == "-l":
                    ext.libraries.append(value)
                elif switch == "-L":
                    ext.library_dirs.append(value)
                elif switch == "-R":
                    ext.runtime_library_dirs.append(value)
                elif word == "-rpath":
                    append_next_word = ext.runtime_library_dirs
                elif word == "-Xlinker":
                    append_next_word = ext.extra_link_args
                elif word == "-Xcompiler":
                    append_next_word = ext.extra_compile_args
                elif switch == "-u":
                    ext.extra_link_args.append(word)
                    if not value:  # "-u SYMBOL": symbol is the next word
                        append_next_word = ext.extra_link_args
                elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
                    # NB. a really faithful makesetup emulation would only
                    # treat a .o with a slash as an extra object and would
                    # otherwise s/.o/.c/ into sources; we always append here.
                    ext.extra_objects.append(word)
                else:
                    file.warn(f"unrecognized argument '{word}'")

            extensions.append(ext)
    finally:
        file.close()

    return extensions
|
llava/lib/python3.10/site-packages/setuptools/_distutils/fancy_getopt.py
ADDED
|
@@ -0,0 +1,471 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.fancy_getopt
|
| 2 |
+
|
| 3 |
+
Wrapper around the standard getopt module that provides the following
|
| 4 |
+
additional features:
|
| 5 |
+
* short and long options are tied together
|
| 6 |
+
* options have help strings, so fancy_getopt could potentially
|
| 7 |
+
create a complete usage summary
|
| 8 |
+
* options set attributes of a passed-in object
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from __future__ import annotations
|
| 12 |
+
|
| 13 |
+
import getopt
|
| 14 |
+
import re
|
| 15 |
+
import string
|
| 16 |
+
import sys
|
| 17 |
+
from collections.abc import Sequence
|
| 18 |
+
from typing import Any
|
| 19 |
+
|
| 20 |
+
from .errors import DistutilsArgError, DistutilsGetoptError
|
| 21 |
+
|
| 22 |
+
# Much like command_re in distutils.core, this is close to but not quite
|
| 23 |
+
# the same as a Python NAME -- except, in the spirit of most GNU
|
| 24 |
+
# utilities, we use '-' in place of '_'. (The spirit of LISP lives on!)
|
| 25 |
+
# The similarities to NAME are again not a coincidence...
|
| 26 |
+
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
|
| 27 |
+
longopt_re = re.compile(rf'^{longopt_pat}$')
|
| 28 |
+
|
| 29 |
+
# For recognizing "negative alias" options, eg. "quiet=!verbose"
|
| 30 |
+
neg_alias_re = re.compile(f"^({longopt_pat})=!({longopt_pat})$")
|
| 31 |
+
|
| 32 |
+
# This is used to translate long options to legitimate Python identifiers
|
| 33 |
+
# (for use as attributes of some object).
|
| 34 |
+
longopt_xlate = str.maketrans('-', '_')
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class FancyGetopt:
|
| 38 |
+
"""Wrapper around the standard 'getopt()' module that provides some
|
| 39 |
+
handy extra functionality:
|
| 40 |
+
* short and long options are tied together
|
| 41 |
+
* options have help strings, and help text can be assembled
|
| 42 |
+
from them
|
| 43 |
+
* options set attributes of a passed-in object
|
| 44 |
+
* boolean options can have "negative aliases" -- eg. if
|
| 45 |
+
--quiet is the "negative alias" of --verbose, then "--quiet"
|
| 46 |
+
on the command line sets 'verbose' to false
|
| 47 |
+
"""
|
| 48 |
+
|
| 49 |
+
def __init__(self, option_table=None):
|
| 50 |
+
# The option table is (currently) a list of tuples. The
|
| 51 |
+
# tuples may have 3 or four values:
|
| 52 |
+
# (long_option, short_option, help_string [, repeatable])
|
| 53 |
+
# if an option takes an argument, its long_option should have '='
|
| 54 |
+
# appended; short_option should just be a single character, no ':'
|
| 55 |
+
# in any case. If a long_option doesn't have a corresponding
|
| 56 |
+
# short_option, short_option should be None. All option tuples
|
| 57 |
+
# must have long options.
|
| 58 |
+
self.option_table = option_table
|
| 59 |
+
|
| 60 |
+
# 'option_index' maps long option names to entries in the option
|
| 61 |
+
# table (ie. those 3-tuples).
|
| 62 |
+
self.option_index = {}
|
| 63 |
+
if self.option_table:
|
| 64 |
+
self._build_index()
|
| 65 |
+
|
| 66 |
+
# 'alias' records (duh) alias options; {'foo': 'bar'} means
|
| 67 |
+
# --foo is an alias for --bar
|
| 68 |
+
self.alias = {}
|
| 69 |
+
|
| 70 |
+
# 'negative_alias' keeps track of options that are the boolean
|
| 71 |
+
# opposite of some other option
|
| 72 |
+
self.negative_alias = {}
|
| 73 |
+
|
| 74 |
+
# These keep track of the information in the option table. We
|
| 75 |
+
# don't actually populate these structures until we're ready to
|
| 76 |
+
# parse the command-line, since the 'option_table' passed in here
|
| 77 |
+
# isn't necessarily the final word.
|
| 78 |
+
self.short_opts = []
|
| 79 |
+
self.long_opts = []
|
| 80 |
+
self.short2long = {}
|
| 81 |
+
self.attr_name = {}
|
| 82 |
+
self.takes_arg = {}
|
| 83 |
+
|
| 84 |
+
# And 'option_order' is filled up in 'getopt()'; it records the
|
| 85 |
+
# original order of options (and their values) on the command-line,
|
| 86 |
+
# but expands short options, converts aliases, etc.
|
| 87 |
+
self.option_order = []
|
| 88 |
+
|
| 89 |
+
def _build_index(self):
|
| 90 |
+
self.option_index.clear()
|
| 91 |
+
for option in self.option_table:
|
| 92 |
+
self.option_index[option[0]] = option
|
| 93 |
+
|
| 94 |
+
def set_option_table(self, option_table):
|
| 95 |
+
self.option_table = option_table
|
| 96 |
+
self._build_index()
|
| 97 |
+
|
| 98 |
+
def add_option(self, long_option, short_option=None, help_string=None):
|
| 99 |
+
if long_option in self.option_index:
|
| 100 |
+
raise DistutilsGetoptError(
|
| 101 |
+
f"option conflict: already an option '{long_option}'"
|
| 102 |
+
)
|
| 103 |
+
else:
|
| 104 |
+
option = (long_option, short_option, help_string)
|
| 105 |
+
self.option_table.append(option)
|
| 106 |
+
self.option_index[long_option] = option
|
| 107 |
+
|
| 108 |
+
def has_option(self, long_option):
|
| 109 |
+
"""Return true if the option table for this parser has an
|
| 110 |
+
option with long name 'long_option'."""
|
| 111 |
+
return long_option in self.option_index
|
| 112 |
+
|
| 113 |
+
def get_attr_name(self, long_option):
|
| 114 |
+
"""Translate long option name 'long_option' to the form it
|
| 115 |
+
has as an attribute of some object: ie., translate hyphens
|
| 116 |
+
to underscores."""
|
| 117 |
+
return long_option.translate(longopt_xlate)
|
| 118 |
+
|
| 119 |
+
def _check_alias_dict(self, aliases, what):
|
| 120 |
+
assert isinstance(aliases, dict)
|
| 121 |
+
for alias, opt in aliases.items():
|
| 122 |
+
if alias not in self.option_index:
|
| 123 |
+
raise DistutilsGetoptError(
|
| 124 |
+
f"invalid {what} '{alias}': option '{alias}' not defined"
|
| 125 |
+
)
|
| 126 |
+
if opt not in self.option_index:
|
| 127 |
+
raise DistutilsGetoptError(
|
| 128 |
+
f"invalid {what} '{alias}': aliased option '{opt}' not defined"
|
| 129 |
+
)
|
| 130 |
+
|
| 131 |
+
def set_aliases(self, alias):
|
| 132 |
+
"""Set the aliases for this option parser."""
|
| 133 |
+
self._check_alias_dict(alias, "alias")
|
| 134 |
+
self.alias = alias
|
| 135 |
+
|
| 136 |
+
def set_negative_aliases(self, negative_alias):
|
| 137 |
+
"""Set the negative aliases for this option parser.
|
| 138 |
+
'negative_alias' should be a dictionary mapping option names to
|
| 139 |
+
option names, both the key and value must already be defined
|
| 140 |
+
in the option table."""
|
| 141 |
+
self._check_alias_dict(negative_alias, "negative alias")
|
| 142 |
+
self.negative_alias = negative_alias
|
| 143 |
+
|
| 144 |
+
def _grok_option_table(self): # noqa: C901
|
| 145 |
+
"""Populate the various data structures that keep tabs on the
|
| 146 |
+
option table. Called by 'getopt()' before it can do anything
|
| 147 |
+
worthwhile.
|
| 148 |
+
"""
|
| 149 |
+
self.long_opts = []
|
| 150 |
+
self.short_opts = []
|
| 151 |
+
self.short2long.clear()
|
| 152 |
+
self.repeat = {}
|
| 153 |
+
|
| 154 |
+
for option in self.option_table:
|
| 155 |
+
if len(option) == 3:
|
| 156 |
+
long, short, help = option
|
| 157 |
+
repeat = 0
|
| 158 |
+
elif len(option) == 4:
|
| 159 |
+
long, short, help, repeat = option
|
| 160 |
+
else:
|
| 161 |
+
# the option table is part of the code, so simply
|
| 162 |
+
# assert that it is correct
|
| 163 |
+
raise ValueError(f"invalid option tuple: {option!r}")
|
| 164 |
+
|
| 165 |
+
# Type- and value-check the option names
|
| 166 |
+
if not isinstance(long, str) or len(long) < 2:
|
| 167 |
+
raise DistutilsGetoptError(
|
| 168 |
+
f"invalid long option '{long}': must be a string of length >= 2"
|
| 169 |
+
)
|
| 170 |
+
|
| 171 |
+
if not ((short is None) or (isinstance(short, str) and len(short) == 1)):
|
| 172 |
+
raise DistutilsGetoptError(
|
| 173 |
+
f"invalid short option '{short}': must a single character or None"
|
| 174 |
+
)
|
| 175 |
+
|
| 176 |
+
self.repeat[long] = repeat
|
| 177 |
+
self.long_opts.append(long)
|
| 178 |
+
|
| 179 |
+
if long[-1] == '=': # option takes an argument?
|
| 180 |
+
if short:
|
| 181 |
+
short = short + ':'
|
| 182 |
+
long = long[0:-1]
|
| 183 |
+
self.takes_arg[long] = True
|
| 184 |
+
else:
|
| 185 |
+
# Is option is a "negative alias" for some other option (eg.
|
| 186 |
+
# "quiet" == "!verbose")?
|
| 187 |
+
alias_to = self.negative_alias.get(long)
|
| 188 |
+
if alias_to is not None:
|
| 189 |
+
if self.takes_arg[alias_to]:
|
| 190 |
+
raise DistutilsGetoptError(
|
| 191 |
+
f"invalid negative alias '{long}': "
|
| 192 |
+
f"aliased option '{alias_to}' takes a value"
|
| 193 |
+
)
|
| 194 |
+
|
| 195 |
+
self.long_opts[-1] = long # XXX redundant?!
|
| 196 |
+
self.takes_arg[long] = False
|
| 197 |
+
|
| 198 |
+
# If this is an alias option, make sure its "takes arg" flag is
|
| 199 |
+
# the same as the option it's aliased to.
|
| 200 |
+
alias_to = self.alias.get(long)
|
| 201 |
+
if alias_to is not None:
|
| 202 |
+
if self.takes_arg[long] != self.takes_arg[alias_to]:
|
| 203 |
+
raise DistutilsGetoptError(
|
| 204 |
+
f"invalid alias '{long}': inconsistent with "
|
| 205 |
+
f"aliased option '{alias_to}' (one of them takes a value, "
|
| 206 |
+
"the other doesn't"
|
| 207 |
+
)
|
| 208 |
+
|
| 209 |
+
# Now enforce some bondage on the long option name, so we can
|
| 210 |
+
# later translate it to an attribute name on some object. Have
|
| 211 |
+
# to do this a bit late to make sure we've removed any trailing
|
| 212 |
+
# '='.
|
| 213 |
+
if not longopt_re.match(long):
|
| 214 |
+
raise DistutilsGetoptError(
|
| 215 |
+
f"invalid long option name '{long}' "
|
| 216 |
+
"(must be letters, numbers, hyphens only"
|
| 217 |
+
)
|
| 218 |
+
|
| 219 |
+
self.attr_name[long] = self.get_attr_name(long)
|
| 220 |
+
if short:
|
| 221 |
+
self.short_opts.append(short)
|
| 222 |
+
self.short2long[short[0]] = long
|
| 223 |
+
|
| 224 |
+
def getopt(self, args: Sequence[str] | None = None, object=None): # noqa: C901
|
| 225 |
+
"""Parse command-line options in args. Store as attributes on object.
|
| 226 |
+
|
| 227 |
+
If 'args' is None or not supplied, uses 'sys.argv[1:]'. If
|
| 228 |
+
'object' is None or not supplied, creates a new OptionDummy
|
| 229 |
+
object, stores option values there, and returns a tuple (args,
|
| 230 |
+
object). If 'object' is supplied, it is modified in place and
|
| 231 |
+
'getopt()' just returns 'args'; in both cases, the returned
|
| 232 |
+
'args' is a modified copy of the passed-in 'args' list, which
|
| 233 |
+
is left untouched.
|
| 234 |
+
"""
|
| 235 |
+
if args is None:
|
| 236 |
+
args = sys.argv[1:]
|
| 237 |
+
if object is None:
|
| 238 |
+
object = OptionDummy()
|
| 239 |
+
created_object = True
|
| 240 |
+
else:
|
| 241 |
+
created_object = False
|
| 242 |
+
|
| 243 |
+
self._grok_option_table()
|
| 244 |
+
|
| 245 |
+
short_opts = ' '.join(self.short_opts)
|
| 246 |
+
try:
|
| 247 |
+
opts, args = getopt.getopt(args, short_opts, self.long_opts)
|
| 248 |
+
except getopt.error as msg:
|
| 249 |
+
raise DistutilsArgError(msg)
|
| 250 |
+
|
| 251 |
+
for opt, val in opts:
|
| 252 |
+
if len(opt) == 2 and opt[0] == '-': # it's a short option
|
| 253 |
+
opt = self.short2long[opt[1]]
|
| 254 |
+
else:
|
| 255 |
+
assert len(opt) > 2 and opt[:2] == '--'
|
| 256 |
+
opt = opt[2:]
|
| 257 |
+
|
| 258 |
+
alias = self.alias.get(opt)
|
| 259 |
+
if alias:
|
| 260 |
+
opt = alias
|
| 261 |
+
|
| 262 |
+
if not self.takes_arg[opt]: # boolean option?
|
| 263 |
+
assert val == '', "boolean option can't have value"
|
| 264 |
+
alias = self.negative_alias.get(opt)
|
| 265 |
+
if alias:
|
| 266 |
+
opt = alias
|
| 267 |
+
val = 0
|
| 268 |
+
else:
|
| 269 |
+
val = 1
|
| 270 |
+
|
| 271 |
+
attr = self.attr_name[opt]
|
| 272 |
+
# The only repeating option at the moment is 'verbose'.
|
| 273 |
+
# It has a negative option -q quiet, which should set verbose = False.
|
| 274 |
+
if val and self.repeat.get(attr) is not None:
|
| 275 |
+
val = getattr(object, attr, 0) + 1
|
| 276 |
+
setattr(object, attr, val)
|
| 277 |
+
self.option_order.append((opt, val))
|
| 278 |
+
|
| 279 |
+
# for opts
|
| 280 |
+
if created_object:
|
| 281 |
+
return args, object
|
| 282 |
+
else:
|
| 283 |
+
return args
|
| 284 |
+
|
| 285 |
+
def get_option_order(self):
|
| 286 |
+
"""Returns the list of (option, value) tuples processed by the
|
| 287 |
+
previous run of 'getopt()'. Raises RuntimeError if
|
| 288 |
+
'getopt()' hasn't been called yet.
|
| 289 |
+
"""
|
| 290 |
+
if self.option_order is None:
|
| 291 |
+
raise RuntimeError("'getopt()' hasn't been called yet")
|
| 292 |
+
else:
|
| 293 |
+
return self.option_order
|
| 294 |
+
|
| 295 |
+
def generate_help(self, header=None): # noqa: C901
|
| 296 |
+
"""Generate help text (a list of strings, one per suggested line of
|
| 297 |
+
output) from the option table for this FancyGetopt object.
|
| 298 |
+
"""
|
| 299 |
+
# Blithely assume the option table is good: probably wouldn't call
|
| 300 |
+
# 'generate_help()' unless you've already called 'getopt()'.
|
| 301 |
+
|
| 302 |
+
# First pass: determine maximum length of long option names
|
| 303 |
+
max_opt = 0
|
| 304 |
+
for option in self.option_table:
|
| 305 |
+
long = option[0]
|
| 306 |
+
short = option[1]
|
| 307 |
+
ell = len(long)
|
| 308 |
+
if long[-1] == '=':
|
| 309 |
+
ell = ell - 1
|
| 310 |
+
if short is not None:
|
| 311 |
+
ell = ell + 5 # " (-x)" where short == 'x'
|
| 312 |
+
if ell > max_opt:
|
| 313 |
+
max_opt = ell
|
| 314 |
+
|
| 315 |
+
opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter
|
| 316 |
+
|
| 317 |
+
# Typical help block looks like this:
|
| 318 |
+
# --foo controls foonabulation
|
| 319 |
+
# Help block for longest option looks like this:
|
| 320 |
+
# --flimflam set the flim-flam level
|
| 321 |
+
# and with wrapped text:
|
| 322 |
+
# --flimflam set the flim-flam level (must be between
|
| 323 |
+
# 0 and 100, except on Tuesdays)
|
| 324 |
+
# Options with short names will have the short name shown (but
|
| 325 |
+
# it doesn't contribute to max_opt):
|
| 326 |
+
# --foo (-f) controls foonabulation
|
| 327 |
+
# If adding the short option would make the left column too wide,
|
| 328 |
+
# we push the explanation off to the next line
|
| 329 |
+
# --flimflam (-l)
|
| 330 |
+
# set the flim-flam level
|
| 331 |
+
# Important parameters:
|
| 332 |
+
# - 2 spaces before option block start lines
|
| 333 |
+
# - 2 dashes for each long option name
|
| 334 |
+
# - min. 2 spaces between option and explanation (gutter)
|
| 335 |
+
# - 5 characters (incl. space) for short option name
|
| 336 |
+
|
| 337 |
+
# Now generate lines of help text. (If 80 columns were good enough
|
| 338 |
+
# for Jesus, then 78 columns are good enough for me!)
|
| 339 |
+
line_width = 78
|
| 340 |
+
text_width = line_width - opt_width
|
| 341 |
+
big_indent = ' ' * opt_width
|
| 342 |
+
if header:
|
| 343 |
+
lines = [header]
|
| 344 |
+
else:
|
| 345 |
+
lines = ['Option summary:']
|
| 346 |
+
|
| 347 |
+
for option in self.option_table:
|
| 348 |
+
long, short, help = option[:3]
|
| 349 |
+
text = wrap_text(help, text_width)
|
| 350 |
+
if long[-1] == '=':
|
| 351 |
+
long = long[0:-1]
|
| 352 |
+
|
| 353 |
+
# Case 1: no short option at all (makes life easy)
|
| 354 |
+
if short is None:
|
| 355 |
+
if text:
|
| 356 |
+
lines.append(f" --{long:<{max_opt}} {text[0]}")
|
| 357 |
+
else:
|
| 358 |
+
lines.append(f" --{long:<{max_opt}}")
|
| 359 |
+
|
| 360 |
+
# Case 2: we have a short option, so we have to include it
|
| 361 |
+
# just after the long option
|
| 362 |
+
else:
|
| 363 |
+
opt_names = f"{long} (-{short})"
|
| 364 |
+
if text:
|
| 365 |
+
lines.append(f" --{opt_names:<{max_opt}} {text[0]}")
|
| 366 |
+
else:
|
| 367 |
+
lines.append(f" --{opt_names:<{max_opt}}")
|
| 368 |
+
|
| 369 |
+
for ell in text[1:]:
|
| 370 |
+
lines.append(big_indent + ell)
|
| 371 |
+
return lines
|
| 372 |
+
|
| 373 |
+
def print_help(self, header=None, file=None):
|
| 374 |
+
if file is None:
|
| 375 |
+
file = sys.stdout
|
| 376 |
+
for line in self.generate_help(header):
|
| 377 |
+
file.write(line + "\n")
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
def fancy_getopt(options, negative_opt, object, args: Sequence[str] | None):
|
| 381 |
+
parser = FancyGetopt(options)
|
| 382 |
+
parser.set_negative_aliases(negative_opt)
|
| 383 |
+
return parser.getopt(args, object)
|
| 384 |
+
|
| 385 |
+
|
| 386 |
+
WS_TRANS = {ord(_wschar): ' ' for _wschar in string.whitespace}
|
| 387 |
+
|
| 388 |
+
|
| 389 |
+
def wrap_text(text, width):
|
| 390 |
+
"""wrap_text(text : string, width : int) -> [string]
|
| 391 |
+
|
| 392 |
+
Split 'text' into multiple lines of no more than 'width' characters
|
| 393 |
+
each, and return the list of strings that results.
|
| 394 |
+
"""
|
| 395 |
+
if text is None:
|
| 396 |
+
return []
|
| 397 |
+
if len(text) <= width:
|
| 398 |
+
return [text]
|
| 399 |
+
|
| 400 |
+
text = text.expandtabs()
|
| 401 |
+
text = text.translate(WS_TRANS)
|
| 402 |
+
chunks = re.split(r'( +|-+)', text)
|
| 403 |
+
chunks = [ch for ch in chunks if ch] # ' - ' results in empty strings
|
| 404 |
+
lines = []
|
| 405 |
+
|
| 406 |
+
while chunks:
|
| 407 |
+
cur_line = [] # list of chunks (to-be-joined)
|
| 408 |
+
cur_len = 0 # length of current line
|
| 409 |
+
|
| 410 |
+
while chunks:
|
| 411 |
+
ell = len(chunks[0])
|
| 412 |
+
if cur_len + ell <= width: # can squeeze (at least) this chunk in
|
| 413 |
+
cur_line.append(chunks[0])
|
| 414 |
+
del chunks[0]
|
| 415 |
+
cur_len = cur_len + ell
|
| 416 |
+
else: # this line is full
|
| 417 |
+
# drop last chunk if all space
|
| 418 |
+
if cur_line and cur_line[-1][0] == ' ':
|
| 419 |
+
del cur_line[-1]
|
| 420 |
+
break
|
| 421 |
+
|
| 422 |
+
if chunks: # any chunks left to process?
|
| 423 |
+
# if the current line is still empty, then we had a single
|
| 424 |
+
# chunk that's too big too fit on a line -- so we break
|
| 425 |
+
# down and break it up at the line width
|
| 426 |
+
if cur_len == 0:
|
| 427 |
+
cur_line.append(chunks[0][0:width])
|
| 428 |
+
chunks[0] = chunks[0][width:]
|
| 429 |
+
|
| 430 |
+
# all-whitespace chunks at the end of a line can be discarded
|
| 431 |
+
# (and we know from the re.split above that if a chunk has
|
| 432 |
+
# *any* whitespace, it is *all* whitespace)
|
| 433 |
+
if chunks[0][0] == ' ':
|
| 434 |
+
del chunks[0]
|
| 435 |
+
|
| 436 |
+
# and store this line in the list-of-all-lines -- as a single
|
| 437 |
+
# string, of course!
|
| 438 |
+
lines.append(''.join(cur_line))
|
| 439 |
+
|
| 440 |
+
return lines
|
| 441 |
+
|
| 442 |
+
|
| 443 |
+
def translate_longopt(opt):
|
| 444 |
+
"""Convert a long option name to a valid Python identifier by
|
| 445 |
+
changing "-" to "_".
|
| 446 |
+
"""
|
| 447 |
+
return opt.translate(longopt_xlate)
|
| 448 |
+
|
| 449 |
+
|
| 450 |
+
class OptionDummy:
|
| 451 |
+
"""Dummy class just used as a place to hold command-line option
|
| 452 |
+
values as instance attributes."""
|
| 453 |
+
|
| 454 |
+
def __init__(self, options: Sequence[Any] = []):
|
| 455 |
+
"""Create a new OptionDummy instance. The attributes listed in
|
| 456 |
+
'options' will be initialized to None."""
|
| 457 |
+
for opt in options:
|
| 458 |
+
setattr(self, opt, None)
|
| 459 |
+
|
| 460 |
+
|
| 461 |
+
if __name__ == "__main__":
|
| 462 |
+
text = """\
|
| 463 |
+
Tra-la-la, supercalifragilisticexpialidocious.
|
| 464 |
+
How *do* you spell that odd word, anyways?
|
| 465 |
+
(Someone ask Mary -- she'll know [or she'll
|
| 466 |
+
say, "How should I know?"].)"""
|
| 467 |
+
|
| 468 |
+
for w in (10, 20, 30, 40):
|
| 469 |
+
print(f"width: {w}")
|
| 470 |
+
print("\n".join(wrap_text(text, w)))
|
| 471 |
+
print()
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/__init__.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Test suite for distutils.
|
| 3 |
+
|
| 4 |
+
Tests for the command classes in the distutils.command package are
|
| 5 |
+
included in distutils.tests as well, instead of using a separate
|
| 6 |
+
distutils.command.tests package, since command identification is done
|
| 7 |
+
by import rather than matching pre-defined names.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import shutil
|
| 11 |
+
from collections.abc import Sequence
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def missing_compiler_executable(cmd_names: Sequence[str] = []): # pragma: no cover
|
| 15 |
+
"""Check if the compiler components used to build the interpreter exist.
|
| 16 |
+
|
| 17 |
+
Check for the existence of the compiler executables whose names are listed
|
| 18 |
+
in 'cmd_names' or all the compiler executables when 'cmd_names' is empty
|
| 19 |
+
and return the first missing executable or None when none is found
|
| 20 |
+
missing.
|
| 21 |
+
|
| 22 |
+
"""
|
| 23 |
+
from distutils import ccompiler, errors, sysconfig
|
| 24 |
+
|
| 25 |
+
compiler = ccompiler.new_compiler()
|
| 26 |
+
sysconfig.customize_compiler(compiler)
|
| 27 |
+
if compiler.compiler_type == "msvc":
|
| 28 |
+
# MSVC has no executables, so check whether initialization succeeds
|
| 29 |
+
try:
|
| 30 |
+
compiler.initialize()
|
| 31 |
+
except errors.DistutilsPlatformError:
|
| 32 |
+
return "msvc"
|
| 33 |
+
for name in compiler.executables:
|
| 34 |
+
if cmd_names and name not in cmd_names:
|
| 35 |
+
continue
|
| 36 |
+
cmd = getattr(compiler, name)
|
| 37 |
+
if cmd_names:
|
| 38 |
+
assert cmd is not None, f"the '{name}' executable is not configured"
|
| 39 |
+
elif not cmd:
|
| 40 |
+
continue
|
| 41 |
+
if shutil.which(cmd[0]) is None:
|
| 42 |
+
return cmd[0]
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/support.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Support code for distutils test cases."""
|
| 2 |
+
|
| 3 |
+
import itertools
|
| 4 |
+
import os
|
| 5 |
+
import pathlib
|
| 6 |
+
import shutil
|
| 7 |
+
import sys
|
| 8 |
+
import sysconfig
|
| 9 |
+
import tempfile
|
| 10 |
+
from distutils.core import Distribution
|
| 11 |
+
|
| 12 |
+
import pytest
|
| 13 |
+
from more_itertools import always_iterable
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@pytest.mark.usefixtures('distutils_managed_tempdir')
|
| 17 |
+
class TempdirManager:
|
| 18 |
+
"""
|
| 19 |
+
Mix-in class that handles temporary directories for test cases.
|
| 20 |
+
"""
|
| 21 |
+
|
| 22 |
+
def mkdtemp(self):
|
| 23 |
+
"""Create a temporary directory that will be cleaned up.
|
| 24 |
+
|
| 25 |
+
Returns the path of the directory.
|
| 26 |
+
"""
|
| 27 |
+
d = tempfile.mkdtemp()
|
| 28 |
+
self.tempdirs.append(d)
|
| 29 |
+
return d
|
| 30 |
+
|
| 31 |
+
def write_file(self, path, content='xxx'):
|
| 32 |
+
"""Writes a file in the given path.
|
| 33 |
+
|
| 34 |
+
path can be a string or a sequence.
|
| 35 |
+
"""
|
| 36 |
+
pathlib.Path(*always_iterable(path)).write_text(content, encoding='utf-8')
|
| 37 |
+
|
| 38 |
+
def create_dist(self, pkg_name='foo', **kw):
|
| 39 |
+
"""Will generate a test environment.
|
| 40 |
+
|
| 41 |
+
This function creates:
|
| 42 |
+
- a Distribution instance using keywords
|
| 43 |
+
- a temporary directory with a package structure
|
| 44 |
+
|
| 45 |
+
It returns the package directory and the distribution
|
| 46 |
+
instance.
|
| 47 |
+
"""
|
| 48 |
+
tmp_dir = self.mkdtemp()
|
| 49 |
+
pkg_dir = os.path.join(tmp_dir, pkg_name)
|
| 50 |
+
os.mkdir(pkg_dir)
|
| 51 |
+
dist = Distribution(attrs=kw)
|
| 52 |
+
|
| 53 |
+
return pkg_dir, dist
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class DummyCommand:
|
| 57 |
+
"""Class to store options for retrieval via set_undefined_options()."""
|
| 58 |
+
|
| 59 |
+
def __init__(self, **kwargs):
|
| 60 |
+
vars(self).update(kwargs)
|
| 61 |
+
|
| 62 |
+
def ensure_finalized(self):
|
| 63 |
+
pass
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def copy_xxmodule_c(directory):
|
| 67 |
+
"""Helper for tests that need the xxmodule.c source file.
|
| 68 |
+
|
| 69 |
+
Example use:
|
| 70 |
+
|
| 71 |
+
def test_compile(self):
|
| 72 |
+
copy_xxmodule_c(self.tmpdir)
|
| 73 |
+
self.assertIn('xxmodule.c', os.listdir(self.tmpdir))
|
| 74 |
+
|
| 75 |
+
If the source file can be found, it will be copied to *directory*. If not,
|
| 76 |
+
the test will be skipped. Errors during copy are not caught.
|
| 77 |
+
"""
|
| 78 |
+
shutil.copy(_get_xxmodule_path(), os.path.join(directory, 'xxmodule.c'))
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def _get_xxmodule_path():
|
| 82 |
+
source_name = 'xxmodule.c' if sys.version_info > (3, 9) else 'xxmodule-3.8.c'
|
| 83 |
+
return os.path.join(os.path.dirname(__file__), source_name)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def fixup_build_ext(cmd):
|
| 87 |
+
"""Function needed to make build_ext tests pass.
|
| 88 |
+
|
| 89 |
+
When Python was built with --enable-shared on Unix, -L. is not enough to
|
| 90 |
+
find libpython<blah>.so, because regrtest runs in a tempdir, not in the
|
| 91 |
+
source directory where the .so lives.
|
| 92 |
+
|
| 93 |
+
When Python was built with in debug mode on Windows, build_ext commands
|
| 94 |
+
need their debug attribute set, and it is not done automatically for
|
| 95 |
+
some reason.
|
| 96 |
+
|
| 97 |
+
This function handles both of these things. Example use:
|
| 98 |
+
|
| 99 |
+
cmd = build_ext(dist)
|
| 100 |
+
support.fixup_build_ext(cmd)
|
| 101 |
+
cmd.ensure_finalized()
|
| 102 |
+
|
| 103 |
+
Unlike most other Unix platforms, Mac OS X embeds absolute paths
|
| 104 |
+
to shared libraries into executables, so the fixup is not needed there.
|
| 105 |
+
"""
|
| 106 |
+
if os.name == 'nt':
|
| 107 |
+
cmd.debug = sys.executable.endswith('_d.exe')
|
| 108 |
+
elif sysconfig.get_config_var('Py_ENABLE_SHARED'):
|
| 109 |
+
# To further add to the shared builds fun on Unix, we can't just add
|
| 110 |
+
# library_dirs to the Extension() instance because that doesn't get
|
| 111 |
+
# plumbed through to the final compiler command.
|
| 112 |
+
runshared = sysconfig.get_config_var('RUNSHARED')
|
| 113 |
+
if runshared is None:
|
| 114 |
+
cmd.library_dirs = ['.']
|
| 115 |
+
else:
|
| 116 |
+
if sys.platform == 'darwin':
|
| 117 |
+
cmd.library_dirs = []
|
| 118 |
+
else:
|
| 119 |
+
name, equals, value = runshared.partition('=')
|
| 120 |
+
cmd.library_dirs = [d for d in value.split(os.pathsep) if d]
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
def combine_markers(cls):
|
| 124 |
+
"""
|
| 125 |
+
pytest will honor markers as found on the class, but when
|
| 126 |
+
markers are on multiple subclasses, only one appears. Use
|
| 127 |
+
this decorator to combine those markers.
|
| 128 |
+
"""
|
| 129 |
+
cls.pytestmark = [
|
| 130 |
+
mark
|
| 131 |
+
for base in itertools.chain([cls], cls.__bases__)
|
| 132 |
+
for mark in getattr(base, 'pytestmark', [])
|
| 133 |
+
]
|
| 134 |
+
return cls
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_archive_util.py
ADDED
|
@@ -0,0 +1,353 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.archive_util."""
|
| 2 |
+
|
| 3 |
+
import functools
|
| 4 |
+
import operator
|
| 5 |
+
import os
|
| 6 |
+
import pathlib
|
| 7 |
+
import sys
|
| 8 |
+
import tarfile
|
| 9 |
+
from distutils import archive_util
|
| 10 |
+
from distutils.archive_util import (
|
| 11 |
+
ARCHIVE_FORMATS,
|
| 12 |
+
check_archive_formats,
|
| 13 |
+
make_archive,
|
| 14 |
+
make_tarball,
|
| 15 |
+
make_zipfile,
|
| 16 |
+
)
|
| 17 |
+
from distutils.spawn import spawn
|
| 18 |
+
from distutils.tests import support
|
| 19 |
+
from os.path import splitdrive
|
| 20 |
+
|
| 21 |
+
import path
|
| 22 |
+
import pytest
|
| 23 |
+
from test.support import patch
|
| 24 |
+
|
| 25 |
+
from .unix_compat import UID_0_SUPPORT, grp, pwd, require_uid_0, require_unix_id
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def can_fs_encode(filename):
|
| 29 |
+
"""
|
| 30 |
+
Return True if the filename can be saved in the file system.
|
| 31 |
+
"""
|
| 32 |
+
if os.path.supports_unicode_filenames:
|
| 33 |
+
return True
|
| 34 |
+
try:
|
| 35 |
+
filename.encode(sys.getfilesystemencoding())
|
| 36 |
+
except UnicodeEncodeError:
|
| 37 |
+
return False
|
| 38 |
+
return True
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def all_equal(values):
|
| 42 |
+
return functools.reduce(operator.eq, values)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def same_drive(*paths):
|
| 46 |
+
return all_equal(pathlib.Path(path).drive for path in paths)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class ArchiveUtilTestCase(support.TempdirManager):
|
| 50 |
+
@pytest.mark.usefixtures('needs_zlib')
|
| 51 |
+
def test_make_tarball(self, name='archive'):
|
| 52 |
+
# creating something to tar
|
| 53 |
+
tmpdir = self._create_files()
|
| 54 |
+
self._make_tarball(tmpdir, name, '.tar.gz')
|
| 55 |
+
# trying an uncompressed one
|
| 56 |
+
self._make_tarball(tmpdir, name, '.tar', compress=None)
|
| 57 |
+
|
| 58 |
+
@pytest.mark.usefixtures('needs_zlib')
|
| 59 |
+
def test_make_tarball_gzip(self):
|
| 60 |
+
tmpdir = self._create_files()
|
| 61 |
+
self._make_tarball(tmpdir, 'archive', '.tar.gz', compress='gzip')
|
| 62 |
+
|
| 63 |
+
def test_make_tarball_bzip2(self):
|
| 64 |
+
pytest.importorskip('bz2')
|
| 65 |
+
tmpdir = self._create_files()
|
| 66 |
+
self._make_tarball(tmpdir, 'archive', '.tar.bz2', compress='bzip2')
|
| 67 |
+
|
| 68 |
+
def test_make_tarball_xz(self):
|
| 69 |
+
pytest.importorskip('lzma')
|
| 70 |
+
tmpdir = self._create_files()
|
| 71 |
+
self._make_tarball(tmpdir, 'archive', '.tar.xz', compress='xz')
|
| 72 |
+
|
| 73 |
+
@pytest.mark.skipif("not can_fs_encode('årchiv')")
|
| 74 |
+
def test_make_tarball_latin1(self):
|
| 75 |
+
"""
|
| 76 |
+
Mirror test_make_tarball, except filename contains latin characters.
|
| 77 |
+
"""
|
| 78 |
+
self.test_make_tarball('årchiv') # note this isn't a real word
|
| 79 |
+
|
| 80 |
+
@pytest.mark.skipif("not can_fs_encode('のアーカイブ')")
|
| 81 |
+
def test_make_tarball_extended(self):
|
| 82 |
+
"""
|
| 83 |
+
Mirror test_make_tarball, except filename contains extended
|
| 84 |
+
characters outside the latin charset.
|
| 85 |
+
"""
|
| 86 |
+
self.test_make_tarball('のアーカイブ') # japanese for archive
|
| 87 |
+
|
| 88 |
+
def _make_tarball(self, tmpdir, target_name, suffix, **kwargs):
|
| 89 |
+
tmpdir2 = self.mkdtemp()
|
| 90 |
+
if same_drive(tmpdir, tmpdir2):
|
| 91 |
+
pytest.skip("source and target should be on same drive")
|
| 92 |
+
|
| 93 |
+
base_name = os.path.join(tmpdir2, target_name)
|
| 94 |
+
|
| 95 |
+
# working with relative paths to avoid tar warnings
|
| 96 |
+
with path.Path(tmpdir):
|
| 97 |
+
make_tarball(splitdrive(base_name)[1], 'dist', **kwargs)
|
| 98 |
+
|
| 99 |
+
# check if the compressed tarball was created
|
| 100 |
+
tarball = base_name + suffix
|
| 101 |
+
assert os.path.exists(tarball)
|
| 102 |
+
assert self._tarinfo(tarball) == self._created_files
|
| 103 |
+
|
| 104 |
+
def _tarinfo(self, path):
|
| 105 |
+
tar = tarfile.open(path)
|
| 106 |
+
try:
|
| 107 |
+
names = tar.getnames()
|
| 108 |
+
names.sort()
|
| 109 |
+
return names
|
| 110 |
+
finally:
|
| 111 |
+
tar.close()
|
| 112 |
+
|
| 113 |
+
_zip_created_files = [
|
| 114 |
+
'dist/',
|
| 115 |
+
'dist/file1',
|
| 116 |
+
'dist/file2',
|
| 117 |
+
'dist/sub/',
|
| 118 |
+
'dist/sub/file3',
|
| 119 |
+
'dist/sub2/',
|
| 120 |
+
]
|
| 121 |
+
_created_files = [p.rstrip('/') for p in _zip_created_files]
|
| 122 |
+
|
| 123 |
+
def _create_files(self):
|
| 124 |
+
# creating something to tar
|
| 125 |
+
tmpdir = self.mkdtemp()
|
| 126 |
+
dist = os.path.join(tmpdir, 'dist')
|
| 127 |
+
os.mkdir(dist)
|
| 128 |
+
self.write_file([dist, 'file1'], 'xxx')
|
| 129 |
+
self.write_file([dist, 'file2'], 'xxx')
|
| 130 |
+
os.mkdir(os.path.join(dist, 'sub'))
|
| 131 |
+
self.write_file([dist, 'sub', 'file3'], 'xxx')
|
| 132 |
+
os.mkdir(os.path.join(dist, 'sub2'))
|
| 133 |
+
return tmpdir
|
| 134 |
+
|
| 135 |
+
@pytest.mark.usefixtures('needs_zlib')
|
| 136 |
+
@pytest.mark.skipif("not (shutil.which('tar') and shutil.which('gzip'))")
|
| 137 |
+
def test_tarfile_vs_tar(self):
|
| 138 |
+
tmpdir = self._create_files()
|
| 139 |
+
tmpdir2 = self.mkdtemp()
|
| 140 |
+
base_name = os.path.join(tmpdir2, 'archive')
|
| 141 |
+
old_dir = os.getcwd()
|
| 142 |
+
os.chdir(tmpdir)
|
| 143 |
+
try:
|
| 144 |
+
make_tarball(base_name, 'dist')
|
| 145 |
+
finally:
|
| 146 |
+
os.chdir(old_dir)
|
| 147 |
+
|
| 148 |
+
# check if the compressed tarball was created
|
| 149 |
+
tarball = base_name + '.tar.gz'
|
| 150 |
+
assert os.path.exists(tarball)
|
| 151 |
+
|
| 152 |
+
# now create another tarball using `tar`
|
| 153 |
+
tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
|
| 154 |
+
tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
|
| 155 |
+
gzip_cmd = ['gzip', '-f', '-9', 'archive2.tar']
|
| 156 |
+
old_dir = os.getcwd()
|
| 157 |
+
os.chdir(tmpdir)
|
| 158 |
+
try:
|
| 159 |
+
spawn(tar_cmd)
|
| 160 |
+
spawn(gzip_cmd)
|
| 161 |
+
finally:
|
| 162 |
+
os.chdir(old_dir)
|
| 163 |
+
|
| 164 |
+
assert os.path.exists(tarball2)
|
| 165 |
+
# let's compare both tarballs
|
| 166 |
+
assert self._tarinfo(tarball) == self._created_files
|
| 167 |
+
assert self._tarinfo(tarball2) == self._created_files
|
| 168 |
+
|
| 169 |
+
# trying an uncompressed one
|
| 170 |
+
base_name = os.path.join(tmpdir2, 'archive')
|
| 171 |
+
old_dir = os.getcwd()
|
| 172 |
+
os.chdir(tmpdir)
|
| 173 |
+
try:
|
| 174 |
+
make_tarball(base_name, 'dist', compress=None)
|
| 175 |
+
finally:
|
| 176 |
+
os.chdir(old_dir)
|
| 177 |
+
tarball = base_name + '.tar'
|
| 178 |
+
assert os.path.exists(tarball)
|
| 179 |
+
|
| 180 |
+
# now for a dry_run
|
| 181 |
+
base_name = os.path.join(tmpdir2, 'archive')
|
| 182 |
+
old_dir = os.getcwd()
|
| 183 |
+
os.chdir(tmpdir)
|
| 184 |
+
try:
|
| 185 |
+
make_tarball(base_name, 'dist', compress=None, dry_run=True)
|
| 186 |
+
finally:
|
| 187 |
+
os.chdir(old_dir)
|
| 188 |
+
tarball = base_name + '.tar'
|
| 189 |
+
assert os.path.exists(tarball)
|
| 190 |
+
|
| 191 |
+
@pytest.mark.usefixtures('needs_zlib')
|
| 192 |
+
def test_make_zipfile(self):
|
| 193 |
+
zipfile = pytest.importorskip('zipfile')
|
| 194 |
+
# creating something to tar
|
| 195 |
+
tmpdir = self._create_files()
|
| 196 |
+
base_name = os.path.join(self.mkdtemp(), 'archive')
|
| 197 |
+
with path.Path(tmpdir):
|
| 198 |
+
make_zipfile(base_name, 'dist')
|
| 199 |
+
|
| 200 |
+
# check if the compressed tarball was created
|
| 201 |
+
tarball = base_name + '.zip'
|
| 202 |
+
assert os.path.exists(tarball)
|
| 203 |
+
with zipfile.ZipFile(tarball) as zf:
|
| 204 |
+
assert sorted(zf.namelist()) == self._zip_created_files
|
| 205 |
+
|
| 206 |
+
def test_make_zipfile_no_zlib(self):
|
| 207 |
+
zipfile = pytest.importorskip('zipfile')
|
| 208 |
+
patch(self, archive_util.zipfile, 'zlib', None) # force zlib ImportError
|
| 209 |
+
|
| 210 |
+
called = []
|
| 211 |
+
zipfile_class = zipfile.ZipFile
|
| 212 |
+
|
| 213 |
+
def fake_zipfile(*a, **kw):
|
| 214 |
+
if kw.get('compression', None) == zipfile.ZIP_STORED:
|
| 215 |
+
called.append((a, kw))
|
| 216 |
+
return zipfile_class(*a, **kw)
|
| 217 |
+
|
| 218 |
+
patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile)
|
| 219 |
+
|
| 220 |
+
# create something to tar and compress
|
| 221 |
+
tmpdir = self._create_files()
|
| 222 |
+
base_name = os.path.join(self.mkdtemp(), 'archive')
|
| 223 |
+
with path.Path(tmpdir):
|
| 224 |
+
make_zipfile(base_name, 'dist')
|
| 225 |
+
|
| 226 |
+
tarball = base_name + '.zip'
|
| 227 |
+
assert called == [((tarball, "w"), {'compression': zipfile.ZIP_STORED})]
|
| 228 |
+
assert os.path.exists(tarball)
|
| 229 |
+
with zipfile.ZipFile(tarball) as zf:
|
| 230 |
+
assert sorted(zf.namelist()) == self._zip_created_files
|
| 231 |
+
|
| 232 |
+
def test_check_archive_formats(self):
|
| 233 |
+
assert check_archive_formats(['gztar', 'xxx', 'zip']) == 'xxx'
|
| 234 |
+
assert (
|
| 235 |
+
check_archive_formats(['gztar', 'bztar', 'xztar', 'ztar', 'tar', 'zip'])
|
| 236 |
+
is None
|
| 237 |
+
)
|
| 238 |
+
|
| 239 |
+
def test_make_archive(self):
|
| 240 |
+
tmpdir = self.mkdtemp()
|
| 241 |
+
base_name = os.path.join(tmpdir, 'archive')
|
| 242 |
+
with pytest.raises(ValueError):
|
| 243 |
+
make_archive(base_name, 'xxx')
|
| 244 |
+
|
| 245 |
+
def test_make_archive_cwd(self):
|
| 246 |
+
current_dir = os.getcwd()
|
| 247 |
+
|
| 248 |
+
def _breaks(*args, **kw):
|
| 249 |
+
raise RuntimeError()
|
| 250 |
+
|
| 251 |
+
ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file')
|
| 252 |
+
try:
|
| 253 |
+
try:
|
| 254 |
+
make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
|
| 255 |
+
except Exception:
|
| 256 |
+
pass
|
| 257 |
+
assert os.getcwd() == current_dir
|
| 258 |
+
finally:
|
| 259 |
+
ARCHIVE_FORMATS.pop('xxx')
|
| 260 |
+
|
| 261 |
+
def test_make_archive_tar(self):
|
| 262 |
+
base_dir = self._create_files()
|
| 263 |
+
base_name = os.path.join(self.mkdtemp(), 'archive')
|
| 264 |
+
res = make_archive(base_name, 'tar', base_dir, 'dist')
|
| 265 |
+
assert os.path.exists(res)
|
| 266 |
+
assert os.path.basename(res) == 'archive.tar'
|
| 267 |
+
assert self._tarinfo(res) == self._created_files
|
| 268 |
+
|
| 269 |
+
@pytest.mark.usefixtures('needs_zlib')
|
| 270 |
+
def test_make_archive_gztar(self):
|
| 271 |
+
base_dir = self._create_files()
|
| 272 |
+
base_name = os.path.join(self.mkdtemp(), 'archive')
|
| 273 |
+
res = make_archive(base_name, 'gztar', base_dir, 'dist')
|
| 274 |
+
assert os.path.exists(res)
|
| 275 |
+
assert os.path.basename(res) == 'archive.tar.gz'
|
| 276 |
+
assert self._tarinfo(res) == self._created_files
|
| 277 |
+
|
| 278 |
+
def test_make_archive_bztar(self):
|
| 279 |
+
pytest.importorskip('bz2')
|
| 280 |
+
base_dir = self._create_files()
|
| 281 |
+
base_name = os.path.join(self.mkdtemp(), 'archive')
|
| 282 |
+
res = make_archive(base_name, 'bztar', base_dir, 'dist')
|
| 283 |
+
assert os.path.exists(res)
|
| 284 |
+
assert os.path.basename(res) == 'archive.tar.bz2'
|
| 285 |
+
assert self._tarinfo(res) == self._created_files
|
| 286 |
+
|
| 287 |
+
def test_make_archive_xztar(self):
|
| 288 |
+
pytest.importorskip('lzma')
|
| 289 |
+
base_dir = self._create_files()
|
| 290 |
+
base_name = os.path.join(self.mkdtemp(), 'archive')
|
| 291 |
+
res = make_archive(base_name, 'xztar', base_dir, 'dist')
|
| 292 |
+
assert os.path.exists(res)
|
| 293 |
+
assert os.path.basename(res) == 'archive.tar.xz'
|
| 294 |
+
assert self._tarinfo(res) == self._created_files
|
| 295 |
+
|
| 296 |
+
def test_make_archive_owner_group(self):
|
| 297 |
+
# testing make_archive with owner and group, with various combinations
|
| 298 |
+
# this works even if there's not gid/uid support
|
| 299 |
+
if UID_0_SUPPORT:
|
| 300 |
+
group = grp.getgrgid(0)[0]
|
| 301 |
+
owner = pwd.getpwuid(0)[0]
|
| 302 |
+
else:
|
| 303 |
+
group = owner = 'root'
|
| 304 |
+
|
| 305 |
+
base_dir = self._create_files()
|
| 306 |
+
root_dir = self.mkdtemp()
|
| 307 |
+
base_name = os.path.join(self.mkdtemp(), 'archive')
|
| 308 |
+
res = make_archive(
|
| 309 |
+
base_name, 'zip', root_dir, base_dir, owner=owner, group=group
|
| 310 |
+
)
|
| 311 |
+
assert os.path.exists(res)
|
| 312 |
+
|
| 313 |
+
res = make_archive(base_name, 'zip', root_dir, base_dir)
|
| 314 |
+
assert os.path.exists(res)
|
| 315 |
+
|
| 316 |
+
res = make_archive(
|
| 317 |
+
base_name, 'tar', root_dir, base_dir, owner=owner, group=group
|
| 318 |
+
)
|
| 319 |
+
assert os.path.exists(res)
|
| 320 |
+
|
| 321 |
+
res = make_archive(
|
| 322 |
+
base_name, 'tar', root_dir, base_dir, owner='kjhkjhkjg', group='oihohoh'
|
| 323 |
+
)
|
| 324 |
+
assert os.path.exists(res)
|
| 325 |
+
|
| 326 |
+
@pytest.mark.usefixtures('needs_zlib')
|
| 327 |
+
@require_unix_id
|
| 328 |
+
@require_uid_0
|
| 329 |
+
def test_tarfile_root_owner(self):
|
| 330 |
+
tmpdir = self._create_files()
|
| 331 |
+
base_name = os.path.join(self.mkdtemp(), 'archive')
|
| 332 |
+
old_dir = os.getcwd()
|
| 333 |
+
os.chdir(tmpdir)
|
| 334 |
+
group = grp.getgrgid(0)[0]
|
| 335 |
+
owner = pwd.getpwuid(0)[0]
|
| 336 |
+
try:
|
| 337 |
+
archive_name = make_tarball(
|
| 338 |
+
base_name, 'dist', compress=None, owner=owner, group=group
|
| 339 |
+
)
|
| 340 |
+
finally:
|
| 341 |
+
os.chdir(old_dir)
|
| 342 |
+
|
| 343 |
+
# check if the compressed tarball was created
|
| 344 |
+
assert os.path.exists(archive_name)
|
| 345 |
+
|
| 346 |
+
# now checks the rights
|
| 347 |
+
archive = tarfile.open(archive_name)
|
| 348 |
+
try:
|
| 349 |
+
for member in archive.getmembers():
|
| 350 |
+
assert member.uid == 0
|
| 351 |
+
assert member.gid == 0
|
| 352 |
+
finally:
|
| 353 |
+
archive.close()
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_bdist_dumb.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.bdist_dumb."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
import zipfile
|
| 6 |
+
from distutils.command.bdist_dumb import bdist_dumb
|
| 7 |
+
from distutils.core import Distribution
|
| 8 |
+
from distutils.tests import support
|
| 9 |
+
|
| 10 |
+
import pytest
|
| 11 |
+
|
| 12 |
+
SETUP_PY = """\
|
| 13 |
+
from distutils.core import setup
|
| 14 |
+
import foo
|
| 15 |
+
|
| 16 |
+
setup(name='foo', version='0.1', py_modules=['foo'],
|
| 17 |
+
url='xxx', author='xxx', author_email='xxx')
|
| 18 |
+
|
| 19 |
+
"""
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@support.combine_markers
|
| 23 |
+
@pytest.mark.usefixtures('save_env')
|
| 24 |
+
@pytest.mark.usefixtures('save_argv')
|
| 25 |
+
@pytest.mark.usefixtures('save_cwd')
|
| 26 |
+
class TestBuildDumb(
|
| 27 |
+
support.TempdirManager,
|
| 28 |
+
):
|
| 29 |
+
@pytest.mark.usefixtures('needs_zlib')
|
| 30 |
+
def test_simple_built(self):
|
| 31 |
+
# let's create a simple package
|
| 32 |
+
tmp_dir = self.mkdtemp()
|
| 33 |
+
pkg_dir = os.path.join(tmp_dir, 'foo')
|
| 34 |
+
os.mkdir(pkg_dir)
|
| 35 |
+
self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
|
| 36 |
+
self.write_file((pkg_dir, 'foo.py'), '#')
|
| 37 |
+
self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
|
| 38 |
+
self.write_file((pkg_dir, 'README'), '')
|
| 39 |
+
|
| 40 |
+
dist = Distribution({
|
| 41 |
+
'name': 'foo',
|
| 42 |
+
'version': '0.1',
|
| 43 |
+
'py_modules': ['foo'],
|
| 44 |
+
'url': 'xxx',
|
| 45 |
+
'author': 'xxx',
|
| 46 |
+
'author_email': 'xxx',
|
| 47 |
+
})
|
| 48 |
+
dist.script_name = 'setup.py'
|
| 49 |
+
os.chdir(pkg_dir)
|
| 50 |
+
|
| 51 |
+
sys.argv = ['setup.py']
|
| 52 |
+
cmd = bdist_dumb(dist)
|
| 53 |
+
|
| 54 |
+
# so the output is the same no matter
|
| 55 |
+
# what is the platform
|
| 56 |
+
cmd.format = 'zip'
|
| 57 |
+
|
| 58 |
+
cmd.ensure_finalized()
|
| 59 |
+
cmd.run()
|
| 60 |
+
|
| 61 |
+
# see what we have
|
| 62 |
+
dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
|
| 63 |
+
base = f"{dist.get_fullname()}.{cmd.plat_name}.zip"
|
| 64 |
+
|
| 65 |
+
assert dist_created == [base]
|
| 66 |
+
|
| 67 |
+
# now let's check what we have in the zip file
|
| 68 |
+
fp = zipfile.ZipFile(os.path.join('dist', base))
|
| 69 |
+
try:
|
| 70 |
+
contents = fp.namelist()
|
| 71 |
+
finally:
|
| 72 |
+
fp.close()
|
| 73 |
+
|
| 74 |
+
contents = sorted(filter(None, map(os.path.basename, contents)))
|
| 75 |
+
wanted = ['foo-0.1-py{}.{}.egg-info'.format(*sys.version_info[:2]), 'foo.py']
|
| 76 |
+
if not sys.dont_write_bytecode:
|
| 77 |
+
wanted.append(f'foo.{sys.implementation.cache_tag}.pyc')
|
| 78 |
+
assert contents == sorted(wanted)
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_bdist_rpm.py
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.bdist_rpm."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import shutil # noqa: F401
|
| 5 |
+
import sys
|
| 6 |
+
from distutils.command.bdist_rpm import bdist_rpm
|
| 7 |
+
from distutils.core import Distribution
|
| 8 |
+
from distutils.tests import support
|
| 9 |
+
|
| 10 |
+
import pytest
|
| 11 |
+
from test.support import requires_zlib
|
| 12 |
+
|
| 13 |
+
SETUP_PY = """\
|
| 14 |
+
from distutils.core import setup
|
| 15 |
+
import foo
|
| 16 |
+
|
| 17 |
+
setup(name='foo', version='0.1', py_modules=['foo'],
|
| 18 |
+
url='xxx', author='xxx', author_email='xxx')
|
| 19 |
+
|
| 20 |
+
"""
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
@pytest.fixture(autouse=True)
|
| 24 |
+
def sys_executable_encodable():
|
| 25 |
+
try:
|
| 26 |
+
sys.executable.encode('UTF-8')
|
| 27 |
+
except UnicodeEncodeError:
|
| 28 |
+
pytest.skip("sys.executable is not encodable to UTF-8")
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
mac_woes = pytest.mark.skipif(
|
| 32 |
+
"not sys.platform.startswith('linux')",
|
| 33 |
+
reason='spurious sdtout/stderr output under macOS',
|
| 34 |
+
)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
@pytest.mark.usefixtures('save_env')
|
| 38 |
+
@pytest.mark.usefixtures('save_argv')
|
| 39 |
+
@pytest.mark.usefixtures('save_cwd')
|
| 40 |
+
class TestBuildRpm(
|
| 41 |
+
support.TempdirManager,
|
| 42 |
+
):
|
| 43 |
+
@mac_woes
|
| 44 |
+
@requires_zlib()
|
| 45 |
+
@pytest.mark.skipif("not shutil.which('rpm')")
|
| 46 |
+
@pytest.mark.skipif("not shutil.which('rpmbuild')")
|
| 47 |
+
def test_quiet(self):
|
| 48 |
+
# let's create a package
|
| 49 |
+
tmp_dir = self.mkdtemp()
|
| 50 |
+
os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation
|
| 51 |
+
pkg_dir = os.path.join(tmp_dir, 'foo')
|
| 52 |
+
os.mkdir(pkg_dir)
|
| 53 |
+
self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
|
| 54 |
+
self.write_file((pkg_dir, 'foo.py'), '#')
|
| 55 |
+
self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
|
| 56 |
+
self.write_file((pkg_dir, 'README'), '')
|
| 57 |
+
|
| 58 |
+
dist = Distribution({
|
| 59 |
+
'name': 'foo',
|
| 60 |
+
'version': '0.1',
|
| 61 |
+
'py_modules': ['foo'],
|
| 62 |
+
'url': 'xxx',
|
| 63 |
+
'author': 'xxx',
|
| 64 |
+
'author_email': 'xxx',
|
| 65 |
+
})
|
| 66 |
+
dist.script_name = 'setup.py'
|
| 67 |
+
os.chdir(pkg_dir)
|
| 68 |
+
|
| 69 |
+
sys.argv = ['setup.py']
|
| 70 |
+
cmd = bdist_rpm(dist)
|
| 71 |
+
cmd.fix_python = True
|
| 72 |
+
|
| 73 |
+
# running in quiet mode
|
| 74 |
+
cmd.quiet = True
|
| 75 |
+
cmd.ensure_finalized()
|
| 76 |
+
cmd.run()
|
| 77 |
+
|
| 78 |
+
dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
|
| 79 |
+
assert 'foo-0.1-1.noarch.rpm' in dist_created
|
| 80 |
+
|
| 81 |
+
# bug #2945: upload ignores bdist_rpm files
|
| 82 |
+
assert ('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm') in dist.dist_files
|
| 83 |
+
assert ('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm') in dist.dist_files
|
| 84 |
+
|
| 85 |
+
@mac_woes
|
| 86 |
+
@requires_zlib()
|
| 87 |
+
# https://bugs.python.org/issue1533164
|
| 88 |
+
@pytest.mark.skipif("not shutil.which('rpm')")
|
| 89 |
+
@pytest.mark.skipif("not shutil.which('rpmbuild')")
|
| 90 |
+
def test_no_optimize_flag(self):
|
| 91 |
+
# let's create a package that breaks bdist_rpm
|
| 92 |
+
tmp_dir = self.mkdtemp()
|
| 93 |
+
os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation
|
| 94 |
+
pkg_dir = os.path.join(tmp_dir, 'foo')
|
| 95 |
+
os.mkdir(pkg_dir)
|
| 96 |
+
self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
|
| 97 |
+
self.write_file((pkg_dir, 'foo.py'), '#')
|
| 98 |
+
self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
|
| 99 |
+
self.write_file((pkg_dir, 'README'), '')
|
| 100 |
+
|
| 101 |
+
dist = Distribution({
|
| 102 |
+
'name': 'foo',
|
| 103 |
+
'version': '0.1',
|
| 104 |
+
'py_modules': ['foo'],
|
| 105 |
+
'url': 'xxx',
|
| 106 |
+
'author': 'xxx',
|
| 107 |
+
'author_email': 'xxx',
|
| 108 |
+
})
|
| 109 |
+
dist.script_name = 'setup.py'
|
| 110 |
+
os.chdir(pkg_dir)
|
| 111 |
+
|
| 112 |
+
sys.argv = ['setup.py']
|
| 113 |
+
cmd = bdist_rpm(dist)
|
| 114 |
+
cmd.fix_python = True
|
| 115 |
+
|
| 116 |
+
cmd.quiet = True
|
| 117 |
+
cmd.ensure_finalized()
|
| 118 |
+
cmd.run()
|
| 119 |
+
|
| 120 |
+
dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
|
| 121 |
+
assert 'foo-0.1-1.noarch.rpm' in dist_created
|
| 122 |
+
|
| 123 |
+
# bug #2945: upload ignores bdist_rpm files
|
| 124 |
+
assert ('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm') in dist.dist_files
|
| 125 |
+
assert ('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm') in dist.dist_files
|
| 126 |
+
|
| 127 |
+
os.remove(os.path.join(pkg_dir, 'dist', 'foo-0.1-1.noarch.rpm'))
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_build.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.build."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
from distutils.command.build import build
|
| 6 |
+
from distutils.tests import support
|
| 7 |
+
from sysconfig import get_config_var, get_platform
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TestBuild(support.TempdirManager):
|
| 11 |
+
def test_finalize_options(self):
|
| 12 |
+
pkg_dir, dist = self.create_dist()
|
| 13 |
+
cmd = build(dist)
|
| 14 |
+
cmd.finalize_options()
|
| 15 |
+
|
| 16 |
+
# if not specified, plat_name gets the current platform
|
| 17 |
+
assert cmd.plat_name == get_platform()
|
| 18 |
+
|
| 19 |
+
# build_purelib is build + lib
|
| 20 |
+
wanted = os.path.join(cmd.build_base, 'lib')
|
| 21 |
+
assert cmd.build_purelib == wanted
|
| 22 |
+
|
| 23 |
+
# build_platlib is 'build/lib.platform-cache_tag[-pydebug]'
|
| 24 |
+
# examples:
|
| 25 |
+
# build/lib.macosx-10.3-i386-cpython39
|
| 26 |
+
plat_spec = f'.{cmd.plat_name}-{sys.implementation.cache_tag}'
|
| 27 |
+
if get_config_var('Py_GIL_DISABLED'):
|
| 28 |
+
plat_spec += 't'
|
| 29 |
+
if hasattr(sys, 'gettotalrefcount'):
|
| 30 |
+
assert cmd.build_platlib.endswith('-pydebug')
|
| 31 |
+
plat_spec += '-pydebug'
|
| 32 |
+
wanted = os.path.join(cmd.build_base, 'lib' + plat_spec)
|
| 33 |
+
assert cmd.build_platlib == wanted
|
| 34 |
+
|
| 35 |
+
# by default, build_lib = build_purelib
|
| 36 |
+
assert cmd.build_lib == cmd.build_purelib
|
| 37 |
+
|
| 38 |
+
# build_temp is build/temp.<plat>
|
| 39 |
+
wanted = os.path.join(cmd.build_base, 'temp' + plat_spec)
|
| 40 |
+
assert cmd.build_temp == wanted
|
| 41 |
+
|
| 42 |
+
# build_scripts is build/scripts-x.x
|
| 43 |
+
wanted = os.path.join(
|
| 44 |
+
cmd.build_base, f'scripts-{sys.version_info.major}.{sys.version_info.minor}'
|
| 45 |
+
)
|
| 46 |
+
assert cmd.build_scripts == wanted
|
| 47 |
+
|
| 48 |
+
# executable is os.path.normpath(sys.executable)
|
| 49 |
+
assert cmd.executable == os.path.normpath(sys.executable)
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_build_ext.py
ADDED
|
@@ -0,0 +1,560 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import importlib
|
| 3 |
+
import os
|
| 4 |
+
import platform
|
| 5 |
+
import re
|
| 6 |
+
import shutil
|
| 7 |
+
import site
|
| 8 |
+
import sys
|
| 9 |
+
import tempfile
|
| 10 |
+
import textwrap
|
| 11 |
+
from distutils import sysconfig
|
| 12 |
+
from distutils.command.build_ext import build_ext
|
| 13 |
+
from distutils.core import Distribution
|
| 14 |
+
from distutils.errors import (
|
| 15 |
+
CompileError,
|
| 16 |
+
DistutilsPlatformError,
|
| 17 |
+
DistutilsSetupError,
|
| 18 |
+
UnknownFileError,
|
| 19 |
+
)
|
| 20 |
+
from distutils.extension import Extension
|
| 21 |
+
from distutils.tests import missing_compiler_executable
|
| 22 |
+
from distutils.tests.support import TempdirManager, copy_xxmodule_c, fixup_build_ext
|
| 23 |
+
from io import StringIO
|
| 24 |
+
|
| 25 |
+
import jaraco.path
|
| 26 |
+
import path
|
| 27 |
+
import pytest
|
| 28 |
+
from test import support
|
| 29 |
+
|
| 30 |
+
from .compat import py39 as import_helper
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@pytest.fixture()
|
| 34 |
+
def user_site_dir(request):
|
| 35 |
+
self = request.instance
|
| 36 |
+
self.tmp_dir = self.mkdtemp()
|
| 37 |
+
self.tmp_path = path.Path(self.tmp_dir)
|
| 38 |
+
from distutils.command import build_ext
|
| 39 |
+
|
| 40 |
+
orig_user_base = site.USER_BASE
|
| 41 |
+
|
| 42 |
+
site.USER_BASE = self.mkdtemp()
|
| 43 |
+
build_ext.USER_BASE = site.USER_BASE
|
| 44 |
+
|
| 45 |
+
# bpo-30132: On Windows, a .pdb file may be created in the current
|
| 46 |
+
# working directory. Create a temporary working directory to cleanup
|
| 47 |
+
# everything at the end of the test.
|
| 48 |
+
with self.tmp_path:
|
| 49 |
+
yield
|
| 50 |
+
|
| 51 |
+
site.USER_BASE = orig_user_base
|
| 52 |
+
build_ext.USER_BASE = orig_user_base
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
@contextlib.contextmanager
|
| 56 |
+
def safe_extension_import(name, path):
|
| 57 |
+
with import_helper.CleanImport(name):
|
| 58 |
+
with extension_redirect(name, path) as new_path:
|
| 59 |
+
with import_helper.DirsOnSysPath(new_path):
|
| 60 |
+
yield
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
@contextlib.contextmanager
|
| 64 |
+
def extension_redirect(mod, path):
|
| 65 |
+
"""
|
| 66 |
+
Tests will fail to tear down an extension module if it's been imported.
|
| 67 |
+
|
| 68 |
+
Before importing, copy the file to a temporary directory that won't
|
| 69 |
+
be cleaned up. Yield the new path.
|
| 70 |
+
"""
|
| 71 |
+
if platform.system() != "Windows" and sys.platform != "cygwin":
|
| 72 |
+
yield path
|
| 73 |
+
return
|
| 74 |
+
with import_helper.DirsOnSysPath(path):
|
| 75 |
+
spec = importlib.util.find_spec(mod)
|
| 76 |
+
filename = os.path.basename(spec.origin)
|
| 77 |
+
trash_dir = tempfile.mkdtemp(prefix='deleteme')
|
| 78 |
+
dest = os.path.join(trash_dir, os.path.basename(filename))
|
| 79 |
+
shutil.copy(spec.origin, dest)
|
| 80 |
+
yield trash_dir
|
| 81 |
+
# TODO: can the file be scheduled for deletion?
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
@pytest.mark.usefixtures('user_site_dir')
|
| 85 |
+
class TestBuildExt(TempdirManager):
|
| 86 |
+
def build_ext(self, *args, **kwargs):
|
| 87 |
+
return build_ext(*args, **kwargs)
|
| 88 |
+
|
| 89 |
+
def test_build_ext(self):
|
| 90 |
+
missing_compiler_executable()
|
| 91 |
+
copy_xxmodule_c(self.tmp_dir)
|
| 92 |
+
xx_c = os.path.join(self.tmp_dir, 'xxmodule.c')
|
| 93 |
+
xx_ext = Extension('xx', [xx_c])
|
| 94 |
+
dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
|
| 95 |
+
dist.package_dir = self.tmp_dir
|
| 96 |
+
cmd = self.build_ext(dist)
|
| 97 |
+
fixup_build_ext(cmd)
|
| 98 |
+
cmd.build_lib = self.tmp_dir
|
| 99 |
+
cmd.build_temp = self.tmp_dir
|
| 100 |
+
|
| 101 |
+
old_stdout = sys.stdout
|
| 102 |
+
if not support.verbose:
|
| 103 |
+
# silence compiler output
|
| 104 |
+
sys.stdout = StringIO()
|
| 105 |
+
try:
|
| 106 |
+
cmd.ensure_finalized()
|
| 107 |
+
cmd.run()
|
| 108 |
+
finally:
|
| 109 |
+
sys.stdout = old_stdout
|
| 110 |
+
|
| 111 |
+
with safe_extension_import('xx', self.tmp_dir):
|
| 112 |
+
self._test_xx()
|
| 113 |
+
|
| 114 |
+
@staticmethod
|
| 115 |
+
def _test_xx():
|
| 116 |
+
import xx
|
| 117 |
+
|
| 118 |
+
for attr in ('error', 'foo', 'new', 'roj'):
|
| 119 |
+
assert hasattr(xx, attr)
|
| 120 |
+
|
| 121 |
+
assert xx.foo(2, 5) == 7
|
| 122 |
+
assert xx.foo(13, 15) == 28
|
| 123 |
+
assert xx.new().demo() is None
|
| 124 |
+
if support.HAVE_DOCSTRINGS:
|
| 125 |
+
doc = 'This is a template module just for instruction.'
|
| 126 |
+
assert xx.__doc__ == doc
|
| 127 |
+
assert isinstance(xx.Null(), xx.Null)
|
| 128 |
+
assert isinstance(xx.Str(), xx.Str)
|
| 129 |
+
|
| 130 |
+
def test_solaris_enable_shared(self):
|
| 131 |
+
dist = Distribution({'name': 'xx'})
|
| 132 |
+
cmd = self.build_ext(dist)
|
| 133 |
+
old = sys.platform
|
| 134 |
+
|
| 135 |
+
sys.platform = 'sunos' # fooling finalize_options
|
| 136 |
+
from distutils.sysconfig import _config_vars
|
| 137 |
+
|
| 138 |
+
old_var = _config_vars.get('Py_ENABLE_SHARED')
|
| 139 |
+
_config_vars['Py_ENABLE_SHARED'] = True
|
| 140 |
+
try:
|
| 141 |
+
cmd.ensure_finalized()
|
| 142 |
+
finally:
|
| 143 |
+
sys.platform = old
|
| 144 |
+
if old_var is None:
|
| 145 |
+
del _config_vars['Py_ENABLE_SHARED']
|
| 146 |
+
else:
|
| 147 |
+
_config_vars['Py_ENABLE_SHARED'] = old_var
|
| 148 |
+
|
| 149 |
+
# make sure we get some library dirs under solaris
|
| 150 |
+
assert len(cmd.library_dirs) > 0
|
| 151 |
+
|
| 152 |
+
def test_user_site(self):
|
| 153 |
+
import site
|
| 154 |
+
|
| 155 |
+
dist = Distribution({'name': 'xx'})
|
| 156 |
+
cmd = self.build_ext(dist)
|
| 157 |
+
|
| 158 |
+
# making sure the user option is there
|
| 159 |
+
options = [name for name, short, label in cmd.user_options]
|
| 160 |
+
assert 'user' in options
|
| 161 |
+
|
| 162 |
+
# setting a value
|
| 163 |
+
cmd.user = True
|
| 164 |
+
|
| 165 |
+
# setting user based lib and include
|
| 166 |
+
lib = os.path.join(site.USER_BASE, 'lib')
|
| 167 |
+
incl = os.path.join(site.USER_BASE, 'include')
|
| 168 |
+
os.mkdir(lib)
|
| 169 |
+
os.mkdir(incl)
|
| 170 |
+
|
| 171 |
+
# let's run finalize
|
| 172 |
+
cmd.ensure_finalized()
|
| 173 |
+
|
| 174 |
+
# see if include_dirs and library_dirs
|
| 175 |
+
# were set
|
| 176 |
+
assert lib in cmd.library_dirs
|
| 177 |
+
assert lib in cmd.rpath
|
| 178 |
+
assert incl in cmd.include_dirs
|
| 179 |
+
|
| 180 |
+
def test_optional_extension(self):
|
| 181 |
+
# this extension will fail, but let's ignore this failure
|
| 182 |
+
# with the optional argument.
|
| 183 |
+
modules = [Extension('foo', ['xxx'], optional=False)]
|
| 184 |
+
dist = Distribution({'name': 'xx', 'ext_modules': modules})
|
| 185 |
+
cmd = self.build_ext(dist)
|
| 186 |
+
cmd.ensure_finalized()
|
| 187 |
+
with pytest.raises((UnknownFileError, CompileError)):
|
| 188 |
+
cmd.run() # should raise an error
|
| 189 |
+
|
| 190 |
+
modules = [Extension('foo', ['xxx'], optional=True)]
|
| 191 |
+
dist = Distribution({'name': 'xx', 'ext_modules': modules})
|
| 192 |
+
cmd = self.build_ext(dist)
|
| 193 |
+
cmd.ensure_finalized()
|
| 194 |
+
cmd.run() # should pass
|
| 195 |
+
|
| 196 |
+
def test_finalize_options(self):
|
| 197 |
+
# Make sure Python's include directories (for Python.h, pyconfig.h,
|
| 198 |
+
# etc.) are in the include search path.
|
| 199 |
+
modules = [Extension('foo', ['xxx'], optional=False)]
|
| 200 |
+
dist = Distribution({'name': 'xx', 'ext_modules': modules})
|
| 201 |
+
cmd = self.build_ext(dist)
|
| 202 |
+
cmd.finalize_options()
|
| 203 |
+
|
| 204 |
+
py_include = sysconfig.get_python_inc()
|
| 205 |
+
for p in py_include.split(os.path.pathsep):
|
| 206 |
+
assert p in cmd.include_dirs
|
| 207 |
+
|
| 208 |
+
plat_py_include = sysconfig.get_python_inc(plat_specific=True)
|
| 209 |
+
for p in plat_py_include.split(os.path.pathsep):
|
| 210 |
+
assert p in cmd.include_dirs
|
| 211 |
+
|
| 212 |
+
# make sure cmd.libraries is turned into a list
|
| 213 |
+
# if it's a string
|
| 214 |
+
cmd = self.build_ext(dist)
|
| 215 |
+
cmd.libraries = 'my_lib, other_lib lastlib'
|
| 216 |
+
cmd.finalize_options()
|
| 217 |
+
assert cmd.libraries == ['my_lib', 'other_lib', 'lastlib']
|
| 218 |
+
|
| 219 |
+
# make sure cmd.library_dirs is turned into a list
|
| 220 |
+
# if it's a string
|
| 221 |
+
cmd = self.build_ext(dist)
|
| 222 |
+
cmd.library_dirs = f'my_lib_dir{os.pathsep}other_lib_dir'
|
| 223 |
+
cmd.finalize_options()
|
| 224 |
+
assert 'my_lib_dir' in cmd.library_dirs
|
| 225 |
+
assert 'other_lib_dir' in cmd.library_dirs
|
| 226 |
+
|
| 227 |
+
# make sure rpath is turned into a list
|
| 228 |
+
# if it's a string
|
| 229 |
+
cmd = self.build_ext(dist)
|
| 230 |
+
cmd.rpath = f'one{os.pathsep}two'
|
| 231 |
+
cmd.finalize_options()
|
| 232 |
+
assert cmd.rpath == ['one', 'two']
|
| 233 |
+
|
| 234 |
+
# make sure cmd.link_objects is turned into a list
|
| 235 |
+
# if it's a string
|
| 236 |
+
cmd = build_ext(dist)
|
| 237 |
+
cmd.link_objects = 'one two,three'
|
| 238 |
+
cmd.finalize_options()
|
| 239 |
+
assert cmd.link_objects == ['one', 'two', 'three']
|
| 240 |
+
|
| 241 |
+
# XXX more tests to perform for win32
|
| 242 |
+
|
| 243 |
+
# make sure define is turned into 2-tuples
|
| 244 |
+
# strings if they are ','-separated strings
|
| 245 |
+
cmd = self.build_ext(dist)
|
| 246 |
+
cmd.define = 'one,two'
|
| 247 |
+
cmd.finalize_options()
|
| 248 |
+
assert cmd.define == [('one', '1'), ('two', '1')]
|
| 249 |
+
|
| 250 |
+
# make sure undef is turned into a list of
|
| 251 |
+
# strings if they are ','-separated strings
|
| 252 |
+
cmd = self.build_ext(dist)
|
| 253 |
+
cmd.undef = 'one,two'
|
| 254 |
+
cmd.finalize_options()
|
| 255 |
+
assert cmd.undef == ['one', 'two']
|
| 256 |
+
|
| 257 |
+
# make sure swig_opts is turned into a list
|
| 258 |
+
cmd = self.build_ext(dist)
|
| 259 |
+
cmd.swig_opts = None
|
| 260 |
+
cmd.finalize_options()
|
| 261 |
+
assert cmd.swig_opts == []
|
| 262 |
+
|
| 263 |
+
cmd = self.build_ext(dist)
|
| 264 |
+
cmd.swig_opts = '1 2'
|
| 265 |
+
cmd.finalize_options()
|
| 266 |
+
assert cmd.swig_opts == ['1', '2']
|
| 267 |
+
|
| 268 |
+
def test_check_extensions_list(self):
|
| 269 |
+
dist = Distribution()
|
| 270 |
+
cmd = self.build_ext(dist)
|
| 271 |
+
cmd.finalize_options()
|
| 272 |
+
|
| 273 |
+
# 'extensions' option must be a list of Extension instances
|
| 274 |
+
with pytest.raises(DistutilsSetupError):
|
| 275 |
+
cmd.check_extensions_list('foo')
|
| 276 |
+
|
| 277 |
+
# each element of 'ext_modules' option must be an
|
| 278 |
+
# Extension instance or 2-tuple
|
| 279 |
+
exts = [('bar', 'foo', 'bar'), 'foo']
|
| 280 |
+
with pytest.raises(DistutilsSetupError):
|
| 281 |
+
cmd.check_extensions_list(exts)
|
| 282 |
+
|
| 283 |
+
# first element of each tuple in 'ext_modules'
|
| 284 |
+
# must be the extension name (a string) and match
|
| 285 |
+
# a python dotted-separated name
|
| 286 |
+
exts = [('foo-bar', '')]
|
| 287 |
+
with pytest.raises(DistutilsSetupError):
|
| 288 |
+
cmd.check_extensions_list(exts)
|
| 289 |
+
|
| 290 |
+
# second element of each tuple in 'ext_modules'
|
| 291 |
+
# must be a dictionary (build info)
|
| 292 |
+
exts = [('foo.bar', '')]
|
| 293 |
+
with pytest.raises(DistutilsSetupError):
|
| 294 |
+
cmd.check_extensions_list(exts)
|
| 295 |
+
|
| 296 |
+
# ok this one should pass
|
| 297 |
+
exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', 'some': 'bar'})]
|
| 298 |
+
cmd.check_extensions_list(exts)
|
| 299 |
+
ext = exts[0]
|
| 300 |
+
assert isinstance(ext, Extension)
|
| 301 |
+
|
| 302 |
+
# check_extensions_list adds in ext the values passed
|
| 303 |
+
# when they are in ('include_dirs', 'library_dirs', 'libraries'
|
| 304 |
+
# 'extra_objects', 'extra_compile_args', 'extra_link_args')
|
| 305 |
+
assert ext.libraries == 'foo'
|
| 306 |
+
assert not hasattr(ext, 'some')
|
| 307 |
+
|
| 308 |
+
# 'macros' element of build info dict must be 1- or 2-tuple
|
| 309 |
+
exts = [
|
| 310 |
+
(
|
| 311 |
+
'foo.bar',
|
| 312 |
+
{
|
| 313 |
+
'sources': [''],
|
| 314 |
+
'libraries': 'foo',
|
| 315 |
+
'some': 'bar',
|
| 316 |
+
'macros': [('1', '2', '3'), 'foo'],
|
| 317 |
+
},
|
| 318 |
+
)
|
| 319 |
+
]
|
| 320 |
+
with pytest.raises(DistutilsSetupError):
|
| 321 |
+
cmd.check_extensions_list(exts)
|
| 322 |
+
|
| 323 |
+
exts[0][1]['macros'] = [('1', '2'), ('3',)]
|
| 324 |
+
cmd.check_extensions_list(exts)
|
| 325 |
+
assert exts[0].undef_macros == ['3']
|
| 326 |
+
assert exts[0].define_macros == [('1', '2')]
|
| 327 |
+
|
| 328 |
+
def test_get_source_files(self):
|
| 329 |
+
modules = [Extension('foo', ['xxx'], optional=False)]
|
| 330 |
+
dist = Distribution({'name': 'xx', 'ext_modules': modules})
|
| 331 |
+
cmd = self.build_ext(dist)
|
| 332 |
+
cmd.ensure_finalized()
|
| 333 |
+
assert cmd.get_source_files() == ['xxx']
|
| 334 |
+
|
| 335 |
+
def test_unicode_module_names(self):
|
| 336 |
+
modules = [
|
| 337 |
+
Extension('foo', ['aaa'], optional=False),
|
| 338 |
+
Extension('föö', ['uuu'], optional=False),
|
| 339 |
+
]
|
| 340 |
+
dist = Distribution({'name': 'xx', 'ext_modules': modules})
|
| 341 |
+
cmd = self.build_ext(dist)
|
| 342 |
+
cmd.ensure_finalized()
|
| 343 |
+
assert re.search(r'foo(_d)?\..*', cmd.get_ext_filename(modules[0].name))
|
| 344 |
+
assert re.search(r'föö(_d)?\..*', cmd.get_ext_filename(modules[1].name))
|
| 345 |
+
assert cmd.get_export_symbols(modules[0]) == ['PyInit_foo']
|
| 346 |
+
assert cmd.get_export_symbols(modules[1]) == ['PyInitU_f_1gaa']
|
| 347 |
+
|
| 348 |
+
def test_compiler_option(self):
|
| 349 |
+
# cmd.compiler is an option and
|
| 350 |
+
# should not be overridden by a compiler instance
|
| 351 |
+
# when the command is run
|
| 352 |
+
dist = Distribution()
|
| 353 |
+
cmd = self.build_ext(dist)
|
| 354 |
+
cmd.compiler = 'unix'
|
| 355 |
+
cmd.ensure_finalized()
|
| 356 |
+
cmd.run()
|
| 357 |
+
assert cmd.compiler == 'unix'
|
| 358 |
+
|
| 359 |
+
def test_get_outputs(self):
|
| 360 |
+
missing_compiler_executable()
|
| 361 |
+
tmp_dir = self.mkdtemp()
|
| 362 |
+
c_file = os.path.join(tmp_dir, 'foo.c')
|
| 363 |
+
self.write_file(c_file, 'void PyInit_foo(void) {}\n')
|
| 364 |
+
ext = Extension('foo', [c_file], optional=False)
|
| 365 |
+
dist = Distribution({'name': 'xx', 'ext_modules': [ext]})
|
| 366 |
+
cmd = self.build_ext(dist)
|
| 367 |
+
fixup_build_ext(cmd)
|
| 368 |
+
cmd.ensure_finalized()
|
| 369 |
+
assert len(cmd.get_outputs()) == 1
|
| 370 |
+
|
| 371 |
+
cmd.build_lib = os.path.join(self.tmp_dir, 'build')
|
| 372 |
+
cmd.build_temp = os.path.join(self.tmp_dir, 'tempt')
|
| 373 |
+
|
| 374 |
+
# issue #5977 : distutils build_ext.get_outputs
|
| 375 |
+
# returns wrong result with --inplace
|
| 376 |
+
other_tmp_dir = os.path.realpath(self.mkdtemp())
|
| 377 |
+
old_wd = os.getcwd()
|
| 378 |
+
os.chdir(other_tmp_dir)
|
| 379 |
+
try:
|
| 380 |
+
cmd.inplace = True
|
| 381 |
+
cmd.run()
|
| 382 |
+
so_file = cmd.get_outputs()[0]
|
| 383 |
+
finally:
|
| 384 |
+
os.chdir(old_wd)
|
| 385 |
+
assert os.path.exists(so_file)
|
| 386 |
+
ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
|
| 387 |
+
assert so_file.endswith(ext_suffix)
|
| 388 |
+
so_dir = os.path.dirname(so_file)
|
| 389 |
+
assert so_dir == other_tmp_dir
|
| 390 |
+
|
| 391 |
+
cmd.inplace = False
|
| 392 |
+
cmd.compiler = None
|
| 393 |
+
cmd.run()
|
| 394 |
+
so_file = cmd.get_outputs()[0]
|
| 395 |
+
assert os.path.exists(so_file)
|
| 396 |
+
assert so_file.endswith(ext_suffix)
|
| 397 |
+
so_dir = os.path.dirname(so_file)
|
| 398 |
+
assert so_dir == cmd.build_lib
|
| 399 |
+
|
| 400 |
+
# inplace = False, cmd.package = 'bar'
|
| 401 |
+
build_py = cmd.get_finalized_command('build_py')
|
| 402 |
+
build_py.package_dir = {'': 'bar'}
|
| 403 |
+
path = cmd.get_ext_fullpath('foo')
|
| 404 |
+
# checking that the last directory is the build_dir
|
| 405 |
+
path = os.path.split(path)[0]
|
| 406 |
+
assert path == cmd.build_lib
|
| 407 |
+
|
| 408 |
+
# inplace = True, cmd.package = 'bar'
|
| 409 |
+
cmd.inplace = True
|
| 410 |
+
other_tmp_dir = os.path.realpath(self.mkdtemp())
|
| 411 |
+
old_wd = os.getcwd()
|
| 412 |
+
os.chdir(other_tmp_dir)
|
| 413 |
+
try:
|
| 414 |
+
path = cmd.get_ext_fullpath('foo')
|
| 415 |
+
finally:
|
| 416 |
+
os.chdir(old_wd)
|
| 417 |
+
# checking that the last directory is bar
|
| 418 |
+
path = os.path.split(path)[0]
|
| 419 |
+
lastdir = os.path.split(path)[-1]
|
| 420 |
+
assert lastdir == 'bar'
|
| 421 |
+
|
| 422 |
+
def test_ext_fullpath(self):
|
| 423 |
+
ext = sysconfig.get_config_var('EXT_SUFFIX')
|
| 424 |
+
# building lxml.etree inplace
|
| 425 |
+
# etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c')
|
| 426 |
+
# etree_ext = Extension('lxml.etree', [etree_c])
|
| 427 |
+
# dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]})
|
| 428 |
+
dist = Distribution()
|
| 429 |
+
cmd = self.build_ext(dist)
|
| 430 |
+
cmd.inplace = True
|
| 431 |
+
cmd.distribution.package_dir = {'': 'src'}
|
| 432 |
+
cmd.distribution.packages = ['lxml', 'lxml.html']
|
| 433 |
+
curdir = os.getcwd()
|
| 434 |
+
wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
|
| 435 |
+
path = cmd.get_ext_fullpath('lxml.etree')
|
| 436 |
+
assert wanted == path
|
| 437 |
+
|
| 438 |
+
# building lxml.etree not inplace
|
| 439 |
+
cmd.inplace = False
|
| 440 |
+
cmd.build_lib = os.path.join(curdir, 'tmpdir')
|
| 441 |
+
wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
|
| 442 |
+
path = cmd.get_ext_fullpath('lxml.etree')
|
| 443 |
+
assert wanted == path
|
| 444 |
+
|
| 445 |
+
# building twisted.runner.portmap not inplace
|
| 446 |
+
build_py = cmd.get_finalized_command('build_py')
|
| 447 |
+
build_py.package_dir = {}
|
| 448 |
+
cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
|
| 449 |
+
path = cmd.get_ext_fullpath('twisted.runner.portmap')
|
| 450 |
+
wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner', 'portmap' + ext)
|
| 451 |
+
assert wanted == path
|
| 452 |
+
|
| 453 |
+
# building twisted.runner.portmap inplace
|
| 454 |
+
cmd.inplace = True
|
| 455 |
+
path = cmd.get_ext_fullpath('twisted.runner.portmap')
|
| 456 |
+
wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
|
| 457 |
+
assert wanted == path
|
| 458 |
+
|
| 459 |
+
@pytest.mark.skipif('platform.system() != "Darwin"')
|
| 460 |
+
@pytest.mark.usefixtures('save_env')
|
| 461 |
+
def test_deployment_target_default(self):
|
| 462 |
+
# Issue 9516: Test that, in the absence of the environment variable,
|
| 463 |
+
# an extension module is compiled with the same deployment target as
|
| 464 |
+
# the interpreter.
|
| 465 |
+
self._try_compile_deployment_target('==', None)
|
| 466 |
+
|
| 467 |
+
@pytest.mark.skipif('platform.system() != "Darwin"')
|
| 468 |
+
@pytest.mark.usefixtures('save_env')
|
| 469 |
+
def test_deployment_target_too_low(self):
|
| 470 |
+
# Issue 9516: Test that an extension module is not allowed to be
|
| 471 |
+
# compiled with a deployment target less than that of the interpreter.
|
| 472 |
+
with pytest.raises(DistutilsPlatformError):
|
| 473 |
+
self._try_compile_deployment_target('>', '10.1')
|
| 474 |
+
|
| 475 |
+
@pytest.mark.skipif('platform.system() != "Darwin"')
|
| 476 |
+
@pytest.mark.usefixtures('save_env')
|
| 477 |
+
def test_deployment_target_higher_ok(self): # pragma: no cover
|
| 478 |
+
# Issue 9516: Test that an extension module can be compiled with a
|
| 479 |
+
# deployment target higher than that of the interpreter: the ext
|
| 480 |
+
# module may depend on some newer OS feature.
|
| 481 |
+
deptarget = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
|
| 482 |
+
if deptarget:
|
| 483 |
+
# increment the minor version number (i.e. 10.6 -> 10.7)
|
| 484 |
+
deptarget = [int(x) for x in deptarget.split('.')]
|
| 485 |
+
deptarget[-1] += 1
|
| 486 |
+
deptarget = '.'.join(str(i) for i in deptarget)
|
| 487 |
+
self._try_compile_deployment_target('<', deptarget)
|
| 488 |
+
|
| 489 |
+
def _try_compile_deployment_target(self, operator, target): # pragma: no cover
|
| 490 |
+
if target is None:
|
| 491 |
+
if os.environ.get('MACOSX_DEPLOYMENT_TARGET'):
|
| 492 |
+
del os.environ['MACOSX_DEPLOYMENT_TARGET']
|
| 493 |
+
else:
|
| 494 |
+
os.environ['MACOSX_DEPLOYMENT_TARGET'] = target
|
| 495 |
+
|
| 496 |
+
jaraco.path.build(
|
| 497 |
+
{
|
| 498 |
+
'deptargetmodule.c': textwrap.dedent(f"""\
|
| 499 |
+
#include <AvailabilityMacros.h>
|
| 500 |
+
|
| 501 |
+
int dummy;
|
| 502 |
+
|
| 503 |
+
#if TARGET {operator} MAC_OS_X_VERSION_MIN_REQUIRED
|
| 504 |
+
#else
|
| 505 |
+
#error "Unexpected target"
|
| 506 |
+
#endif
|
| 507 |
+
|
| 508 |
+
"""),
|
| 509 |
+
},
|
| 510 |
+
self.tmp_path,
|
| 511 |
+
)
|
| 512 |
+
|
| 513 |
+
# get the deployment target that the interpreter was built with
|
| 514 |
+
target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
|
| 515 |
+
target = tuple(map(int, target.split('.')[0:2]))
|
| 516 |
+
# format the target value as defined in the Apple
|
| 517 |
+
# Availability Macros. We can't use the macro names since
|
| 518 |
+
# at least one value we test with will not exist yet.
|
| 519 |
+
if target[:2] < (10, 10):
|
| 520 |
+
# for 10.1 through 10.9.x -> "10n0"
|
| 521 |
+
tmpl = '{:02}{:01}0'
|
| 522 |
+
else:
|
| 523 |
+
# for 10.10 and beyond -> "10nn00"
|
| 524 |
+
if len(target) >= 2:
|
| 525 |
+
tmpl = '{:02}{:02}00'
|
| 526 |
+
else:
|
| 527 |
+
# 11 and later can have no minor version (11 instead of 11.0)
|
| 528 |
+
tmpl = '{:02}0000'
|
| 529 |
+
target = tmpl.format(*target)
|
| 530 |
+
deptarget_ext = Extension(
|
| 531 |
+
'deptarget',
|
| 532 |
+
[self.tmp_path / 'deptargetmodule.c'],
|
| 533 |
+
extra_compile_args=[f'-DTARGET={target}'],
|
| 534 |
+
)
|
| 535 |
+
dist = Distribution({'name': 'deptarget', 'ext_modules': [deptarget_ext]})
|
| 536 |
+
dist.package_dir = self.tmp_dir
|
| 537 |
+
cmd = self.build_ext(dist)
|
| 538 |
+
cmd.build_lib = self.tmp_dir
|
| 539 |
+
cmd.build_temp = self.tmp_dir
|
| 540 |
+
|
| 541 |
+
try:
|
| 542 |
+
old_stdout = sys.stdout
|
| 543 |
+
if not support.verbose:
|
| 544 |
+
# silence compiler output
|
| 545 |
+
sys.stdout = StringIO()
|
| 546 |
+
try:
|
| 547 |
+
cmd.ensure_finalized()
|
| 548 |
+
cmd.run()
|
| 549 |
+
finally:
|
| 550 |
+
sys.stdout = old_stdout
|
| 551 |
+
|
| 552 |
+
except CompileError:
|
| 553 |
+
self.fail("Wrong deployment target during compilation")
|
| 554 |
+
|
| 555 |
+
|
| 556 |
+
class TestParallelBuildExt(TestBuildExt):
|
| 557 |
+
def build_ext(self, *args, **kwargs):
|
| 558 |
+
build_ext = super().build_ext(*args, **kwargs)
|
| 559 |
+
build_ext.parallel = True
|
| 560 |
+
return build_ext
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_build_py.py
ADDED
|
@@ -0,0 +1,196 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.build_py."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
from distutils.command.build_py import build_py
|
| 6 |
+
from distutils.core import Distribution
|
| 7 |
+
from distutils.errors import DistutilsFileError
|
| 8 |
+
from distutils.tests import support
|
| 9 |
+
|
| 10 |
+
import jaraco.path
|
| 11 |
+
import pytest
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@support.combine_markers
|
| 15 |
+
class TestBuildPy(support.TempdirManager):
|
| 16 |
+
def test_package_data(self):
|
| 17 |
+
sources = self.mkdtemp()
|
| 18 |
+
jaraco.path.build(
|
| 19 |
+
{
|
| 20 |
+
'__init__.py': "# Pretend this is a package.",
|
| 21 |
+
'README.txt': 'Info about this package',
|
| 22 |
+
},
|
| 23 |
+
sources,
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
destination = self.mkdtemp()
|
| 27 |
+
|
| 28 |
+
dist = Distribution({"packages": ["pkg"], "package_dir": {"pkg": sources}})
|
| 29 |
+
# script_name need not exist, it just need to be initialized
|
| 30 |
+
dist.script_name = os.path.join(sources, "setup.py")
|
| 31 |
+
dist.command_obj["build"] = support.DummyCommand(
|
| 32 |
+
force=False, build_lib=destination
|
| 33 |
+
)
|
| 34 |
+
dist.packages = ["pkg"]
|
| 35 |
+
dist.package_data = {"pkg": ["README.txt"]}
|
| 36 |
+
dist.package_dir = {"pkg": sources}
|
| 37 |
+
|
| 38 |
+
cmd = build_py(dist)
|
| 39 |
+
cmd.compile = True
|
| 40 |
+
cmd.ensure_finalized()
|
| 41 |
+
assert cmd.package_data == dist.package_data
|
| 42 |
+
|
| 43 |
+
cmd.run()
|
| 44 |
+
|
| 45 |
+
# This makes sure the list of outputs includes byte-compiled
|
| 46 |
+
# files for Python modules but not for package data files
|
| 47 |
+
# (there shouldn't *be* byte-code files for those!).
|
| 48 |
+
assert len(cmd.get_outputs()) == 3
|
| 49 |
+
pkgdest = os.path.join(destination, "pkg")
|
| 50 |
+
files = os.listdir(pkgdest)
|
| 51 |
+
pycache_dir = os.path.join(pkgdest, "__pycache__")
|
| 52 |
+
assert "__init__.py" in files
|
| 53 |
+
assert "README.txt" in files
|
| 54 |
+
if sys.dont_write_bytecode:
|
| 55 |
+
assert not os.path.exists(pycache_dir)
|
| 56 |
+
else:
|
| 57 |
+
pyc_files = os.listdir(pycache_dir)
|
| 58 |
+
assert f"__init__.{sys.implementation.cache_tag}.pyc" in pyc_files
|
| 59 |
+
|
| 60 |
+
def test_empty_package_dir(self):
|
| 61 |
+
# See bugs #1668596/#1720897
|
| 62 |
+
sources = self.mkdtemp()
|
| 63 |
+
jaraco.path.build({'__init__.py': '', 'doc': {'testfile': ''}}, sources)
|
| 64 |
+
|
| 65 |
+
os.chdir(sources)
|
| 66 |
+
dist = Distribution({
|
| 67 |
+
"packages": ["pkg"],
|
| 68 |
+
"package_dir": {"pkg": ""},
|
| 69 |
+
"package_data": {"pkg": ["doc/*"]},
|
| 70 |
+
})
|
| 71 |
+
# script_name need not exist, it just need to be initialized
|
| 72 |
+
dist.script_name = os.path.join(sources, "setup.py")
|
| 73 |
+
dist.script_args = ["build"]
|
| 74 |
+
dist.parse_command_line()
|
| 75 |
+
|
| 76 |
+
try:
|
| 77 |
+
dist.run_commands()
|
| 78 |
+
except DistutilsFileError:
|
| 79 |
+
self.fail("failed package_data test when package_dir is ''")
|
| 80 |
+
|
| 81 |
+
@pytest.mark.skipif('sys.dont_write_bytecode')
|
| 82 |
+
def test_byte_compile(self):
|
| 83 |
+
project_dir, dist = self.create_dist(py_modules=['boiledeggs'])
|
| 84 |
+
os.chdir(project_dir)
|
| 85 |
+
self.write_file('boiledeggs.py', 'import antigravity')
|
| 86 |
+
cmd = build_py(dist)
|
| 87 |
+
cmd.compile = True
|
| 88 |
+
cmd.build_lib = 'here'
|
| 89 |
+
cmd.finalize_options()
|
| 90 |
+
cmd.run()
|
| 91 |
+
|
| 92 |
+
found = os.listdir(cmd.build_lib)
|
| 93 |
+
assert sorted(found) == ['__pycache__', 'boiledeggs.py']
|
| 94 |
+
found = os.listdir(os.path.join(cmd.build_lib, '__pycache__'))
|
| 95 |
+
assert found == [f'boiledeggs.{sys.implementation.cache_tag}.pyc']
|
| 96 |
+
|
| 97 |
+
@pytest.mark.skipif('sys.dont_write_bytecode')
|
| 98 |
+
def test_byte_compile_optimized(self):
|
| 99 |
+
project_dir, dist = self.create_dist(py_modules=['boiledeggs'])
|
| 100 |
+
os.chdir(project_dir)
|
| 101 |
+
self.write_file('boiledeggs.py', 'import antigravity')
|
| 102 |
+
cmd = build_py(dist)
|
| 103 |
+
cmd.compile = False
|
| 104 |
+
cmd.optimize = 1
|
| 105 |
+
cmd.build_lib = 'here'
|
| 106 |
+
cmd.finalize_options()
|
| 107 |
+
cmd.run()
|
| 108 |
+
|
| 109 |
+
found = os.listdir(cmd.build_lib)
|
| 110 |
+
assert sorted(found) == ['__pycache__', 'boiledeggs.py']
|
| 111 |
+
found = os.listdir(os.path.join(cmd.build_lib, '__pycache__'))
|
| 112 |
+
expect = f'boiledeggs.{sys.implementation.cache_tag}.opt-1.pyc'
|
| 113 |
+
assert sorted(found) == [expect]
|
| 114 |
+
|
| 115 |
+
def test_dir_in_package_data(self):
|
| 116 |
+
"""
|
| 117 |
+
A directory in package_data should not be added to the filelist.
|
| 118 |
+
"""
|
| 119 |
+
# See bug 19286
|
| 120 |
+
sources = self.mkdtemp()
|
| 121 |
+
jaraco.path.build(
|
| 122 |
+
{
|
| 123 |
+
'pkg': {
|
| 124 |
+
'__init__.py': '',
|
| 125 |
+
'doc': {
|
| 126 |
+
'testfile': '',
|
| 127 |
+
# create a directory that could be incorrectly detected as a file
|
| 128 |
+
'otherdir': {},
|
| 129 |
+
},
|
| 130 |
+
}
|
| 131 |
+
},
|
| 132 |
+
sources,
|
| 133 |
+
)
|
| 134 |
+
|
| 135 |
+
os.chdir(sources)
|
| 136 |
+
dist = Distribution({"packages": ["pkg"], "package_data": {"pkg": ["doc/*"]}})
|
| 137 |
+
# script_name need not exist, it just need to be initialized
|
| 138 |
+
dist.script_name = os.path.join(sources, "setup.py")
|
| 139 |
+
dist.script_args = ["build"]
|
| 140 |
+
dist.parse_command_line()
|
| 141 |
+
|
| 142 |
+
try:
|
| 143 |
+
dist.run_commands()
|
| 144 |
+
except DistutilsFileError:
|
| 145 |
+
self.fail("failed package_data when data dir includes a dir")
|
| 146 |
+
|
| 147 |
+
def test_dont_write_bytecode(self, caplog):
|
| 148 |
+
# makes sure byte_compile is not used
|
| 149 |
+
dist = self.create_dist()[1]
|
| 150 |
+
cmd = build_py(dist)
|
| 151 |
+
cmd.compile = True
|
| 152 |
+
cmd.optimize = 1
|
| 153 |
+
|
| 154 |
+
old_dont_write_bytecode = sys.dont_write_bytecode
|
| 155 |
+
sys.dont_write_bytecode = True
|
| 156 |
+
try:
|
| 157 |
+
cmd.byte_compile([])
|
| 158 |
+
finally:
|
| 159 |
+
sys.dont_write_bytecode = old_dont_write_bytecode
|
| 160 |
+
|
| 161 |
+
assert 'byte-compiling is disabled' in caplog.records[0].message
|
| 162 |
+
|
| 163 |
+
def test_namespace_package_does_not_warn(self, caplog):
|
| 164 |
+
"""
|
| 165 |
+
Originally distutils implementation did not account for PEP 420
|
| 166 |
+
and included warns for package directories that did not contain
|
| 167 |
+
``__init__.py`` files.
|
| 168 |
+
After the acceptance of PEP 420, these warnings don't make more sense
|
| 169 |
+
so we want to ensure there are not displayed to not confuse the users.
|
| 170 |
+
"""
|
| 171 |
+
# Create a fake project structure with a package namespace:
|
| 172 |
+
tmp = self.mkdtemp()
|
| 173 |
+
jaraco.path.build({'ns': {'pkg': {'module.py': ''}}}, tmp)
|
| 174 |
+
os.chdir(tmp)
|
| 175 |
+
|
| 176 |
+
# Configure the package:
|
| 177 |
+
attrs = {
|
| 178 |
+
"name": "ns.pkg",
|
| 179 |
+
"packages": ["ns", "ns.pkg"],
|
| 180 |
+
"script_name": "setup.py",
|
| 181 |
+
}
|
| 182 |
+
dist = Distribution(attrs)
|
| 183 |
+
|
| 184 |
+
# Run code paths that would trigger the trap:
|
| 185 |
+
cmd = dist.get_command_obj("build_py")
|
| 186 |
+
cmd.finalize_options()
|
| 187 |
+
modules = cmd.find_all_modules()
|
| 188 |
+
assert len(modules) == 1
|
| 189 |
+
module_path = modules[0][-1]
|
| 190 |
+
assert module_path.replace(os.sep, "/") == "ns/pkg/module.py"
|
| 191 |
+
|
| 192 |
+
cmd.run()
|
| 193 |
+
|
| 194 |
+
assert not any(
|
| 195 |
+
"package init file" in msg and "not found" in msg for msg in caplog.messages
|
| 196 |
+
)
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_build_scripts.py
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.build_scripts."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import textwrap
|
| 5 |
+
from distutils import sysconfig
|
| 6 |
+
from distutils.command.build_scripts import build_scripts
|
| 7 |
+
from distutils.core import Distribution
|
| 8 |
+
from distutils.tests import support
|
| 9 |
+
|
| 10 |
+
import jaraco.path
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class TestBuildScripts(support.TempdirManager):
|
| 14 |
+
def test_default_settings(self):
|
| 15 |
+
cmd = self.get_build_scripts_cmd("/foo/bar", [])
|
| 16 |
+
assert not cmd.force
|
| 17 |
+
assert cmd.build_dir is None
|
| 18 |
+
|
| 19 |
+
cmd.finalize_options()
|
| 20 |
+
|
| 21 |
+
assert cmd.force
|
| 22 |
+
assert cmd.build_dir == "/foo/bar"
|
| 23 |
+
|
| 24 |
+
def test_build(self):
|
| 25 |
+
source = self.mkdtemp()
|
| 26 |
+
target = self.mkdtemp()
|
| 27 |
+
expected = self.write_sample_scripts(source)
|
| 28 |
+
|
| 29 |
+
cmd = self.get_build_scripts_cmd(
|
| 30 |
+
target, [os.path.join(source, fn) for fn in expected]
|
| 31 |
+
)
|
| 32 |
+
cmd.finalize_options()
|
| 33 |
+
cmd.run()
|
| 34 |
+
|
| 35 |
+
built = os.listdir(target)
|
| 36 |
+
for name in expected:
|
| 37 |
+
assert name in built
|
| 38 |
+
|
| 39 |
+
def get_build_scripts_cmd(self, target, scripts):
|
| 40 |
+
import sys
|
| 41 |
+
|
| 42 |
+
dist = Distribution()
|
| 43 |
+
dist.scripts = scripts
|
| 44 |
+
dist.command_obj["build"] = support.DummyCommand(
|
| 45 |
+
build_scripts=target, force=True, executable=sys.executable
|
| 46 |
+
)
|
| 47 |
+
return build_scripts(dist)
|
| 48 |
+
|
| 49 |
+
@staticmethod
|
| 50 |
+
def write_sample_scripts(dir):
|
| 51 |
+
spec = {
|
| 52 |
+
'script1.py': textwrap.dedent("""
|
| 53 |
+
#! /usr/bin/env python2.3
|
| 54 |
+
# bogus script w/ Python sh-bang
|
| 55 |
+
pass
|
| 56 |
+
""").lstrip(),
|
| 57 |
+
'script2.py': textwrap.dedent("""
|
| 58 |
+
#!/usr/bin/python
|
| 59 |
+
# bogus script w/ Python sh-bang
|
| 60 |
+
pass
|
| 61 |
+
""").lstrip(),
|
| 62 |
+
'shell.sh': textwrap.dedent("""
|
| 63 |
+
#!/bin/sh
|
| 64 |
+
# bogus shell script w/ sh-bang
|
| 65 |
+
exit 0
|
| 66 |
+
""").lstrip(),
|
| 67 |
+
}
|
| 68 |
+
jaraco.path.build(spec, dir)
|
| 69 |
+
return list(spec)
|
| 70 |
+
|
| 71 |
+
def test_version_int(self):
|
| 72 |
+
source = self.mkdtemp()
|
| 73 |
+
target = self.mkdtemp()
|
| 74 |
+
expected = self.write_sample_scripts(source)
|
| 75 |
+
|
| 76 |
+
cmd = self.get_build_scripts_cmd(
|
| 77 |
+
target, [os.path.join(source, fn) for fn in expected]
|
| 78 |
+
)
|
| 79 |
+
cmd.finalize_options()
|
| 80 |
+
|
| 81 |
+
# https://bugs.python.org/issue4524
|
| 82 |
+
#
|
| 83 |
+
# On linux-g++-32 with command line `./configure --enable-ipv6
|
| 84 |
+
# --with-suffix=3`, python is compiled okay but the build scripts
|
| 85 |
+
# failed when writing the name of the executable
|
| 86 |
+
old = sysconfig.get_config_vars().get('VERSION')
|
| 87 |
+
sysconfig._config_vars['VERSION'] = 4
|
| 88 |
+
try:
|
| 89 |
+
cmd.run()
|
| 90 |
+
finally:
|
| 91 |
+
if old is not None:
|
| 92 |
+
sysconfig._config_vars['VERSION'] = old
|
| 93 |
+
|
| 94 |
+
built = os.listdir(target)
|
| 95 |
+
for name in expected:
|
| 96 |
+
assert name in built
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_ccompiler.py
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import platform
|
| 3 |
+
import sys
|
| 4 |
+
import sysconfig
|
| 5 |
+
import textwrap
|
| 6 |
+
from distutils import ccompiler
|
| 7 |
+
|
| 8 |
+
import pytest
|
| 9 |
+
|
| 10 |
+
pytestmark = pytest.mark.usefixtures('suppress_path_mangle')
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def _make_strs(paths):
|
| 14 |
+
"""
|
| 15 |
+
Convert paths to strings for legacy compatibility.
|
| 16 |
+
"""
|
| 17 |
+
if sys.version_info > (3, 8) and platform.system() != "Windows":
|
| 18 |
+
return paths
|
| 19 |
+
return list(map(os.fspath, paths))
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@pytest.fixture
|
| 23 |
+
def c_file(tmp_path):
|
| 24 |
+
c_file = tmp_path / 'foo.c'
|
| 25 |
+
gen_headers = ('Python.h',)
|
| 26 |
+
is_windows = platform.system() == "Windows"
|
| 27 |
+
plat_headers = ('windows.h',) * is_windows
|
| 28 |
+
all_headers = gen_headers + plat_headers
|
| 29 |
+
headers = '\n'.join(f'#include <{header}>\n' for header in all_headers)
|
| 30 |
+
payload = (
|
| 31 |
+
textwrap.dedent(
|
| 32 |
+
"""
|
| 33 |
+
#headers
|
| 34 |
+
void PyInit_foo(void) {}
|
| 35 |
+
"""
|
| 36 |
+
)
|
| 37 |
+
.lstrip()
|
| 38 |
+
.replace('#headers', headers)
|
| 39 |
+
)
|
| 40 |
+
c_file.write_text(payload, encoding='utf-8')
|
| 41 |
+
return c_file
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def test_set_include_dirs(c_file):
|
| 45 |
+
"""
|
| 46 |
+
Extensions should build even if set_include_dirs is invoked.
|
| 47 |
+
In particular, compiler-specific paths should not be overridden.
|
| 48 |
+
"""
|
| 49 |
+
compiler = ccompiler.new_compiler()
|
| 50 |
+
python = sysconfig.get_paths()['include']
|
| 51 |
+
compiler.set_include_dirs([python])
|
| 52 |
+
compiler.compile(_make_strs([c_file]))
|
| 53 |
+
|
| 54 |
+
# do it again, setting include dirs after any initialization
|
| 55 |
+
compiler.set_include_dirs([python])
|
| 56 |
+
compiler.compile(_make_strs([c_file]))
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def test_has_function_prototype():
|
| 60 |
+
# Issue https://github.com/pypa/setuptools/issues/3648
|
| 61 |
+
# Test prototype-generating behavior.
|
| 62 |
+
|
| 63 |
+
compiler = ccompiler.new_compiler()
|
| 64 |
+
|
| 65 |
+
# Every C implementation should have these.
|
| 66 |
+
assert compiler.has_function('abort')
|
| 67 |
+
assert compiler.has_function('exit')
|
| 68 |
+
with pytest.deprecated_call(match='includes is deprecated'):
|
| 69 |
+
# abort() is a valid expression with the <stdlib.h> prototype.
|
| 70 |
+
assert compiler.has_function('abort', includes=['stdlib.h'])
|
| 71 |
+
with pytest.deprecated_call(match='includes is deprecated'):
|
| 72 |
+
# But exit() is not valid with the actual prototype in scope.
|
| 73 |
+
assert not compiler.has_function('exit', includes=['stdlib.h'])
|
| 74 |
+
# And setuptools_does_not_exist is not declared or defined at all.
|
| 75 |
+
assert not compiler.has_function('setuptools_does_not_exist')
|
| 76 |
+
with pytest.deprecated_call(match='includes is deprecated'):
|
| 77 |
+
assert not compiler.has_function(
|
| 78 |
+
'setuptools_does_not_exist', includes=['stdio.h']
|
| 79 |
+
)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def test_include_dirs_after_multiple_compile_calls(c_file):
|
| 83 |
+
"""
|
| 84 |
+
Calling compile multiple times should not change the include dirs
|
| 85 |
+
(regression test for setuptools issue #3591).
|
| 86 |
+
"""
|
| 87 |
+
compiler = ccompiler.new_compiler()
|
| 88 |
+
python = sysconfig.get_paths()['include']
|
| 89 |
+
compiler.set_include_dirs([python])
|
| 90 |
+
compiler.compile(_make_strs([c_file]))
|
| 91 |
+
assert compiler.include_dirs == [python]
|
| 92 |
+
compiler.compile(_make_strs([c_file]))
|
| 93 |
+
assert compiler.include_dirs == [python]
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_cmd.py
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.cmd."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
from distutils import debug
|
| 5 |
+
from distutils.cmd import Command
|
| 6 |
+
from distutils.dist import Distribution
|
| 7 |
+
from distutils.errors import DistutilsOptionError
|
| 8 |
+
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class MyCmd(Command):
|
| 13 |
+
def initialize_options(self):
|
| 14 |
+
pass
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
@pytest.fixture
|
| 18 |
+
def cmd(request):
|
| 19 |
+
return MyCmd(Distribution())
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class TestCommand:
|
| 23 |
+
def test_ensure_string_list(self, cmd):
|
| 24 |
+
cmd.not_string_list = ['one', 2, 'three']
|
| 25 |
+
cmd.yes_string_list = ['one', 'two', 'three']
|
| 26 |
+
cmd.not_string_list2 = object()
|
| 27 |
+
cmd.yes_string_list2 = 'ok'
|
| 28 |
+
cmd.ensure_string_list('yes_string_list')
|
| 29 |
+
cmd.ensure_string_list('yes_string_list2')
|
| 30 |
+
|
| 31 |
+
with pytest.raises(DistutilsOptionError):
|
| 32 |
+
cmd.ensure_string_list('not_string_list')
|
| 33 |
+
|
| 34 |
+
with pytest.raises(DistutilsOptionError):
|
| 35 |
+
cmd.ensure_string_list('not_string_list2')
|
| 36 |
+
|
| 37 |
+
cmd.option1 = 'ok,dok'
|
| 38 |
+
cmd.ensure_string_list('option1')
|
| 39 |
+
assert cmd.option1 == ['ok', 'dok']
|
| 40 |
+
|
| 41 |
+
cmd.option2 = ['xxx', 'www']
|
| 42 |
+
cmd.ensure_string_list('option2')
|
| 43 |
+
|
| 44 |
+
cmd.option3 = ['ok', 2]
|
| 45 |
+
with pytest.raises(DistutilsOptionError):
|
| 46 |
+
cmd.ensure_string_list('option3')
|
| 47 |
+
|
| 48 |
+
def test_make_file(self, cmd):
|
| 49 |
+
# making sure it raises when infiles is not a string or a list/tuple
|
| 50 |
+
with pytest.raises(TypeError):
|
| 51 |
+
cmd.make_file(infiles=True, outfile='', func='func', args=())
|
| 52 |
+
|
| 53 |
+
# making sure execute gets called properly
|
| 54 |
+
def _execute(func, args, exec_msg, level):
|
| 55 |
+
assert exec_msg == 'generating out from in'
|
| 56 |
+
|
| 57 |
+
cmd.force = True
|
| 58 |
+
cmd.execute = _execute
|
| 59 |
+
cmd.make_file(infiles='in', outfile='out', func='func', args=())
|
| 60 |
+
|
| 61 |
+
def test_dump_options(self, cmd):
|
| 62 |
+
msgs = []
|
| 63 |
+
|
| 64 |
+
def _announce(msg, level):
|
| 65 |
+
msgs.append(msg)
|
| 66 |
+
|
| 67 |
+
cmd.announce = _announce
|
| 68 |
+
cmd.option1 = 1
|
| 69 |
+
cmd.option2 = 1
|
| 70 |
+
cmd.user_options = [('option1', '', ''), ('option2', '', '')]
|
| 71 |
+
cmd.dump_options()
|
| 72 |
+
|
| 73 |
+
wanted = ["command options for 'MyCmd':", ' option1 = 1', ' option2 = 1']
|
| 74 |
+
assert msgs == wanted
|
| 75 |
+
|
| 76 |
+
def test_ensure_string(self, cmd):
|
| 77 |
+
cmd.option1 = 'ok'
|
| 78 |
+
cmd.ensure_string('option1')
|
| 79 |
+
|
| 80 |
+
cmd.option2 = None
|
| 81 |
+
cmd.ensure_string('option2', 'xxx')
|
| 82 |
+
assert hasattr(cmd, 'option2')
|
| 83 |
+
|
| 84 |
+
cmd.option3 = 1
|
| 85 |
+
with pytest.raises(DistutilsOptionError):
|
| 86 |
+
cmd.ensure_string('option3')
|
| 87 |
+
|
| 88 |
+
def test_ensure_filename(self, cmd):
|
| 89 |
+
cmd.option1 = __file__
|
| 90 |
+
cmd.ensure_filename('option1')
|
| 91 |
+
cmd.option2 = 'xxx'
|
| 92 |
+
with pytest.raises(DistutilsOptionError):
|
| 93 |
+
cmd.ensure_filename('option2')
|
| 94 |
+
|
| 95 |
+
def test_ensure_dirname(self, cmd):
|
| 96 |
+
cmd.option1 = os.path.dirname(__file__) or os.curdir
|
| 97 |
+
cmd.ensure_dirname('option1')
|
| 98 |
+
cmd.option2 = 'xxx'
|
| 99 |
+
with pytest.raises(DistutilsOptionError):
|
| 100 |
+
cmd.ensure_dirname('option2')
|
| 101 |
+
|
| 102 |
+
def test_debug_print(self, cmd, capsys, monkeypatch):
|
| 103 |
+
cmd.debug_print('xxx')
|
| 104 |
+
assert capsys.readouterr().out == ''
|
| 105 |
+
monkeypatch.setattr(debug, 'DEBUG', True)
|
| 106 |
+
cmd.debug_print('xxx')
|
| 107 |
+
assert capsys.readouterr().out == 'xxx\n'
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_core.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.core."""
|
| 2 |
+
|
| 3 |
+
import distutils.core
|
| 4 |
+
import io
|
| 5 |
+
import os
|
| 6 |
+
import sys
|
| 7 |
+
from distutils.dist import Distribution
|
| 8 |
+
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
# setup script that uses __file__
|
| 12 |
+
setup_using___file__ = """\
|
| 13 |
+
|
| 14 |
+
__file__
|
| 15 |
+
|
| 16 |
+
from distutils.core import setup
|
| 17 |
+
setup()
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
setup_prints_cwd = """\
|
| 21 |
+
|
| 22 |
+
import os
|
| 23 |
+
print(os.getcwd())
|
| 24 |
+
|
| 25 |
+
from distutils.core import setup
|
| 26 |
+
setup()
|
| 27 |
+
"""
|
| 28 |
+
|
| 29 |
+
setup_does_nothing = """\
|
| 30 |
+
from distutils.core import setup
|
| 31 |
+
setup()
|
| 32 |
+
"""
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
setup_defines_subclass = """\
|
| 36 |
+
from distutils.core import setup
|
| 37 |
+
from distutils.command.install import install as _install
|
| 38 |
+
|
| 39 |
+
class install(_install):
|
| 40 |
+
sub_commands = _install.sub_commands + ['cmd']
|
| 41 |
+
|
| 42 |
+
setup(cmdclass={'install': install})
|
| 43 |
+
"""
|
| 44 |
+
|
| 45 |
+
setup_within_if_main = """\
|
| 46 |
+
from distutils.core import setup
|
| 47 |
+
|
| 48 |
+
def main():
|
| 49 |
+
return setup(name="setup_within_if_main")
|
| 50 |
+
|
| 51 |
+
if __name__ == "__main__":
|
| 52 |
+
main()
|
| 53 |
+
"""
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
@pytest.fixture(autouse=True)
|
| 57 |
+
def save_stdout(monkeypatch):
|
| 58 |
+
monkeypatch.setattr(sys, 'stdout', sys.stdout)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
@pytest.fixture
|
| 62 |
+
def temp_file(tmp_path):
|
| 63 |
+
return tmp_path / 'file'
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
@pytest.mark.usefixtures('save_env')
|
| 67 |
+
@pytest.mark.usefixtures('save_argv')
|
| 68 |
+
class TestCore:
|
| 69 |
+
def test_run_setup_provides_file(self, temp_file):
|
| 70 |
+
# Make sure the script can use __file__; if that's missing, the test
|
| 71 |
+
# setup.py script will raise NameError.
|
| 72 |
+
temp_file.write_text(setup_using___file__, encoding='utf-8')
|
| 73 |
+
distutils.core.run_setup(temp_file)
|
| 74 |
+
|
| 75 |
+
def test_run_setup_preserves_sys_argv(self, temp_file):
|
| 76 |
+
# Make sure run_setup does not clobber sys.argv
|
| 77 |
+
argv_copy = sys.argv.copy()
|
| 78 |
+
temp_file.write_text(setup_does_nothing, encoding='utf-8')
|
| 79 |
+
distutils.core.run_setup(temp_file)
|
| 80 |
+
assert sys.argv == argv_copy
|
| 81 |
+
|
| 82 |
+
def test_run_setup_defines_subclass(self, temp_file):
|
| 83 |
+
# Make sure the script can use __file__; if that's missing, the test
|
| 84 |
+
# setup.py script will raise NameError.
|
| 85 |
+
temp_file.write_text(setup_defines_subclass, encoding='utf-8')
|
| 86 |
+
dist = distutils.core.run_setup(temp_file)
|
| 87 |
+
install = dist.get_command_obj('install')
|
| 88 |
+
assert 'cmd' in install.sub_commands
|
| 89 |
+
|
| 90 |
+
def test_run_setup_uses_current_dir(self, tmp_path):
|
| 91 |
+
"""
|
| 92 |
+
Test that the setup script is run with the current directory
|
| 93 |
+
as its own current directory.
|
| 94 |
+
"""
|
| 95 |
+
sys.stdout = io.StringIO()
|
| 96 |
+
cwd = os.getcwd()
|
| 97 |
+
|
| 98 |
+
# Create a directory and write the setup.py file there:
|
| 99 |
+
setup_py = tmp_path / 'setup.py'
|
| 100 |
+
setup_py.write_text(setup_prints_cwd, encoding='utf-8')
|
| 101 |
+
distutils.core.run_setup(setup_py)
|
| 102 |
+
|
| 103 |
+
output = sys.stdout.getvalue()
|
| 104 |
+
if output.endswith("\n"):
|
| 105 |
+
output = output[:-1]
|
| 106 |
+
assert cwd == output
|
| 107 |
+
|
| 108 |
+
def test_run_setup_within_if_main(self, temp_file):
|
| 109 |
+
temp_file.write_text(setup_within_if_main, encoding='utf-8')
|
| 110 |
+
dist = distutils.core.run_setup(temp_file, stop_after="config")
|
| 111 |
+
assert isinstance(dist, Distribution)
|
| 112 |
+
assert dist.get_name() == "setup_within_if_main"
|
| 113 |
+
|
| 114 |
+
def test_run_commands(self, temp_file):
|
| 115 |
+
sys.argv = ['setup.py', 'build']
|
| 116 |
+
temp_file.write_text(setup_within_if_main, encoding='utf-8')
|
| 117 |
+
dist = distutils.core.run_setup(temp_file, stop_after="commandline")
|
| 118 |
+
assert 'build' not in dist.have_run
|
| 119 |
+
distutils.core.run_commands(dist)
|
| 120 |
+
assert 'build' in dist.have_run
|
| 121 |
+
|
| 122 |
+
def test_debug_mode(self, capsys, monkeypatch):
|
| 123 |
+
# this covers the code called when DEBUG is set
|
| 124 |
+
sys.argv = ['setup.py', '--name']
|
| 125 |
+
distutils.core.setup(name='bar')
|
| 126 |
+
assert capsys.readouterr().out == 'bar\n'
|
| 127 |
+
monkeypatch.setattr(distutils.core, 'DEBUG', True)
|
| 128 |
+
distutils.core.setup(name='bar')
|
| 129 |
+
wanted = "options (after parsing config files):\n"
|
| 130 |
+
assert capsys.readouterr().out.startswith(wanted)
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_cygwinccompiler.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.cygwinccompiler."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
from distutils import sysconfig
|
| 6 |
+
from distutils.cygwinccompiler import (
|
| 7 |
+
CONFIG_H_NOTOK,
|
| 8 |
+
CONFIG_H_OK,
|
| 9 |
+
CONFIG_H_UNCERTAIN,
|
| 10 |
+
check_config_h,
|
| 11 |
+
get_msvcr,
|
| 12 |
+
)
|
| 13 |
+
from distutils.tests import support
|
| 14 |
+
|
| 15 |
+
import pytest
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
@pytest.fixture(autouse=True)
|
| 19 |
+
def stuff(request, monkeypatch, distutils_managed_tempdir):
|
| 20 |
+
self = request.instance
|
| 21 |
+
self.python_h = os.path.join(self.mkdtemp(), 'python.h')
|
| 22 |
+
monkeypatch.setattr(sysconfig, 'get_config_h_filename', self._get_config_h_filename)
|
| 23 |
+
monkeypatch.setattr(sys, 'version', sys.version)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class TestCygwinCCompiler(support.TempdirManager):
|
| 27 |
+
def _get_config_h_filename(self):
|
| 28 |
+
return self.python_h
|
| 29 |
+
|
| 30 |
+
@pytest.mark.skipif('sys.platform != "cygwin"')
|
| 31 |
+
@pytest.mark.skipif('not os.path.exists("/usr/lib/libbash.dll.a")')
|
| 32 |
+
def test_find_library_file(self):
|
| 33 |
+
from distutils.cygwinccompiler import CygwinCCompiler
|
| 34 |
+
|
| 35 |
+
compiler = CygwinCCompiler()
|
| 36 |
+
link_name = "bash"
|
| 37 |
+
linkable_file = compiler.find_library_file(["/usr/lib"], link_name)
|
| 38 |
+
assert linkable_file is not None
|
| 39 |
+
assert os.path.exists(linkable_file)
|
| 40 |
+
assert linkable_file == f"/usr/lib/lib{link_name:s}.dll.a"
|
| 41 |
+
|
| 42 |
+
@pytest.mark.skipif('sys.platform != "cygwin"')
|
| 43 |
+
def test_runtime_library_dir_option(self):
|
| 44 |
+
from distutils.cygwinccompiler import CygwinCCompiler
|
| 45 |
+
|
| 46 |
+
compiler = CygwinCCompiler()
|
| 47 |
+
assert compiler.runtime_library_dir_option('/foo') == []
|
| 48 |
+
|
| 49 |
+
def test_check_config_h(self):
|
| 50 |
+
# check_config_h looks for "GCC" in sys.version first
|
| 51 |
+
# returns CONFIG_H_OK if found
|
| 52 |
+
sys.version = (
|
| 53 |
+
'2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC '
|
| 54 |
+
'4.0.1 (Apple Computer, Inc. build 5370)]'
|
| 55 |
+
)
|
| 56 |
+
|
| 57 |
+
assert check_config_h()[0] == CONFIG_H_OK
|
| 58 |
+
|
| 59 |
+
# then it tries to see if it can find "__GNUC__" in pyconfig.h
|
| 60 |
+
sys.version = 'something without the *CC word'
|
| 61 |
+
|
| 62 |
+
# if the file doesn't exist it returns CONFIG_H_UNCERTAIN
|
| 63 |
+
assert check_config_h()[0] == CONFIG_H_UNCERTAIN
|
| 64 |
+
|
| 65 |
+
# if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK
|
| 66 |
+
self.write_file(self.python_h, 'xxx')
|
| 67 |
+
assert check_config_h()[0] == CONFIG_H_NOTOK
|
| 68 |
+
|
| 69 |
+
# and CONFIG_H_OK if __GNUC__ is found
|
| 70 |
+
self.write_file(self.python_h, 'xxx __GNUC__ xxx')
|
| 71 |
+
assert check_config_h()[0] == CONFIG_H_OK
|
| 72 |
+
|
| 73 |
+
def test_get_msvcr(self):
|
| 74 |
+
assert get_msvcr() == []
|
| 75 |
+
|
| 76 |
+
@pytest.mark.skipif('sys.platform != "cygwin"')
|
| 77 |
+
def test_dll_libraries_not_none(self):
|
| 78 |
+
from distutils.cygwinccompiler import CygwinCCompiler
|
| 79 |
+
|
| 80 |
+
compiler = CygwinCCompiler()
|
| 81 |
+
assert compiler.dll_libraries is not None
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_dist.py
ADDED
|
@@ -0,0 +1,552 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.dist."""
|
| 2 |
+
|
| 3 |
+
import email
|
| 4 |
+
import email.generator
|
| 5 |
+
import email.policy
|
| 6 |
+
import functools
|
| 7 |
+
import io
|
| 8 |
+
import os
|
| 9 |
+
import sys
|
| 10 |
+
import textwrap
|
| 11 |
+
import unittest.mock as mock
|
| 12 |
+
import warnings
|
| 13 |
+
from distutils.cmd import Command
|
| 14 |
+
from distutils.dist import Distribution, fix_help_options
|
| 15 |
+
from distutils.tests import support
|
| 16 |
+
from typing import ClassVar
|
| 17 |
+
|
| 18 |
+
import jaraco.path
|
| 19 |
+
import pytest
|
| 20 |
+
|
| 21 |
+
pydistutils_cfg = '.' * (os.name == 'posix') + 'pydistutils.cfg'
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class test_dist(Command):
|
| 25 |
+
"""Sample distutils extension command."""
|
| 26 |
+
|
| 27 |
+
user_options: ClassVar[list[tuple[str, str, str]]] = [
|
| 28 |
+
("sample-option=", "S", "help text"),
|
| 29 |
+
]
|
| 30 |
+
|
| 31 |
+
def initialize_options(self):
|
| 32 |
+
self.sample_option = None
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class TestDistribution(Distribution):
|
| 36 |
+
"""Distribution subclasses that avoids the default search for
|
| 37 |
+
configuration files.
|
| 38 |
+
|
| 39 |
+
The ._config_files attribute must be set before
|
| 40 |
+
.parse_config_files() is called.
|
| 41 |
+
"""
|
| 42 |
+
|
| 43 |
+
def find_config_files(self):
|
| 44 |
+
return self._config_files
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
@pytest.fixture
|
| 48 |
+
def clear_argv():
|
| 49 |
+
del sys.argv[1:]
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
@support.combine_markers
|
| 53 |
+
@pytest.mark.usefixtures('save_env')
|
| 54 |
+
@pytest.mark.usefixtures('save_argv')
|
| 55 |
+
class TestDistributionBehavior(support.TempdirManager):
|
| 56 |
+
def create_distribution(self, configfiles=()):
|
| 57 |
+
d = TestDistribution()
|
| 58 |
+
d._config_files = configfiles
|
| 59 |
+
d.parse_config_files()
|
| 60 |
+
d.parse_command_line()
|
| 61 |
+
return d
|
| 62 |
+
|
| 63 |
+
def test_command_packages_unspecified(self, clear_argv):
|
| 64 |
+
sys.argv.append("build")
|
| 65 |
+
d = self.create_distribution()
|
| 66 |
+
assert d.get_command_packages() == ["distutils.command"]
|
| 67 |
+
|
| 68 |
+
def test_command_packages_cmdline(self, clear_argv):
|
| 69 |
+
from distutils.tests.test_dist import test_dist
|
| 70 |
+
|
| 71 |
+
sys.argv.extend([
|
| 72 |
+
"--command-packages",
|
| 73 |
+
"foo.bar,distutils.tests",
|
| 74 |
+
"test_dist",
|
| 75 |
+
"-Ssometext",
|
| 76 |
+
])
|
| 77 |
+
d = self.create_distribution()
|
| 78 |
+
# let's actually try to load our test command:
|
| 79 |
+
assert d.get_command_packages() == [
|
| 80 |
+
"distutils.command",
|
| 81 |
+
"foo.bar",
|
| 82 |
+
"distutils.tests",
|
| 83 |
+
]
|
| 84 |
+
cmd = d.get_command_obj("test_dist")
|
| 85 |
+
assert isinstance(cmd, test_dist)
|
| 86 |
+
assert cmd.sample_option == "sometext"
|
| 87 |
+
|
| 88 |
+
@pytest.mark.skipif(
|
| 89 |
+
'distutils' not in Distribution.parse_config_files.__module__,
|
| 90 |
+
reason='Cannot test when virtualenv has monkey-patched Distribution',
|
| 91 |
+
)
|
| 92 |
+
def test_venv_install_options(self, tmp_path, clear_argv):
|
| 93 |
+
sys.argv.append("install")
|
| 94 |
+
file = str(tmp_path / 'file')
|
| 95 |
+
|
| 96 |
+
fakepath = '/somedir'
|
| 97 |
+
|
| 98 |
+
jaraco.path.build({
|
| 99 |
+
file: f"""
|
| 100 |
+
[install]
|
| 101 |
+
install-base = {fakepath}
|
| 102 |
+
install-platbase = {fakepath}
|
| 103 |
+
install-lib = {fakepath}
|
| 104 |
+
install-platlib = {fakepath}
|
| 105 |
+
install-purelib = {fakepath}
|
| 106 |
+
install-headers = {fakepath}
|
| 107 |
+
install-scripts = {fakepath}
|
| 108 |
+
install-data = {fakepath}
|
| 109 |
+
prefix = {fakepath}
|
| 110 |
+
exec-prefix = {fakepath}
|
| 111 |
+
home = {fakepath}
|
| 112 |
+
user = {fakepath}
|
| 113 |
+
root = {fakepath}
|
| 114 |
+
""",
|
| 115 |
+
})
|
| 116 |
+
|
| 117 |
+
# Base case: Not in a Virtual Environment
|
| 118 |
+
with mock.patch.multiple(sys, prefix='/a', base_prefix='/a'):
|
| 119 |
+
d = self.create_distribution([file])
|
| 120 |
+
|
| 121 |
+
option_tuple = (file, fakepath)
|
| 122 |
+
|
| 123 |
+
result_dict = {
|
| 124 |
+
'install_base': option_tuple,
|
| 125 |
+
'install_platbase': option_tuple,
|
| 126 |
+
'install_lib': option_tuple,
|
| 127 |
+
'install_platlib': option_tuple,
|
| 128 |
+
'install_purelib': option_tuple,
|
| 129 |
+
'install_headers': option_tuple,
|
| 130 |
+
'install_scripts': option_tuple,
|
| 131 |
+
'install_data': option_tuple,
|
| 132 |
+
'prefix': option_tuple,
|
| 133 |
+
'exec_prefix': option_tuple,
|
| 134 |
+
'home': option_tuple,
|
| 135 |
+
'user': option_tuple,
|
| 136 |
+
'root': option_tuple,
|
| 137 |
+
}
|
| 138 |
+
|
| 139 |
+
assert sorted(d.command_options.get('install').keys()) == sorted(
|
| 140 |
+
result_dict.keys()
|
| 141 |
+
)
|
| 142 |
+
|
| 143 |
+
for key, value in d.command_options.get('install').items():
|
| 144 |
+
assert value == result_dict[key]
|
| 145 |
+
|
| 146 |
+
# Test case: In a Virtual Environment
|
| 147 |
+
with mock.patch.multiple(sys, prefix='/a', base_prefix='/b'):
|
| 148 |
+
d = self.create_distribution([file])
|
| 149 |
+
|
| 150 |
+
for key in result_dict.keys():
|
| 151 |
+
assert key not in d.command_options.get('install', {})
|
| 152 |
+
|
| 153 |
+
def test_command_packages_configfile(self, tmp_path, clear_argv):
|
| 154 |
+
sys.argv.append("build")
|
| 155 |
+
file = str(tmp_path / "file")
|
| 156 |
+
jaraco.path.build({
|
| 157 |
+
file: """
|
| 158 |
+
[global]
|
| 159 |
+
command_packages = foo.bar, splat
|
| 160 |
+
""",
|
| 161 |
+
})
|
| 162 |
+
|
| 163 |
+
d = self.create_distribution([file])
|
| 164 |
+
assert d.get_command_packages() == ["distutils.command", "foo.bar", "splat"]
|
| 165 |
+
|
| 166 |
+
# ensure command line overrides config:
|
| 167 |
+
sys.argv[1:] = ["--command-packages", "spork", "build"]
|
| 168 |
+
d = self.create_distribution([file])
|
| 169 |
+
assert d.get_command_packages() == ["distutils.command", "spork"]
|
| 170 |
+
|
| 171 |
+
# Setting --command-packages to '' should cause the default to
|
| 172 |
+
# be used even if a config file specified something else:
|
| 173 |
+
sys.argv[1:] = ["--command-packages", "", "build"]
|
| 174 |
+
d = self.create_distribution([file])
|
| 175 |
+
assert d.get_command_packages() == ["distutils.command"]
|
| 176 |
+
|
| 177 |
+
def test_empty_options(self, request):
|
| 178 |
+
# an empty options dictionary should not stay in the
|
| 179 |
+
# list of attributes
|
| 180 |
+
|
| 181 |
+
# catching warnings
|
| 182 |
+
warns = []
|
| 183 |
+
|
| 184 |
+
def _warn(msg):
|
| 185 |
+
warns.append(msg)
|
| 186 |
+
|
| 187 |
+
request.addfinalizer(
|
| 188 |
+
functools.partial(setattr, warnings, 'warn', warnings.warn)
|
| 189 |
+
)
|
| 190 |
+
warnings.warn = _warn
|
| 191 |
+
dist = Distribution(
|
| 192 |
+
attrs={
|
| 193 |
+
'author': 'xxx',
|
| 194 |
+
'name': 'xxx',
|
| 195 |
+
'version': 'xxx',
|
| 196 |
+
'url': 'xxxx',
|
| 197 |
+
'options': {},
|
| 198 |
+
}
|
| 199 |
+
)
|
| 200 |
+
|
| 201 |
+
assert len(warns) == 0
|
| 202 |
+
assert 'options' not in dir(dist)
|
| 203 |
+
|
| 204 |
+
def test_finalize_options(self):
|
| 205 |
+
attrs = {'keywords': 'one,two', 'platforms': 'one,two'}
|
| 206 |
+
|
| 207 |
+
dist = Distribution(attrs=attrs)
|
| 208 |
+
dist.finalize_options()
|
| 209 |
+
|
| 210 |
+
# finalize_option splits platforms and keywords
|
| 211 |
+
assert dist.metadata.platforms == ['one', 'two']
|
| 212 |
+
assert dist.metadata.keywords == ['one', 'two']
|
| 213 |
+
|
| 214 |
+
attrs = {'keywords': 'foo bar', 'platforms': 'foo bar'}
|
| 215 |
+
dist = Distribution(attrs=attrs)
|
| 216 |
+
dist.finalize_options()
|
| 217 |
+
assert dist.metadata.platforms == ['foo bar']
|
| 218 |
+
assert dist.metadata.keywords == ['foo bar']
|
| 219 |
+
|
| 220 |
+
def test_get_command_packages(self):
|
| 221 |
+
dist = Distribution()
|
| 222 |
+
assert dist.command_packages is None
|
| 223 |
+
cmds = dist.get_command_packages()
|
| 224 |
+
assert cmds == ['distutils.command']
|
| 225 |
+
assert dist.command_packages == ['distutils.command']
|
| 226 |
+
|
| 227 |
+
dist.command_packages = 'one,two'
|
| 228 |
+
cmds = dist.get_command_packages()
|
| 229 |
+
assert cmds == ['distutils.command', 'one', 'two']
|
| 230 |
+
|
| 231 |
+
def test_announce(self):
|
| 232 |
+
# make sure the level is known
|
| 233 |
+
dist = Distribution()
|
| 234 |
+
with pytest.raises(TypeError):
|
| 235 |
+
dist.announce('ok', level='ok2')
|
| 236 |
+
|
| 237 |
+
def test_find_config_files_disable(self, temp_home):
|
| 238 |
+
# Ticket #1180: Allow user to disable their home config file.
|
| 239 |
+
jaraco.path.build({pydistutils_cfg: '[distutils]\n'}, temp_home)
|
| 240 |
+
|
| 241 |
+
d = Distribution()
|
| 242 |
+
all_files = d.find_config_files()
|
| 243 |
+
|
| 244 |
+
d = Distribution(attrs={'script_args': ['--no-user-cfg']})
|
| 245 |
+
files = d.find_config_files()
|
| 246 |
+
|
| 247 |
+
# make sure --no-user-cfg disables the user cfg file
|
| 248 |
+
assert len(all_files) - 1 == len(files)
|
| 249 |
+
|
| 250 |
+
def test_script_args_list_coercion(self):
|
| 251 |
+
d = Distribution(attrs={'script_args': ('build', '--no-user-cfg')})
|
| 252 |
+
|
| 253 |
+
# make sure script_args is a list even if it started as a different iterable
|
| 254 |
+
assert d.script_args == ['build', '--no-user-cfg']
|
| 255 |
+
|
| 256 |
+
@pytest.mark.skipif(
|
| 257 |
+
'platform.system() == "Windows"',
|
| 258 |
+
reason='Windows does not honor chmod 000',
|
| 259 |
+
)
|
| 260 |
+
def test_find_config_files_permission_error(self, fake_home):
|
| 261 |
+
"""
|
| 262 |
+
Finding config files should not fail when directory is inaccessible.
|
| 263 |
+
"""
|
| 264 |
+
fake_home.joinpath(pydistutils_cfg).write_text('', encoding='utf-8')
|
| 265 |
+
fake_home.chmod(0o000)
|
| 266 |
+
Distribution().find_config_files()
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
@pytest.mark.usefixtures('save_env')
|
| 270 |
+
@pytest.mark.usefixtures('save_argv')
|
| 271 |
+
class TestMetadata(support.TempdirManager):
|
| 272 |
+
def format_metadata(self, dist):
|
| 273 |
+
sio = io.StringIO()
|
| 274 |
+
dist.metadata.write_pkg_file(sio)
|
| 275 |
+
return sio.getvalue()
|
| 276 |
+
|
| 277 |
+
def test_simple_metadata(self):
|
| 278 |
+
attrs = {"name": "package", "version": "1.0"}
|
| 279 |
+
dist = Distribution(attrs)
|
| 280 |
+
meta = self.format_metadata(dist)
|
| 281 |
+
assert "Metadata-Version: 1.0" in meta
|
| 282 |
+
assert "provides:" not in meta.lower()
|
| 283 |
+
assert "requires:" not in meta.lower()
|
| 284 |
+
assert "obsoletes:" not in meta.lower()
|
| 285 |
+
|
| 286 |
+
def test_provides(self):
|
| 287 |
+
attrs = {
|
| 288 |
+
"name": "package",
|
| 289 |
+
"version": "1.0",
|
| 290 |
+
"provides": ["package", "package.sub"],
|
| 291 |
+
}
|
| 292 |
+
dist = Distribution(attrs)
|
| 293 |
+
assert dist.metadata.get_provides() == ["package", "package.sub"]
|
| 294 |
+
assert dist.get_provides() == ["package", "package.sub"]
|
| 295 |
+
meta = self.format_metadata(dist)
|
| 296 |
+
assert "Metadata-Version: 1.1" in meta
|
| 297 |
+
assert "requires:" not in meta.lower()
|
| 298 |
+
assert "obsoletes:" not in meta.lower()
|
| 299 |
+
|
| 300 |
+
def test_provides_illegal(self):
|
| 301 |
+
with pytest.raises(ValueError):
|
| 302 |
+
Distribution(
|
| 303 |
+
{"name": "package", "version": "1.0", "provides": ["my.pkg (splat)"]},
|
| 304 |
+
)
|
| 305 |
+
|
| 306 |
+
def test_requires(self):
|
| 307 |
+
attrs = {
|
| 308 |
+
"name": "package",
|
| 309 |
+
"version": "1.0",
|
| 310 |
+
"requires": ["other", "another (==1.0)"],
|
| 311 |
+
}
|
| 312 |
+
dist = Distribution(attrs)
|
| 313 |
+
assert dist.metadata.get_requires() == ["other", "another (==1.0)"]
|
| 314 |
+
assert dist.get_requires() == ["other", "another (==1.0)"]
|
| 315 |
+
meta = self.format_metadata(dist)
|
| 316 |
+
assert "Metadata-Version: 1.1" in meta
|
| 317 |
+
assert "provides:" not in meta.lower()
|
| 318 |
+
assert "Requires: other" in meta
|
| 319 |
+
assert "Requires: another (==1.0)" in meta
|
| 320 |
+
assert "obsoletes:" not in meta.lower()
|
| 321 |
+
|
| 322 |
+
def test_requires_illegal(self):
|
| 323 |
+
with pytest.raises(ValueError):
|
| 324 |
+
Distribution(
|
| 325 |
+
{"name": "package", "version": "1.0", "requires": ["my.pkg (splat)"]},
|
| 326 |
+
)
|
| 327 |
+
|
| 328 |
+
def test_requires_to_list(self):
|
| 329 |
+
attrs = {"name": "package", "requires": iter(["other"])}
|
| 330 |
+
dist = Distribution(attrs)
|
| 331 |
+
assert isinstance(dist.metadata.requires, list)
|
| 332 |
+
|
| 333 |
+
def test_obsoletes(self):
|
| 334 |
+
attrs = {
|
| 335 |
+
"name": "package",
|
| 336 |
+
"version": "1.0",
|
| 337 |
+
"obsoletes": ["other", "another (<1.0)"],
|
| 338 |
+
}
|
| 339 |
+
dist = Distribution(attrs)
|
| 340 |
+
assert dist.metadata.get_obsoletes() == ["other", "another (<1.0)"]
|
| 341 |
+
assert dist.get_obsoletes() == ["other", "another (<1.0)"]
|
| 342 |
+
meta = self.format_metadata(dist)
|
| 343 |
+
assert "Metadata-Version: 1.1" in meta
|
| 344 |
+
assert "provides:" not in meta.lower()
|
| 345 |
+
assert "requires:" not in meta.lower()
|
| 346 |
+
assert "Obsoletes: other" in meta
|
| 347 |
+
assert "Obsoletes: another (<1.0)" in meta
|
| 348 |
+
|
| 349 |
+
def test_obsoletes_illegal(self):
|
| 350 |
+
with pytest.raises(ValueError):
|
| 351 |
+
Distribution(
|
| 352 |
+
{"name": "package", "version": "1.0", "obsoletes": ["my.pkg (splat)"]},
|
| 353 |
+
)
|
| 354 |
+
|
| 355 |
+
def test_obsoletes_to_list(self):
|
| 356 |
+
attrs = {"name": "package", "obsoletes": iter(["other"])}
|
| 357 |
+
dist = Distribution(attrs)
|
| 358 |
+
assert isinstance(dist.metadata.obsoletes, list)
|
| 359 |
+
|
| 360 |
+
def test_classifier(self):
|
| 361 |
+
attrs = {
|
| 362 |
+
'name': 'Boa',
|
| 363 |
+
'version': '3.0',
|
| 364 |
+
'classifiers': ['Programming Language :: Python :: 3'],
|
| 365 |
+
}
|
| 366 |
+
dist = Distribution(attrs)
|
| 367 |
+
assert dist.get_classifiers() == ['Programming Language :: Python :: 3']
|
| 368 |
+
meta = self.format_metadata(dist)
|
| 369 |
+
assert 'Metadata-Version: 1.1' in meta
|
| 370 |
+
|
| 371 |
+
def test_classifier_invalid_type(self, caplog):
|
| 372 |
+
attrs = {
|
| 373 |
+
'name': 'Boa',
|
| 374 |
+
'version': '3.0',
|
| 375 |
+
'classifiers': ('Programming Language :: Python :: 3',),
|
| 376 |
+
}
|
| 377 |
+
d = Distribution(attrs)
|
| 378 |
+
# should have warning about passing a non-list
|
| 379 |
+
assert 'should be a list' in caplog.messages[0]
|
| 380 |
+
# should be converted to a list
|
| 381 |
+
assert isinstance(d.metadata.classifiers, list)
|
| 382 |
+
assert d.metadata.classifiers == list(attrs['classifiers'])
|
| 383 |
+
|
| 384 |
+
def test_keywords(self):
|
| 385 |
+
attrs = {
|
| 386 |
+
'name': 'Monty',
|
| 387 |
+
'version': '1.0',
|
| 388 |
+
'keywords': ['spam', 'eggs', 'life of brian'],
|
| 389 |
+
}
|
| 390 |
+
dist = Distribution(attrs)
|
| 391 |
+
assert dist.get_keywords() == ['spam', 'eggs', 'life of brian']
|
| 392 |
+
|
| 393 |
+
def test_keywords_invalid_type(self, caplog):
|
| 394 |
+
attrs = {
|
| 395 |
+
'name': 'Monty',
|
| 396 |
+
'version': '1.0',
|
| 397 |
+
'keywords': ('spam', 'eggs', 'life of brian'),
|
| 398 |
+
}
|
| 399 |
+
d = Distribution(attrs)
|
| 400 |
+
# should have warning about passing a non-list
|
| 401 |
+
assert 'should be a list' in caplog.messages[0]
|
| 402 |
+
# should be converted to a list
|
| 403 |
+
assert isinstance(d.metadata.keywords, list)
|
| 404 |
+
assert d.metadata.keywords == list(attrs['keywords'])
|
| 405 |
+
|
| 406 |
+
def test_platforms(self):
|
| 407 |
+
attrs = {
|
| 408 |
+
'name': 'Monty',
|
| 409 |
+
'version': '1.0',
|
| 410 |
+
'platforms': ['GNU/Linux', 'Some Evil Platform'],
|
| 411 |
+
}
|
| 412 |
+
dist = Distribution(attrs)
|
| 413 |
+
assert dist.get_platforms() == ['GNU/Linux', 'Some Evil Platform']
|
| 414 |
+
|
| 415 |
+
def test_platforms_invalid_types(self, caplog):
|
| 416 |
+
attrs = {
|
| 417 |
+
'name': 'Monty',
|
| 418 |
+
'version': '1.0',
|
| 419 |
+
'platforms': ('GNU/Linux', 'Some Evil Platform'),
|
| 420 |
+
}
|
| 421 |
+
d = Distribution(attrs)
|
| 422 |
+
# should have warning about passing a non-list
|
| 423 |
+
assert 'should be a list' in caplog.messages[0]
|
| 424 |
+
# should be converted to a list
|
| 425 |
+
assert isinstance(d.metadata.platforms, list)
|
| 426 |
+
assert d.metadata.platforms == list(attrs['platforms'])
|
| 427 |
+
|
| 428 |
+
def test_download_url(self):
|
| 429 |
+
attrs = {
|
| 430 |
+
'name': 'Boa',
|
| 431 |
+
'version': '3.0',
|
| 432 |
+
'download_url': 'http://example.org/boa',
|
| 433 |
+
}
|
| 434 |
+
dist = Distribution(attrs)
|
| 435 |
+
meta = self.format_metadata(dist)
|
| 436 |
+
assert 'Metadata-Version: 1.1' in meta
|
| 437 |
+
|
| 438 |
+
def test_long_description(self):
|
| 439 |
+
long_desc = textwrap.dedent(
|
| 440 |
+
"""\
|
| 441 |
+
example::
|
| 442 |
+
We start here
|
| 443 |
+
and continue here
|
| 444 |
+
and end here."""
|
| 445 |
+
)
|
| 446 |
+
attrs = {"name": "package", "version": "1.0", "long_description": long_desc}
|
| 447 |
+
|
| 448 |
+
dist = Distribution(attrs)
|
| 449 |
+
meta = self.format_metadata(dist)
|
| 450 |
+
meta = meta.replace('\n' + 8 * ' ', '\n')
|
| 451 |
+
assert long_desc in meta
|
| 452 |
+
|
| 453 |
+
def test_custom_pydistutils(self, temp_home):
|
| 454 |
+
"""
|
| 455 |
+
pydistutils.cfg is found
|
| 456 |
+
"""
|
| 457 |
+
jaraco.path.build({pydistutils_cfg: ''}, temp_home)
|
| 458 |
+
config_path = temp_home / pydistutils_cfg
|
| 459 |
+
|
| 460 |
+
assert str(config_path) in Distribution().find_config_files()
|
| 461 |
+
|
| 462 |
+
def test_extra_pydistutils(self, monkeypatch, tmp_path):
|
| 463 |
+
jaraco.path.build({'overrides.cfg': ''}, tmp_path)
|
| 464 |
+
filename = tmp_path / 'overrides.cfg'
|
| 465 |
+
monkeypatch.setenv('DIST_EXTRA_CONFIG', str(filename))
|
| 466 |
+
assert str(filename) in Distribution().find_config_files()
|
| 467 |
+
|
| 468 |
+
def test_fix_help_options(self):
|
| 469 |
+
help_tuples = [('a', 'b', 'c', 'd'), (1, 2, 3, 4)]
|
| 470 |
+
fancy_options = fix_help_options(help_tuples)
|
| 471 |
+
assert fancy_options[0] == ('a', 'b', 'c')
|
| 472 |
+
assert fancy_options[1] == (1, 2, 3)
|
| 473 |
+
|
| 474 |
+
def test_show_help(self, request, capsys):
|
| 475 |
+
# smoke test, just makes sure some help is displayed
|
| 476 |
+
dist = Distribution()
|
| 477 |
+
sys.argv = []
|
| 478 |
+
dist.help = True
|
| 479 |
+
dist.script_name = 'setup.py'
|
| 480 |
+
dist.parse_command_line()
|
| 481 |
+
|
| 482 |
+
output = [
|
| 483 |
+
line for line in capsys.readouterr().out.split('\n') if line.strip() != ''
|
| 484 |
+
]
|
| 485 |
+
assert output
|
| 486 |
+
|
| 487 |
+
def test_read_metadata(self):
|
| 488 |
+
attrs = {
|
| 489 |
+
"name": "package",
|
| 490 |
+
"version": "1.0",
|
| 491 |
+
"long_description": "desc",
|
| 492 |
+
"description": "xxx",
|
| 493 |
+
"download_url": "http://example.com",
|
| 494 |
+
"keywords": ['one', 'two'],
|
| 495 |
+
"requires": ['foo'],
|
| 496 |
+
}
|
| 497 |
+
|
| 498 |
+
dist = Distribution(attrs)
|
| 499 |
+
metadata = dist.metadata
|
| 500 |
+
|
| 501 |
+
# write it then reloads it
|
| 502 |
+
PKG_INFO = io.StringIO()
|
| 503 |
+
metadata.write_pkg_file(PKG_INFO)
|
| 504 |
+
PKG_INFO.seek(0)
|
| 505 |
+
metadata.read_pkg_file(PKG_INFO)
|
| 506 |
+
|
| 507 |
+
assert metadata.name == "package"
|
| 508 |
+
assert metadata.version == "1.0"
|
| 509 |
+
assert metadata.description == "xxx"
|
| 510 |
+
assert metadata.download_url == 'http://example.com'
|
| 511 |
+
assert metadata.keywords == ['one', 'two']
|
| 512 |
+
assert metadata.platforms is None
|
| 513 |
+
assert metadata.obsoletes is None
|
| 514 |
+
assert metadata.requires == ['foo']
|
| 515 |
+
|
| 516 |
+
def test_round_trip_through_email_generator(self):
|
| 517 |
+
"""
|
| 518 |
+
In pypa/setuptools#4033, it was shown that once PKG-INFO is
|
| 519 |
+
re-generated using ``email.generator.Generator``, some control
|
| 520 |
+
characters might cause problems.
|
| 521 |
+
"""
|
| 522 |
+
# Given a PKG-INFO file ...
|
| 523 |
+
attrs = {
|
| 524 |
+
"name": "package",
|
| 525 |
+
"version": "1.0",
|
| 526 |
+
"long_description": "hello\x0b\nworld\n",
|
| 527 |
+
}
|
| 528 |
+
dist = Distribution(attrs)
|
| 529 |
+
metadata = dist.metadata
|
| 530 |
+
|
| 531 |
+
with io.StringIO() as buffer:
|
| 532 |
+
metadata.write_pkg_file(buffer)
|
| 533 |
+
msg = buffer.getvalue()
|
| 534 |
+
|
| 535 |
+
# ... when it is read and re-written using stdlib's email library,
|
| 536 |
+
orig = email.message_from_string(msg)
|
| 537 |
+
policy = email.policy.EmailPolicy(
|
| 538 |
+
utf8=True,
|
| 539 |
+
mangle_from_=False,
|
| 540 |
+
max_line_length=0,
|
| 541 |
+
)
|
| 542 |
+
with io.StringIO() as buffer:
|
| 543 |
+
email.generator.Generator(buffer, policy=policy).flatten(orig)
|
| 544 |
+
|
| 545 |
+
buffer.seek(0)
|
| 546 |
+
regen = email.message_from_file(buffer)
|
| 547 |
+
|
| 548 |
+
# ... then it should be the same as the original
|
| 549 |
+
# (except for the specific line break characters)
|
| 550 |
+
orig_desc = set(orig["Description"].splitlines())
|
| 551 |
+
regen_desc = set(regen["Description"].splitlines())
|
| 552 |
+
assert regen_desc == orig_desc
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_extension.py
ADDED
|
@@ -0,0 +1,117 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.extension."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import pathlib
|
| 5 |
+
import warnings
|
| 6 |
+
from distutils.extension import Extension, read_setup_file
|
| 7 |
+
|
| 8 |
+
import pytest
|
| 9 |
+
from test.support.warnings_helper import check_warnings
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class TestExtension:
    """Unit tests for distutils.extension.Extension and read_setup_file."""

    def test_read_setup_file(self):
        # Parse a Setup file (sample extracted from the PyGame project)
        # and check the set of extension modules it yields.
        sample = os.path.join(os.path.dirname(__file__), 'Setup.sample')

        extensions = read_setup_file(sample)
        found = sorted(ext.name for ext in extensions)

        # here are the extensions read_setup_file should have created
        # out of the file
        expected = [
            '_arraysurfarray',
            '_camera',
            '_numericsndarray',
            '_numericsurfarray',
            'base',
            'bufferproxy',
            'cdrom',
            'color',
            'constants',
            'display',
            'draw',
            'event',
            'fastevent',
            'font',
            'gfxdraw',
            'image',
            'imageext',
            'joystick',
            'key',
            'mask',
            'mixer',
            'mixer_music',
            'mouse',
            'movie',
            'overlay',
            'pixelarray',
            'pypm',
            'rect',
            'rwobject',
            'scrap',
            'surface',
            'surflock',
            'time',
            'transform',
        ]

        assert found == expected

    def test_extension_init(self):
        # the first argument, which is the name, must be a string
        with pytest.raises(TypeError):
            Extension(1, [])
        ext = Extension('name', [])
        assert ext.name == 'name'

        # the second argument, which is the list of files, must
        # be an iterable of strings or PathLike objects, and not a string
        with pytest.raises(TypeError):
            Extension('name', 'file')
        with pytest.raises(TypeError):
            Extension('name', ['file', 1])
        ext = Extension('name', ['file1', 'file2'])
        assert ext.sources == ['file1', 'file2']
        ext = Extension('name', [pathlib.Path('file1'), pathlib.Path('file2')])
        assert ext.sources == ['file1', 'file2']

        # any non-string iterable of strings or PathLike objects should work
        for sources in (
            ('file1', 'file2'),  # tuple
            iter(['file1', 'file2']),  # iterator
            [pathlib.Path('file1'), 'file2'],  # mixed types
        ):
            assert Extension('name', sources).sources == ['file1', 'file2']
        # set: order is unspecified, so compare sorted
        assert sorted(Extension('name', {'file1', 'file2'}).sources) == [
            'file1',
            'file2',
        ]

        # others arguments have defaults
        list_valued_attrs = (
            'include_dirs',
            'define_macros',
            'undef_macros',
            'library_dirs',
            'libraries',
            'runtime_library_dirs',
            'extra_objects',
            'extra_compile_args',
            'extra_link_args',
            'export_symbols',
            'swig_opts',
            'depends',
        )
        for attr in list_valued_attrs:
            assert getattr(ext, attr) == []

        assert ext.language is None
        assert ext.optional is None

        # if there are unknown keyword options, warn about them
        with check_warnings() as w:
            warnings.simplefilter('always')
            ext = Extension('name', ['file1', 'file2'], chic=True)

        assert len(w.warnings) == 1
        assert str(w.warnings[0].message) == "Unknown Extension options: 'chic'"
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_file_util.py
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.file_util."""
|
| 2 |
+
|
| 3 |
+
import errno
|
| 4 |
+
import os
|
| 5 |
+
import unittest.mock as mock
|
| 6 |
+
from distutils.errors import DistutilsFileError
|
| 7 |
+
from distutils.file_util import copy_file, move_file
|
| 8 |
+
|
| 9 |
+
import jaraco.path
|
| 10 |
+
import pytest
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
@pytest.fixture(autouse=True)
def stuff(request, tmp_path):
    """Attach fresh source/target paths to the running test instance."""
    instance = request.instance
    instance.source = tmp_path / 'f1'
    instance.target = tmp_path / 'f2'
    instance.target_dir = tmp_path / 'd1'
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class TestFileUtil:
    """Tests for distutils.file_util.copy_file and move_file.

    Relies on the module-level autouse ``stuff`` fixture, which sets
    ``self.source``, ``self.target`` and ``self.target_dir`` to
    pathlib.Path objects inside a fresh temporary directory.
    """

    def test_move_file_verbosity(self, caplog):
        jaraco.path.build({self.source: 'some content'})

        move_file(self.source, self.target, verbose=False)
        assert not caplog.messages

        # back to original state
        move_file(self.target, self.source, verbose=False)

        move_file(self.source, self.target, verbose=True)
        wanted = [f'moving {self.source} -> {self.target}']
        assert caplog.messages == wanted

        # back to original state
        move_file(self.target, self.source, verbose=False)

        caplog.clear()
        # now the target is a dir
        os.mkdir(self.target_dir)
        move_file(self.source, self.target_dir, verbose=True)
        wanted = [f'moving {self.source} -> {self.target_dir}']
        assert caplog.messages == wanted

    def test_move_file_exception_unpacking_rename(self):
        # see issue 22182
        with (
            mock.patch("os.rename", side_effect=OSError("wrong", 1)),
            pytest.raises(DistutilsFileError),
        ):
            jaraco.path.build({self.source: 'spam eggs'})
            move_file(self.source, self.target, verbose=False)

    def test_move_file_exception_unpacking_unlink(self):
        # see issue 22182
        with (
            mock.patch("os.rename", side_effect=OSError(errno.EXDEV, "wrong")),
            mock.patch("os.unlink", side_effect=OSError("wrong", 1)),
            pytest.raises(DistutilsFileError),
        ):
            jaraco.path.build({self.source: 'spam eggs'})
            move_file(self.source, self.target, verbose=False)

    def test_copy_file_hard_link(self):
        jaraco.path.build({self.source: 'some content'})
        # Check first that copy_file() will not fall back on copying the file
        # instead of creating the hard link.
        try:
            os.link(self.source, self.target)
        except OSError as e:
            # BUGFIX: this class does not derive from unittest.TestCase, so
            # ``self.skipTest(...)`` would raise AttributeError instead of
            # skipping; use pytest's imperative skip instead.
            pytest.skip(f'os.link: {e}')
        else:
            self.target.unlink()
        st = os.stat(self.source)
        copy_file(self.source, self.target, link='hard')
        st2 = os.stat(self.source)
        st3 = os.stat(self.target)
        assert os.path.samestat(st, st2), (st, st2)
        assert os.path.samestat(st2, st3), (st2, st3)
        assert self.source.read_text(encoding='utf-8') == 'some content'

    def test_copy_file_hard_link_failure(self):
        # If hard linking fails, copy_file() falls back on copying file
        # (some special filesystems don't support hard linking even under
        # Unix, see issue #8876).
        jaraco.path.build({self.source: 'some content'})
        st = os.stat(self.source)
        with mock.patch("os.link", side_effect=OSError(0, "linking unsupported")):
            copy_file(self.source, self.target, link='hard')
        st2 = os.stat(self.source)
        st3 = os.stat(self.target)
        assert os.path.samestat(st, st2), (st, st2)
        assert not os.path.samestat(st2, st3), (st2, st3)
        for fn in (self.source, self.target):
            assert fn.read_text(encoding='utf-8') == 'some content'
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_filelist.py
ADDED
|
@@ -0,0 +1,336 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.filelist."""
|
| 2 |
+
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
from distutils import debug, filelist
|
| 7 |
+
from distutils.errors import DistutilsTemplateError
|
| 8 |
+
from distutils.filelist import FileList, glob_to_re, translate_pattern
|
| 9 |
+
|
| 10 |
+
import jaraco.path
|
| 11 |
+
import pytest
|
| 12 |
+
|
| 13 |
+
from .compat import py39 as os_helper
|
| 14 |
+
|
| 15 |
+
MANIFEST_IN = """\
|
| 16 |
+
include ok
|
| 17 |
+
include xo
|
| 18 |
+
exclude xo
|
| 19 |
+
include foo.tmp
|
| 20 |
+
include buildout.cfg
|
| 21 |
+
global-include *.x
|
| 22 |
+
global-include *.txt
|
| 23 |
+
global-exclude *.tmp
|
| 24 |
+
recursive-include f *.oo
|
| 25 |
+
recursive-exclude global *.x
|
| 26 |
+
graft dir
|
| 27 |
+
prune dir3
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def make_local_path(s):
    """Converts '/' in a string to os.sep"""
    return os.sep.join(s.split('/'))
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class TestFileList:
    """Tests for distutils.filelist.FileList, glob_to_re, translate_pattern."""

    def _warning_records(self, caplog):
        # All records emitted at WARNING level since the last clear().
        return [rec for rec in caplog.records if rec.levelno == logging.WARNING]

    def assertNoWarnings(self, caplog):
        assert not self._warning_records(caplog)
        caplog.clear()

    def assertWarnings(self, caplog):
        assert self._warning_records(caplog)
        caplog.clear()

    def test_glob_to_re(self):
        # On Windows the separator itself must be regex-escaped.
        sep = re.escape(os.sep) if os.sep == '\\' else os.sep

        for glob, template in (
            # simple cases
            ('foo*', r'(?s:foo[^%(sep)s]*)\Z'),
            ('foo?', r'(?s:foo[^%(sep)s])\Z'),
            ('foo??', r'(?s:foo[^%(sep)s][^%(sep)s])\Z'),
            # special cases
            (r'foo\\*', r'(?s:foo\\\\[^%(sep)s]*)\Z'),
            (r'foo\\\*', r'(?s:foo\\\\\\[^%(sep)s]*)\Z'),
            ('foo????', r'(?s:foo[^%(sep)s][^%(sep)s][^%(sep)s][^%(sep)s])\Z'),
            (r'foo\\??', r'(?s:foo\\\\[^%(sep)s][^%(sep)s])\Z'),
        ):
            assert glob_to_re(glob) == template % {'sep': sep}

    def test_process_template_line(self):
        # testing all MANIFEST.in template patterns
        file_list = FileList()
        mlp = make_local_path

        # simulated file list
        file_list.allfiles = [
            'foo.tmp',
            'ok',
            'xo',
            'four.txt',
            'buildout.cfg',
            # filelist does not filter out VCS directories,
            # it's sdist that does
            mlp('.hg/last-message.txt'),
            mlp('global/one.txt'),
            mlp('global/two.txt'),
            mlp('global/files.x'),
            mlp('global/here.tmp'),
            mlp('f/o/f.oo'),
            mlp('dir/graft-one'),
            mlp('dir/dir2/graft2'),
            mlp('dir3/ok'),
            mlp('dir3/sub/ok.txt'),
        ]

        # feed every non-blank template line through the parser
        for line in filter(str.strip, MANIFEST_IN.split('\n')):
            file_list.process_template_line(line)

        wanted = [
            'ok',
            'buildout.cfg',
            'four.txt',
            mlp('.hg/last-message.txt'),
            mlp('global/one.txt'),
            mlp('global/two.txt'),
            mlp('f/o/f.oo'),
            mlp('dir/graft-one'),
            mlp('dir/dir2/graft2'),
        ]

        assert file_list.files == wanted

    def test_debug_print(self, capsys, monkeypatch):
        file_list = FileList()
        file_list.debug_print('xxx')
        assert capsys.readouterr().out == ''

        monkeypatch.setattr(debug, 'DEBUG', True)
        file_list.debug_print('xxx')
        assert capsys.readouterr().out == 'xxx\n'

    def test_set_allfiles(self):
        file_list = FileList()
        files = ['a', 'b', 'c']
        file_list.set_allfiles(files)
        assert file_list.allfiles == files

    def test_remove_duplicates(self):
        file_list = FileList()
        file_list.files = ['a', 'b', 'a', 'g', 'c', 'g']
        # files must be sorted beforehand (sdist does it)
        file_list.sort()
        file_list.remove_duplicates()
        assert file_list.files == ['a', 'b', 'c', 'g']

    def test_translate_pattern(self):
        # not regex
        assert hasattr(translate_pattern('a', anchor=True, is_regex=False), 'search')

        # is a regex
        regex = re.compile('a')
        assert translate_pattern(regex, anchor=True, is_regex=True) == regex

        # plain string flagged as regex
        assert hasattr(translate_pattern('a', anchor=True, is_regex=True), 'search')

        # glob support
        assert translate_pattern('*.py', anchor=True, is_regex=False).search(
            'filelist.py'
        )

    def test_exclude_pattern(self):
        # return False if no match
        file_list = FileList()
        assert not file_list.exclude_pattern('*.py')

        # return True if files match
        file_list = FileList()
        file_list.files = ['a.py', 'b.py']
        assert file_list.exclude_pattern('*.py')

        # test excludes
        file_list = FileList()
        file_list.files = ['a.py', 'a.txt']
        file_list.exclude_pattern('*.py')
        assert file_list.files == ['a.txt']

    def test_include_pattern(self):
        # return False if no match
        file_list = FileList()
        file_list.set_allfiles([])
        assert not file_list.include_pattern('*.py')

        # return True if files match
        file_list = FileList()
        file_list.set_allfiles(['a.py', 'b.txt'])
        assert file_list.include_pattern('*.py')

        # test * matches all files
        file_list = FileList()
        assert file_list.allfiles is None
        file_list.set_allfiles(['a.py', 'b.txt'])
        file_list.include_pattern('*')
        assert file_list.allfiles == ['a.py', 'b.txt']

    def test_process_template(self, caplog):
        mlp = make_local_path
        # invalid lines
        file_list = FileList()
        for action in (
            'include',
            'exclude',
            'global-include',
            'global-exclude',
            'recursive-include',
            'recursive-exclude',
            'graft',
            'prune',
            'blarg',
        ):
            with pytest.raises(DistutilsTemplateError):
                file_list.process_template_line(action)

        # include
        file_list = FileList()
        file_list.set_allfiles(['a.py', 'b.txt', mlp('d/c.py')])

        file_list.process_template_line('include *.py')
        assert file_list.files == ['a.py']
        self.assertNoWarnings(caplog)

        file_list.process_template_line('include *.rb')
        assert file_list.files == ['a.py']
        self.assertWarnings(caplog)

        # exclude
        file_list = FileList()
        file_list.files = ['a.py', 'b.txt', mlp('d/c.py')]

        file_list.process_template_line('exclude *.py')
        assert file_list.files == ['b.txt', mlp('d/c.py')]
        self.assertNoWarnings(caplog)

        file_list.process_template_line('exclude *.rb')
        assert file_list.files == ['b.txt', mlp('d/c.py')]
        self.assertWarnings(caplog)

        # global-include
        file_list = FileList()
        file_list.set_allfiles(['a.py', 'b.txt', mlp('d/c.py')])

        file_list.process_template_line('global-include *.py')
        assert file_list.files == ['a.py', mlp('d/c.py')]
        self.assertNoWarnings(caplog)

        file_list.process_template_line('global-include *.rb')
        assert file_list.files == ['a.py', mlp('d/c.py')]
        self.assertWarnings(caplog)

        # global-exclude
        file_list = FileList()
        file_list.files = ['a.py', 'b.txt', mlp('d/c.py')]

        file_list.process_template_line('global-exclude *.py')
        assert file_list.files == ['b.txt']
        self.assertNoWarnings(caplog)

        file_list.process_template_line('global-exclude *.rb')
        assert file_list.files == ['b.txt']
        self.assertWarnings(caplog)

        # recursive-include
        file_list = FileList()
        file_list.set_allfiles(['a.py', mlp('d/b.py'), mlp('d/c.txt'), mlp('d/d/e.py')])

        file_list.process_template_line('recursive-include d *.py')
        assert file_list.files == [mlp('d/b.py'), mlp('d/d/e.py')]
        self.assertNoWarnings(caplog)

        file_list.process_template_line('recursive-include e *.py')
        assert file_list.files == [mlp('d/b.py'), mlp('d/d/e.py')]
        self.assertWarnings(caplog)

        # recursive-exclude
        file_list = FileList()
        file_list.files = ['a.py', mlp('d/b.py'), mlp('d/c.txt'), mlp('d/d/e.py')]

        file_list.process_template_line('recursive-exclude d *.py')
        assert file_list.files == ['a.py', mlp('d/c.txt')]
        self.assertNoWarnings(caplog)

        file_list.process_template_line('recursive-exclude e *.py')
        assert file_list.files == ['a.py', mlp('d/c.txt')]
        self.assertWarnings(caplog)

        # graft
        file_list = FileList()
        file_list.set_allfiles(['a.py', mlp('d/b.py'), mlp('d/d/e.py'), mlp('f/f.py')])

        file_list.process_template_line('graft d')
        assert file_list.files == [mlp('d/b.py'), mlp('d/d/e.py')]
        self.assertNoWarnings(caplog)

        file_list.process_template_line('graft e')
        assert file_list.files == [mlp('d/b.py'), mlp('d/d/e.py')]
        self.assertWarnings(caplog)

        # prune
        file_list = FileList()
        file_list.files = ['a.py', mlp('d/b.py'), mlp('d/d/e.py'), mlp('f/f.py')]

        file_list.process_template_line('prune d')
        assert file_list.files == ['a.py', mlp('f/f.py')]
        self.assertNoWarnings(caplog)

        file_list.process_template_line('prune e')
        assert file_list.files == ['a.py', mlp('f/f.py')]
        self.assertWarnings(caplog)
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
class TestFindAll:
    """Tests for distutils.filelist.findall."""

    @os_helper.skip_unless_symlink
    def test_missing_symlink(self, temp_cwd):
        """A dangling symlink must be excluded from the results."""
        os.symlink('foo', 'bar')
        assert filelist.findall() == []

    def test_basic_discovery(self, temp_cwd):
        """
        When findall is called with no parameters or with
        '.' as the parameter, the dot should be omitted from
        the results.
        """
        jaraco.path.build({'foo': {'file1.txt': ''}, 'bar': {'file2.txt': ''}})
        file1 = os.path.join('foo', 'file1.txt')
        file2 = os.path.join('bar', 'file2.txt')
        expected = [file2, file1]
        assert sorted(filelist.findall()) == expected

    def test_non_local_discovery(self, tmp_path):
        """
        When findall is called with another path, the full
        path name should be returned.
        """
        jaraco.path.build({'file1.txt': ''}, tmp_path)
        expected = [str(tmp_path / 'file1.txt')]
        assert filelist.findall(tmp_path) == expected

    @os_helper.skip_unless_symlink
    def test_symlink_loop(self, tmp_path):
        """A symlink back to the parent must not cause infinite traversal."""
        jaraco.path.build(
            {
                'link-to-parent': jaraco.path.Symlink('.'),
                'somefile': '',
            },
            tmp_path,
        )
        files = filelist.findall(tmp_path)
        assert len(files) == 1
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_install.py
ADDED
|
@@ -0,0 +1,245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.install."""
|
| 2 |
+
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
import pathlib
|
| 6 |
+
import site
|
| 7 |
+
import sys
|
| 8 |
+
from distutils import sysconfig
|
| 9 |
+
from distutils.command import install as install_module
|
| 10 |
+
from distutils.command.build_ext import build_ext
|
| 11 |
+
from distutils.command.install import INSTALL_SCHEMES, install
|
| 12 |
+
from distutils.core import Distribution
|
| 13 |
+
from distutils.errors import DistutilsOptionError
|
| 14 |
+
from distutils.extension import Extension
|
| 15 |
+
from distutils.tests import missing_compiler_executable, support
|
| 16 |
+
from distutils.util import is_mingw
|
| 17 |
+
|
| 18 |
+
import pytest
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def _make_ext_name(modname):
    """Append the platform's extension-module suffix to *modname*."""
    suffix = sysconfig.get_config_var('EXT_SUFFIX')
    return f'{modname}{suffix}'
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
@support.combine_markers
|
| 26 |
+
@pytest.mark.usefixtures('save_env')
|
| 27 |
+
class TestInstall(
|
| 28 |
+
support.TempdirManager,
|
| 29 |
+
):
|
| 30 |
+
@pytest.mark.xfail(
|
| 31 |
+
'platform.system() == "Windows" and sys.version_info > (3, 11)',
|
| 32 |
+
reason="pypa/distutils#148",
|
| 33 |
+
)
|
| 34 |
+
def test_home_installation_scheme(self):
|
| 35 |
+
# This ensure two things:
|
| 36 |
+
# - that --home generates the desired set of directory names
|
| 37 |
+
# - test --home is supported on all platforms
|
| 38 |
+
builddir = self.mkdtemp()
|
| 39 |
+
destination = os.path.join(builddir, "installation")
|
| 40 |
+
|
| 41 |
+
dist = Distribution({"name": "foopkg"})
|
| 42 |
+
# script_name need not exist, it just need to be initialized
|
| 43 |
+
dist.script_name = os.path.join(builddir, "setup.py")
|
| 44 |
+
dist.command_obj["build"] = support.DummyCommand(
|
| 45 |
+
build_base=builddir,
|
| 46 |
+
build_lib=os.path.join(builddir, "lib"),
|
| 47 |
+
)
|
| 48 |
+
|
| 49 |
+
cmd = install(dist)
|
| 50 |
+
cmd.home = destination
|
| 51 |
+
cmd.ensure_finalized()
|
| 52 |
+
|
| 53 |
+
assert cmd.install_base == destination
|
| 54 |
+
assert cmd.install_platbase == destination
|
| 55 |
+
|
| 56 |
+
def check_path(got, expected):
|
| 57 |
+
got = os.path.normpath(got)
|
| 58 |
+
expected = os.path.normpath(expected)
|
| 59 |
+
assert got == expected
|
| 60 |
+
|
| 61 |
+
impl_name = sys.implementation.name.replace("cpython", "python")
|
| 62 |
+
libdir = os.path.join(destination, "lib", impl_name)
|
| 63 |
+
check_path(cmd.install_lib, libdir)
|
| 64 |
+
_platlibdir = getattr(sys, "platlibdir", "lib")
|
| 65 |
+
platlibdir = os.path.join(destination, _platlibdir, impl_name)
|
| 66 |
+
check_path(cmd.install_platlib, platlibdir)
|
| 67 |
+
check_path(cmd.install_purelib, libdir)
|
| 68 |
+
check_path(
|
| 69 |
+
cmd.install_headers,
|
| 70 |
+
os.path.join(destination, "include", impl_name, "foopkg"),
|
| 71 |
+
)
|
| 72 |
+
check_path(cmd.install_scripts, os.path.join(destination, "bin"))
|
| 73 |
+
check_path(cmd.install_data, destination)
|
| 74 |
+
|
| 75 |
+
def test_user_site(self, monkeypatch):
|
| 76 |
+
# test install with --user
|
| 77 |
+
# preparing the environment for the test
|
| 78 |
+
self.tmpdir = self.mkdtemp()
|
| 79 |
+
orig_site = site.USER_SITE
|
| 80 |
+
orig_base = site.USER_BASE
|
| 81 |
+
monkeypatch.setattr(site, 'USER_BASE', os.path.join(self.tmpdir, 'B'))
|
| 82 |
+
monkeypatch.setattr(site, 'USER_SITE', os.path.join(self.tmpdir, 'S'))
|
| 83 |
+
monkeypatch.setattr(install_module, 'USER_BASE', site.USER_BASE)
|
| 84 |
+
monkeypatch.setattr(install_module, 'USER_SITE', site.USER_SITE)
|
| 85 |
+
|
| 86 |
+
def _expanduser(path):
|
| 87 |
+
if path.startswith('~'):
|
| 88 |
+
return os.path.normpath(self.tmpdir + path[1:])
|
| 89 |
+
return path
|
| 90 |
+
|
| 91 |
+
monkeypatch.setattr(os.path, 'expanduser', _expanduser)
|
| 92 |
+
|
| 93 |
+
for key in ('nt_user', 'posix_user'):
|
| 94 |
+
assert key in INSTALL_SCHEMES
|
| 95 |
+
|
| 96 |
+
dist = Distribution({'name': 'xx'})
|
| 97 |
+
cmd = install(dist)
|
| 98 |
+
|
| 99 |
+
# making sure the user option is there
|
| 100 |
+
options = [name for name, short, label in cmd.user_options]
|
| 101 |
+
assert 'user' in options
|
| 102 |
+
|
| 103 |
+
# setting a value
|
| 104 |
+
cmd.user = True
|
| 105 |
+
|
| 106 |
+
# user base and site shouldn't be created yet
|
| 107 |
+
assert not os.path.exists(site.USER_BASE)
|
| 108 |
+
assert not os.path.exists(site.USER_SITE)
|
| 109 |
+
|
| 110 |
+
# let's run finalize
|
| 111 |
+
cmd.ensure_finalized()
|
| 112 |
+
|
| 113 |
+
# now they should
|
| 114 |
+
assert os.path.exists(site.USER_BASE)
|
| 115 |
+
assert os.path.exists(site.USER_SITE)
|
| 116 |
+
|
| 117 |
+
assert 'userbase' in cmd.config_vars
|
| 118 |
+
assert 'usersite' in cmd.config_vars
|
| 119 |
+
|
| 120 |
+
actual_headers = os.path.relpath(cmd.install_headers, site.USER_BASE)
|
| 121 |
+
if os.name == 'nt' and not is_mingw():
|
| 122 |
+
site_path = os.path.relpath(os.path.dirname(orig_site), orig_base)
|
| 123 |
+
include = os.path.join(site_path, 'Include')
|
| 124 |
+
else:
|
| 125 |
+
include = sysconfig.get_python_inc(0, '')
|
| 126 |
+
expect_headers = os.path.join(include, 'xx')
|
| 127 |
+
|
| 128 |
+
assert os.path.normcase(actual_headers) == os.path.normcase(expect_headers)
|
| 129 |
+
|
| 130 |
+
def test_handle_extra_path(self):
|
| 131 |
+
dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'})
|
| 132 |
+
cmd = install(dist)
|
| 133 |
+
|
| 134 |
+
# two elements
|
| 135 |
+
cmd.handle_extra_path()
|
| 136 |
+
assert cmd.extra_path == ['path', 'dirs']
|
| 137 |
+
assert cmd.extra_dirs == 'dirs'
|
| 138 |
+
assert cmd.path_file == 'path'
|
| 139 |
+
|
| 140 |
+
# one element
|
| 141 |
+
cmd.extra_path = ['path']
|
| 142 |
+
cmd.handle_extra_path()
|
| 143 |
+
assert cmd.extra_path == ['path']
|
| 144 |
+
assert cmd.extra_dirs == 'path'
|
| 145 |
+
assert cmd.path_file == 'path'
|
| 146 |
+
|
| 147 |
+
# none
|
| 148 |
+
dist.extra_path = cmd.extra_path = None
|
| 149 |
+
cmd.handle_extra_path()
|
| 150 |
+
assert cmd.extra_path is None
|
| 151 |
+
assert cmd.extra_dirs == ''
|
| 152 |
+
assert cmd.path_file is None
|
| 153 |
+
|
| 154 |
+
# three elements (no way !)
|
| 155 |
+
cmd.extra_path = 'path,dirs,again'
|
| 156 |
+
with pytest.raises(DistutilsOptionError):
|
| 157 |
+
cmd.handle_extra_path()
|
| 158 |
+
|
| 159 |
+
def test_finalize_options(self):
|
| 160 |
+
dist = Distribution({'name': 'xx'})
|
| 161 |
+
cmd = install(dist)
|
| 162 |
+
|
| 163 |
+
# must supply either prefix/exec-prefix/home or
|
| 164 |
+
# install-base/install-platbase -- not both
|
| 165 |
+
cmd.prefix = 'prefix'
|
| 166 |
+
cmd.install_base = 'base'
|
| 167 |
+
with pytest.raises(DistutilsOptionError):
|
| 168 |
+
cmd.finalize_options()
|
| 169 |
+
|
| 170 |
+
# must supply either home or prefix/exec-prefix -- not both
|
| 171 |
+
cmd.install_base = None
|
| 172 |
+
cmd.home = 'home'
|
| 173 |
+
with pytest.raises(DistutilsOptionError):
|
| 174 |
+
cmd.finalize_options()
|
| 175 |
+
|
| 176 |
+
# can't combine user with prefix/exec_prefix/home or
|
| 177 |
+
# install_(plat)base
|
| 178 |
+
cmd.prefix = None
|
| 179 |
+
cmd.user = 'user'
|
| 180 |
+
with pytest.raises(DistutilsOptionError):
|
| 181 |
+
cmd.finalize_options()
|
| 182 |
+
|
| 183 |
+
def test_record(self):
|
| 184 |
+
install_dir = self.mkdtemp()
|
| 185 |
+
project_dir, dist = self.create_dist(py_modules=['hello'], scripts=['sayhi'])
|
| 186 |
+
os.chdir(project_dir)
|
| 187 |
+
self.write_file('hello.py', "def main(): print('o hai')")
|
| 188 |
+
self.write_file('sayhi', 'from hello import main; main()')
|
| 189 |
+
|
| 190 |
+
cmd = install(dist)
|
| 191 |
+
dist.command_obj['install'] = cmd
|
| 192 |
+
cmd.root = install_dir
|
| 193 |
+
cmd.record = os.path.join(project_dir, 'filelist')
|
| 194 |
+
cmd.ensure_finalized()
|
| 195 |
+
cmd.run()
|
| 196 |
+
|
| 197 |
+
content = pathlib.Path(cmd.record).read_text(encoding='utf-8')
|
| 198 |
+
|
| 199 |
+
found = [pathlib.Path(line).name for line in content.splitlines()]
|
| 200 |
+
expected = [
|
| 201 |
+
'hello.py',
|
| 202 |
+
f'hello.{sys.implementation.cache_tag}.pyc',
|
| 203 |
+
'sayhi',
|
| 204 |
+
'UNKNOWN-0.0.0-py{}.{}.egg-info'.format(*sys.version_info[:2]),
|
| 205 |
+
]
|
| 206 |
+
assert found == expected
|
| 207 |
+
|
| 208 |
+
def test_record_extensions(self):
    """--record must also capture built extension modules."""
    cmd = missing_compiler_executable()
    if cmd is not None:
        pytest.skip(f'The {cmd!r} command is not found')
    install_dir = self.mkdtemp()
    project_dir, dist = self.create_dist(
        ext_modules=[Extension('xx', ['xxmodule.c'])]
    )
    os.chdir(project_dir)
    support.copy_xxmodule_c(project_dir)

    build_cmd = build_ext(dist)
    support.fixup_build_ext(build_cmd)
    build_cmd.ensure_finalized()

    install_cmd = install(dist)
    dist.command_obj['install'] = install_cmd
    dist.command_obj['build_ext'] = build_cmd
    install_cmd.root = install_dir
    install_cmd.record = os.path.join(project_dir, 'filelist')
    install_cmd.ensure_finalized()
    install_cmd.run()

    record_text = pathlib.Path(install_cmd.record).read_text(encoding='utf-8')

    # Compare basenames only; the record file holds full paths.
    found = [pathlib.Path(entry).name for entry in record_text.splitlines()]
    expected = [
        _make_ext_name('xx'),
        'UNKNOWN-0.0.0-py{}.{}.egg-info'.format(*sys.version_info[:2]),
    ]
    assert found == expected
def test_debug_mode(self, caplog, monkeypatch):
    """With DEBUG enabled, running install emits debug-level log records."""
    # this covers the code called when DEBUG is set
    monkeypatch.setattr(install_module, 'DEBUG', True)
    caplog.set_level(logging.DEBUG)
    self.test_record()
    assert any(record.levelno == logging.DEBUG for record in caplog.records)
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_install_data.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.install_data."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import pathlib
|
| 5 |
+
from distutils.command.install_data import install_data
|
| 6 |
+
from distutils.tests import support
|
| 7 |
+
|
| 8 |
+
import pytest
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@pytest.mark.usefixtures('save_env')
class TestInstallData(
    support.TempdirManager,
):
    def test_simple_run(self):
        """install_data must copy plain files, (dir, [files]) tuples and
        pathlib.Path entries, honoring warn_dir and root."""
        pkg_dir, dist = self.create_dist()
        cmd = install_data(dist)
        cmd.install_dir = inst = os.path.join(pkg_dir, 'inst')

        # data_files can contain
        #  - simple files
        #  - a Path object
        #  - a tuple with a path, and a list of file
        one = os.path.join(pkg_dir, 'one')
        self.write_file(one, 'xxx')
        inst2 = os.path.join(pkg_dir, 'inst2')
        two = os.path.join(pkg_dir, 'two')
        self.write_file(two, 'xxx')
        three = pathlib.Path(pkg_dir) / 'three'
        self.write_file(three, 'xxx')

        cmd.data_files = [one, (inst2, [two]), three]
        assert cmd.get_inputs() == [one, (inst2, [two]), three]

        # let's run the command
        cmd.ensure_finalized()
        cmd.run()

        # let's check the result
        assert len(cmd.get_outputs()) == 3
        # FIX: was os.path.split(one)[-1], which re-checked 'one' and
        # never actually verified the install of the Path entry 'three'.
        rthree = os.path.split(three)[-1]
        assert os.path.exists(os.path.join(inst, rthree))
        rtwo = os.path.split(two)[-1]
        assert os.path.exists(os.path.join(inst2, rtwo))
        rone = os.path.split(one)[-1]
        assert os.path.exists(os.path.join(inst, rone))
        cmd.outfiles = []

        # let's try with warn_dir one
        cmd.warn_dir = True
        cmd.ensure_finalized()
        cmd.run()

        # let's check the result
        assert len(cmd.get_outputs()) == 3
        assert os.path.exists(os.path.join(inst, rthree))
        assert os.path.exists(os.path.join(inst2, rtwo))
        assert os.path.exists(os.path.join(inst, rone))
        cmd.outfiles = []

        # now using root and empty dir
        cmd.root = os.path.join(pkg_dir, 'root')
        inst5 = os.path.join(pkg_dir, 'inst5')
        four = os.path.join(cmd.install_dir, 'four')
        self.write_file(four, 'xx')
        cmd.data_files = [one, (inst2, [two]), three, ('inst5', [four]), (inst5, [])]
        cmd.ensure_finalized()
        cmd.run()

        # let's check the result
        assert len(cmd.get_outputs()) == 5
        assert os.path.exists(os.path.join(inst, rthree))
        assert os.path.exists(os.path.join(inst2, rtwo))
        assert os.path.exists(os.path.join(inst, rone))
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_install_headers.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.install_headers."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
from distutils.command.install_headers import install_headers
|
| 5 |
+
from distutils.tests import support
|
| 6 |
+
|
| 7 |
+
import pytest
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@pytest.mark.usefixtures('save_env')
class TestInstallHeaders(
    support.TempdirManager,
):
    """Tests for distutils.command.install_headers."""

    def test_simple_run(self):
        # Create two header files to install.
        src_dir = self.mkdtemp()
        headers = [os.path.join(src_dir, name) for name in ('header1', 'header2')]
        for header in headers:
            self.write_file(header)

        pkg_dir, dist = self.create_dist(headers=headers)
        cmd = install_headers(dist)
        assert cmd.get_inputs() == headers

        # Run the command against a fresh install dir.
        cmd.install_dir = os.path.join(pkg_dir, 'inst')
        cmd.ensure_finalized()
        cmd.run()

        # Both headers must be reported as outputs.
        assert len(cmd.get_outputs()) == 2
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_install_lib.py
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.install_data."""
|
| 2 |
+
|
| 3 |
+
import importlib.util
|
| 4 |
+
import os
|
| 5 |
+
import sys
|
| 6 |
+
from distutils.command.install_lib import install_lib
|
| 7 |
+
from distutils.errors import DistutilsOptionError
|
| 8 |
+
from distutils.extension import Extension
|
| 9 |
+
from distutils.tests import support
|
| 10 |
+
|
| 11 |
+
import pytest
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@support.combine_markers
@pytest.mark.usefixtures('save_env')
class TestInstallLib(
    support.TempdirManager,
):
    """Tests for distutils.command.install_lib."""

    def _make_packaged_cmd(self):
        # Shared setup: a dist with one package ('spam') and one
        # extension module ('foo'), wrapped in an install_lib command.
        project_dir, dist = self.create_dist()
        os.chdir(project_dir)
        os.mkdir('spam')
        cmd = install_lib(dist)

        cmd.compile = cmd.optimize = 1
        cmd.install_dir = self.mkdtemp()
        init_path = os.path.join(project_dir, 'spam', '__init__.py')
        self.write_file(init_path, '# python package')
        cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
        cmd.distribution.packages = ['spam']
        cmd.distribution.script_name = 'setup.py'
        return cmd

    def test_finalize_options(self):
        dist = self.create_dist()[1]
        cmd = install_lib(dist)

        cmd.finalize_options()
        assert cmd.compile == 1
        assert cmd.optimize == 0

        # optimize must be 0, 1, or 2
        for bogus in ('foo', '4'):
            cmd.optimize = bogus
            with pytest.raises(DistutilsOptionError):
                cmd.finalize_options()

        cmd.optimize = '2'
        cmd.finalize_options()
        assert cmd.optimize == 2

    @pytest.mark.skipif('sys.dont_write_bytecode')
    def test_byte_compile(self):
        project_dir, dist = self.create_dist()
        os.chdir(project_dir)
        cmd = install_lib(dist)
        cmd.compile = cmd.optimize = 1

        source = os.path.join(project_dir, 'foo.py')
        self.write_file(source, '# python file')
        cmd.byte_compile([source])
        # Both the plain and the optimized bytecode files must appear.
        plain_pyc = importlib.util.cache_from_source('foo.py', optimization='')
        opt_pyc = importlib.util.cache_from_source(
            'foo.py', optimization=cmd.optimize
        )
        assert os.path.exists(plain_pyc)
        assert os.path.exists(opt_pyc)

    def test_get_outputs(self):
        cmd = self._make_packaged_cmd()

        # get_outputs should return 4 elements: spam/__init__.py and .pyc,
        # foo.import-tag-abiflags.so / foo.pyd
        outputs = cmd.get_outputs()
        assert len(outputs) == 4, outputs

    def test_get_inputs(self):
        cmd = self._make_packaged_cmd()

        # get_inputs should return 2 elements: spam/__init__.py and
        # foo.import-tag-abiflags.so / foo.pyd
        inputs = cmd.get_inputs()
        assert len(inputs) == 2, inputs

    def test_dont_write_bytecode(self, caplog):
        # byte_compile must bail out when bytecode writing is disabled
        dist = self.create_dist()[1]
        cmd = install_lib(dist)
        cmd.compile = True
        cmd.optimize = 1

        saved = sys.dont_write_bytecode
        sys.dont_write_bytecode = True
        try:
            cmd.byte_compile([])
        finally:
            sys.dont_write_bytecode = saved

        assert 'byte-compiling is disabled' in caplog.messages[0]
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_modified.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils._modified."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import types
|
| 5 |
+
from distutils._modified import newer, newer_group, newer_pairwise, newer_pairwise_group
|
| 6 |
+
from distutils.errors import DistutilsFileError
|
| 7 |
+
from distutils.tests import support
|
| 8 |
+
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class TestDepUtil(support.TempdirManager):
    """Tests for distutils._modified freshness helpers."""

    def test_newer(self):
        tmpdir = self.mkdtemp()
        new_file = os.path.join(tmpdir, 'new')
        old_file = os.path.abspath(__file__)

        # A missing 'new_file' raises DistutilsFileError.
        with pytest.raises(DistutilsFileError):
            newer(new_file, old_file)

        # True when 'new_file' exists and is more recent than 'old_file',
        # or when the comparison target does not exist at all.
        self.write_file(new_file)
        assert newer(new_file, 'I_dont_exist')
        assert newer(new_file, old_file)

        # False when 'old_file' is at least as recent as 'new_file'.
        assert not newer(old_file, new_file)

    def _setup_1234(self):
        # Two fresh sources, one old source (this file), one fresh target.
        tmpdir = self.mkdtemp()
        sources = os.path.join(tmpdir, 'sources')
        targets = os.path.join(tmpdir, 'targets')
        os.mkdir(sources)
        os.mkdir(targets)
        one = os.path.join(sources, 'one')
        two = os.path.join(sources, 'two')
        three = os.path.abspath(__file__)  # I am the old file
        four = os.path.join(targets, 'four')
        for fresh in (one, two, four):
            self.write_file(fresh)
        return one, two, three, four

    def test_newer_pairwise(self):
        one, two, three, four = self._setup_1234()
        assert newer_pairwise([one, two], [three, four]) == ([one], [three])

    def test_newer_pairwise_mismatch(self):
        one, two, three, four = self._setup_1234()

        # Source and target lists must have matching lengths.
        with pytest.raises(ValueError):
            newer_pairwise([one], [three, four])

        with pytest.raises(ValueError):
            newer_pairwise([one, two], [three])

    def test_newer_pairwise_empty(self):
        assert newer_pairwise([], []) == ([], [])

    def test_newer_pairwise_fresh(self):
        one, two, three, four = self._setup_1234()
        assert newer_pairwise([one, three], [two, four]) == ([], [])

    def test_newer_group(self):
        tmpdir = self.mkdtemp()
        sources = os.path.join(tmpdir, 'sources')
        os.mkdir(sources)
        one = os.path.join(sources, 'one')
        two = os.path.join(sources, 'two')
        three = os.path.join(sources, 'three')
        old_file = os.path.abspath(__file__)

        # True when 'old_file' is out-of-date with respect to any file
        # in the source group.
        for fresh in (one, two, three):
            self.write_file(fresh)
        assert newer_group([one, two, three], old_file)
        assert not newer_group([one, two, old_file], three)

        # A missing source raises by default ...
        os.remove(one)
        with pytest.raises(OSError):
            newer_group([one, two, old_file], three)

        # ... can be ignored ...
        assert not newer_group([one, two, old_file], three, missing='ignore')

        # ... or treated as newer than the target.
        assert newer_group([one, two, old_file], three, missing='newer')
| 94 |
+
|
| 95 |
+
|
| 96 |
+
@pytest.fixture
def groups_target(tmp_path):
    """
    Set up some older sources, a target, and newer sources.

    Returns a simple namespace with these values.
    """
    names = ['older.c', 'older.h', 'target.o', 'newer.c', 'newer.h']
    paths = [tmp_path / name for name in names]

    # Give the files strictly increasing modification times so the
    # "older"/"newer" relationships are unambiguous.
    for stamp, file in enumerate(paths):
        file.write_text('', encoding='utf-8')
        os.utime(file, (stamp, stamp))

    return types.SimpleNamespace(older=paths[:2], target=paths[2], newer=paths[3:])
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def test_newer_pairwise_group(groups_target):
    """Only genuinely newer source groups should be reported."""
    stale = newer_pairwise_group([groups_target.older], [groups_target.target])
    fresh = newer_pairwise_group([groups_target.newer], [groups_target.target])
    assert stale == ([], [])
    assert fresh == ([groups_target.newer], [groups_target.target])
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def test_newer_group_no_sources_no_target(tmp_path):
    """
    Consider no sources and no target "newer".
    """
    missing_target = tmp_path / 'does-not-exist'
    assert newer_group([], str(missing_target))
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_msvccompiler.py
ADDED
|
@@ -0,0 +1,137 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils._msvccompiler."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
import sysconfig
|
| 6 |
+
import threading
|
| 7 |
+
import unittest.mock as mock
|
| 8 |
+
from distutils import _msvccompiler
|
| 9 |
+
from distutils.errors import DistutilsPlatformError
|
| 10 |
+
from distutils.tests import support
|
| 11 |
+
from distutils.util import get_platform
|
| 12 |
+
|
| 13 |
+
import pytest
|
| 14 |
+
|
| 15 |
+
needs_winreg = pytest.mark.skipif('not hasattr(_msvccompiler, "winreg")')
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class Testmsvccompiler(support.TempdirManager):
    """Tests for distutils._msvccompiler."""

    def test_no_compiler(self, monkeypatch):
        # _get_vc_env must raise DistutilsPlatformError when no
        # compiler installation can be located.
        monkeypatch.setattr(
            _msvccompiler, '_find_vcvarsall', lambda plat_spec: (None, None)
        )

        with pytest.raises(DistutilsPlatformError):
            _msvccompiler._get_vc_env(
                'wont find this version',
            )

    @pytest.mark.skipif(
        not sysconfig.get_platform().startswith("win"),
        reason="Only run test for non-mingw Windows platforms",
    )
    @pytest.mark.parametrize(
        "plat_name, expected",
        [
            ("win-arm64", "win-arm64"),
            ("win-amd64", "win-amd64"),
            (None, get_platform()),
        ],
    )
    def test_cross_platform_compilation_paths(self, monkeypatch, plat_name, expected):
        """
        Ensure a specified target platform is passed to _get_vcvars_spec.
        """
        compiler = _msvccompiler.MSVCCompiler()

        def fake_spec(host_platform, platform):
            assert platform == expected

        monkeypatch.setattr(_msvccompiler, '_get_vcvars_spec', fake_spec)
        compiler.initialize(plat_name)

    @needs_winreg
    def test_get_vc_env_unicode(self):
        test_var = 'ṰḖṤṪ┅ṼẨṜ'
        test_value = '₃⁴₅'

        # Ensure we don't early exit from _get_vc_env
        old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None)
        os.environ[test_var] = test_value
        try:
            env = _msvccompiler._get_vc_env('x86')
            # _get_vc_env lower-cases the environment keys it returns.
            assert test_var.lower() in env
            assert test_value == env[test_var.lower()]
        finally:
            os.environ.pop(test_var)
            if old_distutils_use_sdk:
                os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk

    @needs_winreg
    @pytest.mark.parametrize('ver', (2015, 2017))
    def test_get_vc(self, ver):
        # This function cannot be mocked, so pass if VC is found
        # and skip otherwise.
        lookup = getattr(_msvccompiler, f'_find_vc{ver}')
        expected_version = {2015: 14, 2017: 15}[ver]
        version, path = lookup()
        if not version:
            pytest.skip(f"VS {ver} is not installed")
        assert version >= expected_version
        assert os.path.isdir(path)
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class CheckThread(threading.Thread):
    """Thread that records any exception raised by its target.

    Truthiness reports success: bool(thread) is False once the target
    has raised.
    """

    # sys.exc_info() triple captured from the target, or None on success.
    exc_info = None

    def run(self):
        """Run the target, capturing any exception instead of printing it."""
        try:
            super().run()
        except Exception:
            self.exc_info = sys.exc_info()

    def __bool__(self):
        """True iff the target completed without raising."""
        return self.exc_info is None
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class TestSpawn:
    def test_concurrent_safe(self):
        """
        Concurrent calls to spawn should have consistent results.
        """
        compiler = _msvccompiler.MSVCCompiler()
        compiler._paths = "expected"
        inner_cmd = 'import os; assert os.environ["PATH"] == "expected"'
        command = [sys.executable, '-c', inner_cmd]

        workers = [
            CheckThread(target=compiler.spawn, args=[command]) for _ in range(100)
        ]
        for worker in workers:
            worker.start()
        for worker in workers:
            worker.join()
        # Every thread must have completed without an exception.
        assert all(workers)

    def test_concurrent_safe_fallback(self):
        """
        If CCompiler.spawn has been monkey-patched without support
        for an env, it should still execute.
        """
        from distutils import ccompiler

        compiler = _msvccompiler.MSVCCompiler()
        compiler._paths = "expected"

        def CCompiler_spawn(self, cmd):
            "A spawn without an env argument."
            assert os.environ["PATH"] == "expected"

        with mock.patch.object(ccompiler.CCompiler, 'spawn', CCompiler_spawn):
            compiler.spawn(["n/a"])

        # The temporary PATH override must not leak out of spawn.
        assert os.environ.get("PATH") != "expected"
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_sdist.py
ADDED
|
@@ -0,0 +1,470 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.command.sdist."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import pathlib
|
| 5 |
+
import shutil # noqa: F401
|
| 6 |
+
import tarfile
|
| 7 |
+
import zipfile
|
| 8 |
+
from distutils.archive_util import ARCHIVE_FORMATS
|
| 9 |
+
from distutils.command.sdist import sdist, show_formats
|
| 10 |
+
from distutils.core import Distribution
|
| 11 |
+
from distutils.errors import DistutilsOptionError
|
| 12 |
+
from distutils.filelist import FileList
|
| 13 |
+
from os.path import join
|
| 14 |
+
from textwrap import dedent
|
| 15 |
+
|
| 16 |
+
import jaraco.path
|
| 17 |
+
import path
|
| 18 |
+
import pytest
|
| 19 |
+
from more_itertools import ilen
|
| 20 |
+
|
| 21 |
+
from . import support
|
| 22 |
+
from .unix_compat import grp, pwd, require_uid_0, require_unix_id
|
| 23 |
+
|
| 24 |
+
SETUP_PY = """
|
| 25 |
+
from distutils.core import setup
|
| 26 |
+
import somecode
|
| 27 |
+
|
| 28 |
+
setup(name='fake')
|
| 29 |
+
"""
|
| 30 |
+
|
| 31 |
+
MANIFEST = """\
|
| 32 |
+
# file GENERATED by distutils, do NOT edit
|
| 33 |
+
README
|
| 34 |
+
buildout.cfg
|
| 35 |
+
inroot.txt
|
| 36 |
+
setup.py
|
| 37 |
+
data%(sep)sdata.dt
|
| 38 |
+
scripts%(sep)sscript.py
|
| 39 |
+
some%(sep)sfile.txt
|
| 40 |
+
some%(sep)sother_file.txt
|
| 41 |
+
somecode%(sep)s__init__.py
|
| 42 |
+
somecode%(sep)sdoc.dat
|
| 43 |
+
somecode%(sep)sdoc.txt
|
| 44 |
+
"""
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
@pytest.fixture(autouse=True)
def project_dir(request, distutils_managed_tempdir):
    """Build a minimal sdist project and chdir into it for the test."""
    self = request.instance
    self.tmp_dir = self.mkdtemp()
    layout = {
        'somecode': {
            '__init__.py': '#',
        },
        'README': 'xxx',
        'setup.py': SETUP_PY,
    }
    jaraco.path.build(layout, self.tmp_dir)
    with path.Path(self.tmp_dir):
        yield
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def clean_lines(filepath):
    """Yield the stripped, non-empty lines of the UTF-8 text file at *filepath*."""
    text = pathlib.Path(filepath).read_text(encoding='utf-8')
    for line in text.splitlines():
        stripped = line.strip()
        if stripped:
            yield stripped
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class TestSDist(support.TempdirManager):
|
| 71 |
+
def get_cmd(self, metadata=None):
    """Returns a cmd"""
    default_metadata = {
        'name': 'ns.fake--pkg',
        'version': '1.0',
        'url': 'xxx',
        'author': 'xxx',
        'author_email': 'xxx',
    }
    dist = Distribution(default_metadata if metadata is None else metadata)
    dist.script_name = 'setup.py'
    dist.packages = ['somecode']
    dist.include_package_data = True
    cmd = sdist(dist)
    cmd.dist_dir = 'dist'
    return dist, cmd
|
| 88 |
+
|
| 89 |
+
@pytest.mark.usefixtures('needs_zlib')
def test_prune_file_list(self):
    # this test creates a project with some VCS dirs and an NFS rename
    # file, then launches sdist to check they get pruned on all systems

    # creating VCS directories with some files in them
    for vcs_dir, tracked in (('.svn', 'ok.py'), ('.hg', 'ok'), ('.git', 'ok')):
        os.mkdir(join(self.tmp_dir, 'somecode', vcs_dir))
        self.write_file((self.tmp_dir, 'somecode', vcs_dir, tracked), 'xxx')

    self.write_file((self.tmp_dir, 'somecode', '.nfs0001'), 'xxx')

    # now building a sdist
    dist, cmd = self.get_cmd()

    # zip is available universally
    # (tar might not be installed under win32)
    cmd.formats = ['zip']

    cmd.ensure_finalized()
    cmd.run()

    # now let's check what we have
    dist_folder = join(self.tmp_dir, 'dist')
    assert os.listdir(dist_folder) == ['ns_fake_pkg-1.0.zip']

    with zipfile.ZipFile(join(dist_folder, 'ns_fake_pkg-1.0.zip')) as zip_file:
        content = zip_file.namelist()

    # making sure everything has been pruned correctly
    expected = [
        '',
        'PKG-INFO',
        'README',
        'setup.py',
        'somecode/',
        'somecode/__init__.py',
    ]
    assert sorted(content) == ['ns_fake_pkg-1.0/' + x for x in expected]
|
| 137 |
+
|
| 138 |
+
@pytest.mark.usefixtures('needs_zlib')
@pytest.mark.skipif("not shutil.which('tar')")
@pytest.mark.skipif("not shutil.which('gzip')")
def test_make_distribution(self):
    # now building a sdist
    dist, cmd = self.get_cmd()

    dist_folder = join(self.tmp_dir, 'dist')
    expected = ['ns_fake_pkg-1.0.tar', 'ns_fake_pkg-1.0.tar.gz']

    # creating a gztar then a tar
    cmd.formats = ['gztar', 'tar']
    cmd.ensure_finalized()
    cmd.run()

    # making sure we have two files
    assert sorted(os.listdir(dist_folder)) == expected

    for archive in expected:
        os.remove(join(dist_folder, archive))

    # now trying a tar then a gztar
    cmd.formats = ['tar', 'gztar']

    cmd.ensure_finalized()
    cmd.run()

    assert sorted(os.listdir(dist_folder)) == expected
|
| 168 |
+
|
| 169 |
+
    @pytest.mark.usefixtures('needs_zlib')
    def test_add_defaults(self):
        """add_defaults picks up package_data, data_files and scripts.

        Also verifies VCS dirs are pruned and that the generated MANIFEST
        matches the module-level MANIFEST template.
        """
        # https://bugs.python.org/issue2279

        # add_default should also include
        # data_files and package_data
        dist, cmd = self.get_cmd()

        # filling data_files by pointing files
        # in package_data
        dist.package_data = {'': ['*.cfg', '*.dat'], 'somecode': ['*.txt']}
        self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
        self.write_file((self.tmp_dir, 'somecode', 'doc.dat'), '#')

        # adding some data in data_files
        data_dir = join(self.tmp_dir, 'data')
        os.mkdir(data_dir)
        self.write_file((data_dir, 'data.dt'), '#')
        some_dir = join(self.tmp_dir, 'some')
        os.mkdir(some_dir)
        # make sure VCS directories are pruned (#14004)
        hg_dir = join(self.tmp_dir, '.hg')
        os.mkdir(hg_dir)
        self.write_file((hg_dir, 'last-message.txt'), '#')
        # a buggy regex used to prevent this from working on windows (#6884)
        self.write_file((self.tmp_dir, 'buildout.cfg'), '#')
        self.write_file((self.tmp_dir, 'inroot.txt'), '#')
        self.write_file((some_dir, 'file.txt'), '#')
        self.write_file((some_dir, 'other_file.txt'), '#')

        # 'notexisting' is deliberately absent: missing entries must not break
        dist.data_files = [
            ('data', ['data/data.dt', 'buildout.cfg', 'inroot.txt', 'notexisting']),
            'some/file.txt',
            'some/other_file.txt',
        ]

        # adding a script
        script_dir = join(self.tmp_dir, 'scripts')
        os.mkdir(script_dir)
        self.write_file((script_dir, 'script.py'), '#')
        dist.scripts = [join('scripts', 'script.py')]

        cmd.formats = ['zip']
        cmd.use_defaults = True

        cmd.ensure_finalized()
        cmd.run()

        # now let's check what we have
        dist_folder = join(self.tmp_dir, 'dist')
        files = os.listdir(dist_folder)
        assert files == ['ns_fake_pkg-1.0.zip']

        zip_file = zipfile.ZipFile(join(dist_folder, 'ns_fake_pkg-1.0.zip'))
        try:
            content = zip_file.namelist()
        finally:
            zip_file.close()

        # making sure everything was added
        expected = [
            '',
            'PKG-INFO',
            'README',
            'buildout.cfg',
            'data/',
            'data/data.dt',
            'inroot.txt',
            'scripts/',
            'scripts/script.py',
            'setup.py',
            'some/',
            'some/file.txt',
            'some/other_file.txt',
            'somecode/',
            'somecode/__init__.py',
            'somecode/doc.dat',
            'somecode/doc.txt',
        ]
        assert sorted(content) == ['ns_fake_pkg-1.0/' + x for x in expected]

        # checking the MANIFEST
        manifest = pathlib.Path(self.tmp_dir, 'MANIFEST').read_text(encoding='utf-8')
        assert manifest == MANIFEST % {'sep': os.sep}
|
| 253 |
+
|
| 254 |
+
@staticmethod
|
| 255 |
+
def warnings(messages, prefix='warning: '):
|
| 256 |
+
return [msg for msg in messages if msg.startswith(prefix)]
|
| 257 |
+
|
| 258 |
+
    @pytest.mark.usefixtures('needs_zlib')
    def test_metadata_check_option(self, caplog):
        """The 'check' subcommand warns on incomplete metadata only."""
        # testing the `metadata-check` option
        dist, cmd = self.get_cmd(metadata={})

        # this should raise some warnings !
        # with the `check` subcommand
        cmd.ensure_finalized()
        cmd.run()
        assert len(self.warnings(caplog.messages, 'warning: check: ')) == 1

        # trying with a complete set of metadata
        caplog.clear()
        dist, cmd = self.get_cmd()
        cmd.ensure_finalized()
        cmd.metadata_check = 0
        cmd.run()
        assert len(self.warnings(caplog.messages, 'warning: check: ')) == 0
|
| 276 |
+
|
| 277 |
+
    def test_show_formats(self, capsys):
        """show_formats prints one '--formats=' line per known archive format."""
        show_formats()

        # the output should be a header line + one line per format
        num_formats = len(ARCHIVE_FORMATS.keys())
        output = [
            line
            for line in capsys.readouterr().out.split('\n')
            if line.strip().startswith('--formats=')
        ]
        assert len(output) == num_formats
|
| 288 |
+
|
| 289 |
+
    def test_finalize_options(self):
        """finalize_options fills defaults and rejects bad 'formats' values."""
        dist, cmd = self.get_cmd()
        cmd.finalize_options()

        # default options set by finalize
        assert cmd.manifest == 'MANIFEST'
        assert cmd.template == 'MANIFEST.in'
        assert cmd.dist_dir == 'dist'

        # formats has to be a string splitable on (' ', ',') or
        # a stringlist
        cmd.formats = 1
        with pytest.raises(DistutilsOptionError):
            cmd.finalize_options()
        cmd.formats = ['zip']
        cmd.finalize_options()

        # formats has to be known
        cmd.formats = 'supazipa'
        with pytest.raises(DistutilsOptionError):
            cmd.finalize_options()
|
| 310 |
+
|
| 311 |
+
# the following tests make sure there is a nice error message instead
|
| 312 |
+
# of a traceback when parsing an invalid manifest template
|
| 313 |
+
|
| 314 |
+
    def _check_template(self, content, caplog):
        """Write ``content`` as MANIFEST.in and assert it produces one warning.

        Used by the invalid-template tests to check that a bad manifest
        template yields a warning rather than a traceback.
        """
        dist, cmd = self.get_cmd()
        os.chdir(self.tmp_dir)
        self.write_file('MANIFEST.in', content)
        cmd.ensure_finalized()
        cmd.filelist = FileList()
        cmd.read_template()
        assert len(self.warnings(caplog.messages)) == 1
|
| 322 |
+
|
| 323 |
+
    def test_invalid_template_unknown_command(self, caplog):
        """An unknown manifest command warns instead of raising."""
        self._check_template('taunt knights *', caplog)
|
| 325 |
+
|
| 326 |
+
    def test_invalid_template_wrong_arguments(self, caplog):
        """A manifest command with missing arguments warns instead of raising."""
        # this manifest command takes one argument
        self._check_template('prune', caplog)
|
| 329 |
+
|
| 330 |
+
    @pytest.mark.skipif("platform.system() != 'Windows'")
    def test_invalid_template_wrong_path(self, caplog):
        """A trailing slash in a manifest path warns on Windows."""
        # on Windows, trailing slashes are not allowed
        # this used to crash instead of raising a warning: #8286
        self._check_template('include examples/', caplog)
|
| 335 |
+
|
| 336 |
+
    @pytest.mark.usefixtures('needs_zlib')
    def test_get_file_list(self):
        """MANIFEST is recalculated when package contents change."""
        # make sure MANIFEST is recalculated
        dist, cmd = self.get_cmd()

        # filling data_files by pointing files in package_data
        dist.package_data = {'somecode': ['*.txt']}
        self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
        cmd.formats = ['gztar']
        cmd.ensure_finalized()
        cmd.run()

        assert ilen(clean_lines(cmd.manifest)) == 5

        # adding a file
        self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#')

        # make sure build_py is reinitialized, like a fresh run
        build_py = dist.get_command_obj('build_py')
        build_py.finalized = False
        build_py.ensure_finalized()

        cmd.run()

        manifest2 = list(clean_lines(cmd.manifest))

        # do we have the new file in MANIFEST ?
        assert len(manifest2) == 6
        assert 'doc2.txt' in manifest2[-1]
|
| 365 |
+
|
| 366 |
+
    @pytest.mark.usefixtures('needs_zlib')
    def test_manifest_marker(self):
        """Autogenerated MANIFESTs start with the GENERATED marker line."""
        # check that autogenerated MANIFESTs have a marker
        dist, cmd = self.get_cmd()
        cmd.ensure_finalized()
        cmd.run()

        assert (
            next(clean_lines(cmd.manifest))
            == '# file GENERATED by distutils, do NOT edit'
        )
|
| 377 |
+
|
| 378 |
+
    @pytest.mark.usefixtures('needs_zlib')
    def test_manifest_comments(self):
        """'#'-comment lines in MANIFEST are skipped; only real entries count."""
        # make sure comments don't cause exceptions or wrong includes
        contents = dedent(
            """\
            # bad.py
            #bad.py
            good.py
            """
        )
        dist, cmd = self.get_cmd()
        cmd.ensure_finalized()
        self.write_file((self.tmp_dir, cmd.manifest), contents)
        self.write_file((self.tmp_dir, 'good.py'), '# pick me!')
        self.write_file((self.tmp_dir, 'bad.py'), "# don't pick me!")
        self.write_file((self.tmp_dir, '#bad.py'), "# don't pick me!")
        cmd.run()
        assert cmd.filelist.files == ['good.py']
|
| 396 |
+
|
| 397 |
+
    @pytest.mark.usefixtures('needs_zlib')
    def test_manual_manifest(self):
        """A MANIFEST without the generated marker is used as-is."""
        # check that a MANIFEST without a marker is left alone
        dist, cmd = self.get_cmd()
        cmd.formats = ['gztar']
        cmd.ensure_finalized()
        self.write_file((self.tmp_dir, cmd.manifest), 'README.manual')
        self.write_file(
            (self.tmp_dir, 'README.manual'),
            'This project maintains its MANIFEST file itself.',
        )
        cmd.run()
        assert cmd.filelist.files == ['README.manual']

        assert list(clean_lines(cmd.manifest)) == ['README.manual']

        archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.0.tar.gz')
        archive = tarfile.open(archive_name)
        try:
            filenames = [tarinfo.name for tarinfo in archive]
        finally:
            archive.close()
        assert sorted(filenames) == [
            'ns_fake_pkg-1.0',
            'ns_fake_pkg-1.0/PKG-INFO',
            'ns_fake_pkg-1.0/README.manual',
        ]
|
| 424 |
+
|
| 425 |
+
    @pytest.mark.usefixtures('needs_zlib')
    @require_unix_id
    @require_uid_0
    @pytest.mark.skipif("not shutil.which('tar')")
    @pytest.mark.skipif("not shutil.which('gzip')")
    def test_make_distribution_owner_group(self):
        """Archive members honor explicit owner/group; default to current uid."""
        # now building a sdist
        dist, cmd = self.get_cmd()

        # creating a gztar and specifying the owner+group
        cmd.formats = ['gztar']
        cmd.owner = pwd.getpwuid(0)[0]
        cmd.group = grp.getgrgid(0)[0]
        cmd.ensure_finalized()
        cmd.run()

        # making sure we have the good rights
        archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.0.tar.gz')
        archive = tarfile.open(archive_name)
        try:
            for member in archive.getmembers():
                assert member.uid == 0
                assert member.gid == 0
        finally:
            archive.close()

        # building a sdist again
        dist, cmd = self.get_cmd()

        # creating a gztar
        cmd.formats = ['gztar']
        cmd.ensure_finalized()
        cmd.run()

        # making sure we have the good rights
        archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.0.tar.gz')
        archive = tarfile.open(archive_name)

        # note that we are not testing the group ownership here
        # because, depending on the platforms and the container
        # rights (see #7408)
        try:
            for member in archive.getmembers():
                assert member.uid == os.getuid()
        finally:
            archive.close()
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_sysconfig.py
ADDED
|
@@ -0,0 +1,319 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.sysconfig."""
|
| 2 |
+
|
| 3 |
+
"""Tests for distutils.sysconfig."""

import contextlib
import distutils
import os
import pathlib
import subprocess
import sys
from distutils import sysconfig
from distutils.ccompiler import new_compiler  # noqa: F401
from distutils.unixccompiler import UnixCCompiler

import jaraco.envs
import jaraco.path
import path
import pytest
from jaraco.text import trim
from test.support import swap_item
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def _gen_makefile(root, contents):
    """Write dedented ``contents`` as ``root/Makefile`` and return its path."""
    jaraco.path.build({'Makefile': trim(contents)}, root)
    return root / 'Makefile'
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
@pytest.mark.usefixtures('save_env')
class TestSysconfig:
    def test_get_config_h_filename(self):
        """get_config_h_filename points at an existing header file."""
        config_h = sysconfig.get_config_h_filename()
        assert os.path.isfile(config_h)
|
| 30 |
+
|
| 31 |
+
    @pytest.mark.skipif("platform.system() == 'Windows'")
    @pytest.mark.skipif("sys.implementation.name != 'cpython'")
    def test_get_makefile_filename(self):
        """get_makefile_filename points at an existing Makefile (CPython/POSIX)."""
        makefile = sysconfig.get_makefile_filename()
        assert os.path.isfile(makefile)
|
| 36 |
+
|
| 37 |
+
    def test_get_python_lib(self, tmp_path):
        """get_python_lib varies with the 'prefix' argument."""
        assert sysconfig.get_python_lib() != sysconfig.get_python_lib(prefix=tmp_path)
|
| 39 |
+
|
| 40 |
+
    def test_get_config_vars(self):
        """get_config_vars returns a non-empty dict."""
        cvars = sysconfig.get_config_vars()
        assert isinstance(cvars, dict)
        assert cvars
|
| 44 |
+
|
| 45 |
+
    @pytest.mark.skipif('sysconfig.IS_PYPY')
    @pytest.mark.skipif('sysconfig.python_build')
    @pytest.mark.xfail('platform.system() == "Windows"')
    def test_srcdir_simple(self):
        """'srcdir' is an absolute existing dir containing the Makefile."""
        # See #15364.
        srcdir = pathlib.Path(sysconfig.get_config_var('srcdir'))

        assert srcdir.absolute()
        assert srcdir.is_dir()

        makefile = pathlib.Path(sysconfig.get_makefile_filename())
        assert makefile.parent.samefile(srcdir)
|
| 57 |
+
|
| 58 |
+
    @pytest.mark.skipif('sysconfig.IS_PYPY')
    @pytest.mark.skipif('not sysconfig.python_build')
    def test_srcdir_python_build(self):
        """In an uninstalled build, 'srcdir' is a full source checkout."""
        # See #15364.
        srcdir = pathlib.Path(sysconfig.get_config_var('srcdir'))

        # The python executable has not been installed so srcdir
        # should be a full source checkout.
        Python_h = srcdir.joinpath('Include', 'Python.h')
        assert Python_h.is_file()
        assert sysconfig._is_python_source_dir(srcdir)
        assert sysconfig._is_python_source_dir(str(srcdir))
|
| 70 |
+
|
| 71 |
+
    def test_srcdir_independent_of_cwd(self):
        """
        srcdir should be independent of the current working directory
        """
        # See #15364.
        srcdir = sysconfig.get_config_var('srcdir')
        # path.Path('..') is a context manager that chdirs and restores
        with path.Path('..'):
            srcdir2 = sysconfig.get_config_var('srcdir')
        assert srcdir == srcdir2
|
| 80 |
+
|
| 81 |
+
    def customize_compiler(self):
        """Run sysconfig.customize_compiler against a stub unix compiler.

        Config vars are temporarily swapped to ``sc_*`` sentinel values so
        tests can distinguish environment-derived settings from sysconfig
        ones.  Returns the stub, whose ``exes`` dict records the
        ``set_executables`` calls made by customize_compiler.
        """
        # make sure AR gets caught
        class compiler:
            compiler_type = 'unix'
            executables = UnixCCompiler.executables

            def __init__(self):
                self.exes = {}

            def set_executables(self, **kw):
                for k, v in kw.items():
                    self.exes[k] = v

        sysconfig_vars = {
            'AR': 'sc_ar',
            'CC': 'sc_cc',
            'CXX': 'sc_cxx',
            'ARFLAGS': '--sc-arflags',
            'CFLAGS': '--sc-cflags',
            'CCSHARED': '--sc-ccshared',
            'LDSHARED': 'sc_ldshared',
            'SHLIB_SUFFIX': 'sc_shutil_suffix',
        }

        comp = compiler()
        with contextlib.ExitStack() as cm:
            # swap_item restores each original config var on exit
            for key, value in sysconfig_vars.items():
                cm.enter_context(swap_item(sysconfig._config_vars, key, value))
            sysconfig.customize_compiler(comp)

        return comp
|
| 112 |
+
|
| 113 |
+
    @pytest.mark.skipif("not isinstance(new_compiler(), UnixCCompiler)")
    @pytest.mark.usefixtures('disable_macos_customization')
    def test_customize_compiler(self):
        """Env vars override sysconfig values; unset, sysconfig values win."""
        # Make sure that sysconfig._config_vars is initialized
        sysconfig.get_config_vars()

        os.environ['AR'] = 'env_ar'
        os.environ['CC'] = 'env_cc'
        os.environ['CPP'] = 'env_cpp'
        os.environ['CXX'] = 'env_cxx --env-cxx-flags'
        os.environ['LDSHARED'] = 'env_ldshared'
        os.environ['LDFLAGS'] = '--env-ldflags'
        os.environ['ARFLAGS'] = '--env-arflags'
        os.environ['CFLAGS'] = '--env-cflags'
        os.environ['CPPFLAGS'] = '--env-cppflags'
        os.environ['RANLIB'] = 'env_ranlib'

        comp = self.customize_compiler()
        assert comp.exes['archiver'] == 'env_ar --env-arflags'
        assert comp.exes['preprocessor'] == 'env_cpp --env-cppflags'
        assert comp.exes['compiler'] == 'env_cc --env-cflags --env-cppflags'
        assert comp.exes['compiler_so'] == (
            'env_cc --env-cflags --env-cppflags --sc-ccshared'
        )
        assert (
            comp.exes['compiler_cxx']
            == 'env_cxx --env-cxx-flags --sc-cflags --env-cppflags'
        )
        assert comp.exes['linker_exe'] == 'env_cc'
        assert comp.exes['linker_so'] == (
            'env_ldshared --env-ldflags --env-cflags --env-cppflags'
        )
        assert comp.shared_lib_extension == 'sc_shutil_suffix'

        # ranlib is only configured on macOS
        if sys.platform == "darwin":
            assert comp.exes['ranlib'] == 'env_ranlib'
        else:
            assert 'ranlib' not in comp.exes

        del os.environ['AR']
        del os.environ['CC']
        del os.environ['CPP']
        del os.environ['CXX']
        del os.environ['LDSHARED']
        del os.environ['LDFLAGS']
        del os.environ['ARFLAGS']
        del os.environ['CFLAGS']
        del os.environ['CPPFLAGS']
        del os.environ['RANLIB']

        # without env vars, the sc_* sentinels from sysconfig are used
        comp = self.customize_compiler()
        assert comp.exes['archiver'] == 'sc_ar --sc-arflags'
        assert comp.exes['preprocessor'] == 'sc_cc -E'
        assert comp.exes['compiler'] == 'sc_cc --sc-cflags'
        assert comp.exes['compiler_so'] == 'sc_cc --sc-cflags --sc-ccshared'
        assert comp.exes['compiler_cxx'] == 'sc_cxx --sc-cflags'
        assert comp.exes['linker_exe'] == 'sc_cc'
        assert comp.exes['linker_so'] == 'sc_ldshared'
        assert comp.shared_lib_extension == 'sc_shutil_suffix'
        assert 'ranlib' not in comp.exes
|
| 173 |
+
|
| 174 |
+
    def test_parse_makefile_base(self, tmp_path):
        """parse_makefile returns literal values; VAR=$OTHER is omitted."""
        makefile = _gen_makefile(
            tmp_path,
            """
            CONFIG_ARGS=  '--arg1=optarg1' 'ENV=LIB'
            VAR=$OTHER
            OTHER=foo
            """,
        )
        d = sysconfig.parse_makefile(makefile)
        assert d == {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'", 'OTHER': 'foo'}
|
| 185 |
+
|
| 186 |
+
    def test_parse_makefile_literal_dollar(self, tmp_path):
        """A '$$' in a makefile value survives parsing as a literal '$'."""
        makefile = _gen_makefile(
            tmp_path,
            """
            CONFIG_ARGS=  '--arg1=optarg1' 'ENV=\\$$LIB'
            VAR=$OTHER
            OTHER=foo
            """,
        )
        d = sysconfig.parse_makefile(makefile)
        assert d == {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'", 'OTHER': 'foo'}
|
| 197 |
+
|
| 198 |
+
    def test_sysconfig_module(self):
        """distutils.sysconfig agrees with the stdlib sysconfig module."""
        import sysconfig as global_sysconfig

        assert global_sysconfig.get_config_var('CFLAGS') == sysconfig.get_config_var(
            'CFLAGS'
        )
        assert global_sysconfig.get_config_var('LDFLAGS') == sysconfig.get_config_var(
            'LDFLAGS'
        )
|
| 207 |
+
|
| 208 |
+
    # On macOS, binary installers support extension module building on
    # various levels of the operating system with differing Xcode
    # configurations, requiring customization of some of the
    # compiler configuration directives to suit the environment on
    # the installed machine. Some of these customizations may require
    # running external programs and are thus deferred until needed by
    # the first extension module build. Only
    # the Distutils version of sysconfig is used for extension module
    # builds, which happens earlier in the Distutils tests. This may
    # cause the following tests to fail since no tests have caused
    # the global version of sysconfig to call the customization yet.
    # The solution for now is to simply skip this test in this case.
    # The longer-term solution is to only have one version of sysconfig.
    @pytest.mark.skipif("sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER')")
    def test_sysconfig_compiler_vars(self):
        """Compiler-related vars match between distutils and stdlib sysconfig."""
        import sysconfig as global_sysconfig

        # runtime re-check in addition to the skipif above (see comment)
        if sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'):
            pytest.skip('compiler flags customized')
        assert global_sysconfig.get_config_var('LDSHARED') == sysconfig.get_config_var(
            'LDSHARED'
        )
        assert global_sysconfig.get_config_var('CC') == sysconfig.get_config_var('CC')
|
| 231 |
+
|
| 232 |
+
    @pytest.mark.skipif("not sysconfig.get_config_var('EXT_SUFFIX')")
    def test_SO_deprecation(self):
        """Reading the legacy 'SO' config var emits a DeprecationWarning."""
        with pytest.warns(DeprecationWarning):
            sysconfig.get_config_var('SO')
|
| 236 |
+
|
| 237 |
+
    def test_customize_compiler_before_get_config_vars(self, tmp_path):
        """A compiler can be used without calling get_config_vars first.

        Runs a fresh interpreter so no prior test has initialized the
        config-vars cache (Issue #21923).
        """
        # Issue #21923: test that a Distribution compiler
        # instance can be called without an explicit call to
        # get_config_vars().
        jaraco.path.build(
            {
                'file': trim("""
                    from distutils.core import Distribution
                    config = Distribution().get_command_obj('config')
                    # try_compile may pass or it may fail if no compiler
                    # is found but it should not raise an exception.
                    rc = config.try_compile('int x;')
                    """)
            },
            tmp_path,
        )
        p = subprocess.Popen(
            [sys.executable, tmp_path / 'file'],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            universal_newlines=True,
            encoding='utf-8',
        )
        outs, errs = p.communicate()
        assert 0 == p.returncode, "Subprocess failed: " + outs
|
| 262 |
+
|
| 263 |
+
    def test_parse_config_h(self):
        """parse_config_h fills a provided dict, or returns a new dict."""
        config_h = sysconfig.get_config_h_filename()
        input = {}
        with open(config_h, encoding="utf-8") as f:
            result = sysconfig.parse_config_h(f, g=input)
        # when g= is given, the same mapping object is returned
        assert input is result
        with open(config_h, encoding="utf-8") as f:
            result = sysconfig.parse_config_h(f)
        assert isinstance(result, dict)
|
| 272 |
+
|
| 273 |
+
    @pytest.mark.skipif("platform.system() != 'Windows'")
    @pytest.mark.skipif("sys.implementation.name != 'cpython'")
    def test_win_ext_suffix(self):
        """On Windows CPython, EXT_SUFFIX ends with (but is more than) '.pyd'."""
        assert sysconfig.get_config_var("EXT_SUFFIX").endswith(".pyd")
        assert sysconfig.get_config_var("EXT_SUFFIX") != ".pyd"
|
| 278 |
+
|
| 279 |
+
@pytest.mark.skipif("platform.system() != 'Windows'")
|
| 280 |
+
@pytest.mark.skipif("sys.implementation.name != 'cpython'")
|
| 281 |
+
@pytest.mark.skipif(
|
| 282 |
+
'\\PCbuild\\'.casefold() not in sys.executable.casefold(),
|
| 283 |
+
reason='Need sys.executable to be in a source tree',
|
| 284 |
+
)
|
| 285 |
+
def test_win_build_venv_from_source_tree(self, tmp_path):
|
| 286 |
+
"""Ensure distutils.sysconfig detects venvs from source tree builds."""
|
| 287 |
+
env = jaraco.envs.VEnv()
|
| 288 |
+
env.create_opts = env.clean_opts
|
| 289 |
+
env.root = tmp_path
|
| 290 |
+
env.ensure_env()
|
| 291 |
+
cmd = [
|
| 292 |
+
env.exe(),
|
| 293 |
+
"-c",
|
| 294 |
+
"import distutils.sysconfig; print(distutils.sysconfig.python_build)",
|
| 295 |
+
]
|
| 296 |
+
distutils_path = os.path.dirname(os.path.dirname(distutils.__file__))
|
| 297 |
+
out = subprocess.check_output(
|
| 298 |
+
cmd, env={**os.environ, "PYTHONPATH": distutils_path}
|
| 299 |
+
)
|
| 300 |
+
assert out == "True"
|
| 301 |
+
|
| 302 |
+
    def test_get_python_inc_missing_config_dir(self, monkeypatch):
        """
        In portable Python installations, the sysconfig will be broken,
        pointing to the directories where the installation was built and
        not where it currently is. In this case, ensure that the missing
        directory isn't used for get_python_inc.

        See pypa/distutils#178.
        """

        def override(name):
            # simulate a stale INCLUDEPY while other vars stay real
            if name == 'INCLUDEPY':
                return '/does-not-exist'
            return sysconfig.get_config_var(name)

        monkeypatch.setattr(sysconfig, 'get_config_var', override)

        assert os.path.exists(sysconfig.get_python_inc())
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_text_file.py
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.text_file."""
|
| 2 |
+
|
| 3 |
+
from distutils.tests import support
|
| 4 |
+
from distutils.text_file import TextFile
|
| 5 |
+
|
| 6 |
+
import jaraco.path
|
| 7 |
+
import path
|
| 8 |
+
|
| 9 |
+
TEST_DATA = """# test file
|
| 10 |
+
|
| 11 |
+
line 3 \\
|
| 12 |
+
# intervening comment
|
| 13 |
+
continues on next line
|
| 14 |
+
"""
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class TestTextFile(support.TempdirManager):
    def test_class(self):
        """Read TEST_DATA through every TextFile option combination."""
        # old tests moved from text_file.__main__
        # so they are really called by the buildbots

        # result 1: no fancy options
        result1 = [
            '# test file\n',
            '\n',
            'line 3 \\\n',
            '# intervening comment\n',
            '  continues on next line\n',
        ]

        # result 2: just strip comments
        result2 = ["\n", "line 3 \\\n", "  continues on next line\n"]

        # result 3: just strip blank lines
        result3 = [
            "# test file\n",
            "line 3 \\\n",
            "# intervening comment\n",
            "  continues on next line\n",
        ]

        # result 4: default, strip comments, blank lines,
        # and trailing whitespace
        result4 = ["line 3 \\", "  continues on next line"]

        # result 5: strip comments and blanks, plus join lines (but don't
        # "collapse" joined lines
        result5 = ["line 3   continues on next line"]

        # result 6: strip comments and blanks, plus join lines (and
        # "collapse" joined lines
        result6 = ["line 3 continues on next line"]

        def test_input(count, description, file, expected_result):
            # helper: read all lines and compare with the expected list
            result = file.readlines()
            assert result == expected_result

        tmp_path = path.Path(self.mkdtemp())
        filename = tmp_path / 'test.txt'
        jaraco.path.build({filename.name: TEST_DATA}, tmp_path)

        in_file = TextFile(
            filename,
            strip_comments=False,
            skip_blanks=False,
            lstrip_ws=False,
            rstrip_ws=False,
        )
        try:
            test_input(1, "no processing", in_file, result1)
        finally:
            in_file.close()

        in_file = TextFile(
            filename,
            strip_comments=True,
            skip_blanks=False,
            lstrip_ws=False,
            rstrip_ws=False,
        )
        try:
            test_input(2, "strip comments", in_file, result2)
        finally:
            in_file.close()

        in_file = TextFile(
            filename,
            strip_comments=False,
            skip_blanks=True,
            lstrip_ws=False,
            rstrip_ws=False,
        )
        try:
            test_input(3, "strip blanks", in_file, result3)
        finally:
            in_file.close()

        in_file = TextFile(filename)
        try:
            test_input(4, "default processing", in_file, result4)
        finally:
            in_file.close()

        in_file = TextFile(
            filename,
            strip_comments=True,
            skip_blanks=True,
            join_lines=True,
            rstrip_ws=True,
        )
        try:
            test_input(5, "join lines without collapsing", in_file, result5)
        finally:
            in_file.close()

        in_file = TextFile(
            filename,
            strip_comments=True,
            skip_blanks=True,
            join_lines=True,
            rstrip_ws=True,
            collapse_join=True,
        )
        try:
            test_input(6, "join lines with collapsing", in_file, result6)
        finally:
            in_file.close()
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_unixccompiler.py
ADDED
|
@@ -0,0 +1,350 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.unixccompiler."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
import unittest.mock as mock
|
| 6 |
+
from distutils import sysconfig
|
| 7 |
+
from distutils.compat import consolidate_linker_args
|
| 8 |
+
from distutils.errors import DistutilsPlatformError
|
| 9 |
+
from distutils.unixccompiler import UnixCCompiler
|
| 10 |
+
from distutils.util import _clear_cached_macosx_ver
|
| 11 |
+
|
| 12 |
+
import pytest
|
| 13 |
+
|
| 14 |
+
from . import support
|
| 15 |
+
from .compat.py39 import EnvironmentVarGuard
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
@pytest.fixture(autouse=True)
|
| 19 |
+
def save_values(monkeypatch):
|
| 20 |
+
monkeypatch.setattr(sys, 'platform', sys.platform)
|
| 21 |
+
monkeypatch.setattr(sysconfig, 'get_config_var', sysconfig.get_config_var)
|
| 22 |
+
monkeypatch.setattr(sysconfig, 'get_config_vars', sysconfig.get_config_vars)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
@pytest.fixture(autouse=True)
|
| 26 |
+
def compiler_wrapper(request):
|
| 27 |
+
class CompilerWrapper(UnixCCompiler):
|
| 28 |
+
def rpath_foo(self):
|
| 29 |
+
return self.runtime_library_dir_option('/foo')
|
| 30 |
+
|
| 31 |
+
request.instance.cc = CompilerWrapper()
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class TestUnixCCompiler(support.TempdirManager):
|
| 35 |
+
@pytest.mark.skipif('platform.system == "Windows"')
|
| 36 |
+
def test_runtime_libdir_option(self): # noqa: C901
|
| 37 |
+
# Issue #5900; GitHub Issue #37
|
| 38 |
+
#
|
| 39 |
+
# Ensure RUNPATH is added to extension modules with RPATH if
|
| 40 |
+
# GNU ld is used
|
| 41 |
+
|
| 42 |
+
# darwin
|
| 43 |
+
sys.platform = 'darwin'
|
| 44 |
+
darwin_ver_var = 'MACOSX_DEPLOYMENT_TARGET'
|
| 45 |
+
darwin_rpath_flag = '-Wl,-rpath,/foo'
|
| 46 |
+
darwin_lib_flag = '-L/foo'
|
| 47 |
+
|
| 48 |
+
# (macOS version from syscfg, macOS version from env var) -> flag
|
| 49 |
+
# Version value of None generates two tests: as None and as empty string
|
| 50 |
+
# Expected flag value of None means an mismatch exception is expected
|
| 51 |
+
darwin_test_cases = [
|
| 52 |
+
((None, None), darwin_lib_flag),
|
| 53 |
+
((None, '11'), darwin_rpath_flag),
|
| 54 |
+
(('10', None), darwin_lib_flag),
|
| 55 |
+
(('10.3', None), darwin_lib_flag),
|
| 56 |
+
(('10.3.1', None), darwin_lib_flag),
|
| 57 |
+
(('10.5', None), darwin_rpath_flag),
|
| 58 |
+
(('10.5.1', None), darwin_rpath_flag),
|
| 59 |
+
(('10.3', '10.3'), darwin_lib_flag),
|
| 60 |
+
(('10.3', '10.5'), darwin_rpath_flag),
|
| 61 |
+
(('10.5', '10.3'), darwin_lib_flag),
|
| 62 |
+
(('10.5', '11'), darwin_rpath_flag),
|
| 63 |
+
(('10.4', '10'), None),
|
| 64 |
+
]
|
| 65 |
+
|
| 66 |
+
def make_darwin_gcv(syscfg_macosx_ver):
|
| 67 |
+
def gcv(var):
|
| 68 |
+
if var == darwin_ver_var:
|
| 69 |
+
return syscfg_macosx_ver
|
| 70 |
+
return "xxx"
|
| 71 |
+
|
| 72 |
+
return gcv
|
| 73 |
+
|
| 74 |
+
def do_darwin_test(syscfg_macosx_ver, env_macosx_ver, expected_flag):
|
| 75 |
+
env = os.environ
|
| 76 |
+
msg = f"macOS version = (sysconfig={syscfg_macosx_ver!r}, env={env_macosx_ver!r})"
|
| 77 |
+
|
| 78 |
+
# Save
|
| 79 |
+
old_gcv = sysconfig.get_config_var
|
| 80 |
+
old_env_macosx_ver = env.get(darwin_ver_var)
|
| 81 |
+
|
| 82 |
+
# Setup environment
|
| 83 |
+
_clear_cached_macosx_ver()
|
| 84 |
+
sysconfig.get_config_var = make_darwin_gcv(syscfg_macosx_ver)
|
| 85 |
+
if env_macosx_ver is not None:
|
| 86 |
+
env[darwin_ver_var] = env_macosx_ver
|
| 87 |
+
elif darwin_ver_var in env:
|
| 88 |
+
env.pop(darwin_ver_var)
|
| 89 |
+
|
| 90 |
+
# Run the test
|
| 91 |
+
if expected_flag is not None:
|
| 92 |
+
assert self.cc.rpath_foo() == expected_flag, msg
|
| 93 |
+
else:
|
| 94 |
+
with pytest.raises(
|
| 95 |
+
DistutilsPlatformError, match=darwin_ver_var + r' mismatch'
|
| 96 |
+
):
|
| 97 |
+
self.cc.rpath_foo()
|
| 98 |
+
|
| 99 |
+
# Restore
|
| 100 |
+
if old_env_macosx_ver is not None:
|
| 101 |
+
env[darwin_ver_var] = old_env_macosx_ver
|
| 102 |
+
elif darwin_ver_var in env:
|
| 103 |
+
env.pop(darwin_ver_var)
|
| 104 |
+
sysconfig.get_config_var = old_gcv
|
| 105 |
+
_clear_cached_macosx_ver()
|
| 106 |
+
|
| 107 |
+
for macosx_vers, expected_flag in darwin_test_cases:
|
| 108 |
+
syscfg_macosx_ver, env_macosx_ver = macosx_vers
|
| 109 |
+
do_darwin_test(syscfg_macosx_ver, env_macosx_ver, expected_flag)
|
| 110 |
+
# Bonus test cases with None interpreted as empty string
|
| 111 |
+
if syscfg_macosx_ver is None:
|
| 112 |
+
do_darwin_test("", env_macosx_ver, expected_flag)
|
| 113 |
+
if env_macosx_ver is None:
|
| 114 |
+
do_darwin_test(syscfg_macosx_ver, "", expected_flag)
|
| 115 |
+
if syscfg_macosx_ver is None and env_macosx_ver is None:
|
| 116 |
+
do_darwin_test("", "", expected_flag)
|
| 117 |
+
|
| 118 |
+
old_gcv = sysconfig.get_config_var
|
| 119 |
+
|
| 120 |
+
# hp-ux
|
| 121 |
+
sys.platform = 'hp-ux'
|
| 122 |
+
|
| 123 |
+
def gcv(v):
|
| 124 |
+
return 'xxx'
|
| 125 |
+
|
| 126 |
+
sysconfig.get_config_var = gcv
|
| 127 |
+
assert self.cc.rpath_foo() == ['+s', '-L/foo']
|
| 128 |
+
|
| 129 |
+
def gcv(v):
|
| 130 |
+
return 'gcc'
|
| 131 |
+
|
| 132 |
+
sysconfig.get_config_var = gcv
|
| 133 |
+
assert self.cc.rpath_foo() == ['-Wl,+s', '-L/foo']
|
| 134 |
+
|
| 135 |
+
def gcv(v):
|
| 136 |
+
return 'g++'
|
| 137 |
+
|
| 138 |
+
sysconfig.get_config_var = gcv
|
| 139 |
+
assert self.cc.rpath_foo() == ['-Wl,+s', '-L/foo']
|
| 140 |
+
|
| 141 |
+
sysconfig.get_config_var = old_gcv
|
| 142 |
+
|
| 143 |
+
# GCC GNULD
|
| 144 |
+
sys.platform = 'bar'
|
| 145 |
+
|
| 146 |
+
def gcv(v):
|
| 147 |
+
if v == 'CC':
|
| 148 |
+
return 'gcc'
|
| 149 |
+
elif v == 'GNULD':
|
| 150 |
+
return 'yes'
|
| 151 |
+
|
| 152 |
+
sysconfig.get_config_var = gcv
|
| 153 |
+
assert self.cc.rpath_foo() == consolidate_linker_args([
|
| 154 |
+
'-Wl,--enable-new-dtags',
|
| 155 |
+
'-Wl,-rpath,/foo',
|
| 156 |
+
])
|
| 157 |
+
|
| 158 |
+
def gcv(v):
|
| 159 |
+
if v == 'CC':
|
| 160 |
+
return 'gcc -pthread -B /bar'
|
| 161 |
+
elif v == 'GNULD':
|
| 162 |
+
return 'yes'
|
| 163 |
+
|
| 164 |
+
sysconfig.get_config_var = gcv
|
| 165 |
+
assert self.cc.rpath_foo() == consolidate_linker_args([
|
| 166 |
+
'-Wl,--enable-new-dtags',
|
| 167 |
+
'-Wl,-rpath,/foo',
|
| 168 |
+
])
|
| 169 |
+
|
| 170 |
+
# GCC non-GNULD
|
| 171 |
+
sys.platform = 'bar'
|
| 172 |
+
|
| 173 |
+
def gcv(v):
|
| 174 |
+
if v == 'CC':
|
| 175 |
+
return 'gcc'
|
| 176 |
+
elif v == 'GNULD':
|
| 177 |
+
return 'no'
|
| 178 |
+
|
| 179 |
+
sysconfig.get_config_var = gcv
|
| 180 |
+
assert self.cc.rpath_foo() == '-Wl,-R/foo'
|
| 181 |
+
|
| 182 |
+
# GCC GNULD with fully qualified configuration prefix
|
| 183 |
+
# see #7617
|
| 184 |
+
sys.platform = 'bar'
|
| 185 |
+
|
| 186 |
+
def gcv(v):
|
| 187 |
+
if v == 'CC':
|
| 188 |
+
return 'x86_64-pc-linux-gnu-gcc-4.4.2'
|
| 189 |
+
elif v == 'GNULD':
|
| 190 |
+
return 'yes'
|
| 191 |
+
|
| 192 |
+
sysconfig.get_config_var = gcv
|
| 193 |
+
assert self.cc.rpath_foo() == consolidate_linker_args([
|
| 194 |
+
'-Wl,--enable-new-dtags',
|
| 195 |
+
'-Wl,-rpath,/foo',
|
| 196 |
+
])
|
| 197 |
+
|
| 198 |
+
# non-GCC GNULD
|
| 199 |
+
sys.platform = 'bar'
|
| 200 |
+
|
| 201 |
+
def gcv(v):
|
| 202 |
+
if v == 'CC':
|
| 203 |
+
return 'cc'
|
| 204 |
+
elif v == 'GNULD':
|
| 205 |
+
return 'yes'
|
| 206 |
+
|
| 207 |
+
sysconfig.get_config_var = gcv
|
| 208 |
+
assert self.cc.rpath_foo() == consolidate_linker_args([
|
| 209 |
+
'-Wl,--enable-new-dtags',
|
| 210 |
+
'-Wl,-rpath,/foo',
|
| 211 |
+
])
|
| 212 |
+
|
| 213 |
+
# non-GCC non-GNULD
|
| 214 |
+
sys.platform = 'bar'
|
| 215 |
+
|
| 216 |
+
def gcv(v):
|
| 217 |
+
if v == 'CC':
|
| 218 |
+
return 'cc'
|
| 219 |
+
elif v == 'GNULD':
|
| 220 |
+
return 'no'
|
| 221 |
+
|
| 222 |
+
sysconfig.get_config_var = gcv
|
| 223 |
+
assert self.cc.rpath_foo() == '-Wl,-R/foo'
|
| 224 |
+
|
| 225 |
+
@pytest.mark.skipif('platform.system == "Windows"')
|
| 226 |
+
def test_cc_overrides_ldshared(self):
|
| 227 |
+
# Issue #18080:
|
| 228 |
+
# ensure that setting CC env variable also changes default linker
|
| 229 |
+
def gcv(v):
|
| 230 |
+
if v == 'LDSHARED':
|
| 231 |
+
return 'gcc-4.2 -bundle -undefined dynamic_lookup '
|
| 232 |
+
return 'gcc-4.2'
|
| 233 |
+
|
| 234 |
+
def gcvs(*args, _orig=sysconfig.get_config_vars):
|
| 235 |
+
if args:
|
| 236 |
+
return list(map(sysconfig.get_config_var, args))
|
| 237 |
+
return _orig()
|
| 238 |
+
|
| 239 |
+
sysconfig.get_config_var = gcv
|
| 240 |
+
sysconfig.get_config_vars = gcvs
|
| 241 |
+
with EnvironmentVarGuard() as env:
|
| 242 |
+
env['CC'] = 'my_cc'
|
| 243 |
+
del env['LDSHARED']
|
| 244 |
+
sysconfig.customize_compiler(self.cc)
|
| 245 |
+
assert self.cc.linker_so[0] == 'my_cc'
|
| 246 |
+
|
| 247 |
+
@pytest.mark.skipif('platform.system == "Windows"')
|
| 248 |
+
@pytest.mark.usefixtures('disable_macos_customization')
|
| 249 |
+
def test_cc_overrides_ldshared_for_cxx_correctly(self):
|
| 250 |
+
"""
|
| 251 |
+
Ensure that setting CC env variable also changes default linker
|
| 252 |
+
correctly when building C++ extensions.
|
| 253 |
+
|
| 254 |
+
pypa/distutils#126
|
| 255 |
+
"""
|
| 256 |
+
|
| 257 |
+
def gcv(v):
|
| 258 |
+
if v == 'LDSHARED':
|
| 259 |
+
return 'gcc-4.2 -bundle -undefined dynamic_lookup '
|
| 260 |
+
elif v == 'LDCXXSHARED':
|
| 261 |
+
return 'g++-4.2 -bundle -undefined dynamic_lookup '
|
| 262 |
+
elif v == 'CXX':
|
| 263 |
+
return 'g++-4.2'
|
| 264 |
+
elif v == 'CC':
|
| 265 |
+
return 'gcc-4.2'
|
| 266 |
+
return ''
|
| 267 |
+
|
| 268 |
+
def gcvs(*args, _orig=sysconfig.get_config_vars):
|
| 269 |
+
if args:
|
| 270 |
+
return list(map(sysconfig.get_config_var, args))
|
| 271 |
+
return _orig()
|
| 272 |
+
|
| 273 |
+
sysconfig.get_config_var = gcv
|
| 274 |
+
sysconfig.get_config_vars = gcvs
|
| 275 |
+
with (
|
| 276 |
+
mock.patch.object(self.cc, 'spawn', return_value=None) as mock_spawn,
|
| 277 |
+
mock.patch.object(self.cc, '_need_link', return_value=True),
|
| 278 |
+
mock.patch.object(self.cc, 'mkpath', return_value=None),
|
| 279 |
+
EnvironmentVarGuard() as env,
|
| 280 |
+
):
|
| 281 |
+
env['CC'] = 'ccache my_cc'
|
| 282 |
+
env['CXX'] = 'my_cxx'
|
| 283 |
+
del env['LDSHARED']
|
| 284 |
+
sysconfig.customize_compiler(self.cc)
|
| 285 |
+
assert self.cc.linker_so[0:2] == ['ccache', 'my_cc']
|
| 286 |
+
self.cc.link(None, [], 'a.out', target_lang='c++')
|
| 287 |
+
call_args = mock_spawn.call_args[0][0]
|
| 288 |
+
expected = ['my_cxx', '-bundle', '-undefined', 'dynamic_lookup']
|
| 289 |
+
assert call_args[:4] == expected
|
| 290 |
+
|
| 291 |
+
@pytest.mark.skipif('platform.system == "Windows"')
|
| 292 |
+
def test_explicit_ldshared(self):
|
| 293 |
+
# Issue #18080:
|
| 294 |
+
# ensure that setting CC env variable does not change
|
| 295 |
+
# explicit LDSHARED setting for linker
|
| 296 |
+
def gcv(v):
|
| 297 |
+
if v == 'LDSHARED':
|
| 298 |
+
return 'gcc-4.2 -bundle -undefined dynamic_lookup '
|
| 299 |
+
return 'gcc-4.2'
|
| 300 |
+
|
| 301 |
+
def gcvs(*args, _orig=sysconfig.get_config_vars):
|
| 302 |
+
if args:
|
| 303 |
+
return list(map(sysconfig.get_config_var, args))
|
| 304 |
+
return _orig()
|
| 305 |
+
|
| 306 |
+
sysconfig.get_config_var = gcv
|
| 307 |
+
sysconfig.get_config_vars = gcvs
|
| 308 |
+
with EnvironmentVarGuard() as env:
|
| 309 |
+
env['CC'] = 'my_cc'
|
| 310 |
+
env['LDSHARED'] = 'my_ld -bundle -dynamic'
|
| 311 |
+
sysconfig.customize_compiler(self.cc)
|
| 312 |
+
assert self.cc.linker_so[0] == 'my_ld'
|
| 313 |
+
|
| 314 |
+
def test_has_function(self):
|
| 315 |
+
# Issue https://github.com/pypa/distutils/issues/64:
|
| 316 |
+
# ensure that setting output_dir does not raise
|
| 317 |
+
# FileNotFoundError: [Errno 2] No such file or directory: 'a.out'
|
| 318 |
+
self.cc.output_dir = 'scratch'
|
| 319 |
+
os.chdir(self.mkdtemp())
|
| 320 |
+
self.cc.has_function('abort')
|
| 321 |
+
|
| 322 |
+
def test_find_library_file(self, monkeypatch):
|
| 323 |
+
compiler = UnixCCompiler()
|
| 324 |
+
compiler._library_root = lambda dir: dir
|
| 325 |
+
monkeypatch.setattr(os.path, 'exists', lambda d: 'existing' in d)
|
| 326 |
+
|
| 327 |
+
libname = 'libabc.dylib' if sys.platform != 'cygwin' else 'cygabc.dll'
|
| 328 |
+
dirs = ('/foo/bar/missing', '/foo/bar/existing')
|
| 329 |
+
assert (
|
| 330 |
+
compiler.find_library_file(dirs, 'abc').replace('\\', '/')
|
| 331 |
+
== f'/foo/bar/existing/{libname}'
|
| 332 |
+
)
|
| 333 |
+
assert (
|
| 334 |
+
compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/')
|
| 335 |
+
== f'/foo/bar/existing/{libname}'
|
| 336 |
+
)
|
| 337 |
+
|
| 338 |
+
monkeypatch.setattr(
|
| 339 |
+
os.path,
|
| 340 |
+
'exists',
|
| 341 |
+
lambda d: 'existing' in d and '.a' in d and '.dll.a' not in d,
|
| 342 |
+
)
|
| 343 |
+
assert (
|
| 344 |
+
compiler.find_library_file(dirs, 'abc').replace('\\', '/')
|
| 345 |
+
== '/foo/bar/existing/libabc.a'
|
| 346 |
+
)
|
| 347 |
+
assert (
|
| 348 |
+
compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/')
|
| 349 |
+
== '/foo/bar/existing/libabc.a'
|
| 350 |
+
)
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_util.py
ADDED
|
@@ -0,0 +1,243 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.util."""
|
| 2 |
+
|
| 3 |
+
import email
|
| 4 |
+
import email.generator
|
| 5 |
+
import email.policy
|
| 6 |
+
import io
|
| 7 |
+
import os
|
| 8 |
+
import pathlib
|
| 9 |
+
import sys
|
| 10 |
+
import sysconfig as stdlib_sysconfig
|
| 11 |
+
import unittest.mock as mock
|
| 12 |
+
from copy import copy
|
| 13 |
+
from distutils import sysconfig, util
|
| 14 |
+
from distutils.errors import DistutilsByteCompileError, DistutilsPlatformError
|
| 15 |
+
from distutils.util import (
|
| 16 |
+
byte_compile,
|
| 17 |
+
change_root,
|
| 18 |
+
check_environ,
|
| 19 |
+
convert_path,
|
| 20 |
+
get_host_platform,
|
| 21 |
+
get_platform,
|
| 22 |
+
grok_environment_error,
|
| 23 |
+
rfc822_escape,
|
| 24 |
+
split_quoted,
|
| 25 |
+
strtobool,
|
| 26 |
+
)
|
| 27 |
+
|
| 28 |
+
import pytest
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
@pytest.fixture(autouse=True)
|
| 32 |
+
def environment(monkeypatch):
|
| 33 |
+
monkeypatch.setattr(os, 'name', os.name)
|
| 34 |
+
monkeypatch.setattr(sys, 'platform', sys.platform)
|
| 35 |
+
monkeypatch.setattr(sys, 'version', sys.version)
|
| 36 |
+
monkeypatch.setattr(os, 'sep', os.sep)
|
| 37 |
+
monkeypatch.setattr(os.path, 'join', os.path.join)
|
| 38 |
+
monkeypatch.setattr(os.path, 'isabs', os.path.isabs)
|
| 39 |
+
monkeypatch.setattr(os.path, 'splitdrive', os.path.splitdrive)
|
| 40 |
+
monkeypatch.setattr(sysconfig, '_config_vars', copy(sysconfig._config_vars))
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
@pytest.mark.usefixtures('save_env')
|
| 44 |
+
class TestUtil:
|
| 45 |
+
def test_get_host_platform(self):
|
| 46 |
+
with mock.patch('os.name', 'nt'):
|
| 47 |
+
with mock.patch('sys.version', '... [... (ARM64)]'):
|
| 48 |
+
assert get_host_platform() == 'win-arm64'
|
| 49 |
+
with mock.patch('sys.version', '... [... (ARM)]'):
|
| 50 |
+
assert get_host_platform() == 'win-arm32'
|
| 51 |
+
|
| 52 |
+
with mock.patch('sys.version_info', (3, 9, 0, 'final', 0)):
|
| 53 |
+
assert get_host_platform() == stdlib_sysconfig.get_platform()
|
| 54 |
+
|
| 55 |
+
def test_get_platform(self):
|
| 56 |
+
with mock.patch('os.name', 'nt'):
|
| 57 |
+
with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x86'}):
|
| 58 |
+
assert get_platform() == 'win32'
|
| 59 |
+
with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x64'}):
|
| 60 |
+
assert get_platform() == 'win-amd64'
|
| 61 |
+
with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm'}):
|
| 62 |
+
assert get_platform() == 'win-arm32'
|
| 63 |
+
with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm64'}):
|
| 64 |
+
assert get_platform() == 'win-arm64'
|
| 65 |
+
|
| 66 |
+
def test_convert_path(self):
|
| 67 |
+
expected = os.sep.join(('', 'home', 'to', 'my', 'stuff'))
|
| 68 |
+
assert convert_path('/home/to/my/stuff') == expected
|
| 69 |
+
assert convert_path(pathlib.Path('/home/to/my/stuff')) == expected
|
| 70 |
+
assert convert_path('.') == os.curdir
|
| 71 |
+
|
| 72 |
+
def test_change_root(self):
|
| 73 |
+
# linux/mac
|
| 74 |
+
os.name = 'posix'
|
| 75 |
+
|
| 76 |
+
def _isabs(path):
|
| 77 |
+
return path[0] == '/'
|
| 78 |
+
|
| 79 |
+
os.path.isabs = _isabs
|
| 80 |
+
|
| 81 |
+
def _join(*path):
|
| 82 |
+
return '/'.join(path)
|
| 83 |
+
|
| 84 |
+
os.path.join = _join
|
| 85 |
+
|
| 86 |
+
assert change_root('/root', '/old/its/here') == '/root/old/its/here'
|
| 87 |
+
assert change_root('/root', 'its/here') == '/root/its/here'
|
| 88 |
+
|
| 89 |
+
# windows
|
| 90 |
+
os.name = 'nt'
|
| 91 |
+
os.sep = '\\'
|
| 92 |
+
|
| 93 |
+
def _isabs(path):
|
| 94 |
+
return path.startswith('c:\\')
|
| 95 |
+
|
| 96 |
+
os.path.isabs = _isabs
|
| 97 |
+
|
| 98 |
+
def _splitdrive(path):
|
| 99 |
+
if path.startswith('c:'):
|
| 100 |
+
return ('', path.replace('c:', ''))
|
| 101 |
+
return ('', path)
|
| 102 |
+
|
| 103 |
+
os.path.splitdrive = _splitdrive
|
| 104 |
+
|
| 105 |
+
def _join(*path):
|
| 106 |
+
return '\\'.join(path)
|
| 107 |
+
|
| 108 |
+
os.path.join = _join
|
| 109 |
+
|
| 110 |
+
assert (
|
| 111 |
+
change_root('c:\\root', 'c:\\old\\its\\here') == 'c:\\root\\old\\its\\here'
|
| 112 |
+
)
|
| 113 |
+
assert change_root('c:\\root', 'its\\here') == 'c:\\root\\its\\here'
|
| 114 |
+
|
| 115 |
+
# BugsBunny os (it's a great os)
|
| 116 |
+
os.name = 'BugsBunny'
|
| 117 |
+
with pytest.raises(DistutilsPlatformError):
|
| 118 |
+
change_root('c:\\root', 'its\\here')
|
| 119 |
+
|
| 120 |
+
# XXX platforms to be covered: mac
|
| 121 |
+
|
| 122 |
+
def test_check_environ(self):
|
| 123 |
+
util.check_environ.cache_clear()
|
| 124 |
+
os.environ.pop('HOME', None)
|
| 125 |
+
|
| 126 |
+
check_environ()
|
| 127 |
+
|
| 128 |
+
assert os.environ['PLAT'] == get_platform()
|
| 129 |
+
|
| 130 |
+
@pytest.mark.skipif("os.name != 'posix'")
|
| 131 |
+
def test_check_environ_getpwuid(self):
|
| 132 |
+
util.check_environ.cache_clear()
|
| 133 |
+
os.environ.pop('HOME', None)
|
| 134 |
+
|
| 135 |
+
import pwd
|
| 136 |
+
|
| 137 |
+
# only set pw_dir field, other fields are not used
|
| 138 |
+
result = pwd.struct_passwd((
|
| 139 |
+
None,
|
| 140 |
+
None,
|
| 141 |
+
None,
|
| 142 |
+
None,
|
| 143 |
+
None,
|
| 144 |
+
'/home/distutils',
|
| 145 |
+
None,
|
| 146 |
+
))
|
| 147 |
+
with mock.patch.object(pwd, 'getpwuid', return_value=result):
|
| 148 |
+
check_environ()
|
| 149 |
+
assert os.environ['HOME'] == '/home/distutils'
|
| 150 |
+
|
| 151 |
+
util.check_environ.cache_clear()
|
| 152 |
+
os.environ.pop('HOME', None)
|
| 153 |
+
|
| 154 |
+
# bpo-10496: Catch pwd.getpwuid() error
|
| 155 |
+
with mock.patch.object(pwd, 'getpwuid', side_effect=KeyError):
|
| 156 |
+
check_environ()
|
| 157 |
+
assert 'HOME' not in os.environ
|
| 158 |
+
|
| 159 |
+
def test_split_quoted(self):
|
| 160 |
+
assert split_quoted('""one"" "two" \'three\' \\four') == [
|
| 161 |
+
'one',
|
| 162 |
+
'two',
|
| 163 |
+
'three',
|
| 164 |
+
'four',
|
| 165 |
+
]
|
| 166 |
+
|
| 167 |
+
def test_strtobool(self):
|
| 168 |
+
yes = ('y', 'Y', 'yes', 'True', 't', 'true', 'True', 'On', 'on', '1')
|
| 169 |
+
no = ('n', 'no', 'f', 'false', 'off', '0', 'Off', 'No', 'N')
|
| 170 |
+
|
| 171 |
+
for y in yes:
|
| 172 |
+
assert strtobool(y)
|
| 173 |
+
|
| 174 |
+
for n in no:
|
| 175 |
+
assert not strtobool(n)
|
| 176 |
+
|
| 177 |
+
indent = 8 * ' '
|
| 178 |
+
|
| 179 |
+
@pytest.mark.parametrize(
|
| 180 |
+
"given,wanted",
|
| 181 |
+
[
|
| 182 |
+
# 0x0b, 0x0c, ..., etc are also considered a line break by Python
|
| 183 |
+
("hello\x0b\nworld\n", f"hello\x0b{indent}\n{indent}world\n{indent}"),
|
| 184 |
+
("hello\x1eworld", f"hello\x1e{indent}world"),
|
| 185 |
+
("", ""),
|
| 186 |
+
(
|
| 187 |
+
"I am a\npoor\nlonesome\nheader\n",
|
| 188 |
+
f"I am a\n{indent}poor\n{indent}lonesome\n{indent}header\n{indent}",
|
| 189 |
+
),
|
| 190 |
+
],
|
| 191 |
+
)
|
| 192 |
+
def test_rfc822_escape(self, given, wanted):
|
| 193 |
+
"""
|
| 194 |
+
We want to ensure a multi-line header parses correctly.
|
| 195 |
+
|
| 196 |
+
For interoperability, the escaped value should also "round-trip" over
|
| 197 |
+
`email.generator.Generator.flatten` and `email.message_from_*`
|
| 198 |
+
(see pypa/setuptools#4033).
|
| 199 |
+
|
| 200 |
+
The main issue is that internally `email.policy.EmailPolicy` uses
|
| 201 |
+
`splitlines` which will split on some control chars. If all the new lines
|
| 202 |
+
are not prefixed with spaces, the parser will interrupt reading
|
| 203 |
+
the current header and produce an incomplete value, while
|
| 204 |
+
incorrectly interpreting the rest of the headers as part of the payload.
|
| 205 |
+
"""
|
| 206 |
+
res = rfc822_escape(given)
|
| 207 |
+
|
| 208 |
+
policy = email.policy.EmailPolicy(
|
| 209 |
+
utf8=True,
|
| 210 |
+
mangle_from_=False,
|
| 211 |
+
max_line_length=0,
|
| 212 |
+
)
|
| 213 |
+
with io.StringIO() as buffer:
|
| 214 |
+
raw = f"header: {res}\nother-header: 42\n\npayload\n"
|
| 215 |
+
orig = email.message_from_string(raw)
|
| 216 |
+
email.generator.Generator(buffer, policy=policy).flatten(orig)
|
| 217 |
+
buffer.seek(0)
|
| 218 |
+
regen = email.message_from_file(buffer)
|
| 219 |
+
|
| 220 |
+
for msg in (orig, regen):
|
| 221 |
+
assert msg.get_payload() == "payload\n"
|
| 222 |
+
assert msg["other-header"] == "42"
|
| 223 |
+
# Generator may replace control chars with `\n`
|
| 224 |
+
assert set(msg["header"].splitlines()) == set(res.splitlines())
|
| 225 |
+
|
| 226 |
+
assert res == wanted
|
| 227 |
+
|
| 228 |
+
def test_dont_write_bytecode(self):
|
| 229 |
+
# makes sure byte_compile raise a DistutilsError
|
| 230 |
+
# if sys.dont_write_bytecode is True
|
| 231 |
+
old_dont_write_bytecode = sys.dont_write_bytecode
|
| 232 |
+
sys.dont_write_bytecode = True
|
| 233 |
+
try:
|
| 234 |
+
with pytest.raises(DistutilsByteCompileError):
|
| 235 |
+
byte_compile([])
|
| 236 |
+
finally:
|
| 237 |
+
sys.dont_write_bytecode = old_dont_write_bytecode
|
| 238 |
+
|
| 239 |
+
def test_grok_environment_error(self):
|
| 240 |
+
# test obsolete function to ensure backward compat (#4931)
|
| 241 |
+
exc = OSError("Unable to find batch file")
|
| 242 |
+
msg = grok_environment_error(exc)
|
| 243 |
+
assert msg == "error: Unable to find batch file"
|
llava/lib/python3.10/site-packages/setuptools/_distutils/tests/test_version.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Tests for distutils.version."""
|
| 2 |
+
|
| 3 |
+
import distutils
|
| 4 |
+
from distutils.version import LooseVersion, StrictVersion
|
| 5 |
+
|
| 6 |
+
import pytest
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@pytest.fixture(autouse=True)
|
| 10 |
+
def suppress_deprecation():
|
| 11 |
+
with distutils.version.suppress_known_deprecation():
|
| 12 |
+
yield
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TestVersion:
|
| 16 |
+
def test_prerelease(self):
|
| 17 |
+
version = StrictVersion('1.2.3a1')
|
| 18 |
+
assert version.version == (1, 2, 3)
|
| 19 |
+
assert version.prerelease == ('a', 1)
|
| 20 |
+
assert str(version) == '1.2.3a1'
|
| 21 |
+
|
| 22 |
+
version = StrictVersion('1.2.0')
|
| 23 |
+
assert str(version) == '1.2'
|
| 24 |
+
|
| 25 |
+
def test_cmp_strict(self):
|
| 26 |
+
versions = (
|
| 27 |
+
('1.5.1', '1.5.2b2', -1),
|
| 28 |
+
('161', '3.10a', ValueError),
|
| 29 |
+
('8.02', '8.02', 0),
|
| 30 |
+
('3.4j', '1996.07.12', ValueError),
|
| 31 |
+
('3.2.pl0', '3.1.1.6', ValueError),
|
| 32 |
+
('2g6', '11g', ValueError),
|
| 33 |
+
('0.9', '2.2', -1),
|
| 34 |
+
('1.2.1', '1.2', 1),
|
| 35 |
+
('1.1', '1.2.2', -1),
|
| 36 |
+
('1.2', '1.1', 1),
|
| 37 |
+
('1.2.1', '1.2.2', -1),
|
| 38 |
+
('1.2.2', '1.2', 1),
|
| 39 |
+
('1.2', '1.2.2', -1),
|
| 40 |
+
('0.4.0', '0.4', 0),
|
| 41 |
+
('1.13++', '5.5.kw', ValueError),
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
for v1, v2, wanted in versions:
|
| 45 |
+
try:
|
| 46 |
+
res = StrictVersion(v1)._cmp(StrictVersion(v2))
|
| 47 |
+
except ValueError:
|
| 48 |
+
if wanted is ValueError:
|
| 49 |
+
continue
|
| 50 |
+
else:
|
| 51 |
+
raise AssertionError(f"cmp({v1}, {v2}) shouldn't raise ValueError")
|
| 52 |
+
assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
|
| 53 |
+
res = StrictVersion(v1)._cmp(v2)
|
| 54 |
+
assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
|
| 55 |
+
res = StrictVersion(v1)._cmp(object())
|
| 56 |
+
assert res is NotImplemented, (
|
| 57 |
+
f'cmp({v1}, {v2}) should be NotImplemented, got {res}'
|
| 58 |
+
)
|
| 59 |
+
|
| 60 |
+
def test_cmp(self):
|
| 61 |
+
versions = (
|
| 62 |
+
('1.5.1', '1.5.2b2', -1),
|
| 63 |
+
('161', '3.10a', 1),
|
| 64 |
+
('8.02', '8.02', 0),
|
| 65 |
+
('3.4j', '1996.07.12', -1),
|
| 66 |
+
('3.2.pl0', '3.1.1.6', 1),
|
| 67 |
+
('2g6', '11g', -1),
|
| 68 |
+
('0.960923', '2.2beta29', -1),
|
| 69 |
+
('1.13++', '5.5.kw', -1),
|
| 70 |
+
)
|
| 71 |
+
|
| 72 |
+
for v1, v2, wanted in versions:
|
| 73 |
+
res = LooseVersion(v1)._cmp(LooseVersion(v2))
|
| 74 |
+
assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
|
| 75 |
+
res = LooseVersion(v1)._cmp(v2)
|
| 76 |
+
assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
|
| 77 |
+
res = LooseVersion(v1)._cmp(object())
|
| 78 |
+
assert res is NotImplemented, (
|
| 79 |
+
f'cmp({v1}, {v2}) should be NotImplemented, got {res}'
|
| 80 |
+
)
|
minigpt2/lib/python3.10/site-packages/tzdata/zoneinfo/Africa/Addis_Ababa
ADDED
|
Binary file (191 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/tzdata/zoneinfo/Africa/Dar_es_Salaam
ADDED
|
Binary file (191 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/tzdata/zoneinfo/Africa/Mbabane
ADDED
|
Binary file (190 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/tzdata/zoneinfo/Africa/Nouakchott
ADDED
|
Binary file (130 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/tzdata/zoneinfo/Africa/Porto-Novo
ADDED
|
Binary file (180 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_batch_norm_impl_index_compositeimplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeimplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API ::std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor,int64_t> _batch_norm_impl_index(const at::Tensor & input, const ::std::optional<at::Tensor> & weight, const ::std::optional<at::Tensor> & bias, const ::std::optional<at::Tensor> & running_mean, const ::std::optional<at::Tensor> & running_var, bool training, double momentum, double eps, bool cudnn_enabled);
|
| 21 |
+
|
| 22 |
+
} // namespace compositeimplicitautograd
|
| 23 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_asin_compositeexplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API ::std::vector<at::Tensor> _foreach_asin(at::TensorList self);
|
| 21 |
+
TORCH_API void _foreach_asin_out(at::TensorList out, at::TensorList self);
|
| 22 |
+
TORCH_API void _foreach_asin_outf(at::TensorList self, at::TensorList out);
|
| 23 |
+
TORCH_API void _foreach_asin_(at::TensorList self);
|
| 24 |
+
|
| 25 |
+
} // namespace compositeexplicitautograd
|
| 26 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_indices_copy.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_indices_copy_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_indices_copy(Tensor self) -> Tensor
|
| 26 |
+
inline at::Tensor _indices_copy(const at::Tensor & self) {
|
| 27 |
+
return at::_ops::_indices_copy::call(self);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::_indices_copy.out(Tensor self, *, Tensor(a!) out) -> Tensor(a!)
|
| 31 |
+
inline at::Tensor & _indices_copy_out(at::Tensor & out, const at::Tensor & self) {
|
| 32 |
+
return at::_ops::_indices_copy_out::call(self, out);
|
| 33 |
+
}
|
| 34 |
+
// aten::_indices_copy.out(Tensor self, *, Tensor(a!) out) -> Tensor(a!)
|
| 35 |
+
inline at::Tensor & _indices_copy_outf(const at::Tensor & self, at::Tensor & out) {
|
| 36 |
+
return at::_ops::_indices_copy_out::call(self, out);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_jagged_to_padded_dense_forward.h
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_jagged_to_padded_dense_forward_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_jagged_to_padded_dense_forward(Tensor values, Tensor[] offsets, SymInt[] max_lengths, float padding_value=0.0) -> Tensor
|
| 26 |
+
inline at::Tensor _jagged_to_padded_dense_forward(const at::Tensor & values, at::TensorList offsets, at::IntArrayRef max_lengths, double padding_value=0.0) {
|
| 27 |
+
return at::_ops::_jagged_to_padded_dense_forward::call(values, offsets, c10::fromIntArrayRefSlow(max_lengths), padding_value);
|
| 28 |
+
}
|
| 29 |
+
namespace symint {
|
| 30 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, int64_t>::value>>
|
| 31 |
+
at::Tensor _jagged_to_padded_dense_forward(const at::Tensor & values, at::TensorList offsets, at::IntArrayRef max_lengths, double padding_value=0.0) {
|
| 32 |
+
return at::_ops::_jagged_to_padded_dense_forward::call(values, offsets, c10::fromIntArrayRefSlow(max_lengths), padding_value);
|
| 33 |
+
}
|
| 34 |
+
}
|
| 35 |
+
|
| 36 |
+
// aten::_jagged_to_padded_dense_forward(Tensor values, Tensor[] offsets, SymInt[] max_lengths, float padding_value=0.0) -> Tensor
|
| 37 |
+
inline at::Tensor _jagged_to_padded_dense_forward_symint(const at::Tensor & values, at::TensorList offsets, c10::SymIntArrayRef max_lengths, double padding_value=0.0) {
|
| 38 |
+
return at::_ops::_jagged_to_padded_dense_forward::call(values, offsets, max_lengths, padding_value);
|
| 39 |
+
}
|
| 40 |
+
namespace symint {
|
| 41 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, c10::SymInt>::value>>
|
| 42 |
+
at::Tensor _jagged_to_padded_dense_forward(const at::Tensor & values, at::TensorList offsets, c10::SymIntArrayRef max_lengths, double padding_value=0.0) {
|
| 43 |
+
return at::_ops::_jagged_to_padded_dense_forward::call(values, offsets, max_lengths, padding_value);
|
| 44 |
+
}
|
| 45 |
+
}
|
| 46 |
+
|
| 47 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_lstm_mps_compositeexplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API ::std::tuple<at::Tensor &,at::Tensor &,at::Tensor &,at::Tensor &,at::Tensor &,at::Tensor &> _lstm_mps_out(at::Tensor & out0, at::Tensor & out1, at::Tensor & out2, at::Tensor & out3, at::Tensor & out4, at::Tensor & out5, const at::Tensor & input, at::TensorList hx, at::TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first);
|
| 21 |
+
TORCH_API ::std::tuple<at::Tensor &,at::Tensor &,at::Tensor &,at::Tensor &,at::Tensor &,at::Tensor &> _lstm_mps_outf(const at::Tensor & input, at::TensorList hx, at::TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first, at::Tensor & out0, at::Tensor & out1, at::Tensor & out2, at::Tensor & out3, at::Tensor & out4, at::Tensor & out5);
|
| 22 |
+
|
| 23 |
+
} // namespace compositeexplicitautograd
|
| 24 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_pad_enum_native.h
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor _pad_enum_symint(const at::Tensor & self, c10::SymIntArrayRef pad, int64_t mode, ::std::optional<double> value=::std::nullopt);
|
| 20 |
+
} // namespace native
|
| 21 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_sample_dirichlet_native.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor & _sample_dirichlet_out(const at::Tensor & self, ::std::optional<at::Generator> generator, at::Tensor & out);
|
| 20 |
+
TORCH_API at::Tensor _s_dirichlet_cpu(const at::Tensor & self, ::std::optional<at::Generator> generator=::std::nullopt);
|
| 21 |
+
TORCH_API at::Tensor _s_dirichlet_cuda(const at::Tensor & self, ::std::optional<at::Generator> generator=::std::nullopt);
|
| 22 |
+
} // namespace native
|
| 23 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_softmax_backward_data_cuda_dispatch.h
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cuda {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor _softmax_backward_data(const at::Tensor & grad_output, const at::Tensor & output, int64_t dim, at::ScalarType input_dtype);
|
| 21 |
+
TORCH_API at::Tensor & _softmax_backward_data_out(at::Tensor & grad_input, const at::Tensor & grad_output, const at::Tensor & output, int64_t dim, at::ScalarType input_dtype);
|
| 22 |
+
TORCH_API at::Tensor & _softmax_backward_data_outf(const at::Tensor & grad_output, const at::Tensor & output, int64_t dim, at::ScalarType input_dtype, at::Tensor & grad_input);
|
| 23 |
+
|
| 24 |
+
} // namespace cuda
|
| 25 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_sparse_softmax_backward_data.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_sparse_softmax_backward_data_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_sparse_softmax_backward_data(Tensor grad_output, Tensor output, int dim, Tensor self) -> Tensor
|
| 26 |
+
inline at::Tensor _sparse_softmax_backward_data(const at::Tensor & grad_output, const at::Tensor & output, int64_t dim, const at::Tensor & self) {
|
| 27 |
+
return at::_ops::_sparse_softmax_backward_data::call(grad_output, output, dim, self);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::_sparse_softmax_backward_data.out(Tensor grad_output, Tensor output, int dim, Tensor self, *, Tensor(a!) out) -> Tensor(a!)
|
| 31 |
+
inline at::Tensor & _sparse_softmax_backward_data_out(at::Tensor & out, const at::Tensor & grad_output, const at::Tensor & output, int64_t dim, const at::Tensor & self) {
|
| 32 |
+
return at::_ops::_sparse_softmax_backward_data_out::call(grad_output, output, dim, self, out);
|
| 33 |
+
}
|
| 34 |
+
// aten::_sparse_softmax_backward_data.out(Tensor grad_output, Tensor output, int dim, Tensor self, *, Tensor(a!) out) -> Tensor(a!)
|
| 35 |
+
inline at::Tensor & _sparse_softmax_backward_data_outf(const at::Tensor & grad_output, const at::Tensor & output, int64_t dim, const at::Tensor & self, at::Tensor & out) {
|
| 36 |
+
return at::_ops::_sparse_softmax_backward_data_out::call(grad_output, output, dim, self, out);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_sparse_softmax_compositeexplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor & _sparse_softmax_out(at::Tensor & out, const at::Tensor & self, int64_t dim, bool half_to_float);
|
| 21 |
+
TORCH_API at::Tensor & _sparse_softmax_outf(const at::Tensor & self, int64_t dim, bool half_to_float, at::Tensor & out);
|
| 22 |
+
|
| 23 |
+
} // namespace compositeexplicitautograd
|
| 24 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_upsample_nearest_exact2d_backward.h
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_upsample_nearest_exact2d_backward_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_upsample_nearest_exact2d_backward.grad_input(Tensor grad_output, SymInt[2] output_size, SymInt[4] input_size, float? scales_h=None, float? scales_w=None, *, Tensor(a!) grad_input) -> Tensor(a!)
|
| 26 |
+
inline at::Tensor & _upsample_nearest_exact2d_backward_out(at::Tensor & grad_input, const at::Tensor & grad_output, at::IntArrayRef output_size, at::IntArrayRef input_size, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt) {
|
| 27 |
+
return at::_ops::_upsample_nearest_exact2d_backward_grad_input::call(grad_output, c10::fromIntArrayRefSlow(output_size), c10::fromIntArrayRefSlow(input_size), scales_h, scales_w, grad_input);
|
| 28 |
+
}
|
| 29 |
+
namespace symint {
|
| 30 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, int64_t>::value>>
|
| 31 |
+
at::Tensor & _upsample_nearest_exact2d_backward_out(at::Tensor & grad_input, const at::Tensor & grad_output, at::IntArrayRef output_size, at::IntArrayRef input_size, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt) {
|
| 32 |
+
return at::_ops::_upsample_nearest_exact2d_backward_grad_input::call(grad_output, c10::fromIntArrayRefSlow(output_size), c10::fromIntArrayRefSlow(input_size), scales_h, scales_w, grad_input);
|
| 33 |
+
}
|
| 34 |
+
}
|
| 35 |
+
|
| 36 |
+
// aten::_upsample_nearest_exact2d_backward.grad_input(Tensor grad_output, SymInt[2] output_size, SymInt[4] input_size, float? scales_h=None, float? scales_w=None, *, Tensor(a!) grad_input) -> Tensor(a!)
|
| 37 |
+
inline at::Tensor & _upsample_nearest_exact2d_backward_outf(const at::Tensor & grad_output, at::IntArrayRef output_size, at::IntArrayRef input_size, ::std::optional<double> scales_h, ::std::optional<double> scales_w, at::Tensor & grad_input) {
|
| 38 |
+
return at::_ops::_upsample_nearest_exact2d_backward_grad_input::call(grad_output, c10::fromIntArrayRefSlow(output_size), c10::fromIntArrayRefSlow(input_size), scales_h, scales_w, grad_input);
|
| 39 |
+
}
|
| 40 |
+
namespace symint {
|
| 41 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, int64_t>::value>>
|
| 42 |
+
at::Tensor & _upsample_nearest_exact2d_backward_outf(const at::Tensor & grad_output, at::IntArrayRef output_size, at::IntArrayRef input_size, ::std::optional<double> scales_h, ::std::optional<double> scales_w, at::Tensor & grad_input) {
|
| 43 |
+
return at::_ops::_upsample_nearest_exact2d_backward_grad_input::call(grad_output, c10::fromIntArrayRefSlow(output_size), c10::fromIntArrayRefSlow(input_size), scales_h, scales_w, grad_input);
|
| 44 |
+
}
|
| 45 |
+
}
|
| 46 |
+
|
| 47 |
+
// aten::_upsample_nearest_exact2d_backward.grad_input(Tensor grad_output, SymInt[2] output_size, SymInt[4] input_size, float? scales_h=None, float? scales_w=None, *, Tensor(a!) grad_input) -> Tensor(a!)
|
| 48 |
+
inline at::Tensor & _upsample_nearest_exact2d_backward_symint_out(at::Tensor & grad_input, const at::Tensor & grad_output, c10::SymIntArrayRef output_size, c10::SymIntArrayRef input_size, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt) {
|
| 49 |
+
return at::_ops::_upsample_nearest_exact2d_backward_grad_input::call(grad_output, output_size, input_size, scales_h, scales_w, grad_input);
|
| 50 |
+
}
|
| 51 |
+
namespace symint {
|
| 52 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, c10::SymInt>::value>>
|
| 53 |
+
at::Tensor & _upsample_nearest_exact2d_backward_out(at::Tensor & grad_input, const at::Tensor & grad_output, c10::SymIntArrayRef output_size, c10::SymIntArrayRef input_size, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt) {
|
| 54 |
+
return at::_ops::_upsample_nearest_exact2d_backward_grad_input::call(grad_output, output_size, input_size, scales_h, scales_w, grad_input);
|
| 55 |
+
}
|
| 56 |
+
}
|
| 57 |
+
|
| 58 |
+
// aten::_upsample_nearest_exact2d_backward.grad_input(Tensor grad_output, SymInt[2] output_size, SymInt[4] input_size, float? scales_h=None, float? scales_w=None, *, Tensor(a!) grad_input) -> Tensor(a!)
|
| 59 |
+
inline at::Tensor & _upsample_nearest_exact2d_backward_symint_outf(const at::Tensor & grad_output, c10::SymIntArrayRef output_size, c10::SymIntArrayRef input_size, ::std::optional<double> scales_h, ::std::optional<double> scales_w, at::Tensor & grad_input) {
|
| 60 |
+
return at::_ops::_upsample_nearest_exact2d_backward_grad_input::call(grad_output, output_size, input_size, scales_h, scales_w, grad_input);
|
| 61 |
+
}
|
| 62 |
+
namespace symint {
|
| 63 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, c10::SymInt>::value>>
|
| 64 |
+
at::Tensor & _upsample_nearest_exact2d_backward_outf(const at::Tensor & grad_output, c10::SymIntArrayRef output_size, c10::SymIntArrayRef input_size, ::std::optional<double> scales_h, ::std::optional<double> scales_w, at::Tensor & grad_input) {
|
| 65 |
+
return at::_ops::_upsample_nearest_exact2d_backward_grad_input::call(grad_output, output_size, input_size, scales_h, scales_w, grad_input);
|
| 66 |
+
}
|
| 67 |
+
}
|
| 68 |
+
|
| 69 |
+
// aten::_upsample_nearest_exact2d_backward(Tensor grad_output, SymInt[2] output_size, SymInt[4] input_size, float? scales_h=None, float? scales_w=None) -> Tensor
|
| 70 |
+
inline at::Tensor _upsample_nearest_exact2d_backward(const at::Tensor & grad_output, at::IntArrayRef output_size, at::IntArrayRef input_size, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt) {
|
| 71 |
+
return at::_ops::_upsample_nearest_exact2d_backward::call(grad_output, c10::fromIntArrayRefSlow(output_size), c10::fromIntArrayRefSlow(input_size), scales_h, scales_w);
|
| 72 |
+
}
|
| 73 |
+
namespace symint {
|
| 74 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, int64_t>::value>>
|
| 75 |
+
at::Tensor _upsample_nearest_exact2d_backward(const at::Tensor & grad_output, at::IntArrayRef output_size, at::IntArrayRef input_size, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt) {
|
| 76 |
+
return at::_ops::_upsample_nearest_exact2d_backward::call(grad_output, c10::fromIntArrayRefSlow(output_size), c10::fromIntArrayRefSlow(input_size), scales_h, scales_w);
|
| 77 |
+
}
|
| 78 |
+
}
|
| 79 |
+
|
| 80 |
+
// aten::_upsample_nearest_exact2d_backward(Tensor grad_output, SymInt[2] output_size, SymInt[4] input_size, float? scales_h=None, float? scales_w=None) -> Tensor
|
| 81 |
+
inline at::Tensor _upsample_nearest_exact2d_backward_symint(const at::Tensor & grad_output, c10::SymIntArrayRef output_size, c10::SymIntArrayRef input_size, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt) {
|
| 82 |
+
return at::_ops::_upsample_nearest_exact2d_backward::call(grad_output, output_size, input_size, scales_h, scales_w);
|
| 83 |
+
}
|
| 84 |
+
namespace symint {
|
| 85 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, c10::SymInt>::value>>
|
| 86 |
+
at::Tensor _upsample_nearest_exact2d_backward(const at::Tensor & grad_output, c10::SymIntArrayRef output_size, c10::SymIntArrayRef input_size, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt) {
|
| 87 |
+
return at::_ops::_upsample_nearest_exact2d_backward::call(grad_output, output_size, input_size, scales_h, scales_w);
|
| 88 |
+
}
|
| 89 |
+
}
|
| 90 |
+
|
| 91 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_weight_norm_interface_backward_compositeexplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API ::std::tuple<at::Tensor &,at::Tensor &> _weight_norm_interface_backward_out(at::Tensor & out0, at::Tensor & out1, const at::Tensor & grad_w, const at::Tensor & saved_v, const at::Tensor & saved_g, const at::Tensor & saved_norms, int64_t dim);
|
| 21 |
+
TORCH_API ::std::tuple<at::Tensor &,at::Tensor &> _weight_norm_interface_backward_outf(const at::Tensor & grad_w, const at::Tensor & saved_v, const at::Tensor & saved_g, const at::Tensor & saved_norms, int64_t dim, at::Tensor & out0, at::Tensor & out1);
|
| 22 |
+
|
| 23 |
+
} // namespace compositeexplicitautograd
|
| 24 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/affine_grid_generator_ops.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API affine_grid_generator {
|
| 18 |
+
using schema = at::Tensor (const at::Tensor &, c10::SymIntArrayRef, bool);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::affine_grid_generator")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "affine_grid_generator(Tensor theta, SymInt[] size, bool align_corners) -> Tensor")
|
| 24 |
+
static at::Tensor call(const at::Tensor & theta, c10::SymIntArrayRef size, bool align_corners);
|
| 25 |
+
static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & theta, c10::SymIntArrayRef size, bool align_corners);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
struct TORCH_API affine_grid_generator_out {
|
| 29 |
+
using schema = at::Tensor & (const at::Tensor &, c10::SymIntArrayRef, bool, at::Tensor &);
|
| 30 |
+
using ptr_schema = schema*;
|
| 31 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 32 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::affine_grid_generator")
|
| 33 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out")
|
| 34 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "affine_grid_generator.out(Tensor theta, SymInt[] size, bool align_corners, *, Tensor(a!) out) -> Tensor(a!)")
|
| 35 |
+
static at::Tensor & call(const at::Tensor & theta, c10::SymIntArrayRef size, bool align_corners, at::Tensor & out);
|
| 36 |
+
static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & theta, c10::SymIntArrayRef size, bool align_corners, at::Tensor & out);
|
| 37 |
+
};
|
| 38 |
+
|
| 39 |
+
}} // namespace at::_ops
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/bincount.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/bincount_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::bincount(Tensor self, Tensor? weights=None, int minlength=0) -> Tensor
|
| 26 |
+
inline at::Tensor bincount(const at::Tensor & self, const ::std::optional<at::Tensor> & weights={}, int64_t minlength=0) {
|
| 27 |
+
return at::_ops::bincount::call(self, weights, minlength);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::bincount.out(Tensor self, Tensor? weights=None, int minlength=0, *, Tensor(a!) out) -> Tensor(a!)
|
| 31 |
+
inline at::Tensor & bincount_out(at::Tensor & out, const at::Tensor & self, const ::std::optional<at::Tensor> & weights={}, int64_t minlength=0) {
|
| 32 |
+
return at::_ops::bincount_out::call(self, weights, minlength, out);
|
| 33 |
+
}
|
| 34 |
+
// aten::bincount.out(Tensor self, Tensor? weights=None, int minlength=0, *, Tensor(a!) out) -> Tensor(a!)
|
| 35 |
+
inline at::Tensor & bincount_outf(const at::Tensor & self, const ::std::optional<at::Tensor> & weights, int64_t minlength, at::Tensor & out) {
|
| 36 |
+
return at::_ops::bincount_out::call(self, weights, minlength, out);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
}
|