ZTWHHH committed on
Commit
60ac002
·
verified ·
1 Parent(s): 1c21b1a

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/quantizable/__pycache__/__init__.cpython-310.pyc +0 -0
  3. evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/quantizable/modules/__pycache__/rnn.cpython-310.pyc +0 -0
  4. evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/quantized/dynamic/__pycache__/__init__.cpython-310.pyc +0 -0
  5. evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/sparse/__init__.py +1 -0
  6. evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/sparse/quantized/__init__.py +10 -0
  7. evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/sparse/quantized/linear.py +197 -0
  8. evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/sparse/quantized/utils.py +42 -0
  9. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/core.cpython-310.pyc +0 -0
  10. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/dep_util.cpython-310.pyc +0 -0
  11. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/zosccompiler.cpython-310.pyc +0 -0
  12. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/_modified.py +73 -0
  13. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/ccompiler.py +1263 -0
  14. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/cygwinccompiler.py +339 -0
  15. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/debug.py +5 -0
  16. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/dir_util.py +244 -0
  17. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/dist.py +1317 -0
  18. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/file_util.py +236 -0
  19. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/filelist.py +366 -0
  20. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/log.py +56 -0
  21. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/spawn.py +117 -0
  22. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/sysconfig.py +583 -0
  23. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  24. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_archive_util.cpython-310.pyc +0 -0
  25. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_bdist.cpython-310.pyc +0 -0
  26. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_bdist_dumb.cpython-310.pyc +0 -0
  27. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_bdist_rpm.cpython-310.pyc +0 -0
  28. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_build.cpython-310.pyc +0 -0
  29. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_build_clib.cpython-310.pyc +0 -0
  30. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_build_scripts.cpython-310.pyc +0 -0
  31. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_ccompiler.cpython-310.pyc +0 -0
  32. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_check.cpython-310.pyc +0 -0
  33. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_clean.cpython-310.pyc +0 -0
  34. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_core.cpython-310.pyc +0 -0
  35. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_dir_util.cpython-310.pyc +0 -0
  36. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_file_util.cpython-310.pyc +0 -0
  37. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_install.cpython-310.pyc +0 -0
  38. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_install_data.cpython-310.pyc +0 -0
  39. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_install_headers.cpython-310.pyc +0 -0
  40. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_install_lib.cpython-310.pyc +0 -0
  41. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_install_scripts.cpython-310.pyc +0 -0
  42. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_log.cpython-310.pyc +0 -0
  43. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_modified.cpython-310.pyc +0 -0
  44. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_spawn.cpython-310.pyc +0 -0
  45. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_unixccompiler.cpython-310.pyc +0 -0
  46. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_util.cpython-310.pyc +0 -0
  47. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_version.cpython-310.pyc +0 -0
  48. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/compat/__init__.py +0 -0
  49. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/compat/__pycache__/__init__.cpython-310.pyc +0 -0
  50. evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/compat/py39.py +40 -0
.gitattributes CHANGED
@@ -603,3 +603,4 @@ evalkit_internvl/lib/python3.10/site-packages/transformers/utils/__pycache__/dum
603
  evalkit_internvl/lib/python3.10/site-packages/transformers/__pycache__/modeling_outputs.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
604
  evalkit_internvl/lib/python3.10/site-packages/sympy/parsing/latex/_antlr/__pycache__/latexparser.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
605
  evalkit_internvl/lib/python3.10/site-packages/torch/_refs/__pycache__/__init__.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
 
 
603
  evalkit_internvl/lib/python3.10/site-packages/transformers/__pycache__/modeling_outputs.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
604
  evalkit_internvl/lib/python3.10/site-packages/sympy/parsing/latex/_antlr/__pycache__/latexparser.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
605
  evalkit_internvl/lib/python3.10/site-packages/torch/_refs/__pycache__/__init__.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
606
+ evalkit_tf437/lib/python3.10/site-packages/sklearn/utils/_random.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/quantizable/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (209 Bytes). View file
 
evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/quantizable/modules/__pycache__/rnn.cpython-310.pyc ADDED
Binary file (12.4 kB). View file
 
evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/quantized/dynamic/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (215 Bytes). View file
 
evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/sparse/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ from . import quantized
evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/sparse/quantized/__init__.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ from torch.ao.nn.sparse.quantized import dynamic
2
+
3
+ from .linear import Linear
4
+ from .linear import LinearPackedParams
5
+
6
+ __all__ = [
7
+ "dynamic",
8
+ "Linear",
9
+ "LinearPackedParams",
10
+ ]
evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/sparse/quantized/linear.py ADDED
@@ -0,0 +1,197 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional
2
+
3
+ import torch
4
+ from torch.ao.nn.quantized.modules.utils import _quantize_weight, _hide_packed_params_repr
5
+
6
+ __all__ = ['LinearPackedParams', 'Linear']
7
+
8
# TODO (zaf): Inherit from `quantized.LinearPackedParams` (T83294430)
class LinearPackedParams(torch.nn.Module):
    """Container for the packed (weight, bias) of a block-sparse quantized linear op.

    The packed representation is produced by ``torch.ops.sparse.qlinear_prepack``
    and consumed by the sparse quantized ``Linear`` module.  Only ``torch.qint8``
    weights are supported.
    """
    _version = 1

    def __init__(self, row_block_size=1, col_block_size=4, dtype=torch.qint8):
        super().__init__()

        if dtype != torch.qint8:
            raise NotImplementedError("Linear prepacking only supports QINT8")
        self.dtype = dtype
        # Start with a dummy 1x1 quantized weight; the real weight arrives via
        # `set_weight_bias` during module conversion or state-dict loading.
        wq = torch._empty_affine_quantized([1, 1], scale=1.0, zero_point=0, dtype=torch.qint8)
        self.set_weight_bias(wq, None, row_block_size, col_block_size)

    def _get_name(self):
        return "SparseQuantizedLinearPackedParams"

    @torch.jit.export
    def set_weight_bias(self, weight: torch.Tensor, bias: Optional[torch.Tensor],
                        row_block_size: Optional[int], col_block_size: Optional[int]) -> None:
        """Re-pack ``weight``/``bias`` with the given block-sparse pattern."""
        assert row_block_size is not None and col_block_size is not None
        self._packed_params = torch.ops.sparse.qlinear_prepack(weight, bias, row_block_size, col_block_size)

    @torch.jit.export
    def _weight_bias(self):
        """Unpack and return ``(weight, bias, row_block_size, col_block_size)``."""
        (weight, bias, block_sizes) = torch.ops.sparse.qlinear_unpack(self._packed_params)
        return (weight, bias, block_sizes[0], block_sizes[1])

    def forward(self, x):
        # Identity: this module only carries packed state for the parent Linear.
        return x

    def _save_to_state_dict(self, destination, prefix, keep_vars):
        super()._save_to_state_dict(destination, prefix, keep_vars)
        destination[prefix + 'dtype'] = self.dtype
        # Serialize in unpacked form so the state dict stays portable.
        destination[prefix + '_packed_params'] = self._weight_bias()

    def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict,
                              missing_keys, unexpected_keys, error_msgs):
        version = local_metadata.get('version', None)
        # Fix: `version` may legitimately be absent (None); comparing None
        # against an int would raise TypeError instead of a clear assertion.
        assert version is None or version <= self._version

        self.dtype = state_dict.pop(prefix + 'dtype')
        weight, bias, row_block_size, col_block_size = state_dict.pop(prefix + '_packed_params')
        self.set_weight_bias(weight, bias, row_block_size, col_block_size)

        # The custom entries were consumed above, so never load strictly here.
        super()._load_from_state_dict(state_dict, prefix, local_metadata, False,
                                      missing_keys, unexpected_keys, error_msgs)

    @torch.jit.export
    def __getstate__(self):
        return self._packed_params, self.training, self.dtype

    @torch.jit.export
    def __setstate__(self, state):
        (self._packed_params, self.training, self.dtype) = state

    def __repr__(self):
        return self._weight_bias().__repr__()
65
+
66
# TODO (zaf): Inherit from `quantized.Linear` (T83294430)
class Linear(torch.nn.Module):
    r"""
    A quantized sparse linear module with quantized tensor as inputs and outputs.

    The weight is stored block-sparse and packed via :class:`LinearPackedParams`;
    the forward pass dispatches to ``torch.ops.sparse.qlinear``.
    """
    _version = 1
    _FLOAT_MODULE = torch.nn.Linear

    def __init__(self, in_features, out_features, row_block_size, col_block_size, bias=True, dtype=torch.qint8):
        super().__init__()

        if dtype != torch.qint8:
            raise NotImplementedError("Only QINT8 is supported for Sparse Quantized Linear")

        self.in_features = in_features
        self.out_features = out_features

        # Zero-filled float bias placeholder; replaced via `set_weight_bias`.
        if bias:
            bias = torch.zeros(self.out_features, dtype=torch.float)
        else:
            bias = None

        qweight = torch._empty_affine_quantized([out_features, in_features],
                                                scale=1, zero_point=0, dtype=torch.qint8)
        self._packed_params = LinearPackedParams(row_block_size=row_block_size,
                                                 col_block_size=col_block_size,
                                                 dtype=dtype)
        self._packed_params.set_weight_bias(qweight, bias, row_block_size, col_block_size)
        self.scale = 1.0
        self.zero_point = 0

    @classmethod
    def _get_name(cls):
        return 'SparseQuantizedLinear'

    def extra_repr(self):
        return 'in_features={}, out_features={}, scale={}, zero_point={}, qscheme={}'.format(
            self.in_features, self.out_features, self.scale, self.zero_point, self.weight().qscheme()
        )

    def __repr__(self):
        return _hide_packed_params_repr(self, LinearPackedParams)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return torch.ops.sparse.qlinear(x, self._packed_params._packed_params, self.scale, self.zero_point)

    def _save_to_state_dict(self, destination, prefix, keep_vars):
        super()._save_to_state_dict(destination, prefix, keep_vars)
        destination[prefix + 'scale'] = torch.tensor(self.scale)
        destination[prefix + 'zero_point'] = torch.tensor(self.zero_point)

    def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict,
                              missing_keys, unexpected_keys, error_msgs):
        self.scale = float(state_dict[prefix + 'scale'])
        state_dict.pop(prefix + 'scale')

        self.zero_point = int(state_dict[prefix + 'zero_point'])
        state_dict.pop(prefix + 'zero_point')

        # `op_type` is consumed (validated as int) then discarded, purely for
        # state-dict compatibility.
        op_type = int(state_dict[prefix + 'op_type'])
        state_dict.pop(prefix + 'op_type')

        version = local_metadata.get('version', None)
        # Fix: guard against missing version metadata (None), which would
        # otherwise raise TypeError on the int comparison below.
        assert version is None or version <= self._version

        super()._load_from_state_dict(
            state_dict, prefix, local_metadata, False,
            missing_keys, unexpected_keys, error_msgs)

    def _weight_bias(self):
        return self._packed_params._weight_bias()

    def weight(self):
        return self._weight_bias()[0]

    def bias(self):
        return self._weight_bias()[1]

    def set_weight_bias(self, w: torch.Tensor, b: Optional[torch.Tensor],
                        row_block_size: Optional[int], col_block_size: Optional[int]) -> None:
        assert row_block_size is not None and col_block_size is not None
        self._packed_params.set_weight_bias(w, b, row_block_size, col_block_size)

    @classmethod
    def from_float(cls, mod):
        r"""Create a quantized sparse module from a float module.

        We only care about the convert at this stage, no need for observers just yet.

        TODO(zaf): Need to add the sparse params to the qconfig
        """
        assert type(mod) == cls._FLOAT_MODULE, cls._get_name() + \
            '.from_float only works for ' + cls._FLOAT_MODULE.__name__
        assert hasattr(mod, 'sparse_params'), \
            ('Expecting the Linear to have `sparse_params`. Make sure you have provided arguments '
             'in the `sparsifier.squash_mask(params_to_save=("sparse_block_shape",))` method.')
        sparse_block_shape = mod.sparse_params.get('sparse_block_shape', None)  # type: ignore[operator, union-attr]
        assert isinstance(sparse_block_shape, (tuple, list))
        assert len(sparse_block_shape) == 2
        # TODO: Need to add options to qconfig to avoid the calibration.
        # TODO: Add calibration for the sparsity
        assert hasattr(mod, 'qconfig'), 'Input float module must have qconfig defined'
        activation_post_process = mod.activation_post_process
        weight_post_process = mod.qconfig.weight()  # type: ignore[operator, union-attr]

        # Assumption is that the weight is already sparsified by the
        # `sparsifier.convert`
        weight = mod.weight

        weight_post_process(weight)
        dtype = weight_post_process.dtype
        act_scale, act_zp = activation_post_process.calculate_qparams()  # type: ignore[operator, union-attr]
        assert dtype == torch.qint8, 'Weight observer must have dtype torch.qint8'
        w_sc, w_zp = weight_post_process.calculate_qparams()
        # Sparse kernels require symmetric weight quantization (zero point 0).
        if isinstance(w_zp, torch.Tensor):
            assert not torch.any(w_zp.bool()), "All weight zero points must map to 0"
        else:
            assert w_zp == 0, 'Weight zero point must map to 0'
        qweight = _quantize_weight(weight.float(), weight_post_process)

        row_block_size = mod.sparse_params['sparse_block_shape'][0]  # type: ignore[index]
        col_block_size = mod.sparse_params['sparse_block_shape'][1]  # type: ignore[index]
        qlinear = cls(mod.in_features,
                      mod.out_features,
                      row_block_size,
                      col_block_size,
                      dtype=dtype)
        qlinear.set_weight_bias(qweight, mod.bias,
                                row_block_size, col_block_size)  # type: ignore[arg-type]
        qlinear.scale = float(act_scale)
        qlinear.zero_point = int(act_zp)
        return qlinear
evalkit_internvl/lib/python3.10/site-packages/torch/ao/nn/sparse/quantized/utils.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import threading
2
+
3
+ __all__ = [
4
+ "LinearBlockSparsePattern"
5
+ ]
6
+
7
+ def _is_valid_linear_block_sparse_pattern(row_block_size, col_block_size):
8
+ return (row_block_size == 1 and col_block_size == 4) or \
9
+ (row_block_size == 8 and col_block_size == 1)
10
+
11
+ # This is a stop-gap measure as current flow does not allow module
12
+ # specific block sparse pattern.
13
+ # Infact there is no way to convey sparse pattern via module config
14
+ # of quantization flow. Thus using the global context to convey
15
+ # sparsity pattern.
16
+ # Once the flow supports it, this should be removed.
17
+ class LinearBlockSparsePattern:
18
+ rlock = threading.RLock()
19
+ row_block_size = 1
20
+ col_block_size = 4
21
+ prev_row_block_size = 1
22
+ prev_col_block_size = 4
23
+
24
+ def __init__(self, row_block_size=1, col_block_size=4):
25
+ assert(_is_valid_linear_block_sparse_pattern(row_block_size, col_block_size))
26
+ LinearBlockSparsePattern.rlock.acquire()
27
+ LinearBlockSparsePattern.prev_row_block_size = LinearBlockSparsePattern.row_block_size
28
+ LinearBlockSparsePattern.prev_col_block_size = LinearBlockSparsePattern.col_block_size
29
+ LinearBlockSparsePattern.row_block_size = row_block_size
30
+ LinearBlockSparsePattern.col_block_size = col_block_size
31
+
32
+ def __enter__(self):
33
+ pass
34
+
35
+ def __exit__(self, exc_type, exc_value, backtrace):
36
+ LinearBlockSparsePattern.row_block_size = LinearBlockSparsePattern.prev_row_block_size
37
+ LinearBlockSparsePattern.col_block_size = LinearBlockSparsePattern.prev_col_block_size
38
+ LinearBlockSparsePattern.rlock.release()
39
+
40
+ @staticmethod
41
+ def block_size():
42
+ return LinearBlockSparsePattern.row_block_size, LinearBlockSparsePattern.col_block_size
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/core.cpython-310.pyc ADDED
Binary file (7.18 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/dep_util.cpython-310.pyc ADDED
Binary file (554 Bytes). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/zosccompiler.cpython-310.pyc ADDED
Binary file (4.25 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/_modified.py ADDED
@@ -0,0 +1,73 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Timestamp comparison of files and groups of files."""
2
+
3
+ import functools
4
+ import os.path
5
+
6
+ from jaraco.functools import splat
7
+
8
+ from .compat.py39 import zip_strict
9
+ from .errors import DistutilsFileError
10
+
11
+
12
+ def _newer(source, target):
13
+ return not os.path.exists(target) or (
14
+ os.path.getmtime(source) > os.path.getmtime(target)
15
+ )
16
+
17
+
18
+ def newer(source, target):
19
+ """
20
+ Is source modified more recently than target.
21
+
22
+ Returns True if 'source' is modified more recently than
23
+ 'target' or if 'target' does not exist.
24
+
25
+ Raises DistutilsFileError if 'source' does not exist.
26
+ """
27
+ if not os.path.exists(source):
28
+ raise DistutilsFileError(f"file '{os.path.abspath(source)}' does not exist")
29
+
30
+ return _newer(source, target)
31
+
32
+
33
+ def newer_pairwise(sources, targets, newer=newer):
34
+ """
35
+ Filter filenames where sources are newer than targets.
36
+
37
+ Walk two filename iterables in parallel, testing if each source is newer
38
+ than its corresponding target. Returns a pair of lists (sources,
39
+ targets) where source is newer than target, according to the semantics
40
+ of 'newer()'.
41
+ """
42
+ newer_pairs = filter(splat(newer), zip_strict(sources, targets))
43
+ return tuple(map(list, zip(*newer_pairs))) or ([], [])
44
+
45
+
46
+ def newer_group(sources, target, missing='error'):
47
+ """
48
+ Is target out-of-date with respect to any file in sources.
49
+
50
+ Return True if 'target' is out-of-date with respect to any file
51
+ listed in 'sources'. In other words, if 'target' exists and is newer
52
+ than every file in 'sources', return False; otherwise return True.
53
+ ``missing`` controls how to handle a missing source file:
54
+
55
+ - error (default): allow the ``stat()`` call to fail.
56
+ - ignore: silently disregard any missing source files.
57
+ - newer: treat missing source files as "target out of date". This
58
+ mode is handy in "dry-run" mode: it will pretend to carry out
59
+ commands that wouldn't work because inputs are missing, but
60
+ that doesn't matter because dry-run won't run the commands.
61
+ """
62
+
63
+ def missing_as_newer(source):
64
+ return missing == 'newer' and not os.path.exists(source)
65
+
66
+ ignored = os.path.exists if missing == 'ignore' else None
67
+ return not os.path.exists(target) or any(
68
+ missing_as_newer(source) or _newer(source, target)
69
+ for source in filter(ignored, sources)
70
+ )
71
+
72
+
73
+ newer_pairwise_group = functools.partial(newer_pairwise, newer=newer_group)
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/ccompiler.py ADDED
@@ -0,0 +1,1263 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.ccompiler
2
+
3
+ Contains CCompiler, an abstract base class that defines the interface
4
+ for the Distutils compiler abstraction model."""
5
+
6
+ import os
7
+ import pathlib
8
+ import re
9
+ import sys
10
+ import types
11
+ import warnings
12
+
13
+ from more_itertools import always_iterable
14
+
15
+ from ._log import log
16
+ from ._modified import newer_group
17
+ from .dir_util import mkpath
18
+ from .errors import (
19
+ CompileError,
20
+ DistutilsModuleError,
21
+ DistutilsPlatformError,
22
+ LinkError,
23
+ UnknownFileError,
24
+ )
25
+ from .file_util import move_file
26
+ from .spawn import spawn
27
+ from .util import execute, is_mingw, split_quoted
28
+
29
+
30
+ class CCompiler:
31
+ """Abstract base class to define the interface that must be implemented
32
+ by real compiler classes. Also has some utility methods used by
33
+ several compiler classes.
34
+
35
+ The basic idea behind a compiler abstraction class is that each
36
+ instance can be used for all the compile/link steps in building a
37
+ single project. Thus, attributes common to all of those compile and
38
+ link steps -- include directories, macros to define, libraries to link
39
+ against, etc. -- are attributes of the compiler instance. To allow for
40
+ variability in how individual files are treated, most of those
41
+ attributes may be varied on a per-compilation or per-link basis.
42
+ """
43
+
44
+ # 'compiler_type' is a class attribute that identifies this class. It
45
+ # keeps code that wants to know what kind of compiler it's dealing with
46
+ # from having to import all possible compiler classes just to do an
47
+ # 'isinstance'. In concrete CCompiler subclasses, 'compiler_type'
48
+ # should really, really be one of the keys of the 'compiler_class'
49
+ # dictionary (see below -- used by the 'new_compiler()' factory
50
+ # function) -- authors of new compiler interface classes are
51
+ # responsible for updating 'compiler_class'!
52
+ compiler_type = None
53
+
54
+ # XXX things not handled by this compiler abstraction model:
55
+ # * client can't provide additional options for a compiler,
56
+ # e.g. warning, optimization, debugging flags. Perhaps this
57
+ # should be the domain of concrete compiler abstraction classes
58
+ # (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base
59
+ # class should have methods for the common ones.
60
+ # * can't completely override the include or library searchg
61
+ # path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2".
62
+ # I'm not sure how widely supported this is even by Unix
63
+ # compilers, much less on other platforms. And I'm even less
64
+ # sure how useful it is; maybe for cross-compiling, but
65
+ # support for that is a ways off. (And anyways, cross
66
+ # compilers probably have a dedicated binary with the
67
+ # right paths compiled in. I hope.)
68
+ # * can't do really freaky things with the library list/library
69
+ # dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against
70
+ # different versions of libfoo.a in different locations. I
71
+ # think this is useless without the ability to null out the
72
+ # library search path anyways.
73
+
74
+ # Subclasses that rely on the standard filename generation methods
75
+ # implemented below should override these; see the comment near
76
+ # those methods ('object_filenames()' et. al.) for details:
77
+ src_extensions = None # list of strings
78
+ obj_extension = None # string
79
+ static_lib_extension = None
80
+ shared_lib_extension = None # string
81
+ static_lib_format = None # format string
82
+ shared_lib_format = None # prob. same as static_lib_format
83
+ exe_extension = None # string
84
+
85
+ # Default language settings. language_map is used to detect a source
86
+ # file or Extension target language, checking source filenames.
87
+ # language_order is used to detect the language precedence, when deciding
88
+ # what language to use when mixing source types. For example, if some
89
+ # extension has two files with ".c" extension, and one with ".cpp", it
90
+ # is still linked as c++.
91
+ language_map = {
92
+ ".c": "c",
93
+ ".cc": "c++",
94
+ ".cpp": "c++",
95
+ ".cxx": "c++",
96
+ ".m": "objc",
97
+ }
98
+ language_order = ["c++", "objc", "c"]
99
+
100
+ include_dirs = []
101
+ """
102
+ include dirs specific to this compiler class
103
+ """
104
+
105
+ library_dirs = []
106
+ """
107
+ library dirs specific to this compiler class
108
+ """
109
+
110
+ def __init__(self, verbose=False, dry_run=False, force=False):
111
+ self.dry_run = dry_run
112
+ self.force = force
113
+ self.verbose = verbose
114
+
115
+ # 'output_dir': a common output directory for object, library,
116
+ # shared object, and shared library files
117
+ self.output_dir = None
118
+
119
+ # 'macros': a list of macro definitions (or undefinitions). A
120
+ # macro definition is a 2-tuple (name, value), where the value is
121
+ # either a string or None (no explicit value). A macro
122
+ # undefinition is a 1-tuple (name,).
123
+ self.macros = []
124
+
125
+ # 'include_dirs': a list of directories to search for include files
126
+ self.include_dirs = []
127
+
128
+ # 'libraries': a list of libraries to include in any link
129
+ # (library names, not filenames: eg. "foo" not "libfoo.a")
130
+ self.libraries = []
131
+
132
+ # 'library_dirs': a list of directories to search for libraries
133
+ self.library_dirs = []
134
+
135
+ # 'runtime_library_dirs': a list of directories to search for
136
+ # shared libraries/objects at runtime
137
+ self.runtime_library_dirs = []
138
+
139
+ # 'objects': a list of object files (or similar, such as explicitly
140
+ # named library files) to include on any link
141
+ self.objects = []
142
+
143
+ for key in self.executables.keys():
144
+ self.set_executable(key, self.executables[key])
145
+
146
+ def set_executables(self, **kwargs):
147
+ """Define the executables (and options for them) that will be run
148
+ to perform the various stages of compilation. The exact set of
149
+ executables that may be specified here depends on the compiler
150
+ class (via the 'executables' class attribute), but most will have:
151
+ compiler the C/C++ compiler
152
+ linker_so linker used to create shared objects and libraries
153
+ linker_exe linker used to create binary executables
154
+ archiver static library creator
155
+
156
+ On platforms with a command-line (Unix, DOS/Windows), each of these
157
+ is a string that will be split into executable name and (optional)
158
+ list of arguments. (Splitting the string is done similarly to how
159
+ Unix shells operate: words are delimited by spaces, but quotes and
160
+ backslashes can override this. See
161
+ 'distutils.util.split_quoted()'.)
162
+ """
163
+
164
+ # Note that some CCompiler implementation classes will define class
165
+ # attributes 'cpp', 'cc', etc. with hard-coded executable names;
166
+ # this is appropriate when a compiler class is for exactly one
167
+ # compiler/OS combination (eg. MSVCCompiler). Other compiler
168
+ # classes (UnixCCompiler, in particular) are driven by information
169
+ # discovered at run-time, since there are many different ways to do
170
+ # basically the same things with Unix C compilers.
171
+
172
+ for key in kwargs:
173
+ if key not in self.executables:
174
+ raise ValueError(
175
+ f"unknown executable '{key}' for class {self.__class__.__name__}"
176
+ )
177
+ self.set_executable(key, kwargs[key])
178
+
179
def set_executable(self, key, value):
    """Store executable 'value' under attribute 'key'.

    Command strings are tokenized shell-style via split_quoted();
    anything else (typically an already-split argv list) is stored
    untouched.
    """
    tokens = split_quoted(value) if isinstance(value, str) else value
    setattr(self, key, tokens)
+
185
+ def _find_macro(self, name):
186
+ i = 0
187
+ for defn in self.macros:
188
+ if defn[0] == name:
189
+ return i
190
+ i += 1
191
+ return None
192
+
193
+ def _check_macro_definitions(self, definitions):
194
+ """Ensure that every element of 'definitions' is valid."""
195
+ for defn in definitions:
196
+ self._check_macro_definition(*defn)
197
+
198
+ def _check_macro_definition(self, defn):
199
+ """
200
+ Raise a TypeError if defn is not valid.
201
+
202
+ A valid definition is either a (name, value) 2-tuple or a (name,) tuple.
203
+ """
204
+ if not isinstance(defn, tuple) or not self._is_valid_macro(*defn):
205
+ raise TypeError(
206
+ f"invalid macro definition '{defn}': "
207
+ "must be tuple (string,), (string, string), or (string, None)"
208
+ )
209
+
210
+ @staticmethod
211
+ def _is_valid_macro(name, value=None):
212
+ """
213
+ A valid macro is a ``name : str`` and a ``value : str | None``.
214
+ """
215
+ return isinstance(name, str) and isinstance(value, (str, types.NoneType))
216
+
217
# -- Bookkeeping methods -------------------------------------------

def define_macro(self, name, value=None):
    """Define preprocessor macro 'name' for all compilations driven by
    this compiler object.

    'value', if supplied, should be a string; otherwise the macro is
    defined without an explicit value and the exact effect is
    compiler-dependent.
    """
    # Remove any earlier definition/undefinition of the same name so
    # that the newest call takes precedence.
    existing = self._find_macro(name)
    if existing is not None:
        del self.macros[existing]
    self.macros.append((name, value))
+
234
def undefine_macro(self, name):
    """Undefine preprocessor macro 'name' for all compilations driven
    by this compiler object.

    The last of multiple define/undefine calls for the same name wins;
    per-compilation macros (passed to 'compile()') override either.
    """
    # Drop any earlier definition/undefinition so this call wins.
    existing = self._find_macro(name)
    if existing is not None:
        del self.macros[existing]
    # A 1-tuple marks an undefinition (a 2-tuple marks a definition).
    self.macros.append((name,))
+
252
def add_include_dir(self, dir):
    """Append 'dir' to the header-file search path.

    Directories are searched in the order of successive calls.
    """
    self.include_dirs.append(dir)
+
260
def set_include_dirs(self, dirs):
    """Replace the header search path with a copy of 'dirs'.

    Overrides earlier 'add_include_dir()' calls; later calls append to
    this list.  Compiler-default include directories are unaffected.
    """
    self.include_dirs = list(dirs)
+
270
def add_library(self, libname):
    """Append library name 'libname' to every link driven by this
    compiler object.

    'libname' is a library *name*, not a filename; the platform's
    linker/compiler infers the actual file.  Libraries are linked in
    the order supplied, and duplicates are passed to the linker as
    many times as given.
    """
    self.libraries.append(libname)
+
286
def set_libraries(self, libnames):
    """Replace the list of libraries included in every link with a copy
    of 'libnames'.  Standard system libraries the linker adds by
    default are unaffected."""
    self.libraries = list(libnames)
+
294
def add_library_dir(self, dir):
    """Append 'dir' to the search path for libraries named via
    'add_library()'/'set_libraries()'.  Directories are searched in
    call order."""
    self.library_dirs.append(dir)
+
302
def set_library_dirs(self, dirs):
    """Replace the library search path with a copy of 'dirs'.  The
    linker's default search path is unaffected."""
    self.library_dirs = list(dirs)
+
309
def add_runtime_library_dir(self, dir):
    """Append 'dir' to the run-time shared-library search path."""
    self.runtime_library_dirs.append(dir)
+
315
def set_runtime_library_dirs(self, dirs):
    """Replace the run-time shared-library search path with a copy of
    'dirs'.  The runtime linker's default path is unaffected."""
    self.runtime_library_dirs = list(dirs)
+
323
def add_link_object(self, object):
    """Append 'object' (an object file, or analogue such as an
    explicitly named library file or resource-compiler output) to every
    link driven by this compiler object."""
    self.objects.append(object)
+
331
def set_link_objects(self, objects):
    """Replace the list of extra object files included in every link
    with a copy of 'objects'.  Linker-default objects (e.g. system
    libraries) are unaffected."""
    self.objects = list(objects)
+
339
# -- Private utility methods --------------------------------------
# (here for the convenience of subclasses)

# Helper method to prep compiler in subclass compile() methods

def _setup_compile(self, outdir, macros, incdirs, sources, depends, extra):
    """Process arguments and decide which source files to compile.

    Returns (macros, objects, extra, pp_opts, build) where 'build' maps
    each object filename to its (source, extension) pair.
    """
    outdir, macros, incdirs = self._fix_compile_args(outdir, macros, incdirs)

    if extra is None:
        extra = []

    # Expected output (object) files, one per source.
    objects = self.object_filenames(sources, strip_dir=False, output_dir=outdir)
    assert len(objects) == len(sources)

    pp_opts = gen_preprocess_options(macros, incdirs)

    build = {}
    # zip() replaces the original index-based loop over range(len(...)).
    for src, obj in zip(sources, objects):
        ext = os.path.splitext(src)[1]
        self.mkpath(os.path.dirname(obj))
        build[obj] = (src, ext)

    return macros, objects, extra, pp_opts, build
+
367
+ def _get_cc_args(self, pp_opts, debug, before):
368
+ # works for unixccompiler, cygwinccompiler
369
+ cc_args = pp_opts + ['-c']
370
+ if debug:
371
+ cc_args[:0] = ['-g']
372
+ if before:
373
+ cc_args[:0] = before
374
+ return cc_args
375
+
376
+ def _fix_compile_args(self, output_dir, macros, include_dirs):
377
+ """Typecheck and fix-up some of the arguments to the 'compile()'
378
+ method, and return fixed-up values. Specifically: if 'output_dir'
379
+ is None, replaces it with 'self.output_dir'; ensures that 'macros'
380
+ is a list, and augments it with 'self.macros'; ensures that
381
+ 'include_dirs' is a list, and augments it with 'self.include_dirs'.
382
+ Guarantees that the returned values are of the correct type,
383
+ i.e. for 'output_dir' either string or None, and for 'macros' and
384
+ 'include_dirs' either list or None.
385
+ """
386
+ if output_dir is None:
387
+ output_dir = self.output_dir
388
+ elif not isinstance(output_dir, str):
389
+ raise TypeError("'output_dir' must be a string or None")
390
+
391
+ if macros is None:
392
+ macros = list(self.macros)
393
+ elif isinstance(macros, list):
394
+ macros = macros + (self.macros or [])
395
+ else:
396
+ raise TypeError("'macros' (if supplied) must be a list of tuples")
397
+
398
+ if include_dirs is None:
399
+ include_dirs = list(self.include_dirs)
400
+ elif isinstance(include_dirs, (list, tuple)):
401
+ include_dirs = list(include_dirs) + (self.include_dirs or [])
402
+ else:
403
+ raise TypeError("'include_dirs' (if supplied) must be a list of strings")
404
+
405
+ # add include dirs for class
406
+ include_dirs += self.__class__.include_dirs
407
+
408
+ return output_dir, macros, include_dirs
409
+
410
+ def _prep_compile(self, sources, output_dir, depends=None):
411
+ """Decide which source files must be recompiled.
412
+
413
+ Determine the list of object files corresponding to 'sources',
414
+ and figure out which ones really need to be recompiled.
415
+ Return a list of all object files and a dictionary telling
416
+ which source files can be skipped.
417
+ """
418
+ # Get the list of expected output (object) files
419
+ objects = self.object_filenames(sources, output_dir=output_dir)
420
+ assert len(objects) == len(sources)
421
+
422
+ # Return an empty dict for the "which source files can be skipped"
423
+ # return value to preserve API compatibility.
424
+ return objects, {}
425
+
426
+ def _fix_object_args(self, objects, output_dir):
427
+ """Typecheck and fix up some arguments supplied to various methods.
428
+ Specifically: ensure that 'objects' is a list; if output_dir is
429
+ None, replace with self.output_dir. Return fixed versions of
430
+ 'objects' and 'output_dir'.
431
+ """
432
+ if not isinstance(objects, (list, tuple)):
433
+ raise TypeError("'objects' must be a list or tuple of strings")
434
+ objects = list(objects)
435
+
436
+ if output_dir is None:
437
+ output_dir = self.output_dir
438
+ elif not isinstance(output_dir, str):
439
+ raise TypeError("'output_dir' must be a string or None")
440
+
441
+ return (objects, output_dir)
442
+
443
+ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
444
+ """Typecheck and fix up some of the arguments supplied to the
445
+ 'link_*' methods. Specifically: ensure that all arguments are
446
+ lists, and augment them with their permanent versions
447
+ (eg. 'self.libraries' augments 'libraries'). Return a tuple with
448
+ fixed versions of all arguments.
449
+ """
450
+ if libraries is None:
451
+ libraries = list(self.libraries)
452
+ elif isinstance(libraries, (list, tuple)):
453
+ libraries = list(libraries) + (self.libraries or [])
454
+ else:
455
+ raise TypeError("'libraries' (if supplied) must be a list of strings")
456
+
457
+ if library_dirs is None:
458
+ library_dirs = list(self.library_dirs)
459
+ elif isinstance(library_dirs, (list, tuple)):
460
+ library_dirs = list(library_dirs) + (self.library_dirs or [])
461
+ else:
462
+ raise TypeError("'library_dirs' (if supplied) must be a list of strings")
463
+
464
+ # add library dirs for class
465
+ library_dirs += self.__class__.library_dirs
466
+
467
+ if runtime_library_dirs is None:
468
+ runtime_library_dirs = list(self.runtime_library_dirs)
469
+ elif isinstance(runtime_library_dirs, (list, tuple)):
470
+ runtime_library_dirs = list(runtime_library_dirs) + (
471
+ self.runtime_library_dirs or []
472
+ )
473
+ else:
474
+ raise TypeError(
475
+ "'runtime_library_dirs' (if supplied) must be a list of strings"
476
+ )
477
+
478
+ return (libraries, library_dirs, runtime_library_dirs)
479
+
480
+ def _need_link(self, objects, output_file):
481
+ """Return true if we need to relink the files listed in 'objects'
482
+ to recreate 'output_file'.
483
+ """
484
+ if self.force:
485
+ return True
486
+ else:
487
+ if self.dry_run:
488
+ newer = newer_group(objects, output_file, missing='newer')
489
+ else:
490
+ newer = newer_group(objects, output_file)
491
+ return newer
492
+
493
def detect_language(self, sources):
    """Detect the language of a file or list of files.

    Uses 'language_map' (extension -> language) and 'language_order'
    (highest priority first); returns the highest-priority language
    found among the sources, or None if none is recognized.
    """
    if not isinstance(sources, list):
        sources = [sources]
    best_lang = None
    best_rank = len(self.language_order)
    for source in sources:
        ext = os.path.splitext(source)[1]
        lang = self.language_map.get(ext)
        try:
            rank = self.language_order.index(lang)
        except ValueError:
            # Unknown extension/language: ignore this source.
            continue
        if rank < best_rank:
            best_lang, best_rank = lang, rank
    return best_lang
+
513
# -- Worker methods ------------------------------------------------
# (must be implemented by subclasses)

def preprocess(
    self,
    source,
    output_file=None,
    macros=None,
    include_dirs=None,
    extra_preargs=None,
    extra_postargs=None,
):
    """Preprocess the single C/C++ source file named in 'source'.

    Output goes to 'output_file', or stdout when it is None.  'macros'
    and 'include_dirs' augment the permanent settings, as for
    'compile()'.

    Subclasses raise PreprocessError on failure; this default
    implementation does nothing.
    """
    pass
+
536
def compile(
    self,
    sources,
    output_dir=None,
    macros=None,
    include_dirs=None,
    debug=False,
    extra_preargs=None,
    extra_postargs=None,
    depends=None,
):
    """Compile one or more source files.

    'sources' is a list of filenames -- usually C/C++, but anything
    this compiler class handles (e.g. MSVCCompiler accepts resource
    files).  Returns one object filename per source, whether or not
    that source was actually recompiled.

    'output_dir', if given, receives the object files while their
    original path components are preserved: "foo/bar.c" compiles to
    "build/foo/bar.o" when output_dir is "build".

    'macros' is a list of macro tuples: (name, value) defines (value
    None means no explicit value), (name,) undefines; later entries
    take precedence.  'include_dirs' extends the include search path
    for this call only.  'debug' requests debug symbols.
    'extra_preargs'/'extra_postargs' are implementation-dependent
    escape hatches -- on command-line platforms, extra argv entries to
    prepend/append.  'depends' is a coarse-grained list of files all
    targets depend on.

    Raises CompileError on failure.
    """
    # A concrete compiler class can either override this method
    # entirely or just implement _compile().
    macros, objects, extra_postargs, pp_opts, build = self._setup_compile(
        output_dir, macros, include_dirs, sources, depends, extra_postargs
    )
    cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)

    for obj in objects:
        try:
            src, ext = build[obj]
        except KeyError:
            # Not scheduled for (re)build -- keep the existing object.
            continue
        self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)

    # Return *all* object filenames, not just those freshly built.
    return objects
+
612
+ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
613
+ """Compile 'src' to product 'obj'."""
614
+ # A concrete compiler class that does not override compile()
615
+ # should implement _compile().
616
+ pass
617
+
618
def create_static_lib(
    self, objects, output_libname, output_dir=None, debug=False, target_lang=None
):
    """Link object files into a static library.

    Inputs are 'objects' plus the extra objects from
    'add_link_object()'/'set_link_objects()' and the libraries from
    'add_library()'/'set_libraries()'.  'output_libname' is a library
    *name* (the filename is inferred); 'output_dir' is where the file
    goes.  'debug' is accepted for consistency -- on most platforms it
    only matters at the compile step.  'target_lang' allows
    language-specific link treatment.

    Subclasses raise LibError on failure; this default implementation
    does nothing.
    """
    pass
+
645
# Allowed values for the 'target_desc' parameter of link():
SHARED_OBJECT = "shared_object"
SHARED_LIBRARY = "shared_library"
EXECUTABLE = "executable"
+
650
def link(
    self,
    target_desc,
    objects,
    output_filename,
    output_dir=None,
    libraries=None,
    library_dirs=None,
    runtime_library_dirs=None,
    export_symbols=None,
    debug=False,
    extra_preargs=None,
    extra_postargs=None,
    build_temp=None,
    target_lang=None,
):
    """Link object files into an executable or shared library file.

    'target_desc' is one of SHARED_OBJECT, SHARED_LIBRARY or
    EXECUTABLE.  'output_filename' may be relative to 'output_dir'.

    'libraries' lists library *names* (translated per platform:
    "foo" -> "libfoo.a" on Unix, "foo.lib" on DOS/Windows); a name
    with a directory component is looked up only in that directory.
    'library_dirs' extends the search path for bare names, on top of
    the system default and 'add_library_dir()'/'set_library_dirs()'.
    'runtime_library_dirs' is embedded into the shared library for
    run-time dependency resolution (relevant mainly on Unix).
    'export_symbols' lists symbols the shared library exports
    (relevant mainly on Windows).

    'debug', 'extra_preargs' and 'extra_postargs' are as for
    'compile()' -- though 'debug' genuinely matters here on most
    platforms.  'target_lang' allows language-specific link treatment.

    Subclasses raise LinkError on failure; this base implementation is
    abstract.
    """
    raise NotImplementedError
+
711
# Old 'link_*()' methods, rewritten to use the new 'link()' method.

def link_shared_lib(
    self,
    objects,
    output_libname,
    output_dir=None,
    libraries=None,
    library_dirs=None,
    runtime_library_dirs=None,
    export_symbols=None,
    debug=False,
    extra_preargs=None,
    extra_postargs=None,
    build_temp=None,
    target_lang=None,
):
    """Backward-compatible wrapper: link a shared library via link(),
    deriving the filename from 'output_libname'."""
    self.link(
        CCompiler.SHARED_LIBRARY,
        objects,
        self.library_filename(output_libname, lib_type='shared'),
        output_dir,
        libraries,
        library_dirs,
        runtime_library_dirs,
        export_symbols,
        debug,
        extra_preargs,
        extra_postargs,
        build_temp,
        target_lang,
    )
+
744
def link_shared_object(
    self,
    objects,
    output_filename,
    output_dir=None,
    libraries=None,
    library_dirs=None,
    runtime_library_dirs=None,
    export_symbols=None,
    debug=False,
    extra_preargs=None,
    extra_postargs=None,
    build_temp=None,
    target_lang=None,
):
    """Backward-compatible wrapper: link a shared object via link(),
    using 'output_filename' as given."""
    self.link(
        CCompiler.SHARED_OBJECT,
        objects,
        output_filename,
        output_dir,
        libraries,
        library_dirs,
        runtime_library_dirs,
        export_symbols,
        debug,
        extra_preargs,
        extra_postargs,
        build_temp,
        target_lang,
    )
+
775
def link_executable(
    self,
    objects,
    output_progname,
    output_dir=None,
    libraries=None,
    library_dirs=None,
    runtime_library_dirs=None,
    debug=False,
    extra_preargs=None,
    extra_postargs=None,
    target_lang=None,
):
    """Backward-compatible wrapper: link an executable via link(),
    deriving the filename from 'output_progname'.  Executables have no
    export_symbols/build_temp, so None is passed for both."""
    self.link(
        CCompiler.EXECUTABLE,
        objects,
        self.executable_filename(output_progname),
        output_dir,
        libraries,
        library_dirs,
        runtime_library_dirs,
        None,
        debug,
        extra_preargs,
        extra_postargs,
        None,
        target_lang,
    )
+
804
# -- Miscellaneous methods -----------------------------------------
# These are all used by the 'gen_lib_options()' function; there is
# no appropriate default implementation so subclasses should
# implement all of them.

def library_dir_option(self, dir):
    """Return the compiler option that adds 'dir' to the library
    search path.  Abstract; subclasses must override."""
    raise NotImplementedError
+
815
def runtime_library_dir_option(self, dir):
    """Return the compiler option that adds 'dir' to the run-time
    library search path.  Abstract; subclasses must override."""
    raise NotImplementedError
+
821
def library_option(self, lib):
    """Return the compiler option that links 'lib' into the shared
    library or executable.  Abstract; subclasses must override."""
    raise NotImplementedError
+
827
def has_function(  # noqa: C901
    self,
    funcname,
    includes=None,
    include_dirs=None,
    libraries=None,
    library_dirs=None,
):
    """Return whether 'funcname' is available as a linkable symbol on
    the current platform.

    'libraries'/'library_dirs' augment the link step with extra symbol
    sources.  'includes' and 'include_dirs' are deprecated: supplying
    real declarations usually makes detection fail even when the
    symbol links fine.
    """
    # Imported here rather than at module scope so nothing heavy is
    # pulled in before it is actually needed.
    import tempfile

    if includes is None:
        includes = []
    else:
        warnings.warn("includes is deprecated", DeprecationWarning)
    if include_dirs is None:
        include_dirs = []
    else:
        warnings.warn("include_dirs is deprecated", DeprecationWarning)
    if libraries is None:
        libraries = []
    if library_dirs is None:
        library_dirs = []

    # Write a throwaway C program that merely calls funcname().
    fd, fname = tempfile.mkstemp(".c", funcname, text=True)
    with os.fdopen(fd, "w", encoding='utf-8') as f:
        for incl in includes:
            f.write(f"""#include "{incl}"\n""")
        if not includes:
            # Follow autoconf: fake a "char func(void);" prototype.  It
            # matches no compiler builtin, so this stays a true link
            # test without knowing the real argument types (which this
            # interface does not provide).
            f.write(
                f"""\
#ifdef __cplusplus
extern "C"
#endif
char {funcname}(void);
"""
            )
        f.write(
            f"""\
int main (int argc, char **argv) {{
    {funcname}();
    return 0;
}}
"""
        )

    try:
        objects = self.compile([fname], include_dirs=include_dirs)
    except CompileError:
        return False
    finally:
        # The temp source is no longer needed whichever way it went.
        os.remove(fname)

    try:
        self.link_executable(
            objects, "a.out", libraries=libraries, library_dirs=library_dirs
        )
    except (LinkError, TypeError):
        return False
    else:
        os.remove(
            self.executable_filename("a.out", output_dir=self.output_dir or '')
        )
    finally:
        # Object files are removed on both the success and failure path.
        for fn in objects:
            os.remove(fn)
    return True
+
917
def find_library_file(self, dirs, lib, debug=False):
    """Search 'dirs' for a static or shared library 'lib' and return
    its full path, or None if not found.  With 'debug' true, look for
    a debugging variant where that makes sense.  Abstract; subclasses
    must override."""
    raise NotImplementedError
+
926
# -- Filename generation methods -----------------------------------
#
# The default implementations take the Unix/DOS/Windows view of the
# world:
#   * object files: source name with its extension swapped
#     (eg. .c/.cpp -> .o/.obj)
#   * library files (shared or static): library name plugged into a
#     format string, eg. "lib%s.%s" % (lib_name, ".a")
#   * executables: program name plus a (possibly empty) extension,
#     eg. progname + ".exe" on Windows
#
# To reduce redundant code, they rely on attributes (usually class
# attributes) supplied by subclasses:
#   * src_extensions        -- eg. ['.c', '.cpp']
#   * obj_extension         -- eg. '.o' or '.obj'
#   * static_lib_extension  -- eg. '.a' or '.lib'
#   * shared_lib_extension  -- eg. '.so' or '.dll'
#   * static_lib_format     -- eg. 'lib%s.%s' or '%s.%s'
#   * shared_lib_format     -- usually same as static_lib_format
#   * exe_extension         -- eg. '' or '.exe'

def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
    """Return the object filename for each entry of
    'source_filenames'."""
    if output_dir is None:
        output_dir = ''
    return [
        self._make_out_path(output_dir, strip_dir, src)
        for src in source_filenames
    ]
+
968
@property
def out_extensions(self):
    """Map each recognized source extension to the object extension."""
    return {ext: self.obj_extension for ext in self.src_extensions}
+
972
+ def _make_out_path(self, output_dir, strip_dir, src_name):
973
+ return self._make_out_path_exts(
974
+ output_dir, strip_dir, src_name, self.out_extensions
975
+ )
976
+
977
+ @classmethod
978
+ def _make_out_path_exts(cls, output_dir, strip_dir, src_name, extensions):
979
+ r"""
980
+ >>> exts = {'.c': '.o'}
981
+ >>> CCompiler._make_out_path_exts('.', False, '/foo/bar.c', exts).replace('\\', '/')
982
+ './foo/bar.o'
983
+ >>> CCompiler._make_out_path_exts('.', True, '/foo/bar.c', exts).replace('\\', '/')
984
+ './bar.o'
985
+ """
986
+ src = pathlib.PurePath(src_name)
987
+ # Ensure base is relative to honor output_dir (python/cpython#37775).
988
+ base = cls._make_relative(src)
989
+ try:
990
+ new_ext = extensions[src.suffix]
991
+ except LookupError:
992
+ raise UnknownFileError(f"unknown file type '{src.suffix}' (from '{src}')")
993
+ if strip_dir:
994
+ base = pathlib.PurePath(base.name)
995
+ return os.path.join(output_dir, base.with_suffix(new_ext))
996
+
997
+ @staticmethod
998
+ def _make_relative(base: pathlib.Path):
999
+ return base.relative_to(base.anchor)
1000
+
1001
def shared_object_filename(self, basename, strip_dir=False, output_dir=''):
    """Return the shared-object filename for 'basename' (optionally
    stripped of its directory and re-rooted under 'output_dir')."""
    assert output_dir is not None
    if strip_dir:
        basename = os.path.basename(basename)
    return os.path.join(output_dir, basename + self.shared_lib_extension)
+
1007
def executable_filename(self, basename, strip_dir=False, output_dir=''):
    """Return the executable filename for 'basename'.  A None
    exe_extension (platforms without one) appends nothing."""
    assert output_dir is not None
    if strip_dir:
        basename = os.path.basename(basename)
    return os.path.join(output_dir, basename + (self.exe_extension or ''))
+
1013
def library_filename(
    self,
    libname,
    lib_type='static',
    strip_dir=False,
    output_dir='',  # or 'shared'
):
    """Return the filename for library 'libname' of the given
    'lib_type' ('static', 'shared', 'dylib' or 'xcode_stub'),
    optionally re-rooted under 'output_dir'.

    Raises ValueError for an unknown 'lib_type'.
    """
    assert output_dir is not None
    expected = '"static", "shared", "dylib", "xcode_stub"'
    # Membership test against a literal tuple -- the original built it
    # by eval()-ing the display string, which was a needless eval.
    if lib_type not in ("static", "shared", "dylib", "xcode_stub"):
        raise ValueError(f"'lib_type' must be {expected}")
    fmt = getattr(self, lib_type + "_lib_format")
    ext = getattr(self, lib_type + "_lib_extension")

    dir, base = os.path.split(libname)
    filename = fmt % (base, ext)
    if strip_dir:
        dir = ''

    return os.path.join(output_dir, dir, filename)
+
1034
# -- Utility methods -----------------------------------------------

def announce(self, msg, level=1):
    """Log 'msg' at debug level.  'level' is accepted for API
    compatibility and ignored."""
    log.debug(msg)
+
1039
def debug_print(self, msg):
    """Print 'msg' only when distutils debug mode (DISTUTILS_DEBUG) is
    enabled."""
    # Imported lazily so the DEBUG flag is read at call time.
    from distutils.debug import DEBUG

    if DEBUG:
        print(msg)
+
1045
def warn(self, msg):
    """Write a "warning: ..." line for 'msg' to stderr."""
    sys.stderr.write(f"warning: {msg}\n")
+
1048
def execute(self, func, args, msg=None, level=1):
    """Run func(*args), honoring this compiler's dry-run flag
    (delegates to the module-level execute())."""
    execute(func, args, msg, self.dry_run)
+
1051
def spawn(self, cmd, **kwargs):
    """Run external command 'cmd', honoring this compiler's dry-run
    flag (delegates to the module-level spawn())."""
    spawn(cmd, dry_run=self.dry_run, **kwargs)
+
1054
def move_file(self, src, dst):
    """Move 'src' to 'dst', honoring this compiler's dry-run flag
    (delegates to the module-level move_file())."""
    return move_file(src, dst, dry_run=self.dry_run)
+
1057
def mkpath(self, name, mode=0o777):
    """Create directory *name* (and ancestors), honouring dry-run."""
    mkpath(name, mode, dry_run=self.dry_run)
1059
+
1060
+
1061
+ # Map a sys.platform/os.name ('posix', 'nt') to the default compiler
1062
+ # type for that platform. Keys are interpreted as re match
1063
+ # patterns. Order is important; platform mappings are preferred over
1064
+ # OS names.
1065
+ _default_compilers = (
1066
+ # Platform string mappings
1067
+ # on a cygwin built python we can use gcc like an ordinary UNIXish
1068
+ # compiler
1069
+ ('cygwin.*', 'unix'),
1070
+ ('zos', 'zos'),
1071
+ # OS name mappings
1072
+ ('posix', 'unix'),
1073
+ ('nt', 'msvc'),
1074
+ )
1075
+
1076
+
1077
def get_default_compiler(osname=None, platform=None):
    """Determine the default compiler to use for the given platform.

    osname should be one of the standard Python OS names (i.e. the
    ones returned by os.name) and platform the common value
    returned by sys.platform for the platform in question.

    The default values are os.name and sys.platform in case the
    parameters are not given.
    """
    osname = os.name if osname is None else osname
    platform = sys.platform if platform is None else platform
    # Mingw is special: sys.platform reports 'win32', yet the 'mingw32'
    # compiler must be used, so it is checked before the pattern table.
    if is_mingw():
        return 'mingw32'
    matches = (
        compiler
        for pattern, compiler in _default_compilers
        if re.match(pattern, platform) or re.match(pattern, osname)
    )
    # Fall back to the Unix compiler when no pattern matches.
    return next(matches, 'unix')
1103
+
1104
+
1105
+ # Map compiler types to (module_name, class_name) pairs -- ie. where to
1106
+ # find the code that implements an interface to this compiler. (The module
1107
+ # is assumed to be in the 'distutils' package.)
1108
+ compiler_class = {
1109
+ 'unix': ('unixccompiler', 'UnixCCompiler', "standard UNIX-style compiler"),
1110
+ 'msvc': ('_msvccompiler', 'MSVCCompiler', "Microsoft Visual C++"),
1111
+ 'cygwin': (
1112
+ 'cygwinccompiler',
1113
+ 'CygwinCCompiler',
1114
+ "Cygwin port of GNU C Compiler for Win32",
1115
+ ),
1116
+ 'mingw32': (
1117
+ 'cygwinccompiler',
1118
+ 'Mingw32CCompiler',
1119
+ "Mingw32 port of GNU C Compiler for Win32",
1120
+ ),
1121
+ 'bcpp': ('bcppcompiler', 'BCPPCompiler', "Borland C++ Compiler"),
1122
+ 'zos': ('zosccompiler', 'zOSCCompiler', 'IBM XL C/C++ Compilers'),
1123
+ }
1124
+
1125
+
1126
def show_compilers():
    """Print list of available compilers (used by the "--help-compiler"
    options to "build", "build_ext", "build_clib").
    """
    # XXX this "knows" that the compiler option it's describing is
    # "--compiler", which just happens to be the case for the three
    # commands that use it.
    from distutils.fancy_getopt import FancyGetopt

    entries = [
        ("compiler=" + name, None, compiler_class[name][2]) for name in compiler_class
    ]
    entries.sort()
    FancyGetopt(entries).print_help("List of available compilers:")
1141
+
1142
+
1143
def new_compiler(plat=None, compiler=None, verbose=False, dry_run=False, force=False):
    """Generate an instance of some CCompiler subclass for the supplied
    platform/compiler combination. 'plat' defaults to 'os.name'
    (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
    for that platform. Currently only 'posix' and 'nt' are supported, and
    the default compilers are "traditional Unix interface" (UnixCCompiler
    class) and Visual C++ (MSVCCompiler class). Note that it's perfectly
    possible to ask for a Unix compiler object under Windows, and a
    Microsoft compiler object under Unix -- if you supply a value for
    'compiler', 'plat' is ignored.
    """
    if plat is None:
        plat = os.name

    try:
        if compiler is None:
            compiler = get_default_compiler(plat)
        module_name, class_name, long_description = compiler_class[compiler]
    except KeyError:
        msg = f"don't know how to compile C/C++ code on platform '{plat}'"
        if compiler is not None:
            msg += f" with '{compiler}' compiler"
        raise DistutilsPlatformError(msg)

    # The implementation modules live inside the 'distutils' package.
    module_name = "distutils." + module_name
    try:
        __import__(module_name)
        klass = vars(sys.modules[module_name])[class_name]
    except ImportError:
        raise DistutilsModuleError(
            f"can't compile C/C++ code: unable to load module '{module_name}'"
        )
    except KeyError:
        raise DistutilsModuleError(
            f"can't compile C/C++ code: unable to find class '{class_name}' "
            f"in module '{module_name}'"
        )

    # XXX The None is necessary to preserve backwards compatibility
    # with classes that expect verbose to be the first positional
    # argument.
    return klass(None, dry_run, force)
1187
+
1188
+
1189
def gen_preprocess_options(macros, include_dirs):
    """Generate C pre-processor options (-D, -U, -I) as used by at least
    two types of compilers: the typical Unix compiler and Visual C++.

    'macros' is a list of 1- or 2-tuples: (name,) means undefine (-U)
    macro 'name', and (name, value) means define (-D) macro 'name' to
    'value' (a value of None defines the macro without a value).
    'include_dirs' is a list of directory names to be added to the header
    search path (-I).  Returns a list of command-line options suitable for
    either Unix compilers or Visual C++.
    """
    # XXX redundant -D/-U mentions of the same macro are deliberately not
    # weeded out here; compilers honour the last mention anyway, and any
    # de-duplication would belong in CCompiler since the data structures
    # are shared by all compiler classes.
    pp_opts = []
    for macro in macros:
        if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2):
            raise TypeError(
                f"bad macro definition '{macro}': "
                "each element of 'macros' list must be a 1- or 2-tuple"
            )
        if len(macro) == 1:
            # (name,) -> undefine
            pp_opts.append(f"-U{macro[0]}")
        elif macro[1] is None:
            # (name, None) -> define without an explicit value
            pp_opts.append(f"-D{macro[0]}")
        else:
            # (name, value); no quoting needed -- the command is spawned
            # without a shell.
            pp_opts.append("-D{}={}".format(*macro))

    pp_opts.extend(f"-I{dir}" for dir in include_dirs)
    return pp_opts
1231
+
1232
+
1233
def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
    """Generate linker options for searching library directories and
    linking with specific libraries.  'libraries' and 'library_dirs' are,
    respectively, lists of library names (not filenames!) and search
    directories.  Returns a list of command-line options suitable for use
    with some compiler (depending on the two format strings passed in).
    """
    lib_opts = [compiler.library_dir_option(dir) for dir in library_dirs]

    for dir in runtime_library_dirs:
        lib_opts.extend(always_iterable(compiler.runtime_library_dir_option(dir)))

    # XXX redundant library mentions must NOT be removed: "-lfoo -lbar
    # -lfoo" can be required to resolve all symbols.  Interleavings like
    # "-lfoo obj.o -lbar" are not supported (and hopefully never needed).
    for lib in libraries:
        lib_dir, lib_name = os.path.split(lib)
        if not lib_dir:
            # Bare library name: let the compiler resolve it.
            lib_opts.append(compiler.library_option(lib))
            continue
        # Library given with an explicit directory: locate the file.
        lib_file = compiler.find_library_file([lib_dir], lib_name)
        if lib_file:
            lib_opts.append(lib_file)
        else:
            compiler.warn(f"no library file corresponding to '{lib}' found (skipping)")
    return lib_opts
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/cygwinccompiler.py ADDED
@@ -0,0 +1,339 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.cygwinccompiler
2
+
3
+ Provides the CygwinCCompiler class, a subclass of UnixCCompiler that
4
+ handles the Cygwin port of the GNU C compiler to Windows. It also contains
5
+ the Mingw32CCompiler class which handles the mingw32 port of GCC (same as
6
+ cygwin in no-cygwin mode).
7
+ """
8
+
9
+ import copy
10
+ import os
11
+ import pathlib
12
+ import shlex
13
+ import sys
14
+ import warnings
15
+ from subprocess import check_output
16
+
17
+ from .errors import (
18
+ CCompilerError,
19
+ CompileError,
20
+ DistutilsExecError,
21
+ DistutilsPlatformError,
22
+ )
23
+ from .file_util import write_file
24
+ from .sysconfig import get_config_vars
25
+ from .unixccompiler import UnixCCompiler
26
+ from .version import LooseVersion, suppress_known_deprecation
27
+
28
+
29
def get_msvcr():
    """No longer needed, but kept for backward compatibility."""
    return list()
32
+
33
+
34
+ _runtime_library_dirs_msg = (
35
+ "Unable to set runtime library search path on Windows, "
36
+ "usually indicated by `runtime_library_dirs` parameter to Extension"
37
+ )
38
+
39
+
40
class CygwinCCompiler(UnixCCompiler):
    """Handles the Cygwin port of the GNU C compiler to Windows."""

    compiler_type = 'cygwin'
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".dll.a"
    dylib_lib_extension = ".dll"
    static_lib_format = "lib%s%s"
    shared_lib_format = "lib%s%s"
    dylib_lib_format = "cyg%s%s"
    exe_extension = ".exe"

    def __init__(self, verbose=False, dry_run=False, force=False):
        super().__init__(verbose, dry_run, force)

        # Warn early when pyconfig.h looks incompatible with GCC/Clang.
        status, details = check_config_h()
        self.debug_print(f"Python's GCC status: {status} (details: {details})")
        if status is not CONFIG_H_OK:
            self.warn(
                "Python's pyconfig.h doesn't seem to support your compiler. "
                f"Reason: {details}. "
                "Compiling may fail because of undefined preprocessor macros."
            )

        self.cc, self.cxx = get_config_vars('CC', 'CXX')

        # Environment variables take precedence, so a MINGW toolchain can
        # be selected even for an MSVC-built Python.
        self.cc = os.environ.get('CC', self.cc or 'gcc')
        self.cxx = os.environ.get('CXX', self.cxx or 'g++')

        self.linker_dll = self.cc
        self.linker_dll_cxx = self.cxx
        shared_option = "-shared"

        self.set_executables(
            compiler=f'{self.cc} -mcygwin -O -Wall',
            compiler_so=f'{self.cc} -mcygwin -mdll -O -Wall',
            compiler_cxx=f'{self.cxx} -mcygwin -O -Wall',
            compiler_so_cxx=f'{self.cxx} -mcygwin -mdll -O -Wall',
            linker_exe=f'{self.cc} -mcygwin',
            linker_so=f'{self.linker_dll} -mcygwin {shared_option}',
            linker_exe_cxx=f'{self.cxx} -mcygwin',
            linker_so_cxx=f'{self.linker_dll_cxx} -mcygwin {shared_option}',
        )

        self.dll_libraries = get_msvcr()

    @property
    def gcc_version(self):
        # Older numpy depended on this existing to check for ancient
        # gcc versions. This doesn't make much sense with clang etc so
        # just hardcode to something recent.
        # https://github.com/numpy/numpy/pull/20333
        warnings.warn(
            "gcc_version attribute of CygwinCCompiler is deprecated. "
            "Instead of returning actual gcc version a fixed value 11.2.0 is returned.",
            DeprecationWarning,
            stacklevel=2,
        )
        with suppress_known_deprecation():
            return LooseVersion("11.2.0")

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile *src* to *obj*, delegating resource files to windres."""
        if ext in ('.rc', '.res'):
            # gcc cannot consume .rc/.res directly; windres converts them
            # to object files first.
            try:
                self.spawn(["windres", "-i", src, "-o", obj])
            except DistutilsExecError as msg:
                raise CompileError(msg)
            return
        # Pick the C or C++ driver based on the detected source language.
        driver = (
            self.compiler_so_cxx
            if self.detect_language(src) == 'c++'
            else self.compiler_so
        )
        try:
            self.spawn(driver + cc_args + [src, '-o', obj] + extra_postargs)
        except DistutilsExecError as msg:
            raise CompileError(msg)

    def link(
        self,
        target_desc,
        objects,
        output_filename,
        output_dir=None,
        libraries=None,
        library_dirs=None,
        runtime_library_dirs=None,
        export_symbols=None,
        debug=False,
        extra_preargs=None,
        extra_postargs=None,
        build_temp=None,
        target_lang=None,
    ):
        """Link the objects."""
        # Work on copies so the caller's lists are never mutated.
        extra_preargs = copy.copy(extra_preargs or [])
        libraries = copy.copy(libraries or [])
        objects = copy.copy(objects or [])

        if runtime_library_dirs:
            self.warn(_runtime_library_dirs_msg)

        # Additional libraries queued on the instance.
        libraries.extend(self.dll_libraries)

        # Export symbols via a generated .def file.  For executables this
        # only works when gcc/ld is the linker.
        if (export_symbols is not None) and (
            target_desc != self.EXECUTABLE or self.linker_dll == "gcc"
        ):
            # (The linker skips work when the output is up-to-date, so no
            # attempt is made here to detect whether the .def file is
            # actually needed -- that would require duplicating parts of
            # UnixCCompiler.)

            # Place the helper file next to the object files; build_temp
            # is not reliable for this.
            temp_dir = os.path.dirname(objects[0])
            # Base the helper file's name on the DLL's name.
            dll_name, dll_extension = os.path.splitext(
                os.path.basename(output_filename)
            )
            def_file = os.path.join(temp_dir, dll_name + ".def")

            # Generate the .def file contents.
            contents = [f"LIBRARY {os.path.basename(output_filename)}", "EXPORTS"]
            contents.extend(export_symbols)
            self.execute(write_file, (def_file, contents), f"writing {def_file}")

            # gcc/ld accepts the .def file as if it were another object
            # file on the command line.
            objects.append(def_file)

        # Without an explicit debug request, let ld strip the output:
        # symbols can inflate a typical extension by hundreds of KiB.
        if not debug:
            extra_preargs.append("-s")

        UnixCCompiler.link(
            self,
            target_desc,
            objects,
            output_filename,
            output_dir,
            libraries,
            library_dirs,
            runtime_library_dirs,
            None,  # export_symbols handled via the .def file above
            debug,
            extra_preargs,
            extra_postargs,
            build_temp,
            target_lang,
        )

    def runtime_library_dir_option(self, dir):
        # cygwin doesn't support rpath. While in theory we could error
        # out like MSVC does, code might expect it to work like on Unix, so
        # just warn and hope for the best.
        self.warn(_runtime_library_dirs_msg)
        return []

    # -- Miscellaneous methods -----------------------------------------

    def _make_out_path(self, output_dir, strip_dir, src_name):
        # normcase ensures '.rc' is recognised even when spelled '.RC'.
        return super()._make_out_path(output_dir, strip_dir, os.path.normcase(src_name))

    @property
    def out_extensions(self):
        """
        Add support for rc and res files.
        """
        mapping = dict(super().out_extensions)
        for ext in ('.res', '.rc'):
            mapping[ext] = ext + self.obj_extension
        return mapping
240
+
241
+
242
# Same as the Cygwin compiler, but configured with mingw-style flags.
class Mingw32CCompiler(CygwinCCompiler):
    """Handles the Mingw32 port of the GNU C compiler to Windows."""

    compiler_type = 'mingw32'

    def __init__(self, verbose=False, dry_run=False, force=False):
        super().__init__(verbose, dry_run, force)

        if is_cygwincc(self.cc):
            raise CCompilerError('Cygwin gcc cannot be used with --compiler=mingw32')

        shared_option = "-shared"
        self.set_executables(
            compiler=f'{self.cc} -O -Wall',
            compiler_so=f'{self.cc} -shared -O -Wall',
            compiler_so_cxx=f'{self.cxx} -shared -O -Wall',
            compiler_cxx=f'{self.cxx} -O -Wall',
            linker_exe=f'{self.cc}',
            linker_so=f'{self.linker_dll} {shared_option}',
            linker_exe_cxx=f'{self.cxx}',
            linker_so_cxx=f'{self.linker_dll_cxx} {shared_option}',
        )

    def runtime_library_dir_option(self, dir):
        # rpath is meaningless with this toolchain: fail loudly instead
        # of warning like the Cygwin base class does.
        raise DistutilsPlatformError(_runtime_library_dirs_msg)
269
+
270
+
271
+ # Because these compilers aren't configured in Python's pyconfig.h file by
272
+ # default, we should at least warn the user if he is using an unmodified
273
+ # version.
274
+
275
+ CONFIG_H_OK = "ok"
276
+ CONFIG_H_NOTOK = "not ok"
277
+ CONFIG_H_UNCERTAIN = "uncertain"
278
+
279
+
280
def check_config_h():
    """Check if the current Python installation appears amenable to building
    extensions with GCC.

    Returns a tuple (status, details), where 'status' is one of the following
    constants:

    - CONFIG_H_OK: all is well, go ahead and compile
    - CONFIG_H_NOTOK: doesn't look good
    - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h

    'details' is a human-readable string explaining the situation.

    Note there are two ways to conclude "OK": either 'sys.version' contains
    the string "GCC" (implying that this Python was built with GCC), or the
    installed "pyconfig.h" contains the string "__GNUC__".
    """
    # XXX since this function also checks sys.version, it's not strictly a
    # "pyconfig.h" check -- should probably be renamed...

    from distutils import sysconfig

    # A Python built by GCC (or Clang, which is compatible here) says so
    # in sys.version, and then pyconfig.h can be assumed to be fine.
    for marker in ('GCC', 'Clang'):
        if marker in sys.version:
            return CONFIG_H_OK, f"sys.version mentions {marker!r}"

    # Otherwise look for __GNUC__ in pyconfig.h itself.
    fn = sysconfig.get_config_h_filename()
    try:
        config_h = pathlib.Path(fn).read_text(encoding='utf-8')
    except OSError as exc:
        return (CONFIG_H_UNCERTAIN, f"couldn't read '{fn}': {exc.strerror}")

    substring = '__GNUC__'
    if substring in config_h:
        code = CONFIG_H_OK
        mention_inflected = 'mentions'
    else:
        code = CONFIG_H_NOTOK
        mention_inflected = 'does not mention'
    return code, f"{fn!r} {mention_inflected} {substring!r}"
327
+
328
+
329
def is_cygwincc(cc):
    """Try to determine if the compiler that would be used is from cygwin."""
    # Ask the compiler for its target triplet; cygwin toolchains end
    # theirs with "cygwin".
    triplet = check_output(shlex.split(cc) + ['-dumpmachine'])
    return triplet.strip().endswith(b'cygwin')
333
+
334
+
335
+ get_versions = None
336
+ """
337
+ A stand-in for the previous get_versions() function to prevent failures
338
+ when monkeypatched. See pypa/setuptools#2969.
339
+ """
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/debug.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
import os

# If DISTUTILS_DEBUG is anything other than the empty string, we run in
# debug mode.
DEBUG = os.getenv('DISTUTILS_DEBUG')
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/dir_util.py ADDED
@@ -0,0 +1,244 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.dir_util
2
+
3
+ Utility functions for manipulating directories and directory trees."""
4
+
5
+ import functools
6
+ import itertools
7
+ import os
8
+ import pathlib
9
+
10
+ from . import file_util
11
+ from ._log import log
12
+ from .errors import DistutilsFileError, DistutilsInternalError
13
+
14
+
15
class SkipRepeatAbsolutePaths(set):
    """
    Cache for mkpath.

    In addition to cheapening redundant calls, eliminates redundant
    "creating /foo/bar/baz" messages in dry-run mode.
    """

    def __init__(self):
        # Remember the most recent instance so ``clear`` can reach it.
        SkipRepeatAbsolutePaths.instance = self

    @classmethod
    def clear(cls):
        # Empty the singleton's underlying set (bypass this override).
        super(cls, cls.instance).clear()

    def wrap(self, func):
        """Decorate *func* so repeat calls on one absolute path are no-ops."""

        @functools.wraps(func)
        def wrapper(path, *args, **kwargs):
            key = path.absolute()
            if key in self:
                return None
            result = func(path, *args, **kwargs)
            self.add(key)
            return result

        return wrapper
40
+
41
+
42
+ # Python 3.8 compatibility
43
+ wrapper = SkipRepeatAbsolutePaths().wrap
44
+
45
+
46
@functools.singledispatch
@wrapper
def mkpath(name: pathlib.Path, mode=0o777, verbose=True, dry_run=False) -> None:
    """Create a directory and any missing ancestor directories.

    If the directory already exists (or if 'name' is the empty string, which
    means the current directory, which of course exists), then do nothing.
    Raise DistutilsFileError if unable to create some directory along the way
    (eg. some sub-path exists, but is a file rather than a directory).
    If 'verbose' is true, log the directory created.
    """
    if verbose and not name.is_dir():
        log.info("creating %s", name)

    try:
        if not dry_run:
            name.mkdir(mode=mode, parents=True, exist_ok=True)
    except OSError as exc:
        raise DistutilsFileError(f"could not create '{name}': {exc.args[-1]}")


@mkpath.register
def _(name: str, *args, **kwargs):
    # Strings are promoted to Path objects and re-dispatched.
    return mkpath(pathlib.Path(name), *args, **kwargs)


@mkpath.register
def _(name: None, *args, **kwargs):
    """
    Detect a common bug -- name is None.
    """
    raise DistutilsInternalError(f"mkpath: 'name' must be a string (got {name!r})")
77
+
78
+
79
def create_tree(base_dir, files, mode=0o777, verbose=True, dry_run=False):
    """Create all the empty directories under 'base_dir' needed to put 'files'
    there.

    'base_dir' is just the name of a directory which doesn't necessarily
    exist yet; 'files' is a list of filenames to be interpreted relative to
    'base_dir'. 'base_dir' + the directory portion of every file in 'files'
    will be created if it doesn't already exist. 'mode', 'verbose' and
    'dry_run' flags are as for 'mkpath()'.
    """
    # Collect the unique directories to create, then make them in order.
    need_dir = {os.path.join(base_dir, os.path.dirname(file)) for file in files}
    for dir in sorted(need_dir):
        mkpath(dir, mode, verbose=verbose, dry_run=dry_run)
95
+
96
+
97
def copy_tree(
    src,
    dst,
    preserve_mode=True,
    preserve_times=True,
    preserve_symlinks=False,
    update=False,
    verbose=True,
    dry_run=False,
):
    """Copy an entire directory tree 'src' to a new location 'dst'.

    Both 'src' and 'dst' must be directory names. If 'src' is not a
    directory, raise DistutilsFileError. If 'dst' does not exist, it is
    created with 'mkpath()'. The end result of the copy is that every
    file in 'src' is copied to 'dst', and directories under 'src' are
    recursively copied to 'dst'. Return the list of files that were
    copied or might have been copied, using their output name. The
    return value is unaffected by 'update' or 'dry_run': it is simply
    the list of all files under 'src', with the names changed to be
    under 'dst'.

    'preserve_mode' and 'preserve_times' are the same as for
    'copy_file'; note that they only apply to regular files, not to
    directories. If 'preserve_symlinks' is true, symlinks will be
    copied as symlinks (on platforms that support them!); otherwise
    (the default), the destination of the symlink will be copied.
    'update' and 'verbose' are the same as for 'copy_file'.
    """
    if not dry_run and not os.path.isdir(src):
        raise DistutilsFileError(f"cannot copy tree '{src}': not a directory")

    try:
        names = os.listdir(src)
    except OSError as e:
        # In dry-run mode a missing source tree is tolerated.
        if not dry_run:
            raise DistutilsFileError(f"error listing files in '{src}': {e.strerror}")
        names = []

    if not dry_run:
        mkpath(dst, verbose=verbose)

    # Bind all the per-tree parameters once; _copy_one handles one entry.
    copy_one = functools.partial(
        _copy_one,
        src=src,
        dst=dst,
        preserve_symlinks=preserve_symlinks,
        verbose=verbose,
        dry_run=dry_run,
        preserve_mode=preserve_mode,
        preserve_times=preserve_times,
        update=update,
    )
    return list(itertools.chain.from_iterable(map(copy_one, names)))
151
+
152
+
153
def _copy_one(
    name,
    *,
    src,
    dst,
    preserve_symlinks,
    verbose,
    dry_run,
    preserve_mode,
    preserve_times,
    update,
):
    """Copy one directory entry from *src* to *dst*; yield the output name(s)."""
    if name.startswith('.nfs'):
        # Skip NFS rename artifacts.
        return

    src_name = os.path.join(src, name)
    dst_name = os.path.join(dst, name)

    if preserve_symlinks and os.path.islink(src_name):
        # Re-create the symlink rather than copying its target.
        link_dest = os.readlink(src_name)
        if verbose >= 1:
            log.info("linking %s -> %s", dst_name, link_dest)
        if not dry_run:
            os.symlink(link_dest, dst_name)
        yield dst_name
    elif os.path.isdir(src_name):
        # Recurse into subdirectories.
        yield from copy_tree(
            src_name,
            dst_name,
            preserve_mode,
            preserve_times,
            preserve_symlinks,
            update,
            verbose=verbose,
            dry_run=dry_run,
        )
    else:
        file_util.copy_file(
            src_name,
            dst_name,
            preserve_mode,
            preserve_times,
            update,
            verbose=verbose,
            dry_run=dry_run,
        )
        yield dst_name
202
+
203
+
204
def _build_cmdtuple(path, cmdtuples):
    """Helper for remove_tree(): append (callable, arg) pairs, leaves first."""
    for entry in os.listdir(path):
        full = os.path.join(path, entry)
        if os.path.isdir(full) and not os.path.islink(full):
            # Depth-first so children are removed before their parent.
            _build_cmdtuple(full, cmdtuples)
        else:
            cmdtuples.append((os.remove, full))
    cmdtuples.append((os.rmdir, path))
213
+
214
+
215
def remove_tree(directory, verbose=True, dry_run=False):
    """Recursively remove an entire directory tree.

    Any errors are ignored (apart from being reported to stdout if 'verbose'
    is true).
    """
    if verbose >= 1:
        log.info("removing '%s' (and everything under it)", directory)
    if dry_run:
        return
    cmdtuples = []
    _build_cmdtuple(directory, cmdtuples)
    for action, target in cmdtuples:
        try:
            action(target)
            # Clear the mkpath cache so removed paths can be re-created.
            SkipRepeatAbsolutePaths.clear()
        except OSError as exc:
            log.warning("error removing %s: %s", directory, exc)
234
+
235
+
236
def ensure_relative(path):
    """Take the full path 'path', and make it a relative path.

    This is useful to make 'path' the second argument to os.path.join().
    """
    drive, tail = os.path.splitdrive(path)
    if tail.startswith(os.sep):
        # Drop the leading separator, re-attaching any drive letter.
        tail = drive + tail[1:]
    return tail
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/dist.py ADDED
@@ -0,0 +1,1317 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.dist
2
+
3
+ Provides the Distribution class, which represents the module distribution
4
+ being built/installed/distributed.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import contextlib
10
+ import logging
11
+ import os
12
+ import pathlib
13
+ import re
14
+ import sys
15
+ import warnings
16
+ from collections.abc import Iterable
17
+ from email import message_from_file
18
+ from typing import TYPE_CHECKING, Literal, TypeVar, overload
19
+
20
+ from packaging.utils import canonicalize_name, canonicalize_version
21
+
22
+ from ._log import log
23
+ from .debug import DEBUG
24
+ from .errors import (
25
+ DistutilsArgError,
26
+ DistutilsClassError,
27
+ DistutilsModuleError,
28
+ DistutilsOptionError,
29
+ )
30
+ from .fancy_getopt import FancyGetopt, translate_longopt
31
+ from .util import check_environ, rfc822_escape, strtobool
32
+
33
+ if TYPE_CHECKING:
34
+ # type-only import because of mutual dependence between these modules
35
+ from .cmd import Command
36
+
37
+ _CommandT = TypeVar("_CommandT", bound="Command")
38
+
39
+ # Regex to define acceptable Distutils command names. This is not *quite*
40
+ # the same as a Python NAME -- I don't allow leading underscores. The fact
41
+ # that they're very similar is no coincidence; the default naming scheme is
42
+ # to look for a Python module named after the command.
43
+ command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
44
+
45
+
46
+ def _ensure_list(value, fieldname):
47
+ if isinstance(value, str):
48
+ # a string containing comma separated values is okay. It will
49
+ # be converted to a list by Distribution.finalize_options().
50
+ pass
51
+ elif not isinstance(value, list):
52
+ # passing a tuple or an iterator perhaps, warn and convert
53
+ typename = type(value).__name__
54
+ msg = "Warning: '{fieldname}' should be a list, got type '{typename}'"
55
+ msg = msg.format(**locals())
56
+ log.warning(msg)
57
+ value = list(value)
58
+ return value
59
+
60
+
61
+ class Distribution:
62
+ """The core of the Distutils. Most of the work hiding behind 'setup'
63
+ is really done within a Distribution instance, which farms the work out
64
+ to the Distutils commands specified on the command line.
65
+
66
+ Setup scripts will almost never instantiate Distribution directly,
67
+ unless the 'setup()' function is totally inadequate to their needs.
68
+ However, it is conceivable that a setup script might wish to subclass
69
+ Distribution for some specialized purpose, and then pass the subclass
70
+ to 'setup()' as the 'distclass' keyword argument. If so, it is
71
+ necessary to respect the expectations that 'setup' has of Distribution.
72
+ See the code for 'setup()', in core.py, for details.
73
+ """
74
+
75
+ # 'global_options' describes the command-line options that may be
76
+ # supplied to the setup script prior to any actual commands.
77
+ # Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of
78
+ # these global options. This list should be kept to a bare minimum,
79
+ # since every global option is also valid as a command option -- and we
80
+ # don't want to pollute the commands with too many options that they
81
+ # have minimal control over.
82
+ # The fourth entry for verbose means that it can be repeated.
83
+ global_options = [
84
+ ('verbose', 'v', "run verbosely (default)", 1),
85
+ ('quiet', 'q', "run quietly (turns verbosity off)"),
86
+ ('dry-run', 'n', "don't actually do anything"),
87
+ ('help', 'h', "show detailed help message"),
88
+ ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'),
89
+ ]
90
+
91
+ # 'common_usage' is a short (2-3 line) string describing the common
92
+ # usage of the setup script.
93
+ common_usage = """\
94
+ Common commands: (see '--help-commands' for more)
95
+
96
+ setup.py build will build the package underneath 'build/'
97
+ setup.py install will install the package
98
+ """
99
+
100
+ # options that are not propagated to the commands
101
+ display_options = [
102
+ ('help-commands', None, "list all available commands"),
103
+ ('name', None, "print package name"),
104
+ ('version', 'V', "print package version"),
105
+ ('fullname', None, "print <package name>-<version>"),
106
+ ('author', None, "print the author's name"),
107
+ ('author-email', None, "print the author's email address"),
108
+ ('maintainer', None, "print the maintainer's name"),
109
+ ('maintainer-email', None, "print the maintainer's email address"),
110
+ ('contact', None, "print the maintainer's name if known, else the author's"),
111
+ (
112
+ 'contact-email',
113
+ None,
114
+ "print the maintainer's email address if known, else the author's",
115
+ ),
116
+ ('url', None, "print the URL for this package"),
117
+ ('license', None, "print the license of the package"),
118
+ ('licence', None, "alias for --license"),
119
+ ('description', None, "print the package description"),
120
+ ('long-description', None, "print the long package description"),
121
+ ('platforms', None, "print the list of platforms"),
122
+ ('classifiers', None, "print the list of classifiers"),
123
+ ('keywords', None, "print the list of keywords"),
124
+ ('provides', None, "print the list of packages/modules provided"),
125
+ ('requires', None, "print the list of packages/modules required"),
126
+ ('obsoletes', None, "print the list of packages/modules made obsolete"),
127
+ ]
128
+ display_option_names = [translate_longopt(x[0]) for x in display_options]
129
+
130
+ # negative options are options that exclude other options
131
+ negative_opt = {'quiet': 'verbose'}
132
+
133
+ # -- Creation/initialization methods -------------------------------
134
+
135
def __init__(self, attrs=None):  # noqa: C901
    """Construct a new Distribution instance: initialize all the
    attributes of a Distribution, and then use 'attrs' (a dictionary
    mapping attribute names to values) to assign some of those
    attributes their "real" values.  (Any attributes not mentioned in
    'attrs' will be assigned to some null value: 0, None, an empty list
    or dictionary, etc.)  Most importantly, initialize the
    'command_obj' attribute to the empty dictionary; this will be
    filled in with real command objects by 'parse_command_line()'.
    """

    # Default values for our command-line options
    self.verbose = True
    self.dry_run = False
    self.help = False
    # Every display option (--name, --version, ...) starts out off.
    for attr in self.display_option_names:
        setattr(self, attr, False)

    # Store the distribution meta-data (name, version, author, and so
    # forth) in a separate object -- we're getting to have enough
    # information here (and enough command-line options) that it's
    # worth it.  Also delegate 'get_XXX()' methods to the 'metadata'
    # object in a sneaky and underhanded (but efficient!) way.
    # NOTE(review): DistributionMetadata is presumably defined later in
    # this module (as in stock distutils) -- not visible in this chunk.
    self.metadata = DistributionMetadata()
    for basename in self.metadata._METHOD_BASENAMES:
        method_name = "get_" + basename
        setattr(self, method_name, getattr(self.metadata, method_name))

    # 'cmdclass' maps command names to class objects, so we
    # can 1) quickly figure out which class to instantiate when
    # we need to create a new command object, and 2) have a way
    # for the setup script to override command classes
    self.cmdclass = {}

    # 'command_packages' is a list of packages in which commands
    # are searched for.  The factory for command 'foo' is expected
    # to be named 'foo' in the module 'foo' in one of the packages
    # named here.  This list is searched from the left; an error
    # is raised if no named package provides the command being
    # searched for.  (Always access using get_command_packages().)
    self.command_packages = None

    # 'script_name' and 'script_args' are usually set to sys.argv[0]
    # and sys.argv[1:], but they can be overridden when the caller is
    # not necessarily a setup script run from the command-line.
    self.script_name = None
    self.script_args: list[str] | None = None

    # 'command_options' is where we store command options between
    # parsing them (from config files, the command-line, etc.) and when
    # they are actually needed -- ie. when the command in question is
    # instantiated.  It is a dictionary of dictionaries of 2-tuples:
    #   command_options = { command_name : { option : (source, value) } }
    self.command_options = {}

    # 'dist_files' is the list of (command, pyversion, file) that
    # have been created by any dist commands run so far.  This is
    # filled regardless of whether the run is dry or not.  pyversion
    # gives sysconfig.get_python_version() if the dist file is
    # specific to a Python version, 'any' if it is good for all
    # Python versions on the target platform, and '' for a source
    # file.  pyversion should not be used to specify minimum or
    # maximum required Python versions; use the metainfo for that
    # instead.
    self.dist_files = []

    # These options are really the business of various commands, rather
    # than of the Distribution itself.  We provide aliases for them in
    # Distribution as a convenience to the developer.
    self.packages = None
    self.package_data = {}
    self.package_dir = None
    self.py_modules = None
    self.libraries = None
    self.headers = None
    self.ext_modules = None
    self.ext_package = None
    self.include_dirs = None
    self.extra_path = None
    self.scripts = None
    self.data_files = None
    self.password = ''

    # And now initialize bookkeeping stuff that can't be supplied by
    # the caller at all.  'command_obj' maps command names to
    # Command instances -- that's how we enforce that every command
    # class is a singleton.
    self.command_obj = {}

    # 'have_run' maps command names to boolean values; it keeps track
    # of whether we have actually run a particular command, to make it
    # cheap to "run" a command whenever we think we might need to -- if
    # it's already been done, no need for expensive filesystem
    # operations, we just check the 'have_run' dictionary and carry on.
    # It's only safe to query 'have_run' for a command class that has
    # been instantiated -- a false value will be inserted when the
    # command object is created, and replaced with a true value when
    # the command is successfully run.  Thus it's probably best to use
    # '.get()' rather than a straight lookup.
    self.have_run = {}

    # Now we'll use the attrs dictionary (ultimately, keyword args from
    # the setup script) to possibly override any or all of these
    # distribution options.

    if attrs:
        # Pull out the set of command options and work on them
        # specifically.  Note that this order guarantees that aliased
        # command options will override any supplied redundantly
        # through the general options dictionary.
        options = attrs.get('options')
        if options is not None:
            del attrs['options']
            for command, cmd_options in options.items():
                opt_dict = self.get_option_dict(command)
                for opt, val in cmd_options.items():
                    # Record where the value came from alongside it.
                    opt_dict[opt] = ("setup script", val)

        # Historical British spelling: accept 'licence' but warn.
        if 'licence' in attrs:
            attrs['license'] = attrs['licence']
            del attrs['licence']
            msg = "'licence' distribution option is deprecated; use 'license'"
            warnings.warn(msg)

        # Now work on the rest of the attributes.  Any attribute that's
        # not already defined is invalid!
        for key, val in attrs.items():
            if hasattr(self.metadata, "set_" + key):
                getattr(self.metadata, "set_" + key)(val)
            elif hasattr(self.metadata, key):
                setattr(self.metadata, key, val)
            elif hasattr(self, key):
                setattr(self, key, val)
            else:
                msg = f"Unknown distribution option: {key!r}"
                warnings.warn(msg)

    # no-user-cfg is handled before other command line args
    # because other args override the config files, and this
    # one is needed before we can load the config files.
    # If attrs['script_args'] wasn't passed, assume false.
    #
    # This also make sure we just look at the global options
    self.want_user_cfg = True

    if self.script_args is not None:
        # Coerce any possible iterable from attrs into a list
        self.script_args = list(self.script_args)
        # Only scan the leading '-'/'--' arguments (the global options);
        # the first non-option token is a command name and ends the scan.
        for arg in self.script_args:
            if not arg.startswith('-'):
                break
            if arg == '--no-user-cfg':
                self.want_user_cfg = False
                break

    self.finalize_options()
291
+
292
def get_option_dict(self, command):
    """Get the option dictionary for a given command.

    If that command's option dictionary hasn't been created yet, then
    create it and return the new dictionary; otherwise, return the
    existing option dictionary.
    """
    # dict.setdefault does the get-or-create in one step and avoids
    # shadowing the builtin ``dict``, which the previous code did.
    return self.command_options.setdefault(command, {})
302
+
303
def dump_option_dicts(self, header=None, commands=None, indent=""):
    """Announce the per-command option dictionaries, for debugging.

    If 'commands' is None, every known command is dumped, in sorted
    order.  An optional 'header' line is announced first, and all
    subsequent output is indented two extra spaces beneath it.
    """
    from pprint import pformat

    selected = sorted(self.command_options.keys()) if commands is None else commands

    if header is not None:
        self.announce(indent + header)
        indent += "  "

    if not selected:
        self.announce(indent + "no commands known yet")
        return

    for cmd_name in selected:
        options = self.command_options.get(cmd_name)
        if options is None:
            self.announce(indent + f"no option dict for '{cmd_name}' command")
            continue
        self.announce(indent + f"option dict for '{cmd_name}' command:")
        for line in pformat(options).split('\n'):
            self.announce(indent + "  " + line)
326
+
327
+ # -- Config file finding/parsing methods ---------------------------
328
+
329
def find_config_files(self):
    """Find as many configuration files as should be processed for this
    platform, and return a list of filenames in the order in which they
    should be parsed.  The filenames returned are guaranteed to exist
    (modulo nasty race conditions).

    There are multiple possible config files:
    - distutils.cfg in the Distutils installation directory (i.e.
      where the top-level Distutils __inst__.py file lives)
    - a file in the user's home directory named .pydistutils.cfg
      on Unix and pydistutils.cfg on Windows/Mac; may be disabled
      with the ``--no-user-cfg`` option
    - setup.cfg in the current directory
    - a file named by an environment variable
    """
    check_environ()
    # Keep candidate order from _gen_paths(); drop anything that is
    # not an existing regular file.
    existing = (path for path in self._gen_paths() if os.path.isfile(path))
    files = [str(path) for path in existing]

    if DEBUG:
        self.announce("using config files: {}".format(', '.join(files)))

    return files
351
+
352
def _gen_paths(self):
    """Yield candidate config-file paths, unchecked for existence.

    Order matters: system-wide file first, then the per-user file
    (when enabled), then the local setup.cfg, then any file named by
    the DIST_EXTRA_CONFIG environment variable.
    """
    # The system-wide Distutils config file, next to the package itself.
    package_dir = pathlib.Path(sys.modules['distutils'].__file__).parent
    yield package_dir / "distutils.cfg"

    # The per-user config file: dotted on POSIX, plain elsewhere.
    user_name = ('.' if os.name == 'posix' else '') + 'pydistutils.cfg'
    if self.want_user_cfg:
        # expanduser may raise RuntimeError when no home dir resolves.
        with contextlib.suppress(RuntimeError):
            yield pathlib.Path('~').expanduser() / user_name

    # All platforms support local setup.cfg.
    yield pathlib.Path('setup.cfg')

    # Additional config indicated in the environment; TypeError is
    # suppressed when the variable is unset (Path(None)).
    with contextlib.suppress(TypeError):
        yield pathlib.Path(os.getenv("DIST_EXTRA_CONFIG"))
370
+
371
def parse_config_files(self, filenames=None):  # noqa: C901
    """Parse a list of config files, folding each option found into
    'self.command_options' tagged with the filename it came from.

    If 'filenames' is None, the files returned by find_config_files()
    are parsed.  After all files are read, any [global] section options
    are applied to the Distribution instance itself.
    """
    from configparser import ConfigParser

    # Ignore install directory options if we have a venv
    if sys.prefix != sys.base_prefix:
        ignore_options = [
            'install-base',
            'install-platbase',
            'install-lib',
            'install-platlib',
            'install-purelib',
            'install-headers',
            'install-scripts',
            'install-data',
            'prefix',
            'exec-prefix',
            'home',
            'user',
            'root',
        ]
    else:
        ignore_options = []

    ignore_options = frozenset(ignore_options)

    if filenames is None:
        filenames = self.find_config_files()

    if DEBUG:
        self.announce("Distribution.parse_config_files():")

    parser = ConfigParser()
    for filename in filenames:
        if DEBUG:
            # Fixed: the message previously said "(unknown)" and never
            # showed which config file was actually being read.
            self.announce(f"  reading {filename}")
        parser.read(filename, encoding='utf-8')
        for section in parser.sections():
            options = parser.options(section)
            opt_dict = self.get_option_dict(section)

            for opt in options:
                if opt != '__name__' and opt not in ignore_options:
                    val = parser.get(section, opt)
                    # Option names use '_' internally, '-' in files.
                    opt = opt.replace('-', '_')
                    opt_dict[opt] = (filename, val)

        # Make the ConfigParser forget everything (so we retain
        # the original filenames that options come from)
        parser.__init__()

    # If there was a "global" section in the config file, use it
    # to set Distribution options.

    if 'global' in self.command_options:
        for opt, (_src, val) in self.command_options['global'].items():
            alias = self.negative_opt.get(opt)
            try:
                if alias:
                    # Negative option: invert the boolean for its alias.
                    setattr(self, alias, not strtobool(val))
                elif opt in ('verbose', 'dry_run'):  # ugh!
                    setattr(self, opt, strtobool(val))
                else:
                    setattr(self, opt, val)
            except ValueError as msg:
                raise DistutilsOptionError(msg)
436
+
437
+ # -- Command-line parsing methods ----------------------------------
438
+
439
def parse_command_line(self):
    """Parse the setup script's command line, taken from the
    'script_args' instance attribute (which defaults to 'sys.argv[1:]'
    -- see 'setup()' in core.py).  This list is first processed for
    "global options" -- options that set attributes of the Distribution
    instance.  Then, it is alternately scanned for Distutils commands
    and options for that command.  Each new command terminates the
    options for the previous command.  The allowed options for a
    command are determined by the 'user_options' attribute of the
    command class -- thus, we have to be able to load command classes
    in order to parse the command line.  Any error in that 'options'
    attribute raises DistutilsGetoptError; any error on the
    command-line raises DistutilsArgError.  If no Distutils commands
    were found on the command line, raises DistutilsArgError.  Return
    true if command-line was successfully parsed and we should carry
    on with executing commands; false if no errors but we shouldn't
    execute commands (currently, this only happens if user asks for
    help).
    """
    #
    # We now have enough information to show the Macintosh dialog
    # that allows the user to interactively specify the "command line".
    #
    toplevel_options = self._get_toplevel_options()

    # We have to parse the command line a bit at a time -- global
    # options, then the first command, then its options, and so on --
    # because each command will be handled by a different class, and
    # the options that are valid for a particular class aren't known
    # until we have loaded the command class, which doesn't happen
    # until we know what the command is.

    self.commands = []
    parser = FancyGetopt(toplevel_options + self.display_options)
    parser.set_negative_aliases(self.negative_opt)
    parser.set_aliases({'licence': 'license'})
    # getopt stores the parsed global options directly on 'self'.
    args = parser.getopt(args=self.script_args, object=self)
    option_order = parser.get_option_order()
    # Map the -v count onto the root logger level: WARN (30) for 0,
    # INFO for 1, DEBUG for 2.
    logging.getLogger().setLevel(logging.WARN - 10 * self.verbose)

    # for display options we return immediately
    if self.handle_display_options(option_order):
        return
    # Consume one command (plus its options) per iteration; a None
    # result means the user asked for help on that command.
    while args:
        args = self._parse_command_opts(parser, args)
        if args is None:  # user asked for help (and got it)
            return

    # Handle the cases of --help as a "global" option, ie.
    # "setup.py --help" and "setup.py --help command ...".  For the
    # former, we show global options (--verbose, --dry-run, etc.)
    # and display-only options (--name, --version, etc.); for the
    # latter, we omit the display-only options and show help for
    # each command listed on the command line.
    if self.help:
        self._show_help(
            parser, display_options=len(self.commands) == 0, commands=self.commands
        )
        return

    # Oops, no commands found -- an end-user error
    if not self.commands:
        raise DistutilsArgError("no commands supplied")

    # All is well: return true
    return True
505
+
506
+ def _get_toplevel_options(self):
507
+ """Return the non-display options recognized at the top level.
508
+
509
+ This includes options that are recognized *only* at the top
510
+ level as well as options recognized for commands.
511
+ """
512
+ return self.global_options + [
513
+ (
514
+ "command-packages=",
515
+ None,
516
+ "list of packages that provide distutils commands",
517
+ ),
518
+ ]
519
+
520
def _parse_command_opts(self, parser, args):  # noqa: C901
    """Parse the command-line options for a single command.
    'parser' must be a FancyGetopt instance; 'args' must be the list
    of arguments, starting with the current command (whose options
    we are about to parse).  Returns a new version of 'args' with
    the next command at the front of the list; will be the empty
    list if there are no more commands on the command line.  Returns
    None if the user asked for help on this command.
    """
    # late import because of mutual dependence between these modules
    from distutils.cmd import Command

    # Pull the current command from the head of the command line
    command = args[0]
    if not command_re.match(command):
        raise SystemExit(f"invalid command name '{command}'")
    self.commands.append(command)

    # Dig up the command class that implements this command, so we
    # 1) know that it's a valid command, and 2) know which options
    # it takes.
    try:
        cmd_class = self.get_command_class(command)
    except DistutilsModuleError as msg:
        raise DistutilsArgError(msg)

    # Require that the command class be derived from Command -- want
    # to be sure that the basic "command" interface is implemented.
    if not issubclass(cmd_class, Command):
        raise DistutilsClassError(
            f"command class {cmd_class} must subclass Command"
        )

    # Also make sure that the command object provides a list of its
    # known options.
    if not (
        hasattr(cmd_class, 'user_options')
        and isinstance(cmd_class.user_options, list)
    ):
        msg = (
            "command class %s must provide "
            "'user_options' attribute (a list of tuples)"
        )
        raise DistutilsClassError(msg % cmd_class)

    # If the command class has a list of negative alias options,
    # merge it in with the global negative aliases.  Copy first so the
    # class-level dictionary on Distribution is never mutated.
    negative_opt = self.negative_opt
    if hasattr(cmd_class, 'negative_opt'):
        negative_opt = negative_opt.copy()
        negative_opt.update(cmd_class.negative_opt)

    # Check for help_options in command class.  They have a different
    # format (tuple of four) so we need to preprocess them here.
    if hasattr(cmd_class, 'help_options') and isinstance(
        cmd_class.help_options, list
    ):
        help_options = fix_help_options(cmd_class.help_options)
    else:
        help_options = []

    # All commands support the global options too, just by adding
    # in 'global_options'.
    parser.set_option_table(
        self.global_options + cmd_class.user_options + help_options
    )
    parser.set_negative_aliases(negative_opt)
    # Parse everything up to (but not including) the next command.
    (args, opts) = parser.getopt(args[1:])
    if hasattr(opts, 'help') and opts.help:
        self._show_help(parser, display_options=False, commands=[cmd_class])
        return

    # Run any command-specific --help-xxx handlers; each entry's
    # fourth element must be a callable that prints the help text.
    if hasattr(cmd_class, 'help_options') and isinstance(
        cmd_class.help_options, list
    ):
        help_option_found = 0
        for help_option, _short, _desc, func in cmd_class.help_options:
            if hasattr(opts, parser.get_attr_name(help_option)):
                help_option_found = 1
                if callable(func):
                    func()
                else:
                    raise DistutilsClassError(
                        f"invalid help function {func!r} for help option '{help_option}': "
                        "must be a callable object (function, etc.)"
                    )

        if help_option_found:
            return

    # Put the options from the command-line into their official
    # holding pen, the 'command_options' dictionary.
    opt_dict = self.get_option_dict(command)
    for name, value in vars(opts).items():
        opt_dict[name] = ("command line", value)

    return args
617
+
618
def finalize_options(self):
    """Set final values for all the options on the Distribution
    instance, analogous to the .finalize_options() method of Command
    objects.
    """
    # 'keywords' and 'platforms' accept a comma-separated string as a
    # convenience; normalize each into a list of stripped tokens.
    for attr in ('keywords', 'platforms'):
        current = getattr(self.metadata, attr)
        if current is None:
            continue
        if isinstance(current, str):
            current = [piece.strip() for piece in current.split(',')]
        setattr(self.metadata, attr, current)
630
+
631
def _show_help(
    self, parser, global_options=True, display_options=True, commands: Iterable = ()
):
    """Show help for the setup script command-line in the form of
    several lists of command-line options.  'parser' should be a
    FancyGetopt instance; do not expect it to be returned in the
    same state, as its option table will be reset to make it
    generate the correct help text.

    If 'global_options' is true, lists the global options:
    --verbose, --dry-run, etc.  If 'display_options' is true, lists
    the "display-only" options: --name, --version, etc.  Finally,
    lists per-command help for every command name or command class
    in 'commands'.
    """
    # late import because of mutual dependence between these modules
    from distutils.cmd import Command
    from distutils.core import gen_usage

    if global_options:
        if display_options:
            options = self._get_toplevel_options()
        else:
            options = self.global_options
        parser.set_option_table(options)
        parser.print_help(self.common_usage + "\nGlobal options:")
        print()

    if display_options:
        parser.set_option_table(self.display_options)
        parser.print_help(
            "Information display options (just display information, ignore any commands)"
        )
        print()

    for command in commands:
        # 'commands' may mix command names and Command subclasses.
        if isinstance(command, type) and issubclass(command, Command):
            klass = command
        else:
            klass = self.get_command_class(command)
        if hasattr(klass, 'help_options') and isinstance(klass.help_options, list):
            parser.set_option_table(
                klass.user_options + fix_help_options(klass.help_options)
            )
        else:
            parser.set_option_table(klass.user_options)
        parser.print_help(f"Options for '{klass.__name__}' command:")
        print()

    print(gen_usage(self.script_name))
681
+
682
def handle_display_options(self, option_order):
    """If there were any non-global "display-only" options
    (--help-commands or the metadata display options) on the command
    line, display the requested info and return true; else return
    false.
    """
    from distutils.core import gen_usage

    # --help-commands trumps everything else: print the command list,
    # the usage line, and stop (remaining arguments are ignored).
    if self.help_commands:
        self.print_commands()
        print()
        print(gen_usage(self.script_name))
        return 1

    # If user supplied any of the "display metadata" options, then
    # display that metadata in the order in which the user supplied
    # the metadata options.
    display_names = {option[0] for option in self.display_options}

    any_display_options = 0
    for opt, val in option_order:
        if not val or opt not in display_names:
            continue
        attr = translate_longopt(opt)
        value = getattr(self.metadata, "get_" + attr)()
        if attr in ('keywords', 'platforms'):
            print(','.join(value))
        elif attr in ('classifiers', 'provides', 'requires', 'obsoletes'):
            print('\n'.join(value))
        else:
            print(value)
        any_display_options = 1

    return any_display_options
720
+
721
def print_command_list(self, commands, header, max_length):
    """Print a subset of the list of all commands -- used by
    'print_commands()'.

    'max_length' is the column width used to align descriptions.
    """
    print(header + ":")

    for cmd in commands:
        # Prefer a class registered via cmdclass, else load it.
        klass = self.cmdclass.get(cmd) or self.get_command_class(cmd)
        description = getattr(klass, 'description', "(no description available)")
        print(f"  {cmd:<{max_length}} {description}")
737
+
738
def print_commands(self):
    """Print out a help message listing all available commands with a
    description of each.  The list is divided into "standard commands"
    (listed in distutils.command.__all__) and "extra commands"
    (mentioned in self.cmdclass, but not a standard command).  The
    descriptions come from the command class attribute
    'description'.
    """
    import distutils.command

    std_commands = distutils.command.__all__
    is_std = set(std_commands)

    extra_commands = [cmd for cmd in self.cmdclass.keys() if cmd not in is_std]

    # Width of the widest command name, for column alignment; use the
    # builtin max() rather than a hand-rolled loop (default=0 guards
    # the pathological empty case).
    max_length = max(map(len, std_commands + extra_commands), default=0)

    self.print_command_list(std_commands, "Standard commands", max_length)
    if extra_commands:
        print()
        self.print_command_list(extra_commands, "Extra commands", max_length)
762
+
763
def get_command_list(self):
    """Get a list of (command, description) tuples.
    The list is divided into "standard commands" (listed in
    distutils.command.__all__) and "extra commands" (mentioned in
    self.cmdclass, but not a standard command).  The descriptions come
    from the command class attribute 'description'.
    """
    # Currently this is only used on Mac OS, for the Mac-only GUI
    # Distutils interface (by Jack Jansen)
    import distutils.command

    std_commands = distutils.command.__all__
    known = set(std_commands)

    extra_commands = [cmd for cmd in self.cmdclass.keys() if cmd not in known]

    def _describe(cmd):
        # Prefer a class registered via cmdclass, else load it.
        klass = self.cmdclass.get(cmd) or self.get_command_class(cmd)
        return getattr(klass, 'description', "(no description available)")

    return [(cmd, _describe(cmd)) for cmd in std_commands + extra_commands]
790
+
791
+ # -- Command class/object methods ----------------------------------
792
+
793
def get_command_packages(self):
    """Return a list of packages from which commands are loaded.

    On first access, normalizes 'self.command_packages' (None or a
    comma-separated string) into a list that always begins with
    'distutils.command', and caches the result back on the instance.
    """
    pkgs = self.command_packages
    if isinstance(pkgs, list):
        return pkgs

    raw = '' if pkgs is None else pkgs
    pkgs = [name.strip() for name in raw.split(',') if name != '']
    if "distutils.command" not in pkgs:
        pkgs.insert(0, "distutils.command")
    self.command_packages = pkgs
    return pkgs
804
+
805
def get_command_class(self, command):
    """Return the class that implements the Distutils command named by
    'command'.  First we check the 'cmdclass' dictionary; if the
    command is mentioned there, we fetch the class object from the
    dictionary and return it.  Otherwise we load the command module
    ("distutils.command." + command) and fetch the command class from
    the module.  The loaded class is also stored in 'cmdclass'
    to speed future calls to 'get_command_class()'.

    Raises DistutilsModuleError if the expected module could not be
    found, or if that module does not define the expected class.
    """
    cached = self.cmdclass.get(command)
    if cached:
        return cached

    klass_name = command
    for pkgname in self.get_command_packages():
        module_name = f"{pkgname}.{command}"

        # Import the candidate module; a missing module just means we
        # try the next package on the search path.
        try:
            __import__(module_name)
        except ImportError:
            continue
        module = sys.modules[module_name]

        try:
            klass = getattr(module, klass_name)
        except AttributeError:
            raise DistutilsModuleError(
                f"invalid command '{command}' (no class '{klass_name}' in module '{module_name}')"
            )

        # Cache for subsequent lookups.
        self.cmdclass[command] = klass
        return klass

    raise DistutilsModuleError(f"invalid command '{command}'")
842
+
843
@overload
def get_command_obj(
    self, command: str, create: Literal[True] = True
) -> Command: ...
@overload
def get_command_obj(
    self, command: str, create: Literal[False]
) -> Command | None: ...
def get_command_obj(self, command: str, create: bool = True) -> Command | None:
    """Return the command object for 'command'. Normally this object
    is cached on a previous call to 'get_command_obj()'; if no command
    object for 'command' is in the cache, then we either create and
    return it (if 'create' is true) or return None.
    """
    cmd_obj = self.command_obj.get(command)
    if not cmd_obj and create:
        if DEBUG:
            self.announce(
                "Distribution.get_command_obj(): "
                f"creating '{command}' command object"
            )

        klass = self.get_command_class(command)
        # Cache the new instance before configuring it.
        cmd_obj = self.command_obj[command] = klass(self)
        self.have_run[command] = False

        # Set any options that were supplied in config files
        # or on the command line. (NB. support for error
        # reporting is lame here: any errors aren't reported
        # until 'finalize_options()' is called, which means
        # we won't report the source of the error.)
        options = self.command_options.get(command)
        if options:
            self._set_command_options(cmd_obj, options)

    return cmd_obj
879
+
880
def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
    """Set the options for 'command_obj' from 'option_dict'. Basically
    this means copying elements of a dictionary ('option_dict') to
    attributes of an instance ('command').

    'command_obj' must be a Command instance. If 'option_dict' is not
    supplied, uses the standard option dictionary for this command
    (from 'self.command_options').

    Raises DistutilsOptionError if an option is unknown to the command
    or if a value cannot be coerced (e.g. a bad boolean string).
    """
    command_name = command_obj.get_command_name()
    if option_dict is None:
        option_dict = self.get_option_dict(command_name)

    if DEBUG:
        self.announce(f"  setting options for '{command_name}' command:")
    # Each value is recorded with its source (config file / command line)
    # for error reporting.
    for option, (source, value) in option_dict.items():
        if DEBUG:
            self.announce(f"    {option} = {value} (from {source})")
        # Commands may or may not declare boolean / negative options.
        try:
            bool_opts = [translate_longopt(o) for o in command_obj.boolean_options]
        except AttributeError:
            bool_opts = []
        try:
            neg_opt = command_obj.negative_opt
        except AttributeError:
            neg_opt = {}

        try:
            # String values from config files are coerced for boolean
            # options; already-typed values are assigned verbatim.
            is_string = isinstance(value, str)
            if option in neg_opt and is_string:
                setattr(command_obj, neg_opt[option], not strtobool(value))
            elif option in bool_opts and is_string:
                setattr(command_obj, option, strtobool(value))
            elif hasattr(command_obj, option):
                setattr(command_obj, option, value)
            else:
                raise DistutilsOptionError(
                    f"error in {source}: command '{command_name}' has no such option '{option}'"
                )
        except ValueError as msg:
            raise DistutilsOptionError(msg)
921
+
922
@overload
def reinitialize_command(
    self, command: str, reinit_subcommands: bool = False
) -> Command: ...
@overload
def reinitialize_command(
    self, command: _CommandT, reinit_subcommands: bool = False
) -> _CommandT: ...
def reinitialize_command(
    self, command: str | Command, reinit_subcommands=False
) -> Command:
    """Reinitializes a command to the state it was in when first
    returned by 'get_command_obj()': ie., initialized but not yet
    finalized. This provides the opportunity to sneak option
    values in programmatically, overriding or supplementing
    user-supplied values from the config files and command line.
    You'll have to re-finalize the command object (by calling
    'finalize_options()' or 'ensure_finalized()') before using it for
    real.

    'command' should be a command name (string) or command object. If
    'reinit_subcommands' is true, also reinitializes the command's
    sub-commands, as declared by the 'sub_commands' class attribute (if
    it has one). See the "install" command for an example. Only
    reinitializes the sub-commands that actually matter, ie. those
    whose test predicates return true.

    Returns the reinitialized command object.
    """
    from distutils.cmd import Command

    # Accept either a name or an instance; normalize to both forms.
    if not isinstance(command, Command):
        command_name = command
        command = self.get_command_obj(command_name)
    else:
        command_name = command.get_command_name()

    # A never-finalized command is already in the desired state.
    if not command.finalized:
        return command
    command.initialize_options()
    command.finalized = False
    self.have_run[command_name] = False
    # Re-apply config-file / command-line options on top of defaults.
    self._set_command_options(command)

    if reinit_subcommands:
        for sub in command.get_sub_commands():
            self.reinitialize_command(sub, reinit_subcommands)

    return command
971
+
972
+ # -- Methods that operate on the Distribution ----------------------
973
+
974
def announce(self, msg, level=logging.INFO):
    # Forward 'msg' to the distutils logger at the given level.
    log.log(level, msg)
976
+
977
def run_commands(self):
    """Run every command that was seen on the setup script command line.

    Each entry in ``self.commands`` is delegated to ``run_command``,
    which creates/caches the command object and skips commands that
    have already been run.
    """
    for command_name in self.commands:
        self.run_command(command_name)
984
+
985
+ # -- Methods that operate on its Commands --------------------------
986
+
987
def run_command(self, command):
    """Do whatever it takes to run a command (including nothing at all,
    if the command has already been run). Specifically: if we have
    already created and run the command named by 'command', return
    silently without doing anything. If the command named by 'command'
    doesn't even have a command object yet, create one. Then invoke
    'run()' on that command object (or an existing one).
    """
    # Already been here, done that? then return silently.
    if self.have_run.get(command):
        return

    log.info("running %s", command)
    cmd_obj = self.get_command_obj(command)
    cmd_obj.ensure_finalized()
    cmd_obj.run()
    # Marked only after 'run()' returns, so a failed command can be
    # attempted again.
    self.have_run[command] = True
1004
+
1005
+ # -- Distribution query methods ------------------------------------
1006
+
1007
def has_pure_modules(self):
    # True when any pure-Python packages or modules are declared.
    return len(self.packages or self.py_modules or []) > 0

def has_ext_modules(self):
    # True when at least one C/C++ extension module is declared.
    return self.ext_modules and len(self.ext_modules) > 0

def has_c_libraries(self):
    # True when at least one C library (build_clib) is declared.
    return self.libraries and len(self.libraries) > 0

def has_modules(self):
    # Any Python code at all, pure or compiled.
    return self.has_pure_modules() or self.has_ext_modules()

def has_headers(self):
    return self.headers and len(self.headers) > 0

def has_scripts(self):
    return self.scripts and len(self.scripts) > 0

def has_data_files(self):
    return self.data_files and len(self.data_files) > 0

def is_pure(self):
    # "Pure" = contains Python modules and nothing that needs compiling.
    return (
        self.has_pure_modules()
        and not self.has_ext_modules()
        and not self.has_c_libraries()
    )
1034
+
1035
+ # -- Metadata query methods ----------------------------------------
1036
+
1037
+ # If you're looking for 'get_name()', 'get_version()', and so forth,
1038
+ # they are defined in a sneaky way: the constructor binds self.get_XXX
1039
+ # to self.metadata.get_XXX. The actual code is in the
1040
+ # DistributionMetadata class, below.
1041
+
1042
+
1043
class DistributionMetadata:
    """Dummy class to hold the distribution meta-data: name, version,
    author, and so forth.
    """

    # NOTE(review): presumably consumed by Distribution to bind
    # get_XXX/set_XXX forwarders onto itself -- confirm at the caller.
    _METHOD_BASENAMES = (
        "name",
        "version",
        "author",
        "author_email",
        "maintainer",
        "maintainer_email",
        "url",
        "license",
        "description",
        "long_description",
        "keywords",
        "platforms",
        "fullname",
        "contact",
        "contact_email",
        "classifiers",
        "download_url",
        # PEP 314
        "provides",
        "requires",
        "obsoletes",
    )

    def __init__(self, path=None):
        # With a path, populate fields from an existing PKG-INFO file;
        # otherwise start with every field unset.
        if path is not None:
            self.read_pkg_file(open(path))
        else:
            self.name = None
            self.version = None
            self.author = None
            self.author_email = None
            self.maintainer = None
            self.maintainer_email = None
            self.url = None
            self.license = None
            self.description = None
            self.long_description = None
            self.keywords = None
            self.platforms = None
            self.classifiers = None
            self.download_url = None
            # PEP 314
            self.provides = None
            self.requires = None
            self.obsoletes = None

    def read_pkg_file(self, file):
        """Reads the metadata values from a file object."""
        # PKG-INFO is RFC 822-style; parse it with the email package.
        msg = message_from_file(file)

        def _read_field(name):
            # Treat the "UNKNOWN" placeholder the same as an absent field.
            value = msg[name]
            if value and value != "UNKNOWN":
                return value

        def _read_list(name):
            values = msg.get_all(name, None)
            if values == []:
                return None
            return values

        metadata_version = msg['metadata-version']
        self.name = _read_field('name')
        self.version = _read_field('version')
        self.description = _read_field('summary')
        # we are filling author only.
        self.author = _read_field('author')
        self.maintainer = None
        self.author_email = _read_field('author-email')
        self.maintainer_email = None
        self.url = _read_field('home-page')
        self.license = _read_field('license')

        if 'download-url' in msg:
            self.download_url = _read_field('download-url')
        else:
            self.download_url = None

        # "Description" holds the long description; "Summary" the short one.
        self.long_description = _read_field('description')
        self.description = _read_field('summary')

        if 'keywords' in msg:
            self.keywords = _read_field('keywords').split(',')

        self.platforms = _read_list('platform')
        self.classifiers = _read_list('classifier')

        # PEP 314 - these fields only exist in 1.1
        if metadata_version == '1.1':
            self.requires = _read_list('requires')
            self.provides = _read_list('provides')
            self.obsoletes = _read_list('obsoletes')
        else:
            self.requires = None
            self.provides = None
            self.obsoletes = None

    def write_pkg_info(self, base_dir):
        """Write the PKG-INFO file into the release tree."""
        with open(
            os.path.join(base_dir, 'PKG-INFO'), 'w', encoding='UTF-8'
        ) as pkg_info:
            self.write_pkg_file(pkg_info)

    def write_pkg_file(self, file):
        """Write the PKG-INFO format data to a file object."""
        version = '1.0'
        # Any PEP 314 field bumps the declared metadata version to 1.1.
        if (
            self.provides
            or self.requires
            or self.obsoletes
            or self.classifiers
            or self.download_url
        ):
            version = '1.1'

        # required fields
        file.write(f'Metadata-Version: {version}\n')
        file.write(f'Name: {self.get_name()}\n')
        file.write(f'Version: {self.get_version()}\n')

        def maybe_write(header, val):
            # Emit the header only when the value is non-empty.
            if val:
                file.write(f"{header}: {val}\n")

        # optional fields
        maybe_write("Summary", self.get_description())
        maybe_write("Home-page", self.get_url())
        maybe_write("Author", self.get_contact())
        maybe_write("Author-email", self.get_contact_email())
        maybe_write("License", self.get_license())
        maybe_write("Download-URL", self.download_url)
        maybe_write("Description", rfc822_escape(self.get_long_description() or ""))
        maybe_write("Keywords", ",".join(self.get_keywords()))

        self._write_list(file, 'Platform', self.get_platforms())
        self._write_list(file, 'Classifier', self.get_classifiers())

        # PEP 314
        self._write_list(file, 'Requires', self.get_requires())
        self._write_list(file, 'Provides', self.get_provides())
        self._write_list(file, 'Obsoletes', self.get_obsoletes())

    def _write_list(self, file, name, values):
        # One 'Name: value' header line per entry; None becomes no lines.
        values = values or []
        for value in values:
            file.write(f'{name}: {value}\n')

    # -- Metadata query methods ----------------------------------------

    def get_name(self):
        return self.name or "UNKNOWN"

    def get_version(self):
        return self.version or "0.0.0"

    def get_fullname(self):
        return self._fullname(self.get_name(), self.get_version())

    @staticmethod
    def _fullname(name: str, version: str) -> str:
        """
        >>> DistributionMetadata._fullname('setup.tools', '1.0-2')
        'setup_tools-1.0.post2'
        >>> DistributionMetadata._fullname('setup-tools', '1.2post2')
        'setup_tools-1.2.post2'
        >>> DistributionMetadata._fullname('setup-tools', '1.0-r2')
        'setup_tools-1.0.post2'
        >>> DistributionMetadata._fullname('setup.tools', '1.0.post')
        'setup_tools-1.0.post0'
        >>> DistributionMetadata._fullname('setup.tools', '1.0+ubuntu-1')
        'setup_tools-1.0+ubuntu.1'
        """
        # PEP 503 / PEP 440 normalization for '<name>-<version>'.
        return "{}-{}".format(
            canonicalize_name(name).replace('-', '_'),
            canonicalize_version(version, strip_trailing_zero=False),
        )

    def get_author(self):
        return self.author

    def get_author_email(self):
        return self.author_email

    def get_maintainer(self):
        return self.maintainer

    def get_maintainer_email(self):
        return self.maintainer_email

    def get_contact(self):
        # Maintainer takes precedence over author when both are set.
        return self.maintainer or self.author

    def get_contact_email(self):
        return self.maintainer_email or self.author_email

    def get_url(self):
        return self.url

    def get_license(self):
        return self.license

    # Alternate (British) spelling kept for backward compatibility.
    get_licence = get_license

    def get_description(self):
        return self.description

    def get_long_description(self):
        return self.long_description

    def get_keywords(self):
        return self.keywords or []

    def set_keywords(self, value):
        self.keywords = _ensure_list(value, 'keywords')

    def get_platforms(self):
        return self.platforms

    def set_platforms(self, value):
        self.platforms = _ensure_list(value, 'platforms')

    def get_classifiers(self):
        return self.classifiers or []

    def set_classifiers(self, value):
        self.classifiers = _ensure_list(value, 'classifiers')

    def get_download_url(self):
        return self.download_url

    # PEP 314
    def get_requires(self):
        return self.requires or []

    def set_requires(self, value):
        import distutils.versionpredicate

        # Validate every predicate before accepting the list.
        for v in value:
            distutils.versionpredicate.VersionPredicate(v)
        self.requires = list(value)

    def get_provides(self):
        return self.provides or []

    def set_provides(self, value):
        value = [v.strip() for v in value]
        for v in value:
            import distutils.versionpredicate

            distutils.versionpredicate.split_provision(v)
        self.provides = value

    def get_obsoletes(self):
        return self.obsoletes or []

    def set_obsoletes(self, value):
        import distutils.versionpredicate

        for v in value:
            distutils.versionpredicate.VersionPredicate(v)
        self.obsoletes = list(value)
1311
+
1312
+
1313
def fix_help_options(options):
    """Convert a 4-tuple 'help_options' list as found in various command
    classes to the 3-tuple form required by FancyGetopt (the trailing
    element of each entry is dropped).
    """
    return [option[:3] for option in options]
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/file_util.py ADDED
@@ -0,0 +1,236 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.file_util
2
+
3
+ Utility functions for operating on single files.
4
+ """
5
+
6
+ import os
7
+
8
+ from ._log import log
9
+ from .errors import DistutilsFileError
10
+
11
+ # for generating verbose output in 'copy_file()'
12
+ _copy_action = {None: 'copying', 'hard': 'hard linking', 'sym': 'symbolically linking'}
13
+
14
+
15
def _copy_file_contents(src, dst, buffer_size=16 * 1024):  # noqa: C901
    """Copy the file 'src' to 'dst'; both must be filenames. Any error
    opening either file, reading from 'src', or writing to 'dst', raises
    DistutilsFileError. Data is read/written in chunks of 'buffer_size'
    bytes (default 16k). No attempt is made to handle anything apart from
    regular files.
    """
    # shutil-style copy loop, but with every OS-level failure rewrapped
    # as DistutilsFileError.  Context managers guarantee both handles
    # are closed on any exit path.
    try:
        fsrc = open(src, 'rb')
    except OSError as exc:
        raise DistutilsFileError(f"could not open '{src}': {exc.strerror}")

    with fsrc:
        # Remove an existing destination so we never write through a
        # hard link or into a read-only file.
        if os.path.exists(dst):
            try:
                os.unlink(dst)
            except OSError as exc:
                raise DistutilsFileError(f"could not delete '{dst}': {exc.strerror}")

        try:
            fdst = open(dst, 'wb')
        except OSError as exc:
            raise DistutilsFileError(f"could not create '{dst}': {exc.strerror}")

        with fdst:
            while True:
                try:
                    chunk = fsrc.read(buffer_size)
                except OSError as exc:
                    raise DistutilsFileError(
                        f"could not read from '{src}': {exc.strerror}"
                    )

                if not chunk:
                    return

                try:
                    fdst.write(chunk)
                except OSError as exc:
                    raise DistutilsFileError(
                        f"could not write to '{dst}': {exc.strerror}"
                    )
61
+
62
+
63
def copy_file(  # noqa: C901
    src,
    dst,
    preserve_mode=True,
    preserve_times=True,
    update=False,
    link=None,
    verbose=True,
    dry_run=False,
):
    """Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is
    copied there with the same name; otherwise, it must be a filename. (If
    the file exists, it will be ruthlessly clobbered.) If 'preserve_mode'
    is true (the default), the file's mode (type and permission bits, or
    whatever is analogous on the current platform) is copied. If
    'preserve_times' is true (the default), the last-modified and
    last-access times are copied as well. If 'update' is true, 'src' will
    only be copied if 'dst' does not exist, or if 'dst' does exist but is
    older than 'src'.

    'link' allows you to make hard links (os.link) or symbolic links
    (os.symlink) instead of copying: set it to "hard" or "sym"; if it is
    None (the default), files are copied. Don't set 'link' on systems that
    don't support it: 'copy_file()' doesn't check if hard or symbolic
    linking is available. If hardlink fails, falls back to
    _copy_file_contents().

    Under Mac OS, uses the native file copy function in macostools; on
    other systems, uses '_copy_file_contents()' to copy file contents.

    Return a tuple (dest_name, copied): 'dest_name' is the actual name of
    the output file, and 'copied' is true if the file was copied (or would
    have been copied, if 'dry_run' true).
    """
    # XXX if the destination file already exists, we clobber it if
    # copying, but blow up if linking. Hmmm. And I don't know what
    # macostools.copyfile() does. Should definitely be consistent, and
    # should probably blow up if destination exists and we would be
    # changing it (ie. it's not already a hard/soft link to src OR
    # (not update) and (src newer than dst).

    from distutils._modified import newer
    from stat import S_IMODE, ST_ATIME, ST_MODE, ST_MTIME

    if not os.path.isfile(src):
        raise DistutilsFileError(
            f"can't copy '{src}': doesn't exist or not a regular file"
        )

    # Resolve the final destination path and remember its directory
    # (used below for nicer log messages).
    if os.path.isdir(dst):
        dir = dst
        dst = os.path.join(dst, os.path.basename(src))
    else:
        dir = os.path.dirname(dst)

    if update and not newer(src, dst):
        if verbose >= 1:
            log.debug("not copying %s (output up-to-date)", src)
        return (dst, False)

    # Validate 'link' and pick the verb for log output.
    try:
        action = _copy_action[link]
    except KeyError:
        raise ValueError(f"invalid value '{link}' for 'link' argument")

    if verbose >= 1:
        # Log only the target directory when the basename is unchanged.
        if os.path.basename(dst) == os.path.basename(src):
            log.info("%s %s -> %s", action, src, dir)
        else:
            log.info("%s %s -> %s", action, src, dst)

    if dry_run:
        return (dst, True)

    # If linking (hard or symbolic), use the appropriate system call
    # (Unix only, of course, but that's the caller's responsibility)
    elif link == 'hard':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            try:
                os.link(src, dst)
            except OSError:
                # If hard linking fails, fall back on copying file
                # (some special filesystems don't support hard linking
                # even under Unix, see issue #8876).
                pass
            else:
                return (dst, True)
    elif link == 'sym':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.symlink(src, dst)
            return (dst, True)

    # Otherwise (non-Mac, not linking), copy the file contents and
    # (optionally) copy the times and mode.
    _copy_file_contents(src, dst)
    if preserve_mode or preserve_times:
        st = os.stat(src)

        # According to David Ascher <da@ski.org>, utime() should be done
        # before chmod() (at least under NT).
        if preserve_times:
            os.utime(dst, (st[ST_ATIME], st[ST_MTIME]))
        if preserve_mode:
            os.chmod(dst, S_IMODE(st[ST_MODE]))

    return (dst, True)
169
+
170
+
171
+ # XXX I suspect this is Unix-specific -- need porting help!
172
+ def move_file(src, dst, verbose=True, dry_run=False): # noqa: C901
173
+ """Move a file 'src' to 'dst'. If 'dst' is a directory, the file will
174
+ be moved into it with the same name; otherwise, 'src' is just renamed
175
+ to 'dst'. Return the new full name of the file.
176
+
177
+ Handles cross-device moves on Unix using 'copy_file()'. What about
178
+ other systems???
179
+ """
180
+ import errno
181
+ from os.path import basename, dirname, exists, isdir, isfile
182
+
183
+ if verbose >= 1:
184
+ log.info("moving %s -> %s", src, dst)
185
+
186
+ if dry_run:
187
+ return dst
188
+
189
+ if not isfile(src):
190
+ raise DistutilsFileError(f"can't move '{src}': not a regular file")
191
+
192
+ if isdir(dst):
193
+ dst = os.path.join(dst, basename(src))
194
+ elif exists(dst):
195
+ raise DistutilsFileError(
196
+ f"can't move '{src}': destination '{dst}' already exists"
197
+ )
198
+
199
+ if not isdir(dirname(dst)):
200
+ raise DistutilsFileError(
201
+ f"can't move '{src}': destination '{dst}' not a valid path"
202
+ )
203
+
204
+ copy_it = False
205
+ try:
206
+ os.rename(src, dst)
207
+ except OSError as e:
208
+ (num, msg) = e.args
209
+ if num == errno.EXDEV:
210
+ copy_it = True
211
+ else:
212
+ raise DistutilsFileError(f"couldn't move '{src}' to '{dst}': {msg}")
213
+
214
+ if copy_it:
215
+ copy_file(src, dst, verbose=verbose)
216
+ try:
217
+ os.unlink(src)
218
+ except OSError as e:
219
+ (num, msg) = e.args
220
+ try:
221
+ os.unlink(dst)
222
+ except OSError:
223
+ pass
224
+ raise DistutilsFileError(
225
+ f"couldn't move '{src}' to '{dst}' by copy/delete: "
226
+ f"delete '{src}' failed: {msg}"
227
+ )
228
+ return dst
229
+
230
+
231
def write_file(filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it, appending a
    newline after each entry.
    """
    with open(filename, 'w', encoding='utf-8') as stream:
        for line in contents:
            stream.write(line + '\n')
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/filelist.py ADDED
@@ -0,0 +1,366 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.filelist
2
+
3
+ Provides the FileList class, used for poking about the filesystem
4
+ and building lists of files.
5
+ """
6
+
7
+ import fnmatch
8
+ import functools
9
+ import os
10
+ import re
11
+
12
+ from ._log import log
13
+ from .errors import DistutilsInternalError, DistutilsTemplateError
14
+ from .util import convert_path
15
+
16
+
17
+ class FileList:
18
+ """A list of files built by on exploring the filesystem and filtered by
19
+ applying various patterns to what we find there.
20
+
21
+ Instance attributes:
22
+ dir
23
+ directory from which files will be taken -- only used if
24
+ 'allfiles' not supplied to constructor
25
+ files
26
+ list of filenames currently being built/filtered/manipulated
27
+ allfiles
28
+ complete list of files under consideration (ie. without any
29
+ filtering applied)
30
+ """
31
+
32
def __init__(self, warn=None, debug_print=None):
    # ignore argument to FileList, but keep them for backwards
    # compatibility
    # allfiles: complete file universe (filled lazily by findall/set_allfiles)
    self.allfiles = None
    # files: the list being built/filtered by the template methods
    self.files = []
37
+
38
def set_allfiles(self, allfiles):
    # Install a precomputed file universe instead of scanning the disk.
    self.allfiles = allfiles
40
+
41
def findall(self, dir=os.curdir):
    # Populate 'allfiles' by walking 'dir' with the module-level findall().
    self.allfiles = findall(dir)
43
+
44
def debug_print(self, msg):
    """Print 'msg' to stdout if the global DEBUG (taken from the
    DISTUTILS_DEBUG environment variable) flag is true.
    """
    from distutils.debug import DEBUG

    if DEBUG:
        print(msg)
52
+
53
+ # Collection methods
54
+
55
def append(self, item):
    # Add a single filename to the working list.
    self.files.append(item)
57
+
58
def extend(self, items):
    # Add a sequence of filenames to the working list.
    self.files.extend(items)
60
+
61
def sort(self):
    """Sort self.files by path components (dirname, basename) rather
    than by plain lexical order on the joined string.
    """
    # os.path.split always yields a (head, tail) pair, so sorting the
    # pairs and rejoining them gives a component-wise ordering.
    decomposed = sorted(os.path.split(path) for path in self.files)
    self.files = [os.path.join(head, tail) for head, tail in decomposed]
67
+
68
+ # Other miscellaneous utility methods
69
+
70
def remove_duplicates(self):
    """Collapse runs of identical adjacent entries in self.files.

    Assumes the list has already been sorted (see sort()).
    """
    # Walk backwards so deletions never shift indices we have yet to visit.
    index = len(self.files) - 1
    while index > 0:
        if self.files[index] == self.files[index - 1]:
            del self.files[index]
        index -= 1
75
+
76
+ # "File template" methods
77
+
78
def _parse_template_line(self, line):
    """Split one MANIFEST.in template line into its parts.

    Returns (action, patterns, dir, dir_pattern); which of the last
    three are non-None depends on 'action'.  Raises
    DistutilsTemplateError on a malformed or unknown line.
    """
    words = line.split()
    action = words[0]

    patterns = dir = dir_pattern = None

    # Pattern-only actions: one or more glob patterns.
    if action in ('include', 'exclude', 'global-include', 'global-exclude'):
        if len(words) < 2:
            raise DistutilsTemplateError(
                f"'{action}' expects <pattern1> <pattern2> ..."
            )
        patterns = [convert_path(w) for w in words[1:]]
    # Directory + patterns actions.
    elif action in ('recursive-include', 'recursive-exclude'):
        if len(words) < 3:
            raise DistutilsTemplateError(
                f"'{action}' expects <dir> <pattern1> <pattern2> ..."
            )
        dir = convert_path(words[1])
        patterns = [convert_path(w) for w in words[2:]]
    # Single directory-pattern actions.
    elif action in ('graft', 'prune'):
        if len(words) != 2:
            raise DistutilsTemplateError(
                f"'{action}' expects a single <dir_pattern>"
            )
        dir_pattern = convert_path(words[1])
    else:
        raise DistutilsTemplateError(f"unknown action '{action}'")

    return (action, patterns, dir, dir_pattern)
107
+
108
def process_template_line(self, line):  # noqa: C901
    """Apply one MANIFEST.in template line to self.files.

    Each action is translated into include_pattern/exclude_pattern
    calls; a warning is logged when a pattern matches nothing.
    """
    # Parse the line: split it up, make sure the right number of words
    # is there, and return the relevant words. 'action' is always
    # defined: it's the first word of the line. Which of the other
    # three are defined depends on the action; it'll be either
    # patterns, (dir and patterns), or (dir_pattern).
    (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

    # OK, now we know that the action is valid and we have the
    # right number of words on the line for that action -- so we
    # can proceed with minimal error-checking.
    if action == 'include':
        self.debug_print("include " + ' '.join(patterns))
        for pattern in patterns:
            if not self.include_pattern(pattern, anchor=True):
                log.warning("warning: no files found matching '%s'", pattern)

    elif action == 'exclude':
        self.debug_print("exclude " + ' '.join(patterns))
        for pattern in patterns:
            if not self.exclude_pattern(pattern, anchor=True):
                log.warning(
                    "warning: no previously-included files found matching '%s'",
                    pattern,
                )

    elif action == 'global-include':
        # anchor=False: match anywhere in the tree, not just top level.
        self.debug_print("global-include " + ' '.join(patterns))
        for pattern in patterns:
            if not self.include_pattern(pattern, anchor=False):
                log.warning(
                    (
                        "warning: no files found matching '%s' "
                        "anywhere in distribution"
                    ),
                    pattern,
                )

    elif action == 'global-exclude':
        self.debug_print("global-exclude " + ' '.join(patterns))
        for pattern in patterns:
            if not self.exclude_pattern(pattern, anchor=False):
                log.warning(
                    (
                        "warning: no previously-included files matching "
                        "'%s' found anywhere in distribution"
                    ),
                    pattern,
                )

    elif action == 'recursive-include':
        self.debug_print("recursive-include {} {}".format(dir, ' '.join(patterns)))
        for pattern in patterns:
            if not self.include_pattern(pattern, prefix=dir):
                msg = "warning: no files found matching '%s' under directory '%s'"
                log.warning(msg, pattern, dir)

    elif action == 'recursive-exclude':
        self.debug_print("recursive-exclude {} {}".format(dir, ' '.join(patterns)))
        for pattern in patterns:
            if not self.exclude_pattern(pattern, prefix=dir):
                log.warning(
                    (
                        "warning: no previously-included files matching "
                        "'%s' found under directory '%s'"
                    ),
                    pattern,
                    dir,
                )

    elif action == 'graft':
        # Include everything under directories matching dir_pattern.
        self.debug_print("graft " + dir_pattern)
        if not self.include_pattern(None, prefix=dir_pattern):
            log.warning("warning: no directories found matching '%s'", dir_pattern)

    elif action == 'prune':
        # Exclude everything under directories matching dir_pattern.
        self.debug_print("prune " + dir_pattern)
        if not self.exclude_pattern(None, prefix=dir_pattern):
            log.warning(
                ("no previously-included directories found matching '%s'"),
                dir_pattern,
            )
    else:
        # _parse_template_line already rejects unknown actions.
        raise DistutilsInternalError(
            f"this cannot happen: invalid action '{action}'"
        )
194
+
195
+ # Filtering/selection methods
196
+
197
+ def include_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
198
+ """Select strings (presumably filenames) from 'self.files' that
199
+ match 'pattern', a Unix-style wildcard (glob) pattern. Patterns
200
+ are not quite the same as implemented by the 'fnmatch' module: '*'
201
+ and '?' match non-special characters, where "special" is platform-
202
+ dependent: slash on Unix; colon, slash, and backslash on
203
+ DOS/Windows; and colon on Mac OS.
204
+
205
+ If 'anchor' is true (the default), then the pattern match is more
206
+ stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
207
+ 'anchor' is false, both of these will match.
208
+
209
+ If 'prefix' is supplied, then only filenames starting with 'prefix'
210
+ (itself a pattern) and ending with 'pattern', with anything in between
211
+ them, will match. 'anchor' is ignored in this case.
212
+
213
+ If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
214
+ 'pattern' is assumed to be either a string containing a regex or a
215
+ regex object -- no translation is done, the regex is just compiled
216
+ and used as-is.
217
+
218
+ Selected strings will be added to self.files.
219
+
220
+ Return True if files are found, False otherwise.
221
+ """
222
+ # XXX docstring lying about what the special chars are?
223
+ files_found = False
224
+ pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
225
+ self.debug_print(f"include_pattern: applying regex r'{pattern_re.pattern}'")
226
+
227
+ # delayed loading of allfiles list
228
+ if self.allfiles is None:
229
+ self.findall()
230
+
231
+ for name in self.allfiles:
232
+ if pattern_re.search(name):
233
+ self.debug_print(" adding " + name)
234
+ self.files.append(name)
235
+ files_found = True
236
+ return files_found
237
+
238
+ def exclude_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
239
+ """Remove strings (presumably filenames) from 'files' that match
240
+ 'pattern'. Other parameters are the same as for
241
+ 'include_pattern()', above.
242
+ The list 'self.files' is modified in place.
243
+ Return True if files are found, False otherwise.
244
+ """
245
+ files_found = False
246
+ pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
247
+ self.debug_print(f"exclude_pattern: applying regex r'{pattern_re.pattern}'")
248
+ for i in range(len(self.files) - 1, -1, -1):
249
+ if pattern_re.search(self.files[i]):
250
+ self.debug_print(" removing " + self.files[i])
251
+ del self.files[i]
252
+ files_found = True
253
+ return files_found
254
+
255
+
256
+ # Utility functions
257
+
258
+
259
def _find_all_simple(path):
    """
    Return an iterator over every regular file found under 'path'
    (symlinks are followed; repeated directories are skipped).
    """
    walked = _UniqueDirs.filter(os.walk(path, followlinks=True))
    candidates = (
        os.path.join(base, filename)
        for base, _dirs, filenames in walked
        for filename in filenames
    )
    return filter(os.path.isfile, candidates)
268
+
269
+
270
+ class _UniqueDirs(set):
271
+ """
272
+ Exclude previously-seen dirs from walk results,
273
+ avoiding infinite recursion.
274
+ Ref https://bugs.python.org/issue44497.
275
+ """
276
+
277
+ def __call__(self, walk_item):
278
+ """
279
+ Given an item from an os.walk result, determine
280
+ if the item represents a unique dir for this instance
281
+ and if not, prevent further traversal.
282
+ """
283
+ base, dirs, files = walk_item
284
+ stat = os.stat(base)
285
+ candidate = stat.st_dev, stat.st_ino
286
+ found = candidate in self
287
+ if found:
288
+ del dirs[:]
289
+ self.add(candidate)
290
+ return not found
291
+
292
+ @classmethod
293
+ def filter(cls, items):
294
+ return filter(cls(), items)
295
+
296
+
297
def findall(dir=os.curdir):
    """
    Return a list of all files below 'dir'.  When 'dir' is '.', the
    results are made relative; otherwise 'dir' stays as a path prefix.
    """
    found = _find_all_simple(dir)
    if dir == os.curdir:
        found = (os.path.relpath(name, start=dir) for name in found)
    return list(found)
307
+
308
+
309
def glob_to_re(pattern):
    """Convert a shell-like glob 'pattern' to a regex source string.

    Unlike 'fnmatch.translate()', the resulting '*' and '?' are kept
    from matching the platform's path separator.
    """
    translated = fnmatch.translate(pattern)

    # fnmatch turns '?'/'*' into '.'/'.*'.  Rewrite every non-escaped
    # '.' so it cannot match os.sep.  When the separator is itself a
    # backslash it must be escaped twice, since we are editing regex
    # source with a regex.
    sep = r'\\\\' if os.sep == '\\' else os.sep
    replacement = rf'\1[^{sep}]'
    return re.sub(r'((?<!\\)(\\\\)*)\.', replacement, translated)
330
+
331
+
332
def translate_pattern(pattern, anchor=True, prefix=None, is_regex=False):
    """Compile a shell-style wildcard 'pattern' into a regex object.

    With 'is_regex' true, 'pattern' is compiled directly (or returned
    unchanged when it is already a compiled regex).  With 'prefix'
    supplied, matches are restricted to names under that (glob) prefix
    and 'anchor' is ignored; otherwise 'anchor' ties the match to the
    start of the name.
    """
    if is_regex:
        return re.compile(pattern) if isinstance(pattern, str) else pattern

    # Recover the constant framing glob_to_re wraps around any pattern.
    start, _, end = glob_to_re('_').partition('_')

    pattern_re = ''
    if pattern:
        pattern_re = glob_to_re(pattern)
        assert pattern_re.startswith(start) and pattern_re.endswith(end)

    if prefix is None:
        # No prefix -- respect the anchor flag.
        if anchor:
            pattern_re = rf'{start}\A{pattern_re[len(start) :]}'
    else:
        prefix_re = glob_to_re(prefix)
        assert prefix_re.startswith(start) and prefix_re.endswith(end)
        prefix_re = prefix_re[len(start) : len(prefix_re) - len(end)]
        sep = r'\\' if os.sep == '\\' else os.sep
        pattern_re = pattern_re[len(start) : len(pattern_re) - len(end)]
        pattern_re = rf'{start}\A{prefix_re}{sep}.*{pattern_re}{end}'

    return re.compile(pattern_re)
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/log.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ A simple log mechanism styled after PEP 282.
3
+
4
+ Retained for compatibility and should not be used.
5
+ """
6
+
7
+ import logging
8
+ import warnings
9
+
10
+ from ._log import log as _global_log
11
+
12
+ DEBUG = logging.DEBUG
13
+ INFO = logging.INFO
14
+ WARN = logging.WARN
15
+ ERROR = logging.ERROR
16
+ FATAL = logging.FATAL
17
+
18
+ log = _global_log.log
19
+ debug = _global_log.debug
20
+ info = _global_log.info
21
+ warn = _global_log.warning
22
+ error = _global_log.error
23
+ fatal = _global_log.fatal
24
+
25
+
26
def set_threshold(level):
    """Set the global logger's level; return the previous level so the
    caller can restore it."""
    previous = _global_log.level
    _global_log.setLevel(level)
    return previous
30
+
31
+
32
def set_verbosity(v):
    """Map a verbosity count onto a logging level:
    0 (or less) -> WARN, 1 -> INFO, 2 (or more) -> DEBUG."""
    if v >= 2:
        set_threshold(logging.DEBUG)
    elif v == 1:
        set_threshold(logging.INFO)
    else:
        set_threshold(logging.WARN)
39
+
40
+
41
class Log(logging.Logger):
    """distutils.log.Log is deprecated, please use an alternative from `logging`."""

    def __init__(self, threshold=WARN):
        # Plain warnings.warn (not a DeprecationWarning) so the notice
        # is visible by default.
        warnings.warn(Log.__doc__)
        super().__init__(__name__, level=threshold)

    @property
    def threshold(self):
        # Back-compat alias for Logger.level.
        return self.level

    @threshold.setter
    def threshold(self, level):
        self.setLevel(level)

    # Historical spelling of Logger.warning.
    warn = logging.Logger.warning
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/spawn.py ADDED
@@ -0,0 +1,117 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.spawn
2
+
3
+ Provides the 'spawn()' function, a front-end to various platform-
4
+ specific functions for launching another program in a sub-process.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import os
10
+ import platform
11
+ import shutil
12
+ import subprocess
13
+ import sys
14
+ import warnings
15
+ from collections.abc import Mapping
16
+
17
+ from ._log import log
18
+ from .debug import DEBUG
19
+ from .errors import DistutilsExecError
20
+
21
+
22
def _debug(cmd):
    """
    Render a subprocess command for error messages: the whole argv list
    when DEBUG is enabled, otherwise only the program name.
    """
    if DEBUG:
        return cmd
    return cmd[0]
27
+
28
+
29
def _inject_macos_ver(env: Mapping[str, str] | None) -> Mapping[str, str] | None:
    """
    On macOS, return a copy of 'env' (or os.environ when env is None)
    with the deployment-target variable injected; on any other platform
    return 'env' unchanged.

    Fix: the annotation previously read ``Mapping[str:str]``, which is a
    slice subscript (``Mapping[slice(str, str)]``), not a two-parameter
    generic; it only avoided a runtime error because of
    ``from __future__ import annotations``.
    """
    if platform.system() != 'Darwin':
        return env

    # Imported lazily: .util pulls in more machinery than most callers need.
    from .util import MACOSX_VERSION_VAR, get_macosx_target_ver

    target_ver = get_macosx_target_ver()
    update = {MACOSX_VERSION_VAR: target_ver} if target_ver else {}
    return {**_resolve(env), **update}
38
+
39
+
40
+ def _resolve(env: Mapping[str:str] | None) -> Mapping[str:str]:
41
+ return os.environ if env is None else env
42
+
43
+
44
def spawn(cmd, search_path=True, verbose=False, dry_run=False, env=None):
    """Run another program, specified as a command list 'cmd', in a new process.

    'cmd' is the argument list for the new process: cmd[0] is the
    program to run, cmd[1:] its arguments.  There is no way to run a
    program under a name different from its executable.

    If 'search_path' is true (the default), the system's executable
    search path is used to locate cmd[0]; otherwise cmd[0] must be the
    exact path to the executable.  If 'dry_run' is true, the command is
    logged but not executed.

    Raise DistutilsExecError if running the program fails in any way;
    just return on success.
    """
    log.info(subprocess.list2cmdline(cmd))
    if dry_run:
        return

    if search_path:
        resolved = shutil.which(cmd[0])
        if resolved is not None:
            cmd[0] = resolved

    try:
        subprocess.check_call(cmd, env=_inject_macos_ver(env))
    except subprocess.CalledProcessError as err:
        raise DistutilsExecError(
            f"command {_debug(cmd)!r} failed with exit code {err.returncode}"
        ) from err
    except OSError as exc:
        raise DistutilsExecError(
            f"command {_debug(cmd)!r} failed: {exc.args[-1]}"
        ) from exc
79
+
80
+
81
def find_executable(executable, path=None):
    """Tries to find 'executable' in the directories listed in 'path'.

    A string listing directories separated by 'os.pathsep'; defaults to
    os.environ['PATH']. Returns the complete filename or None if not found.

    Deprecated: prefer shutil.which.
    """
    warnings.warn(
        'Use shutil.which instead of find_executable', DeprecationWarning, stacklevel=2
    )
    _, ext = os.path.splitext(executable)
    if sys.platform == 'win32' and ext != '.exe':
        executable += '.exe'

    # An existing (possibly relative) path wins outright.
    if os.path.isfile(executable):
        return executable

    if path is None:
        path = os.environ.get('PATH', None)
        # bpo-35755: Don't fall through if PATH is the empty string
        if path is None:
            try:
                path = os.confstr("CS_PATH")
            except (AttributeError, ValueError):
                # os.confstr() or CS_PATH is not available
                path = os.defpath

    # PATH='' doesn't match, whereas PATH=':' looks in the current directory
    if not path:
        return None

    for directory in path.split(os.pathsep):
        candidate = os.path.join(directory, executable)
        if os.path.isfile(candidate):
            # the file exists, we have a shot at spawn working
            return candidate
    return None
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/sysconfig.py ADDED
@@ -0,0 +1,583 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Provide access to Python's configuration information. The specific
2
+ configuration variables available depend heavily on the platform and
3
+ configuration. The values may be retrieved using
4
+ get_config_var(name), and the list of variables is available via
5
+ get_config_vars().keys(). Additional convenience functions are also
6
+ available.
7
+
8
+ Written by: Fred L. Drake, Jr.
9
+ Email: <fdrake@acm.org>
10
+ """
11
+
12
+ import functools
13
+ import os
14
+ import pathlib
15
+ import re
16
+ import sys
17
+ import sysconfig
18
+
19
+ from jaraco.functools import pass_none
20
+
21
+ from .compat import py39
22
+ from .errors import DistutilsPlatformError
23
+ from .util import is_mingw
24
+
25
+ IS_PYPY = '__pypy__' in sys.builtin_module_names
26
+
27
+ # These are needed in a couple of spots, so just compute them once.
28
+ PREFIX = os.path.normpath(sys.prefix)
29
+ EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
30
+ BASE_PREFIX = os.path.normpath(sys.base_prefix)
31
+ BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix)
32
+
33
# Path to the base directory of the project. On Windows the binary may
# live in project/PCbuild/win32 or project/PCbuild/amd64.
# set for cross builds
if "_PYTHON_PROJECT_BASE" in os.environ:
    # Explicit override, used when cross-compiling CPython itself.
    project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"])
else:
    if sys.executable:
        # Normal case: the directory containing the running interpreter.
        project_base = os.path.dirname(os.path.abspath(sys.executable))
    else:
        # sys.executable can be empty if argv[0] has been changed and Python is
        # unable to retrieve the real program name
        project_base = os.getcwd()
45
+
46
+
47
+ def _is_python_source_dir(d):
48
+ """
49
+ Return True if the target directory appears to point to an
50
+ un-installed Python.
51
+ """
52
+ modules = pathlib.Path(d).joinpath('Modules')
53
+ return any(modules.joinpath(fn).is_file() for fn in ('Setup', 'Setup.local'))
54
+
55
+
56
+ _sys_home = getattr(sys, '_home', None)
57
+
58
+
59
+ def _is_parent(dir_a, dir_b):
60
+ """
61
+ Return True if a is a parent of b.
62
+ """
63
+ return os.path.normcase(dir_a).startswith(os.path.normcase(dir_b))
64
+
65
+
66
# On Windows, the interpreter of an uninstalled build lives under
# <prefix>\PCbuild[\win32|\amd64]; collapse such paths back to the
# matching prefix so later source-dir checks behave.
if os.name == 'nt':

    @pass_none
    def _fix_pcbuild(d):
        # In a venv, sys._home will be inside BASE_PREFIX rather than PREFIX.
        prefixes = PREFIX, BASE_PREFIX
        # First prefix whose PCbuild directory contains 'd'; otherwise
        # 'd' is returned unchanged.
        matched = (
            prefix
            for prefix in prefixes
            if _is_parent(d, os.path.join(prefix, "PCbuild"))
        )
        return next(matched, d)

    project_base = _fix_pcbuild(project_base)
    _sys_home = _fix_pcbuild(_sys_home)
81
+
82
+
83
def _python_build():
    """Return True when this interpreter runs from a source/build tree.

    sys._home (set for builds and some venvs) takes precedence over the
    executable's directory.
    """
    candidate = _sys_home or project_base
    return _is_python_source_dir(candidate)


# Computed once at import time.
python_build = _python_build()
90
+
91
+
92
# Calculate the build qualifier flags if they are defined. Adding the flags
# to the include and lib directories only makes sense for an installation,
# not an in-source build; non-configure builds lack sys.abiflags entirely,
# in which case the qualifier stays empty.
build_flags = '' if python_build else getattr(sys, 'abiflags', '')
103
+
104
+
105
def get_python_version():
    """Return the 'major.minor' Python version string (patchlevel
    omitted), e.g. '3.10'."""
    info = sys.version_info
    return f'{info.major}.{info.minor}'
111
+
112
+
113
def get_python_inc(plat_specific=False, prefix=None):
    """Return the directory containing installed Python header files.

    If 'plat_specific' is false (the default), this is the path to the
    non-platform-specific header files, i.e. Python.h and so on;
    otherwise, this is the path to platform-specific header files
    (namely pyconfig.h).

    If 'prefix' is supplied, use it instead of sys.base_prefix or
    sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
    """
    if prefix is not None:
        resolved_prefix = prefix
    else:
        resolved_prefix = BASE_EXEC_PREFIX if plat_specific else BASE_PREFIX
    # MinGW imitates posix like layout, but os.name != posix
    os_name = "posix" if is_mingw() else os.name
    # Dispatch to the per-platform helper defined in this module.
    getter = globals().get(f'_get_python_inc_{os_name}')
    if getter is None:
        raise DistutilsPlatformError(
            "I don't know where Python installs its C header files "
            f"on platform '{os.name}'"
        )
    return getter(resolved_prefix, prefix, plat_specific)
136
+
137
+
138
@pass_none
def _extant(path):
    """
    Return 'path' unchanged when it exists on disk, else None.
    (pass_none additionally short-circuits a None input.)
    """
    if os.path.exists(path):
        return path
    return None
144
+
145
+
146
def _get_python_inc_posix(prefix, spec_prefix, plat_specific):
    # Legacy PyPy (< 3.8) kept headers directly under <prefix>/include.
    if IS_PYPY and sys.version_info < (3, 8):
        return os.path.join(prefix, 'include')
    # Try, in order: build-tree headers, config-var headers (which must
    # actually exist on disk), then the conventional prefix layout.
    candidate = _get_python_inc_posix_python(plat_specific)
    if candidate:
        return candidate
    candidate = _extant(_get_python_inc_from_config(plat_specific, spec_prefix))
    if candidate:
        return candidate
    return _get_python_inc_posix_prefix(prefix)
154
+
155
+
156
def _get_python_inc_posix_python(plat_specific):
    """
    Header location when running from a build directory: pyconfig.h sits
    next to the executable, while the generic headers live in "Include"
    under the makefile's "srcdir" (the build dir may not be the source
    dir).  Returns None for installed Pythons.
    """
    if not python_build:
        return None
    if plat_specific:
        return _sys_home or project_base
    return os.path.normpath(os.path.join(get_config_var('srcdir'), 'Include'))
170
+
171
+
172
def _get_python_inc_from_config(plat_specific, spec_prefix):
    """
    If no prefix was explicitly specified, provide the include
    directory from the config vars. Useful when
    cross-compiling, since the config vars may come from
    the host
    platform Python installation, while the current Python
    executable is from the build platform installation.

    >>> monkeypatch = getfixture('monkeypatch')
    >>> gpifc = _get_python_inc_from_config
    >>> monkeypatch.setitem(gpifc.__globals__, 'get_config_var', str.lower)
    >>> gpifc(False, '/usr/bin/')
    >>> gpifc(False, '')
    >>> gpifc(False, None)
    'includepy'
    >>> gpifc(True, None)
    'confincludepy'
    """
    if spec_prefix is not None:
        # An explicit prefix overrides the config vars entirely.
        return None
    return get_config_var('CONF' * plat_specific + 'INCLUDEPY')
193
+
194
+
195
def _get_python_inc_posix_prefix(prefix):
    # Conventional installed layout: <prefix>/include/pythonX.Y<abiflags>
    # (or pypyX.Y on PyPy).
    impl = 'pypy' if IS_PYPY else 'python'
    return os.path.join(prefix, "include", impl + get_python_version() + build_flags)
199
+
200
+
201
def _get_python_inc_nt(prefix, spec_prefix, plat_specific):
    inc = os.path.join(prefix, "include")
    if not python_build:
        return inc
    # Include both include dirs to ensure we can find pyconfig.h
    return inc + os.path.pathsep + os.path.dirname(sysconfig.get_config_h_filename())
210
+
211
+
212
+ # allow this behavior to be monkey-patched. Ref pypa/distutils#2.
213
+ def _posix_lib(standard_lib, libpython, early_prefix, prefix):
214
+ if standard_lib:
215
+ return libpython
216
+ else:
217
+ return os.path.join(libpython, "site-packages")
218
+
219
+
220
def get_python_lib(plat_specific=False, standard_lib=False, prefix=None):
    """Return the directory containing the Python library (standard or
    site additions).

    If 'plat_specific' is true, return the directory containing
    platform-specific modules, i.e. any module from a non-pure-Python
    module distribution; otherwise, return the platform-shared library
    directory. If 'standard_lib' is true, return the directory
    containing standard Python library modules; otherwise, return the
    directory for site-specific modules.

    If 'prefix' is supplied, use it instead of sys.base_prefix or
    sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
    """
    if IS_PYPY and sys.version_info < (3, 8):
        # PyPy-specific schema
        resolved = PREFIX if prefix is None else prefix
        if standard_lib:
            return os.path.join(resolved, "lib-python", sys.version_info.major)
        return os.path.join(resolved, 'site-packages')

    early_prefix = prefix

    if prefix is None:
        if standard_lib:
            prefix = BASE_EXEC_PREFIX if plat_specific else BASE_PREFIX
        else:
            prefix = EXEC_PREFIX if plat_specific else PREFIX

    if os.name == "posix" or is_mingw():
        # Platform-specific or stdlib modules may live under e.g. lib64
        # (sys.platlibdir); pure-Python site dirs always use "lib".
        if plat_specific or standard_lib:
            libdir = getattr(sys, "platlibdir", "lib")
        else:
            libdir = "lib"
        implementation = 'pypy' if IS_PYPY else 'python'
        libpython = os.path.join(prefix, libdir, implementation + get_python_version())
        return _posix_lib(standard_lib, libpython, early_prefix, prefix)

    if os.name == "nt":
        parts = ("Lib",) if standard_lib else ("Lib", "site-packages")
        return os.path.join(prefix, *parts)

    raise DistutilsPlatformError(
        f"I don't know where Python installs its library on platform '{os.name}'"
    )
271
+
272
+
273
@functools.lru_cache
def _customize_macos():
    """
    One-time, on-demand customization of compiler-related config vars on
    macOS (lru_cache makes repeat calls no-ops).  Needed mainly for
    Pythons from binary installers, where the user's build tools, OS
    version, and supported universal-build architectures may differ from
    the system Python was built on.
    """
    if sys.platform == "darwin":
        import _osx_support

        _osx_support.customize_compiler(get_config_vars())
289
+
290
+
291
def customize_compiler(compiler):
    """Do any platform-specific customization of a CCompiler instance.

    Mainly needed on Unix, so we can plug in the information that
    varies across Unices and is stored in Python's Makefile.
    """
    if compiler.compiler_type in ["unix", "cygwin"] or (
        compiler.compiler_type == "mingw32" and is_mingw()
    ):
        _customize_macos()

        # Baseline tool commands/flags from the interpreter's build-time
        # configuration; environment variables below override them.
        (
            cc,
            cxx,
            cflags,
            ccshared,
            ldshared,
            ldcxxshared,
            shlib_suffix,
            ar,
            ar_flags,
        ) = get_config_vars(
            'CC',
            'CXX',
            'CFLAGS',
            'CCSHARED',
            'LDSHARED',
            'LDCXXSHARED',
            'SHLIB_SUFFIX',
            'AR',
            'ARFLAGS',
        )

        # C++ starts from the C flags unless CXXFLAGS overrides below.
        cxxflags = cflags

        if 'CC' in os.environ:
            newcc = os.environ['CC']
            if 'LDSHARED' not in os.environ and ldshared.startswith(cc):
                # If CC is overridden, use that as the default
                # command for LDSHARED as well
                ldshared = newcc + ldshared[len(cc) :]
            cc = newcc
        cxx = os.environ.get('CXX', cxx)
        ldshared = os.environ.get('LDSHARED', ldshared)
        ldcxxshared = os.environ.get('LDCXXSHARED', ldcxxshared)
        cpp = os.environ.get(
            'CPP',
            cc + " -E",  # not always
        )

        # Append environment *FLAGS in a fixed order: LDFLAGS to the
        # linkers, then CFLAGS/CXXFLAGS, then CPPFLAGS to everything.
        # NOTE(review): the ordering below is load-bearing — each
        # _add_flags call appends to the already-augmented command.
        ldshared = _add_flags(ldshared, 'LD')
        ldcxxshared = _add_flags(ldcxxshared, 'LD')
        cflags = os.environ.get('CFLAGS', cflags)
        ldshared = _add_flags(ldshared, 'C')
        cxxflags = os.environ.get('CXXFLAGS', cxxflags)
        ldcxxshared = _add_flags(ldcxxshared, 'CXX')
        cpp = _add_flags(cpp, 'CPP')
        cflags = _add_flags(cflags, 'CPP')
        cxxflags = _add_flags(cxxflags, 'CPP')
        ldshared = _add_flags(ldshared, 'CPP')
        ldcxxshared = _add_flags(ldcxxshared, 'CPP')

        ar = os.environ.get('AR', ar)

        archiver = ar + ' ' + os.environ.get('ARFLAGS', ar_flags)
        cc_cmd = cc + ' ' + cflags
        cxx_cmd = cxx + ' ' + cxxflags

        compiler.set_executables(
            preprocessor=cpp,
            compiler=cc_cmd,
            compiler_so=cc_cmd + ' ' + ccshared,
            compiler_cxx=cxx_cmd,
            compiler_so_cxx=cxx_cmd + ' ' + ccshared,
            linker_so=ldshared,
            linker_so_cxx=ldcxxshared,
            linker_exe=cc,
            linker_exe_cxx=cxx,
            archiver=archiver,
        )

        # Only honor RANLIB for compilers that actually use one.
        if 'RANLIB' in os.environ and compiler.executables.get('ranlib', None):
            compiler.set_executables(ranlib=os.environ['RANLIB'])

        compiler.shared_lib_extension = shlib_suffix
376
+
377
+
378
def get_config_h_filename():
    """Return full pathname of installed pyconfig.h file.

    Thin wrapper over the stdlib sysconfig implementation.
    """
    return sysconfig.get_config_h_filename()
381
+
382
+
383
def get_makefile_filename():
    """Return full pathname of installed Makefile from the Python build.

    Thin wrapper over the stdlib sysconfig implementation.
    """
    return sysconfig.get_makefile_filename()
386
+
387
+
388
def parse_config_h(fp, g=None):
    """Parse a config.h-style file object 'fp'.

    Returns a dict of name/value pairs; when 'g' is given, results are
    stored into it (and it is returned) instead of a fresh dict.
    Delegates to the stdlib sysconfig implementation.
    """
    return sysconfig.parse_config_h(fp, vars=g)
396
+
397
+
398
+ # Regexes needed for parsing Makefile (and similar syntaxes,
399
+ # like old-style Setup files).
400
+ _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
401
+ _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
402
+ _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
403
+
404
+
405
def parse_makefile(fn, g=None):  # noqa: C901
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned. If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.

    Values that look like integers are converted to int; $(VAR)/${VAR}
    references are expanded (falling back to the environment, like make).
    """
    from distutils.text_file import TextFile

    fp = TextFile(
        fn,
        strip_comments=True,
        skip_blanks=True,
        join_lines=True,
        errors="surrogateescape",
    )

    if g is None:
        g = {}
    # done: fully-resolved variables; notdone: values still containing
    # $-references, resolved iteratively below.
    done = {}
    notdone = {}

    while True:
        line = fp.readline()
        if line is None:  # eof
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # `$$' is a literal `$' in make
            tmpv = v.replace('$$', '')

            if "$" in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    # insert literal `$'
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v

    # Variables with a 'PY_' prefix in the makefile. These need to
    # be made available without that prefix through sysconfig.
    # Special care is needed to ensure that variable expansion works, even
    # if the expansion uses the name without a prefix.
    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')

    # do variable interpolation here
    while notdone:
        for name in list(notdone):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                # 'found' is False when the referenced variable cannot be
                # resolved yet; the entry is retried on a later round.
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]

                elif n in renamed_variables:
                    if name.startswith('PY_') and name[3:] in renamed_variables:
                        item = ""

                    elif 'PY_' + n in notdone:
                        found = False

                    else:
                        item = str(done['PY_' + n])
                else:
                    # Unknown variable: expands to empty, like make.
                    done[n] = item = ""
                if found:
                    after = value[m.end() :]
                    value = value[: m.start()] + item + after
                    if "$" in after:
                        # More references remain; keep for another pass.
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        del notdone[name]

                        # Mirror PY_CFLAGS etc. under the unprefixed name
                        # unless the makefile defined it explicitly.
                        if name.startswith('PY_') and name[3:] in renamed_variables:
                            name = name[3:]
                            if name not in done:
                                done[name] = value
            else:
                # bogus variable reference; just drop it since we can't deal
                del notdone[name]

    fp.close()

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    g.update(done)
    return g
515
+
516
+
517
def expand_makefile_vars(s, vars):
    """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
    'string' according to 'vars' (a dictionary mapping variable names to
    values). Variables not present in 'vars' are silently expanded to the
    empty string. The variable values in 'vars' should not contain further
    variable expansions; if 'vars' is the output of 'parse_makefile()',
    you're fine. Returns a variable-expanded version of 's'.
    """

    # This algorithm does multiple expansion, so if vars['foo'] contains
    # "${bar}", it will expand ${foo} to ${bar}, and then expand
    # ${bar}... and so forth. This is fine as long as 'vars' comes from
    # 'parse_makefile()', which takes care of such expansions eagerly,
    # according to make's variable expansion semantics.

    while True:
        m = _findvar1_rx.search(s) or _findvar2_rx.search(s)
        if not m:
            return s
        (beg, end) = m.span()
        # Fix: default to '' so a missing variable expands to the empty
        # string as documented, instead of raising TypeError on None;
        # str() additionally handles int values from parse_makefile().
        s = s[0:beg] + str(vars.get(m.group(1), '')) + s[end:]
540
+
541
+
542
+ _config_vars = None
543
+
544
+
545
def get_config_vars(*args):
    """With no arguments, return a dictionary of all configuration
    variables relevant for the current platform. Generally this includes
    everything needed to build extensions and install both pure modules and
    extensions. On Unix, this means every variable defined in Python's
    installed Makefile; on Windows it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    """
    global _config_vars
    if _config_vars is None:
        # Copy so local adjustments never leak into stdlib sysconfig.
        _config_vars = dict(sysconfig.get_config_vars())
        py39.add_ext_suffix(_config_vars)

    if not args:
        return _config_vars
    return [_config_vars.get(name) for name in args]
561
+
562
+
563
def get_config_var(name):
    """Look up a single configuration variable.

    Equivalent to ``get_config_vars().get(name)``. Requesting the
    long-removed 'SO' variable additionally emits a DeprecationWarning
    directing callers to EXT_SUFFIX.
    """
    if name != 'SO':
        return get_config_vars().get(name)

    import warnings

    warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2)
    return get_config_vars().get(name)
573
+
574
+
575
@pass_none
def _add_flags(value: str, type: str) -> str:
    """
    Append environment-supplied flags of the given type to *value*.

    ``type`` is the prefix of the FLAGS key in the environment (e.g. "C"
    selects "CFLAGS"). When that variable is unset or empty, *value* is
    returned unchanged.
    """
    extra = os.environ.get(f'{type}FLAGS')
    if not extra:
        return value
    return f'{value} {extra}'
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.49 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_archive_util.cpython-310.pyc ADDED
Binary file (10.7 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_bdist.cpython-310.pyc ADDED
Binary file (1.3 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_bdist_dumb.cpython-310.pyc ADDED
Binary file (2.09 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_bdist_rpm.cpython-310.pyc ADDED
Binary file (3 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_build.cpython-310.pyc ADDED
Binary file (1.45 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_build_clib.cpython-310.pyc ADDED
Binary file (3.74 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_build_scripts.cpython-310.pyc ADDED
Binary file (3.11 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_ccompiler.cpython-310.pyc ADDED
Binary file (2.75 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_check.cpython-310.pyc ADDED
Binary file (4.41 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_clean.cpython-310.pyc ADDED
Binary file (1.31 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_core.cpython-310.pyc ADDED
Binary file (3.96 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_dir_util.cpython-310.pyc ADDED
Binary file (4.94 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_file_util.cpython-310.pyc ADDED
Binary file (3.49 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_install.cpython-310.pyc ADDED
Binary file (7.39 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_install_data.cpython-310.pyc ADDED
Binary file (1.85 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_install_headers.cpython-310.pyc ADDED
Binary file (1.15 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_install_lib.cpython-310.pyc ADDED
Binary file (3.11 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_install_scripts.cpython-310.pyc ADDED
Binary file (1.67 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_log.cpython-310.pyc ADDED
Binary file (700 Bytes). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_modified.cpython-310.pyc ADDED
Binary file (4.19 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_spawn.cpython-310.pyc ADDED
Binary file (3.6 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_unixccompiler.cpython-310.pyc ADDED
Binary file (9.33 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_util.cpython-310.pyc ADDED
Binary file (7.88 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/__pycache__/test_version.cpython-310.pyc ADDED
Binary file (2.46 kB). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/compat/__init__.py ADDED
File without changes
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/compat/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (168 Bytes). View file
 
evalkit_tf437/lib/python3.10/site-packages/setuptools/_distutils/tests/compat/py39.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import sys

# Python 3.10 split test.support's helpers into dedicated submodules;
# re-export the names we need from whichever location this interpreter has.
if sys.version_info >= (3, 10):
    from test.support.import_helper import (
        CleanImport as CleanImport,
        DirsOnSysPath as DirsOnSysPath,
    )
    from test.support.os_helper import (
        EnvironmentVarGuard as EnvironmentVarGuard,
        rmtree as rmtree,
        skip_unless_symlink as skip_unless_symlink,
        unlink as unlink,
    )
else:
    from test.support import (
        CleanImport as CleanImport,
        DirsOnSysPath as DirsOnSysPath,
        EnvironmentVarGuard as EnvironmentVarGuard,
        rmtree as rmtree,
        skip_unless_symlink as skip_unless_symlink,
        unlink as unlink,
    )