Datasets:
Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__init__.py +342 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__main__.py +46 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/api.py +156 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/macos.py +64 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/unix.py +181 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/version.py +4 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/windows.py +184 -0
- .venv/Lib/site-packages/setuptools/_distutils/archive_util.py +280 -0
- .venv/Lib/site-packages/setuptools/_distutils/bcppcompiler.py +401 -0
- .venv/Lib/site-packages/setuptools/_distutils/ccompiler.py +1254 -0
- .venv/Lib/site-packages/setuptools/_distutils/cmd.py +435 -0
- .venv/Lib/site-packages/setuptools/_distutils/config.py +139 -0
- .venv/Lib/site-packages/setuptools/_distutils/core.py +291 -0
- .venv/Lib/site-packages/setuptools/_distutils/cygwinccompiler.py +356 -0
- .venv/Lib/site-packages/setuptools/_distutils/debug.py +5 -0
- .venv/Lib/site-packages/setuptools/_distutils/dep_util.py +96 -0
- .venv/Lib/site-packages/setuptools/_distutils/dir_util.py +243 -0
- .venv/Lib/site-packages/setuptools/_distutils/dist.py +1287 -0
- .venv/Lib/site-packages/setuptools/_distutils/errors.py +127 -0
- .venv/Lib/site-packages/setuptools/_distutils/extension.py +248 -0
- .venv/Lib/site-packages/setuptools/_distutils/fancy_getopt.py +470 -0
- .venv/Lib/site-packages/setuptools/_distutils/file_util.py +248 -0
- .venv/Lib/site-packages/setuptools/_distutils/filelist.py +371 -0
- .venv/Lib/site-packages/setuptools/_distutils/log.py +57 -0
- .venv/Lib/site-packages/setuptools/_distutils/msvc9compiler.py +829 -0
- .venv/Lib/site-packages/setuptools/_distutils/msvccompiler.py +692 -0
- .venv/Lib/site-packages/setuptools/_distutils/py38compat.py +8 -0
- .venv/Lib/site-packages/setuptools/_distutils/py39compat.py +22 -0
- .venv/Lib/site-packages/setuptools/_distutils/spawn.py +109 -0
- .venv/Lib/site-packages/setuptools/_distutils/sysconfig.py +559 -0
- .venv/Lib/site-packages/setuptools/_distutils/text_file.py +286 -0
- .venv/Lib/site-packages/setuptools/_distutils/unixccompiler.py +400 -0
- .venv/Lib/site-packages/setuptools/_distutils/util.py +513 -0
- .venv/Lib/site-packages/setuptools/_distutils/version.py +357 -0
- .venv/Lib/site-packages/setuptools/archive_util.py +216 -0
- .venv/Lib/site-packages/setuptools/build_meta.py +534 -0
- .venv/Lib/site-packages/setuptools/command/__init__.py +12 -0
- .venv/Lib/site-packages/setuptools/command/_requirestxt.py +129 -0
- .venv/Lib/site-packages/setuptools/command/editable_wheel.py +894 -0
- .venv/Lib/site-packages/setuptools/command/egg_info.py +735 -0
- .venv/Lib/site-packages/setuptools/command/install.py +147 -0
- .venv/Lib/site-packages/setuptools/command/install_egg_info.py +57 -0
- .venv/Lib/site-packages/setuptools/command/install_lib.py +125 -0
- .venv/Lib/site-packages/setuptools/command/install_scripts.py +66 -0
- .venv/Lib/site-packages/setuptools/command/launcher manifest.xml +15 -0
- .venv/Lib/site-packages/setuptools/command/register.py +18 -0
- .venv/Lib/site-packages/setuptools/command/rotate.py +62 -0
- .venv/Lib/site-packages/setuptools/command/saveopts.py +21 -0
- .venv/Lib/site-packages/setuptools/command/sdist.py +215 -0
- .venv/Lib/site-packages/setuptools/command/setopt.py +138 -0
.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__init__.py
ADDED
|
@@ -0,0 +1,342 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Utilities for determining application-specific dirs. See <https://github.com/platformdirs/platformdirs> for details and
|
| 3 |
+
usage.
|
| 4 |
+
"""
|
| 5 |
+
from __future__ import annotations
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
import sys
|
| 9 |
+
from pathlib import Path
|
| 10 |
+
|
| 11 |
+
if sys.version_info >= (3, 8): # pragma: no cover (py38+)
|
| 12 |
+
from typing import Literal
|
| 13 |
+
else: # pragma: no cover (py38+)
|
| 14 |
+
from ..typing_extensions import Literal
|
| 15 |
+
|
| 16 |
+
from .api import PlatformDirsABC
|
| 17 |
+
from .version import __version__
|
| 18 |
+
from .version import __version_tuple__ as __version_info__
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def _set_platform_dir_class() -> type[PlatformDirsABC]:
|
| 22 |
+
if sys.platform == "win32":
|
| 23 |
+
from .windows import Windows as Result
|
| 24 |
+
elif sys.platform == "darwin":
|
| 25 |
+
from .macos import MacOS as Result
|
| 26 |
+
else:
|
| 27 |
+
from .unix import Unix as Result
|
| 28 |
+
|
| 29 |
+
if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
|
| 30 |
+
|
| 31 |
+
if os.getenv("SHELL") or os.getenv("PREFIX"):
|
| 32 |
+
return Result
|
| 33 |
+
|
| 34 |
+
from .android import _android_folder
|
| 35 |
+
|
| 36 |
+
if _android_folder() is not None:
|
| 37 |
+
from .android import Android
|
| 38 |
+
|
| 39 |
+
return Android # return to avoid redefinition of result
|
| 40 |
+
|
| 41 |
+
return Result
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
PlatformDirs = _set_platform_dir_class() #: Currently active platform
|
| 45 |
+
AppDirs = PlatformDirs #: Backwards compatibility with appdirs
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def user_data_dir(
|
| 49 |
+
appname: str | None = None,
|
| 50 |
+
appauthor: str | None | Literal[False] = None,
|
| 51 |
+
version: str | None = None,
|
| 52 |
+
roaming: bool = False,
|
| 53 |
+
) -> str:
|
| 54 |
+
"""
|
| 55 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 56 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 57 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 58 |
+
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
|
| 59 |
+
:returns: data directory tied to the user
|
| 60 |
+
"""
|
| 61 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_dir
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def site_data_dir(
|
| 65 |
+
appname: str | None = None,
|
| 66 |
+
appauthor: str | None | Literal[False] = None,
|
| 67 |
+
version: str | None = None,
|
| 68 |
+
multipath: bool = False,
|
| 69 |
+
) -> str:
|
| 70 |
+
"""
|
| 71 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 72 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 73 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 74 |
+
:param multipath: See `roaming <platformdirs.api.PlatformDirsABC.multipath>`.
|
| 75 |
+
:returns: data directory shared by users
|
| 76 |
+
"""
|
| 77 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_dir
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def user_config_dir(
|
| 81 |
+
appname: str | None = None,
|
| 82 |
+
appauthor: str | None | Literal[False] = None,
|
| 83 |
+
version: str | None = None,
|
| 84 |
+
roaming: bool = False,
|
| 85 |
+
) -> str:
|
| 86 |
+
"""
|
| 87 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 88 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 89 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 90 |
+
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
|
| 91 |
+
:returns: config directory tied to the user
|
| 92 |
+
"""
|
| 93 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_dir
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def site_config_dir(
|
| 97 |
+
appname: str | None = None,
|
| 98 |
+
appauthor: str | None | Literal[False] = None,
|
| 99 |
+
version: str | None = None,
|
| 100 |
+
multipath: bool = False,
|
| 101 |
+
) -> str:
|
| 102 |
+
"""
|
| 103 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 104 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 105 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 106 |
+
:param multipath: See `roaming <platformdirs.api.PlatformDirsABC.multipath>`.
|
| 107 |
+
:returns: config directory shared by the users
|
| 108 |
+
"""
|
| 109 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_dir
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def user_cache_dir(
|
| 113 |
+
appname: str | None = None,
|
| 114 |
+
appauthor: str | None | Literal[False] = None,
|
| 115 |
+
version: str | None = None,
|
| 116 |
+
opinion: bool = True,
|
| 117 |
+
) -> str:
|
| 118 |
+
"""
|
| 119 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 120 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 121 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 122 |
+
:param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
|
| 123 |
+
:returns: cache directory tied to the user
|
| 124 |
+
"""
|
| 125 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_dir
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def user_state_dir(
|
| 129 |
+
appname: str | None = None,
|
| 130 |
+
appauthor: str | None | Literal[False] = None,
|
| 131 |
+
version: str | None = None,
|
| 132 |
+
roaming: bool = False,
|
| 133 |
+
) -> str:
|
| 134 |
+
"""
|
| 135 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 136 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 137 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 138 |
+
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
|
| 139 |
+
:returns: state directory tied to the user
|
| 140 |
+
"""
|
| 141 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_dir
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def user_log_dir(
|
| 145 |
+
appname: str | None = None,
|
| 146 |
+
appauthor: str | None | Literal[False] = None,
|
| 147 |
+
version: str | None = None,
|
| 148 |
+
opinion: bool = True,
|
| 149 |
+
) -> str:
|
| 150 |
+
"""
|
| 151 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 152 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 153 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 154 |
+
:param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
|
| 155 |
+
:returns: log directory tied to the user
|
| 156 |
+
"""
|
| 157 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_dir
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def user_documents_dir() -> str:
|
| 161 |
+
"""
|
| 162 |
+
:returns: documents directory tied to the user
|
| 163 |
+
"""
|
| 164 |
+
return PlatformDirs().user_documents_dir
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def user_runtime_dir(
|
| 168 |
+
appname: str | None = None,
|
| 169 |
+
appauthor: str | None | Literal[False] = None,
|
| 170 |
+
version: str | None = None,
|
| 171 |
+
opinion: bool = True,
|
| 172 |
+
) -> str:
|
| 173 |
+
"""
|
| 174 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 175 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 176 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 177 |
+
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
|
| 178 |
+
:returns: runtime directory tied to the user
|
| 179 |
+
"""
|
| 180 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_dir
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
def user_data_path(
|
| 184 |
+
appname: str | None = None,
|
| 185 |
+
appauthor: str | None | Literal[False] = None,
|
| 186 |
+
version: str | None = None,
|
| 187 |
+
roaming: bool = False,
|
| 188 |
+
) -> Path:
|
| 189 |
+
"""
|
| 190 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 191 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 192 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 193 |
+
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
|
| 194 |
+
:returns: data path tied to the user
|
| 195 |
+
"""
|
| 196 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_path
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
def site_data_path(
|
| 200 |
+
appname: str | None = None,
|
| 201 |
+
appauthor: str | None | Literal[False] = None,
|
| 202 |
+
version: str | None = None,
|
| 203 |
+
multipath: bool = False,
|
| 204 |
+
) -> Path:
|
| 205 |
+
"""
|
| 206 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 207 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 208 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 209 |
+
:param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
|
| 210 |
+
:returns: data path shared by users
|
| 211 |
+
"""
|
| 212 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_path
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
def user_config_path(
|
| 216 |
+
appname: str | None = None,
|
| 217 |
+
appauthor: str | None | Literal[False] = None,
|
| 218 |
+
version: str | None = None,
|
| 219 |
+
roaming: bool = False,
|
| 220 |
+
) -> Path:
|
| 221 |
+
"""
|
| 222 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 223 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 224 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 225 |
+
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
|
| 226 |
+
:returns: config path tied to the user
|
| 227 |
+
"""
|
| 228 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_path
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
def site_config_path(
|
| 232 |
+
appname: str | None = None,
|
| 233 |
+
appauthor: str | None | Literal[False] = None,
|
| 234 |
+
version: str | None = None,
|
| 235 |
+
multipath: bool = False,
|
| 236 |
+
) -> Path:
|
| 237 |
+
"""
|
| 238 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 239 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 240 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 241 |
+
:param multipath: See `roaming <platformdirs.api.PlatformDirsABC.multipath>`.
|
| 242 |
+
:returns: config path shared by the users
|
| 243 |
+
"""
|
| 244 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_path
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
def user_cache_path(
|
| 248 |
+
appname: str | None = None,
|
| 249 |
+
appauthor: str | None | Literal[False] = None,
|
| 250 |
+
version: str | None = None,
|
| 251 |
+
opinion: bool = True,
|
| 252 |
+
) -> Path:
|
| 253 |
+
"""
|
| 254 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 255 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 256 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 257 |
+
:param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
|
| 258 |
+
:returns: cache path tied to the user
|
| 259 |
+
"""
|
| 260 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_path
|
| 261 |
+
|
| 262 |
+
|
| 263 |
+
def user_state_path(
|
| 264 |
+
appname: str | None = None,
|
| 265 |
+
appauthor: str | None | Literal[False] = None,
|
| 266 |
+
version: str | None = None,
|
| 267 |
+
roaming: bool = False,
|
| 268 |
+
) -> Path:
|
| 269 |
+
"""
|
| 270 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 271 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 272 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 273 |
+
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
|
| 274 |
+
:returns: state path tied to the user
|
| 275 |
+
"""
|
| 276 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_path
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
def user_log_path(
|
| 280 |
+
appname: str | None = None,
|
| 281 |
+
appauthor: str | None | Literal[False] = None,
|
| 282 |
+
version: str | None = None,
|
| 283 |
+
opinion: bool = True,
|
| 284 |
+
) -> Path:
|
| 285 |
+
"""
|
| 286 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 287 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 288 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 289 |
+
:param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
|
| 290 |
+
:returns: log path tied to the user
|
| 291 |
+
"""
|
| 292 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_path
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
def user_documents_path() -> Path:
|
| 296 |
+
"""
|
| 297 |
+
:returns: documents path tied to the user
|
| 298 |
+
"""
|
| 299 |
+
return PlatformDirs().user_documents_path
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
def user_runtime_path(
|
| 303 |
+
appname: str | None = None,
|
| 304 |
+
appauthor: str | None | Literal[False] = None,
|
| 305 |
+
version: str | None = None,
|
| 306 |
+
opinion: bool = True,
|
| 307 |
+
) -> Path:
|
| 308 |
+
"""
|
| 309 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
| 310 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
| 311 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
| 312 |
+
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
|
| 313 |
+
:returns: runtime path tied to the user
|
| 314 |
+
"""
|
| 315 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_path
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
__all__ = [
|
| 319 |
+
"__version__",
|
| 320 |
+
"__version_info__",
|
| 321 |
+
"PlatformDirs",
|
| 322 |
+
"AppDirs",
|
| 323 |
+
"PlatformDirsABC",
|
| 324 |
+
"user_data_dir",
|
| 325 |
+
"user_config_dir",
|
| 326 |
+
"user_cache_dir",
|
| 327 |
+
"user_state_dir",
|
| 328 |
+
"user_log_dir",
|
| 329 |
+
"user_documents_dir",
|
| 330 |
+
"user_runtime_dir",
|
| 331 |
+
"site_data_dir",
|
| 332 |
+
"site_config_dir",
|
| 333 |
+
"user_data_path",
|
| 334 |
+
"user_config_path",
|
| 335 |
+
"user_cache_path",
|
| 336 |
+
"user_state_path",
|
| 337 |
+
"user_log_path",
|
| 338 |
+
"user_documents_path",
|
| 339 |
+
"user_runtime_path",
|
| 340 |
+
"site_data_path",
|
| 341 |
+
"site_config_path",
|
| 342 |
+
]
|
.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/__main__.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from platformdirs import PlatformDirs, __version__
|
| 4 |
+
|
| 5 |
+
PROPS = (
|
| 6 |
+
"user_data_dir",
|
| 7 |
+
"user_config_dir",
|
| 8 |
+
"user_cache_dir",
|
| 9 |
+
"user_state_dir",
|
| 10 |
+
"user_log_dir",
|
| 11 |
+
"user_documents_dir",
|
| 12 |
+
"user_runtime_dir",
|
| 13 |
+
"site_data_dir",
|
| 14 |
+
"site_config_dir",
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def main() -> None:
|
| 19 |
+
app_name = "MyApp"
|
| 20 |
+
app_author = "MyCompany"
|
| 21 |
+
|
| 22 |
+
print(f"-- platformdirs {__version__} --")
|
| 23 |
+
|
| 24 |
+
print("-- app dirs (with optional 'version')")
|
| 25 |
+
dirs = PlatformDirs(app_name, app_author, version="1.0")
|
| 26 |
+
for prop in PROPS:
|
| 27 |
+
print(f"{prop}: {getattr(dirs, prop)}")
|
| 28 |
+
|
| 29 |
+
print("\n-- app dirs (without optional 'version')")
|
| 30 |
+
dirs = PlatformDirs(app_name, app_author)
|
| 31 |
+
for prop in PROPS:
|
| 32 |
+
print(f"{prop}: {getattr(dirs, prop)}")
|
| 33 |
+
|
| 34 |
+
print("\n-- app dirs (without optional 'appauthor')")
|
| 35 |
+
dirs = PlatformDirs(app_name)
|
| 36 |
+
for prop in PROPS:
|
| 37 |
+
print(f"{prop}: {getattr(dirs, prop)}")
|
| 38 |
+
|
| 39 |
+
print("\n-- app dirs (with disabled 'appauthor')")
|
| 40 |
+
dirs = PlatformDirs(app_name, appauthor=False)
|
| 41 |
+
for prop in PROPS:
|
| 42 |
+
print(f"{prop}: {getattr(dirs, prop)}")
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
if __name__ == "__main__":
|
| 46 |
+
main()
|
.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/api.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
from abc import ABC, abstractmethod
|
| 6 |
+
from pathlib import Path
|
| 7 |
+
|
| 8 |
+
if sys.version_info >= (3, 8): # pragma: no branch
|
| 9 |
+
from typing import Literal # pragma: no cover
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class PlatformDirsABC(ABC):
|
| 13 |
+
"""
|
| 14 |
+
Abstract base class for platform directories.
|
| 15 |
+
"""
|
| 16 |
+
|
| 17 |
+
def __init__(
|
| 18 |
+
self,
|
| 19 |
+
appname: str | None = None,
|
| 20 |
+
appauthor: str | None | Literal[False] = None,
|
| 21 |
+
version: str | None = None,
|
| 22 |
+
roaming: bool = False,
|
| 23 |
+
multipath: bool = False,
|
| 24 |
+
opinion: bool = True,
|
| 25 |
+
):
|
| 26 |
+
"""
|
| 27 |
+
Create a new platform directory.
|
| 28 |
+
|
| 29 |
+
:param appname: See `appname`.
|
| 30 |
+
:param appauthor: See `appauthor`.
|
| 31 |
+
:param version: See `version`.
|
| 32 |
+
:param roaming: See `roaming`.
|
| 33 |
+
:param multipath: See `multipath`.
|
| 34 |
+
:param opinion: See `opinion`.
|
| 35 |
+
"""
|
| 36 |
+
self.appname = appname #: The name of application.
|
| 37 |
+
self.appauthor = appauthor
|
| 38 |
+
"""
|
| 39 |
+
The name of the app author or distributing body for this application. Typically, it is the owning company name.
|
| 40 |
+
Defaults to `appname`. You may pass ``False`` to disable it.
|
| 41 |
+
"""
|
| 42 |
+
self.version = version
|
| 43 |
+
"""
|
| 44 |
+
An optional version path element to append to the path. You might want to use this if you want multiple versions
|
| 45 |
+
of your app to be able to run independently. If used, this would typically be ``<major>.<minor>``.
|
| 46 |
+
"""
|
| 47 |
+
self.roaming = roaming
|
| 48 |
+
"""
|
| 49 |
+
Whether to use the roaming appdata directory on Windows. That means that for users on a Windows network setup
|
| 50 |
+
for roaming profiles, this user data will be synced on login (see
|
| 51 |
+
`here <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>`_).
|
| 52 |
+
"""
|
| 53 |
+
self.multipath = multipath
|
| 54 |
+
"""
|
| 55 |
+
An optional parameter only applicable to Unix/Linux which indicates that the entire list of data dirs should be
|
| 56 |
+
returned. By default, the first item would only be returned.
|
| 57 |
+
"""
|
| 58 |
+
self.opinion = opinion #: A flag to indicating to use opinionated values.
|
| 59 |
+
|
| 60 |
+
def _append_app_name_and_version(self, *base: str) -> str:
|
| 61 |
+
params = list(base[1:])
|
| 62 |
+
if self.appname:
|
| 63 |
+
params.append(self.appname)
|
| 64 |
+
if self.version:
|
| 65 |
+
params.append(self.version)
|
| 66 |
+
return os.path.join(base[0], *params)
|
| 67 |
+
|
| 68 |
+
@property
|
| 69 |
+
@abstractmethod
|
| 70 |
+
def user_data_dir(self) -> str:
|
| 71 |
+
""":return: data directory tied to the user"""
|
| 72 |
+
|
| 73 |
+
@property
|
| 74 |
+
@abstractmethod
|
| 75 |
+
def site_data_dir(self) -> str:
|
| 76 |
+
""":return: data directory shared by users"""
|
| 77 |
+
|
| 78 |
+
@property
|
| 79 |
+
@abstractmethod
|
| 80 |
+
def user_config_dir(self) -> str:
|
| 81 |
+
""":return: config directory tied to the user"""
|
| 82 |
+
|
| 83 |
+
@property
|
| 84 |
+
@abstractmethod
|
| 85 |
+
def site_config_dir(self) -> str:
|
| 86 |
+
""":return: config directory shared by the users"""
|
| 87 |
+
|
| 88 |
+
@property
|
| 89 |
+
@abstractmethod
|
| 90 |
+
def user_cache_dir(self) -> str:
|
| 91 |
+
""":return: cache directory tied to the user"""
|
| 92 |
+
|
| 93 |
+
@property
|
| 94 |
+
@abstractmethod
|
| 95 |
+
def user_state_dir(self) -> str:
|
| 96 |
+
""":return: state directory tied to the user"""
|
| 97 |
+
|
| 98 |
+
@property
|
| 99 |
+
@abstractmethod
|
| 100 |
+
def user_log_dir(self) -> str:
|
| 101 |
+
""":return: log directory tied to the user"""
|
| 102 |
+
|
| 103 |
+
@property
|
| 104 |
+
@abstractmethod
|
| 105 |
+
def user_documents_dir(self) -> str:
|
| 106 |
+
""":return: documents directory tied to the user"""
|
| 107 |
+
|
| 108 |
+
@property
|
| 109 |
+
@abstractmethod
|
| 110 |
+
def user_runtime_dir(self) -> str:
|
| 111 |
+
""":return: runtime directory tied to the user"""
|
| 112 |
+
|
| 113 |
+
@property
|
| 114 |
+
def user_data_path(self) -> Path:
|
| 115 |
+
""":return: data path tied to the user"""
|
| 116 |
+
return Path(self.user_data_dir)
|
| 117 |
+
|
| 118 |
+
@property
|
| 119 |
+
def site_data_path(self) -> Path:
|
| 120 |
+
""":return: data path shared by users"""
|
| 121 |
+
return Path(self.site_data_dir)
|
| 122 |
+
|
| 123 |
+
@property
|
| 124 |
+
def user_config_path(self) -> Path:
|
| 125 |
+
""":return: config path tied to the user"""
|
| 126 |
+
return Path(self.user_config_dir)
|
| 127 |
+
|
| 128 |
+
@property
|
| 129 |
+
def site_config_path(self) -> Path:
|
| 130 |
+
""":return: config path shared by the users"""
|
| 131 |
+
return Path(self.site_config_dir)
|
| 132 |
+
|
| 133 |
+
@property
|
| 134 |
+
def user_cache_path(self) -> Path:
|
| 135 |
+
""":return: cache path tied to the user"""
|
| 136 |
+
return Path(self.user_cache_dir)
|
| 137 |
+
|
| 138 |
+
@property
|
| 139 |
+
def user_state_path(self) -> Path:
|
| 140 |
+
""":return: state path tied to the user"""
|
| 141 |
+
return Path(self.user_state_dir)
|
| 142 |
+
|
| 143 |
+
@property
|
| 144 |
+
def user_log_path(self) -> Path:
|
| 145 |
+
""":return: log path tied to the user"""
|
| 146 |
+
return Path(self.user_log_dir)
|
| 147 |
+
|
| 148 |
+
@property
|
| 149 |
+
def user_documents_path(self) -> Path:
|
| 150 |
+
""":return: documents path tied to the user"""
|
| 151 |
+
return Path(self.user_documents_dir)
|
| 152 |
+
|
| 153 |
+
@property
|
| 154 |
+
def user_runtime_path(self) -> Path:
|
| 155 |
+
""":return: runtime path tied to the user"""
|
| 156 |
+
return Path(self.user_runtime_dir)
|
.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/macos.py
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
|
| 5 |
+
from .api import PlatformDirsABC
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class MacOS(PlatformDirsABC):
    """
    Platform directories for the macOS operating system. Follows the guidance from `Apple documentation
    <https://developer.apple.com/library/archive/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/MacOSXDirectories/MacOSXDirectories.html>`_.
    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>` and
    `version <platformdirs.api.PlatformDirsABC.version>`.
    """

    @property
    def user_data_dir(self) -> str:
        """:return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``"""
        base = os.path.expanduser("~/Library/Application Support/")
        return self._append_app_name_and_version(base)

    @property
    def site_data_dir(self) -> str:
        """:return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``"""
        return self._append_app_name_and_version("/Library/Application Support")

    @property
    def user_config_dir(self) -> str:
        """:return: config directory tied to the user, e.g. ``~/Library/Preferences/$appname/$version``"""
        base = os.path.expanduser("~/Library/Preferences/")
        return self._append_app_name_and_version(base)

    @property
    def site_config_dir(self) -> str:
        """:return: config directory shared by the users, e.g. ``/Library/Preferences/$appname``"""
        return self._append_app_name_and_version("/Library/Preferences")

    @property
    def user_cache_dir(self) -> str:
        """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``"""
        base = os.path.expanduser("~/Library/Caches")
        return self._append_app_name_and_version(base)

    @property
    def user_state_dir(self) -> str:
        """:return: state directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def user_log_dir(self) -> str:
        """:return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``"""
        base = os.path.expanduser("~/Library/Logs")
        return self._append_app_name_and_version(base)

    @property
    def user_documents_dir(self) -> str:
        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
        return os.path.expanduser("~/Documents")

    @property
    def user_runtime_dir(self) -> str:
        """:return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``"""
        base = os.path.expanduser("~/Library/Caches/TemporaryItems")
        return self._append_app_name_and_version(base)
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
# Public API of this module.
__all__ = [
    "MacOS",
]
|
.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/unix.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
from configparser import ConfigParser
|
| 6 |
+
from pathlib import Path
|
| 7 |
+
|
| 8 |
+
from .api import PlatformDirsABC
|
| 9 |
+
|
| 10 |
+
# os.getuid only exists on POSIX; provide a stub elsewhere so the module still
# imports (the runtime-dir code path that calls it is Linux-only anyway).
if sys.platform.startswith("linux"):  # pragma: no branch # no op check, only to please the type checker
    from os import getuid
else:

    def getuid() -> int:
        raise RuntimeError("should only be used on Linux")
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class Unix(PlatformDirsABC):
    """
    On Unix/Linux, we follow the
    `XDG Basedir Spec <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_. The spec allows
    overriding directories with environment variables. The examples shown are the default values, alongside the name of
    the environment variable that overrides them. Makes use of the
    `appname <platformdirs.api.PlatformDirsABC.appname>`,
    `version <platformdirs.api.PlatformDirsABC.version>`,
    `multipath <platformdirs.api.PlatformDirsABC.multipath>`,
    `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    """

    @property
    def user_data_dir(self) -> str:
        """
        :return: data directory tied to the user, e.g. ``~/.local/share/$appname/$version`` or
         ``$XDG_DATA_HOME/$appname/$version``
        """
        # A set-but-blank XDG variable is treated as unset, hence the strip().
        path = os.environ.get("XDG_DATA_HOME", "")
        if not path.strip():
            path = os.path.expanduser("~/.local/share")
        return self._append_app_name_and_version(path)

    @property
    def site_data_dir(self) -> str:
        """
        :return: data directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>` is
         enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS
         path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
        """
        # XDG default for $XDG_DATA_DIRS; only first, if multipath is False
        path = os.environ.get("XDG_DATA_DIRS", "")
        if not path.strip():
            path = f"/usr/local/share{os.pathsep}/usr/share"
        return self._with_multi_path(path)

    def _with_multi_path(self, path: str) -> str:
        # Append appname/version to every entry of an os.pathsep-separated
        # list (or to just the first entry when multipath is disabled).
        path_list = path.split(os.pathsep)
        if not self.multipath:
            path_list = path_list[0:1]
        path_list = [self._append_app_name_and_version(os.path.expanduser(p)) for p in path_list]
        return os.pathsep.join(path_list)

    @property
    def user_config_dir(self) -> str:
        """
        :return: config directory tied to the user, e.g. ``~/.config/$appname/$version`` or
         ``$XDG_CONFIG_HOME/$appname/$version``
        """
        path = os.environ.get("XDG_CONFIG_HOME", "")
        if not path.strip():
            path = os.path.expanduser("~/.config")
        return self._append_app_name_and_version(path)

    @property
    def site_config_dir(self) -> str:
        """
        :return: config directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>`
         is enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS
         path separator), e.g. ``/etc/xdg/$appname/$version``
        """
        # XDG default for $XDG_CONFIG_DIRS only first, if multipath is False
        path = os.environ.get("XDG_CONFIG_DIRS", "")
        if not path.strip():
            path = "/etc/xdg"
        return self._with_multi_path(path)

    @property
    def user_cache_dir(self) -> str:
        """
        :return: cache directory tied to the user, e.g. ``~/.cache/$appname/$version`` or
         ``~/$XDG_CACHE_HOME/$appname/$version``
        """
        path = os.environ.get("XDG_CACHE_HOME", "")
        if not path.strip():
            path = os.path.expanduser("~/.cache")
        return self._append_app_name_and_version(path)

    @property
    def user_state_dir(self) -> str:
        """
        :return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or
         ``$XDG_STATE_HOME/$appname/$version``
        """
        path = os.environ.get("XDG_STATE_HOME", "")
        if not path.strip():
            path = os.path.expanduser("~/.local/state")
        return self._append_app_name_and_version(path)

    @property
    def user_log_dir(self) -> str:
        """
        :return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it
        """
        path = self.user_state_dir
        if self.opinion:
            path = os.path.join(path, "log")
        return path

    @property
    def user_documents_dir(self) -> str:
        """
        :return: documents directory tied to the user, e.g. ``~/Documents``
        """
        # Resolution order: xdg-user-dirs config file, then the raw
        # environment variable, then the conventional ~/Documents fallback.
        documents_dir = _get_user_dirs_folder("XDG_DOCUMENTS_DIR")
        if documents_dir is None:
            documents_dir = os.environ.get("XDG_DOCUMENTS_DIR", "").strip()
            if not documents_dir:
                documents_dir = os.path.expanduser("~/Documents")

        return documents_dir

    @property
    def user_runtime_dir(self) -> str:
        """
        :return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or
         ``$XDG_RUNTIME_DIR/$appname/$version``
        """
        path = os.environ.get("XDG_RUNTIME_DIR", "")
        if not path.strip():
            path = f"/run/user/{getuid()}"
        return self._append_app_name_and_version(path)

    @property
    def site_data_path(self) -> Path:
        """:return: data path shared by users. Only return first item, even if ``multipath`` is set to ``True``"""
        return self._first_item_as_path_if_multipath(self.site_data_dir)

    @property
    def site_config_path(self) -> Path:
        """:return: config path shared by the users. Only return first item, even if ``multipath`` is set to ``True``"""
        return self._first_item_as_path_if_multipath(self.site_config_dir)

    def _first_item_as_path_if_multipath(self, directory: str) -> Path:
        # Path objects cannot represent an os.pathsep-joined list, so keep
        # only the first entry when multipath is enabled.
        if self.multipath:
            # If multipath is True, the first path is returned.
            directory = directory.split(os.pathsep)[0]
        return Path(directory)
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
def _get_user_dirs_folder(key: str) -> str | None:
    """Return directory from user-dirs.dirs config file. See https://freedesktop.org/wiki/Software/xdg-user-dirs/"""
    config_file = os.path.join(Unix().user_config_dir, "user-dirs.dirs")
    if not os.path.exists(config_file):
        return None

    parser = ConfigParser()
    with open(config_file) as handle:
        # Prepend a dummy section header so ConfigParser accepts the file.
        parser.read_string(f"[top]\n{handle.read()}")

    if key not in parser["top"]:
        return None

    raw = parser["top"][key].strip('"')
    # Expand the conventional $HOME prefix used by xdg-user-dirs entries.
    return raw.replace("$HOME", os.path.expanduser("~"))
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
# Public API of this module.
__all__ = [
    "Unix",
]
|
.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/version.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# file generated by setuptools_scm
# don't change, don't track in version control
# (each value is deliberately bound under two names, per the
# setuptools_scm write_to template)
__version__ = version = '2.6.2'
__version_tuple__ = version_tuple = (2, 6, 2)
|
.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/windows.py
ADDED
|
@@ -0,0 +1,184 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import ctypes
|
| 4 |
+
import os
|
| 5 |
+
import sys
|
| 6 |
+
from functools import lru_cache
|
| 7 |
+
from typing import Callable
|
| 8 |
+
|
| 9 |
+
from .api import PlatformDirsABC
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class Windows(PlatformDirsABC):
    """`MSDN on where to store app data files
    <http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120>`_.
    Makes use of the
    `appname <platformdirs.api.PlatformDirsABC.appname>`,
    `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`,
    `version <platformdirs.api.PlatformDirsABC.version>`,
    `roaming <platformdirs.api.PlatformDirsABC.roaming>`,
    `opinion <platformdirs.api.PlatformDirsABC.opinion>`."""

    @property
    def user_data_dir(self) -> str:
        """
        :return: data directory tied to the user, e.g.
         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname`` (not roaming) or
         ``%USERPROFILE%\\AppData\\Roaming\\$appauthor\\$appname`` (roaming)
        """
        const = "CSIDL_APPDATA" if self.roaming else "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(get_win_folder(const))
        return self._append_parts(path)

    def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str:
        # Compose path/[author]/appname[/opinion_value][/version]; the author
        # segment is skipped only when appauthor is explicitly False.
        params = []
        if self.appname:
            if self.appauthor is not False:
                author = self.appauthor or self.appname
                params.append(author)
            params.append(self.appname)
            if opinion_value is not None and self.opinion:
                params.append(opinion_value)
            if self.version:
                params.append(self.version)
        return os.path.join(path, *params)

    @property
    def site_data_dir(self) -> str:
        """:return: data directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname``"""
        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
        return self._append_parts(path)

    @property
    def user_config_dir(self) -> str:
        """:return: config directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def site_config_dir(self) -> str:
        """:return: config directory shared by the users, same as `site_data_dir`"""
        return self.site_data_dir

    @property
    def user_cache_dir(self) -> str:
        """
        :return: cache directory tied to the user (if opinionated with ``Cache`` folder within ``$appname``) e.g.
         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname\\Cache\\$version``
        """
        path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA"))
        return self._append_parts(path, opinion_value="Cache")

    @property
    def user_state_dir(self) -> str:
        """:return: state directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def user_log_dir(self) -> str:
        """
        :return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it
        """
        path = self.user_data_dir
        if self.opinion:
            path = os.path.join(path, "Logs")
        return path

    @property
    def user_documents_dir(self) -> str:
        """
        :return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``
        """
        return os.path.normpath(get_win_folder("CSIDL_PERSONAL"))

    @property
    def user_runtime_dir(self) -> str:
        """
        :return: runtime directory tied to the user, e.g.
         ``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname``
        """
        path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp"))
        return self._append_parts(path)
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def get_win_folder_from_env_vars(csidl_name: str) -> str:
    """Get folder from environment variables."""
    # CSIDL_PERSONAL has no dedicated environment variable; derive it from
    # the user profile directory instead.
    if csidl_name == "CSIDL_PERSONAL":
        profile = os.path.normpath(os.environ["USERPROFILE"])
        return os.path.join(profile, "Documents")

    mapping = {
        "CSIDL_APPDATA": "APPDATA",
        "CSIDL_COMMON_APPDATA": "ALLUSERSPROFILE",
        "CSIDL_LOCAL_APPDATA": "LOCALAPPDATA",
    }
    env_var_name = mapping.get(csidl_name)
    if env_var_name is None:
        raise ValueError(f"Unknown CSIDL name: {csidl_name}")
    value = os.environ.get(env_var_name)
    if value is None:
        raise ValueError(f"Unset environment variable: {env_var_name}")
    return value
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def get_win_folder_from_registry(csidl_name: str) -> str:
    """Get folder from the registry.

    This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
        "CSIDL_PERSONAL": "Personal",
    }.get(csidl_name)
    if folder_name is None:
        raise ValueError(f"Unknown CSIDL name: {csidl_name}")
    # only needed for mypy type checker to know that this code runs only on Windows
    if sys.platform != "win32":
        raise NotImplementedError
    import winreg

    shell_folders = winreg.OpenKey(
        winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders",
    )
    value, _ = winreg.QueryValueEx(shell_folders, folder_name)
    return str(value)
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def get_win_folder_via_ctypes(csidl_name: str) -> str:
    """Get folder with ctypes."""
    folder_id = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
        "CSIDL_PERSONAL": 5,
    }.get(csidl_name)
    if folder_id is None:
        raise ValueError(f"Unknown CSIDL name: {csidl_name}")

    path_buf = ctypes.create_unicode_buffer(1024)
    windll = getattr(ctypes, "windll")  # noqa: B009 # using getattr to avoid false positive with mypy type checker
    windll.shell32.SHGetFolderPathW(None, folder_id, None, 0, path_buf)

    # Downgrade to short path name if it has highbit chars.
    if any(ord(ch) > 255 for ch in path_buf):
        short_buf = ctypes.create_unicode_buffer(1024)
        if windll.kernel32.GetShortPathNameW(path_buf.value, short_buf, 1024):
            path_buf = short_buf

    return path_buf.value
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
def _pick_get_win_folder() -> Callable[[str], str]:
    # Strategy selection: Win32 API via ctypes when available, then the
    # registry, then plain environment variables as a last resort.
    if hasattr(ctypes, "windll"):
        return get_win_folder_via_ctypes
    try:
        import winreg  # noqa: F401
    except ImportError:
        return get_win_folder_from_env_vars
    return get_win_folder_from_registry
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
# Resolve the best available lookup strategy once at import time and memoise
# its results with an unbounded LRU cache.
get_win_folder = lru_cache(maxsize=None)(_pick_get_win_folder())

# Public API of this module.
__all__ = [
    "Windows",
]
|
.venv/Lib/site-packages/setuptools/_distutils/archive_util.py
ADDED
|
@@ -0,0 +1,280 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.archive_util
|
| 2 |
+
|
| 3 |
+
Utility functions for creating archive files (tarballs, zip files,
|
| 4 |
+
that sort of thing)."""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
from warnings import warn
|
| 8 |
+
import sys
|
| 9 |
+
|
| 10 |
+
try:
|
| 11 |
+
import zipfile
|
| 12 |
+
except ImportError:
|
| 13 |
+
zipfile = None
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
from .errors import DistutilsExecError
|
| 17 |
+
from .spawn import spawn
|
| 18 |
+
from .dir_util import mkpath
|
| 19 |
+
from ._log import log
|
| 20 |
+
|
| 21 |
+
try:
|
| 22 |
+
from pwd import getpwnam
|
| 23 |
+
except ImportError:
|
| 24 |
+
getpwnam = None
|
| 25 |
+
|
| 26 |
+
try:
|
| 27 |
+
from grp import getgrnam
|
| 28 |
+
except ImportError:
|
| 29 |
+
getgrnam = None
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def _get_gid(name):
|
| 33 |
+
"""Returns a gid, given a group name."""
|
| 34 |
+
if getgrnam is None or name is None:
|
| 35 |
+
return None
|
| 36 |
+
try:
|
| 37 |
+
result = getgrnam(name)
|
| 38 |
+
except KeyError:
|
| 39 |
+
result = None
|
| 40 |
+
if result is not None:
|
| 41 |
+
return result[2]
|
| 42 |
+
return None
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def _get_uid(name):
|
| 46 |
+
"""Returns an uid, given a user name."""
|
| 47 |
+
if getpwnam is None or name is None:
|
| 48 |
+
return None
|
| 49 |
+
try:
|
| 50 |
+
result = getpwnam(name)
|
| 51 |
+
except KeyError:
|
| 52 |
+
result = None
|
| 53 |
+
if result is not None:
|
| 54 |
+
return result[2]
|
| 55 |
+
return None
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def make_tarball(
    base_name, base_dir, compress="gzip", verbose=0, dry_run=0, owner=None, group=None
):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or
    None. ("compress" will be deprecated in Python 3.2)

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_dir' + ".tar", possibly plus
    the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z").

    Returns the output filename.
    """
    # Map the 'compress' flavour onto tarfile's mode suffix; the external
    # 'compress' program and None both use a plain uncompressed tar.
    tar_compression = {
        'gzip': 'gz',
        'bzip2': 'bz2',
        'xz': 'xz',
        None: '',
        'compress': '',
    }
    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', 'compress': '.Z'}

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext.keys():
        raise ValueError(
            "bad value for 'compress': must be None, 'gzip', 'bzip2', "
            "'xz' or 'compress'"
        )

    archive_name = base_name + '.tar'
    # The external 'compress' program appends '.Z' itself, so only add the
    # extension up-front for the tarfile-handled flavours.
    if compress != 'compress':
        archive_name += compress_ext.get(compress, '')

    mkpath(os.path.dirname(archive_name), dry_run=dry_run)

    # creating the tarball
    import tarfile  # late import so Python build itself doesn't break

    log.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        # tarfile filter: stamp the requested owner/group onto each member.
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    # compression using `compress`
    if compress == 'compress':
        warn("'compress' is deprecated.", DeprecationWarning)
        # the option varies depending on the platform
        compressed_name = archive_name + compress_ext[compress]
        if sys.platform == 'win32':
            cmd = [compress, archive_name, compressed_name]
        else:
            cmd = [compress, '-f', archive_name]
        spawn(cmd, dry_run=dry_run)
        return compressed_name

    return archive_name
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):  # noqa: C901
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip". Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path). If neither tool is
    available, raises DistutilsExecError. Returns the name of the output zip
    file.
    """
    zip_filename = base_name + ".zip"
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)

    # If zipfile module is not available, try spawning an external
    # 'zip' command.
    if zipfile is None:
        if verbose:
            zipoptions = "-r"
        else:
            zipoptions = "-rq"

        try:
            spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
        except DistutilsExecError:
            # XXX really should distinguish between "couldn't find
            # external 'zip' command" and "zip failed".
            raise DistutilsExecError(
                (
                    "unable to create zip file '%s': "
                    "could neither import the 'zipfile' module nor "
                    "find a standalone zip utility"
                )
                % zip_filename
            )

    else:
        log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

        if not dry_run:
            try:
                zip = zipfile.ZipFile(
                    zip_filename, "w", compression=zipfile.ZIP_DEFLATED
                )
            except RuntimeError:
                # ZIP_DEFLATED needs zlib; fall back to a stored archive.
                zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_STORED)

            with zip:
                # Record the base directory entry itself (unless archiving
                # the current directory), then walk and add every
                # subdirectory entry and regular file.
                if base_dir != os.curdir:
                    path = os.path.normpath(os.path.join(base_dir, ''))
                    zip.write(path, path)
                    log.info("adding '%s'", path)
                for dirpath, dirnames, filenames in os.walk(base_dir):
                    for name in dirnames:
                        path = os.path.normpath(os.path.join(dirpath, name, ''))
                        zip.write(path, path)
                        log.info("adding '%s'", path)
                    for name in filenames:
                        path = os.path.normpath(os.path.join(dirpath, name))
                        if os.path.isfile(path):
                            zip.write(path, path)
                            log.info("adding '%s'", path)

    return zip_filename
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
# Registry of supported archive formats:
# name -> (builder function, extra (arg, value) pairs passed to it,
#          human-readable description)
ARCHIVE_FORMATS = {
    'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"),
    'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"),
    'tar': (make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip': (make_zipfile, [], "ZIP file"),
}
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def check_archive_formats(formats):
    """Returns the first format from the 'format' list that is unknown.

    If all formats are known, returns None
    """
    # Scan lazily and stop at the first name missing from the registry.
    return next((fmt for fmt in formats if fmt not in ARCHIVE_FORMATS), None)
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
def make_archive(
    base_name,
    format,
    root_dir=None,
    base_dir=None,
    verbose=0,
    dry_run=0,
    owner=None,
    group=None,
):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "gztar",
    "bztar", "xztar", or "ztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive. 'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive. 'root_dir' and 'base_dir' both default
    to the current directory. Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.
    """
    # Remember the current directory so the chdir below can always be undone.
    save_cwd = os.getcwd()
    if root_dir is not None:
        log.debug("changing into '%s'", root_dir)
        # Make base_name absolute before chdir so the archive lands where
        # the caller asked, not relative to root_dir.
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = {'dry_run': dry_run}

    try:
        format_info = ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError("unknown archive format '%s'" % format)

    func = format_info[0]
    for arg, val in format_info[1]:
        kwargs[arg] = val

    # Owner/group only apply to the tar-based builders.
    if format != 'zip':
        kwargs['owner'] = owner
        kwargs['group'] = group

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        if root_dir is not None:
            log.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename
|
.venv/Lib/site-packages/setuptools/_distutils/bcppcompiler.py
ADDED
|
@@ -0,0 +1,401 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.bcppcompiler
|
| 2 |
+
|
| 3 |
+
Contains BorlandCCompiler, an implementation of the abstract CCompiler class
|
| 4 |
+
for the Borland C++ compiler.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
# This implementation by Lyle Johnson, based on the original msvccompiler.py
|
| 8 |
+
# module and using the directions originally published by Gordon Williams.
|
| 9 |
+
|
| 10 |
+
# XXX looks like there's a LOT of overlap between these two classes:
|
| 11 |
+
# someone should sit down and factor out the common code as
|
| 12 |
+
# WindowsCCompiler! --GPW
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
import os
|
| 16 |
+
import warnings
|
| 17 |
+
|
| 18 |
+
from .errors import (
|
| 19 |
+
DistutilsExecError,
|
| 20 |
+
CompileError,
|
| 21 |
+
LibError,
|
| 22 |
+
LinkError,
|
| 23 |
+
UnknownFileError,
|
| 24 |
+
)
|
| 25 |
+
from .ccompiler import CCompiler, gen_preprocess_options
|
| 26 |
+
from .file_util import write_file
|
| 27 |
+
from .dep_util import newer
|
| 28 |
+
from ._log import log
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
warnings.warn(
|
| 32 |
+
"bcppcompiler is deprecated and slated to be removed "
|
| 33 |
+
"in the future. Please discontinue use or file an issue "
|
| 34 |
+
"with pypa/distutils describing your use case.",
|
| 35 |
+
DeprecationWarning,
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class BCPPCompiler(CCompiler):
|
| 40 |
+
"""Concrete class that implements an interface to the Borland C/C++
|
| 41 |
+
compiler, as defined by the CCompiler abstract class.
|
| 42 |
+
"""
|
| 43 |
+
|
| 44 |
+
compiler_type = 'bcpp'
|
| 45 |
+
|
| 46 |
+
# Just set this so CCompiler's constructor doesn't barf. We currently
|
| 47 |
+
# don't use the 'set_executables()' bureaucracy provided by CCompiler,
|
| 48 |
+
# as it really isn't necessary for this sort of single-compiler class.
|
| 49 |
+
# Would be nice to have a consistent interface with UnixCCompiler,
|
| 50 |
+
# though, so it's worth thinking about.
|
| 51 |
+
executables = {}
|
| 52 |
+
|
| 53 |
+
# Private class data (need to distinguish C from C++ source for compiler)
|
| 54 |
+
_c_extensions = ['.c']
|
| 55 |
+
_cpp_extensions = ['.cc', '.cpp', '.cxx']
|
| 56 |
+
|
| 57 |
+
# Needed for the filename generation methods provided by the
|
| 58 |
+
# base class, CCompiler.
|
| 59 |
+
src_extensions = _c_extensions + _cpp_extensions
|
| 60 |
+
obj_extension = '.obj'
|
| 61 |
+
static_lib_extension = '.lib'
|
| 62 |
+
shared_lib_extension = '.dll'
|
| 63 |
+
static_lib_format = shared_lib_format = '%s%s'
|
| 64 |
+
exe_extension = '.exe'
|
| 65 |
+
|
| 66 |
+
def __init__(self, verbose=0, dry_run=0, force=0):
|
| 67 |
+
super().__init__(verbose, dry_run, force)
|
| 68 |
+
|
| 69 |
+
# These executables are assumed to all be in the path.
|
| 70 |
+
# Borland doesn't seem to use any special registry settings to
|
| 71 |
+
# indicate their installation locations.
|
| 72 |
+
|
| 73 |
+
self.cc = "bcc32.exe"
|
| 74 |
+
self.linker = "ilink32.exe"
|
| 75 |
+
self.lib = "tlib.exe"
|
| 76 |
+
|
| 77 |
+
self.preprocess_options = None
|
| 78 |
+
self.compile_options = ['/tWM', '/O2', '/q', '/g0']
|
| 79 |
+
self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']
|
| 80 |
+
|
| 81 |
+
self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
|
| 82 |
+
self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
|
| 83 |
+
self.ldflags_static = []
|
| 84 |
+
self.ldflags_exe = ['/Gn', '/q', '/x']
|
| 85 |
+
self.ldflags_exe_debug = ['/Gn', '/q', '/x', '/r']
|
| 86 |
+
|
| 87 |
+
# -- Worker methods ------------------------------------------------
|
| 88 |
+
|
| 89 |
+
def compile( # noqa: C901
|
| 90 |
+
self,
|
| 91 |
+
sources,
|
| 92 |
+
output_dir=None,
|
| 93 |
+
macros=None,
|
| 94 |
+
include_dirs=None,
|
| 95 |
+
debug=0,
|
| 96 |
+
extra_preargs=None,
|
| 97 |
+
extra_postargs=None,
|
| 98 |
+
depends=None,
|
| 99 |
+
):
|
| 100 |
+
macros, objects, extra_postargs, pp_opts, build = self._setup_compile(
|
| 101 |
+
output_dir, macros, include_dirs, sources, depends, extra_postargs
|
| 102 |
+
)
|
| 103 |
+
compile_opts = extra_preargs or []
|
| 104 |
+
compile_opts.append('-c')
|
| 105 |
+
if debug:
|
| 106 |
+
compile_opts.extend(self.compile_options_debug)
|
| 107 |
+
else:
|
| 108 |
+
compile_opts.extend(self.compile_options)
|
| 109 |
+
|
| 110 |
+
for obj in objects:
|
| 111 |
+
try:
|
| 112 |
+
src, ext = build[obj]
|
| 113 |
+
except KeyError:
|
| 114 |
+
continue
|
| 115 |
+
# XXX why do the normpath here?
|
| 116 |
+
src = os.path.normpath(src)
|
| 117 |
+
obj = os.path.normpath(obj)
|
| 118 |
+
# XXX _setup_compile() did a mkpath() too but before the normpath.
|
| 119 |
+
# Is it possible to skip the normpath?
|
| 120 |
+
self.mkpath(os.path.dirname(obj))
|
| 121 |
+
|
| 122 |
+
if ext == '.res':
|
| 123 |
+
# This is already a binary file -- skip it.
|
| 124 |
+
continue # the 'for' loop
|
| 125 |
+
if ext == '.rc':
|
| 126 |
+
# This needs to be compiled to a .res file -- do it now.
|
| 127 |
+
try:
|
| 128 |
+
self.spawn(["brcc32", "-fo", obj, src])
|
| 129 |
+
except DistutilsExecError as msg:
|
| 130 |
+
raise CompileError(msg)
|
| 131 |
+
continue # the 'for' loop
|
| 132 |
+
|
| 133 |
+
# The next two are both for the real compiler.
|
| 134 |
+
if ext in self._c_extensions:
|
| 135 |
+
input_opt = ""
|
| 136 |
+
elif ext in self._cpp_extensions:
|
| 137 |
+
input_opt = "-P"
|
| 138 |
+
else:
|
| 139 |
+
# Unknown file type -- no extra options. The compiler
|
| 140 |
+
# will probably fail, but let it just in case this is a
|
| 141 |
+
# file the compiler recognizes even if we don't.
|
| 142 |
+
input_opt = ""
|
| 143 |
+
|
| 144 |
+
output_opt = "-o" + obj
|
| 145 |
+
|
| 146 |
+
# Compiler command line syntax is: "bcc32 [options] file(s)".
|
| 147 |
+
# Note that the source file names must appear at the end of
|
| 148 |
+
# the command line.
|
| 149 |
+
try:
|
| 150 |
+
self.spawn(
|
| 151 |
+
[self.cc]
|
| 152 |
+
+ compile_opts
|
| 153 |
+
+ pp_opts
|
| 154 |
+
+ [input_opt, output_opt]
|
| 155 |
+
+ extra_postargs
|
| 156 |
+
+ [src]
|
| 157 |
+
)
|
| 158 |
+
except DistutilsExecError as msg:
|
| 159 |
+
raise CompileError(msg)
|
| 160 |
+
|
| 161 |
+
return objects
|
| 162 |
+
|
| 163 |
+
# compile ()
|
| 164 |
+
|
| 165 |
+
def create_static_lib(
|
| 166 |
+
self, objects, output_libname, output_dir=None, debug=0, target_lang=None
|
| 167 |
+
):
|
| 168 |
+
(objects, output_dir) = self._fix_object_args(objects, output_dir)
|
| 169 |
+
output_filename = self.library_filename(output_libname, output_dir=output_dir)
|
| 170 |
+
|
| 171 |
+
if self._need_link(objects, output_filename):
|
| 172 |
+
lib_args = [output_filename, '/u'] + objects
|
| 173 |
+
if debug:
|
| 174 |
+
pass # XXX what goes here?
|
| 175 |
+
try:
|
| 176 |
+
self.spawn([self.lib] + lib_args)
|
| 177 |
+
except DistutilsExecError as msg:
|
| 178 |
+
raise LibError(msg)
|
| 179 |
+
else:
|
| 180 |
+
log.debug("skipping %s (up-to-date)", output_filename)
|
| 181 |
+
|
| 182 |
+
# create_static_lib ()
|
| 183 |
+
|
| 184 |
+
def link( # noqa: C901
|
| 185 |
+
self,
|
| 186 |
+
target_desc,
|
| 187 |
+
objects,
|
| 188 |
+
output_filename,
|
| 189 |
+
output_dir=None,
|
| 190 |
+
libraries=None,
|
| 191 |
+
library_dirs=None,
|
| 192 |
+
runtime_library_dirs=None,
|
| 193 |
+
export_symbols=None,
|
| 194 |
+
debug=0,
|
| 195 |
+
extra_preargs=None,
|
| 196 |
+
extra_postargs=None,
|
| 197 |
+
build_temp=None,
|
| 198 |
+
target_lang=None,
|
| 199 |
+
):
|
| 200 |
+
# XXX this ignores 'build_temp'! should follow the lead of
|
| 201 |
+
# msvccompiler.py
|
| 202 |
+
|
| 203 |
+
(objects, output_dir) = self._fix_object_args(objects, output_dir)
|
| 204 |
+
(libraries, library_dirs, runtime_library_dirs) = self._fix_lib_args(
|
| 205 |
+
libraries, library_dirs, runtime_library_dirs
|
| 206 |
+
)
|
| 207 |
+
|
| 208 |
+
if runtime_library_dirs:
|
| 209 |
+
log.warning(
|
| 210 |
+
"I don't know what to do with 'runtime_library_dirs': %s",
|
| 211 |
+
str(runtime_library_dirs),
|
| 212 |
+
)
|
| 213 |
+
|
| 214 |
+
if output_dir is not None:
|
| 215 |
+
output_filename = os.path.join(output_dir, output_filename)
|
| 216 |
+
|
| 217 |
+
if self._need_link(objects, output_filename):
|
| 218 |
+
# Figure out linker args based on type of target.
|
| 219 |
+
if target_desc == CCompiler.EXECUTABLE:
|
| 220 |
+
startup_obj = 'c0w32'
|
| 221 |
+
if debug:
|
| 222 |
+
ld_args = self.ldflags_exe_debug[:]
|
| 223 |
+
else:
|
| 224 |
+
ld_args = self.ldflags_exe[:]
|
| 225 |
+
else:
|
| 226 |
+
startup_obj = 'c0d32'
|
| 227 |
+
if debug:
|
| 228 |
+
ld_args = self.ldflags_shared_debug[:]
|
| 229 |
+
else:
|
| 230 |
+
ld_args = self.ldflags_shared[:]
|
| 231 |
+
|
| 232 |
+
# Create a temporary exports file for use by the linker
|
| 233 |
+
if export_symbols is None:
|
| 234 |
+
def_file = ''
|
| 235 |
+
else:
|
| 236 |
+
head, tail = os.path.split(output_filename)
|
| 237 |
+
modname, ext = os.path.splitext(tail)
|
| 238 |
+
temp_dir = os.path.dirname(objects[0]) # preserve tree structure
|
| 239 |
+
def_file = os.path.join(temp_dir, '%s.def' % modname)
|
| 240 |
+
contents = ['EXPORTS']
|
| 241 |
+
for sym in export_symbols or []:
|
| 242 |
+
contents.append(' {}=_{}'.format(sym, sym))
|
| 243 |
+
self.execute(write_file, (def_file, contents), "writing %s" % def_file)
|
| 244 |
+
|
| 245 |
+
# Borland C++ has problems with '/' in paths
|
| 246 |
+
objects2 = map(os.path.normpath, objects)
|
| 247 |
+
# split objects in .obj and .res files
|
| 248 |
+
# Borland C++ needs them at different positions in the command line
|
| 249 |
+
objects = [startup_obj]
|
| 250 |
+
resources = []
|
| 251 |
+
for file in objects2:
|
| 252 |
+
(base, ext) = os.path.splitext(os.path.normcase(file))
|
| 253 |
+
if ext == '.res':
|
| 254 |
+
resources.append(file)
|
| 255 |
+
else:
|
| 256 |
+
objects.append(file)
|
| 257 |
+
|
| 258 |
+
for ell in library_dirs:
|
| 259 |
+
ld_args.append("/L%s" % os.path.normpath(ell))
|
| 260 |
+
ld_args.append("/L.") # we sometimes use relative paths
|
| 261 |
+
|
| 262 |
+
# list of object files
|
| 263 |
+
ld_args.extend(objects)
|
| 264 |
+
|
| 265 |
+
# XXX the command-line syntax for Borland C++ is a bit wonky;
|
| 266 |
+
# certain filenames are jammed together in one big string, but
|
| 267 |
+
# comma-delimited. This doesn't mesh too well with the
|
| 268 |
+
# Unix-centric attitude (with a DOS/Windows quoting hack) of
|
| 269 |
+
# 'spawn()', so constructing the argument list is a bit
|
| 270 |
+
# awkward. Note that doing the obvious thing and jamming all
|
| 271 |
+
# the filenames and commas into one argument would be wrong,
|
| 272 |
+
# because 'spawn()' would quote any filenames with spaces in
|
| 273 |
+
# them. Arghghh!. Apparently it works fine as coded...
|
| 274 |
+
|
| 275 |
+
# name of dll/exe file
|
| 276 |
+
ld_args.extend([',', output_filename])
|
| 277 |
+
# no map file and start libraries
|
| 278 |
+
ld_args.append(',,')
|
| 279 |
+
|
| 280 |
+
for lib in libraries:
|
| 281 |
+
# see if we find it and if there is a bcpp specific lib
|
| 282 |
+
# (xxx_bcpp.lib)
|
| 283 |
+
libfile = self.find_library_file(library_dirs, lib, debug)
|
| 284 |
+
if libfile is None:
|
| 285 |
+
ld_args.append(lib)
|
| 286 |
+
# probably a BCPP internal library -- don't warn
|
| 287 |
+
else:
|
| 288 |
+
# full name which prefers bcpp_xxx.lib over xxx.lib
|
| 289 |
+
ld_args.append(libfile)
|
| 290 |
+
|
| 291 |
+
# some default libraries
|
| 292 |
+
ld_args.extend(('import32', 'cw32mt'))
|
| 293 |
+
|
| 294 |
+
# def file for export symbols
|
| 295 |
+
ld_args.extend([',', def_file])
|
| 296 |
+
# add resource files
|
| 297 |
+
ld_args.append(',')
|
| 298 |
+
ld_args.extend(resources)
|
| 299 |
+
|
| 300 |
+
if extra_preargs:
|
| 301 |
+
ld_args[:0] = extra_preargs
|
| 302 |
+
if extra_postargs:
|
| 303 |
+
ld_args.extend(extra_postargs)
|
| 304 |
+
|
| 305 |
+
self.mkpath(os.path.dirname(output_filename))
|
| 306 |
+
try:
|
| 307 |
+
self.spawn([self.linker] + ld_args)
|
| 308 |
+
except DistutilsExecError as msg:
|
| 309 |
+
raise LinkError(msg)
|
| 310 |
+
|
| 311 |
+
else:
|
| 312 |
+
log.debug("skipping %s (up-to-date)", output_filename)
|
| 313 |
+
|
| 314 |
+
# link ()
|
| 315 |
+
|
| 316 |
+
# -- Miscellaneous methods -----------------------------------------
|
| 317 |
+
|
| 318 |
+
def find_library_file(self, dirs, lib, debug=0):
|
| 319 |
+
# List of effective library names to try, in order of preference:
|
| 320 |
+
# xxx_bcpp.lib is better than xxx.lib
|
| 321 |
+
# and xxx_d.lib is better than xxx.lib if debug is set
|
| 322 |
+
#
|
| 323 |
+
# The "_bcpp" suffix is to handle a Python installation for people
|
| 324 |
+
# with multiple compilers (primarily Distutils hackers, I suspect
|
| 325 |
+
# ;-). The idea is they'd have one static library for each
|
| 326 |
+
# compiler they care about, since (almost?) every Windows compiler
|
| 327 |
+
# seems to have a different format for static libraries.
|
| 328 |
+
if debug:
|
| 329 |
+
dlib = lib + "_d"
|
| 330 |
+
try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
|
| 331 |
+
else:
|
| 332 |
+
try_names = (lib + "_bcpp", lib)
|
| 333 |
+
|
| 334 |
+
for dir in dirs:
|
| 335 |
+
for name in try_names:
|
| 336 |
+
libfile = os.path.join(dir, self.library_filename(name))
|
| 337 |
+
if os.path.exists(libfile):
|
| 338 |
+
return libfile
|
| 339 |
+
else:
|
| 340 |
+
# Oops, didn't find it in *any* of 'dirs'
|
| 341 |
+
return None
|
| 342 |
+
|
| 343 |
+
# overwrite the one from CCompiler to support rc and res-files
|
| 344 |
+
def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
|
| 345 |
+
if output_dir is None:
|
| 346 |
+
output_dir = ''
|
| 347 |
+
obj_names = []
|
| 348 |
+
for src_name in source_filenames:
|
| 349 |
+
# use normcase to make sure '.rc' is really '.rc' and not '.RC'
|
| 350 |
+
(base, ext) = os.path.splitext(os.path.normcase(src_name))
|
| 351 |
+
if ext not in (self.src_extensions + ['.rc', '.res']):
|
| 352 |
+
raise UnknownFileError(
|
| 353 |
+
"unknown file type '{}' (from '{}')".format(ext, src_name)
|
| 354 |
+
)
|
| 355 |
+
if strip_dir:
|
| 356 |
+
base = os.path.basename(base)
|
| 357 |
+
if ext == '.res':
|
| 358 |
+
# these can go unchanged
|
| 359 |
+
obj_names.append(os.path.join(output_dir, base + ext))
|
| 360 |
+
elif ext == '.rc':
|
| 361 |
+
# these need to be compiled to .res-files
|
| 362 |
+
obj_names.append(os.path.join(output_dir, base + '.res'))
|
| 363 |
+
else:
|
| 364 |
+
obj_names.append(os.path.join(output_dir, base + self.obj_extension))
|
| 365 |
+
return obj_names
|
| 366 |
+
|
| 367 |
+
# object_filenames ()
|
| 368 |
+
|
| 369 |
+
def preprocess(
|
| 370 |
+
self,
|
| 371 |
+
source,
|
| 372 |
+
output_file=None,
|
| 373 |
+
macros=None,
|
| 374 |
+
include_dirs=None,
|
| 375 |
+
extra_preargs=None,
|
| 376 |
+
extra_postargs=None,
|
| 377 |
+
):
|
| 378 |
+
(_, macros, include_dirs) = self._fix_compile_args(None, macros, include_dirs)
|
| 379 |
+
pp_opts = gen_preprocess_options(macros, include_dirs)
|
| 380 |
+
pp_args = ['cpp32.exe'] + pp_opts
|
| 381 |
+
if output_file is not None:
|
| 382 |
+
pp_args.append('-o' + output_file)
|
| 383 |
+
if extra_preargs:
|
| 384 |
+
pp_args[:0] = extra_preargs
|
| 385 |
+
if extra_postargs:
|
| 386 |
+
pp_args.extend(extra_postargs)
|
| 387 |
+
pp_args.append(source)
|
| 388 |
+
|
| 389 |
+
# We need to preprocess: either we're being forced to, or the
|
| 390 |
+
# source file is newer than the target (or the target doesn't
|
| 391 |
+
# exist).
|
| 392 |
+
if self.force or output_file is None or newer(source, output_file):
|
| 393 |
+
if output_file:
|
| 394 |
+
self.mkpath(os.path.dirname(output_file))
|
| 395 |
+
try:
|
| 396 |
+
self.spawn(pp_args)
|
| 397 |
+
except DistutilsExecError as msg:
|
| 398 |
+
print(msg)
|
| 399 |
+
raise CompileError(msg)
|
| 400 |
+
|
| 401 |
+
# preprocess()
|
.venv/Lib/site-packages/setuptools/_distutils/ccompiler.py
ADDED
|
@@ -0,0 +1,1254 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.ccompiler
|
| 2 |
+
|
| 3 |
+
Contains CCompiler, an abstract base class that defines the interface
|
| 4 |
+
for the Distutils compiler abstraction model."""
|
| 5 |
+
|
| 6 |
+
import sys
|
| 7 |
+
import os
|
| 8 |
+
import re
|
| 9 |
+
import warnings
|
| 10 |
+
|
| 11 |
+
from .errors import (
|
| 12 |
+
CompileError,
|
| 13 |
+
LinkError,
|
| 14 |
+
UnknownFileError,
|
| 15 |
+
DistutilsPlatformError,
|
| 16 |
+
DistutilsModuleError,
|
| 17 |
+
)
|
| 18 |
+
from .spawn import spawn
|
| 19 |
+
from .file_util import move_file
|
| 20 |
+
from .dir_util import mkpath
|
| 21 |
+
from .dep_util import newer_group
|
| 22 |
+
from .util import split_quoted, execute
|
| 23 |
+
from ._log import log
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class CCompiler:
|
| 27 |
+
"""Abstract base class to define the interface that must be implemented
|
| 28 |
+
by real compiler classes. Also has some utility methods used by
|
| 29 |
+
several compiler classes.
|
| 30 |
+
|
| 31 |
+
The basic idea behind a compiler abstraction class is that each
|
| 32 |
+
instance can be used for all the compile/link steps in building a
|
| 33 |
+
single project. Thus, attributes common to all of those compile and
|
| 34 |
+
link steps -- include directories, macros to define, libraries to link
|
| 35 |
+
against, etc. -- are attributes of the compiler instance. To allow for
|
| 36 |
+
variability in how individual files are treated, most of those
|
| 37 |
+
attributes may be varied on a per-compilation or per-link basis.
|
| 38 |
+
"""
|
| 39 |
+
|
| 40 |
+
# 'compiler_type' is a class attribute that identifies this class. It
|
| 41 |
+
# keeps code that wants to know what kind of compiler it's dealing with
|
| 42 |
+
# from having to import all possible compiler classes just to do an
|
| 43 |
+
# 'isinstance'. In concrete CCompiler subclasses, 'compiler_type'
|
| 44 |
+
# should really, really be one of the keys of the 'compiler_class'
|
| 45 |
+
# dictionary (see below -- used by the 'new_compiler()' factory
|
| 46 |
+
# function) -- authors of new compiler interface classes are
|
| 47 |
+
# responsible for updating 'compiler_class'!
|
| 48 |
+
compiler_type = None
|
| 49 |
+
|
| 50 |
+
# XXX things not handled by this compiler abstraction model:
|
| 51 |
+
# * client can't provide additional options for a compiler,
|
| 52 |
+
# e.g. warning, optimization, debugging flags. Perhaps this
|
| 53 |
+
# should be the domain of concrete compiler abstraction classes
|
| 54 |
+
# (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base
|
| 55 |
+
# class should have methods for the common ones.
|
| 56 |
+
# * can't completely override the include or library searchg
|
| 57 |
+
# path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2".
|
| 58 |
+
# I'm not sure how widely supported this is even by Unix
|
| 59 |
+
# compilers, much less on other platforms. And I'm even less
|
| 60 |
+
# sure how useful it is; maybe for cross-compiling, but
|
| 61 |
+
# support for that is a ways off. (And anyways, cross
|
| 62 |
+
# compilers probably have a dedicated binary with the
|
| 63 |
+
# right paths compiled in. I hope.)
|
| 64 |
+
# * can't do really freaky things with the library list/library
|
| 65 |
+
# dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against
|
| 66 |
+
# different versions of libfoo.a in different locations. I
|
| 67 |
+
# think this is useless without the ability to null out the
|
| 68 |
+
# library search path anyways.
|
| 69 |
+
|
| 70 |
+
# Subclasses that rely on the standard filename generation methods
|
| 71 |
+
# implemented below should override these; see the comment near
|
| 72 |
+
# those methods ('object_filenames()' et. al.) for details:
|
| 73 |
+
src_extensions = None # list of strings
|
| 74 |
+
obj_extension = None # string
|
| 75 |
+
static_lib_extension = None
|
| 76 |
+
shared_lib_extension = None # string
|
| 77 |
+
static_lib_format = None # format string
|
| 78 |
+
shared_lib_format = None # prob. same as static_lib_format
|
| 79 |
+
exe_extension = None # string
|
| 80 |
+
|
| 81 |
+
# Default language settings. language_map is used to detect a source
|
| 82 |
+
# file or Extension target language, checking source filenames.
|
| 83 |
+
# language_order is used to detect the language precedence, when deciding
|
| 84 |
+
# what language to use when mixing source types. For example, if some
|
| 85 |
+
# extension has two files with ".c" extension, and one with ".cpp", it
|
| 86 |
+
# is still linked as c++.
|
| 87 |
+
language_map = {
|
| 88 |
+
".c": "c",
|
| 89 |
+
".cc": "c++",
|
| 90 |
+
".cpp": "c++",
|
| 91 |
+
".cxx": "c++",
|
| 92 |
+
".m": "objc",
|
| 93 |
+
}
|
| 94 |
+
language_order = ["c++", "objc", "c"]
|
| 95 |
+
|
| 96 |
+
include_dirs = []
|
| 97 |
+
"""
|
| 98 |
+
include dirs specific to this compiler class
|
| 99 |
+
"""
|
| 100 |
+
|
| 101 |
+
library_dirs = []
|
| 102 |
+
"""
|
| 103 |
+
library dirs specific to this compiler class
|
| 104 |
+
"""
|
| 105 |
+
|
| 106 |
+
def __init__(self, verbose=0, dry_run=0, force=0):
|
| 107 |
+
self.dry_run = dry_run
|
| 108 |
+
self.force = force
|
| 109 |
+
self.verbose = verbose
|
| 110 |
+
|
| 111 |
+
# 'output_dir': a common output directory for object, library,
|
| 112 |
+
# shared object, and shared library files
|
| 113 |
+
self.output_dir = None
|
| 114 |
+
|
| 115 |
+
# 'macros': a list of macro definitions (or undefinitions). A
|
| 116 |
+
# macro definition is a 2-tuple (name, value), where the value is
|
| 117 |
+
# either a string or None (no explicit value). A macro
|
| 118 |
+
# undefinition is a 1-tuple (name,).
|
| 119 |
+
self.macros = []
|
| 120 |
+
|
| 121 |
+
# 'include_dirs': a list of directories to search for include files
|
| 122 |
+
self.include_dirs = []
|
| 123 |
+
|
| 124 |
+
# 'libraries': a list of libraries to include in any link
|
| 125 |
+
# (library names, not filenames: eg. "foo" not "libfoo.a")
|
| 126 |
+
self.libraries = []
|
| 127 |
+
|
| 128 |
+
# 'library_dirs': a list of directories to search for libraries
|
| 129 |
+
self.library_dirs = []
|
| 130 |
+
|
| 131 |
+
# 'runtime_library_dirs': a list of directories to search for
|
| 132 |
+
# shared libraries/objects at runtime
|
| 133 |
+
self.runtime_library_dirs = []
|
| 134 |
+
|
| 135 |
+
# 'objects': a list of object files (or similar, such as explicitly
|
| 136 |
+
# named library files) to include on any link
|
| 137 |
+
self.objects = []
|
| 138 |
+
|
| 139 |
+
for key in self.executables.keys():
|
| 140 |
+
self.set_executable(key, self.executables[key])
|
| 141 |
+
|
| 142 |
+
def set_executables(self, **kwargs):
    """Define the executables (and options for them) that perform the
    various stages of compilation.

    Which keys are accepted depends on the compiler class (via the
    'executables' class attribute); most classes understand
    'compiler' (the C/C++ compiler), 'linker_so' (shared object /
    library linker), 'linker_exe' (executable linker) and 'archiver'
    (static library creator).  On platforms with a command line
    (Unix, DOS/Windows) each value is a string split into an
    executable name plus arguments the way a Unix shell would —
    see 'distutils.util.split_quoted()'.
    """
    # Some compiler classes hard-code executable names as class
    # attributes ('cpp', 'cc', ...), which suits a single
    # compiler/OS combination (e.g. MSVCCompiler).  Others
    # (UnixCCompiler in particular) are driven by information
    # discovered at run time, so validation is keyed off
    # 'self.executables'.
    for name, value in kwargs.items():
        if name not in self.executables:
            raise ValueError(
                "unknown executable '%s' for class %s"
                % (name, self.__class__.__name__)
            )
        self.set_executable(name, value)
|
| 175 |
+
|
| 176 |
+
def set_executable(self, key, value):
    """Store executable 'value' as attribute 'key'.  A command given
    as a string is split into an argument list first; anything else
    (e.g. an already-split list) is stored unchanged."""
    attr_value = split_quoted(value) if isinstance(value, str) else value
    setattr(self, key, attr_value)
|
| 181 |
+
|
| 182 |
+
def _find_macro(self, name):
|
| 183 |
+
i = 0
|
| 184 |
+
for defn in self.macros:
|
| 185 |
+
if defn[0] == name:
|
| 186 |
+
return i
|
| 187 |
+
i += 1
|
| 188 |
+
return None
|
| 189 |
+
|
| 190 |
+
def _check_macro_definitions(self, definitions):
|
| 191 |
+
"""Ensures that every element of 'definitions' is a valid macro
|
| 192 |
+
definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do
|
| 193 |
+
nothing if all definitions are OK, raise TypeError otherwise.
|
| 194 |
+
"""
|
| 195 |
+
for defn in definitions:
|
| 196 |
+
if not (
|
| 197 |
+
isinstance(defn, tuple)
|
| 198 |
+
and (
|
| 199 |
+
len(defn) in (1, 2)
|
| 200 |
+
and (isinstance(defn[1], str) or defn[1] is None)
|
| 201 |
+
)
|
| 202 |
+
and isinstance(defn[0], str)
|
| 203 |
+
):
|
| 204 |
+
raise TypeError(
|
| 205 |
+
("invalid macro definition '%s': " % defn)
|
| 206 |
+
+ "must be tuple (string,), (string, string), or "
|
| 207 |
+
+ "(string, None)"
|
| 208 |
+
)
|
| 209 |
+
|
| 210 |
+
# -- Bookkeeping methods -------------------------------------------
|
| 211 |
+
|
| 212 |
+
def define_macro(self, name, value=None):
    """Define a preprocessor macro for all compilations driven by this
    compiler object.  When 'value' (a string) is omitted, the macro is
    defined without an explicit value; the precise effect of that is
    compiler-dependent.
    """
    # Drop any earlier definition/undefinition of the same name so
    # that this call takes precedence.
    existing = self._find_macro(name)
    if existing is not None:
        del self.macros[existing]
    self.macros.append((name, value))
|
| 226 |
+
|
| 227 |
+
def undefine_macro(self, name):
    """Undefine a preprocessor macro for all compilations driven by
    this compiler object.  Among repeated 'define_macro()' /
    'undefine_macro()' calls for the same name, the last one wins;
    per-compilation macros passed to 'compile()' override both.
    """
    # Drop any earlier definition/undefinition of the same name so
    # that this call takes precedence.
    existing = self._find_macro(name)
    if existing is not None:
        del self.macros[existing]
    self.macros.append((name,))
|
| 244 |
+
|
| 245 |
+
def add_include_dir(self, dir):
    """Append 'dir' to the header search path.  Directories are
    searched in the order supplied by successive calls."""
    self.include_dirs += [dir]
|
| 252 |
+
|
| 253 |
+
def set_include_dirs(self, dirs):
    """Replace the header search path with a copy of 'dirs' (a list of
    strings).  Earlier 'add_include_dir()' calls are overridden; later
    ones extend the new list.  The compiler's own default include
    directories are unaffected.
    """
    self.include_dirs = list(dirs)
|
| 262 |
+
|
| 263 |
+
def add_library(self, libname):
    """Include library 'libname' in every link driven by this compiler
    object.  'libname' is a bare library name ("foo"), never a
    filename ("libfoo.a"); the filename is inferred by the linker,
    compiler, or compiler class depending on the platform.

    Libraries are handed to the linker in the order supplied to
    'add_library()' and/or 'set_libraries()'; duplicates are passed
    as many times as they appear.
    """
    self.libraries += [libname]
|
| 278 |
+
|
| 279 |
+
def set_libraries(self, libnames):
    """Replace the libraries linked into every link with a copy of
    'libnames' (a list of strings).  Standard system libraries that
    the linker includes by default are unaffected."""
    self.libraries = list(libnames)
|
| 286 |
+
|
| 287 |
+
def add_library_dir(self, dir):
    """Append 'dir' to the link-time library search path used for
    libraries named via 'add_library()'/'set_libraries()'.  The
    linker searches directories in the order they were added."""
    self.library_dirs += [dir]
|
| 294 |
+
|
| 295 |
+
def set_library_dirs(self, dirs):
    """Replace the link-time library search path with a copy of 'dirs'
    (a list of strings).  The linker's default search path is
    unaffected."""
    self.library_dirs = list(dirs)
|
| 301 |
+
|
| 302 |
+
def add_runtime_library_dir(self, dir):
    """Append 'dir' to the directories searched for shared libraries
    at run time."""
    self.runtime_library_dirs += [dir]
|
| 307 |
+
|
| 308 |
+
def set_runtime_library_dirs(self, dirs):
    """Replace the run-time shared-library search path with a copy of
    'dirs' (a list of strings).  The runtime linker's default search
    path is unaffected."""
    self.runtime_library_dirs = list(dirs)
|
| 315 |
+
|
| 316 |
+
def add_link_object(self, object):
    """Append 'object' to the object files (or analogues, e.g.
    explicitly named library files or resource-compiler output)
    included in every link driven by this compiler object."""
    self.objects += [object]
|
| 323 |
+
|
| 324 |
+
def set_link_objects(self, objects):
    """Replace the object files (or analogues) included in every link
    with a copy of 'objects'.  Standard object files the linker adds
    by default (e.g. system libraries) are unaffected."""
    self.objects = list(objects)
|
| 331 |
+
|
| 332 |
+
# -- Private utility methods --------------------------------------
|
| 333 |
+
# (here for the convenience of subclasses)
|
| 334 |
+
|
| 335 |
+
# Helper method to prep compiler in subclass compile() methods
|
| 336 |
+
|
| 337 |
+
def _setup_compile(self, outdir, macros, incdirs, sources, depends, extra):
    """Process arguments and decide which source files to compile.

    Returns (macros, objects, extra, pp_opts, build) where 'build'
    maps each object filename to its (source, extension) pair.
    """
    outdir, macros, incdirs = self._fix_compile_args(outdir, macros, incdirs)
    extra = [] if extra is None else extra

    # One expected object file per source file, in matching order.
    objects = self.object_filenames(sources, strip_dir=0, output_dir=outdir)
    assert len(objects) == len(sources)

    pp_opts = gen_preprocess_options(macros, incdirs)

    # Record what produces each object, creating output directories
    # as a side effect.
    build = {}
    for src, obj in zip(sources, objects):
        ext = os.path.splitext(src)[1]
        self.mkpath(os.path.dirname(obj))
        build[obj] = (src, ext)

    return macros, objects, extra, pp_opts, build
|
| 359 |
+
|
| 360 |
+
def _get_cc_args(self, pp_opts, debug, before):
|
| 361 |
+
# works for unixccompiler, cygwinccompiler
|
| 362 |
+
cc_args = pp_opts + ['-c']
|
| 363 |
+
if debug:
|
| 364 |
+
cc_args[:0] = ['-g']
|
| 365 |
+
if before:
|
| 366 |
+
cc_args[:0] = before
|
| 367 |
+
return cc_args
|
| 368 |
+
|
| 369 |
+
def _fix_compile_args(self, output_dir, macros, include_dirs):
    """Typecheck and fix-up some of the arguments to the 'compile()'
    method, and return fixed-up values.  Specifically: if 'output_dir'
    is None, replaces it with 'self.output_dir'; ensures that 'macros'
    is a list, and augments it with 'self.macros'; ensures that
    'include_dirs' is a list, and augments it with 'self.include_dirs'.
    Guarantees that the returned values are of the correct type,
    i.e. for 'output_dir' either string or None, and for 'macros' and
    'include_dirs' either list or None.
    """
    if output_dir is None:
        # Fall back to the instance-wide default output directory.
        output_dir = self.output_dir
    elif not isinstance(output_dir, str):
        raise TypeError("'output_dir' must be a string or None")

    if macros is None:
        macros = self.macros
    elif isinstance(macros, list):
        # Caller-supplied macros come first so they take precedence.
        macros = macros + (self.macros or [])
    else:
        raise TypeError("'macros' (if supplied) must be a list of tuples")

    if include_dirs is None:
        # Copy so that later mutation does not alter self.include_dirs.
        include_dirs = list(self.include_dirs)
    elif isinstance(include_dirs, (list, tuple)):
        include_dirs = list(include_dirs) + (self.include_dirs or [])
    else:
        raise TypeError("'include_dirs' (if supplied) must be a list of strings")

    # add include dirs for class
    include_dirs += self.__class__.include_dirs

    return output_dir, macros, include_dirs
|
| 402 |
+
|
| 403 |
+
def _prep_compile(self, sources, output_dir, depends=None):
|
| 404 |
+
"""Decide which source files must be recompiled.
|
| 405 |
+
|
| 406 |
+
Determine the list of object files corresponding to 'sources',
|
| 407 |
+
and figure out which ones really need to be recompiled.
|
| 408 |
+
Return a list of all object files and a dictionary telling
|
| 409 |
+
which source files can be skipped.
|
| 410 |
+
"""
|
| 411 |
+
# Get the list of expected output (object) files
|
| 412 |
+
objects = self.object_filenames(sources, output_dir=output_dir)
|
| 413 |
+
assert len(objects) == len(sources)
|
| 414 |
+
|
| 415 |
+
# Return an empty dict for the "which source files can be skipped"
|
| 416 |
+
# return value to preserve API compatibility.
|
| 417 |
+
return objects, {}
|
| 418 |
+
|
| 419 |
+
def _fix_object_args(self, objects, output_dir):
|
| 420 |
+
"""Typecheck and fix up some arguments supplied to various methods.
|
| 421 |
+
Specifically: ensure that 'objects' is a list; if output_dir is
|
| 422 |
+
None, replace with self.output_dir. Return fixed versions of
|
| 423 |
+
'objects' and 'output_dir'.
|
| 424 |
+
"""
|
| 425 |
+
if not isinstance(objects, (list, tuple)):
|
| 426 |
+
raise TypeError("'objects' must be a list or tuple of strings")
|
| 427 |
+
objects = list(objects)
|
| 428 |
+
|
| 429 |
+
if output_dir is None:
|
| 430 |
+
output_dir = self.output_dir
|
| 431 |
+
elif not isinstance(output_dir, str):
|
| 432 |
+
raise TypeError("'output_dir' must be a string or None")
|
| 433 |
+
|
| 434 |
+
return (objects, output_dir)
|
| 435 |
+
|
| 436 |
+
def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
    """Typecheck and fix up some of the arguments supplied to the
    'link_*' methods.  Specifically: ensure that all arguments are
    lists, and augment them with their permanent versions
    (eg. 'self.libraries' augments 'libraries').  Return a tuple with
    fixed versions of all arguments.
    """
    if libraries is None:
        libraries = self.libraries
    elif isinstance(libraries, (list, tuple)):
        # Caller-supplied names come first so they take precedence.
        libraries = list(libraries) + (self.libraries or [])
    else:
        raise TypeError("'libraries' (if supplied) must be a list of strings")

    if library_dirs is None:
        library_dirs = self.library_dirs
    elif isinstance(library_dirs, (list, tuple)):
        library_dirs = list(library_dirs) + (self.library_dirs or [])
    else:
        raise TypeError("'library_dirs' (if supplied) must be a list of strings")

    # add library dirs for class
    library_dirs += self.__class__.library_dirs

    if runtime_library_dirs is None:
        runtime_library_dirs = self.runtime_library_dirs
    elif isinstance(runtime_library_dirs, (list, tuple)):
        runtime_library_dirs = list(runtime_library_dirs) + (
            self.runtime_library_dirs or []
        )
    else:
        raise TypeError(
            "'runtime_library_dirs' (if supplied) " "must be a list of strings"
        )

    return (libraries, library_dirs, runtime_library_dirs)
|
| 472 |
+
|
| 473 |
+
def _need_link(self, objects, output_file):
|
| 474 |
+
"""Return true if we need to relink the files listed in 'objects'
|
| 475 |
+
to recreate 'output_file'.
|
| 476 |
+
"""
|
| 477 |
+
if self.force:
|
| 478 |
+
return True
|
| 479 |
+
else:
|
| 480 |
+
if self.dry_run:
|
| 481 |
+
newer = newer_group(objects, output_file, missing='newer')
|
| 482 |
+
else:
|
| 483 |
+
newer = newer_group(objects, output_file)
|
| 484 |
+
return newer
|
| 485 |
+
|
| 486 |
+
def detect_language(self, sources):
    """Detect the implementation language of a file or list of files.

    Uses 'language_map' (extension -> language) and 'language_order'
    (languages ranked from most to least "dominant"); the highest
    ranked language found wins.  Returns None when nothing matches.
    """
    if not isinstance(sources, list):
        sources = [sources]
    best_lang = None
    best_rank = len(self.language_order)
    for source in sources:
        ext = os.path.splitext(source)[1]
        lang = self.language_map.get(ext)
        try:
            rank = self.language_order.index(lang)
        except ValueError:
            # Unknown extension/language: does not influence the result.
            continue
        if rank < best_rank:
            best_lang, best_rank = lang, rank
    return best_lang
|
| 505 |
+
|
| 506 |
+
# -- Worker methods ------------------------------------------------
|
| 507 |
+
# (must be implemented by subclasses)
|
| 508 |
+
|
| 509 |
+
def preprocess(
    self,
    source,
    output_file=None,
    macros=None,
    include_dirs=None,
    extra_preargs=None,
    extra_postargs=None,
):
    """Preprocess a single C/C++ source file named 'source'.

    Output is written to 'output_file', or stdout when it is not
    supplied.  'macros' (same format as for 'compile()') augments the
    macros set with 'define_macro()'/'undefine_macro()';
    'include_dirs' is a list of directory names added to the default
    search path.  Raises PreprocessError on failure.

    This default implementation is a no-op; subclasses override it.
    """
|
| 528 |
+
|
| 529 |
+
def compile(
    self,
    sources,
    output_dir=None,
    macros=None,
    include_dirs=None,
    debug=0,
    extra_preargs=None,
    extra_postargs=None,
    depends=None,
):
    """Compile one or more source files.

    'sources' is a list of filenames — usually C/C++, but anything
    the particular compiler class handles (e.g. MSVCCompiler accepts
    resource files).  Returns one object filename per source; some
    sources may be skipped, but every object filename is returned.

    'output_dir' prefixes each object path while keeping the
    source's own directory component ("foo/bar.c" -> the Unix
    "build/foo/bar.o").  'macros' is a list of (name, value) 2-tuples
    (value None = no explicit value) and (name,) undefinitions, later
    entries taking precedence.  'include_dirs' extends the default
    header search path for this compilation only.  'debug' asks for
    debug symbols in (or alongside) the objects.
    'extra_preargs'/'extra_postargs' are implementation-dependent —
    on command-line platforms, raw arguments prepended/appended to
    the compiler command — and exist as an escape hatch.  A source
    older than any file in 'depends' is recompiled (coarse-grained
    dependency tracking).

    Raises CompileError on failure.
    """
    # Concrete classes either override compile() entirely or supply
    # _compile() for the per-file work.
    macros, objects, extra_postargs, pp_opts, build = self._setup_compile(
        output_dir, macros, include_dirs, sources, depends, extra_postargs
    )
    cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)

    for obj in objects:
        # Objects absent from 'build' need no recompilation.
        if obj not in build:
            continue
        src, ext = build[obj]
        self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)

    # Return *all* object filenames, not just the ones just built.
    return objects
|
| 604 |
+
|
| 605 |
+
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
|
| 606 |
+
"""Compile 'src' to product 'obj'."""
|
| 607 |
+
# A concrete compiler class that does not override compile()
|
| 608 |
+
# should implement _compile().
|
| 609 |
+
pass
|
| 610 |
+
|
| 611 |
+
def create_static_lib(
    self, objects, output_libname, output_dir=None, debug=0, target_lang=None
):
    """Link object files together into a static library.

    Inputs are 'objects' plus the extra objects registered via
    'add_link_object()'/'set_link_objects()' and the libraries from
    'add_library()'/'set_libraries()'.  'output_libname' is a bare
    library name — the filename is inferred from it — created under
    'output_dir'.  'debug' is accepted mostly for consistency (on
    most platforms debug info is a compile-time matter), and
    'target_lang' allows language-specific treatment at link time.
    Raises LibError on failure.

    This default implementation is a no-op; subclasses override it.
    """
|
| 637 |
+
|
| 638 |
+
# values for target_desc parameter in link()
SHARED_OBJECT = "shared_object"  # loadable module (e.g. a Python extension)
SHARED_LIBRARY = "shared_library"  # full shared library
EXECUTABLE = "executable"  # standalone program
|
| 642 |
+
|
| 643 |
+
def link(
    self,
    target_desc,
    objects,
    output_filename,
    output_dir=None,
    libraries=None,
    library_dirs=None,
    runtime_library_dirs=None,
    export_symbols=None,
    debug=0,
    extra_preargs=None,
    extra_postargs=None,
    build_temp=None,
    target_lang=None,
):
    """Link object files together into an executable or a shared
    library file.

    'target_desc' is one of SHARED_OBJECT, SHARED_LIBRARY or
    EXECUTABLE.  'output_filename' names the result (relative to
    'output_dir' when that is supplied, and it may itself carry
    directory components).

    'libraries' holds bare library names, translated to filenames in
    a platform-specific way ("foo" -> "libfoo.a" on Unix, "foo.lib"
    on DOS/Windows); a name with a directory component pins the
    search to that directory.  'library_dirs' extends the search path
    for bare names, on top of the system default and
    'add_library_dir()'/'set_library_dirs()'.
    'runtime_library_dirs' lists directories embedded into the result
    and used to resolve *its* shared-library dependencies at run time
    (chiefly relevant on Unix).  'export_symbols' lists symbols the
    shared library will export (Windows only).

    'debug' requests debug information and — unlike for
    'create_static_lib()' — actually matters on most platforms.
    'extra_preargs'/'extra_postargs' are raw linker command-line
    arguments, as for 'compile()'.  'target_lang' allows
    language-specific treatment at link time.

    Raises LinkError on failure.
    """
    raise NotImplementedError
|
| 703 |
+
|
| 704 |
+
# Old 'link_*()' methods, rewritten to use the new 'link()' method.
|
| 705 |
+
|
| 706 |
+
def link_shared_lib(
    self,
    objects,
    output_libname,
    output_dir=None,
    libraries=None,
    library_dirs=None,
    runtime_library_dirs=None,
    export_symbols=None,
    debug=0,
    extra_preargs=None,
    extra_postargs=None,
    build_temp=None,
    target_lang=None,
):
    """Backward-compatible wrapper: link a shared library by
    delegating to the newer 'link()' method, deriving the output
    filename from the bare library name 'output_libname'."""
    self.link(
        CCompiler.SHARED_LIBRARY,
        objects,
        self.library_filename(output_libname, lib_type='shared'),
        output_dir,
        libraries=libraries,
        library_dirs=library_dirs,
        runtime_library_dirs=runtime_library_dirs,
        export_symbols=export_symbols,
        debug=debug,
        extra_preargs=extra_preargs,
        extra_postargs=extra_postargs,
        build_temp=build_temp,
        target_lang=target_lang,
    )
|
| 736 |
+
|
| 737 |
+
def link_shared_object(
    self,
    objects,
    output_filename,
    output_dir=None,
    libraries=None,
    library_dirs=None,
    runtime_library_dirs=None,
    export_symbols=None,
    debug=0,
    extra_preargs=None,
    extra_postargs=None,
    build_temp=None,
    target_lang=None,
):
    """Backward-compatible wrapper: link a shared object file by
    delegating to the newer 'link()' method."""
    self.link(
        CCompiler.SHARED_OBJECT,
        objects,
        output_filename,
        output_dir,
        libraries=libraries,
        library_dirs=library_dirs,
        runtime_library_dirs=runtime_library_dirs,
        export_symbols=export_symbols,
        debug=debug,
        extra_preargs=extra_preargs,
        extra_postargs=extra_postargs,
        build_temp=build_temp,
        target_lang=target_lang,
    )
|
| 767 |
+
|
| 768 |
+
def link_executable(
    self,
    objects,
    output_progname,
    output_dir=None,
    libraries=None,
    library_dirs=None,
    runtime_library_dirs=None,
    debug=0,
    extra_preargs=None,
    extra_postargs=None,
    target_lang=None,
):
    """Backward-compatible wrapper: link an executable by delegating
    to the newer 'link()' method, deriving the output filename from
    'output_progname'."""
    self.link(
        CCompiler.EXECUTABLE,
        objects,
        self.executable_filename(output_progname),
        output_dir,
        libraries=libraries,
        library_dirs=library_dirs,
        runtime_library_dirs=runtime_library_dirs,
        # Executables have no exported symbols and need no build_temp.
        export_symbols=None,
        debug=debug,
        extra_preargs=extra_preargs,
        extra_postargs=extra_postargs,
        build_temp=None,
        target_lang=target_lang,
    )
|
| 796 |
+
|
| 797 |
+
# -- Miscellaneous methods -----------------------------------------
|
| 798 |
+
# These are all used by the 'gen_lib_options() function; there is
|
| 799 |
+
# no appropriate default implementation so subclasses should
|
| 800 |
+
# implement all of these.
|
| 801 |
+
|
| 802 |
+
def library_dir_option(self, dir):
    """Return the compiler option that adds 'dir' to the library
    search path.  Used by 'gen_lib_options()'; subclasses must
    implement it."""
    raise NotImplementedError
|
| 807 |
+
|
| 808 |
+
def runtime_library_dir_option(self, dir):
    """Return the compiler option that adds 'dir' to the run-time
    library search path.  Used by 'gen_lib_options()'; subclasses
    must implement it."""
    raise NotImplementedError
|
| 813 |
+
|
| 814 |
+
def library_option(self, lib):
    """Return the compiler option that links library 'lib' into the
    shared library or executable.  Used by 'gen_lib_options()';
    subclasses must implement it."""
    raise NotImplementedError
|
| 819 |
+
|
| 820 |
+
def has_function(  # noqa: C901
    self,
    funcname,
    includes=None,
    include_dirs=None,
    libraries=None,
    library_dirs=None,
):
    """Return a boolean indicating whether funcname is provided as
    a symbol on the current platform.  The optional arguments can
    be used to augment the compilation environment.

    The libraries argument is a list of flags to be passed to the
    linker to make additional symbol definitions available for
    linking.

    The includes and include_dirs arguments are deprecated.
    Usually, supplying include files with function declarations
    will cause function detection to fail even in cases where the
    symbol is available for linking.

    """
    # this can't be included at module scope because it tries to
    # import math which might not be available at that point - maybe
    # the necessary logic should just be inlined?
    import tempfile

    if includes is None:
        includes = []
    else:
        warnings.warn("includes is deprecated", DeprecationWarning)
    if include_dirs is None:
        include_dirs = []
    else:
        warnings.warn("include_dirs is deprecated", DeprecationWarning)
    if libraries is None:
        libraries = []
    if library_dirs is None:
        library_dirs = []
    # Generate a tiny C program that calls 'funcname' so compiling
    # and linking it probes for the symbol.
    fd, fname = tempfile.mkstemp(".c", funcname, text=True)
    f = os.fdopen(fd, "w")
    try:
        for incl in includes:
            f.write("""#include "%s"\n""" % incl)
        if not includes:
            # Use "char func(void);" as the prototype to follow
            # what autoconf does.  This prototype does not match
            # any well-known function the compiler might recognize
            # as a builtin, so this ends up as a true link test.
            # Without a fake prototype, the test would need to
            # know the exact argument types, and the has_function
            # interface does not provide that level of information.
            f.write(
                """\
#ifdef __cplusplus
extern "C"
#endif
char %s(void);
"""
                % funcname
            )
        f.write(
            """\
int main (int argc, char **argv) {
    %s();
    return 0;
}
"""
            % funcname
        )
    finally:
        f.close()
    # Compile failure means the symbol test cannot even be attempted;
    # the temporary source is removed either way.
    try:
        objects = self.compile([fname], include_dirs=include_dirs)
    except CompileError:
        return False
    finally:
        os.remove(fname)

    # Link failure means the symbol is absent; the object files are
    # cleaned up whether or not the link succeeds.
    try:
        self.link_executable(
            objects, "a.out", libraries=libraries, library_dirs=library_dirs
        )
    except (LinkError, TypeError):
        return False
    else:
        os.remove(
            self.executable_filename("a.out", output_dir=self.output_dir or '')
        )
    finally:
        for fn in objects:
            os.remove(fn)
    return True
|
| 913 |
+
|
| 914 |
+
def find_library_file(self, dirs, lib, debug=0):
|
| 915 |
+
"""Search the specified list of directories for a static or shared
|
| 916 |
+
library file 'lib' and return the full path to that file. If
|
| 917 |
+
'debug' true, look for a debugging version (if that makes sense on
|
| 918 |
+
the current platform). Return None if 'lib' wasn't found in any of
|
| 919 |
+
the specified directories.
|
| 920 |
+
"""
|
| 921 |
+
raise NotImplementedError
|
| 922 |
+
|
| 923 |
+
# -- Filename generation methods -----------------------------------
|
| 924 |
+
|
| 925 |
+
# The default implementation of the filename generating methods are
|
| 926 |
+
# prejudiced towards the Unix/DOS/Windows view of the world:
|
| 927 |
+
# * object files are named by replacing the source file extension
|
| 928 |
+
# (eg. .c/.cpp -> .o/.obj)
|
| 929 |
+
# * library files (shared or static) are named by plugging the
|
| 930 |
+
# library name and extension into a format string, eg.
|
| 931 |
+
# "lib%s.%s" % (lib_name, ".a") for Unix static libraries
|
| 932 |
+
# * executables are named by appending an extension (possibly
|
| 933 |
+
# empty) to the program name: eg. progname + ".exe" for
|
| 934 |
+
# Windows
|
| 935 |
+
#
|
| 936 |
+
# To reduce redundant code, these methods expect to find
|
| 937 |
+
# several attributes in the current object (presumably defined
|
| 938 |
+
# as class attributes):
|
| 939 |
+
# * src_extensions -
|
| 940 |
+
# list of C/C++ source file extensions, eg. ['.c', '.cpp']
|
| 941 |
+
# * obj_extension -
|
| 942 |
+
# object file extension, eg. '.o' or '.obj'
|
| 943 |
+
# * static_lib_extension -
|
| 944 |
+
# extension for static library files, eg. '.a' or '.lib'
|
| 945 |
+
# * shared_lib_extension -
|
| 946 |
+
# extension for shared library/object files, eg. '.so', '.dll'
|
| 947 |
+
# * static_lib_format -
|
| 948 |
+
# format string for generating static library filenames,
|
| 949 |
+
# eg. 'lib%s.%s' or '%s.%s'
|
| 950 |
+
# * shared_lib_format
|
| 951 |
+
# format string for generating shared library filenames
|
| 952 |
+
# (probably same as static_lib_format, since the extension
|
| 953 |
+
# is one of the intended parameters to the format string)
|
| 954 |
+
# * exe_extension -
|
| 955 |
+
# extension for executable files, eg. '' or '.exe'
|
| 956 |
+
|
| 957 |
+
def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
|
| 958 |
+
if output_dir is None:
|
| 959 |
+
output_dir = ''
|
| 960 |
+
return list(
|
| 961 |
+
self._make_out_path(output_dir, strip_dir, src_name)
|
| 962 |
+
for src_name in source_filenames
|
| 963 |
+
)
|
| 964 |
+
|
| 965 |
+
@property
|
| 966 |
+
def out_extensions(self):
|
| 967 |
+
return dict.fromkeys(self.src_extensions, self.obj_extension)
|
| 968 |
+
|
| 969 |
+
def _make_out_path(self, output_dir, strip_dir, src_name):
|
| 970 |
+
base, ext = os.path.splitext(src_name)
|
| 971 |
+
base = self._make_relative(base)
|
| 972 |
+
try:
|
| 973 |
+
new_ext = self.out_extensions[ext]
|
| 974 |
+
except LookupError:
|
| 975 |
+
raise UnknownFileError(
|
| 976 |
+
"unknown file type '{}' (from '{}')".format(ext, src_name)
|
| 977 |
+
)
|
| 978 |
+
if strip_dir:
|
| 979 |
+
base = os.path.basename(base)
|
| 980 |
+
return os.path.join(output_dir, base + new_ext)
|
| 981 |
+
|
| 982 |
+
@staticmethod
|
| 983 |
+
def _make_relative(base):
|
| 984 |
+
"""
|
| 985 |
+
In order to ensure that a filename always honors the
|
| 986 |
+
indicated output_dir, make sure it's relative.
|
| 987 |
+
Ref python/cpython#37775.
|
| 988 |
+
"""
|
| 989 |
+
# Chop off the drive
|
| 990 |
+
no_drive = os.path.splitdrive(base)[1]
|
| 991 |
+
# If abs, chop off leading /
|
| 992 |
+
return no_drive[os.path.isabs(no_drive) :]
|
| 993 |
+
|
| 994 |
+
def shared_object_filename(self, basename, strip_dir=0, output_dir=''):
|
| 995 |
+
assert output_dir is not None
|
| 996 |
+
if strip_dir:
|
| 997 |
+
basename = os.path.basename(basename)
|
| 998 |
+
return os.path.join(output_dir, basename + self.shared_lib_extension)
|
| 999 |
+
|
| 1000 |
+
def executable_filename(self, basename, strip_dir=0, output_dir=''):
|
| 1001 |
+
assert output_dir is not None
|
| 1002 |
+
if strip_dir:
|
| 1003 |
+
basename = os.path.basename(basename)
|
| 1004 |
+
return os.path.join(output_dir, basename + (self.exe_extension or ''))
|
| 1005 |
+
|
| 1006 |
+
def library_filename(
|
| 1007 |
+
self, libname, lib_type='static', strip_dir=0, output_dir='' # or 'shared'
|
| 1008 |
+
):
|
| 1009 |
+
assert output_dir is not None
|
| 1010 |
+
expected = '"static", "shared", "dylib", "xcode_stub"'
|
| 1011 |
+
if lib_type not in eval(expected):
|
| 1012 |
+
raise ValueError(f"'lib_type' must be {expected}")
|
| 1013 |
+
fmt = getattr(self, lib_type + "_lib_format")
|
| 1014 |
+
ext = getattr(self, lib_type + "_lib_extension")
|
| 1015 |
+
|
| 1016 |
+
dir, base = os.path.split(libname)
|
| 1017 |
+
filename = fmt % (base, ext)
|
| 1018 |
+
if strip_dir:
|
| 1019 |
+
dir = ''
|
| 1020 |
+
|
| 1021 |
+
return os.path.join(output_dir, dir, filename)
|
| 1022 |
+
|
| 1023 |
+
# -- Utility methods -----------------------------------------------
|
| 1024 |
+
|
| 1025 |
+
    def announce(self, msg, level=1):
        """Log 'msg' via the module logger at debug level; 'level' is
        accepted for API compatibility but ignored."""
        log.debug(msg)
|
| 1027 |
+
|
| 1028 |
+
    def debug_print(self, msg):
        """Print 'msg' to stdout, but only when the distutils DEBUG
        flag (distutils.debug.DEBUG) is set."""
        # Imported lazily so merely loading this module does not pull
        # in the debug machinery.
        from distutils.debug import DEBUG

        if DEBUG:
            print(msg)
|
| 1033 |
+
|
| 1034 |
+
def warn(self, msg):
|
| 1035 |
+
sys.stderr.write("warning: %s\n" % msg)
|
| 1036 |
+
|
| 1037 |
+
    def execute(self, func, args, msg=None, level=1):
        """Run 'func(*args)' via the module-level execute() helper,
        honoring this object's dry_run flag; 'level' is ignored."""
        execute(func, args, msg, self.dry_run)
|
| 1039 |
+
|
| 1040 |
+
    def spawn(self, cmd, **kwargs):
        """Spawn external command 'cmd' via the module-level spawn()
        helper, honoring this object's dry_run flag."""
        spawn(cmd, dry_run=self.dry_run, **kwargs)
|
| 1042 |
+
|
| 1043 |
+
    def move_file(self, src, dst):
        """Move file 'src' to 'dst' via the module-level move_file()
        helper, honoring this object's dry_run flag."""
        return move_file(src, dst, dry_run=self.dry_run)
|
| 1045 |
+
|
| 1046 |
+
    def mkpath(self, name, mode=0o777):
        """Create directory 'name' (and any missing ancestors) via the
        module-level mkpath() helper, honoring the dry_run flag."""
        mkpath(name, mode, dry_run=self.dry_run)
|
| 1048 |
+
|
| 1049 |
+
|
| 1050 |
+
# Map a sys.platform/os.name ('posix', 'nt') to the default compiler
# type for that platform.  Keys are interpreted as re match
# patterns (matched from the start of the string).  Order is important;
# platform mappings are preferred over OS names, so the more specific
# sys.platform entries come first.
_default_compilers = (
    # Platform string mappings
    # on a cygwin built python we can use gcc like an ordinary UNIXish
    # compiler
    ('cygwin.*', 'unix'),
    # OS name mappings
    ('posix', 'unix'),
    ('nt', 'msvc'),
)
|
| 1063 |
+
|
| 1064 |
+
|
| 1065 |
+
def get_default_compiler(osname=None, platform=None):
    """Determine the default compiler to use for the given platform.

    osname should be one of the standard Python OS names (i.e. the
    ones returned by os.name) and platform the common value
    returned by sys.platform for the platform in question.

    The default values are os.name and sys.platform in case the
    parameters are not given.
    """
    osname = os.name if osname is None else osname
    platform = sys.platform if platform is None else platform
    # First table entry whose pattern matches either string wins.
    matches = (
        compiler
        for pattern, compiler in _default_compilers
        if re.match(pattern, platform) is not None
        or re.match(pattern, osname) is not None
    )
    # Default to the Unix compiler when nothing matches.
    return next(matches, 'unix')
|
| 1087 |
+
|
| 1088 |
+
|
| 1089 |
+
# Map compiler types to (module_name, class_name, description) triples --
# ie. where to find the code that implements an interface to this
# compiler.  (The module is assumed to be in the 'distutils' package.)
# The third element is the human-readable description shown by
# show_compilers().
compiler_class = {
    'unix': ('unixccompiler', 'UnixCCompiler', "standard UNIX-style compiler"),
    'msvc': ('_msvccompiler', 'MSVCCompiler', "Microsoft Visual C++"),
    'cygwin': (
        'cygwinccompiler',
        'CygwinCCompiler',
        "Cygwin port of GNU C Compiler for Win32",
    ),
    'mingw32': (
        'cygwinccompiler',
        'Mingw32CCompiler',
        "Mingw32 port of GNU C Compiler for Win32",
    ),
    'bcpp': ('bcppcompiler', 'BCPPCompiler', "Borland C++ Compiler"),
}
|
| 1107 |
+
|
| 1108 |
+
|
| 1109 |
+
def show_compilers():
    """Print list of available compilers (used by the "--help-compiler"
    options to "build", "build_ext", "build_clib").
    """
    # XXX this "knows" that the compiler option it's describing is
    # "--compiler", which just happens to be the case for the three
    # commands that use it.
    from distutils.fancy_getopt import FancyGetopt

    entries = sorted(
        ("compiler=" + name, None, spec[2]) for name, spec in compiler_class.items()
    )
    FancyGetopt(entries).print_help("List of available compilers:")
|
| 1124 |
+
|
| 1125 |
+
|
| 1126 |
+
def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
    """Generate an instance of some CCompiler subclass for the supplied
    platform/compiler combination. 'plat' defaults to 'os.name'
    (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
    for that platform. Currently only 'posix' and 'nt' are supported, and
    the default compilers are "traditional Unix interface" (UnixCCompiler
    class) and Visual C++ (MSVCCompiler class). Note that it's perfectly
    possible to ask for a Unix compiler object under Windows, and a
    Microsoft compiler object under Unix -- if you supply a value for
    'compiler', 'plat' is ignored.
    """
    if plat is None:
        plat = os.name

    try:
        if compiler is None:
            compiler = get_default_compiler(plat)

        (module_name, class_name, long_description) = compiler_class[compiler]
    except KeyError:
        msg = "don't know how to compile C/C++ code on platform '%s'" % plat
        if compiler is not None:
            msg = msg + " with '%s' compiler" % compiler
        # The KeyError is just a failed table lookup -- suppress it so
        # the traceback shows only the meaningful platform error.
        raise DistutilsPlatformError(msg) from None

    try:
        module_name = "distutils." + module_name
        __import__(module_name)
        module = sys.modules[module_name]
        klass = vars(module)[class_name]
    except ImportError as exc:
        # Fixed: chain the original exception so the real import
        # failure is visible in the traceback.
        raise DistutilsModuleError(
            "can't compile C/C++ code: unable to load module '%s'" % module_name
        ) from exc
    except KeyError as exc:
        raise DistutilsModuleError(
            "can't compile C/C++ code: unable to find class '%s' "
            "in module '%s'" % (class_name, module_name)
        ) from exc

    # XXX The None is necessary to preserve backwards compatibility
    # with classes that expect verbose to be the first positional
    # argument.  ('verbose' itself is accepted but unused.)
    return klass(None, dry_run, force)
|
| 1170 |
+
|
| 1171 |
+
|
| 1172 |
+
def gen_preprocess_options(macros, include_dirs):
    """Generate C pre-processor options (-D, -U, -I) as used by at least
    two types of compilers: the typical Unix compiler and Visual C++.
    'macros' is the usual thing, a list of 1- or 2-tuples, where (name,)
    means undefine (-U) macro 'name', and (name,value) means define (-D)
    macro 'name' to 'value'. 'include_dirs' is just a list of directory
    names to be added to the header file search path (-I). Returns a list
    of command-line options suitable for either Unix compilers or Visual
    C++.
    """
    # Redundant -D/-U/-I entries are deliberately left alone: compilers
    # honor the last mention anyway, and weeding them out belongs to
    # CCompiler if anywhere.
    pp_opts = []
    for macro in macros:
        malformed = not isinstance(macro, tuple) or not 1 <= len(macro) <= 2
        if malformed:
            raise TypeError(
                "bad macro definition '%s': "
                "each element of 'macros' list must be a 1- or 2-tuple" % macro
            )

        if len(macro) == 1:
            # (name,) -> undefine this macro
            pp_opts.append("-U%s" % macro[0])
        elif macro[1] is None:
            # (name, None) -> define with no explicit value
            pp_opts.append("-D%s" % macro[0])
        else:
            # (name, value) -> define with a value; no quoting needed
            # because commands are spawned without a shell.
            pp_opts.append("-D%s=%s" % macro)

    pp_opts.extend("-I%s" % incdir for incdir in include_dirs)
    return pp_opts
|
| 1215 |
+
|
| 1216 |
+
|
| 1217 |
+
def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
    """Generate linker options for searching library directories and
    linking with specific libraries. 'libraries' and 'library_dirs' are,
    respectively, lists of library names (not filenames!) and search
    directories. Returns a list of command-line options suitable for use
    with some compiler (depending on the two format strings passed in).
    """
    lib_opts = [compiler.library_dir_option(d) for d in library_dirs]

    for d in runtime_library_dirs:
        opt = compiler.runtime_library_dir_option(d)
        if isinstance(opt, list):
            lib_opts.extend(opt)
        else:
            lib_opts.append(opt)

    # Redundant library mentions are intentionally preserved: repeating
    # "-lfoo" can be necessary to resolve all symbols.
    for lib in libraries:
        lib_dir, lib_name = os.path.split(lib)
        if not lib_dir:
            # Plain name: let the compiler format the -l option.
            lib_opts.append(compiler.library_option(lib))
            continue
        # Name carries a directory: resolve it to an actual file.
        lib_file = compiler.find_library_file([lib_dir], lib_name)
        if lib_file:
            lib_opts.append(lib_file)
        else:
            compiler.warn(
                "no library file corresponding to " "'%s' found (skipping)" % lib
            )
    return lib_opts
|
.venv/Lib/site-packages/setuptools/_distutils/cmd.py
ADDED
|
@@ -0,0 +1,435 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.cmd
|
| 2 |
+
|
| 3 |
+
Provides the Command class, the base class for the command classes
|
| 4 |
+
in the distutils.command package.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import sys
|
| 8 |
+
import os
|
| 9 |
+
import re
|
| 10 |
+
import logging
|
| 11 |
+
|
| 12 |
+
from .errors import DistutilsOptionError
|
| 13 |
+
from . import util, dir_util, file_util, archive_util, dep_util
|
| 14 |
+
from ._log import log
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class Command:
|
| 18 |
+
"""Abstract base class for defining command classes, the "worker bees"
|
| 19 |
+
of the Distutils. A useful analogy for command classes is to think of
|
| 20 |
+
them as subroutines with local variables called "options". The options
|
| 21 |
+
are "declared" in 'initialize_options()' and "defined" (given their
|
| 22 |
+
final values, aka "finalized") in 'finalize_options()', both of which
|
| 23 |
+
must be defined by every command class. The distinction between the
|
| 24 |
+
two is necessary because option values might come from the outside
|
| 25 |
+
world (command line, config file, ...), and any options dependent on
|
| 26 |
+
other options must be computed *after* these outside influences have
|
| 27 |
+
been processed -- hence 'finalize_options()'. The "body" of the
|
| 28 |
+
subroutine, where it does all its work based on the values of its
|
| 29 |
+
options, is the 'run()' method, which must also be implemented by every
|
| 30 |
+
command class.
|
| 31 |
+
"""
|
| 32 |
+
|
| 33 |
+
# 'sub_commands' formalizes the notion of a "family" of commands,
|
| 34 |
+
# eg. "install" as the parent with sub-commands "install_lib",
|
| 35 |
+
# "install_headers", etc. The parent of a family of commands
|
| 36 |
+
# defines 'sub_commands' as a class attribute; it's a list of
|
| 37 |
+
# (command_name : string, predicate : unbound_method | string | None)
|
| 38 |
+
# tuples, where 'predicate' is a method of the parent command that
|
| 39 |
+
# determines whether the corresponding command is applicable in the
|
| 40 |
+
# current situation. (Eg. we "install_headers" is only applicable if
|
| 41 |
+
# we have any C header files to install.) If 'predicate' is None,
|
| 42 |
+
# that command is always applicable.
|
| 43 |
+
#
|
| 44 |
+
# 'sub_commands' is usually defined at the *end* of a class, because
|
| 45 |
+
# predicates can be unbound methods, so they must already have been
|
| 46 |
+
# defined. The canonical example is the "install" command.
|
| 47 |
+
sub_commands = []
|
| 48 |
+
|
| 49 |
+
# -- Creation/initialization methods -------------------------------
|
| 50 |
+
|
| 51 |
+
def __init__(self, dist):
|
| 52 |
+
"""Create and initialize a new Command object. Most importantly,
|
| 53 |
+
invokes the 'initialize_options()' method, which is the real
|
| 54 |
+
initializer and depends on the actual command being
|
| 55 |
+
instantiated.
|
| 56 |
+
"""
|
| 57 |
+
# late import because of mutual dependence between these classes
|
| 58 |
+
from distutils.dist import Distribution
|
| 59 |
+
|
| 60 |
+
if not isinstance(dist, Distribution):
|
| 61 |
+
raise TypeError("dist must be a Distribution instance")
|
| 62 |
+
if self.__class__ is Command:
|
| 63 |
+
raise RuntimeError("Command is an abstract class")
|
| 64 |
+
|
| 65 |
+
self.distribution = dist
|
| 66 |
+
self.initialize_options()
|
| 67 |
+
|
| 68 |
+
# Per-command versions of the global flags, so that the user can
|
| 69 |
+
# customize Distutils' behaviour command-by-command and let some
|
| 70 |
+
# commands fall back on the Distribution's behaviour. None means
|
| 71 |
+
# "not defined, check self.distribution's copy", while 0 or 1 mean
|
| 72 |
+
# false and true (duh). Note that this means figuring out the real
|
| 73 |
+
# value of each flag is a touch complicated -- hence "self._dry_run"
|
| 74 |
+
# will be handled by __getattr__, below.
|
| 75 |
+
# XXX This needs to be fixed.
|
| 76 |
+
self._dry_run = None
|
| 77 |
+
|
| 78 |
+
# verbose is largely ignored, but needs to be set for
|
| 79 |
+
# backwards compatibility (I think)?
|
| 80 |
+
self.verbose = dist.verbose
|
| 81 |
+
|
| 82 |
+
# Some commands define a 'self.force' option to ignore file
|
| 83 |
+
# timestamps, but methods defined *here* assume that
|
| 84 |
+
# 'self.force' exists for all commands. So define it here
|
| 85 |
+
# just to be safe.
|
| 86 |
+
self.force = None
|
| 87 |
+
|
| 88 |
+
# The 'help' flag is just used for command-line parsing, so
|
| 89 |
+
# none of that complicated bureaucracy is needed.
|
| 90 |
+
self.help = 0
|
| 91 |
+
|
| 92 |
+
# 'finalized' records whether or not 'finalize_options()' has been
|
| 93 |
+
# called. 'finalize_options()' itself should not pay attention to
|
| 94 |
+
# this flag: it is the business of 'ensure_finalized()', which
|
| 95 |
+
# always calls 'finalize_options()', to respect/update it.
|
| 96 |
+
self.finalized = 0
|
| 97 |
+
|
| 98 |
+
# XXX A more explicit way to customize dry_run would be better.
|
| 99 |
+
def __getattr__(self, attr):
|
| 100 |
+
if attr == 'dry_run':
|
| 101 |
+
myval = getattr(self, "_" + attr)
|
| 102 |
+
if myval is None:
|
| 103 |
+
return getattr(self.distribution, attr)
|
| 104 |
+
else:
|
| 105 |
+
return myval
|
| 106 |
+
else:
|
| 107 |
+
raise AttributeError(attr)
|
| 108 |
+
|
| 109 |
+
def ensure_finalized(self):
|
| 110 |
+
if not self.finalized:
|
| 111 |
+
self.finalize_options()
|
| 112 |
+
self.finalized = 1
|
| 113 |
+
|
| 114 |
+
# Subclasses must define:
|
| 115 |
+
# initialize_options()
|
| 116 |
+
# provide default values for all options; may be customized by
|
| 117 |
+
# setup script, by options from config file(s), or by command-line
|
| 118 |
+
# options
|
| 119 |
+
# finalize_options()
|
| 120 |
+
# decide on the final values for all options; this is called
|
| 121 |
+
# after all possible intervention from the outside world
|
| 122 |
+
# (command-line, option file, etc.) has been processed
|
| 123 |
+
# run()
|
| 124 |
+
# run the command: do whatever it is we're here to do,
|
| 125 |
+
# controlled by the command's various option values
|
| 126 |
+
|
| 127 |
+
def initialize_options(self):
|
| 128 |
+
"""Set default values for all the options that this command
|
| 129 |
+
supports. Note that these defaults may be overridden by other
|
| 130 |
+
commands, by the setup script, by config files, or by the
|
| 131 |
+
command-line. Thus, this is not the place to code dependencies
|
| 132 |
+
between options; generally, 'initialize_options()' implementations
|
| 133 |
+
are just a bunch of "self.foo = None" assignments.
|
| 134 |
+
|
| 135 |
+
This method must be implemented by all command classes.
|
| 136 |
+
"""
|
| 137 |
+
raise RuntimeError(
|
| 138 |
+
"abstract method -- subclass %s must override" % self.__class__
|
| 139 |
+
)
|
| 140 |
+
|
| 141 |
+
def finalize_options(self):
|
| 142 |
+
"""Set final values for all the options that this command supports.
|
| 143 |
+
This is always called as late as possible, ie. after any option
|
| 144 |
+
assignments from the command-line or from other commands have been
|
| 145 |
+
done. Thus, this is the place to code option dependencies: if
|
| 146 |
+
'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
|
| 147 |
+
long as 'foo' still has the same value it was assigned in
|
| 148 |
+
'initialize_options()'.
|
| 149 |
+
|
| 150 |
+
This method must be implemented by all command classes.
|
| 151 |
+
"""
|
| 152 |
+
raise RuntimeError(
|
| 153 |
+
"abstract method -- subclass %s must override" % self.__class__
|
| 154 |
+
)
|
| 155 |
+
|
| 156 |
+
def dump_options(self, header=None, indent=""):
|
| 157 |
+
from distutils.fancy_getopt import longopt_xlate
|
| 158 |
+
|
| 159 |
+
if header is None:
|
| 160 |
+
header = "command options for '%s':" % self.get_command_name()
|
| 161 |
+
self.announce(indent + header, level=logging.INFO)
|
| 162 |
+
indent = indent + " "
|
| 163 |
+
for option, _, _ in self.user_options:
|
| 164 |
+
option = option.translate(longopt_xlate)
|
| 165 |
+
if option[-1] == "=":
|
| 166 |
+
option = option[:-1]
|
| 167 |
+
value = getattr(self, option)
|
| 168 |
+
self.announce(indent + "{} = {}".format(option, value), level=logging.INFO)
|
| 169 |
+
|
| 170 |
+
def run(self):
|
| 171 |
+
"""A command's raison d'etre: carry out the action it exists to
|
| 172 |
+
perform, controlled by the options initialized in
|
| 173 |
+
'initialize_options()', customized by other commands, the setup
|
| 174 |
+
script, the command-line, and config files, and finalized in
|
| 175 |
+
'finalize_options()'. All terminal output and filesystem
|
| 176 |
+
interaction should be done by 'run()'.
|
| 177 |
+
|
| 178 |
+
This method must be implemented by all command classes.
|
| 179 |
+
"""
|
| 180 |
+
raise RuntimeError(
|
| 181 |
+
"abstract method -- subclass %s must override" % self.__class__
|
| 182 |
+
)
|
| 183 |
+
|
| 184 |
+
    def announce(self, msg, level=logging.DEBUG):
        """Log 'msg' at the given logging level (DEBUG by default)."""
        log.log(level, msg)
|
| 186 |
+
|
| 187 |
+
    def debug_print(self, msg):
        """Print 'msg' to stdout if the global DEBUG (taken from the
        DISTUTILS_DEBUG environment variable) flag is true.
        """
        # Imported lazily so merely loading this module does not pull
        # in the debug machinery.
        from distutils.debug import DEBUG

        if DEBUG:
            print(msg)
            # Flush so debug output interleaves correctly with other
            # (possibly buffered) output streams.
            sys.stdout.flush()
|
| 196 |
+
|
| 197 |
+
# -- Option validation methods -------------------------------------
|
| 198 |
+
# (these are very handy in writing the 'finalize_options()' method)
|
| 199 |
+
#
|
| 200 |
+
# NB. the general philosophy here is to ensure that a particular option
|
| 201 |
+
# value meets certain type and value constraints. If not, we try to
|
| 202 |
+
# force it into conformance (eg. if we expect a list but have a string,
|
| 203 |
+
# split the string on comma and/or whitespace). If we can't force the
|
| 204 |
+
# option into conformance, raise DistutilsOptionError. Thus, command
|
| 205 |
+
# classes need do nothing more than (eg.)
|
| 206 |
+
# self.ensure_string_list('foo')
|
| 207 |
+
# and they can be guaranteed that thereafter, self.foo will be
|
| 208 |
+
# a list of strings.
|
| 209 |
+
|
| 210 |
+
def _ensure_stringlike(self, option, what, default=None):
|
| 211 |
+
val = getattr(self, option)
|
| 212 |
+
if val is None:
|
| 213 |
+
setattr(self, option, default)
|
| 214 |
+
return default
|
| 215 |
+
elif not isinstance(val, str):
|
| 216 |
+
raise DistutilsOptionError(
|
| 217 |
+
"'{}' must be a {} (got `{}`)".format(option, what, val)
|
| 218 |
+
)
|
| 219 |
+
return val
|
| 220 |
+
|
| 221 |
+
    def ensure_string(self, option, default=None):
        """Ensure that 'option' is a string; if not defined, set it to
        'default'.
        """
        # _ensure_stringlike stores 'default' back on self when the
        # option was None, so the return value can be ignored here.
        self._ensure_stringlike(option, "string", default)
|
| 226 |
+
|
| 227 |
+
def ensure_string_list(self, option):
|
| 228 |
+
r"""Ensure that 'option' is a list of strings. If 'option' is
|
| 229 |
+
currently a string, we split it either on /,\s*/ or /\s+/, so
|
| 230 |
+
"foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
|
| 231 |
+
["foo", "bar", "baz"].
|
| 232 |
+
"""
|
| 233 |
+
val = getattr(self, option)
|
| 234 |
+
if val is None:
|
| 235 |
+
return
|
| 236 |
+
elif isinstance(val, str):
|
| 237 |
+
setattr(self, option, re.split(r',\s*|\s+', val))
|
| 238 |
+
else:
|
| 239 |
+
if isinstance(val, list):
|
| 240 |
+
ok = all(isinstance(v, str) for v in val)
|
| 241 |
+
else:
|
| 242 |
+
ok = False
|
| 243 |
+
if not ok:
|
| 244 |
+
raise DistutilsOptionError(
|
| 245 |
+
"'{}' must be a list of strings (got {!r})".format(option, val)
|
| 246 |
+
)
|
| 247 |
+
|
| 248 |
+
def _ensure_tested_string(self, option, tester, what, error_fmt, default=None):
|
| 249 |
+
val = self._ensure_stringlike(option, what, default)
|
| 250 |
+
if val is not None and not tester(val):
|
| 251 |
+
raise DistutilsOptionError(
|
| 252 |
+
("error in '%s' option: " + error_fmt) % (option, val)
|
| 253 |
+
)
|
| 254 |
+
|
| 255 |
+
    def ensure_filename(self, option):
        """Ensure that 'option' is the name of an existing file."""
        # Raises DistutilsOptionError (via _ensure_tested_string) when
        # the path is missing or is not a regular file.
        self._ensure_tested_string(
            option, os.path.isfile, "filename", "'%s' does not exist or is not a file"
        )
|
| 260 |
+
|
| 261 |
+
    def ensure_dirname(self, option):
        """Ensure that 'option' is the name of an existing directory."""
        self._ensure_tested_string(
            option,
            os.path.isdir,
            "directory name",
            "'%s' does not exist or is not a directory",
        )
|
| 268 |
+
|
| 269 |
+
# -- Convenience methods for commands ------------------------------
|
| 270 |
+
|
| 271 |
+
def get_command_name(self):
|
| 272 |
+
if hasattr(self, 'command_name'):
|
| 273 |
+
return self.command_name
|
| 274 |
+
else:
|
| 275 |
+
return self.__class__.__name__
|
| 276 |
+
|
| 277 |
+
def set_undefined_options(self, src_cmd, *option_pairs):
    """Fill in this command's still-"undefined" options from another
    command's option values.

    "Undefined" means "is None" -- the convention for an option that was
    not changed between 'initialize_options()' and 'finalize_options()'.
    Usually called from 'finalize_options()' for options that depend on
    some other command rather than another option of the same command.
    'src_cmd' names the source command (its object is created on demand
    and finalized first); each remaining argument is a
    '(src_option, dst_option)' pair meaning "copy 'src_option' from the
    source command into 'dst_option' here when 'dst_option' is None".
    """
    source = self.distribution.get_command_obj(src_cmd)
    source.ensure_finalized()
    for src_option, dst_option in option_pairs:
        if getattr(self, dst_option) is None:
            setattr(self, dst_option, getattr(source, src_option))
|
| 297 |
+
|
| 298 |
+
def get_finalized_command(self, command, create=1):
    """Return the finalized command object for 'command'.

    Thin wrapper around Distribution's 'get_command_obj()': look up
    (creating if necessary and 'create' is true) the command object,
    call its 'ensure_finalized()' method, and hand it back.
    """
    obj = self.distribution.get_command_obj(command, create)
    obj.ensure_finalized()
    return obj
|
| 307 |
+
|
| 308 |
+
# XXX rename to 'get_reinitialized_command()'? (should do the
# same in dist.py, if so)
def reinitialize_command(self, command, reinit_subcommands=0):
    """Delegate to Distribution.reinitialize_command() and return its
    result.
    """
    return self.distribution.reinitialize_command(command, reinit_subcommands)
|
| 312 |
+
|
| 313 |
+
def run_command(self, command):
    """Run some other command through Distribution's 'run_command()'
    method, which creates and finalizes the command object if necessary
    and then invokes its 'run()' method.
    """
    self.distribution.run_command(command)
|
| 319 |
+
|
| 320 |
+
def get_sub_commands(self):
    """Return the names of the sub-commands relevant to the current
    distribution (i.e. that need to be run).

    Each entry of the 'sub_commands' class attribute is a
    (name, predicate) pair; a sub-command is included when its predicate
    is None or returns true for this command.
    """
    return [
        name
        for name, predicate in self.sub_commands
        if predicate is None or predicate(self)
    ]
|
| 332 |
+
|
| 333 |
+
# -- External world manipulation -----------------------------------
|
| 334 |
+
|
| 335 |
+
def warn(self, msg):
    """Log a warning message tagged with this command's name."""
    log.warning("warning: %s: %s\n", self.get_command_name(), msg)
|
| 337 |
+
|
| 338 |
+
def execute(self, func, args, msg=None, level=1):
    """Run func(*args) through distutils.util.execute, honoring the
    command's dry-run flag.  'level' is accepted for interface
    compatibility and is unused here.
    """
    util.execute(func, args, msg, dry_run=self.dry_run)
|
| 340 |
+
|
| 341 |
+
def mkpath(self, name, mode=0o777):
    """Create a directory (and any missing ancestors), honoring the
    dry-run flag.
    """
    dir_util.mkpath(name, mode, dry_run=self.dry_run)
|
| 343 |
+
|
| 344 |
+
def copy_file(
    self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1
):
    """Copy a file respecting verbose, dry-run and force flags.  (The
    former two default to whatever is in the Distribution object, and
    the latter defaults to false for commands that don't define it.)"""
    # Only skip up-to-date targets when the command is not forcing.
    update = not self.force
    return file_util.copy_file(
        infile,
        outfile,
        preserve_mode,
        preserve_times,
        update,
        link,
        dry_run=self.dry_run,
    )
|
| 359 |
+
|
| 360 |
+
def copy_tree(
    self,
    infile,
    outfile,
    preserve_mode=1,
    preserve_times=1,
    preserve_symlinks=0,
    level=1,
):
    """Copy an entire directory tree respecting verbose, dry-run,
    and force flags.
    """
    # Only skip up-to-date targets when the command is not forcing.
    update = not self.force
    return dir_util.copy_tree(
        infile,
        outfile,
        preserve_mode,
        preserve_times,
        preserve_symlinks,
        update,
        dry_run=self.dry_run,
    )
|
| 381 |
+
|
| 382 |
+
def move_file(self, src, dst, level=1):
    """Move a file, honoring the dry-run flag.  Returns the new
    location (whatever file_util.move_file returns).
    """
    return file_util.move_file(src, dst, dry_run=self.dry_run)
|
| 385 |
+
|
| 386 |
+
def spawn(self, cmd, search_path=1, level=1):
    """Spawn an external command, honoring the dry-run flag."""
    from distutils.spawn import spawn

    spawn(cmd, search_path, dry_run=self.dry_run)
|
| 391 |
+
|
| 392 |
+
def make_archive(
    self, base_name, format, root_dir=None, base_dir=None, owner=None, group=None
):
    """Create an archive via distutils.archive_util.make_archive,
    honoring the dry-run flag; returns the archive's path.
    """
    return archive_util.make_archive(
        base_name,
        format,
        root_dir,
        base_dir,
        dry_run=self.dry_run,
        owner=owner,
        group=group,
    )
|
| 404 |
+
|
| 405 |
+
def make_file(
    self, infiles, outfile, func, args, exec_msg=None, skip_msg=None, level=1
):
    """Special case of 'execute()' for operations that turn one or more
    input files into a single output file.

    Works like 'execute()', except the call is skipped (and 'skip_msg'
    logged instead) when 'outfile' already exists and is newer than
    every file in 'infiles'.  If the command defines a true
    'self.force', the command runs unconditionally with no timestamp
    checks.
    """
    if skip_msg is None:
        skip_msg = "skipping %s (inputs unchanged)" % outfile

    # Normalize 'infiles' to a sequence; a bare string means one file.
    if isinstance(infiles, str):
        infiles = (infiles,)
    elif not isinstance(infiles, (list, tuple)):
        raise TypeError("'infiles' must be a string, or a list or tuple of strings")

    if exec_msg is None:
        exec_msg = "generating {} from {}".format(outfile, ', '.join(infiles))

    # Regenerate when forced, or when any input is newer than the
    # output (or the output does not exist); otherwise just log.
    if self.force or dep_util.newer_group(infiles, outfile):
        self.execute(func, args, exec_msg, level)
    else:
        log.debug(skip_msg)
|
.venv/Lib/site-packages/setuptools/_distutils/config.py
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.pypirc
|
| 2 |
+
|
| 3 |
+
Provides the PyPIRCCommand class, the base class for the command classes
|
| 4 |
+
that uses .pypirc in the distutils.command package.
|
| 5 |
+
"""
|
| 6 |
+
import os
|
| 7 |
+
from configparser import RawConfigParser
|
| 8 |
+
|
| 9 |
+
from .cmd import Command
|
| 10 |
+
|
| 11 |
+
DEFAULT_PYPIRC = """\
|
| 12 |
+
[distutils]
|
| 13 |
+
index-servers =
|
| 14 |
+
pypi
|
| 15 |
+
|
| 16 |
+
[pypi]
|
| 17 |
+
username:%s
|
| 18 |
+
password:%s
|
| 19 |
+
"""
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class PyPIRCCommand(Command):
    """Base command that knows how to handle the .pypirc file"""

    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
    DEFAULT_REALM = 'pypi'
    # Class-level defaults; instances overwrite these in
    # initialize_options()/finalize_options().
    repository = None
    realm = None

    user_options = [
        ('repository=', 'r', "url of repository [default: %s]" % DEFAULT_REPOSITORY),
        ('show-response', None, 'display full response text from server'),
    ]

    boolean_options = ['show-response']

    def _get_rc_file(self):
        """Return the path of the per-user .pypirc file."""
        return os.path.join(os.path.expanduser('~'), '.pypirc')

    def _store_pypirc(self, username, password):
        """Create a default .pypirc file holding the given credentials."""
        rc = self._get_rc_file()
        # Mode 0o600: the stored credentials must be readable only by
        # the owner.
        with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f:
            f.write(DEFAULT_PYPIRC % (username, password))

    def _read_pypirc(self):  # noqa: C901
        """Read the .pypirc file.

        Returns a dict with keys 'server', 'username', 'password',
        'repository' and 'realm' for the server matching
        'self.repository' (or the default repository), or {} when no
        usable configuration is found.
        """
        rc = self._get_rc_file()
        if os.path.exists(rc):
            self.announce('Using PyPI login from %s' % rc)
            repository = self.repository or self.DEFAULT_REPOSITORY

            config = RawConfigParser()
            config.read(rc)
            sections = config.sections()
            if 'distutils' in sections:
                # let's get the list of servers
                index_servers = config.get('distutils', 'index-servers')
                _servers = [
                    server.strip()
                    for server in index_servers.split('\n')
                    if server.strip() != ''
                ]
                if _servers == []:
                    # nothing set, let's try to get the default pypi
                    if 'pypi' in sections:
                        _servers = ['pypi']
                    else:
                        # the file is not properly defined, returning
                        # an empty dict
                        return {}
                for server in _servers:
                    current = {'server': server}
                    current['username'] = config.get(server, 'username')

                    # optional params
                    for key, default in (
                        ('repository', self.DEFAULT_REPOSITORY),
                        ('realm', self.DEFAULT_REALM),
                        ('password', None),
                    ):
                        if config.has_option(server, key):
                            current[key] = config.get(server, key)
                        else:
                            current[key] = default

                    # work around people having "repository" for the "pypi"
                    # section of their config set to the HTTP (rather than
                    # HTTPS) URL
                    if server == 'pypi' and repository in (
                        self.DEFAULT_REPOSITORY,
                        'pypi',
                    ):
                        current['repository'] = self.DEFAULT_REPOSITORY
                        return current

                    if (
                        current['server'] == repository
                        or current['repository'] == repository
                    ):
                        return current
            elif 'server-login' in sections:
                # old format
                server = 'server-login'
                if config.has_option(server, 'repository'):
                    repository = config.get(server, 'repository')
                else:
                    repository = self.DEFAULT_REPOSITORY
                return {
                    'username': config.get(server, 'username'),
                    'password': config.get(server, 'password'),
                    'repository': repository,
                    'server': server,
                    'realm': self.DEFAULT_REALM,
                }

        return {}

    def _read_pypi_response(self, response):
        """Read and decode a PyPI HTTP response."""
        import email.message

        # 'cgi.parse_header' was removed along with the cgi module in
        # Python 3.13 (PEP 594).  email.message implements the same
        # MIME-parameter syntax, so parse the Content-Type header with
        # it instead; behavior (charset lookup, 'ascii' fallback) is
        # unchanged.
        msg = email.message.Message()
        msg['content-type'] = response.getheader('content-type', 'text/plain')
        encoding = msg.get_param('charset') or 'ascii'
        return response.read().decode(encoding)

    def initialize_options(self):
        """Initialize options."""
        self.repository = None
        self.realm = None
        self.show_response = 0

    def finalize_options(self):
        """Finalizes options."""
        if self.repository is None:
            self.repository = self.DEFAULT_REPOSITORY
        if self.realm is None:
            self.realm = self.DEFAULT_REALM
|
.venv/Lib/site-packages/setuptools/_distutils/core.py
ADDED
|
@@ -0,0 +1,291 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.core
|
| 2 |
+
|
| 3 |
+
The only module that needs to be imported to use the Distutils; provides
|
| 4 |
+
the 'setup' function (which is to be called from the setup script). Also
|
| 5 |
+
indirectly provides the Distribution and Command classes, although they are
|
| 6 |
+
really defined in distutils.dist and distutils.cmd.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import os
|
| 10 |
+
import sys
|
| 11 |
+
import tokenize
|
| 12 |
+
|
| 13 |
+
from .debug import DEBUG
|
| 14 |
+
from .errors import (
|
| 15 |
+
DistutilsSetupError,
|
| 16 |
+
DistutilsError,
|
| 17 |
+
CCompilerError,
|
| 18 |
+
DistutilsArgError,
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
# Mainly import these so setup scripts can "from distutils.core import" them.
|
| 22 |
+
from .dist import Distribution
|
| 23 |
+
from .cmd import Command
|
| 24 |
+
from .config import PyPIRCCommand
|
| 25 |
+
from .extension import Extension
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
__all__ = ['Distribution', 'Command', 'PyPIRCCommand', 'Extension', 'setup']
|
| 29 |
+
|
| 30 |
+
# This is a barebones help message generated displayed when the user
|
| 31 |
+
# runs the setup script with no arguments at all. More useful help
|
| 32 |
+
# is generated with various --help options: global help, list commands,
|
| 33 |
+
# and per-command help.
|
| 34 |
+
USAGE = """\
|
| 35 |
+
usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
|
| 36 |
+
or: %(script)s --help [cmd1 cmd2 ...]
|
| 37 |
+
or: %(script)s --help-commands
|
| 38 |
+
or: %(script)s cmd --help
|
| 39 |
+
"""
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def gen_usage(script_name):
    """Return the generic usage message with the basename of
    'script_name' substituted in.
    """
    script = os.path.basename(script_name)
    return USAGE % {'script': script}
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'.
# run_setup() sets _setup_stop_after before exec'ing the script so that
# setup() returns early, and reads the Distribution instance back out of
# _setup_distribution afterwards.
_setup_stop_after = None
_setup_distribution = None

# Legal keyword arguments for the setup() function
setup_keywords = (
    'distclass',
    'script_name',
    'script_args',
    'options',
    'name',
    'version',
    'author',
    'author_email',
    'maintainer',
    'maintainer_email',
    'url',
    'license',
    'description',
    'long_description',
    'keywords',
    'platforms',
    'classifiers',
    'download_url',
    'requires',
    'provides',
    'obsoletes',
)

# Legal keyword arguments for the Extension constructor
extension_keywords = (
    'name',
    'sources',
    'include_dirs',
    'define_macros',
    'undef_macros',
    'library_dirs',
    'libraries',
    'runtime_library_dirs',
    'extra_objects',
    'extra_compile_args',
    'extra_link_args',
    'swig_opts',
    'export_symbols',
    'depends',
    'language',
)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def setup(**attrs):  # noqa: C901
    """The gateway to the Distutils: do everything your setup script needs
    to do, in a highly flexible and user-driven way.  Briefly: create a
    Distribution instance; find and parse config files; parse the command
    line; run each Distutils command found there, customized by the options
    supplied to 'setup()' (as keyword arguments), in config files, and on
    the command line.

    The Distribution instance might be an instance of a class supplied via
    the 'distclass' keyword argument to 'setup'; if no such class is
    supplied, then the Distribution class (in dist.py) is instantiated.
    All other arguments to 'setup' (except for 'cmdclass') are used to set
    attributes of the Distribution instance.

    The 'cmdclass' argument, if supplied, is a dictionary mapping command
    names to command classes.  Each command encountered on the command line
    will be turned into a command class, which is in turn instantiated; any
    class found in 'cmdclass' is used in place of the default, which is
    (for command 'foo_bar') class 'foo_bar' in module
    'distutils.command.foo_bar'.  The command class must provide a
    'user_options' attribute which is a list of option specifiers for
    'distutils.fancy_getopt'.  Any command-line options between the current
    and the next command are used to set attributes of the current command
    object.

    When the entire command-line has been successfully parsed, calls the
    'run()' method on each command object in turn.  This method will be
    driven entirely by the Distribution object (which each command object
    has a reference to, thanks to its constructor), and the
    command-specific options that became attributes of each command
    object.
    """

    # These module globals let run_setup() stop this function early and
    # retrieve the Distribution instance it built.
    global _setup_stop_after, _setup_distribution

    # Determine the distribution class -- either caller-supplied or
    # our Distribution (see below).
    klass = attrs.get('distclass')
    if klass:
        attrs.pop('distclass')
    else:
        klass = Distribution

    # Default the script identity from sys.argv when not given explicitly.
    if 'script_name' not in attrs:
        attrs['script_name'] = os.path.basename(sys.argv[0])
    if 'script_args' not in attrs:
        attrs['script_args'] = sys.argv[1:]

    # Create the Distribution instance, using the remaining arguments
    # (ie. everything except distclass) to initialize it
    try:
        _setup_distribution = dist = klass(attrs)
    except DistutilsSetupError as msg:
        if 'name' not in attrs:
            raise SystemExit("error in setup command: %s" % msg)
        else:
            raise SystemExit("error in {} setup command: {}".format(attrs['name'], msg))

    if _setup_stop_after == "init":
        return dist

    # Find and parse the config file(s): they will override options from
    # the setup script, but be overridden by the command line.
    dist.parse_config_files()

    if DEBUG:
        print("options (after parsing config files):")
        dist.dump_option_dicts()

    if _setup_stop_after == "config":
        return dist

    # Parse the command line and override config files; any
    # command-line errors are the end user's fault, so turn them into
    # SystemExit to suppress tracebacks.
    try:
        ok = dist.parse_command_line()
    except DistutilsArgError as msg:
        raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg)

    if DEBUG:
        print("options (after parsing command line):")
        dist.dump_option_dicts()

    if _setup_stop_after == "commandline":
        return dist

    # And finally, run all the commands found on the command line.
    if ok:
        return run_commands(dist)

    return dist


# setup ()
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
def run_commands(dist):
    """Run every command of a fully configured Distribution, turning
    failures into ``SystemExit`` errors.

    This function assumes that either ``sys.argv`` or
    ``dist.script_args`` is already set accordingly.  Returns 'dist'.
    """
    try:
        dist.run_commands()
    except KeyboardInterrupt:
        raise SystemExit("interrupted")
    except OSError as exc:
        # In debug mode, show the error and let the traceback
        # propagate; otherwise hide the traceback behind a SystemExit.
        if DEBUG:
            sys.stderr.write("error: {}\n".format(exc))
            raise
        raise SystemExit("error: {}".format(exc))
    except (DistutilsError, CCompilerError) as err:
        if DEBUG:
            raise
        raise SystemExit("error: " + str(err))
    return dist
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
def run_setup(script_name, script_args=None, stop_after="run"):
    """Run a setup script in a somewhat controlled environment, and
    return the Distribution instance that drives things.  This is useful
    if you need to find out the distribution meta-data (passed as
    keyword args from 'script' to 'setup()', or the contents of the
    config files or command-line.

    'script_name' is a file that will be read and run with 'exec()';
    'sys.argv[0]' will be replaced with 'script' for the duration of the
    call.  'script_args' is a list of strings; if supplied,
    'sys.argv[1:]' will be replaced by 'script_args' for the duration of
    the call.

    'stop_after' tells 'setup()' when to stop processing; possible
    values:
      init
        stop after the Distribution instance has been created and
        populated with the keyword arguments to 'setup()'
      config
        stop after config files have been parsed (and their data
        stored in the Distribution instance)
      commandline
        stop after the command-line ('sys.argv[1:]' or 'script_args')
        have been parsed (and the data stored in the Distribution)
      run [default]
        stop after all commands have been run (the same as if 'setup()'
        had been called in the usual way

    Returns the Distribution instance, which provides all information
    used to drive the Distutils.

    Raises RuntimeError when the script never called setup().
    """
    if stop_after not in ('init', 'config', 'commandline', 'run'):
        raise ValueError("invalid value for 'stop_after': {!r}".format(stop_after))

    global _setup_stop_after, _setup_distribution
    _setup_stop_after = stop_after

    save_argv = sys.argv.copy()
    g = {'__file__': script_name, '__name__': '__main__'}
    try:
        try:
            sys.argv[0] = script_name
            if script_args is not None:
                sys.argv[1:] = script_args
            # tokenize.open supports automatic encoding detection
            with tokenize.open(script_name) as f:
                # NOTE(review): the raw strings mean this replaces the
                # literal character sequences backslash-r-backslash-n,
                # not actual CRLF line endings (tokenize.open already
                # translates newlines) -- kept as-is for
                # bug-compatibility with upstream.
                code = f.read().replace(r'\r\n', r'\n')
                exec(code, g)
        finally:
            # Always restore argv and clear the stop marker, even when
            # the script raises.
            sys.argv = save_argv
            _setup_stop_after = None
    except SystemExit:
        # Hmm, should we do something if exiting with a non-zero code
        # (ie. error)?
        pass

    if _setup_distribution is None:
        raise RuntimeError(
            (
                "'distutils.core.setup()' was never called -- "
                "perhaps '%s' is not a Distutils setup script?"
            )
            % script_name
        )

    # I wonder if the setup script's namespace -- g and l -- would be of
    # any interest to callers?
    # print "_setup_distribution:", _setup_distribution
    return _setup_distribution


# run_setup ()
|
.venv/Lib/site-packages/setuptools/_distutils/cygwinccompiler.py
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.cygwinccompiler
|
| 2 |
+
|
| 3 |
+
Provides the CygwinCCompiler class, a subclass of UnixCCompiler that
|
| 4 |
+
handles the Cygwin port of the GNU C compiler to Windows. It also contains
|
| 5 |
+
the Mingw32CCompiler class which handles the mingw32 port of GCC (same as
|
| 6 |
+
cygwin in no-cygwin mode).
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import os
|
| 10 |
+
import re
|
| 11 |
+
import sys
|
| 12 |
+
import copy
|
| 13 |
+
import shlex
|
| 14 |
+
import warnings
|
| 15 |
+
from subprocess import check_output
|
| 16 |
+
|
| 17 |
+
from .unixccompiler import UnixCCompiler
|
| 18 |
+
from .file_util import write_file
|
| 19 |
+
from .errors import (
|
| 20 |
+
DistutilsExecError,
|
| 21 |
+
DistutilsPlatformError,
|
| 22 |
+
CCompilerError,
|
| 23 |
+
CompileError,
|
| 24 |
+
)
|
| 25 |
+
from .version import LooseVersion, suppress_known_deprecation
|
| 26 |
+
from ._collections import RangeMap
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
# Map the 'MSC v.NNNN' compiler version embedded in sys.version to the
# MSVC runtime DLL name(s) to link against.
# NOTE(review): RangeMap.left presumably treats each key as the lower
# bound of a half-open range extending to the next key -- confirm
# against distutils._collections.RangeMap.
_msvcr_lookup = RangeMap.left(
    {
        # MSVC 7.0
        1300: ['msvcr70'],
        # MSVC 7.1
        1310: ['msvcr71'],
        # VS2005 / MSVC 8.0
        1400: ['msvcr80'],
        # VS2008 / MSVC 9.0
        1500: ['msvcr90'],
        # VS2010 / MSVC 10.0
        1600: ['msvcr100'],
        # VS2012 / MSVC 11.0
        1700: ['msvcr110'],
        # VS2013 / MSVC 12.0
        1800: ['msvcr120'],
        # VS2015 / MSVC 14.0
        1900: ['vcruntime140'],
        # No runtime defined for compiler versions 2000 and above.
        2000: RangeMap.undefined_value,
    },
)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def get_msvcr():
    """Return the appropriate MSVC runtime library names if Python was
    built with MSVC 7.0 or later; return None when sys.version carries
    no 'MSC v.NNNN' tag.  Raises ValueError for an unknown compiler
    version.
    """
    match = re.search(r'MSC v\.(\d{4})', sys.version)
    if match is None:
        # Not an MSVC build of Python.
        return None
    msc_ver = int(match.group(1))
    try:
        return _msvcr_lookup[msc_ver]
    except KeyError:
        raise ValueError("Unknown MS Compiler version %s " % msc_ver)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
# Warning text emitted by the compiler classes when an Extension
# requests 'runtime_library_dirs', which has no equivalent on Windows.
_runtime_library_dirs_msg = (
    "Unable to set runtime library search path on Windows, "
    "usually indicated by `runtime_library_dirs` parameter to Extension"
)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class CygwinCCompiler(UnixCCompiler):
    """Handles the Cygwin port of the GNU C compiler to Windows."""

    compiler_type = 'cygwin'
    # File-name conventions for Cygwin build artifacts.
    obj_extension = ".o"
    static_lib_extension = ".a"
    # Import libraries are lib<name>.dll.a; the DLLs themselves follow
    # the Cygwin 'cyg<name>.dll' naming convention.
    shared_lib_extension = ".dll.a"
    dylib_lib_extension = ".dll"
    static_lib_format = "lib%s%s"
    shared_lib_format = "lib%s%s"
    dylib_lib_format = "cyg%s%s"
    exe_extension = ".exe"
|
| 85 |
+
|
| 86 |
+
def __init__(self, verbose=0, dry_run=0, force=0):
|
| 87 |
+
super().__init__(verbose, dry_run, force)
|
| 88 |
+
|
| 89 |
+
status, details = check_config_h()
|
| 90 |
+
self.debug_print(
|
| 91 |
+
"Python's GCC status: {} (details: {})".format(status, details)
|
| 92 |
+
)
|
| 93 |
+
if status is not CONFIG_H_OK:
|
| 94 |
+
self.warn(
|
| 95 |
+
"Python's pyconfig.h doesn't seem to support your compiler. "
|
| 96 |
+
"Reason: %s. "
|
| 97 |
+
"Compiling may fail because of undefined preprocessor macros." % details
|
| 98 |
+
)
|
| 99 |
+
|
| 100 |
+
self.cc = os.environ.get('CC', 'gcc')
|
| 101 |
+
self.cxx = os.environ.get('CXX', 'g++')
|
| 102 |
+
|
| 103 |
+
self.linker_dll = self.cc
|
| 104 |
+
shared_option = "-shared"
|
| 105 |
+
|
| 106 |
+
self.set_executables(
|
| 107 |
+
compiler='%s -mcygwin -O -Wall' % self.cc,
|
| 108 |
+
compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc,
|
| 109 |
+
compiler_cxx='%s -mcygwin -O -Wall' % self.cxx,
|
| 110 |
+
linker_exe='%s -mcygwin' % self.cc,
|
| 111 |
+
linker_so=('{} -mcygwin {}'.format(self.linker_dll, shared_option)),
|
| 112 |
+
)
|
| 113 |
+
|
| 114 |
+
# Include the appropriate MSVC runtime library if Python was built
|
| 115 |
+
# with MSVC 7.0 or later.
|
| 116 |
+
self.dll_libraries = get_msvcr()
|
| 117 |
+
|
| 118 |
+
@property
|
| 119 |
+
def gcc_version(self):
|
| 120 |
+
# Older numpy depended on this existing to check for ancient
|
| 121 |
+
# gcc versions. This doesn't make much sense with clang etc so
|
| 122 |
+
# just hardcode to something recent.
|
| 123 |
+
# https://github.com/numpy/numpy/pull/20333
|
| 124 |
+
warnings.warn(
|
| 125 |
+
"gcc_version attribute of CygwinCCompiler is deprecated. "
|
| 126 |
+
"Instead of returning actual gcc version a fixed value 11.2.0 is returned.",
|
| 127 |
+
DeprecationWarning,
|
| 128 |
+
stacklevel=2,
|
| 129 |
+
)
|
| 130 |
+
with suppress_known_deprecation():
|
| 131 |
+
return LooseVersion("11.2.0")
|
| 132 |
+
|
| 133 |
+
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
|
| 134 |
+
"""Compiles the source by spawning GCC and windres if needed."""
|
| 135 |
+
if ext in ('.rc', '.res'):
|
| 136 |
+
# gcc needs '.res' and '.rc' compiled to object files !!!
|
| 137 |
+
try:
|
| 138 |
+
self.spawn(["windres", "-i", src, "-o", obj])
|
| 139 |
+
except DistutilsExecError as msg:
|
| 140 |
+
raise CompileError(msg)
|
| 141 |
+
else: # for other files use the C-compiler
|
| 142 |
+
try:
|
| 143 |
+
self.spawn(
|
| 144 |
+
self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs
|
| 145 |
+
)
|
| 146 |
+
except DistutilsExecError as msg:
|
| 147 |
+
raise CompileError(msg)
|
| 148 |
+
|
| 149 |
+
def link(
|
| 150 |
+
self,
|
| 151 |
+
target_desc,
|
| 152 |
+
objects,
|
| 153 |
+
output_filename,
|
| 154 |
+
output_dir=None,
|
| 155 |
+
libraries=None,
|
| 156 |
+
library_dirs=None,
|
| 157 |
+
runtime_library_dirs=None,
|
| 158 |
+
export_symbols=None,
|
| 159 |
+
debug=0,
|
| 160 |
+
extra_preargs=None,
|
| 161 |
+
extra_postargs=None,
|
| 162 |
+
build_temp=None,
|
| 163 |
+
target_lang=None,
|
| 164 |
+
):
|
| 165 |
+
"""Link the objects."""
|
| 166 |
+
# use separate copies, so we can modify the lists
|
| 167 |
+
extra_preargs = copy.copy(extra_preargs or [])
|
| 168 |
+
libraries = copy.copy(libraries or [])
|
| 169 |
+
objects = copy.copy(objects or [])
|
| 170 |
+
|
| 171 |
+
if runtime_library_dirs:
|
| 172 |
+
self.warn(_runtime_library_dirs_msg)
|
| 173 |
+
|
| 174 |
+
# Additional libraries
|
| 175 |
+
libraries.extend(self.dll_libraries)
|
| 176 |
+
|
| 177 |
+
# handle export symbols by creating a def-file
|
| 178 |
+
# with executables this only works with gcc/ld as linker
|
| 179 |
+
if (export_symbols is not None) and (
|
| 180 |
+
target_desc != self.EXECUTABLE or self.linker_dll == "gcc"
|
| 181 |
+
):
|
| 182 |
+
# (The linker doesn't do anything if output is up-to-date.
|
| 183 |
+
# So it would probably better to check if we really need this,
|
| 184 |
+
# but for this we had to insert some unchanged parts of
|
| 185 |
+
# UnixCCompiler, and this is not what we want.)
|
| 186 |
+
|
| 187 |
+
# we want to put some files in the same directory as the
|
| 188 |
+
# object files are, build_temp doesn't help much
|
| 189 |
+
# where are the object files
|
| 190 |
+
temp_dir = os.path.dirname(objects[0])
|
| 191 |
+
# name of dll to give the helper files the same base name
|
| 192 |
+
(dll_name, dll_extension) = os.path.splitext(
|
| 193 |
+
os.path.basename(output_filename)
|
| 194 |
+
)
|
| 195 |
+
|
| 196 |
+
# generate the filenames for these files
|
| 197 |
+
def_file = os.path.join(temp_dir, dll_name + ".def")
|
| 198 |
+
|
| 199 |
+
# Generate .def file
|
| 200 |
+
contents = ["LIBRARY %s" % os.path.basename(output_filename), "EXPORTS"]
|
| 201 |
+
for sym in export_symbols:
|
| 202 |
+
contents.append(sym)
|
| 203 |
+
self.execute(write_file, (def_file, contents), "writing %s" % def_file)
|
| 204 |
+
|
| 205 |
+
# next add options for def-file
|
| 206 |
+
|
| 207 |
+
# for gcc/ld the def-file is specified as any object files
|
| 208 |
+
objects.append(def_file)
|
| 209 |
+
|
| 210 |
+
# end: if ((export_symbols is not None) and
|
| 211 |
+
# (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
|
| 212 |
+
|
| 213 |
+
# who wants symbols and a many times larger output file
|
| 214 |
+
# should explicitly switch the debug mode on
|
| 215 |
+
# otherwise we let ld strip the output file
|
| 216 |
+
# (On my machine: 10KiB < stripped_file < ??100KiB
|
| 217 |
+
# unstripped_file = stripped_file + XXX KiB
|
| 218 |
+
# ( XXX=254 for a typical python extension))
|
| 219 |
+
if not debug:
|
| 220 |
+
extra_preargs.append("-s")
|
| 221 |
+
|
| 222 |
+
UnixCCompiler.link(
|
| 223 |
+
self,
|
| 224 |
+
target_desc,
|
| 225 |
+
objects,
|
| 226 |
+
output_filename,
|
| 227 |
+
output_dir,
|
| 228 |
+
libraries,
|
| 229 |
+
library_dirs,
|
| 230 |
+
runtime_library_dirs,
|
| 231 |
+
None, # export_symbols, we do this in our def-file
|
| 232 |
+
debug,
|
| 233 |
+
extra_preargs,
|
| 234 |
+
extra_postargs,
|
| 235 |
+
build_temp,
|
| 236 |
+
target_lang,
|
| 237 |
+
)
|
| 238 |
+
|
| 239 |
+
def runtime_library_dir_option(self, dir):
|
| 240 |
+
# cygwin doesn't support rpath. While in theory we could error
|
| 241 |
+
# out like MSVC does, code might expect it to work like on Unix, so
|
| 242 |
+
# just warn and hope for the best.
|
| 243 |
+
self.warn(_runtime_library_dirs_msg)
|
| 244 |
+
return []
|
| 245 |
+
|
| 246 |
+
# -- Miscellaneous methods -----------------------------------------
|
| 247 |
+
|
| 248 |
+
def _make_out_path(self, output_dir, strip_dir, src_name):
|
| 249 |
+
# use normcase to make sure '.rc' is really '.rc' and not '.RC'
|
| 250 |
+
norm_src_name = os.path.normcase(src_name)
|
| 251 |
+
return super()._make_out_path(output_dir, strip_dir, norm_src_name)
|
| 252 |
+
|
| 253 |
+
@property
|
| 254 |
+
def out_extensions(self):
|
| 255 |
+
"""
|
| 256 |
+
Add support for rc and res files.
|
| 257 |
+
"""
|
| 258 |
+
return {
|
| 259 |
+
**super().out_extensions,
|
| 260 |
+
**{ext: ext + self.obj_extension for ext in ('.res', '.rc')},
|
| 261 |
+
}
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
# the same as cygwin plus some additional parameters
class Mingw32CCompiler(CygwinCCompiler):
    """Handles the Mingw32 port of the GNU C compiler to Windows."""

    compiler_type = 'mingw32'

    def __init__(self, verbose=0, dry_run=0, force=0):
        """Reconfigure the inherited command lines without the -mcygwin
        flag; MinGW targets native Windows, not the Cygwin runtime."""
        super().__init__(verbose, dry_run, force)

        shared_option = "-shared"

        # A Cygwin-targeting gcc would link against cygwin1.dll, which is
        # incompatible with the mingw32 compiler type.
        if is_cygwincc(self.cc):
            raise CCompilerError('Cygwin gcc cannot be used with --compiler=mingw32')

        self.set_executables(
            compiler='%s -O -Wall' % self.cc,
            compiler_so='%s -mdll -O -Wall' % self.cc,
            compiler_cxx='%s -O -Wall' % self.cxx,
            linker_exe='%s' % self.cc,
            linker_so='{} {}'.format(self.linker_dll, shared_option),
        )

    def runtime_library_dir_option(self, dir):
        # Unlike the Cygwin variant (which only warns), MinGW treats a
        # runtime_library_dirs request as a hard error.
        raise DistutilsPlatformError(_runtime_library_dirs_msg)
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
# Because these compilers aren't configured in Python's pyconfig.h file by
|
| 291 |
+
# default, we should at least warn the user if he is using an unmodified
|
| 292 |
+
# version.
|
| 293 |
+
|
| 294 |
+
CONFIG_H_OK = "ok"  # pyconfig.h (or sys.version) indicates a GCC/Clang build
CONFIG_H_NOTOK = "not ok"  # pyconfig.h exists but does not mention __GNUC__
CONFIG_H_UNCERTAIN = "uncertain"  # pyconfig.h could not be read


def check_config_h():
    """Check if the current Python installation appears amenable to building
    extensions with GCC.

    Returns a tuple (status, details), where 'status' is one of the following
    constants:

    - CONFIG_H_OK: all is well, go ahead and compile
    - CONFIG_H_NOTOK: doesn't look good
    - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h

    'details' is a human-readable string explaining the situation.

    Note there are two ways to conclude "OK": either 'sys.version' contains
    the string "GCC" (implying that this Python was built with GCC), or the
    installed "pyconfig.h" contains the string "__GNUC__".
    """

    # XXX since this function also checks sys.version, it's not strictly a
    # "pyconfig.h" check -- should probably be renamed...

    from distutils import sysconfig

    # if sys.version contains GCC then python was compiled with GCC, and the
    # pyconfig.h file should be OK
    if "GCC" in sys.version:
        return CONFIG_H_OK, "sys.version mentions 'GCC'"

    # Clang would also work
    if "Clang" in sys.version:
        return CONFIG_H_OK, "sys.version mentions 'Clang'"

    # let's see if __GNUC__ is mentioned in python.h
    fn = sysconfig.get_config_h_filename()
    try:
        # Context manager replaces the old try/finally close() pairing;
        # an open() failure is still caught by the OSError handler below.
        with open(fn) as config_h:
            if "__GNUC__" in config_h.read():
                return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn
            return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn
    except OSError as exc:
        return (CONFIG_H_UNCERTAIN, "couldn't read '{}': {}".format(fn, exc.strerror))
|
| 344 |
+
|
| 345 |
+
|
| 346 |
+
def is_cygwincc(cc):
    '''Try to determine if the compiler that would be used is from cygwin.'''
    # Ask the compiler for its target triplet; Cygwin toolchains report
    # something ending in "cygwin" (e.g. "x86_64-pc-cygwin").
    # check_output returns bytes, hence the b'cygwin' comparison.
    out_string = check_output(shlex.split(cc) + ['-dumpmachine'])
    return out_string.strip().endswith(b'cygwin')
|
| 350 |
+
|
| 351 |
+
|
| 352 |
+
get_versions = None  # deliberately not callable; exists only as a monkeypatch target
"""
A stand-in for the previous get_versions() function to prevent failures
when monkeypatched. See pypa/setuptools#2969.
"""
|
.venv/Lib/site-packages/setuptools/_distutils/debug.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
|
| 3 |
+
# If DISTUTILS_DEBUG is anything other than the empty string, we run in
# debug mode.  Note this captures the raw string (or None), not a bool;
# callers rely only on its truthiness.
DEBUG = os.environ.get('DISTUTILS_DEBUG')
|
.venv/Lib/site-packages/setuptools/_distutils/dep_util.py
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.dep_util
|
| 2 |
+
|
| 3 |
+
Utility functions for simple, timestamp-based dependency of files
|
| 4 |
+
and groups of files; also, function based entirely on such
|
| 5 |
+
timestamp dependency analysis."""
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
from .errors import DistutilsFileError
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def newer(source, target):
    """Return True if 'source' exists and is more recently modified than
    'target', or if 'source' exists and 'target' doesn't.  Return False if
    both exist and 'target' is the same age or younger than 'source'.
    Raise DistutilsFileError if 'source' does not exist.

    Booleans replace the historical 0/1 return values; callers test
    truthiness (and True == 1), so this is backward-compatible.
    """
    if not os.path.exists(source):
        raise DistutilsFileError("file '%s' does not exist" % os.path.abspath(source))
    if not os.path.exists(target):
        # A missing target is always out of date.
        return True

    # ST_MTIME yields whole-second integer timestamps, matching the
    # original coarse comparison semantics (same-second files are "not newer").
    from stat import ST_MTIME

    return os.stat(source)[ST_MTIME] > os.stat(target)[ST_MTIME]
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
# newer ()
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def newer_pairwise(sources, targets):
    """Walk two filename lists in parallel, testing if each source is newer
    than its corresponding target.  Return a pair of lists (sources,
    targets) where source is newer than target, according to the semantics
    of 'newer()'.

    Raises ValueError if the lists differ in length.
    """
    if len(sources) != len(targets):
        raise ValueError("'sources' and 'targets' must be same length")

    # build a pair of lists (sources, targets) where source is newer;
    # zip replaces the old index-based range(len(...)) loop.
    n_sources = []
    n_targets = []
    for source, target in zip(sources, targets):
        if newer(source, target):
            n_sources.append(source)
            n_targets.append(target)

    return (n_sources, n_targets)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
# newer_pairwise ()
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def newer_group(sources, target, missing='error'):
    """Return true if 'target' is out-of-date with respect to any file
    listed in 'sources'.  In other words, if 'target' exists and is newer
    than every file in 'sources', return false; otherwise return true.
    'missing' controls what we do when a source file is missing; the
    default ("error") is to blow up with an OSError from inside 'stat()';
    if it is "ignore", we silently drop any missing source files; if it is
    "newer", any missing source files make us assume that 'target' is
    out-of-date (this is handy in "dry-run" mode: it'll make you pretend to
    carry out commands that wouldn't work because inputs are missing, but
    that doesn't matter because you're not actually going to run the
    commands).
    """
    # If the target doesn't even exist, then it's definitely out-of-date.
    if not os.path.exists(target):
        return 1

    # Otherwise we have to find out the hard way: if *any* source file
    # is more recent than 'target', then 'target' is out-of-date and
    # we can immediately return true.  If we fall through to the end
    # of the loop, then 'target' is up-to-date and we return false.
    from stat import ST_MTIME

    target_mtime = os.stat(target)[ST_MTIME]
    for source in sources:
        if not os.path.exists(source):
            if missing == 'error':  # blow up when we stat() the file
                # Deliberate fall-through: the os.stat() below raises OSError.
                pass
            elif missing == 'ignore':  # missing source dropped from
                continue  # target's dependency list
            elif missing == 'newer':  # missing source means target is
                return 1  # out-of-date

        source_mtime = os.stat(source)[ST_MTIME]
        if source_mtime > target_mtime:
            return 1
    else:
        # NOTE: for/else -- runs only when the loop completed without a
        # 'return', i.e. every source was at least as old as the target.
        return 0
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
# newer_group ()
|
.venv/Lib/site-packages/setuptools/_distutils/dir_util.py
ADDED
|
@@ -0,0 +1,243 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.dir_util
|
| 2 |
+
|
| 3 |
+
Utility functions for manipulating directories and directory trees."""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import errno
|
| 7 |
+
from .errors import DistutilsInternalError, DistutilsFileError
|
| 8 |
+
from ._log import log
|
| 9 |
+
|
| 10 |
+
# cache used by mkpath() -- in addition to cheapening redundant calls,
# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
_path_created = {}


def mkpath(name, mode=0o777, verbose=1, dry_run=0):  # noqa: C901
    """Create a directory and any missing ancestor directories.

    If the directory already exists (or if 'name' is the empty string, which
    means the current directory, which of course exists), then do nothing.
    Raise DistutilsFileError if unable to create some directory along the way
    (eg. some sub-path exists, but is a file rather than a directory).
    If 'verbose' is true, print a one-line summary of each mkdir to stdout.
    Return the list of directories actually created.

    os.makedirs is not used because:

    a) It's new to Python 1.5.2, and
    b) it blows up if the directory already exists (in which case it should
       silently succeed).
    """

    global _path_created

    # Detect a common bug -- name is None
    if not isinstance(name, str):
        raise DistutilsInternalError(
            "mkpath: 'name' must be a string (got {!r})".format(name)
        )

    # XXX what's the better way to handle verbosity? print as we create
    # each directory in the path (the current behaviour), or only announce
    # the creation of the whole path? (quite easy to do the latter since
    # we're not using a recursive algorithm)

    name = os.path.normpath(name)
    created_dirs = []
    if os.path.isdir(name) or name == '':
        return created_dirs
    if _path_created.get(os.path.abspath(name)):
        # Already handled by an earlier call (possibly a dry run).
        return created_dirs

    (head, tail) = os.path.split(name)
    tails = [tail]  # stack of lone dirs to create

    while head and tail and not os.path.isdir(head):
        (head, tail) = os.path.split(head)
        tails.insert(0, tail)  # push next higher dir onto stack

    # now 'head' contains the deepest directory that already exists
    # (that is, the child of 'head' in 'name' is the highest directory
    # that does *not* exist)
    for d in tails:
        # print "head = %s, d = %s: " % (head, d),
        head = os.path.join(head, d)
        abs_head = os.path.abspath(head)

        if _path_created.get(abs_head):
            continue

        if verbose >= 1:
            log.info("creating %s", head)

        if not dry_run:
            try:
                os.mkdir(head, mode)
            except OSError as exc:
                # EEXIST from a concurrent creator is tolerated; anything
                # else (including EEXIST over a non-directory) is fatal.
                if not (exc.errno == errno.EEXIST and os.path.isdir(head)):
                    raise DistutilsFileError(
                        "could not create '{}': {}".format(head, exc.args[-1])
                    )
            created_dirs.append(head)

        _path_created[abs_head] = 1
    return created_dirs
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
    """Create all the empty directories under 'base_dir' needed to put 'files'
    there.

    'base_dir' is just the name of a directory which doesn't necessarily
    exist yet; 'files' is a list of filenames to be interpreted relative to
    'base_dir'.  'base_dir' + the directory portion of every file in 'files'
    will be created if it doesn't already exist.  'mode', 'verbose' and
    'dry_run' flags are as for 'mkpath()'.
    """
    # Collect the unique set of directories implied by the file names,
    # then create them in sorted (parents-first) order.
    need_dir = {os.path.join(base_dir, os.path.dirname(file)) for file in files}

    for dir in sorted(need_dir):
        mkpath(dir, mode, verbose=verbose, dry_run=dry_run)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def copy_tree(  # noqa: C901
    src,
    dst,
    preserve_mode=1,
    preserve_times=1,
    preserve_symlinks=0,
    update=0,
    verbose=1,
    dry_run=0,
):
    """Copy an entire directory tree 'src' to a new location 'dst'.

    Both 'src' and 'dst' must be directory names.  If 'src' is not a
    directory, raise DistutilsFileError.  If 'dst' does not exist, it is
    created with 'mkpath()'.  The end result of the copy is that every
    file in 'src' is copied to 'dst', and directories under 'src' are
    recursively copied to 'dst'.  Return the list of files that were
    copied or might have been copied, using their output name.  The
    return value is unaffected by 'update' or 'dry_run': it is simply
    the list of all files under 'src', with the names changed to be
    under 'dst'.

    'preserve_mode' and 'preserve_times' are the same as for
    'copy_file'; note that they only apply to regular files, not to
    directories.  If 'preserve_symlinks' is true, symlinks will be
    copied as symlinks (on platforms that support them!); otherwise
    (the default), the destination of the symlink will be copied.
    'update' and 'verbose' are the same as for 'copy_file'.
    """
    from distutils.file_util import copy_file

    if not dry_run and not os.path.isdir(src):
        raise DistutilsFileError("cannot copy tree '%s': not a directory" % src)
    try:
        names = os.listdir(src)
    except OSError as e:
        # In dry-run mode a missing/unreadable source is tolerated so the
        # rest of the simulated walk can proceed.
        if dry_run:
            names = []
        else:
            raise DistutilsFileError(
                "error listing files in '{}': {}".format(src, e.strerror)
            )

    if not dry_run:
        mkpath(dst, verbose=verbose)

    outputs = []

    for n in names:
        src_name = os.path.join(src, n)
        dst_name = os.path.join(dst, n)

        if n.startswith('.nfs'):
            # skip NFS rename files
            continue

        if preserve_symlinks and os.path.islink(src_name):
            # Re-create the symlink rather than copying its target.
            link_dest = os.readlink(src_name)
            if verbose >= 1:
                log.info("linking %s -> %s", dst_name, link_dest)
            if not dry_run:
                os.symlink(link_dest, dst_name)
            outputs.append(dst_name)

        elif os.path.isdir(src_name):
            # Recurse into subdirectories, accumulating their outputs.
            outputs.extend(
                copy_tree(
                    src_name,
                    dst_name,
                    preserve_mode,
                    preserve_times,
                    preserve_symlinks,
                    update,
                    verbose=verbose,
                    dry_run=dry_run,
                )
            )
        else:
            copy_file(
                src_name,
                dst_name,
                preserve_mode,
                preserve_times,
                update,
                verbose=verbose,
                dry_run=dry_run,
            )
            outputs.append(dst_name)

    return outputs
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
def _build_cmdtuple(path, cmdtuples):
|
| 200 |
+
"""Helper for remove_tree()."""
|
| 201 |
+
for f in os.listdir(path):
|
| 202 |
+
real_f = os.path.join(path, f)
|
| 203 |
+
if os.path.isdir(real_f) and not os.path.islink(real_f):
|
| 204 |
+
_build_cmdtuple(real_f, cmdtuples)
|
| 205 |
+
else:
|
| 206 |
+
cmdtuples.append((os.remove, real_f))
|
| 207 |
+
cmdtuples.append((os.rmdir, path))
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
def remove_tree(directory, verbose=1, dry_run=0):
    """Recursively remove an entire directory tree.

    Any errors are ignored (apart from being reported to stdout if 'verbose'
    is true).
    """
    global _path_created

    if verbose >= 1:
        log.info("removing '%s' (and everything under it)", directory)
    if dry_run:
        return
    # Build the full deletion plan first, then execute it; each step is
    # a (callable, path) pair produced by _build_cmdtuple().
    cmdtuples = []
    _build_cmdtuple(directory, cmdtuples)
    for cmd in cmdtuples:
        try:
            cmd[0](cmd[1])
            # remove dir from cache if it's already there
            abspath = os.path.abspath(cmd[1])
            if abspath in _path_created:
                _path_created.pop(abspath)
        except OSError as exc:
            # Best-effort removal: log and continue with remaining entries.
            log.warning("error removing %s: %s", directory, exc)
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
def ensure_relative(path):
    """Take the full path 'path', and make it a relative path.

    Strips a single leading separator (folding any drive prefix back in
    front of the remainder), so the result is usable as the second
    argument to os.path.join().
    """
    drive, tail = os.path.splitdrive(path)
    if tail.startswith(os.sep):
        tail = drive + tail[1:]
    return tail
|
.venv/Lib/site-packages/setuptools/_distutils/dist.py
ADDED
|
@@ -0,0 +1,1287 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.dist
|
| 2 |
+
|
| 3 |
+
Provides the Distribution class, which represents the module distribution
|
| 4 |
+
being built/installed/distributed.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import sys
|
| 8 |
+
import os
|
| 9 |
+
import re
|
| 10 |
+
import pathlib
|
| 11 |
+
import contextlib
|
| 12 |
+
import logging
|
| 13 |
+
from email import message_from_file
|
| 14 |
+
|
| 15 |
+
try:
|
| 16 |
+
import warnings
|
| 17 |
+
except ImportError:
|
| 18 |
+
warnings = None
|
| 19 |
+
|
| 20 |
+
from .errors import (
|
| 21 |
+
DistutilsOptionError,
|
| 22 |
+
DistutilsModuleError,
|
| 23 |
+
DistutilsArgError,
|
| 24 |
+
DistutilsClassError,
|
| 25 |
+
)
|
| 26 |
+
from .fancy_getopt import FancyGetopt, translate_longopt
|
| 27 |
+
from .util import check_environ, strtobool, rfc822_escape
|
| 28 |
+
from ._log import log
|
| 29 |
+
from .debug import DEBUG
|
| 30 |
+
|
| 31 |
+
# Regex to define acceptable Distutils command names. This is not *quite*
|
| 32 |
+
# the same as a Python NAME -- I don't allow leading underscores. The fact
|
| 33 |
+
# that they're very similar is no coincidence; the default naming scheme is
|
| 34 |
+
# to look for a Python module named after the command.
|
| 35 |
+
command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def _ensure_list(value, fieldname):
|
| 39 |
+
if isinstance(value, str):
|
| 40 |
+
# a string containing comma separated values is okay. It will
|
| 41 |
+
# be converted to a list by Distribution.finalize_options().
|
| 42 |
+
pass
|
| 43 |
+
elif not isinstance(value, list):
|
| 44 |
+
# passing a tuple or an iterator perhaps, warn and convert
|
| 45 |
+
typename = type(value).__name__
|
| 46 |
+
msg = "Warning: '{fieldname}' should be a list, got type '{typename}'"
|
| 47 |
+
msg = msg.format(**locals())
|
| 48 |
+
log.warning(msg)
|
| 49 |
+
value = list(value)
|
| 50 |
+
return value
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class Distribution:
|
| 54 |
+
"""The core of the Distutils. Most of the work hiding behind 'setup'
|
| 55 |
+
is really done within a Distribution instance, which farms the work out
|
| 56 |
+
to the Distutils commands specified on the command line.
|
| 57 |
+
|
| 58 |
+
Setup scripts will almost never instantiate Distribution directly,
|
| 59 |
+
unless the 'setup()' function is totally inadequate to their needs.
|
| 60 |
+
However, it is conceivable that a setup script might wish to subclass
|
| 61 |
+
Distribution for some specialized purpose, and then pass the subclass
|
| 62 |
+
to 'setup()' as the 'distclass' keyword argument. If so, it is
|
| 63 |
+
necessary to respect the expectations that 'setup' has of Distribution.
|
| 64 |
+
See the code for 'setup()', in core.py, for details.
|
| 65 |
+
"""
|
| 66 |
+
|
| 67 |
+
# 'global_options' describes the command-line options that may be
|
| 68 |
+
# supplied to the setup script prior to any actual commands.
|
| 69 |
+
# Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of
|
| 70 |
+
# these global options. This list should be kept to a bare minimum,
|
| 71 |
+
# since every global option is also valid as a command option -- and we
|
| 72 |
+
# don't want to pollute the commands with too many options that they
|
| 73 |
+
# have minimal control over.
|
| 74 |
+
# The fourth entry for verbose means that it can be repeated.
|
| 75 |
+
global_options = [
|
| 76 |
+
('verbose', 'v', "run verbosely (default)", 1),
|
| 77 |
+
('quiet', 'q', "run quietly (turns verbosity off)"),
|
| 78 |
+
('dry-run', 'n', "don't actually do anything"),
|
| 79 |
+
('help', 'h', "show detailed help message"),
|
| 80 |
+
('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'),
|
| 81 |
+
]
|
| 82 |
+
|
| 83 |
+
# 'common_usage' is a short (2-3 line) string describing the common
|
| 84 |
+
# usage of the setup script.
|
| 85 |
+
common_usage = """\
|
| 86 |
+
Common commands: (see '--help-commands' for more)
|
| 87 |
+
|
| 88 |
+
setup.py build will build the package underneath 'build/'
|
| 89 |
+
setup.py install will install the package
|
| 90 |
+
"""
|
| 91 |
+
|
| 92 |
+
# options that are not propagated to the commands
|
| 93 |
+
display_options = [
|
| 94 |
+
('help-commands', None, "list all available commands"),
|
| 95 |
+
('name', None, "print package name"),
|
| 96 |
+
('version', 'V', "print package version"),
|
| 97 |
+
('fullname', None, "print <package name>-<version>"),
|
| 98 |
+
('author', None, "print the author's name"),
|
| 99 |
+
('author-email', None, "print the author's email address"),
|
| 100 |
+
('maintainer', None, "print the maintainer's name"),
|
| 101 |
+
('maintainer-email', None, "print the maintainer's email address"),
|
| 102 |
+
('contact', None, "print the maintainer's name if known, else the author's"),
|
| 103 |
+
(
|
| 104 |
+
'contact-email',
|
| 105 |
+
None,
|
| 106 |
+
"print the maintainer's email address if known, else the author's",
|
| 107 |
+
),
|
| 108 |
+
('url', None, "print the URL for this package"),
|
| 109 |
+
('license', None, "print the license of the package"),
|
| 110 |
+
('licence', None, "alias for --license"),
|
| 111 |
+
('description', None, "print the package description"),
|
| 112 |
+
('long-description', None, "print the long package description"),
|
| 113 |
+
('platforms', None, "print the list of platforms"),
|
| 114 |
+
('classifiers', None, "print the list of classifiers"),
|
| 115 |
+
('keywords', None, "print the list of keywords"),
|
| 116 |
+
('provides', None, "print the list of packages/modules provided"),
|
| 117 |
+
('requires', None, "print the list of packages/modules required"),
|
| 118 |
+
('obsoletes', None, "print the list of packages/modules made obsolete"),
|
| 119 |
+
]
|
| 120 |
+
display_option_names = [translate_longopt(x[0]) for x in display_options]
|
| 121 |
+
|
| 122 |
+
# negative options are options that exclude other options
|
| 123 |
+
negative_opt = {'quiet': 'verbose'}
|
| 124 |
+
|
| 125 |
+
# -- Creation/initialization methods -------------------------------
|
| 126 |
+
|
| 127 |
+
def __init__(self, attrs=None): # noqa: C901
|
| 128 |
+
"""Construct a new Distribution instance: initialize all the
|
| 129 |
+
attributes of a Distribution, and then use 'attrs' (a dictionary
|
| 130 |
+
mapping attribute names to values) to assign some of those
|
| 131 |
+
attributes their "real" values. (Any attributes not mentioned in
|
| 132 |
+
'attrs' will be assigned to some null value: 0, None, an empty list
|
| 133 |
+
or dictionary, etc.) Most importantly, initialize the
|
| 134 |
+
'command_obj' attribute to the empty dictionary; this will be
|
| 135 |
+
filled in with real command objects by 'parse_command_line()'.
|
| 136 |
+
"""
|
| 137 |
+
|
| 138 |
+
# Default values for our command-line options
|
| 139 |
+
self.verbose = 1
|
| 140 |
+
self.dry_run = 0
|
| 141 |
+
self.help = 0
|
| 142 |
+
for attr in self.display_option_names:
|
| 143 |
+
setattr(self, attr, 0)
|
| 144 |
+
|
| 145 |
+
# Store the distribution meta-data (name, version, author, and so
|
| 146 |
+
# forth) in a separate object -- we're getting to have enough
|
| 147 |
+
# information here (and enough command-line options) that it's
|
| 148 |
+
# worth it. Also delegate 'get_XXX()' methods to the 'metadata'
|
| 149 |
+
# object in a sneaky and underhanded (but efficient!) way.
|
| 150 |
+
self.metadata = DistributionMetadata()
|
| 151 |
+
for basename in self.metadata._METHOD_BASENAMES:
|
| 152 |
+
method_name = "get_" + basename
|
| 153 |
+
setattr(self, method_name, getattr(self.metadata, method_name))
|
| 154 |
+
|
| 155 |
+
# 'cmdclass' maps command names to class objects, so we
|
| 156 |
+
# can 1) quickly figure out which class to instantiate when
|
| 157 |
+
# we need to create a new command object, and 2) have a way
|
| 158 |
+
# for the setup script to override command classes
|
| 159 |
+
self.cmdclass = {}
|
| 160 |
+
|
| 161 |
+
# 'command_packages' is a list of packages in which commands
|
| 162 |
+
# are searched for. The factory for command 'foo' is expected
|
| 163 |
+
# to be named 'foo' in the module 'foo' in one of the packages
|
| 164 |
+
# named here. This list is searched from the left; an error
|
| 165 |
+
# is raised if no named package provides the command being
|
| 166 |
+
# searched for. (Always access using get_command_packages().)
|
| 167 |
+
self.command_packages = None
|
| 168 |
+
|
| 169 |
+
# 'script_name' and 'script_args' are usually set to sys.argv[0]
|
| 170 |
+
# and sys.argv[1:], but they can be overridden when the caller is
|
| 171 |
+
# not necessarily a setup script run from the command-line.
|
| 172 |
+
self.script_name = None
|
| 173 |
+
self.script_args = None
|
| 174 |
+
|
| 175 |
+
# 'command_options' is where we store command options between
|
| 176 |
+
# parsing them (from config files, the command-line, etc.) and when
|
| 177 |
+
# they are actually needed -- ie. when the command in question is
|
| 178 |
+
# instantiated. It is a dictionary of dictionaries of 2-tuples:
|
| 179 |
+
# command_options = { command_name : { option : (source, value) } }
|
| 180 |
+
self.command_options = {}
|
| 181 |
+
|
| 182 |
+
# 'dist_files' is the list of (command, pyversion, file) that
|
| 183 |
+
# have been created by any dist commands run so far. This is
|
| 184 |
+
# filled regardless of whether the run is dry or not. pyversion
|
| 185 |
+
# gives sysconfig.get_python_version() if the dist file is
|
| 186 |
+
# specific to a Python version, 'any' if it is good for all
|
| 187 |
+
# Python versions on the target platform, and '' for a source
|
| 188 |
+
# file. pyversion should not be used to specify minimum or
|
| 189 |
+
# maximum required Python versions; use the metainfo for that
|
| 190 |
+
# instead.
|
| 191 |
+
self.dist_files = []
|
| 192 |
+
|
| 193 |
+
# These options are really the business of various commands, rather
|
| 194 |
+
# than of the Distribution itself. We provide aliases for them in
|
| 195 |
+
# Distribution as a convenience to the developer.
|
| 196 |
+
self.packages = None
|
| 197 |
+
self.package_data = {}
|
| 198 |
+
self.package_dir = None
|
| 199 |
+
self.py_modules = None
|
| 200 |
+
self.libraries = None
|
| 201 |
+
self.headers = None
|
| 202 |
+
self.ext_modules = None
|
| 203 |
+
self.ext_package = None
|
| 204 |
+
self.include_dirs = None
|
| 205 |
+
self.extra_path = None
|
| 206 |
+
self.scripts = None
|
| 207 |
+
self.data_files = None
|
| 208 |
+
self.password = ''
|
| 209 |
+
|
| 210 |
+
# And now initialize bookkeeping stuff that can't be supplied by
|
| 211 |
+
# the caller at all. 'command_obj' maps command names to
|
| 212 |
+
# Command instances -- that's how we enforce that every command
|
| 213 |
+
# class is a singleton.
|
| 214 |
+
self.command_obj = {}
|
| 215 |
+
|
| 216 |
+
# 'have_run' maps command names to boolean values; it keeps track
|
| 217 |
+
# of whether we have actually run a particular command, to make it
|
| 218 |
+
# cheap to "run" a command whenever we think we might need to -- if
|
| 219 |
+
# it's already been done, no need for expensive filesystem
|
| 220 |
+
# operations, we just check the 'have_run' dictionary and carry on.
|
| 221 |
+
# It's only safe to query 'have_run' for a command class that has
|
| 222 |
+
# been instantiated -- a false value will be inserted when the
|
| 223 |
+
# command object is created, and replaced with a true value when
|
| 224 |
+
# the command is successfully run. Thus it's probably best to use
|
| 225 |
+
# '.get()' rather than a straight lookup.
|
| 226 |
+
self.have_run = {}
|
| 227 |
+
|
| 228 |
+
# Now we'll use the attrs dictionary (ultimately, keyword args from
|
| 229 |
+
# the setup script) to possibly override any or all of these
|
| 230 |
+
# distribution options.
|
| 231 |
+
|
| 232 |
+
if attrs:
|
| 233 |
+
# Pull out the set of command options and work on them
|
| 234 |
+
# specifically. Note that this order guarantees that aliased
|
| 235 |
+
# command options will override any supplied redundantly
|
| 236 |
+
# through the general options dictionary.
|
| 237 |
+
options = attrs.get('options')
|
| 238 |
+
if options is not None:
|
| 239 |
+
del attrs['options']
|
| 240 |
+
for command, cmd_options in options.items():
|
| 241 |
+
opt_dict = self.get_option_dict(command)
|
| 242 |
+
for opt, val in cmd_options.items():
|
| 243 |
+
opt_dict[opt] = ("setup script", val)
|
| 244 |
+
|
| 245 |
+
if 'licence' in attrs:
|
| 246 |
+
attrs['license'] = attrs['licence']
|
| 247 |
+
del attrs['licence']
|
| 248 |
+
msg = "'licence' distribution option is deprecated; use 'license'"
|
| 249 |
+
if warnings is not None:
|
| 250 |
+
warnings.warn(msg)
|
| 251 |
+
else:
|
| 252 |
+
sys.stderr.write(msg + "\n")
|
| 253 |
+
|
| 254 |
+
# Now work on the rest of the attributes. Any attribute that's
|
| 255 |
+
# not already defined is invalid!
|
| 256 |
+
for key, val in attrs.items():
|
| 257 |
+
if hasattr(self.metadata, "set_" + key):
|
| 258 |
+
getattr(self.metadata, "set_" + key)(val)
|
| 259 |
+
elif hasattr(self.metadata, key):
|
| 260 |
+
setattr(self.metadata, key, val)
|
| 261 |
+
elif hasattr(self, key):
|
| 262 |
+
setattr(self, key, val)
|
| 263 |
+
else:
|
| 264 |
+
msg = "Unknown distribution option: %s" % repr(key)
|
| 265 |
+
warnings.warn(msg)
|
| 266 |
+
|
| 267 |
+
# no-user-cfg is handled before other command line args
|
| 268 |
+
# because other args override the config files, and this
|
| 269 |
+
# one is needed before we can load the config files.
|
| 270 |
+
# If attrs['script_args'] wasn't passed, assume false.
|
| 271 |
+
#
|
| 272 |
+
# This also make sure we just look at the global options
|
| 273 |
+
self.want_user_cfg = True
|
| 274 |
+
|
| 275 |
+
if self.script_args is not None:
|
| 276 |
+
for arg in self.script_args:
|
| 277 |
+
if not arg.startswith('-'):
|
| 278 |
+
break
|
| 279 |
+
if arg == '--no-user-cfg':
|
| 280 |
+
self.want_user_cfg = False
|
| 281 |
+
break
|
| 282 |
+
|
| 283 |
+
self.finalize_options()
|
| 284 |
+
|
| 285 |
+
def get_option_dict(self, command):
|
| 286 |
+
"""Get the option dictionary for a given command. If that
|
| 287 |
+
command's option dictionary hasn't been created yet, then create it
|
| 288 |
+
and return the new dictionary; otherwise, return the existing
|
| 289 |
+
option dictionary.
|
| 290 |
+
"""
|
| 291 |
+
dict = self.command_options.get(command)
|
| 292 |
+
if dict is None:
|
| 293 |
+
dict = self.command_options[command] = {}
|
| 294 |
+
return dict
|
| 295 |
+
|
| 296 |
+
def dump_option_dicts(self, header=None, commands=None, indent=""):
|
| 297 |
+
from pprint import pformat
|
| 298 |
+
|
| 299 |
+
if commands is None: # dump all command option dicts
|
| 300 |
+
commands = sorted(self.command_options.keys())
|
| 301 |
+
|
| 302 |
+
if header is not None:
|
| 303 |
+
self.announce(indent + header)
|
| 304 |
+
indent = indent + " "
|
| 305 |
+
|
| 306 |
+
if not commands:
|
| 307 |
+
self.announce(indent + "no commands known yet")
|
| 308 |
+
return
|
| 309 |
+
|
| 310 |
+
for cmd_name in commands:
|
| 311 |
+
opt_dict = self.command_options.get(cmd_name)
|
| 312 |
+
if opt_dict is None:
|
| 313 |
+
self.announce(indent + "no option dict for '%s' command" % cmd_name)
|
| 314 |
+
else:
|
| 315 |
+
self.announce(indent + "option dict for '%s' command:" % cmd_name)
|
| 316 |
+
out = pformat(opt_dict)
|
| 317 |
+
for line in out.split('\n'):
|
| 318 |
+
self.announce(indent + " " + line)
|
| 319 |
+
|
| 320 |
+
# -- Config file finding/parsing methods ---------------------------
|
| 321 |
+
|
| 322 |
+
def find_config_files(self):
|
| 323 |
+
"""Find as many configuration files as should be processed for this
|
| 324 |
+
platform, and return a list of filenames in the order in which they
|
| 325 |
+
should be parsed. The filenames returned are guaranteed to exist
|
| 326 |
+
(modulo nasty race conditions).
|
| 327 |
+
|
| 328 |
+
There are multiple possible config files:
|
| 329 |
+
- distutils.cfg in the Distutils installation directory (i.e.
|
| 330 |
+
where the top-level Distutils __inst__.py file lives)
|
| 331 |
+
- a file in the user's home directory named .pydistutils.cfg
|
| 332 |
+
on Unix and pydistutils.cfg on Windows/Mac; may be disabled
|
| 333 |
+
with the ``--no-user-cfg`` option
|
| 334 |
+
- setup.cfg in the current directory
|
| 335 |
+
- a file named by an environment variable
|
| 336 |
+
"""
|
| 337 |
+
check_environ()
|
| 338 |
+
files = [str(path) for path in self._gen_paths() if os.path.isfile(path)]
|
| 339 |
+
|
| 340 |
+
if DEBUG:
|
| 341 |
+
self.announce("using config files: %s" % ', '.join(files))
|
| 342 |
+
|
| 343 |
+
return files
|
| 344 |
+
|
| 345 |
+
def _gen_paths(self):
|
| 346 |
+
# The system-wide Distutils config file
|
| 347 |
+
sys_dir = pathlib.Path(sys.modules['distutils'].__file__).parent
|
| 348 |
+
yield sys_dir / "distutils.cfg"
|
| 349 |
+
|
| 350 |
+
# The per-user config file
|
| 351 |
+
prefix = '.' * (os.name == 'posix')
|
| 352 |
+
filename = prefix + 'pydistutils.cfg'
|
| 353 |
+
if self.want_user_cfg:
|
| 354 |
+
yield pathlib.Path('~').expanduser() / filename
|
| 355 |
+
|
| 356 |
+
# All platforms support local setup.cfg
|
| 357 |
+
yield pathlib.Path('setup.cfg')
|
| 358 |
+
|
| 359 |
+
# Additional config indicated in the environment
|
| 360 |
+
with contextlib.suppress(TypeError):
|
| 361 |
+
yield pathlib.Path(os.getenv("DIST_EXTRA_CONFIG"))
|
| 362 |
+
|
| 363 |
+
def parse_config_files(self, filenames=None): # noqa: C901
|
| 364 |
+
from configparser import ConfigParser
|
| 365 |
+
|
| 366 |
+
# Ignore install directory options if we have a venv
|
| 367 |
+
if sys.prefix != sys.base_prefix:
|
| 368 |
+
ignore_options = [
|
| 369 |
+
'install-base',
|
| 370 |
+
'install-platbase',
|
| 371 |
+
'install-lib',
|
| 372 |
+
'install-platlib',
|
| 373 |
+
'install-purelib',
|
| 374 |
+
'install-headers',
|
| 375 |
+
'install-scripts',
|
| 376 |
+
'install-data',
|
| 377 |
+
'prefix',
|
| 378 |
+
'exec-prefix',
|
| 379 |
+
'home',
|
| 380 |
+
'user',
|
| 381 |
+
'root',
|
| 382 |
+
]
|
| 383 |
+
else:
|
| 384 |
+
ignore_options = []
|
| 385 |
+
|
| 386 |
+
ignore_options = frozenset(ignore_options)
|
| 387 |
+
|
| 388 |
+
if filenames is None:
|
| 389 |
+
filenames = self.find_config_files()
|
| 390 |
+
|
| 391 |
+
if DEBUG:
|
| 392 |
+
self.announce("Distribution.parse_config_files():")
|
| 393 |
+
|
| 394 |
+
parser = ConfigParser()
|
| 395 |
+
for filename in filenames:
|
| 396 |
+
if DEBUG:
|
| 397 |
+
self.announce(" reading %s" % filename)
|
| 398 |
+
parser.read(filename)
|
| 399 |
+
for section in parser.sections():
|
| 400 |
+
options = parser.options(section)
|
| 401 |
+
opt_dict = self.get_option_dict(section)
|
| 402 |
+
|
| 403 |
+
for opt in options:
|
| 404 |
+
if opt != '__name__' and opt not in ignore_options:
|
| 405 |
+
val = parser.get(section, opt)
|
| 406 |
+
opt = opt.replace('-', '_')
|
| 407 |
+
opt_dict[opt] = (filename, val)
|
| 408 |
+
|
| 409 |
+
# Make the ConfigParser forget everything (so we retain
|
| 410 |
+
# the original filenames that options come from)
|
| 411 |
+
parser.__init__()
|
| 412 |
+
|
| 413 |
+
# If there was a "global" section in the config file, use it
|
| 414 |
+
# to set Distribution options.
|
| 415 |
+
|
| 416 |
+
if 'global' in self.command_options:
|
| 417 |
+
for opt, (src, val) in self.command_options['global'].items():
|
| 418 |
+
alias = self.negative_opt.get(opt)
|
| 419 |
+
try:
|
| 420 |
+
if alias:
|
| 421 |
+
setattr(self, alias, not strtobool(val))
|
| 422 |
+
elif opt in ('verbose', 'dry_run'): # ugh!
|
| 423 |
+
setattr(self, opt, strtobool(val))
|
| 424 |
+
else:
|
| 425 |
+
setattr(self, opt, val)
|
| 426 |
+
except ValueError as msg:
|
| 427 |
+
raise DistutilsOptionError(msg)
|
| 428 |
+
|
| 429 |
+
# -- Command-line parsing methods ----------------------------------
|
| 430 |
+
|
| 431 |
+
def parse_command_line(self):
|
| 432 |
+
"""Parse the setup script's command line, taken from the
|
| 433 |
+
'script_args' instance attribute (which defaults to 'sys.argv[1:]'
|
| 434 |
+
-- see 'setup()' in core.py). This list is first processed for
|
| 435 |
+
"global options" -- options that set attributes of the Distribution
|
| 436 |
+
instance. Then, it is alternately scanned for Distutils commands
|
| 437 |
+
and options for that command. Each new command terminates the
|
| 438 |
+
options for the previous command. The allowed options for a
|
| 439 |
+
command are determined by the 'user_options' attribute of the
|
| 440 |
+
command class -- thus, we have to be able to load command classes
|
| 441 |
+
in order to parse the command line. Any error in that 'options'
|
| 442 |
+
attribute raises DistutilsGetoptError; any error on the
|
| 443 |
+
command-line raises DistutilsArgError. If no Distutils commands
|
| 444 |
+
were found on the command line, raises DistutilsArgError. Return
|
| 445 |
+
true if command-line was successfully parsed and we should carry
|
| 446 |
+
on with executing commands; false if no errors but we shouldn't
|
| 447 |
+
execute commands (currently, this only happens if user asks for
|
| 448 |
+
help).
|
| 449 |
+
"""
|
| 450 |
+
#
|
| 451 |
+
# We now have enough information to show the Macintosh dialog
|
| 452 |
+
# that allows the user to interactively specify the "command line".
|
| 453 |
+
#
|
| 454 |
+
toplevel_options = self._get_toplevel_options()
|
| 455 |
+
|
| 456 |
+
# We have to parse the command line a bit at a time -- global
|
| 457 |
+
# options, then the first command, then its options, and so on --
|
| 458 |
+
# because each command will be handled by a different class, and
|
| 459 |
+
# the options that are valid for a particular class aren't known
|
| 460 |
+
# until we have loaded the command class, which doesn't happen
|
| 461 |
+
# until we know what the command is.
|
| 462 |
+
|
| 463 |
+
self.commands = []
|
| 464 |
+
parser = FancyGetopt(toplevel_options + self.display_options)
|
| 465 |
+
parser.set_negative_aliases(self.negative_opt)
|
| 466 |
+
parser.set_aliases({'licence': 'license'})
|
| 467 |
+
args = parser.getopt(args=self.script_args, object=self)
|
| 468 |
+
option_order = parser.get_option_order()
|
| 469 |
+
logging.getLogger().setLevel(logging.WARN - 10 * self.verbose)
|
| 470 |
+
|
| 471 |
+
# for display options we return immediately
|
| 472 |
+
if self.handle_display_options(option_order):
|
| 473 |
+
return
|
| 474 |
+
while args:
|
| 475 |
+
args = self._parse_command_opts(parser, args)
|
| 476 |
+
if args is None: # user asked for help (and got it)
|
| 477 |
+
return
|
| 478 |
+
|
| 479 |
+
# Handle the cases of --help as a "global" option, ie.
|
| 480 |
+
# "setup.py --help" and "setup.py --help command ...". For the
|
| 481 |
+
# former, we show global options (--verbose, --dry-run, etc.)
|
| 482 |
+
# and display-only options (--name, --version, etc.); for the
|
| 483 |
+
# latter, we omit the display-only options and show help for
|
| 484 |
+
# each command listed on the command line.
|
| 485 |
+
if self.help:
|
| 486 |
+
self._show_help(
|
| 487 |
+
parser, display_options=len(self.commands) == 0, commands=self.commands
|
| 488 |
+
)
|
| 489 |
+
return
|
| 490 |
+
|
| 491 |
+
# Oops, no commands found -- an end-user error
|
| 492 |
+
if not self.commands:
|
| 493 |
+
raise DistutilsArgError("no commands supplied")
|
| 494 |
+
|
| 495 |
+
# All is well: return true
|
| 496 |
+
return True
|
| 497 |
+
|
| 498 |
+
def _get_toplevel_options(self):
|
| 499 |
+
"""Return the non-display options recognized at the top level.
|
| 500 |
+
|
| 501 |
+
This includes options that are recognized *only* at the top
|
| 502 |
+
level as well as options recognized for commands.
|
| 503 |
+
"""
|
| 504 |
+
return self.global_options + [
|
| 505 |
+
(
|
| 506 |
+
"command-packages=",
|
| 507 |
+
None,
|
| 508 |
+
"list of packages that provide distutils commands",
|
| 509 |
+
),
|
| 510 |
+
]
|
| 511 |
+
|
| 512 |
+
def _parse_command_opts(self, parser, args): # noqa: C901
|
| 513 |
+
"""Parse the command-line options for a single command.
|
| 514 |
+
'parser' must be a FancyGetopt instance; 'args' must be the list
|
| 515 |
+
of arguments, starting with the current command (whose options
|
| 516 |
+
we are about to parse). Returns a new version of 'args' with
|
| 517 |
+
the next command at the front of the list; will be the empty
|
| 518 |
+
list if there are no more commands on the command line. Returns
|
| 519 |
+
None if the user asked for help on this command.
|
| 520 |
+
"""
|
| 521 |
+
# late import because of mutual dependence between these modules
|
| 522 |
+
from distutils.cmd import Command
|
| 523 |
+
|
| 524 |
+
# Pull the current command from the head of the command line
|
| 525 |
+
command = args[0]
|
| 526 |
+
if not command_re.match(command):
|
| 527 |
+
raise SystemExit("invalid command name '%s'" % command)
|
| 528 |
+
self.commands.append(command)
|
| 529 |
+
|
| 530 |
+
# Dig up the command class that implements this command, so we
|
| 531 |
+
# 1) know that it's a valid command, and 2) know which options
|
| 532 |
+
# it takes.
|
| 533 |
+
try:
|
| 534 |
+
cmd_class = self.get_command_class(command)
|
| 535 |
+
except DistutilsModuleError as msg:
|
| 536 |
+
raise DistutilsArgError(msg)
|
| 537 |
+
|
| 538 |
+
# Require that the command class be derived from Command -- want
|
| 539 |
+
# to be sure that the basic "command" interface is implemented.
|
| 540 |
+
if not issubclass(cmd_class, Command):
|
| 541 |
+
raise DistutilsClassError(
|
| 542 |
+
"command class %s must subclass Command" % cmd_class
|
| 543 |
+
)
|
| 544 |
+
|
| 545 |
+
# Also make sure that the command object provides a list of its
|
| 546 |
+
# known options.
|
| 547 |
+
if not (
|
| 548 |
+
hasattr(cmd_class, 'user_options')
|
| 549 |
+
and isinstance(cmd_class.user_options, list)
|
| 550 |
+
):
|
| 551 |
+
msg = (
|
| 552 |
+
"command class %s must provide "
|
| 553 |
+
"'user_options' attribute (a list of tuples)"
|
| 554 |
+
)
|
| 555 |
+
raise DistutilsClassError(msg % cmd_class)
|
| 556 |
+
|
| 557 |
+
# If the command class has a list of negative alias options,
|
| 558 |
+
# merge it in with the global negative aliases.
|
| 559 |
+
negative_opt = self.negative_opt
|
| 560 |
+
if hasattr(cmd_class, 'negative_opt'):
|
| 561 |
+
negative_opt = negative_opt.copy()
|
| 562 |
+
negative_opt.update(cmd_class.negative_opt)
|
| 563 |
+
|
| 564 |
+
# Check for help_options in command class. They have a different
|
| 565 |
+
# format (tuple of four) so we need to preprocess them here.
|
| 566 |
+
if hasattr(cmd_class, 'help_options') and isinstance(
|
| 567 |
+
cmd_class.help_options, list
|
| 568 |
+
):
|
| 569 |
+
help_options = fix_help_options(cmd_class.help_options)
|
| 570 |
+
else:
|
| 571 |
+
help_options = []
|
| 572 |
+
|
| 573 |
+
# All commands support the global options too, just by adding
|
| 574 |
+
# in 'global_options'.
|
| 575 |
+
parser.set_option_table(
|
| 576 |
+
self.global_options + cmd_class.user_options + help_options
|
| 577 |
+
)
|
| 578 |
+
parser.set_negative_aliases(negative_opt)
|
| 579 |
+
(args, opts) = parser.getopt(args[1:])
|
| 580 |
+
if hasattr(opts, 'help') and opts.help:
|
| 581 |
+
self._show_help(parser, display_options=0, commands=[cmd_class])
|
| 582 |
+
return
|
| 583 |
+
|
| 584 |
+
if hasattr(cmd_class, 'help_options') and isinstance(
|
| 585 |
+
cmd_class.help_options, list
|
| 586 |
+
):
|
| 587 |
+
help_option_found = 0
|
| 588 |
+
for help_option, short, desc, func in cmd_class.help_options:
|
| 589 |
+
if hasattr(opts, parser.get_attr_name(help_option)):
|
| 590 |
+
help_option_found = 1
|
| 591 |
+
if callable(func):
|
| 592 |
+
func()
|
| 593 |
+
else:
|
| 594 |
+
raise DistutilsClassError(
|
| 595 |
+
"invalid help function %r for help option '%s': "
|
| 596 |
+
"must be a callable object (function, etc.)"
|
| 597 |
+
% (func, help_option)
|
| 598 |
+
)
|
| 599 |
+
|
| 600 |
+
if help_option_found:
|
| 601 |
+
return
|
| 602 |
+
|
| 603 |
+
# Put the options from the command-line into their official
|
| 604 |
+
# holding pen, the 'command_options' dictionary.
|
| 605 |
+
opt_dict = self.get_option_dict(command)
|
| 606 |
+
for name, value in vars(opts).items():
|
| 607 |
+
opt_dict[name] = ("command line", value)
|
| 608 |
+
|
| 609 |
+
return args
|
| 610 |
+
|
| 611 |
+
def finalize_options(self):
|
| 612 |
+
"""Set final values for all the options on the Distribution
|
| 613 |
+
instance, analogous to the .finalize_options() method of Command
|
| 614 |
+
objects.
|
| 615 |
+
"""
|
| 616 |
+
for attr in ('keywords', 'platforms'):
|
| 617 |
+
value = getattr(self.metadata, attr)
|
| 618 |
+
if value is None:
|
| 619 |
+
continue
|
| 620 |
+
if isinstance(value, str):
|
| 621 |
+
value = [elm.strip() for elm in value.split(',')]
|
| 622 |
+
setattr(self.metadata, attr, value)
|
| 623 |
+
|
| 624 |
+
def _show_help(self, parser, global_options=1, display_options=1, commands=[]):
|
| 625 |
+
"""Show help for the setup script command-line in the form of
|
| 626 |
+
several lists of command-line options. 'parser' should be a
|
| 627 |
+
FancyGetopt instance; do not expect it to be returned in the
|
| 628 |
+
same state, as its option table will be reset to make it
|
| 629 |
+
generate the correct help text.
|
| 630 |
+
|
| 631 |
+
If 'global_options' is true, lists the global options:
|
| 632 |
+
--verbose, --dry-run, etc. If 'display_options' is true, lists
|
| 633 |
+
the "display-only" options: --name, --version, etc. Finally,
|
| 634 |
+
lists per-command help for every command name or command class
|
| 635 |
+
in 'commands'.
|
| 636 |
+
"""
|
| 637 |
+
# late import because of mutual dependence between these modules
|
| 638 |
+
from distutils.core import gen_usage
|
| 639 |
+
from distutils.cmd import Command
|
| 640 |
+
|
| 641 |
+
if global_options:
|
| 642 |
+
if display_options:
|
| 643 |
+
options = self._get_toplevel_options()
|
| 644 |
+
else:
|
| 645 |
+
options = self.global_options
|
| 646 |
+
parser.set_option_table(options)
|
| 647 |
+
parser.print_help(self.common_usage + "\nGlobal options:")
|
| 648 |
+
print('')
|
| 649 |
+
|
| 650 |
+
if display_options:
|
| 651 |
+
parser.set_option_table(self.display_options)
|
| 652 |
+
parser.print_help(
|
| 653 |
+
"Information display options (just display "
|
| 654 |
+
+ "information, ignore any commands)"
|
| 655 |
+
)
|
| 656 |
+
print('')
|
| 657 |
+
|
| 658 |
+
for command in self.commands:
|
| 659 |
+
if isinstance(command, type) and issubclass(command, Command):
|
| 660 |
+
klass = command
|
| 661 |
+
else:
|
| 662 |
+
klass = self.get_command_class(command)
|
| 663 |
+
if hasattr(klass, 'help_options') and isinstance(klass.help_options, list):
|
| 664 |
+
parser.set_option_table(
|
| 665 |
+
klass.user_options + fix_help_options(klass.help_options)
|
| 666 |
+
)
|
| 667 |
+
else:
|
| 668 |
+
parser.set_option_table(klass.user_options)
|
| 669 |
+
parser.print_help("Options for '%s' command:" % klass.__name__)
|
| 670 |
+
print('')
|
| 671 |
+
|
| 672 |
+
print(gen_usage(self.script_name))
|
| 673 |
+
|
| 674 |
+
def handle_display_options(self, option_order):
|
| 675 |
+
"""If there were any non-global "display-only" options
|
| 676 |
+
(--help-commands or the metadata display options) on the command
|
| 677 |
+
line, display the requested info and return true; else return
|
| 678 |
+
false.
|
| 679 |
+
"""
|
| 680 |
+
from distutils.core import gen_usage
|
| 681 |
+
|
| 682 |
+
# User just wants a list of commands -- we'll print it out and stop
|
| 683 |
+
# processing now (ie. if they ran "setup --help-commands foo bar",
|
| 684 |
+
# we ignore "foo bar").
|
| 685 |
+
if self.help_commands:
|
| 686 |
+
self.print_commands()
|
| 687 |
+
print('')
|
| 688 |
+
print(gen_usage(self.script_name))
|
| 689 |
+
return 1
|
| 690 |
+
|
| 691 |
+
# If user supplied any of the "display metadata" options, then
|
| 692 |
+
# display that metadata in the order in which the user supplied the
|
| 693 |
+
# metadata options.
|
| 694 |
+
any_display_options = 0
|
| 695 |
+
is_display_option = {}
|
| 696 |
+
for option in self.display_options:
|
| 697 |
+
is_display_option[option[0]] = 1
|
| 698 |
+
|
| 699 |
+
for opt, val in option_order:
|
| 700 |
+
if val and is_display_option.get(opt):
|
| 701 |
+
opt = translate_longopt(opt)
|
| 702 |
+
value = getattr(self.metadata, "get_" + opt)()
|
| 703 |
+
if opt in ('keywords', 'platforms'):
|
| 704 |
+
print(','.join(value))
|
| 705 |
+
elif opt in ('classifiers', 'provides', 'requires', 'obsoletes'):
|
| 706 |
+
print('\n'.join(value))
|
| 707 |
+
else:
|
| 708 |
+
print(value)
|
| 709 |
+
any_display_options = 1
|
| 710 |
+
|
| 711 |
+
return any_display_options
|
| 712 |
+
|
| 713 |
+
def print_command_list(self, commands, header, max_length):
|
| 714 |
+
"""Print a subset of the list of all commands -- used by
|
| 715 |
+
'print_commands()'.
|
| 716 |
+
"""
|
| 717 |
+
print(header + ":")
|
| 718 |
+
|
| 719 |
+
for cmd in commands:
|
| 720 |
+
klass = self.cmdclass.get(cmd)
|
| 721 |
+
if not klass:
|
| 722 |
+
klass = self.get_command_class(cmd)
|
| 723 |
+
try:
|
| 724 |
+
description = klass.description
|
| 725 |
+
except AttributeError:
|
| 726 |
+
description = "(no description available)"
|
| 727 |
+
|
| 728 |
+
print(" %-*s %s" % (max_length, cmd, description))
|
| 729 |
+
|
| 730 |
+
def print_commands(self):
|
| 731 |
+
"""Print out a help message listing all available commands with a
|
| 732 |
+
description of each. The list is divided into "standard commands"
|
| 733 |
+
(listed in distutils.command.__all__) and "extra commands"
|
| 734 |
+
(mentioned in self.cmdclass, but not a standard command). The
|
| 735 |
+
descriptions come from the command class attribute
|
| 736 |
+
'description'.
|
| 737 |
+
"""
|
| 738 |
+
import distutils.command
|
| 739 |
+
|
| 740 |
+
std_commands = distutils.command.__all__
|
| 741 |
+
is_std = {}
|
| 742 |
+
for cmd in std_commands:
|
| 743 |
+
is_std[cmd] = 1
|
| 744 |
+
|
| 745 |
+
extra_commands = []
|
| 746 |
+
for cmd in self.cmdclass.keys():
|
| 747 |
+
if not is_std.get(cmd):
|
| 748 |
+
extra_commands.append(cmd)
|
| 749 |
+
|
| 750 |
+
max_length = 0
|
| 751 |
+
for cmd in std_commands + extra_commands:
|
| 752 |
+
if len(cmd) > max_length:
|
| 753 |
+
max_length = len(cmd)
|
| 754 |
+
|
| 755 |
+
self.print_command_list(std_commands, "Standard commands", max_length)
|
| 756 |
+
if extra_commands:
|
| 757 |
+
print()
|
| 758 |
+
self.print_command_list(extra_commands, "Extra commands", max_length)
|
| 759 |
+
|
| 760 |
+
def get_command_list(self):
|
| 761 |
+
"""Get a list of (command, description) tuples.
|
| 762 |
+
The list is divided into "standard commands" (listed in
|
| 763 |
+
distutils.command.__all__) and "extra commands" (mentioned in
|
| 764 |
+
self.cmdclass, but not a standard command). The descriptions come
|
| 765 |
+
from the command class attribute 'description'.
|
| 766 |
+
"""
|
| 767 |
+
# Currently this is only used on Mac OS, for the Mac-only GUI
|
| 768 |
+
# Distutils interface (by Jack Jansen)
|
| 769 |
+
import distutils.command
|
| 770 |
+
|
| 771 |
+
std_commands = distutils.command.__all__
|
| 772 |
+
is_std = {}
|
| 773 |
+
for cmd in std_commands:
|
| 774 |
+
is_std[cmd] = 1
|
| 775 |
+
|
| 776 |
+
extra_commands = []
|
| 777 |
+
for cmd in self.cmdclass.keys():
|
| 778 |
+
if not is_std.get(cmd):
|
| 779 |
+
extra_commands.append(cmd)
|
| 780 |
+
|
| 781 |
+
rv = []
|
| 782 |
+
for cmd in std_commands + extra_commands:
|
| 783 |
+
klass = self.cmdclass.get(cmd)
|
| 784 |
+
if not klass:
|
| 785 |
+
klass = self.get_command_class(cmd)
|
| 786 |
+
try:
|
| 787 |
+
description = klass.description
|
| 788 |
+
except AttributeError:
|
| 789 |
+
description = "(no description available)"
|
| 790 |
+
rv.append((cmd, description))
|
| 791 |
+
return rv
|
| 792 |
+
|
| 793 |
+
# -- Command class/object methods ----------------------------------
|
| 794 |
+
|
| 795 |
+
def get_command_packages(self):
|
| 796 |
+
"""Return a list of packages from which commands are loaded."""
|
| 797 |
+
pkgs = self.command_packages
|
| 798 |
+
if not isinstance(pkgs, list):
|
| 799 |
+
if pkgs is None:
|
| 800 |
+
pkgs = ''
|
| 801 |
+
pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != '']
|
| 802 |
+
if "distutils.command" not in pkgs:
|
| 803 |
+
pkgs.insert(0, "distutils.command")
|
| 804 |
+
self.command_packages = pkgs
|
| 805 |
+
return pkgs
|
| 806 |
+
|
| 807 |
+
def get_command_class(self, command):
|
| 808 |
+
"""Return the class that implements the Distutils command named by
|
| 809 |
+
'command'. First we check the 'cmdclass' dictionary; if the
|
| 810 |
+
command is mentioned there, we fetch the class object from the
|
| 811 |
+
dictionary and return it. Otherwise we load the command module
|
| 812 |
+
("distutils.command." + command) and fetch the command class from
|
| 813 |
+
the module. The loaded class is also stored in 'cmdclass'
|
| 814 |
+
to speed future calls to 'get_command_class()'.
|
| 815 |
+
|
| 816 |
+
Raises DistutilsModuleError if the expected module could not be
|
| 817 |
+
found, or if that module does not define the expected class.
|
| 818 |
+
"""
|
| 819 |
+
klass = self.cmdclass.get(command)
|
| 820 |
+
if klass:
|
| 821 |
+
return klass
|
| 822 |
+
|
| 823 |
+
for pkgname in self.get_command_packages():
|
| 824 |
+
module_name = "{}.{}".format(pkgname, command)
|
| 825 |
+
klass_name = command
|
| 826 |
+
|
| 827 |
+
try:
|
| 828 |
+
__import__(module_name)
|
| 829 |
+
module = sys.modules[module_name]
|
| 830 |
+
except ImportError:
|
| 831 |
+
continue
|
| 832 |
+
|
| 833 |
+
try:
|
| 834 |
+
klass = getattr(module, klass_name)
|
| 835 |
+
except AttributeError:
|
| 836 |
+
raise DistutilsModuleError(
|
| 837 |
+
"invalid command '%s' (no class '%s' in module '%s')"
|
| 838 |
+
% (command, klass_name, module_name)
|
| 839 |
+
)
|
| 840 |
+
|
| 841 |
+
self.cmdclass[command] = klass
|
| 842 |
+
return klass
|
| 843 |
+
|
| 844 |
+
raise DistutilsModuleError("invalid command '%s'" % command)
|
| 845 |
+
|
| 846 |
+
def get_command_obj(self, command, create=1):
|
| 847 |
+
"""Return the command object for 'command'. Normally this object
|
| 848 |
+
is cached on a previous call to 'get_command_obj()'; if no command
|
| 849 |
+
object for 'command' is in the cache, then we either create and
|
| 850 |
+
return it (if 'create' is true) or return None.
|
| 851 |
+
"""
|
| 852 |
+
cmd_obj = self.command_obj.get(command)
|
| 853 |
+
if not cmd_obj and create:
|
| 854 |
+
if DEBUG:
|
| 855 |
+
self.announce(
|
| 856 |
+
"Distribution.get_command_obj(): "
|
| 857 |
+
"creating '%s' command object" % command
|
| 858 |
+
)
|
| 859 |
+
|
| 860 |
+
klass = self.get_command_class(command)
|
| 861 |
+
cmd_obj = self.command_obj[command] = klass(self)
|
| 862 |
+
self.have_run[command] = 0
|
| 863 |
+
|
| 864 |
+
# Set any options that were supplied in config files
|
| 865 |
+
# or on the command line. (NB. support for error
|
| 866 |
+
# reporting is lame here: any errors aren't reported
|
| 867 |
+
# until 'finalize_options()' is called, which means
|
| 868 |
+
# we won't report the source of the error.)
|
| 869 |
+
options = self.command_options.get(command)
|
| 870 |
+
if options:
|
| 871 |
+
self._set_command_options(cmd_obj, options)
|
| 872 |
+
|
| 873 |
+
return cmd_obj
|
| 874 |
+
|
| 875 |
+
def _set_command_options(self, command_obj, option_dict=None): # noqa: C901
|
| 876 |
+
"""Set the options for 'command_obj' from 'option_dict'. Basically
|
| 877 |
+
this means copying elements of a dictionary ('option_dict') to
|
| 878 |
+
attributes of an instance ('command').
|
| 879 |
+
|
| 880 |
+
'command_obj' must be a Command instance. If 'option_dict' is not
|
| 881 |
+
supplied, uses the standard option dictionary for this command
|
| 882 |
+
(from 'self.command_options').
|
| 883 |
+
"""
|
| 884 |
+
command_name = command_obj.get_command_name()
|
| 885 |
+
if option_dict is None:
|
| 886 |
+
option_dict = self.get_option_dict(command_name)
|
| 887 |
+
|
| 888 |
+
if DEBUG:
|
| 889 |
+
self.announce(" setting options for '%s' command:" % command_name)
|
| 890 |
+
for option, (source, value) in option_dict.items():
|
| 891 |
+
if DEBUG:
|
| 892 |
+
self.announce(" {} = {} (from {})".format(option, value, source))
|
| 893 |
+
try:
|
| 894 |
+
bool_opts = [translate_longopt(o) for o in command_obj.boolean_options]
|
| 895 |
+
except AttributeError:
|
| 896 |
+
bool_opts = []
|
| 897 |
+
try:
|
| 898 |
+
neg_opt = command_obj.negative_opt
|
| 899 |
+
except AttributeError:
|
| 900 |
+
neg_opt = {}
|
| 901 |
+
|
| 902 |
+
try:
|
| 903 |
+
is_string = isinstance(value, str)
|
| 904 |
+
if option in neg_opt and is_string:
|
| 905 |
+
setattr(command_obj, neg_opt[option], not strtobool(value))
|
| 906 |
+
elif option in bool_opts and is_string:
|
| 907 |
+
setattr(command_obj, option, strtobool(value))
|
| 908 |
+
elif hasattr(command_obj, option):
|
| 909 |
+
setattr(command_obj, option, value)
|
| 910 |
+
else:
|
| 911 |
+
raise DistutilsOptionError(
|
| 912 |
+
"error in %s: command '%s' has no such option '%s'"
|
| 913 |
+
% (source, command_name, option)
|
| 914 |
+
)
|
| 915 |
+
except ValueError as msg:
|
| 916 |
+
raise DistutilsOptionError(msg)
|
| 917 |
+
|
| 918 |
+
def reinitialize_command(self, command, reinit_subcommands=0):
|
| 919 |
+
"""Reinitializes a command to the state it was in when first
|
| 920 |
+
returned by 'get_command_obj()': ie., initialized but not yet
|
| 921 |
+
finalized. This provides the opportunity to sneak option
|
| 922 |
+
values in programmatically, overriding or supplementing
|
| 923 |
+
user-supplied values from the config files and command line.
|
| 924 |
+
You'll have to re-finalize the command object (by calling
|
| 925 |
+
'finalize_options()' or 'ensure_finalized()') before using it for
|
| 926 |
+
real.
|
| 927 |
+
|
| 928 |
+
'command' should be a command name (string) or command object. If
|
| 929 |
+
'reinit_subcommands' is true, also reinitializes the command's
|
| 930 |
+
sub-commands, as declared by the 'sub_commands' class attribute (if
|
| 931 |
+
it has one). See the "install" command for an example. Only
|
| 932 |
+
reinitializes the sub-commands that actually matter, ie. those
|
| 933 |
+
whose test predicates return true.
|
| 934 |
+
|
| 935 |
+
Returns the reinitialized command object.
|
| 936 |
+
"""
|
| 937 |
+
from distutils.cmd import Command
|
| 938 |
+
|
| 939 |
+
if not isinstance(command, Command):
|
| 940 |
+
command_name = command
|
| 941 |
+
command = self.get_command_obj(command_name)
|
| 942 |
+
else:
|
| 943 |
+
command_name = command.get_command_name()
|
| 944 |
+
|
| 945 |
+
if not command.finalized:
|
| 946 |
+
return command
|
| 947 |
+
command.initialize_options()
|
| 948 |
+
command.finalized = 0
|
| 949 |
+
self.have_run[command_name] = 0
|
| 950 |
+
self._set_command_options(command)
|
| 951 |
+
|
| 952 |
+
if reinit_subcommands:
|
| 953 |
+
for sub in command.get_sub_commands():
|
| 954 |
+
self.reinitialize_command(sub, reinit_subcommands)
|
| 955 |
+
|
| 956 |
+
return command
|
| 957 |
+
|
| 958 |
+
# -- Methods that operate on the Distribution ----------------------
|
| 959 |
+
|
| 960 |
+
def announce(self, msg, level=logging.INFO):
|
| 961 |
+
log.log(level, msg)
|
| 962 |
+
|
| 963 |
+
def run_commands(self):
|
| 964 |
+
"""Run each command that was seen on the setup script command line.
|
| 965 |
+
Uses the list of commands found and cache of command objects
|
| 966 |
+
created by 'get_command_obj()'.
|
| 967 |
+
"""
|
| 968 |
+
for cmd in self.commands:
|
| 969 |
+
self.run_command(cmd)
|
| 970 |
+
|
| 971 |
+
# -- Methods that operate on its Commands --------------------------
|
| 972 |
+
|
| 973 |
+
def run_command(self, command):
|
| 974 |
+
"""Do whatever it takes to run a command (including nothing at all,
|
| 975 |
+
if the command has already been run). Specifically: if we have
|
| 976 |
+
already created and run the command named by 'command', return
|
| 977 |
+
silently without doing anything. If the command named by 'command'
|
| 978 |
+
doesn't even have a command object yet, create one. Then invoke
|
| 979 |
+
'run()' on that command object (or an existing one).
|
| 980 |
+
"""
|
| 981 |
+
# Already been here, done that? then return silently.
|
| 982 |
+
if self.have_run.get(command):
|
| 983 |
+
return
|
| 984 |
+
|
| 985 |
+
log.info("running %s", command)
|
| 986 |
+
cmd_obj = self.get_command_obj(command)
|
| 987 |
+
cmd_obj.ensure_finalized()
|
| 988 |
+
cmd_obj.run()
|
| 989 |
+
self.have_run[command] = 1
|
| 990 |
+
|
| 991 |
+
# -- Distribution query methods ------------------------------------
|
| 992 |
+
|
| 993 |
+
def has_pure_modules(self):
|
| 994 |
+
return len(self.packages or self.py_modules or []) > 0
|
| 995 |
+
|
| 996 |
+
def has_ext_modules(self):
|
| 997 |
+
return self.ext_modules and len(self.ext_modules) > 0
|
| 998 |
+
|
| 999 |
+
def has_c_libraries(self):
|
| 1000 |
+
return self.libraries and len(self.libraries) > 0
|
| 1001 |
+
|
| 1002 |
+
def has_modules(self):
|
| 1003 |
+
return self.has_pure_modules() or self.has_ext_modules()
|
| 1004 |
+
|
| 1005 |
+
def has_headers(self):
|
| 1006 |
+
return self.headers and len(self.headers) > 0
|
| 1007 |
+
|
| 1008 |
+
def has_scripts(self):
|
| 1009 |
+
return self.scripts and len(self.scripts) > 0
|
| 1010 |
+
|
| 1011 |
+
def has_data_files(self):
|
| 1012 |
+
return self.data_files and len(self.data_files) > 0
|
| 1013 |
+
|
| 1014 |
+
def is_pure(self):
|
| 1015 |
+
return (
|
| 1016 |
+
self.has_pure_modules()
|
| 1017 |
+
and not self.has_ext_modules()
|
| 1018 |
+
and not self.has_c_libraries()
|
| 1019 |
+
)
|
| 1020 |
+
|
| 1021 |
+
# -- Metadata query methods ----------------------------------------
|
| 1022 |
+
|
| 1023 |
+
# If you're looking for 'get_name()', 'get_version()', and so forth,
|
| 1024 |
+
# they are defined in a sneaky way: the constructor binds self.get_XXX
|
| 1025 |
+
# to self.metadata.get_XXX. The actual code is in the
|
| 1026 |
+
# DistributionMetadata class, below.
|
| 1027 |
+
|
| 1028 |
+
|
| 1029 |
+
class DistributionMetadata:
|
| 1030 |
+
"""Dummy class to hold the distribution meta-data: name, version,
|
| 1031 |
+
author, and so forth.
|
| 1032 |
+
"""
|
| 1033 |
+
|
| 1034 |
+
_METHOD_BASENAMES = (
|
| 1035 |
+
"name",
|
| 1036 |
+
"version",
|
| 1037 |
+
"author",
|
| 1038 |
+
"author_email",
|
| 1039 |
+
"maintainer",
|
| 1040 |
+
"maintainer_email",
|
| 1041 |
+
"url",
|
| 1042 |
+
"license",
|
| 1043 |
+
"description",
|
| 1044 |
+
"long_description",
|
| 1045 |
+
"keywords",
|
| 1046 |
+
"platforms",
|
| 1047 |
+
"fullname",
|
| 1048 |
+
"contact",
|
| 1049 |
+
"contact_email",
|
| 1050 |
+
"classifiers",
|
| 1051 |
+
"download_url",
|
| 1052 |
+
# PEP 314
|
| 1053 |
+
"provides",
|
| 1054 |
+
"requires",
|
| 1055 |
+
"obsoletes",
|
| 1056 |
+
)
|
| 1057 |
+
|
| 1058 |
+
def __init__(self, path=None):
|
| 1059 |
+
if path is not None:
|
| 1060 |
+
self.read_pkg_file(open(path))
|
| 1061 |
+
else:
|
| 1062 |
+
self.name = None
|
| 1063 |
+
self.version = None
|
| 1064 |
+
self.author = None
|
| 1065 |
+
self.author_email = None
|
| 1066 |
+
self.maintainer = None
|
| 1067 |
+
self.maintainer_email = None
|
| 1068 |
+
self.url = None
|
| 1069 |
+
self.license = None
|
| 1070 |
+
self.description = None
|
| 1071 |
+
self.long_description = None
|
| 1072 |
+
self.keywords = None
|
| 1073 |
+
self.platforms = None
|
| 1074 |
+
self.classifiers = None
|
| 1075 |
+
self.download_url = None
|
| 1076 |
+
# PEP 314
|
| 1077 |
+
self.provides = None
|
| 1078 |
+
self.requires = None
|
| 1079 |
+
self.obsoletes = None
|
| 1080 |
+
|
| 1081 |
+
def read_pkg_file(self, file):
|
| 1082 |
+
"""Reads the metadata values from a file object."""
|
| 1083 |
+
msg = message_from_file(file)
|
| 1084 |
+
|
| 1085 |
+
def _read_field(name):
|
| 1086 |
+
value = msg[name]
|
| 1087 |
+
if value and value != "UNKNOWN":
|
| 1088 |
+
return value
|
| 1089 |
+
|
| 1090 |
+
def _read_list(name):
|
| 1091 |
+
values = msg.get_all(name, None)
|
| 1092 |
+
if values == []:
|
| 1093 |
+
return None
|
| 1094 |
+
return values
|
| 1095 |
+
|
| 1096 |
+
metadata_version = msg['metadata-version']
|
| 1097 |
+
self.name = _read_field('name')
|
| 1098 |
+
self.version = _read_field('version')
|
| 1099 |
+
self.description = _read_field('summary')
|
| 1100 |
+
# we are filling author only.
|
| 1101 |
+
self.author = _read_field('author')
|
| 1102 |
+
self.maintainer = None
|
| 1103 |
+
self.author_email = _read_field('author-email')
|
| 1104 |
+
self.maintainer_email = None
|
| 1105 |
+
self.url = _read_field('home-page')
|
| 1106 |
+
self.license = _read_field('license')
|
| 1107 |
+
|
| 1108 |
+
if 'download-url' in msg:
|
| 1109 |
+
self.download_url = _read_field('download-url')
|
| 1110 |
+
else:
|
| 1111 |
+
self.download_url = None
|
| 1112 |
+
|
| 1113 |
+
self.long_description = _read_field('description')
|
| 1114 |
+
self.description = _read_field('summary')
|
| 1115 |
+
|
| 1116 |
+
if 'keywords' in msg:
|
| 1117 |
+
self.keywords = _read_field('keywords').split(',')
|
| 1118 |
+
|
| 1119 |
+
self.platforms = _read_list('platform')
|
| 1120 |
+
self.classifiers = _read_list('classifier')
|
| 1121 |
+
|
| 1122 |
+
# PEP 314 - these fields only exist in 1.1
|
| 1123 |
+
if metadata_version == '1.1':
|
| 1124 |
+
self.requires = _read_list('requires')
|
| 1125 |
+
self.provides = _read_list('provides')
|
| 1126 |
+
self.obsoletes = _read_list('obsoletes')
|
| 1127 |
+
else:
|
| 1128 |
+
self.requires = None
|
| 1129 |
+
self.provides = None
|
| 1130 |
+
self.obsoletes = None
|
| 1131 |
+
|
| 1132 |
+
def write_pkg_info(self, base_dir):
|
| 1133 |
+
"""Write the PKG-INFO file into the release tree."""
|
| 1134 |
+
with open(
|
| 1135 |
+
os.path.join(base_dir, 'PKG-INFO'), 'w', encoding='UTF-8'
|
| 1136 |
+
) as pkg_info:
|
| 1137 |
+
self.write_pkg_file(pkg_info)
|
| 1138 |
+
|
| 1139 |
+
def write_pkg_file(self, file):
|
| 1140 |
+
"""Write the PKG-INFO format data to a file object."""
|
| 1141 |
+
version = '1.0'
|
| 1142 |
+
if (
|
| 1143 |
+
self.provides
|
| 1144 |
+
or self.requires
|
| 1145 |
+
or self.obsoletes
|
| 1146 |
+
or self.classifiers
|
| 1147 |
+
or self.download_url
|
| 1148 |
+
):
|
| 1149 |
+
version = '1.1'
|
| 1150 |
+
|
| 1151 |
+
# required fields
|
| 1152 |
+
file.write('Metadata-Version: %s\n' % version)
|
| 1153 |
+
file.write('Name: %s\n' % self.get_name())
|
| 1154 |
+
file.write('Version: %s\n' % self.get_version())
|
| 1155 |
+
|
| 1156 |
+
def maybe_write(header, val):
|
| 1157 |
+
if val:
|
| 1158 |
+
file.write(f"{header}: {val}\n")
|
| 1159 |
+
|
| 1160 |
+
# optional fields
|
| 1161 |
+
maybe_write("Summary", self.get_description())
|
| 1162 |
+
maybe_write("Home-page", self.get_url())
|
| 1163 |
+
maybe_write("Author", self.get_contact())
|
| 1164 |
+
maybe_write("Author-email", self.get_contact_email())
|
| 1165 |
+
maybe_write("License", self.get_license())
|
| 1166 |
+
maybe_write("Download-URL", self.download_url)
|
| 1167 |
+
maybe_write("Description", rfc822_escape(self.get_long_description() or ""))
|
| 1168 |
+
maybe_write("Keywords", ",".join(self.get_keywords()))
|
| 1169 |
+
|
| 1170 |
+
self._write_list(file, 'Platform', self.get_platforms())
|
| 1171 |
+
self._write_list(file, 'Classifier', self.get_classifiers())
|
| 1172 |
+
|
| 1173 |
+
# PEP 314
|
| 1174 |
+
self._write_list(file, 'Requires', self.get_requires())
|
| 1175 |
+
self._write_list(file, 'Provides', self.get_provides())
|
| 1176 |
+
self._write_list(file, 'Obsoletes', self.get_obsoletes())
|
| 1177 |
+
|
| 1178 |
+
def _write_list(self, file, name, values):
|
| 1179 |
+
values = values or []
|
| 1180 |
+
for value in values:
|
| 1181 |
+
file.write('{}: {}\n'.format(name, value))
|
| 1182 |
+
|
| 1183 |
+
# -- Metadata query methods ----------------------------------------
|
| 1184 |
+
|
| 1185 |
+
def get_name(self):
|
| 1186 |
+
return self.name or "UNKNOWN"
|
| 1187 |
+
|
| 1188 |
+
def get_version(self):
|
| 1189 |
+
return self.version or "0.0.0"
|
| 1190 |
+
|
| 1191 |
+
def get_fullname(self):
|
| 1192 |
+
return "{}-{}".format(self.get_name(), self.get_version())
|
| 1193 |
+
|
| 1194 |
+
def get_author(self):
|
| 1195 |
+
return self.author
|
| 1196 |
+
|
| 1197 |
+
def get_author_email(self):
|
| 1198 |
+
return self.author_email
|
| 1199 |
+
|
| 1200 |
+
def get_maintainer(self):
|
| 1201 |
+
return self.maintainer
|
| 1202 |
+
|
| 1203 |
+
def get_maintainer_email(self):
|
| 1204 |
+
return self.maintainer_email
|
| 1205 |
+
|
| 1206 |
+
def get_contact(self):
|
| 1207 |
+
return self.maintainer or self.author
|
| 1208 |
+
|
| 1209 |
+
def get_contact_email(self):
|
| 1210 |
+
return self.maintainer_email or self.author_email
|
| 1211 |
+
|
| 1212 |
+
def get_url(self):
|
| 1213 |
+
return self.url
|
| 1214 |
+
|
| 1215 |
+
def get_license(self):
|
| 1216 |
+
return self.license
|
| 1217 |
+
|
| 1218 |
+
get_licence = get_license
|
| 1219 |
+
|
| 1220 |
+
def get_description(self):
|
| 1221 |
+
return self.description
|
| 1222 |
+
|
| 1223 |
+
def get_long_description(self):
|
| 1224 |
+
return self.long_description
|
| 1225 |
+
|
| 1226 |
+
def get_keywords(self):
|
| 1227 |
+
return self.keywords or []
|
| 1228 |
+
|
| 1229 |
+
def set_keywords(self, value):
|
| 1230 |
+
self.keywords = _ensure_list(value, 'keywords')
|
| 1231 |
+
|
| 1232 |
+
def get_platforms(self):
|
| 1233 |
+
return self.platforms
|
| 1234 |
+
|
| 1235 |
+
def set_platforms(self, value):
|
| 1236 |
+
self.platforms = _ensure_list(value, 'platforms')
|
| 1237 |
+
|
| 1238 |
+
def get_classifiers(self):
|
| 1239 |
+
return self.classifiers or []
|
| 1240 |
+
|
| 1241 |
+
def set_classifiers(self, value):
|
| 1242 |
+
self.classifiers = _ensure_list(value, 'classifiers')
|
| 1243 |
+
|
| 1244 |
+
def get_download_url(self):
|
| 1245 |
+
return self.download_url
|
| 1246 |
+
|
| 1247 |
+
# PEP 314
|
| 1248 |
+
def get_requires(self):
|
| 1249 |
+
return self.requires or []
|
| 1250 |
+
|
| 1251 |
+
def set_requires(self, value):
|
| 1252 |
+
import distutils.versionpredicate
|
| 1253 |
+
|
| 1254 |
+
for v in value:
|
| 1255 |
+
distutils.versionpredicate.VersionPredicate(v)
|
| 1256 |
+
self.requires = list(value)
|
| 1257 |
+
|
| 1258 |
+
def get_provides(self):
|
| 1259 |
+
return self.provides or []
|
| 1260 |
+
|
| 1261 |
+
def set_provides(self, value):
|
| 1262 |
+
value = [v.strip() for v in value]
|
| 1263 |
+
for v in value:
|
| 1264 |
+
import distutils.versionpredicate
|
| 1265 |
+
|
| 1266 |
+
distutils.versionpredicate.split_provision(v)
|
| 1267 |
+
self.provides = value
|
| 1268 |
+
|
| 1269 |
+
def get_obsoletes(self):
|
| 1270 |
+
return self.obsoletes or []
|
| 1271 |
+
|
| 1272 |
+
def set_obsoletes(self, value):
|
| 1273 |
+
import distutils.versionpredicate
|
| 1274 |
+
|
| 1275 |
+
for v in value:
|
| 1276 |
+
distutils.versionpredicate.VersionPredicate(v)
|
| 1277 |
+
self.obsoletes = list(value)
|
| 1278 |
+
|
| 1279 |
+
|
| 1280 |
+
def fix_help_options(options):
|
| 1281 |
+
"""Convert a 4-tuple 'help_options' list as found in various command
|
| 1282 |
+
classes to the 3-tuple form required by FancyGetopt.
|
| 1283 |
+
"""
|
| 1284 |
+
new_options = []
|
| 1285 |
+
for help_tuple in options:
|
| 1286 |
+
new_options.append(help_tuple[0:3])
|
| 1287 |
+
return new_options
|
.venv/Lib/site-packages/setuptools/_distutils/errors.py
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.errors
|
| 2 |
+
|
| 3 |
+
Provides exceptions used by the Distutils modules. Note that Distutils
|
| 4 |
+
modules may raise standard exceptions; in particular, SystemExit is
|
| 5 |
+
usually raised for errors that are obviously the end-user's fault
|
| 6 |
+
(eg. bad command-line arguments).
|
| 7 |
+
|
| 8 |
+
This module is safe to use in "from ... import *" mode; it only exports
|
| 9 |
+
symbols whose names start with "Distutils" and end with "Error"."""
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class DistutilsError(Exception):
|
| 13 |
+
"""The root of all Distutils evil."""
|
| 14 |
+
|
| 15 |
+
pass
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class DistutilsModuleError(DistutilsError):
|
| 19 |
+
"""Unable to load an expected module, or to find an expected class
|
| 20 |
+
within some module (in particular, command modules and classes)."""
|
| 21 |
+
|
| 22 |
+
pass
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class DistutilsClassError(DistutilsError):
|
| 26 |
+
"""Some command class (or possibly distribution class, if anyone
|
| 27 |
+
feels a need to subclass Distribution) is found not to be holding
|
| 28 |
+
up its end of the bargain, ie. implementing some part of the
|
| 29 |
+
"command "interface."""
|
| 30 |
+
|
| 31 |
+
pass
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class DistutilsGetoptError(DistutilsError):
|
| 35 |
+
"""The option table provided to 'fancy_getopt()' is bogus."""
|
| 36 |
+
|
| 37 |
+
pass
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class DistutilsArgError(DistutilsError):
|
| 41 |
+
"""Raised by fancy_getopt in response to getopt.error -- ie. an
|
| 42 |
+
error in the command line usage."""
|
| 43 |
+
|
| 44 |
+
pass
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class DistutilsFileError(DistutilsError):
|
| 48 |
+
"""Any problems in the filesystem: expected file not found, etc.
|
| 49 |
+
Typically this is for problems that we detect before OSError
|
| 50 |
+
could be raised."""
|
| 51 |
+
|
| 52 |
+
pass
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class DistutilsOptionError(DistutilsError):
|
| 56 |
+
"""Syntactic/semantic errors in command options, such as use of
|
| 57 |
+
mutually conflicting options, or inconsistent options,
|
| 58 |
+
badly-spelled values, etc. No distinction is made between option
|
| 59 |
+
values originating in the setup script, the command line, config
|
| 60 |
+
files, or what-have-you -- but if we *know* something originated in
|
| 61 |
+
the setup script, we'll raise DistutilsSetupError instead."""
|
| 62 |
+
|
| 63 |
+
pass
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class DistutilsSetupError(DistutilsError):
|
| 67 |
+
"""For errors that can be definitely blamed on the setup script,
|
| 68 |
+
such as invalid keyword arguments to 'setup()'."""
|
| 69 |
+
|
| 70 |
+
pass
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class DistutilsPlatformError(DistutilsError):
|
| 74 |
+
"""We don't know how to do something on the current platform (but
|
| 75 |
+
we do know how to do it on some platform) -- eg. trying to compile
|
| 76 |
+
C files on a platform not supported by a CCompiler subclass."""
|
| 77 |
+
|
| 78 |
+
pass
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class DistutilsExecError(DistutilsError):
|
| 82 |
+
"""Any problems executing an external program (such as the C
|
| 83 |
+
compiler, when compiling C files)."""
|
| 84 |
+
|
| 85 |
+
pass
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class DistutilsInternalError(DistutilsError):
|
| 89 |
+
"""Internal inconsistencies or impossibilities (obviously, this
|
| 90 |
+
should never be seen if the code is working!)."""
|
| 91 |
+
|
| 92 |
+
pass
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class DistutilsTemplateError(DistutilsError):
|
| 96 |
+
"""Syntax error in a file list template."""
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class DistutilsByteCompileError(DistutilsError):
|
| 100 |
+
"""Byte compile error."""
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
# Exception classes used by the CCompiler implementation classes
|
| 104 |
+
class CCompilerError(Exception):
|
| 105 |
+
"""Some compile/link operation failed."""
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
class PreprocessError(CCompilerError):
|
| 109 |
+
"""Failure to preprocess one or more C/C++ files."""
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
class CompileError(CCompilerError):
|
| 113 |
+
"""Failure to compile one or more C/C++ source files."""
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
class LibError(CCompilerError):
|
| 117 |
+
"""Failure to create a static library from one or more C/C++ object
|
| 118 |
+
files."""
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
class LinkError(CCompilerError):
|
| 122 |
+
"""Failure to link one or more C/C++ object files into an executable
|
| 123 |
+
or shared library file."""
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
class UnknownFileError(CCompilerError):
|
| 127 |
+
"""Attempt to process an unknown file type."""
|
.venv/Lib/site-packages/setuptools/_distutils/extension.py
ADDED
|
@@ -0,0 +1,248 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.extension
|
| 2 |
+
|
| 3 |
+
Provides the Extension class, used to describe C/C++ extension
|
| 4 |
+
modules in setup scripts."""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import warnings
|
| 8 |
+
|
| 9 |
+
# This class is really only used by the "build_ext" command, so it might
|
| 10 |
+
# make sense to put it in distutils.command.build_ext. However, that
|
| 11 |
+
# module is already big enough, and I want to make this class a bit more
|
| 12 |
+
# complex to simplify some common cases ("foo" module in "foo.c") and do
|
| 13 |
+
# better error-checking ("foo.c" actually exists).
|
| 14 |
+
#
|
| 15 |
+
# Also, putting this in build_ext.py means every setup script would have to
|
| 16 |
+
# import that large-ish module (indirectly, through distutils.core) in
|
| 17 |
+
# order to do anything.
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class Extension:
|
| 21 |
+
"""Just a collection of attributes that describes an extension
|
| 22 |
+
module and everything needed to build it (hopefully in a portable
|
| 23 |
+
way, but there are hooks that let you be as unportable as you need).
|
| 24 |
+
|
| 25 |
+
Instance attributes:
|
| 26 |
+
name : string
|
| 27 |
+
the full name of the extension, including any packages -- ie.
|
| 28 |
+
*not* a filename or pathname, but Python dotted name
|
| 29 |
+
sources : [string]
|
| 30 |
+
list of source filenames, relative to the distribution root
|
| 31 |
+
(where the setup script lives), in Unix form (slash-separated)
|
| 32 |
+
for portability. Source files may be C, C++, SWIG (.i),
|
| 33 |
+
platform-specific resource files, or whatever else is recognized
|
| 34 |
+
by the "build_ext" command as source for a Python extension.
|
| 35 |
+
include_dirs : [string]
|
| 36 |
+
list of directories to search for C/C++ header files (in Unix
|
| 37 |
+
form for portability)
|
| 38 |
+
define_macros : [(name : string, value : string|None)]
|
| 39 |
+
list of macros to define; each macro is defined using a 2-tuple,
|
| 40 |
+
where 'value' is either the string to define it to or None to
|
| 41 |
+
define it without a particular value (equivalent of "#define
|
| 42 |
+
FOO" in source or -DFOO on Unix C compiler command line)
|
| 43 |
+
undef_macros : [string]
|
| 44 |
+
list of macros to undefine explicitly
|
| 45 |
+
library_dirs : [string]
|
| 46 |
+
list of directories to search for C/C++ libraries at link time
|
| 47 |
+
libraries : [string]
|
| 48 |
+
list of library names (not filenames or paths) to link against
|
| 49 |
+
runtime_library_dirs : [string]
|
| 50 |
+
list of directories to search for C/C++ libraries at run time
|
| 51 |
+
(for shared extensions, this is when the extension is loaded)
|
| 52 |
+
extra_objects : [string]
|
| 53 |
+
list of extra files to link with (eg. object files not implied
|
| 54 |
+
by 'sources', static library that must be explicitly specified,
|
| 55 |
+
binary resource files, etc.)
|
| 56 |
+
extra_compile_args : [string]
|
| 57 |
+
any extra platform- and compiler-specific information to use
|
| 58 |
+
when compiling the source files in 'sources'. For platforms and
|
| 59 |
+
compilers where "command line" makes sense, this is typically a
|
| 60 |
+
list of command-line arguments, but for other platforms it could
|
| 61 |
+
be anything.
|
| 62 |
+
extra_link_args : [string]
|
| 63 |
+
any extra platform- and compiler-specific information to use
|
| 64 |
+
when linking object files together to create the extension (or
|
| 65 |
+
to create a new static Python interpreter). Similar
|
| 66 |
+
interpretation as for 'extra_compile_args'.
|
| 67 |
+
export_symbols : [string]
|
| 68 |
+
list of symbols to be exported from a shared extension. Not
|
| 69 |
+
used on all platforms, and not generally necessary for Python
|
| 70 |
+
extensions, which typically export exactly one symbol: "init" +
|
| 71 |
+
extension_name.
|
| 72 |
+
swig_opts : [string]
|
| 73 |
+
any extra options to pass to SWIG if a source file has the .i
|
| 74 |
+
extension.
|
| 75 |
+
depends : [string]
|
| 76 |
+
list of files that the extension depends on
|
| 77 |
+
language : string
|
| 78 |
+
extension language (i.e. "c", "c++", "objc"). Will be detected
|
| 79 |
+
from the source extensions if not provided.
|
| 80 |
+
optional : boolean
|
| 81 |
+
specifies that a build failure in the extension should not abort the
|
| 82 |
+
build process, but simply not install the failing extension.
|
| 83 |
+
"""
|
| 84 |
+
|
| 85 |
+
# When adding arguments to this constructor, be sure to update
|
| 86 |
+
# setup_keywords in core.py.
|
| 87 |
+
def __init__(
|
| 88 |
+
self,
|
| 89 |
+
name,
|
| 90 |
+
sources,
|
| 91 |
+
include_dirs=None,
|
| 92 |
+
define_macros=None,
|
| 93 |
+
undef_macros=None,
|
| 94 |
+
library_dirs=None,
|
| 95 |
+
libraries=None,
|
| 96 |
+
runtime_library_dirs=None,
|
| 97 |
+
extra_objects=None,
|
| 98 |
+
extra_compile_args=None,
|
| 99 |
+
extra_link_args=None,
|
| 100 |
+
export_symbols=None,
|
| 101 |
+
swig_opts=None,
|
| 102 |
+
depends=None,
|
| 103 |
+
language=None,
|
| 104 |
+
optional=None,
|
| 105 |
+
**kw # To catch unknown keywords
|
| 106 |
+
):
|
| 107 |
+
if not isinstance(name, str):
|
| 108 |
+
raise AssertionError("'name' must be a string")
|
| 109 |
+
if not (isinstance(sources, list) and all(isinstance(v, str) for v in sources)):
|
| 110 |
+
raise AssertionError("'sources' must be a list of strings")
|
| 111 |
+
|
| 112 |
+
self.name = name
|
| 113 |
+
self.sources = sources
|
| 114 |
+
self.include_dirs = include_dirs or []
|
| 115 |
+
self.define_macros = define_macros or []
|
| 116 |
+
self.undef_macros = undef_macros or []
|
| 117 |
+
self.library_dirs = library_dirs or []
|
| 118 |
+
self.libraries = libraries or []
|
| 119 |
+
self.runtime_library_dirs = runtime_library_dirs or []
|
| 120 |
+
self.extra_objects = extra_objects or []
|
| 121 |
+
self.extra_compile_args = extra_compile_args or []
|
| 122 |
+
self.extra_link_args = extra_link_args or []
|
| 123 |
+
self.export_symbols = export_symbols or []
|
| 124 |
+
self.swig_opts = swig_opts or []
|
| 125 |
+
self.depends = depends or []
|
| 126 |
+
self.language = language
|
| 127 |
+
self.optional = optional
|
| 128 |
+
|
| 129 |
+
# If there are unknown keyword options, warn about them
|
| 130 |
+
if len(kw) > 0:
|
| 131 |
+
options = [repr(option) for option in kw]
|
| 132 |
+
options = ', '.join(sorted(options))
|
| 133 |
+
msg = "Unknown Extension options: %s" % options
|
| 134 |
+
warnings.warn(msg)
|
| 135 |
+
|
| 136 |
+
def __repr__(self):
|
| 137 |
+
return '<{}.{}({!r}) at {:#x}>'.format(
|
| 138 |
+
self.__class__.__module__,
|
| 139 |
+
self.__class__.__qualname__,
|
| 140 |
+
self.name,
|
| 141 |
+
id(self),
|
| 142 |
+
)
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def read_setup_file(filename): # noqa: C901
|
| 146 |
+
"""Reads a Setup file and returns Extension instances."""
|
| 147 |
+
from distutils.sysconfig import parse_makefile, expand_makefile_vars, _variable_rx
|
| 148 |
+
|
| 149 |
+
from distutils.text_file import TextFile
|
| 150 |
+
from distutils.util import split_quoted
|
| 151 |
+
|
| 152 |
+
# First pass over the file to gather "VAR = VALUE" assignments.
|
| 153 |
+
vars = parse_makefile(filename)
|
| 154 |
+
|
| 155 |
+
# Second pass to gobble up the real content: lines of the form
|
| 156 |
+
# <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
|
| 157 |
+
file = TextFile(
|
| 158 |
+
filename,
|
| 159 |
+
strip_comments=1,
|
| 160 |
+
skip_blanks=1,
|
| 161 |
+
join_lines=1,
|
| 162 |
+
lstrip_ws=1,
|
| 163 |
+
rstrip_ws=1,
|
| 164 |
+
)
|
| 165 |
+
try:
|
| 166 |
+
extensions = []
|
| 167 |
+
|
| 168 |
+
while True:
|
| 169 |
+
line = file.readline()
|
| 170 |
+
if line is None: # eof
|
| 171 |
+
break
|
| 172 |
+
if _variable_rx.match(line): # VAR=VALUE, handled in first pass
|
| 173 |
+
continue
|
| 174 |
+
|
| 175 |
+
if line[0] == line[-1] == "*":
|
| 176 |
+
file.warn("'%s' lines not handled yet" % line)
|
| 177 |
+
continue
|
| 178 |
+
|
| 179 |
+
line = expand_makefile_vars(line, vars)
|
| 180 |
+
words = split_quoted(line)
|
| 181 |
+
|
| 182 |
+
# NB. this parses a slightly different syntax than the old
|
| 183 |
+
# makesetup script: here, there must be exactly one extension per
|
| 184 |
+
# line, and it must be the first word of the line. I have no idea
|
| 185 |
+
# why the old syntax supported multiple extensions per line, as
|
| 186 |
+
# they all wind up being the same.
|
| 187 |
+
|
| 188 |
+
module = words[0]
|
| 189 |
+
ext = Extension(module, [])
|
| 190 |
+
append_next_word = None
|
| 191 |
+
|
| 192 |
+
for word in words[1:]:
|
| 193 |
+
if append_next_word is not None:
|
| 194 |
+
append_next_word.append(word)
|
| 195 |
+
append_next_word = None
|
| 196 |
+
continue
|
| 197 |
+
|
| 198 |
+
suffix = os.path.splitext(word)[1]
|
| 199 |
+
switch = word[0:2]
|
| 200 |
+
value = word[2:]
|
| 201 |
+
|
| 202 |
+
if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"):
|
| 203 |
+
# hmm, should we do something about C vs. C++ sources?
|
| 204 |
+
# or leave it up to the CCompiler implementation to
|
| 205 |
+
# worry about?
|
| 206 |
+
ext.sources.append(word)
|
| 207 |
+
elif switch == "-I":
|
| 208 |
+
ext.include_dirs.append(value)
|
| 209 |
+
elif switch == "-D":
|
| 210 |
+
equals = value.find("=")
|
| 211 |
+
if equals == -1: # bare "-DFOO" -- no value
|
| 212 |
+
ext.define_macros.append((value, None))
|
| 213 |
+
else: # "-DFOO=blah"
|
| 214 |
+
ext.define_macros.append((value[0:equals], value[equals + 2 :]))
|
| 215 |
+
elif switch == "-U":
|
| 216 |
+
ext.undef_macros.append(value)
|
| 217 |
+
elif switch == "-C": # only here 'cause makesetup has it!
|
| 218 |
+
ext.extra_compile_args.append(word)
|
| 219 |
+
elif switch == "-l":
|
| 220 |
+
ext.libraries.append(value)
|
| 221 |
+
elif switch == "-L":
|
| 222 |
+
ext.library_dirs.append(value)
|
| 223 |
+
elif switch == "-R":
|
| 224 |
+
ext.runtime_library_dirs.append(value)
|
| 225 |
+
elif word == "-rpath":
|
| 226 |
+
append_next_word = ext.runtime_library_dirs
|
| 227 |
+
elif word == "-Xlinker":
|
| 228 |
+
append_next_word = ext.extra_link_args
|
| 229 |
+
elif word == "-Xcompiler":
|
| 230 |
+
append_next_word = ext.extra_compile_args
|
| 231 |
+
elif switch == "-u":
|
| 232 |
+
ext.extra_link_args.append(word)
|
| 233 |
+
if not value:
|
| 234 |
+
append_next_word = ext.extra_link_args
|
| 235 |
+
elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
|
| 236 |
+
# NB. a really faithful emulation of makesetup would
|
| 237 |
+
# append a .o file to extra_objects only if it
|
| 238 |
+
# had a slash in it; otherwise, it would s/.o/.c/
|
| 239 |
+
# and append it to sources. Hmmmm.
|
| 240 |
+
ext.extra_objects.append(word)
|
| 241 |
+
else:
|
| 242 |
+
file.warn("unrecognized argument '%s'" % word)
|
| 243 |
+
|
| 244 |
+
extensions.append(ext)
|
| 245 |
+
finally:
|
| 246 |
+
file.close()
|
| 247 |
+
|
| 248 |
+
return extensions
|
.venv/Lib/site-packages/setuptools/_distutils/fancy_getopt.py
ADDED
|
@@ -0,0 +1,470 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.fancy_getopt
|
| 2 |
+
|
| 3 |
+
Wrapper around the standard getopt module that provides the following
|
| 4 |
+
additional features:
|
| 5 |
+
* short and long options are tied together
|
| 6 |
+
* options have help strings, so fancy_getopt could potentially
|
| 7 |
+
create a complete usage summary
|
| 8 |
+
* options set attributes of a passed-in object
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import sys
|
| 12 |
+
import string
|
| 13 |
+
import re
|
| 14 |
+
import getopt
|
| 15 |
+
from .errors import DistutilsGetoptError, DistutilsArgError
|
| 16 |
+
|
| 17 |
+
# Much like command_re in distutils.core, this is close to but not quite
|
| 18 |
+
# the same as a Python NAME -- except, in the spirit of most GNU
|
| 19 |
+
# utilities, we use '-' in place of '_'. (The spirit of LISP lives on!)
|
| 20 |
+
# The similarities to NAME are again not a coincidence...
|
| 21 |
+
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
|
| 22 |
+
longopt_re = re.compile(r'^%s$' % longopt_pat)
|
| 23 |
+
|
| 24 |
+
# For recognizing "negative alias" options, eg. "quiet=!verbose"
|
| 25 |
+
neg_alias_re = re.compile("^({})=!({})$".format(longopt_pat, longopt_pat))
|
| 26 |
+
|
| 27 |
+
# This is used to translate long options to legitimate Python identifiers
|
| 28 |
+
# (for use as attributes of some object).
|
| 29 |
+
longopt_xlate = str.maketrans('-', '_')
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class FancyGetopt:
|
| 33 |
+
"""Wrapper around the standard 'getopt()' module that provides some
|
| 34 |
+
handy extra functionality:
|
| 35 |
+
* short and long options are tied together
|
| 36 |
+
* options have help strings, and help text can be assembled
|
| 37 |
+
from them
|
| 38 |
+
* options set attributes of a passed-in object
|
| 39 |
+
* boolean options can have "negative aliases" -- eg. if
|
| 40 |
+
--quiet is the "negative alias" of --verbose, then "--quiet"
|
| 41 |
+
on the command line sets 'verbose' to false
|
| 42 |
+
"""
|
| 43 |
+
|
| 44 |
+
def __init__(self, option_table=None):
|
| 45 |
+
# The option table is (currently) a list of tuples. The
|
| 46 |
+
# tuples may have 3 or four values:
|
| 47 |
+
# (long_option, short_option, help_string [, repeatable])
|
| 48 |
+
# if an option takes an argument, its long_option should have '='
|
| 49 |
+
# appended; short_option should just be a single character, no ':'
|
| 50 |
+
# in any case. If a long_option doesn't have a corresponding
|
| 51 |
+
# short_option, short_option should be None. All option tuples
|
| 52 |
+
# must have long options.
|
| 53 |
+
self.option_table = option_table
|
| 54 |
+
|
| 55 |
+
# 'option_index' maps long option names to entries in the option
|
| 56 |
+
# table (ie. those 3-tuples).
|
| 57 |
+
self.option_index = {}
|
| 58 |
+
if self.option_table:
|
| 59 |
+
self._build_index()
|
| 60 |
+
|
| 61 |
+
# 'alias' records (duh) alias options; {'foo': 'bar'} means
|
| 62 |
+
# --foo is an alias for --bar
|
| 63 |
+
self.alias = {}
|
| 64 |
+
|
| 65 |
+
# 'negative_alias' keeps track of options that are the boolean
|
| 66 |
+
# opposite of some other option
|
| 67 |
+
self.negative_alias = {}
|
| 68 |
+
|
| 69 |
+
# These keep track of the information in the option table. We
|
| 70 |
+
# don't actually populate these structures until we're ready to
|
| 71 |
+
# parse the command-line, since the 'option_table' passed in here
|
| 72 |
+
# isn't necessarily the final word.
|
| 73 |
+
self.short_opts = []
|
| 74 |
+
self.long_opts = []
|
| 75 |
+
self.short2long = {}
|
| 76 |
+
self.attr_name = {}
|
| 77 |
+
self.takes_arg = {}
|
| 78 |
+
|
| 79 |
+
# And 'option_order' is filled up in 'getopt()'; it records the
|
| 80 |
+
# original order of options (and their values) on the command-line,
|
| 81 |
+
# but expands short options, converts aliases, etc.
|
| 82 |
+
self.option_order = []
|
| 83 |
+
|
| 84 |
+
def _build_index(self):
|
| 85 |
+
self.option_index.clear()
|
| 86 |
+
for option in self.option_table:
|
| 87 |
+
self.option_index[option[0]] = option
|
| 88 |
+
|
| 89 |
+
def set_option_table(self, option_table):
|
| 90 |
+
self.option_table = option_table
|
| 91 |
+
self._build_index()
|
| 92 |
+
|
| 93 |
+
def add_option(self, long_option, short_option=None, help_string=None):
|
| 94 |
+
if long_option in self.option_index:
|
| 95 |
+
raise DistutilsGetoptError(
|
| 96 |
+
"option conflict: already an option '%s'" % long_option
|
| 97 |
+
)
|
| 98 |
+
else:
|
| 99 |
+
option = (long_option, short_option, help_string)
|
| 100 |
+
self.option_table.append(option)
|
| 101 |
+
self.option_index[long_option] = option
|
| 102 |
+
|
| 103 |
+
def has_option(self, long_option):
|
| 104 |
+
"""Return true if the option table for this parser has an
|
| 105 |
+
option with long name 'long_option'."""
|
| 106 |
+
return long_option in self.option_index
|
| 107 |
+
|
| 108 |
+
def get_attr_name(self, long_option):
|
| 109 |
+
"""Translate long option name 'long_option' to the form it
|
| 110 |
+
has as an attribute of some object: ie., translate hyphens
|
| 111 |
+
to underscores."""
|
| 112 |
+
return long_option.translate(longopt_xlate)
|
| 113 |
+
|
| 114 |
+
def _check_alias_dict(self, aliases, what):
|
| 115 |
+
assert isinstance(aliases, dict)
|
| 116 |
+
for alias, opt in aliases.items():
|
| 117 |
+
if alias not in self.option_index:
|
| 118 |
+
raise DistutilsGetoptError(
|
| 119 |
+
("invalid %s '%s': " "option '%s' not defined")
|
| 120 |
+
% (what, alias, alias)
|
| 121 |
+
)
|
| 122 |
+
if opt not in self.option_index:
|
| 123 |
+
raise DistutilsGetoptError(
|
| 124 |
+
("invalid %s '%s': " "aliased option '%s' not defined")
|
| 125 |
+
% (what, alias, opt)
|
| 126 |
+
)
|
| 127 |
+
|
| 128 |
+
def set_aliases(self, alias):
|
| 129 |
+
"""Set the aliases for this option parser."""
|
| 130 |
+
self._check_alias_dict(alias, "alias")
|
| 131 |
+
self.alias = alias
|
| 132 |
+
|
| 133 |
+
def set_negative_aliases(self, negative_alias):
|
| 134 |
+
"""Set the negative aliases for this option parser.
|
| 135 |
+
'negative_alias' should be a dictionary mapping option names to
|
| 136 |
+
option names, both the key and value must already be defined
|
| 137 |
+
in the option table."""
|
| 138 |
+
self._check_alias_dict(negative_alias, "negative alias")
|
| 139 |
+
self.negative_alias = negative_alias
|
| 140 |
+
|
| 141 |
+
def _grok_option_table(self): # noqa: C901
|
| 142 |
+
"""Populate the various data structures that keep tabs on the
|
| 143 |
+
option table. Called by 'getopt()' before it can do anything
|
| 144 |
+
worthwhile.
|
| 145 |
+
"""
|
| 146 |
+
self.long_opts = []
|
| 147 |
+
self.short_opts = []
|
| 148 |
+
self.short2long.clear()
|
| 149 |
+
self.repeat = {}
|
| 150 |
+
|
| 151 |
+
for option in self.option_table:
|
| 152 |
+
if len(option) == 3:
|
| 153 |
+
long, short, help = option
|
| 154 |
+
repeat = 0
|
| 155 |
+
elif len(option) == 4:
|
| 156 |
+
long, short, help, repeat = option
|
| 157 |
+
else:
|
| 158 |
+
# the option table is part of the code, so simply
|
| 159 |
+
# assert that it is correct
|
| 160 |
+
raise ValueError("invalid option tuple: {!r}".format(option))
|
| 161 |
+
|
| 162 |
+
# Type- and value-check the option names
|
| 163 |
+
if not isinstance(long, str) or len(long) < 2:
|
| 164 |
+
raise DistutilsGetoptError(
|
| 165 |
+
("invalid long option '%s': " "must be a string of length >= 2")
|
| 166 |
+
% long
|
| 167 |
+
)
|
| 168 |
+
|
| 169 |
+
if not ((short is None) or (isinstance(short, str) and len(short) == 1)):
|
| 170 |
+
raise DistutilsGetoptError(
|
| 171 |
+
"invalid short option '%s': "
|
| 172 |
+
"must a single character or None" % short
|
| 173 |
+
)
|
| 174 |
+
|
| 175 |
+
self.repeat[long] = repeat
|
| 176 |
+
self.long_opts.append(long)
|
| 177 |
+
|
| 178 |
+
if long[-1] == '=': # option takes an argument?
|
| 179 |
+
if short:
|
| 180 |
+
short = short + ':'
|
| 181 |
+
long = long[0:-1]
|
| 182 |
+
self.takes_arg[long] = 1
|
| 183 |
+
else:
|
| 184 |
+
# Is option is a "negative alias" for some other option (eg.
|
| 185 |
+
# "quiet" == "!verbose")?
|
| 186 |
+
alias_to = self.negative_alias.get(long)
|
| 187 |
+
if alias_to is not None:
|
| 188 |
+
if self.takes_arg[alias_to]:
|
| 189 |
+
raise DistutilsGetoptError(
|
| 190 |
+
"invalid negative alias '%s': "
|
| 191 |
+
"aliased option '%s' takes a value" % (long, alias_to)
|
| 192 |
+
)
|
| 193 |
+
|
| 194 |
+
self.long_opts[-1] = long # XXX redundant?!
|
| 195 |
+
self.takes_arg[long] = 0
|
| 196 |
+
|
| 197 |
+
# If this is an alias option, make sure its "takes arg" flag is
|
| 198 |
+
# the same as the option it's aliased to.
|
| 199 |
+
alias_to = self.alias.get(long)
|
| 200 |
+
if alias_to is not None:
|
| 201 |
+
if self.takes_arg[long] != self.takes_arg[alias_to]:
|
| 202 |
+
raise DistutilsGetoptError(
|
| 203 |
+
"invalid alias '%s': inconsistent with "
|
| 204 |
+
"aliased option '%s' (one of them takes a value, "
|
| 205 |
+
"the other doesn't" % (long, alias_to)
|
| 206 |
+
)
|
| 207 |
+
|
| 208 |
+
# Now enforce some bondage on the long option name, so we can
|
| 209 |
+
# later translate it to an attribute name on some object. Have
|
| 210 |
+
# to do this a bit late to make sure we've removed any trailing
|
| 211 |
+
# '='.
|
| 212 |
+
if not longopt_re.match(long):
|
| 213 |
+
raise DistutilsGetoptError(
|
| 214 |
+
"invalid long option name '%s' "
|
| 215 |
+
"(must be letters, numbers, hyphens only" % long
|
| 216 |
+
)
|
| 217 |
+
|
| 218 |
+
self.attr_name[long] = self.get_attr_name(long)
|
| 219 |
+
if short:
|
| 220 |
+
self.short_opts.append(short)
|
| 221 |
+
self.short2long[short[0]] = long
|
| 222 |
+
|
| 223 |
+
def getopt(self, args=None, object=None): # noqa: C901
|
| 224 |
+
"""Parse command-line options in args. Store as attributes on object.
|
| 225 |
+
|
| 226 |
+
If 'args' is None or not supplied, uses 'sys.argv[1:]'. If
|
| 227 |
+
'object' is None or not supplied, creates a new OptionDummy
|
| 228 |
+
object, stores option values there, and returns a tuple (args,
|
| 229 |
+
object). If 'object' is supplied, it is modified in place and
|
| 230 |
+
'getopt()' just returns 'args'; in both cases, the returned
|
| 231 |
+
'args' is a modified copy of the passed-in 'args' list, which
|
| 232 |
+
is left untouched.
|
| 233 |
+
"""
|
| 234 |
+
if args is None:
|
| 235 |
+
args = sys.argv[1:]
|
| 236 |
+
if object is None:
|
| 237 |
+
object = OptionDummy()
|
| 238 |
+
created_object = True
|
| 239 |
+
else:
|
| 240 |
+
created_object = False
|
| 241 |
+
|
| 242 |
+
self._grok_option_table()
|
| 243 |
+
|
| 244 |
+
short_opts = ' '.join(self.short_opts)
|
| 245 |
+
try:
|
| 246 |
+
opts, args = getopt.getopt(args, short_opts, self.long_opts)
|
| 247 |
+
except getopt.error as msg:
|
| 248 |
+
raise DistutilsArgError(msg)
|
| 249 |
+
|
| 250 |
+
for opt, val in opts:
|
| 251 |
+
if len(opt) == 2 and opt[0] == '-': # it's a short option
|
| 252 |
+
opt = self.short2long[opt[1]]
|
| 253 |
+
else:
|
| 254 |
+
assert len(opt) > 2 and opt[:2] == '--'
|
| 255 |
+
opt = opt[2:]
|
| 256 |
+
|
| 257 |
+
alias = self.alias.get(opt)
|
| 258 |
+
if alias:
|
| 259 |
+
opt = alias
|
| 260 |
+
|
| 261 |
+
if not self.takes_arg[opt]: # boolean option?
|
| 262 |
+
assert val == '', "boolean option can't have value"
|
| 263 |
+
alias = self.negative_alias.get(opt)
|
| 264 |
+
if alias:
|
| 265 |
+
opt = alias
|
| 266 |
+
val = 0
|
| 267 |
+
else:
|
| 268 |
+
val = 1
|
| 269 |
+
|
| 270 |
+
attr = self.attr_name[opt]
|
| 271 |
+
# The only repeating option at the moment is 'verbose'.
|
| 272 |
+
# It has a negative option -q quiet, which should set verbose = 0.
|
| 273 |
+
if val and self.repeat.get(attr) is not None:
|
| 274 |
+
val = getattr(object, attr, 0) + 1
|
| 275 |
+
setattr(object, attr, val)
|
| 276 |
+
self.option_order.append((opt, val))
|
| 277 |
+
|
| 278 |
+
# for opts
|
| 279 |
+
if created_object:
|
| 280 |
+
return args, object
|
| 281 |
+
else:
|
| 282 |
+
return args
|
| 283 |
+
|
| 284 |
+
def get_option_order(self):
|
| 285 |
+
"""Returns the list of (option, value) tuples processed by the
|
| 286 |
+
previous run of 'getopt()'. Raises RuntimeError if
|
| 287 |
+
'getopt()' hasn't been called yet.
|
| 288 |
+
"""
|
| 289 |
+
if self.option_order is None:
|
| 290 |
+
raise RuntimeError("'getopt()' hasn't been called yet")
|
| 291 |
+
else:
|
| 292 |
+
return self.option_order
|
| 293 |
+
|
| 294 |
+
def generate_help(self, header=None): # noqa: C901
|
| 295 |
+
"""Generate help text (a list of strings, one per suggested line of
|
| 296 |
+
output) from the option table for this FancyGetopt object.
|
| 297 |
+
"""
|
| 298 |
+
# Blithely assume the option table is good: probably wouldn't call
|
| 299 |
+
# 'generate_help()' unless you've already called 'getopt()'.
|
| 300 |
+
|
| 301 |
+
# First pass: determine maximum length of long option names
|
| 302 |
+
max_opt = 0
|
| 303 |
+
for option in self.option_table:
|
| 304 |
+
long = option[0]
|
| 305 |
+
short = option[1]
|
| 306 |
+
ell = len(long)
|
| 307 |
+
if long[-1] == '=':
|
| 308 |
+
ell = ell - 1
|
| 309 |
+
if short is not None:
|
| 310 |
+
ell = ell + 5 # " (-x)" where short == 'x'
|
| 311 |
+
if ell > max_opt:
|
| 312 |
+
max_opt = ell
|
| 313 |
+
|
| 314 |
+
opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter
|
| 315 |
+
|
| 316 |
+
# Typical help block looks like this:
|
| 317 |
+
# --foo controls foonabulation
|
| 318 |
+
# Help block for longest option looks like this:
|
| 319 |
+
# --flimflam set the flim-flam level
|
| 320 |
+
# and with wrapped text:
|
| 321 |
+
# --flimflam set the flim-flam level (must be between
|
| 322 |
+
# 0 and 100, except on Tuesdays)
|
| 323 |
+
# Options with short names will have the short name shown (but
|
| 324 |
+
# it doesn't contribute to max_opt):
|
| 325 |
+
# --foo (-f) controls foonabulation
|
| 326 |
+
# If adding the short option would make the left column too wide,
|
| 327 |
+
# we push the explanation off to the next line
|
| 328 |
+
# --flimflam (-l)
|
| 329 |
+
# set the flim-flam level
|
| 330 |
+
# Important parameters:
|
| 331 |
+
# - 2 spaces before option block start lines
|
| 332 |
+
# - 2 dashes for each long option name
|
| 333 |
+
# - min. 2 spaces between option and explanation (gutter)
|
| 334 |
+
# - 5 characters (incl. space) for short option name
|
| 335 |
+
|
| 336 |
+
# Now generate lines of help text. (If 80 columns were good enough
|
| 337 |
+
# for Jesus, then 78 columns are good enough for me!)
|
| 338 |
+
line_width = 78
|
| 339 |
+
text_width = line_width - opt_width
|
| 340 |
+
big_indent = ' ' * opt_width
|
| 341 |
+
if header:
|
| 342 |
+
lines = [header]
|
| 343 |
+
else:
|
| 344 |
+
lines = ['Option summary:']
|
| 345 |
+
|
| 346 |
+
for option in self.option_table:
|
| 347 |
+
long, short, help = option[:3]
|
| 348 |
+
text = wrap_text(help, text_width)
|
| 349 |
+
if long[-1] == '=':
|
| 350 |
+
long = long[0:-1]
|
| 351 |
+
|
| 352 |
+
# Case 1: no short option at all (makes life easy)
|
| 353 |
+
if short is None:
|
| 354 |
+
if text:
|
| 355 |
+
lines.append(" --%-*s %s" % (max_opt, long, text[0]))
|
| 356 |
+
else:
|
| 357 |
+
lines.append(" --%-*s " % (max_opt, long))
|
| 358 |
+
|
| 359 |
+
# Case 2: we have a short option, so we have to include it
|
| 360 |
+
# just after the long option
|
| 361 |
+
else:
|
| 362 |
+
opt_names = "{} (-{})".format(long, short)
|
| 363 |
+
if text:
|
| 364 |
+
lines.append(" --%-*s %s" % (max_opt, opt_names, text[0]))
|
| 365 |
+
else:
|
| 366 |
+
lines.append(" --%-*s" % opt_names)
|
| 367 |
+
|
| 368 |
+
for ell in text[1:]:
|
| 369 |
+
lines.append(big_indent + ell)
|
| 370 |
+
return lines
|
| 371 |
+
|
| 372 |
+
def print_help(self, header=None, file=None):
|
| 373 |
+
if file is None:
|
| 374 |
+
file = sys.stdout
|
| 375 |
+
for line in self.generate_help(header):
|
| 376 |
+
file.write(line + "\n")
|
| 377 |
+
|
| 378 |
+
|
| 379 |
+
def fancy_getopt(options, negative_opt, object, args):
|
| 380 |
+
parser = FancyGetopt(options)
|
| 381 |
+
parser.set_negative_aliases(negative_opt)
|
| 382 |
+
return parser.getopt(args, object)
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
# Translation table mapping every whitespace character to a plain space.
WS_TRANS = {ord(_wschar): ' ' for _wschar in string.whitespace}


def wrap_text(text, width):
    """wrap_text(text : string, width : int) -> [string]

    Split 'text' into multiple lines of no more than 'width' characters
    each, and return the list of strings that results.
    """
    if text is None:
        return []
    if len(text) <= width:
        return [text]

    # Normalize: expand tabs, then collapse every whitespace char to ' '.
    normalized = text.expandtabs().translate(WS_TRANS)
    # Break into alternating word/separator chunks; separators are runs of
    # spaces or hyphens.  Drop the empty strings that adjacent separators
    # (e.g. ' - ') produce.
    pieces = [p for p in re.split(r'( +|-+)', normalized) if p]

    lines = []
    while pieces:
        line_parts = []  # chunks that will be joined into the current line
        line_len = 0     # running width of the current line

        while pieces:
            piece_len = len(pieces[0])
            if line_len + piece_len > width:
                # The line is full; a trailing all-space chunk is dropped.
                if line_parts and line_parts[-1][0] == ' ':
                    del line_parts[-1]
                break
            line_parts.append(pieces.pop(0))
            line_len += piece_len

        if pieces:
            # If nothing fit, the next chunk alone is wider than 'width':
            # hard-split it at the line width.
            if line_len == 0:
                line_parts.append(pieces[0][:width])
                pieces[0] = pieces[0][width:]

            # A chunk is either all-whitespace or has none (see the split
            # pattern above), so a leading space chunk can be discarded
            # before starting the next line.
            if pieces[0][0] == ' ':
                del pieces[0]

        lines.append(''.join(line_parts))

    return lines
|
| 440 |
+
|
| 441 |
+
|
| 442 |
+
def translate_longopt(opt):
    """Convert a long option name to a valid Python identifier by
    changing "-" to "_".
    """
    # 'longopt_xlate' is the module-level translation table mapping
    # '-' to '_'.
    identifier = opt.translate(longopt_xlate)
    return identifier
|
| 447 |
+
|
| 448 |
+
|
| 449 |
+
class OptionDummy:
    """Dummy class just used as a place to hold command-line option
    values as instance attributes."""

    def __init__(self, options=()):
        """Create a new OptionDummy instance.  The attributes listed in
        'options' (any iterable of attribute names) will be initialized
        to None.

        The default is an immutable empty tuple rather than the mutable
        list default the original used, which risked accidental sharing
        between calls.
        """
        for opt in options:
            setattr(self, opt, None)
|
| 458 |
+
|
| 459 |
+
|
| 460 |
+
if __name__ == "__main__":
    # Quick self-demo: wrap a sample paragraph at several widths and
    # print each result.
    text = """\
Tra-la-la, supercalifragilisticexpialidocious.
How *do* you spell that odd word, anyways?
(Someone ask Mary -- she'll know [or she'll
say, "How should I know?"].)"""

    for demo_width in (10, 20, 30, 40):
        print("width: %d" % demo_width)
        print("\n".join(wrap_text(text, demo_width)))
        print()
|
.venv/Lib/site-packages/setuptools/_distutils/file_util.py
ADDED
|
@@ -0,0 +1,248 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.file_util
|
| 2 |
+
|
| 3 |
+
Utility functions for operating on single files.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
from .errors import DistutilsFileError
|
| 8 |
+
from ._log import log
|
| 9 |
+
|
| 10 |
+
# for generating verbose output in 'copy_file()'
|
| 11 |
+
_copy_action = {None: 'copying', 'hard': 'hard linking', 'sym': 'symbolically linking'}
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def _copy_file_contents(src, dst, buffer_size=16 * 1024): # noqa: C901
|
| 15 |
+
"""Copy the file 'src' to 'dst'; both must be filenames. Any error
|
| 16 |
+
opening either file, reading from 'src', or writing to 'dst', raises
|
| 17 |
+
DistutilsFileError. Data is read/written in chunks of 'buffer_size'
|
| 18 |
+
bytes (default 16k). No attempt is made to handle anything apart from
|
| 19 |
+
regular files.
|
| 20 |
+
"""
|
| 21 |
+
# Stolen from shutil module in the standard library, but with
|
| 22 |
+
# custom error-handling added.
|
| 23 |
+
fsrc = None
|
| 24 |
+
fdst = None
|
| 25 |
+
try:
|
| 26 |
+
try:
|
| 27 |
+
fsrc = open(src, 'rb')
|
| 28 |
+
except OSError as e:
|
| 29 |
+
raise DistutilsFileError("could not open '{}': {}".format(src, e.strerror))
|
| 30 |
+
|
| 31 |
+
if os.path.exists(dst):
|
| 32 |
+
try:
|
| 33 |
+
os.unlink(dst)
|
| 34 |
+
except OSError as e:
|
| 35 |
+
raise DistutilsFileError(
|
| 36 |
+
"could not delete '{}': {}".format(dst, e.strerror)
|
| 37 |
+
)
|
| 38 |
+
|
| 39 |
+
try:
|
| 40 |
+
fdst = open(dst, 'wb')
|
| 41 |
+
except OSError as e:
|
| 42 |
+
raise DistutilsFileError(
|
| 43 |
+
"could not create '{}': {}".format(dst, e.strerror)
|
| 44 |
+
)
|
| 45 |
+
|
| 46 |
+
while True:
|
| 47 |
+
try:
|
| 48 |
+
buf = fsrc.read(buffer_size)
|
| 49 |
+
except OSError as e:
|
| 50 |
+
raise DistutilsFileError(
|
| 51 |
+
"could not read from '{}': {}".format(src, e.strerror)
|
| 52 |
+
)
|
| 53 |
+
|
| 54 |
+
if not buf:
|
| 55 |
+
break
|
| 56 |
+
|
| 57 |
+
try:
|
| 58 |
+
fdst.write(buf)
|
| 59 |
+
except OSError as e:
|
| 60 |
+
raise DistutilsFileError(
|
| 61 |
+
"could not write to '{}': {}".format(dst, e.strerror)
|
| 62 |
+
)
|
| 63 |
+
finally:
|
| 64 |
+
if fdst:
|
| 65 |
+
fdst.close()
|
| 66 |
+
if fsrc:
|
| 67 |
+
fsrc.close()
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def copy_file(  # noqa: C901
    src,
    dst,
    preserve_mode=1,
    preserve_times=1,
    update=0,
    link=None,
    verbose=1,
    dry_run=0,
):
    """Copy a file 'src' to 'dst'.  If 'dst' is a directory, then 'src' is
    copied there with the same name; otherwise, it must be a filename.  (If
    the file exists, it will be ruthlessly clobbered.)  If 'preserve_mode'
    is true (the default), the file's mode (type and permission bits, or
    whatever is analogous on the current platform) is copied.  If
    'preserve_times' is true (the default), the last-modified and
    last-access times are copied as well.  If 'update' is true, 'src' will
    only be copied if 'dst' does not exist, or if 'dst' does exist but is
    older than 'src'.

    'link' allows you to make hard links (os.link) or symbolic links
    (os.symlink) instead of copying: set it to "hard" or "sym"; if it is
    None (the default), files are copied.  Don't set 'link' on systems that
    don't support it: 'copy_file()' doesn't check if hard or symbolic
    linking is available.  If hardlink fails, falls back to
    _copy_file_contents().

    Under Mac OS, uses the native file copy function in macostools; on
    other systems, uses '_copy_file_contents()' to copy file contents.

    Return a tuple (dest_name, copied): 'dest_name' is the actual name of
    the output file, and 'copied' is true if the file was copied (or would
    have been copied, if 'dry_run' true).
    """
    # XXX if the destination file already exists, we clobber it if
    # copying, but blow up if linking.  Hmmm.  And I don't know what
    # macostools.copyfile() does.  Should definitely be consistent, and
    # should probably blow up if destination exists and we would be
    # changing it (ie. it's not already a hard/soft link to src OR
    # (not update) and (src newer than dst).

    from distutils.dep_util import newer
    from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE

    if not os.path.isfile(src):
        raise DistutilsFileError(
            "can't copy '%s': doesn't exist or not a regular file" % src
        )

    # If 'dst' is a directory, the output keeps the source's basename;
    # 'dir' is remembered only for the log message below.
    if os.path.isdir(dst):
        dir = dst
        dst = os.path.join(dst, os.path.basename(src))
    else:
        dir = os.path.dirname(dst)

    if update and not newer(src, dst):
        if verbose >= 1:
            log.debug("not copying %s (output up-to-date)", src)
        return (dst, 0)

    # Validate 'link' by looking up its verbose-log verb; only None,
    # 'hard' and 'sym' are keys of the module-level _copy_action table.
    try:
        action = _copy_action[link]
    except KeyError:
        raise ValueError("invalid value '%s' for 'link' argument" % link)

    if verbose >= 1:
        if os.path.basename(dst) == os.path.basename(src):
            log.info("%s %s -> %s", action, src, dir)
        else:
            log.info("%s %s -> %s", action, src, dst)

    # NOTE: this 'if' returns, so the 'elif' branches below only run when
    # not a dry run; the unusual if/elif chaining is intentional.
    if dry_run:
        return (dst, 1)

    # If linking (hard or symbolic), use the appropriate system call
    # (Unix only, of course, but that's the caller's responsibility)
    elif link == 'hard':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            try:
                os.link(src, dst)
                return (dst, 1)
            except OSError:
                # If hard linking fails, fall back on copying file
                # (some special filesystems don't support hard linking
                # even under Unix, see issue #8876).
                pass
    elif link == 'sym':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.symlink(src, dst)
            return (dst, 1)

    # Otherwise (non-Mac, not linking), copy the file contents and
    # (optionally) copy the times and mode.
    _copy_file_contents(src, dst)
    if preserve_mode or preserve_times:
        st = os.stat(src)

        # According to David Ascher <da@ski.org>, utime() should be done
        # before chmod() (at least under NT).
        if preserve_times:
            os.utime(dst, (st[ST_ATIME], st[ST_MTIME]))
        if preserve_mode:
            os.chmod(dst, S_IMODE(st[ST_MODE]))

    return (dst, 1)
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
# XXX I suspect this is Unix-specific -- need porting help!
|
| 178 |
+
def move_file(src, dst, verbose=1, dry_run=0):  # noqa: C901
    """Move a file 'src' to 'dst'.  If 'dst' is a directory, the file will
    be moved into it with the same name; otherwise, 'src' is just renamed
    to 'dst'.  Return the new full name of the file.

    Handles cross-device moves on Unix using 'copy_file()'.  What about
    other systems???
    """
    from os.path import exists, isfile, isdir, basename, dirname
    import errno

    if verbose >= 1:
        log.info("moving %s -> %s", src, dst)

    if dry_run:
        return dst

    if not isfile(src):
        raise DistutilsFileError("can't move '%s': not a regular file" % src)

    if isdir(dst):
        dst = os.path.join(dst, basename(src))
    elif exists(dst):
        # Unlike copy_file(), moving refuses to clobber an existing file.
        raise DistutilsFileError(
            "can't move '{}': destination '{}' already exists".format(src, dst)
        )

    if not isdir(dirname(dst)):
        raise DistutilsFileError(
            "can't move '{}': destination '{}' not a valid path".format(src, dst)
        )

    # Try a plain rename first; it is atomic but only works within one
    # filesystem.  EXDEV means cross-device, which we handle by copying.
    copy_it = False
    try:
        os.rename(src, dst)
    except OSError as e:
        (num, msg) = e.args
        if num == errno.EXDEV:
            copy_it = True
        else:
            raise DistutilsFileError(
                "couldn't move '{}' to '{}': {}".format(src, dst, msg)
            )

    if copy_it:
        copy_file(src, dst, verbose=verbose)
        try:
            os.unlink(src)
        except OSError as e:
            (num, msg) = e.args
            # Deleting the source failed: remove the copy we just made so
            # the move fails cleanly rather than leaving two files behind.
            try:
                os.unlink(dst)
            except OSError:
                pass
            raise DistutilsFileError(
                "couldn't move '%s' to '%s' by copy/delete: "
                "delete '%s' failed: %s" % (src, dst, src, msg)
            )
    return dst
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
def write_file(filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.

    Each string is written followed by a newline; the file is always
    closed, even if a write fails.
    """
    # Idiom fix: 'with' replaces the original try/finally close.
    with open(filename, "w") as f:
        for line in contents:
            f.write(line + "\n")
|
.venv/Lib/site-packages/setuptools/_distutils/filelist.py
ADDED
|
@@ -0,0 +1,371 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.filelist
|
| 2 |
+
|
| 3 |
+
Provides the FileList class, used for poking about the filesystem
|
| 4 |
+
and building lists of files.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
import re
|
| 9 |
+
import fnmatch
|
| 10 |
+
import functools
|
| 11 |
+
|
| 12 |
+
from .util import convert_path
|
| 13 |
+
from .errors import DistutilsTemplateError, DistutilsInternalError
|
| 14 |
+
from ._log import log
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class FileList:
    """A list of files built by on exploring the filesystem and filtered by
    applying various patterns to what we find there.

    Instance attributes:
      dir
        directory from which files will be taken -- only used if
        'allfiles' not supplied to constructor
      files
        list of filenames currently being built/filtered/manipulated
      allfiles
        complete list of files under consideration (ie. without any
        filtering applied)
    """

    def __init__(self, warn=None, debug_print=None):
        # ignore argument to FileList, but keep them for backwards
        # compatibility
        self.allfiles = None  # lazily populated by findall()
        self.files = []

    def set_allfiles(self, allfiles):
        # Replace the candidate-file universe wholesale (skips findall()).
        self.allfiles = allfiles

    def findall(self, dir=os.curdir):
        # Populate 'allfiles' with every file under 'dir' via the
        # module-level findall().
        self.allfiles = findall(dir)

    def debug_print(self, msg):
        """Print 'msg' to stdout if the global DEBUG (taken from the
        DISTUTILS_DEBUG environment variable) flag is true.
        """
        from distutils.debug import DEBUG

        if DEBUG:
            print(msg)

    # Collection methods

    def append(self, item):
        self.files.append(item)

    def extend(self, items):
        self.files.extend(items)

    def sort(self):
        # Not a strict lexical sort!
        # Sorts by (directory, basename) pairs so path components compare
        # individually rather than as raw strings.
        sortable_files = sorted(map(os.path.split, self.files))
        self.files = []
        for sort_tuple in sortable_files:
            self.files.append(os.path.join(*sort_tuple))

    # Other miscellaneous utility methods

    def remove_duplicates(self):
        # Assumes list has been sorted!
        # Walk backwards so deleting an element never shifts an index we
        # have yet to visit.
        for i in range(len(self.files) - 1, 0, -1):
            if self.files[i] == self.files[i - 1]:
                del self.files[i]

    # "File template" methods

    def _parse_template_line(self, line):
        """Split one MANIFEST.in-style template line into
        (action, patterns, dir, dir_pattern); the unused slots are None.
        Raises DistutilsTemplateError for a malformed line.
        """
        words = line.split()
        action = words[0]

        patterns = dir = dir_pattern = None

        if action in ('include', 'exclude', 'global-include', 'global-exclude'):
            if len(words) < 2:
                raise DistutilsTemplateError(
                    "'%s' expects <pattern1> <pattern2> ..." % action
                )
            patterns = [convert_path(w) for w in words[1:]]
        elif action in ('recursive-include', 'recursive-exclude'):
            if len(words) < 3:
                raise DistutilsTemplateError(
                    "'%s' expects <dir> <pattern1> <pattern2> ..." % action
                )
            dir = convert_path(words[1])
            patterns = [convert_path(w) for w in words[2:]]
        elif action in ('graft', 'prune'):
            if len(words) != 2:
                raise DistutilsTemplateError(
                    "'%s' expects a single <dir_pattern>" % action
                )
            dir_pattern = convert_path(words[1])
        else:
            raise DistutilsTemplateError("unknown action '%s'" % action)

        return (action, patterns, dir, dir_pattern)

    def process_template_line(self, line):  # noqa: C901
        """Apply one template line to self.files, logging a warning when a
        pattern matches nothing.
        """
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words.  'action' is always
        # defined: it's the first word of the line.  Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            self.debug_print("include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=1):
                    log.warning("warning: no files found matching '%s'", pattern)

        elif action == 'exclude':
            self.debug_print("exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=1):
                    log.warning(
                        (
                            "warning: no previously-included files "
                            "found matching '%s'"
                        ),
                        pattern,
                    )

        elif action == 'global-include':
            self.debug_print("global-include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=0):
                    log.warning(
                        (
                            "warning: no files found matching '%s' "
                            "anywhere in distribution"
                        ),
                        pattern,
                    )

        elif action == 'global-exclude':
            self.debug_print("global-exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=0):
                    log.warning(
                        (
                            "warning: no previously-included files matching "
                            "'%s' found anywhere in distribution"
                        ),
                        pattern,
                    )

        elif action == 'recursive-include':
            self.debug_print("recursive-include {} {}".format(dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.include_pattern(pattern, prefix=dir):
                    msg = (
                        "warning: no files found matching '%s' " "under directory '%s'"
                    )
                    log.warning(msg, pattern, dir)

        elif action == 'recursive-exclude':
            self.debug_print("recursive-exclude {} {}".format(dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, prefix=dir):
                    log.warning(
                        (
                            "warning: no previously-included files matching "
                            "'%s' found under directory '%s'"
                        ),
                        pattern,
                        dir,
                    )

        elif action == 'graft':
            self.debug_print("graft " + dir_pattern)
            if not self.include_pattern(None, prefix=dir_pattern):
                log.warning("warning: no directories found matching '%s'", dir_pattern)

        elif action == 'prune':
            self.debug_print("prune " + dir_pattern)
            if not self.exclude_pattern(None, prefix=dir_pattern):
                log.warning(
                    ("no previously-included directories found " "matching '%s'"),
                    dir_pattern,
                )
        else:
            # _parse_template_line already rejected unknown actions, so
            # reaching here indicates an internal inconsistency.
            raise DistutilsInternalError(
                "this cannot happen: invalid action '%s'" % action
            )

    # Filtering/selection methods

    def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
        """Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.  Patterns
        are not quite the same as implemented by the 'fnmatch' module: '*'
        and '?' match non-special characters, where "special" is platform-
        dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match.  'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found, False otherwise.
        """
        # XXX docstring lying about what the special chars are?
        files_found = False
        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
        self.debug_print("include_pattern: applying regex r'%s'" % pattern_re.pattern)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.debug_print(" adding " + name)
                self.files.append(name)
                files_found = True
        return files_found

    def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.  Other parameters are the same as for
        'include_pattern()', above.
        The list 'self.files' is modified in place.
        Return True if files are found, False otherwise.
        """
        files_found = False
        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
        self.debug_print("exclude_pattern: applying regex r'%s'" % pattern_re.pattern)
        # Iterate backwards so in-place deletion does not skip elements.
        for i in range(len(self.files) - 1, -1, -1):
            if pattern_re.search(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                files_found = True
        return files_found
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
# Utility functions
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
def _find_all_simple(path):
    """
    Find all files under 'path'
    """
    walk_results = _UniqueDirs.filter(os.walk(path, followlinks=True))
    candidates = (
        os.path.join(dirpath, filename)
        for dirpath, _dirnames, filenames in walk_results
        for filename in filenames
    )
    # Keep only regular files (drops broken symlinks and the like).
    return filter(os.path.isfile, candidates)
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
class _UniqueDirs(set):
|
| 276 |
+
"""
|
| 277 |
+
Exclude previously-seen dirs from walk results,
|
| 278 |
+
avoiding infinite recursion.
|
| 279 |
+
Ref https://bugs.python.org/issue44497.
|
| 280 |
+
"""
|
| 281 |
+
|
| 282 |
+
def __call__(self, walk_item):
|
| 283 |
+
"""
|
| 284 |
+
Given an item from an os.walk result, determine
|
| 285 |
+
if the item represents a unique dir for this instance
|
| 286 |
+
and if not, prevent further traversal.
|
| 287 |
+
"""
|
| 288 |
+
base, dirs, files = walk_item
|
| 289 |
+
stat = os.stat(base)
|
| 290 |
+
candidate = stat.st_dev, stat.st_ino
|
| 291 |
+
found = candidate in self
|
| 292 |
+
if found:
|
| 293 |
+
del dirs[:]
|
| 294 |
+
self.add(candidate)
|
| 295 |
+
return not found
|
| 296 |
+
|
| 297 |
+
@classmethod
|
| 298 |
+
def filter(cls, items):
|
| 299 |
+
return filter(cls(), items)
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
def findall(dir=os.curdir):
    """
    Find all files under 'dir' and return the list of full filenames.
    Unless dir is '.', return full filenames with dir prepended.
    """
    found = _find_all_simple(dir)
    if dir == os.curdir:
        # For the current directory, strip the leading './' so results
        # are bare relative names.
        found = map(functools.partial(os.path.relpath, start=dir), found)
    return list(found)
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
def glob_to_re(pattern):
    """Translate a shell-like glob pattern to a regular expression; return
    a string containing the regex.  Differs from 'fnmatch.translate()' in
    that '*' does not match "special characters" (which are
    platform-specific).
    """
    translated = fnmatch.translate(pattern)

    # fnmatch turns '?' and '*' into '.' and '.*', which would match the
    # path separator -- wrong for filename globbing.  Rewrite every
    # non-escaped '.' in the regex to a character class that excludes
    # os.sep, so wildcards stay within one path component.  On Windows
    # the separator is a backslash, which must be escaped twice (we are
    # using a regex to build a regex).
    sep_re = r'\\\\' if os.sep == '\\' else os.sep
    replacement = r'\1[^%s]' % sep_re
    return re.sub(r'((?<!\\)(\\\\)*)\.', replacement, translated)
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0):
    """Translate a shell-like wildcard pattern to a compiled regular
    expression.  Return the compiled regex.  If 'is_regex' true,
    then 'pattern' is directly compiled to a regex (if it's a string)
    or just returned as-is (assumes it's a regex object).
    """
    if is_regex:
        if isinstance(pattern, str):
            return re.compile(pattern)
        else:
            return pattern

    # ditch start and end characters
    # glob_to_re wraps its result in version-dependent framing (e.g.
    # '(?s:' ... ')\\Z'); translating a one-char pattern and splitting on
    # that char recovers the framing without hard-coding it.
    start, _, end = glob_to_re('_').partition('_')

    if pattern:
        pattern_re = glob_to_re(pattern)
        assert pattern_re.startswith(start) and pattern_re.endswith(end)
    else:
        pattern_re = ''

    if prefix is not None:
        # Build: <start>\A<prefix><sep>.*<pattern><end>, stripping the
        # framing from the intermediate regexes so it appears only once.
        prefix_re = glob_to_re(prefix)
        assert prefix_re.startswith(start) and prefix_re.endswith(end)
        prefix_re = prefix_re[len(start) : len(prefix_re) - len(end)]
        sep = os.sep
        if os.sep == '\\':
            sep = r'\\'
        pattern_re = pattern_re[len(start) : len(pattern_re) - len(end)]
        pattern_re = r'{}\A{}{}.*{}{}'.format(start, prefix_re, sep, pattern_re, end)
    else:  # no prefix -- respect anchor flag
        if anchor:
            # Anchor the match at the beginning of the string.
            pattern_re = r'{}\A{}'.format(start, pattern_re[len(start) :])

    return re.compile(pattern_re)
|
.venv/Lib/site-packages/setuptools/_distutils/log.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
A simple log mechanism styled after PEP 282.
|
| 3 |
+
|
| 4 |
+
Retained for compatibility and should not be used.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import logging
|
| 8 |
+
import warnings
|
| 9 |
+
|
| 10 |
+
from ._log import log as _global_log
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
DEBUG = logging.DEBUG
|
| 14 |
+
INFO = logging.INFO
|
| 15 |
+
WARN = logging.WARN
|
| 16 |
+
ERROR = logging.ERROR
|
| 17 |
+
FATAL = logging.FATAL
|
| 18 |
+
|
| 19 |
+
log = _global_log.log
|
| 20 |
+
debug = _global_log.debug
|
| 21 |
+
info = _global_log.info
|
| 22 |
+
warn = _global_log.warning
|
| 23 |
+
error = _global_log.error
|
| 24 |
+
fatal = _global_log.fatal
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def set_threshold(level):
|
| 28 |
+
orig = _global_log.level
|
| 29 |
+
_global_log.setLevel(level)
|
| 30 |
+
return orig
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def set_verbosity(v):
|
| 34 |
+
if v <= 0:
|
| 35 |
+
set_threshold(logging.WARN)
|
| 36 |
+
elif v == 1:
|
| 37 |
+
set_threshold(logging.INFO)
|
| 38 |
+
elif v >= 2:
|
| 39 |
+
set_threshold(logging.DEBUG)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class Log(logging.Logger):
|
| 43 |
+
"""distutils.log.Log is deprecated, please use an alternative from `logging`."""
|
| 44 |
+
|
| 45 |
+
def __init__(self, threshold=WARN):
|
| 46 |
+
warnings.warn(Log.__doc__) # avoid DeprecationWarning to ensure warn is shown
|
| 47 |
+
super().__init__(__name__, level=threshold)
|
| 48 |
+
|
| 49 |
+
@property
|
| 50 |
+
def threshold(self):
|
| 51 |
+
return self.level
|
| 52 |
+
|
| 53 |
+
@threshold.setter
|
| 54 |
+
def threshold(self, level):
|
| 55 |
+
self.setLevel(level)
|
| 56 |
+
|
| 57 |
+
warn = logging.Logger.warning
|
.venv/Lib/site-packages/setuptools/_distutils/msvc9compiler.py
ADDED
|
@@ -0,0 +1,829 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.msvc9compiler
|
| 2 |
+
|
| 3 |
+
Contains MSVCCompiler, an implementation of the abstract CCompiler class
|
| 4 |
+
for the Microsoft Visual Studio 2008.
|
| 5 |
+
|
| 6 |
+
The module is compatible with VS 2005 and VS 2008. You can find legacy support
|
| 7 |
+
for older versions of VS in distutils.msvccompiler.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
# Written by Perry Stoll
|
| 11 |
+
# hacked by Robin Becker and Thomas Heller to do a better job of
|
| 12 |
+
# finding DevStudio (through the registry)
|
| 13 |
+
# ported to VS2005 and VS 2008 by Christian Heimes
|
| 14 |
+
|
| 15 |
+
import os
|
| 16 |
+
import subprocess
|
| 17 |
+
import sys
|
| 18 |
+
import re
|
| 19 |
+
import warnings
|
| 20 |
+
|
| 21 |
+
from .errors import (
|
| 22 |
+
DistutilsExecError,
|
| 23 |
+
DistutilsPlatformError,
|
| 24 |
+
CompileError,
|
| 25 |
+
LibError,
|
| 26 |
+
LinkError,
|
| 27 |
+
)
|
| 28 |
+
from .ccompiler import CCompiler, gen_lib_options
|
| 29 |
+
from ._log import log
|
| 30 |
+
from .util import get_platform
|
| 31 |
+
|
| 32 |
+
import winreg
|
| 33 |
+
|
| 34 |
+
warnings.warn(
|
| 35 |
+
"msvc9compiler is deprecated and slated to be removed "
|
| 36 |
+
"in the future. Please discontinue use or file an issue "
|
| 37 |
+
"with pypa/distutils describing your use case.",
|
| 38 |
+
DeprecationWarning,
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
RegOpenKeyEx = winreg.OpenKeyEx
|
| 42 |
+
RegEnumKey = winreg.EnumKey
|
| 43 |
+
RegEnumValue = winreg.EnumValue
|
| 44 |
+
RegError = winreg.error
|
| 45 |
+
|
| 46 |
+
HKEYS = (
|
| 47 |
+
winreg.HKEY_USERS,
|
| 48 |
+
winreg.HKEY_CURRENT_USER,
|
| 49 |
+
winreg.HKEY_LOCAL_MACHINE,
|
| 50 |
+
winreg.HKEY_CLASSES_ROOT,
|
| 51 |
+
)
|
| 52 |
+
|
| 53 |
+
NATIVE_WIN64 = sys.platform == 'win32' and sys.maxsize > 2**32
|
| 54 |
+
if NATIVE_WIN64:
|
| 55 |
+
# Visual C++ is a 32-bit application, so we need to look in
|
| 56 |
+
# the corresponding registry branch, if we're running a
|
| 57 |
+
# 64-bit Python on Win64
|
| 58 |
+
VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f"
|
| 59 |
+
WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows"
|
| 60 |
+
NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework"
|
| 61 |
+
else:
|
| 62 |
+
VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f"
|
| 63 |
+
WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows"
|
| 64 |
+
NET_BASE = r"Software\Microsoft\.NETFramework"
|
| 65 |
+
|
| 66 |
+
# A map keyed by get_platform() return values to values accepted by
|
| 67 |
+
# 'vcvarsall.bat'. Note a cross-compile may combine these (eg, 'x86_amd64' is
|
| 68 |
+
# the param to cross-compile on x86 targeting amd64.)
|
| 69 |
+
PLAT_TO_VCVARS = {
|
| 70 |
+
'win32': 'x86',
|
| 71 |
+
'win-amd64': 'amd64',
|
| 72 |
+
}
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class Reg:
|
| 76 |
+
"""Helper class to read values from the registry"""
|
| 77 |
+
|
| 78 |
+
def get_value(cls, path, key):
|
| 79 |
+
for base in HKEYS:
|
| 80 |
+
d = cls.read_values(base, path)
|
| 81 |
+
if d and key in d:
|
| 82 |
+
return d[key]
|
| 83 |
+
raise KeyError(key)
|
| 84 |
+
|
| 85 |
+
get_value = classmethod(get_value)
|
| 86 |
+
|
| 87 |
+
def read_keys(cls, base, key):
|
| 88 |
+
"""Return list of registry keys."""
|
| 89 |
+
try:
|
| 90 |
+
handle = RegOpenKeyEx(base, key)
|
| 91 |
+
except RegError:
|
| 92 |
+
return None
|
| 93 |
+
L = []
|
| 94 |
+
i = 0
|
| 95 |
+
while True:
|
| 96 |
+
try:
|
| 97 |
+
k = RegEnumKey(handle, i)
|
| 98 |
+
except RegError:
|
| 99 |
+
break
|
| 100 |
+
L.append(k)
|
| 101 |
+
i += 1
|
| 102 |
+
return L
|
| 103 |
+
|
| 104 |
+
read_keys = classmethod(read_keys)
|
| 105 |
+
|
| 106 |
+
def read_values(cls, base, key):
|
| 107 |
+
"""Return dict of registry keys and values.
|
| 108 |
+
|
| 109 |
+
All names are converted to lowercase.
|
| 110 |
+
"""
|
| 111 |
+
try:
|
| 112 |
+
handle = RegOpenKeyEx(base, key)
|
| 113 |
+
except RegError:
|
| 114 |
+
return None
|
| 115 |
+
d = {}
|
| 116 |
+
i = 0
|
| 117 |
+
while True:
|
| 118 |
+
try:
|
| 119 |
+
name, value, type = RegEnumValue(handle, i)
|
| 120 |
+
except RegError:
|
| 121 |
+
break
|
| 122 |
+
name = name.lower()
|
| 123 |
+
d[cls.convert_mbcs(name)] = cls.convert_mbcs(value)
|
| 124 |
+
i += 1
|
| 125 |
+
return d
|
| 126 |
+
|
| 127 |
+
read_values = classmethod(read_values)
|
| 128 |
+
|
| 129 |
+
def convert_mbcs(s):
|
| 130 |
+
dec = getattr(s, "decode", None)
|
| 131 |
+
if dec is not None:
|
| 132 |
+
try:
|
| 133 |
+
s = dec("mbcs")
|
| 134 |
+
except UnicodeError:
|
| 135 |
+
pass
|
| 136 |
+
return s
|
| 137 |
+
|
| 138 |
+
convert_mbcs = staticmethod(convert_mbcs)
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
class MacroExpander:
|
| 142 |
+
def __init__(self, version):
|
| 143 |
+
self.macros = {}
|
| 144 |
+
self.vsbase = VS_BASE % version
|
| 145 |
+
self.load_macros(version)
|
| 146 |
+
|
| 147 |
+
def set_macro(self, macro, path, key):
|
| 148 |
+
self.macros["$(%s)" % macro] = Reg.get_value(path, key)
|
| 149 |
+
|
| 150 |
+
def load_macros(self, version):
|
| 151 |
+
self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
|
| 152 |
+
self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir")
|
| 153 |
+
self.set_macro("FrameworkDir", NET_BASE, "installroot")
|
| 154 |
+
try:
|
| 155 |
+
if version >= 8.0:
|
| 156 |
+
self.set_macro("FrameworkSDKDir", NET_BASE, "sdkinstallrootv2.0")
|
| 157 |
+
else:
|
| 158 |
+
raise KeyError("sdkinstallrootv2.0")
|
| 159 |
+
except KeyError:
|
| 160 |
+
raise DistutilsPlatformError(
|
| 161 |
+
"""Python was built with Visual Studio 2008;
|
| 162 |
+
extensions must be built with a compiler than can generate compatible binaries.
|
| 163 |
+
Visual Studio 2008 was not found on this system. If you have Cygwin installed,
|
| 164 |
+
you can try compiling with MingW32, by passing "-c mingw32" to setup.py."""
|
| 165 |
+
)
|
| 166 |
+
|
| 167 |
+
if version >= 9.0:
|
| 168 |
+
self.set_macro("FrameworkVersion", self.vsbase, "clr version")
|
| 169 |
+
self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder")
|
| 170 |
+
else:
|
| 171 |
+
p = r"Software\Microsoft\NET Framework Setup\Product"
|
| 172 |
+
for base in HKEYS:
|
| 173 |
+
try:
|
| 174 |
+
h = RegOpenKeyEx(base, p)
|
| 175 |
+
except RegError:
|
| 176 |
+
continue
|
| 177 |
+
key = RegEnumKey(h, 0)
|
| 178 |
+
d = Reg.get_value(base, r"{}\{}".format(p, key))
|
| 179 |
+
self.macros["$(FrameworkVersion)"] = d["version"]
|
| 180 |
+
|
| 181 |
+
def sub(self, s):
|
| 182 |
+
for k, v in self.macros.items():
|
| 183 |
+
s = s.replace(k, v)
|
| 184 |
+
return s
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
def get_build_version():
|
| 188 |
+
"""Return the version of MSVC that was used to build Python.
|
| 189 |
+
|
| 190 |
+
For Python 2.3 and up, the version number is included in
|
| 191 |
+
sys.version. For earlier versions, assume the compiler is MSVC 6.
|
| 192 |
+
"""
|
| 193 |
+
prefix = "MSC v."
|
| 194 |
+
i = sys.version.find(prefix)
|
| 195 |
+
if i == -1:
|
| 196 |
+
return 6
|
| 197 |
+
i = i + len(prefix)
|
| 198 |
+
s, rest = sys.version[i:].split(" ", 1)
|
| 199 |
+
majorVersion = int(s[:-2]) - 6
|
| 200 |
+
if majorVersion >= 13:
|
| 201 |
+
# v13 was skipped and should be v14
|
| 202 |
+
majorVersion += 1
|
| 203 |
+
minorVersion = int(s[2:3]) / 10.0
|
| 204 |
+
# I don't think paths are affected by minor version in version 6
|
| 205 |
+
if majorVersion == 6:
|
| 206 |
+
minorVersion = 0
|
| 207 |
+
if majorVersion >= 6:
|
| 208 |
+
return majorVersion + minorVersion
|
| 209 |
+
# else we don't know what version of the compiler this is
|
| 210 |
+
return None
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
def normalize_and_reduce_paths(paths):
|
| 214 |
+
"""Return a list of normalized paths with duplicates removed.
|
| 215 |
+
|
| 216 |
+
The current order of paths is maintained.
|
| 217 |
+
"""
|
| 218 |
+
# Paths are normalized so things like: /a and /a/ aren't both preserved.
|
| 219 |
+
reduced_paths = []
|
| 220 |
+
for p in paths:
|
| 221 |
+
np = os.path.normpath(p)
|
| 222 |
+
# XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
|
| 223 |
+
if np not in reduced_paths:
|
| 224 |
+
reduced_paths.append(np)
|
| 225 |
+
return reduced_paths
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
def removeDuplicates(variable):
|
| 229 |
+
"""Remove duplicate values of an environment variable."""
|
| 230 |
+
oldList = variable.split(os.pathsep)
|
| 231 |
+
newList = []
|
| 232 |
+
for i in oldList:
|
| 233 |
+
if i not in newList:
|
| 234 |
+
newList.append(i)
|
| 235 |
+
newVariable = os.pathsep.join(newList)
|
| 236 |
+
return newVariable
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
def find_vcvarsall(version):
|
| 240 |
+
"""Find the vcvarsall.bat file
|
| 241 |
+
|
| 242 |
+
At first it tries to find the productdir of VS 2008 in the registry. If
|
| 243 |
+
that fails it falls back to the VS90COMNTOOLS env var.
|
| 244 |
+
"""
|
| 245 |
+
vsbase = VS_BASE % version
|
| 246 |
+
try:
|
| 247 |
+
productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, "productdir")
|
| 248 |
+
except KeyError:
|
| 249 |
+
log.debug("Unable to find productdir in registry")
|
| 250 |
+
productdir = None
|
| 251 |
+
|
| 252 |
+
if not productdir or not os.path.isdir(productdir):
|
| 253 |
+
toolskey = "VS%0.f0COMNTOOLS" % version
|
| 254 |
+
toolsdir = os.environ.get(toolskey, None)
|
| 255 |
+
|
| 256 |
+
if toolsdir and os.path.isdir(toolsdir):
|
| 257 |
+
productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
|
| 258 |
+
productdir = os.path.abspath(productdir)
|
| 259 |
+
if not os.path.isdir(productdir):
|
| 260 |
+
log.debug("%s is not a valid directory" % productdir)
|
| 261 |
+
return None
|
| 262 |
+
else:
|
| 263 |
+
log.debug("Env var %s is not set or invalid" % toolskey)
|
| 264 |
+
if not productdir:
|
| 265 |
+
log.debug("No productdir found")
|
| 266 |
+
return None
|
| 267 |
+
vcvarsall = os.path.join(productdir, "vcvarsall.bat")
|
| 268 |
+
if os.path.isfile(vcvarsall):
|
| 269 |
+
return vcvarsall
|
| 270 |
+
log.debug("Unable to find vcvarsall.bat")
|
| 271 |
+
return None
|
| 272 |
+
|
| 273 |
+
|
| 274 |
+
def query_vcvarsall(version, arch="x86"):
|
| 275 |
+
"""Launch vcvarsall.bat and read the settings from its environment"""
|
| 276 |
+
vcvarsall = find_vcvarsall(version)
|
| 277 |
+
interesting = {"include", "lib", "libpath", "path"}
|
| 278 |
+
result = {}
|
| 279 |
+
|
| 280 |
+
if vcvarsall is None:
|
| 281 |
+
raise DistutilsPlatformError("Unable to find vcvarsall.bat")
|
| 282 |
+
log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version)
|
| 283 |
+
popen = subprocess.Popen(
|
| 284 |
+
'"{}" {} & set'.format(vcvarsall, arch),
|
| 285 |
+
stdout=subprocess.PIPE,
|
| 286 |
+
stderr=subprocess.PIPE,
|
| 287 |
+
)
|
| 288 |
+
try:
|
| 289 |
+
stdout, stderr = popen.communicate()
|
| 290 |
+
if popen.wait() != 0:
|
| 291 |
+
raise DistutilsPlatformError(stderr.decode("mbcs"))
|
| 292 |
+
|
| 293 |
+
stdout = stdout.decode("mbcs")
|
| 294 |
+
for line in stdout.split("\n"):
|
| 295 |
+
line = Reg.convert_mbcs(line)
|
| 296 |
+
if '=' not in line:
|
| 297 |
+
continue
|
| 298 |
+
line = line.strip()
|
| 299 |
+
key, value = line.split('=', 1)
|
| 300 |
+
key = key.lower()
|
| 301 |
+
if key in interesting:
|
| 302 |
+
if value.endswith(os.pathsep):
|
| 303 |
+
value = value[:-1]
|
| 304 |
+
result[key] = removeDuplicates(value)
|
| 305 |
+
|
| 306 |
+
finally:
|
| 307 |
+
popen.stdout.close()
|
| 308 |
+
popen.stderr.close()
|
| 309 |
+
|
| 310 |
+
if len(result) != len(interesting):
|
| 311 |
+
raise ValueError(str(list(result.keys())))
|
| 312 |
+
|
| 313 |
+
return result
|
| 314 |
+
|
| 315 |
+
|
| 316 |
+
# More globals
|
| 317 |
+
VERSION = get_build_version()
|
| 318 |
+
# MACROS = MacroExpander(VERSION)
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
class MSVCCompiler(CCompiler):
|
| 322 |
+
"""Concrete class that implements an interface to Microsoft Visual C++,
|
| 323 |
+
as defined by the CCompiler abstract class."""
|
| 324 |
+
|
| 325 |
+
compiler_type = 'msvc'
|
| 326 |
+
|
| 327 |
+
# Just set this so CCompiler's constructor doesn't barf. We currently
|
| 328 |
+
# don't use the 'set_executables()' bureaucracy provided by CCompiler,
|
| 329 |
+
# as it really isn't necessary for this sort of single-compiler class.
|
| 330 |
+
# Would be nice to have a consistent interface with UnixCCompiler,
|
| 331 |
+
# though, so it's worth thinking about.
|
| 332 |
+
executables = {}
|
| 333 |
+
|
| 334 |
+
# Private class data (need to distinguish C from C++ source for compiler)
|
| 335 |
+
_c_extensions = ['.c']
|
| 336 |
+
_cpp_extensions = ['.cc', '.cpp', '.cxx']
|
| 337 |
+
_rc_extensions = ['.rc']
|
| 338 |
+
_mc_extensions = ['.mc']
|
| 339 |
+
|
| 340 |
+
# Needed for the filename generation methods provided by the
|
| 341 |
+
# base class, CCompiler.
|
| 342 |
+
src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions
|
| 343 |
+
res_extension = '.res'
|
| 344 |
+
obj_extension = '.obj'
|
| 345 |
+
static_lib_extension = '.lib'
|
| 346 |
+
shared_lib_extension = '.dll'
|
| 347 |
+
static_lib_format = shared_lib_format = '%s%s'
|
| 348 |
+
exe_extension = '.exe'
|
| 349 |
+
|
| 350 |
+
def __init__(self, verbose=0, dry_run=0, force=0):
|
| 351 |
+
super().__init__(verbose, dry_run, force)
|
| 352 |
+
self.__version = VERSION
|
| 353 |
+
self.__root = r"Software\Microsoft\VisualStudio"
|
| 354 |
+
# self.__macros = MACROS
|
| 355 |
+
self.__paths = []
|
| 356 |
+
# target platform (.plat_name is consistent with 'bdist')
|
| 357 |
+
self.plat_name = None
|
| 358 |
+
self.__arch = None # deprecated name
|
| 359 |
+
self.initialized = False
|
| 360 |
+
|
| 361 |
+
def initialize(self, plat_name=None): # noqa: C901
|
| 362 |
+
# multi-init means we would need to check platform same each time...
|
| 363 |
+
assert not self.initialized, "don't init multiple times"
|
| 364 |
+
if self.__version < 8.0:
|
| 365 |
+
raise DistutilsPlatformError(
|
| 366 |
+
"VC %0.1f is not supported by this module" % self.__version
|
| 367 |
+
)
|
| 368 |
+
if plat_name is None:
|
| 369 |
+
plat_name = get_platform()
|
| 370 |
+
# sanity check for platforms to prevent obscure errors later.
|
| 371 |
+
ok_plats = 'win32', 'win-amd64'
|
| 372 |
+
if plat_name not in ok_plats:
|
| 373 |
+
raise DistutilsPlatformError(
|
| 374 |
+
"--plat-name must be one of {}".format(ok_plats)
|
| 375 |
+
)
|
| 376 |
+
|
| 377 |
+
if (
|
| 378 |
+
"DISTUTILS_USE_SDK" in os.environ
|
| 379 |
+
and "MSSdk" in os.environ
|
| 380 |
+
and self.find_exe("cl.exe")
|
| 381 |
+
):
|
| 382 |
+
# Assume that the SDK set up everything alright; don't try to be
|
| 383 |
+
# smarter
|
| 384 |
+
self.cc = "cl.exe"
|
| 385 |
+
self.linker = "link.exe"
|
| 386 |
+
self.lib = "lib.exe"
|
| 387 |
+
self.rc = "rc.exe"
|
| 388 |
+
self.mc = "mc.exe"
|
| 389 |
+
else:
|
| 390 |
+
# On x86, 'vcvars32.bat amd64' creates an env that doesn't work;
|
| 391 |
+
# to cross compile, you use 'x86_amd64'.
|
| 392 |
+
# On AMD64, 'vcvars32.bat amd64' is a native build env; to cross
|
| 393 |
+
# compile use 'x86' (ie, it runs the x86 compiler directly)
|
| 394 |
+
if plat_name in (get_platform(), 'win32'):
|
| 395 |
+
# native build or cross-compile to win32
|
| 396 |
+
plat_spec = PLAT_TO_VCVARS[plat_name]
|
| 397 |
+
else:
|
| 398 |
+
# cross compile from win32 -> some 64bit
|
| 399 |
+
plat_spec = (
|
| 400 |
+
PLAT_TO_VCVARS[get_platform()] + '_' + PLAT_TO_VCVARS[plat_name]
|
| 401 |
+
)
|
| 402 |
+
|
| 403 |
+
vc_env = query_vcvarsall(VERSION, plat_spec)
|
| 404 |
+
|
| 405 |
+
self.__paths = vc_env['path'].split(os.pathsep)
|
| 406 |
+
os.environ['lib'] = vc_env['lib']
|
| 407 |
+
os.environ['include'] = vc_env['include']
|
| 408 |
+
|
| 409 |
+
if len(self.__paths) == 0:
|
| 410 |
+
raise DistutilsPlatformError(
|
| 411 |
+
"Python was built with %s, "
|
| 412 |
+
"and extensions need to be built with the same "
|
| 413 |
+
"version of the compiler, but it isn't installed." % self.__product
|
| 414 |
+
)
|
| 415 |
+
|
| 416 |
+
self.cc = self.find_exe("cl.exe")
|
| 417 |
+
self.linker = self.find_exe("link.exe")
|
| 418 |
+
self.lib = self.find_exe("lib.exe")
|
| 419 |
+
self.rc = self.find_exe("rc.exe") # resource compiler
|
| 420 |
+
self.mc = self.find_exe("mc.exe") # message compiler
|
| 421 |
+
# self.set_path_env_var('lib')
|
| 422 |
+
# self.set_path_env_var('include')
|
| 423 |
+
|
| 424 |
+
# extend the MSVC path with the current path
|
| 425 |
+
try:
|
| 426 |
+
for p in os.environ['path'].split(';'):
|
| 427 |
+
self.__paths.append(p)
|
| 428 |
+
except KeyError:
|
| 429 |
+
pass
|
| 430 |
+
self.__paths = normalize_and_reduce_paths(self.__paths)
|
| 431 |
+
os.environ['path'] = ";".join(self.__paths)
|
| 432 |
+
|
| 433 |
+
self.preprocess_options = None
|
| 434 |
+
if self.__arch == "x86":
|
| 435 |
+
self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/DNDEBUG']
|
| 436 |
+
self.compile_options_debug = [
|
| 437 |
+
'/nologo',
|
| 438 |
+
'/Od',
|
| 439 |
+
'/MDd',
|
| 440 |
+
'/W3',
|
| 441 |
+
'/Z7',
|
| 442 |
+
'/D_DEBUG',
|
| 443 |
+
]
|
| 444 |
+
else:
|
| 445 |
+
# Win64
|
| 446 |
+
self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG']
|
| 447 |
+
self.compile_options_debug = [
|
| 448 |
+
'/nologo',
|
| 449 |
+
'/Od',
|
| 450 |
+
'/MDd',
|
| 451 |
+
'/W3',
|
| 452 |
+
'/GS-',
|
| 453 |
+
'/Z7',
|
| 454 |
+
'/D_DEBUG',
|
| 455 |
+
]
|
| 456 |
+
|
| 457 |
+
self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
|
| 458 |
+
if self.__version >= 7:
|
| 459 |
+
self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG']
|
| 460 |
+
self.ldflags_static = ['/nologo']
|
| 461 |
+
|
| 462 |
+
self.initialized = True
|
| 463 |
+
|
| 464 |
+
# -- Worker methods ------------------------------------------------
|
| 465 |
+
|
| 466 |
+
def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
|
| 467 |
+
# Copied from ccompiler.py, extended to return .res as 'object'-file
|
| 468 |
+
# for .rc input file
|
| 469 |
+
if output_dir is None:
|
| 470 |
+
output_dir = ''
|
| 471 |
+
obj_names = []
|
| 472 |
+
for src_name in source_filenames:
|
| 473 |
+
(base, ext) = os.path.splitext(src_name)
|
| 474 |
+
base = os.path.splitdrive(base)[1] # Chop off the drive
|
| 475 |
+
base = base[os.path.isabs(base) :] # If abs, chop off leading /
|
| 476 |
+
if ext not in self.src_extensions:
|
| 477 |
+
# Better to raise an exception instead of silently continuing
|
| 478 |
+
# and later complain about sources and targets having
|
| 479 |
+
# different lengths
|
| 480 |
+
raise CompileError("Don't know how to compile %s" % src_name)
|
| 481 |
+
if strip_dir:
|
| 482 |
+
base = os.path.basename(base)
|
| 483 |
+
if ext in self._rc_extensions:
|
| 484 |
+
obj_names.append(os.path.join(output_dir, base + self.res_extension))
|
| 485 |
+
elif ext in self._mc_extensions:
|
| 486 |
+
obj_names.append(os.path.join(output_dir, base + self.res_extension))
|
| 487 |
+
else:
|
| 488 |
+
obj_names.append(os.path.join(output_dir, base + self.obj_extension))
|
| 489 |
+
return obj_names
|
| 490 |
+
|
| 491 |
+
def compile( # noqa: C901
|
| 492 |
+
self,
|
| 493 |
+
sources,
|
| 494 |
+
output_dir=None,
|
| 495 |
+
macros=None,
|
| 496 |
+
include_dirs=None,
|
| 497 |
+
debug=0,
|
| 498 |
+
extra_preargs=None,
|
| 499 |
+
extra_postargs=None,
|
| 500 |
+
depends=None,
|
| 501 |
+
):
|
| 502 |
+
if not self.initialized:
|
| 503 |
+
self.initialize()
|
| 504 |
+
compile_info = self._setup_compile(
|
| 505 |
+
output_dir, macros, include_dirs, sources, depends, extra_postargs
|
| 506 |
+
)
|
| 507 |
+
macros, objects, extra_postargs, pp_opts, build = compile_info
|
| 508 |
+
|
| 509 |
+
compile_opts = extra_preargs or []
|
| 510 |
+
compile_opts.append('/c')
|
| 511 |
+
if debug:
|
| 512 |
+
compile_opts.extend(self.compile_options_debug)
|
| 513 |
+
else:
|
| 514 |
+
compile_opts.extend(self.compile_options)
|
| 515 |
+
|
| 516 |
+
for obj in objects:
|
| 517 |
+
try:
|
| 518 |
+
src, ext = build[obj]
|
| 519 |
+
except KeyError:
|
| 520 |
+
continue
|
| 521 |
+
if debug:
|
| 522 |
+
# pass the full pathname to MSVC in debug mode,
|
| 523 |
+
# this allows the debugger to find the source file
|
| 524 |
+
# without asking the user to browse for it
|
| 525 |
+
src = os.path.abspath(src)
|
| 526 |
+
|
| 527 |
+
if ext in self._c_extensions:
|
| 528 |
+
input_opt = "/Tc" + src
|
| 529 |
+
elif ext in self._cpp_extensions:
|
| 530 |
+
input_opt = "/Tp" + src
|
| 531 |
+
elif ext in self._rc_extensions:
|
| 532 |
+
# compile .RC to .RES file
|
| 533 |
+
input_opt = src
|
| 534 |
+
output_opt = "/fo" + obj
|
| 535 |
+
try:
|
| 536 |
+
self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt])
|
| 537 |
+
except DistutilsExecError as msg:
|
| 538 |
+
raise CompileError(msg)
|
| 539 |
+
continue
|
| 540 |
+
elif ext in self._mc_extensions:
|
| 541 |
+
# Compile .MC to .RC file to .RES file.
|
| 542 |
+
# * '-h dir' specifies the directory for the
|
| 543 |
+
# generated include file
|
| 544 |
+
# * '-r dir' specifies the target directory of the
|
| 545 |
+
# generated RC file and the binary message resource
|
| 546 |
+
# it includes
|
| 547 |
+
#
|
| 548 |
+
# For now (since there are no options to change this),
|
| 549 |
+
# we use the source-directory for the include file and
|
| 550 |
+
# the build directory for the RC file and message
|
| 551 |
+
# resources. This works at least for win32all.
|
| 552 |
+
h_dir = os.path.dirname(src)
|
| 553 |
+
rc_dir = os.path.dirname(obj)
|
| 554 |
+
try:
|
| 555 |
+
# first compile .MC to .RC and .H file
|
| 556 |
+
self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src])
|
| 557 |
+
base, _ = os.path.splitext(os.path.basename(src))
|
| 558 |
+
rc_file = os.path.join(rc_dir, base + '.rc')
|
| 559 |
+
# then compile .RC to .RES file
|
| 560 |
+
self.spawn([self.rc] + ["/fo" + obj] + [rc_file])
|
| 561 |
+
|
| 562 |
+
except DistutilsExecError as msg:
|
| 563 |
+
raise CompileError(msg)
|
| 564 |
+
continue
|
| 565 |
+
else:
|
| 566 |
+
# how to handle this file?
|
| 567 |
+
raise CompileError(
|
| 568 |
+
"Don't know how to compile {} to {}".format(src, obj)
|
| 569 |
+
)
|
| 570 |
+
|
| 571 |
+
output_opt = "/Fo" + obj
|
| 572 |
+
try:
|
| 573 |
+
self.spawn(
|
| 574 |
+
[self.cc]
|
| 575 |
+
+ compile_opts
|
| 576 |
+
+ pp_opts
|
| 577 |
+
+ [input_opt, output_opt]
|
| 578 |
+
+ extra_postargs
|
| 579 |
+
)
|
| 580 |
+
except DistutilsExecError as msg:
|
| 581 |
+
raise CompileError(msg)
|
| 582 |
+
|
| 583 |
+
return objects
|
| 584 |
+
|
| 585 |
+
def create_static_lib(
|
| 586 |
+
self, objects, output_libname, output_dir=None, debug=0, target_lang=None
|
| 587 |
+
):
|
| 588 |
+
if not self.initialized:
|
| 589 |
+
self.initialize()
|
| 590 |
+
(objects, output_dir) = self._fix_object_args(objects, output_dir)
|
| 591 |
+
output_filename = self.library_filename(output_libname, output_dir=output_dir)
|
| 592 |
+
|
| 593 |
+
if self._need_link(objects, output_filename):
|
| 594 |
+
lib_args = objects + ['/OUT:' + output_filename]
|
| 595 |
+
if debug:
|
| 596 |
+
pass # XXX what goes here?
|
| 597 |
+
try:
|
| 598 |
+
self.spawn([self.lib] + lib_args)
|
| 599 |
+
except DistutilsExecError as msg:
|
| 600 |
+
raise LibError(msg)
|
| 601 |
+
else:
|
| 602 |
+
log.debug("skipping %s (up-to-date)", output_filename)
|
| 603 |
+
|
| 604 |
+
def link( # noqa: C901
|
| 605 |
+
self,
|
| 606 |
+
target_desc,
|
| 607 |
+
objects,
|
| 608 |
+
output_filename,
|
| 609 |
+
output_dir=None,
|
| 610 |
+
libraries=None,
|
| 611 |
+
library_dirs=None,
|
| 612 |
+
runtime_library_dirs=None,
|
| 613 |
+
export_symbols=None,
|
| 614 |
+
debug=0,
|
| 615 |
+
extra_preargs=None,
|
| 616 |
+
extra_postargs=None,
|
| 617 |
+
build_temp=None,
|
| 618 |
+
target_lang=None,
|
| 619 |
+
):
|
| 620 |
+
if not self.initialized:
|
| 621 |
+
self.initialize()
|
| 622 |
+
(objects, output_dir) = self._fix_object_args(objects, output_dir)
|
| 623 |
+
fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
|
| 624 |
+
(libraries, library_dirs, runtime_library_dirs) = fixed_args
|
| 625 |
+
|
| 626 |
+
if runtime_library_dirs:
|
| 627 |
+
self.warn(
|
| 628 |
+
"I don't know what to do with 'runtime_library_dirs': "
|
| 629 |
+
+ str(runtime_library_dirs)
|
| 630 |
+
)
|
| 631 |
+
|
| 632 |
+
lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries)
|
| 633 |
+
if output_dir is not None:
|
| 634 |
+
output_filename = os.path.join(output_dir, output_filename)
|
| 635 |
+
|
| 636 |
+
if self._need_link(objects, output_filename):
|
| 637 |
+
if target_desc == CCompiler.EXECUTABLE:
|
| 638 |
+
if debug:
|
| 639 |
+
ldflags = self.ldflags_shared_debug[1:]
|
| 640 |
+
else:
|
| 641 |
+
ldflags = self.ldflags_shared[1:]
|
| 642 |
+
else:
|
| 643 |
+
if debug:
|
| 644 |
+
ldflags = self.ldflags_shared_debug
|
| 645 |
+
else:
|
| 646 |
+
ldflags = self.ldflags_shared
|
| 647 |
+
|
| 648 |
+
export_opts = []
|
| 649 |
+
for sym in export_symbols or []:
|
| 650 |
+
export_opts.append("/EXPORT:" + sym)
|
| 651 |
+
|
| 652 |
+
ld_args = (
|
| 653 |
+
ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename]
|
| 654 |
+
)
|
| 655 |
+
|
| 656 |
+
# The MSVC linker generates .lib and .exp files, which cannot be
|
| 657 |
+
# suppressed by any linker switches. The .lib files may even be
|
| 658 |
+
# needed! Make sure they are generated in the temporary build
|
| 659 |
+
# directory. Since they have different names for debug and release
|
| 660 |
+
# builds, they can go into the same directory.
|
| 661 |
+
build_temp = os.path.dirname(objects[0])
|
| 662 |
+
if export_symbols is not None:
|
| 663 |
+
(dll_name, dll_ext) = os.path.splitext(
|
| 664 |
+
os.path.basename(output_filename)
|
| 665 |
+
)
|
| 666 |
+
implib_file = os.path.join(build_temp, self.library_filename(dll_name))
|
| 667 |
+
ld_args.append('/IMPLIB:' + implib_file)
|
| 668 |
+
|
| 669 |
+
self.manifest_setup_ldargs(output_filename, build_temp, ld_args)
|
| 670 |
+
|
| 671 |
+
if extra_preargs:
|
| 672 |
+
ld_args[:0] = extra_preargs
|
| 673 |
+
if extra_postargs:
|
| 674 |
+
ld_args.extend(extra_postargs)
|
| 675 |
+
|
| 676 |
+
self.mkpath(os.path.dirname(output_filename))
|
| 677 |
+
try:
|
| 678 |
+
self.spawn([self.linker] + ld_args)
|
| 679 |
+
except DistutilsExecError as msg:
|
| 680 |
+
raise LinkError(msg)
|
| 681 |
+
|
| 682 |
+
# embed the manifest
|
| 683 |
+
# XXX - this is somewhat fragile - if mt.exe fails, distutils
|
| 684 |
+
# will still consider the DLL up-to-date, but it will not have a
|
| 685 |
+
# manifest. Maybe we should link to a temp file? OTOH, that
|
| 686 |
+
# implies a build environment error that shouldn't go undetected.
|
| 687 |
+
mfinfo = self.manifest_get_embed_info(target_desc, ld_args)
|
| 688 |
+
if mfinfo is not None:
|
| 689 |
+
mffilename, mfid = mfinfo
|
| 690 |
+
out_arg = '-outputresource:{};{}'.format(output_filename, mfid)
|
| 691 |
+
try:
|
| 692 |
+
self.spawn(['mt.exe', '-nologo', '-manifest', mffilename, out_arg])
|
| 693 |
+
except DistutilsExecError as msg:
|
| 694 |
+
raise LinkError(msg)
|
| 695 |
+
else:
|
| 696 |
+
log.debug("skipping %s (up-to-date)", output_filename)
|
| 697 |
+
|
| 698 |
+
def manifest_setup_ldargs(self, output_filename, build_temp, ld_args):
    """Ask the linker to generate any manifest in the temp build dir.

    If a manifest is needed at all, an embedded manifest is recommended
    (see the MSDN article "How to: Embed a Manifest Inside a C/C++
    Application").  Writing it to *build_temp* lets us inspect it, and
    possibly embed it, after the link step.
    """
    manifest_name = os.path.basename(output_filename) + ".manifest"
    temp_manifest = os.path.join(build_temp, manifest_name)
    ld_args.append('/MANIFESTFILE:' + temp_manifest)
|
| 709 |
+
|
| 710 |
+
def manifest_get_embed_info(self, target_desc, ld_args):
    """Return ``(manifest_filename, resource_id)`` to embed, or None.

    None means no manifest should be embedded.  See
    http://bugs.python.org/issue7833 for why we want to avoid any
    manifest for extension modules when we can.
    """
    temp_manifest = None
    for arg in ld_args:
        if arg.startswith("/MANIFESTFILE:"):
            temp_manifest = arg.split(":", 1)[1]
            break
    if temp_manifest is None:
        # no /MANIFESTFILE so nothing to do.
        return None
    if target_desc == CCompiler.EXECUTABLE:
        # by default, executables always get the manifest with the
        # CRT referenced.
        mfid = 1
    else:
        # Extension modules try and avoid any manifest if possible.
        mfid = 2
    temp_manifest = self._remove_visual_c_ref(temp_manifest)
    if temp_manifest is None:
        return None
    return temp_manifest, mfid
|
| 733 |
+
|
| 734 |
+
def _remove_visual_c_ref(self, manifest_file):
|
| 735 |
+
try:
|
| 736 |
+
# Remove references to the Visual C runtime, so they will
|
| 737 |
+
# fall through to the Visual C dependency of Python.exe.
|
| 738 |
+
# This way, when installed for a restricted user (e.g.
|
| 739 |
+
# runtimes are not in WinSxS folder, but in Python's own
|
| 740 |
+
# folder), the runtimes do not need to be in every folder
|
| 741 |
+
# with .pyd's.
|
| 742 |
+
# Returns either the filename of the modified manifest or
|
| 743 |
+
# None if no manifest should be embedded.
|
| 744 |
+
manifest_f = open(manifest_file)
|
| 745 |
+
try:
|
| 746 |
+
manifest_buf = manifest_f.read()
|
| 747 |
+
finally:
|
| 748 |
+
manifest_f.close()
|
| 749 |
+
pattern = re.compile(
|
| 750 |
+
r"""<assemblyIdentity.*?name=("|')Microsoft\."""
|
| 751 |
+
r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
|
| 752 |
+
re.DOTALL,
|
| 753 |
+
)
|
| 754 |
+
manifest_buf = re.sub(pattern, "", manifest_buf)
|
| 755 |
+
pattern = r"<dependentAssembly>\s*</dependentAssembly>"
|
| 756 |
+
manifest_buf = re.sub(pattern, "", manifest_buf)
|
| 757 |
+
# Now see if any other assemblies are referenced - if not, we
|
| 758 |
+
# don't want a manifest embedded.
|
| 759 |
+
pattern = re.compile(
|
| 760 |
+
r"""<assemblyIdentity.*?name=(?:"|')(.+?)(?:"|')"""
|
| 761 |
+
r""".*?(?:/>|</assemblyIdentity>)""",
|
| 762 |
+
re.DOTALL,
|
| 763 |
+
)
|
| 764 |
+
if re.search(pattern, manifest_buf) is None:
|
| 765 |
+
return None
|
| 766 |
+
|
| 767 |
+
manifest_f = open(manifest_file, 'w')
|
| 768 |
+
try:
|
| 769 |
+
manifest_f.write(manifest_buf)
|
| 770 |
+
return manifest_file
|
| 771 |
+
finally:
|
| 772 |
+
manifest_f.close()
|
| 773 |
+
except OSError:
|
| 774 |
+
pass
|
| 775 |
+
|
| 776 |
+
# -- Miscellaneous methods -----------------------------------------
|
| 777 |
+
# These are all used by the 'gen_lib_options() function, in
|
| 778 |
+
# ccompiler.py.
|
| 779 |
+
|
| 780 |
+
def library_dir_option(self, dir):
    """Return the linker switch adding *dir* to the library search path."""
    return "/LIBPATH:" + dir
|
| 782 |
+
|
| 783 |
+
def runtime_library_dir_option(self, dir):
    """MSVC has no runtime-search-path (rpath) concept; always raises."""
    raise DistutilsPlatformError(
        "don't know how to set runtime library search path for MSVC++"
    )
|
| 787 |
+
|
| 788 |
+
def library_option(self, lib):
    """Map a bare library name to the filename handed to the linker."""
    return self.library_filename(lib)
|
| 790 |
+
|
| 791 |
+
def find_library_file(self, dirs, lib, debug=0):
    """Search *dirs* for *lib* and return the first existing path.

    Prefer a debugging library ("<lib>_d") when *debug* is true, but
    fall back to the release name if no debug build is found.  Returns
    None when the library is in none of *dirs*.
    """
    candidates = [lib + "_d", lib] if debug else [lib]
    for directory in dirs:
        for candidate in candidates:
            path = os.path.join(directory, self.library_filename(candidate))
            if os.path.exists(path):
                return path
    # Didn't find it in *any* of 'dirs'.
    return None
|
| 806 |
+
|
| 807 |
+
# Helper methods for using the MSVC registry settings
|
| 808 |
+
|
| 809 |
+
def find_exe(self, exe):
    """Return path to an MSVC executable program.

    Tries to find the program in several places: first, one of the
    MSVC program search paths from the registry; next, the directories
    in the PATH environment variable.  If any of those work, return an
    absolute path that is known to exist.  If none of them work, just
    return the original program name, 'exe'.
    """

    def _probe(directories):
        for directory in directories:
            candidate = os.path.join(os.path.abspath(directory), exe)
            if os.path.isfile(candidate):
                return candidate
        return None

    found = _probe(self.__paths)
    if found is None:
        # Not among the registry-derived paths; try the existing PATH.
        # (os.environ['Path'] is only consulted on this fallback, as
        # in the original.)
        found = _probe(os.environ['Path'].split(';'))
    return exe if found is None else found
|
.venv/Lib/site-packages/setuptools/_distutils/msvccompiler.py
ADDED
|
@@ -0,0 +1,692 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.msvccompiler
|
| 2 |
+
|
| 3 |
+
Contains MSVCCompiler, an implementation of the abstract CCompiler class
|
| 4 |
+
for the Microsoft Visual Studio.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
# Written by Perry Stoll
|
| 8 |
+
# hacked by Robin Becker and Thomas Heller to do a better job of
|
| 9 |
+
# finding DevStudio (through the registry)
|
| 10 |
+
|
| 11 |
+
import sys
|
| 12 |
+
import os
|
| 13 |
+
import warnings
|
| 14 |
+
from .errors import (
|
| 15 |
+
DistutilsExecError,
|
| 16 |
+
DistutilsPlatformError,
|
| 17 |
+
CompileError,
|
| 18 |
+
LibError,
|
| 19 |
+
LinkError,
|
| 20 |
+
)
|
| 21 |
+
from .ccompiler import CCompiler, gen_lib_options
|
| 22 |
+
from ._log import log
|
| 23 |
+
|
| 24 |
+
_can_read_reg = False
|
| 25 |
+
try:
|
| 26 |
+
import winreg
|
| 27 |
+
|
| 28 |
+
_can_read_reg = True
|
| 29 |
+
hkey_mod = winreg
|
| 30 |
+
|
| 31 |
+
RegOpenKeyEx = winreg.OpenKeyEx
|
| 32 |
+
RegEnumKey = winreg.EnumKey
|
| 33 |
+
RegEnumValue = winreg.EnumValue
|
| 34 |
+
RegError = winreg.error
|
| 35 |
+
|
| 36 |
+
except ImportError:
|
| 37 |
+
try:
|
| 38 |
+
import win32api
|
| 39 |
+
import win32con
|
| 40 |
+
|
| 41 |
+
_can_read_reg = True
|
| 42 |
+
hkey_mod = win32con
|
| 43 |
+
|
| 44 |
+
RegOpenKeyEx = win32api.RegOpenKeyEx
|
| 45 |
+
RegEnumKey = win32api.RegEnumKey
|
| 46 |
+
RegEnumValue = win32api.RegEnumValue
|
| 47 |
+
RegError = win32api.error
|
| 48 |
+
except ImportError:
|
| 49 |
+
log.info(
|
| 50 |
+
"Warning: Can't read registry to find the "
|
| 51 |
+
"necessary compiler setting\n"
|
| 52 |
+
"Make sure that Python modules winreg, "
|
| 53 |
+
"win32api or win32con are installed."
|
| 54 |
+
)
|
| 55 |
+
pass
|
| 56 |
+
|
| 57 |
+
if _can_read_reg:
|
| 58 |
+
HKEYS = (
|
| 59 |
+
hkey_mod.HKEY_USERS,
|
| 60 |
+
hkey_mod.HKEY_CURRENT_USER,
|
| 61 |
+
hkey_mod.HKEY_LOCAL_MACHINE,
|
| 62 |
+
hkey_mod.HKEY_CLASSES_ROOT,
|
| 63 |
+
)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
warnings.warn(
|
| 67 |
+
"msvccompiler is deprecated and slated to be removed "
|
| 68 |
+
"in the future. Please discontinue use or file an issue "
|
| 69 |
+
"with pypa/distutils describing your use case.",
|
| 70 |
+
DeprecationWarning,
|
| 71 |
+
)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def read_keys(base, key):
    """Return list of registry subkey names, or None if *key* is unreadable."""
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    keys = []
    index = 0
    while True:
        try:
            keys.append(RegEnumKey(handle, index))
        except RegError:
            # Enumeration past the last subkey raises; we're done.
            return keys
        index += 1
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
def read_values(base, key):
    """Return dict of registry keys and values.

    All names are converted to lowercase.  Returns None if *key*
    cannot be opened.
    """
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    values = {}
    index = 0
    while True:
        try:
            name, value, _type = RegEnumValue(handle, index)
        except RegError:
            # Enumeration past the last value raises; we're done.
            return values
        values[convert_mbcs(name.lower())] = convert_mbcs(value)
        index += 1
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def convert_mbcs(s):
    """Decode *s* via the Windows 'mbcs' codec when it supports .decode().

    Objects without a ``decode`` method (already-str values, ints, ...)
    and values that fail to decode are returned unchanged.
    """
    decoder = getattr(s, "decode", None)
    if decoder is None:
        return s
    try:
        return decoder("mbcs")
    except UnicodeError:
        return s
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
class MacroExpander:
    """Expand ``$(Name)`` macros to paths read from the Windows registry.

    Built for a specific Visual Studio *version*; the macro table is
    populated eagerly by load_macros() in __init__.
    """

    def __init__(self, version):
        # Maps "$(MacroName)" -> registry-derived string value.
        self.macros = {}
        self.load_macros(version)

    def set_macro(self, macro, path, key):
        """Record $(macro) from the first registry hive exposing path\\key."""
        for base in HKEYS:
            d = read_values(base, path)
            if d:
                self.macros["$(%s)" % macro] = d[key]
                break

    def load_macros(self, version):
        """Populate the macro table for the given VS *version*."""
        vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
        net = r"Software\Microsoft\.NETFramework"
        self.set_macro("FrameworkDir", net, "installroot")
        try:
            # The SDK-root value name changed after VS 7.0.
            if version > 7.0:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
            else:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
        except KeyError:
            raise DistutilsPlatformError(
                """Python was built with Visual Studio 2003;
extensions must be built with a compiler than can generate compatible binaries.
Visual Studio 2003 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py."""
            )

        p = r"Software\Microsoft\NET Framework Setup\Product"
        for base in HKEYS:
            try:
                h = RegOpenKeyEx(base, p)
            except RegError:
                continue
            # NOTE(review): only the first subkey of the first readable
            # hive is consulted, and the loop keeps overwriting
            # $(FrameworkVersion) for every subsequent hive that opens.
            key = RegEnumKey(h, 0)
            d = read_values(base, r"{}\{}".format(p, key))
            self.macros["$(FrameworkVersion)"] = d["version"]

    def sub(self, s):
        """Return *s* with every known $(macro) replaced by its value."""
        for k, v in self.macros.items():
            s = s.replace(k, v)
        return s
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
def get_build_version():
    """Return the version of MSVC that was used to build Python.

    For Python 2.3 and up, the version number is included in
    sys.version.  For earlier versions, assume the compiler is MSVC 6.
    """
    marker = "MSC v."
    pos = sys.version.find(marker)
    if pos == -1:
        return 6
    pos += len(marker)
    digits, _rest = sys.version[pos:].split(" ", 1)
    major = int(digits[:-2]) - 6
    if major >= 13:
        # v13 was skipped and should be v14
        major += 1
    minor = int(digits[2:3]) / 10.0
    # I don't think paths are affected by minor version in version 6
    if major == 6:
        minor = 0
    if major >= 6:
        return major + minor
    # else we don't know what version of the compiler this is
    return None
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def get_build_architecture():
    """Return the processor architecture parsed from sys.version.

    Possible results are "Intel" or "AMD64" (taken from the
    "... bit (ARCH)" fragment; "Intel" when the fragment is absent).
    """
    marker = " bit ("
    start = sys.version.find(marker)
    if start == -1:
        return "Intel"
    start += len(marker)
    end = sys.version.find(")", start)
    return sys.version[start:end]
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def normalize_and_reduce_paths(paths):
    """Return a list of normalized paths with duplicates removed.

    The current order of paths is maintained.  Normalization means
    e.g. '/a' and '/a/' collapse to a single entry.
    """
    # dict.fromkeys keeps first-seen insertion order (Python 3.7+),
    # giving de-duplication without the O(n^2) membership scan.
    return list(dict.fromkeys(os.path.normpath(p) for p in paths))
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
class MSVCCompiler(CCompiler):
    """Concrete class that implements an interface to Microsoft Visual C++,
    as defined by the CCompiler abstract class."""

    compiler_type = 'msvc'

    # Just set this so CCompiler's constructor doesn't barf. We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    # Static and shared libraries use the bare "<name><ext>" pattern
    # (no 'lib' prefix, unlike Unix).
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'
|
| 254 |
+
|
| 255 |
+
def __init__(self, verbose=0, dry_run=0, force=0):
    """Record the MSVC version/architecture this Python was built with.

    Only cheap introspection happens here; the expensive tool lookup is
    deferred to initialize(), guarded by self.initialized.
    """
    super().__init__(verbose, dry_run, force)
    self.__version = get_build_version()
    self.__arch = get_build_architecture()
    if self.__arch == "Intel":
        # x86
        if self.__version >= 7:
            self.__root = r"Software\Microsoft\VisualStudio"
            self.__macros = MacroExpander(self.__version)
        else:
            self.__root = r"Software\Microsoft\Devstudio"
            self.__product = "Visual Studio version %s" % self.__version
    else:
        # Win64. Assume this was built with the platform SDK
        self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)

    # Flipped to True by initialize(); checked by compile()/link()/etc.
    self.initialized = False
|
| 272 |
+
|
| 273 |
+
def initialize(self):
    """Locate the build tools and compute default option sets.

    Raises DistutilsPlatformError when no matching compiler
    installation can be found.  Mutates os.environ['path'].
    """
    self.__paths = []
    if (
        "DISTUTILS_USE_SDK" in os.environ
        and "MSSdk" in os.environ
        and self.find_exe("cl.exe")
    ):
        # Assume that the SDK set up everything alright; don't try to be
        # smarter
        self.cc = "cl.exe"
        self.linker = "link.exe"
        self.lib = "lib.exe"
        self.rc = "rc.exe"
        self.mc = "mc.exe"
    else:
        self.__paths = self.get_msvc_paths("path")

        if len(self.__paths) == 0:
            raise DistutilsPlatformError(
                "Python was built with %s, "
                "and extensions need to be built with the same "
                "version of the compiler, but it isn't installed." % self.__product
            )

        self.cc = self.find_exe("cl.exe")
        self.linker = self.find_exe("link.exe")
        self.lib = self.find_exe("lib.exe")
        self.rc = self.find_exe("rc.exe")  # resource compiler
        self.mc = self.find_exe("mc.exe")  # message compiler
        self.set_path_env_var('lib')
        self.set_path_env_var('include')

        # extend the MSVC path with the current path
        try:
            for p in os.environ['path'].split(';'):
                self.__paths.append(p)
        except KeyError:
            pass
        self.__paths = normalize_and_reduce_paths(self.__paths)
        os.environ['path'] = ";".join(self.__paths)

    self.preprocess_options = None
    if self.__arch == "Intel":
        self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GX', '/DNDEBUG']
        self.compile_options_debug = [
            '/nologo',
            '/Od',
            '/MDd',
            '/W3',
            '/GX',
            '/Z7',
            '/D_DEBUG',
        ]
    else:
        # Win64
        self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG']
        self.compile_options_debug = [
            '/nologo',
            '/Od',
            '/MDd',
            '/W3',
            '/GS-',
            '/Z7',
            '/D_DEBUG',
        ]

    self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
    if self.__version >= 7:
        self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG']
    else:
        # Older linkers need /pdb:None to keep debug info in the DLL.
        self.ldflags_shared_debug = [
            '/DLL',
            '/nologo',
            '/INCREMENTAL:no',
            '/pdb:None',
            '/DEBUG',
        ]
    self.ldflags_static = ['/nologo']

    self.initialized = True
|
| 353 |
+
|
| 354 |
+
# -- Worker methods ------------------------------------------------
|
| 355 |
+
|
| 356 |
+
def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
    """Map source filenames to the object filenames they compile to.

    Copied from ccompiler.py, extended to return '.res' as the
    'object' file for '.rc' and '.mc' inputs.  Raises CompileError for
    extensions this compiler cannot handle.
    """
    if output_dir is None:
        output_dir = ''
    obj_names = []
    for src_name in source_filenames:
        base, ext = os.path.splitext(src_name)
        base = os.path.splitdrive(base)[1]  # Chop off the drive
        if os.path.isabs(base):
            base = base[1:]  # If abs, chop off leading /
        if ext not in self.src_extensions:
            # Better to raise an exception instead of silently continuing
            # and later complain about sources and targets having
            # different lengths
            raise CompileError("Don't know how to compile %s" % src_name)
        if strip_dir:
            base = os.path.basename(base)
        if ext in self._rc_extensions or ext in self._mc_extensions:
            suffix = self.res_extension
        else:
            suffix = self.obj_extension
        obj_names.append(os.path.join(output_dir, base + suffix))
    return obj_names
|
| 380 |
+
|
| 381 |
+
def compile(  # noqa: C901
    self,
    sources,
    output_dir=None,
    macros=None,
    include_dirs=None,
    debug=0,
    extra_preargs=None,
    extra_postargs=None,
    depends=None,
):
    """Compile *sources*, returning the list of object filenames.

    C/C++ sources go through cl.exe, .rc through rc.exe, and .mc
    through mc.exe followed by rc.exe.  Raises CompileError if any
    tool invocation fails.
    """
    if not self.initialized:
        self.initialize()
    compile_info = self._setup_compile(
        output_dir, macros, include_dirs, sources, depends, extra_postargs
    )
    macros, objects, extra_postargs, pp_opts, build = compile_info

    compile_opts = extra_preargs or []
    compile_opts.append('/c')
    if debug:
        compile_opts.extend(self.compile_options_debug)
    else:
        compile_opts.extend(self.compile_options)

    for obj in objects:
        try:
            src, ext = build[obj]
        except KeyError:
            # Object not scheduled for rebuild (up-to-date); skip it.
            continue
        if debug:
            # pass the full pathname to MSVC in debug mode,
            # this allows the debugger to find the source file
            # without asking the user to browse for it
            src = os.path.abspath(src)

        if ext in self._c_extensions:
            input_opt = "/Tc" + src
        elif ext in self._cpp_extensions:
            input_opt = "/Tp" + src
        elif ext in self._rc_extensions:
            # compile .RC to .RES file
            input_opt = src
            output_opt = "/fo" + obj
            try:
                self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt])
            except DistutilsExecError as msg:
                raise CompileError(msg)
            continue
        elif ext in self._mc_extensions:
            # Compile .MC to .RC file to .RES file.
            # * '-h dir' specifies the directory for the
            # generated include file
            # * '-r dir' specifies the target directory of the
            # generated RC file and the binary message resource
            # it includes
            #
            # For now (since there are no options to change this),
            # we use the source-directory for the include file and
            # the build directory for the RC file and message
            # resources. This works at least for win32all.
            h_dir = os.path.dirname(src)
            rc_dir = os.path.dirname(obj)
            try:
                # first compile .MC to .RC and .H file
                self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src])
                base, _ = os.path.splitext(os.path.basename(src))
                rc_file = os.path.join(rc_dir, base + '.rc')
                # then compile .RC to .RES file
                self.spawn([self.rc] + ["/fo" + obj] + [rc_file])

            except DistutilsExecError as msg:
                raise CompileError(msg)
            continue
        else:
            # how to handle this file?
            raise CompileError(
                "Don't know how to compile {} to {}".format(src, obj)
            )

        output_opt = "/Fo" + obj
        try:
            self.spawn(
                [self.cc]
                + compile_opts
                + pp_opts
                + [input_opt, output_opt]
                + extra_postargs
            )
        except DistutilsExecError as msg:
            raise CompileError(msg)

    return objects
|
| 474 |
+
|
| 475 |
+
def create_static_lib(
    self, objects, output_libname, output_dir=None, debug=0, target_lang=None
):
    """Bundle *objects* into a static .lib via lib.exe.

    Skips the step when the output is already up to date; raises
    LibError if lib.exe fails.
    """
    if not self.initialized:
        self.initialize()
    objects, output_dir = self._fix_object_args(objects, output_dir)
    output_filename = self.library_filename(output_libname, output_dir=output_dir)

    if not self._need_link(objects, output_filename):
        log.debug("skipping %s (up-to-date)", output_filename)
        return

    lib_args = objects + ['/OUT:' + output_filename]
    if debug:
        pass  # XXX what goes here?
    try:
        self.spawn([self.lib] + lib_args)
    except DistutilsExecError as msg:
        raise LibError(msg)
|
| 493 |
+
|
| 494 |
+
def link(  # noqa: C901
    self,
    target_desc,
    objects,
    output_filename,
    output_dir=None,
    libraries=None,
    library_dirs=None,
    runtime_library_dirs=None,
    export_symbols=None,
    debug=0,
    extra_preargs=None,
    extra_postargs=None,
    build_temp=None,
    target_lang=None,
):
    """Link *objects* into an executable or shared library via link.exe.

    Skips the link when the output is up to date; raises LinkError if
    the linker fails.  runtime_library_dirs are not supported and only
    produce a warning.
    """
    if not self.initialized:
        self.initialize()
    (objects, output_dir) = self._fix_object_args(objects, output_dir)
    fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
    (libraries, library_dirs, runtime_library_dirs) = fixed_args

    if runtime_library_dirs:
        self.warn(
            "I don't know what to do with 'runtime_library_dirs': "
            + str(runtime_library_dirs)
        )

    lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries)
    if output_dir is not None:
        output_filename = os.path.join(output_dir, output_filename)

    if self._need_link(objects, output_filename):
        if target_desc == CCompiler.EXECUTABLE:
            # Executables reuse the shared-lib flags minus the leading /DLL.
            if debug:
                ldflags = self.ldflags_shared_debug[1:]
            else:
                ldflags = self.ldflags_shared[1:]
        else:
            if debug:
                ldflags = self.ldflags_shared_debug
            else:
                ldflags = self.ldflags_shared

        export_opts = []
        for sym in export_symbols or []:
            export_opts.append("/EXPORT:" + sym)

        ld_args = (
            ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename]
        )

        # The MSVC linker generates .lib and .exp files, which cannot be
        # suppressed by any linker switches. The .lib files may even be
        # needed! Make sure they are generated in the temporary build
        # directory. Since they have different names for debug and release
        # builds, they can go into the same directory.
        if export_symbols is not None:
            (dll_name, dll_ext) = os.path.splitext(
                os.path.basename(output_filename)
            )
            implib_file = os.path.join(
                os.path.dirname(objects[0]), self.library_filename(dll_name)
            )
            ld_args.append('/IMPLIB:' + implib_file)

        if extra_preargs:
            ld_args[:0] = extra_preargs
        if extra_postargs:
            ld_args.extend(extra_postargs)

        self.mkpath(os.path.dirname(output_filename))
        try:
            self.spawn([self.linker] + ld_args)
        except DistutilsExecError as msg:
            raise LinkError(msg)

    else:
        log.debug("skipping %s (up-to-date)", output_filename)
|
| 573 |
+
|
| 574 |
+
# -- Miscellaneous methods -----------------------------------------
|
| 575 |
+
# These are all used by the 'gen_lib_options() function, in
|
| 576 |
+
# ccompiler.py.
|
| 577 |
+
|
| 578 |
+
def library_dir_option(self, dir):
    """Return the linker switch adding *dir* to the library search path."""
    return "/LIBPATH:" + dir
|
| 580 |
+
|
| 581 |
+
def runtime_library_dir_option(self, dir):
    """MSVC has no runtime-search-path (rpath) concept; always raises."""
    raise DistutilsPlatformError(
        "don't know how to set runtime library search path for MSVC++"
    )
|
| 585 |
+
|
| 586 |
+
def library_option(self, lib):
    """Map a bare library name to the filename handed to the linker."""
    return self.library_filename(lib)
|
| 588 |
+
|
| 589 |
+
def find_library_file(self, dirs, lib, debug=0):
|
| 590 |
+
# Prefer a debugging library if found (and requested), but deal
|
| 591 |
+
# with it if we don't have one.
|
| 592 |
+
if debug:
|
| 593 |
+
try_names = [lib + "_d", lib]
|
| 594 |
+
else:
|
| 595 |
+
try_names = [lib]
|
| 596 |
+
for dir in dirs:
|
| 597 |
+
for name in try_names:
|
| 598 |
+
libfile = os.path.join(dir, self.library_filename(name))
|
| 599 |
+
if os.path.exists(libfile):
|
| 600 |
+
return libfile
|
| 601 |
+
else:
|
| 602 |
+
# Oops, didn't find it in *any* of 'dirs'
|
| 603 |
+
return None
|
| 604 |
+
|
| 605 |
+
# Helper methods for using the MSVC registry settings
|
| 606 |
+
|
| 607 |
+
def find_exe(self, exe):
|
| 608 |
+
"""Return path to an MSVC executable program.
|
| 609 |
+
|
| 610 |
+
Tries to find the program in several places: first, one of the
|
| 611 |
+
MSVC program search paths from the registry; next, the directories
|
| 612 |
+
in the PATH environment variable. If any of those work, return an
|
| 613 |
+
absolute path that is known to exist. If none of them work, just
|
| 614 |
+
return the original program name, 'exe'.
|
| 615 |
+
"""
|
| 616 |
+
for p in self.__paths:
|
| 617 |
+
fn = os.path.join(os.path.abspath(p), exe)
|
| 618 |
+
if os.path.isfile(fn):
|
| 619 |
+
return fn
|
| 620 |
+
|
| 621 |
+
# didn't find it; try existing path
|
| 622 |
+
for p in os.environ['Path'].split(';'):
|
| 623 |
+
fn = os.path.join(os.path.abspath(p), exe)
|
| 624 |
+
if os.path.isfile(fn):
|
| 625 |
+
return fn
|
| 626 |
+
|
| 627 |
+
return exe
|
| 628 |
+
|
| 629 |
+
def get_msvc_paths(self, path, platform='x86'):
|
| 630 |
+
"""Get a list of devstudio directories (include, lib or path).
|
| 631 |
+
|
| 632 |
+
Return a list of strings. The list will be empty if unable to
|
| 633 |
+
access the registry or appropriate registry keys not found.
|
| 634 |
+
"""
|
| 635 |
+
if not _can_read_reg:
|
| 636 |
+
return []
|
| 637 |
+
|
| 638 |
+
path = path + " dirs"
|
| 639 |
+
if self.__version >= 7:
|
| 640 |
+
key = r"{}\{:0.1f}\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories".format(
|
| 641 |
+
self.__root,
|
| 642 |
+
self.__version,
|
| 643 |
+
)
|
| 644 |
+
else:
|
| 645 |
+
key = (
|
| 646 |
+
r"%s\6.0\Build System\Components\Platforms"
|
| 647 |
+
r"\Win32 (%s)\Directories" % (self.__root, platform)
|
| 648 |
+
)
|
| 649 |
+
|
| 650 |
+
for base in HKEYS:
|
| 651 |
+
d = read_values(base, key)
|
| 652 |
+
if d:
|
| 653 |
+
if self.__version >= 7:
|
| 654 |
+
return self.__macros.sub(d[path]).split(";")
|
| 655 |
+
else:
|
| 656 |
+
return d[path].split(";")
|
| 657 |
+
# MSVC 6 seems to create the registry entries we need only when
|
| 658 |
+
# the GUI is run.
|
| 659 |
+
if self.__version == 6:
|
| 660 |
+
for base in HKEYS:
|
| 661 |
+
if read_values(base, r"%s\6.0" % self.__root) is not None:
|
| 662 |
+
self.warn(
|
| 663 |
+
"It seems you have Visual Studio 6 installed, "
|
| 664 |
+
"but the expected registry settings are not present.\n"
|
| 665 |
+
"You must at least run the Visual Studio GUI once "
|
| 666 |
+
"so that these entries are created."
|
| 667 |
+
)
|
| 668 |
+
break
|
| 669 |
+
return []
|
| 670 |
+
|
| 671 |
+
def set_path_env_var(self, name):
|
| 672 |
+
"""Set environment variable 'name' to an MSVC path type value.
|
| 673 |
+
|
| 674 |
+
This is equivalent to a SET command prior to execution of spawned
|
| 675 |
+
commands.
|
| 676 |
+
"""
|
| 677 |
+
|
| 678 |
+
if name == "lib":
|
| 679 |
+
p = self.get_msvc_paths("library")
|
| 680 |
+
else:
|
| 681 |
+
p = self.get_msvc_paths(name)
|
| 682 |
+
if p:
|
| 683 |
+
os.environ[name] = ';'.join(p)
|
| 684 |
+
|
| 685 |
+
|
| 686 |
+
if get_build_version() >= 8.0:
|
| 687 |
+
log.debug("Importing new compiler from distutils.msvc9compiler")
|
| 688 |
+
OldMSVCCompiler = MSVCCompiler
|
| 689 |
+
from distutils.msvc9compiler import MSVCCompiler
|
| 690 |
+
|
| 691 |
+
# get_build_architecture not really relevant now we support cross-compile
|
| 692 |
+
from distutils.msvc9compiler import MacroExpander # noqa: F811
|
.venv/Lib/site-packages/setuptools/_distutils/py38compat.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
def aix_platform(osname, version, release):
|
| 2 |
+
try:
|
| 3 |
+
import _aix_support
|
| 4 |
+
|
| 5 |
+
return _aix_support.aix_platform()
|
| 6 |
+
except ImportError:
|
| 7 |
+
pass
|
| 8 |
+
return "{}-{}.{}".format(osname, version, release)
|
.venv/Lib/site-packages/setuptools/_distutils/py39compat.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
import platform
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
def add_ext_suffix_39(vars):
|
| 6 |
+
"""
|
| 7 |
+
Ensure vars contains 'EXT_SUFFIX'. pypa/distutils#130
|
| 8 |
+
"""
|
| 9 |
+
import _imp
|
| 10 |
+
|
| 11 |
+
ext_suffix = _imp.extension_suffixes()[0]
|
| 12 |
+
vars.update(
|
| 13 |
+
EXT_SUFFIX=ext_suffix,
|
| 14 |
+
# sysconfig sets SO to match EXT_SUFFIX, so maintain
|
| 15 |
+
# that expectation.
|
| 16 |
+
# https://github.com/python/cpython/blob/785cc6770588de087d09e89a69110af2542be208/Lib/sysconfig.py#L671-L673
|
| 17 |
+
SO=ext_suffix,
|
| 18 |
+
)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
needs_ext_suffix = sys.version_info < (3, 10) and platform.system() == 'Windows'
|
| 22 |
+
add_ext_suffix = add_ext_suffix_39 if needs_ext_suffix else lambda vars: None
|
.venv/Lib/site-packages/setuptools/_distutils/spawn.py
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.spawn
|
| 2 |
+
|
| 3 |
+
Provides the 'spawn()' function, a front-end to various platform-
|
| 4 |
+
specific functions for launching another program in a sub-process.
|
| 5 |
+
Also provides the 'find_executable()' to search the path for a given
|
| 6 |
+
executable name.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import sys
|
| 10 |
+
import os
|
| 11 |
+
import subprocess
|
| 12 |
+
|
| 13 |
+
from .errors import DistutilsExecError
|
| 14 |
+
from .debug import DEBUG
|
| 15 |
+
from ._log import log
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): # noqa: C901
|
| 19 |
+
"""Run another program, specified as a command list 'cmd', in a new process.
|
| 20 |
+
|
| 21 |
+
'cmd' is just the argument list for the new process, ie.
|
| 22 |
+
cmd[0] is the program to run and cmd[1:] are the rest of its arguments.
|
| 23 |
+
There is no way to run a program with a name different from that of its
|
| 24 |
+
executable.
|
| 25 |
+
|
| 26 |
+
If 'search_path' is true (the default), the system's executable
|
| 27 |
+
search path will be used to find the program; otherwise, cmd[0]
|
| 28 |
+
must be the exact path to the executable. If 'dry_run' is true,
|
| 29 |
+
the command will not actually be run.
|
| 30 |
+
|
| 31 |
+
Raise DistutilsExecError if running the program fails in any way; just
|
| 32 |
+
return on success.
|
| 33 |
+
"""
|
| 34 |
+
# cmd is documented as a list, but just in case some code passes a tuple
|
| 35 |
+
# in, protect our %-formatting code against horrible death
|
| 36 |
+
cmd = list(cmd)
|
| 37 |
+
|
| 38 |
+
log.info(subprocess.list2cmdline(cmd))
|
| 39 |
+
if dry_run:
|
| 40 |
+
return
|
| 41 |
+
|
| 42 |
+
if search_path:
|
| 43 |
+
executable = find_executable(cmd[0])
|
| 44 |
+
if executable is not None:
|
| 45 |
+
cmd[0] = executable
|
| 46 |
+
|
| 47 |
+
env = env if env is not None else dict(os.environ)
|
| 48 |
+
|
| 49 |
+
if sys.platform == 'darwin':
|
| 50 |
+
from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver
|
| 51 |
+
|
| 52 |
+
macosx_target_ver = get_macosx_target_ver()
|
| 53 |
+
if macosx_target_ver:
|
| 54 |
+
env[MACOSX_VERSION_VAR] = macosx_target_ver
|
| 55 |
+
|
| 56 |
+
try:
|
| 57 |
+
proc = subprocess.Popen(cmd, env=env)
|
| 58 |
+
proc.wait()
|
| 59 |
+
exitcode = proc.returncode
|
| 60 |
+
except OSError as exc:
|
| 61 |
+
if not DEBUG:
|
| 62 |
+
cmd = cmd[0]
|
| 63 |
+
raise DistutilsExecError(
|
| 64 |
+
"command {!r} failed: {}".format(cmd, exc.args[-1])
|
| 65 |
+
) from exc
|
| 66 |
+
|
| 67 |
+
if exitcode:
|
| 68 |
+
if not DEBUG:
|
| 69 |
+
cmd = cmd[0]
|
| 70 |
+
raise DistutilsExecError(
|
| 71 |
+
"command {!r} failed with exit code {}".format(cmd, exitcode)
|
| 72 |
+
)
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def find_executable(executable, path=None):
|
| 76 |
+
"""Tries to find 'executable' in the directories listed in 'path'.
|
| 77 |
+
|
| 78 |
+
A string listing directories separated by 'os.pathsep'; defaults to
|
| 79 |
+
os.environ['PATH']. Returns the complete filename or None if not found.
|
| 80 |
+
"""
|
| 81 |
+
_, ext = os.path.splitext(executable)
|
| 82 |
+
if (sys.platform == 'win32') and (ext != '.exe'):
|
| 83 |
+
executable = executable + '.exe'
|
| 84 |
+
|
| 85 |
+
if os.path.isfile(executable):
|
| 86 |
+
return executable
|
| 87 |
+
|
| 88 |
+
if path is None:
|
| 89 |
+
path = os.environ.get('PATH', None)
|
| 90 |
+
if path is None:
|
| 91 |
+
try:
|
| 92 |
+
path = os.confstr("CS_PATH")
|
| 93 |
+
except (AttributeError, ValueError):
|
| 94 |
+
# os.confstr() or CS_PATH is not available
|
| 95 |
+
path = os.defpath
|
| 96 |
+
# bpo-35755: Don't use os.defpath if the PATH environment variable is
|
| 97 |
+
# set to an empty string
|
| 98 |
+
|
| 99 |
+
# PATH='' doesn't match, whereas PATH=':' looks in the current directory
|
| 100 |
+
if not path:
|
| 101 |
+
return None
|
| 102 |
+
|
| 103 |
+
paths = path.split(os.pathsep)
|
| 104 |
+
for p in paths:
|
| 105 |
+
f = os.path.join(p, executable)
|
| 106 |
+
if os.path.isfile(f):
|
| 107 |
+
# the file exists, we have a shot at spawn working
|
| 108 |
+
return f
|
| 109 |
+
return None
|
.venv/Lib/site-packages/setuptools/_distutils/sysconfig.py
ADDED
|
@@ -0,0 +1,559 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Provide access to Python's configuration information. The specific
|
| 2 |
+
configuration variables available depend heavily on the platform and
|
| 3 |
+
configuration. The values may be retrieved using
|
| 4 |
+
get_config_var(name), and the list of variables is available via
|
| 5 |
+
get_config_vars().keys(). Additional convenience functions are also
|
| 6 |
+
available.
|
| 7 |
+
|
| 8 |
+
Written by: Fred L. Drake, Jr.
|
| 9 |
+
Email: <fdrake@acm.org>
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
import os
|
| 13 |
+
import re
|
| 14 |
+
import sys
|
| 15 |
+
import sysconfig
|
| 16 |
+
import pathlib
|
| 17 |
+
|
| 18 |
+
from .errors import DistutilsPlatformError
|
| 19 |
+
from . import py39compat
|
| 20 |
+
from ._functools import pass_none
|
| 21 |
+
|
| 22 |
+
IS_PYPY = '__pypy__' in sys.builtin_module_names
|
| 23 |
+
|
| 24 |
+
# These are needed in a couple of spots, so just compute them once.
|
| 25 |
+
PREFIX = os.path.normpath(sys.prefix)
|
| 26 |
+
EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
|
| 27 |
+
BASE_PREFIX = os.path.normpath(sys.base_prefix)
|
| 28 |
+
BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix)
|
| 29 |
+
|
| 30 |
+
# Path to the base directory of the project. On Windows the binary may
|
| 31 |
+
# live in project/PCbuild/win32 or project/PCbuild/amd64.
|
| 32 |
+
# set for cross builds
|
| 33 |
+
if "_PYTHON_PROJECT_BASE" in os.environ:
|
| 34 |
+
project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"])
|
| 35 |
+
else:
|
| 36 |
+
if sys.executable:
|
| 37 |
+
project_base = os.path.dirname(os.path.abspath(sys.executable))
|
| 38 |
+
else:
|
| 39 |
+
# sys.executable can be empty if argv[0] has been changed and Python is
|
| 40 |
+
# unable to retrieve the real program name
|
| 41 |
+
project_base = os.getcwd()
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def _is_python_source_dir(d):
|
| 45 |
+
"""
|
| 46 |
+
Return True if the target directory appears to point to an
|
| 47 |
+
un-installed Python.
|
| 48 |
+
"""
|
| 49 |
+
modules = pathlib.Path(d).joinpath('Modules')
|
| 50 |
+
return any(modules.joinpath(fn).is_file() for fn in ('Setup', 'Setup.local'))
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
_sys_home = getattr(sys, '_home', None)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def _is_parent(dir_a, dir_b):
|
| 57 |
+
"""
|
| 58 |
+
Return True if a is a parent of b.
|
| 59 |
+
"""
|
| 60 |
+
return os.path.normcase(dir_a).startswith(os.path.normcase(dir_b))
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
if os.name == 'nt':
|
| 64 |
+
|
| 65 |
+
@pass_none
|
| 66 |
+
def _fix_pcbuild(d):
|
| 67 |
+
# In a venv, sys._home will be inside BASE_PREFIX rather than PREFIX.
|
| 68 |
+
prefixes = PREFIX, BASE_PREFIX
|
| 69 |
+
matched = (
|
| 70 |
+
prefix
|
| 71 |
+
for prefix in prefixes
|
| 72 |
+
if _is_parent(d, os.path.join(prefix, "PCbuild"))
|
| 73 |
+
)
|
| 74 |
+
return next(matched, d)
|
| 75 |
+
|
| 76 |
+
project_base = _fix_pcbuild(project_base)
|
| 77 |
+
_sys_home = _fix_pcbuild(_sys_home)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def _python_build():
|
| 81 |
+
if _sys_home:
|
| 82 |
+
return _is_python_source_dir(_sys_home)
|
| 83 |
+
return _is_python_source_dir(project_base)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
python_build = _python_build()
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
# Calculate the build qualifier flags if they are defined. Adding the flags
|
| 90 |
+
# to the include and lib directories only makes sense for an installation, not
|
| 91 |
+
# an in-source build.
|
| 92 |
+
build_flags = ''
|
| 93 |
+
try:
|
| 94 |
+
if not python_build:
|
| 95 |
+
build_flags = sys.abiflags
|
| 96 |
+
except AttributeError:
|
| 97 |
+
# It's not a configure-based build, so the sys module doesn't have
|
| 98 |
+
# this attribute, which is fine.
|
| 99 |
+
pass
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def get_python_version():
|
| 103 |
+
"""Return a string containing the major and minor Python version,
|
| 104 |
+
leaving off the patchlevel. Sample return values could be '1.5'
|
| 105 |
+
or '2.2'.
|
| 106 |
+
"""
|
| 107 |
+
return '%d.%d' % sys.version_info[:2]
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def get_python_inc(plat_specific=0, prefix=None):
|
| 111 |
+
"""Return the directory containing installed Python header files.
|
| 112 |
+
|
| 113 |
+
If 'plat_specific' is false (the default), this is the path to the
|
| 114 |
+
non-platform-specific header files, i.e. Python.h and so on;
|
| 115 |
+
otherwise, this is the path to platform-specific header files
|
| 116 |
+
(namely pyconfig.h).
|
| 117 |
+
|
| 118 |
+
If 'prefix' is supplied, use it instead of sys.base_prefix or
|
| 119 |
+
sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
|
| 120 |
+
"""
|
| 121 |
+
default_prefix = BASE_EXEC_PREFIX if plat_specific else BASE_PREFIX
|
| 122 |
+
resolved_prefix = prefix if prefix is not None else default_prefix
|
| 123 |
+
try:
|
| 124 |
+
getter = globals()[f'_get_python_inc_{os.name}']
|
| 125 |
+
except KeyError:
|
| 126 |
+
raise DistutilsPlatformError(
|
| 127 |
+
"I don't know where Python installs its C header files "
|
| 128 |
+
"on platform '%s'" % os.name
|
| 129 |
+
)
|
| 130 |
+
return getter(resolved_prefix, prefix, plat_specific)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
@pass_none
|
| 134 |
+
def _extant(path):
|
| 135 |
+
"""
|
| 136 |
+
Replace path with None if it doesn't exist.
|
| 137 |
+
"""
|
| 138 |
+
return path if os.path.exists(path) else None
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def _get_python_inc_posix(prefix, spec_prefix, plat_specific):
|
| 142 |
+
if IS_PYPY and sys.version_info < (3, 8):
|
| 143 |
+
return os.path.join(prefix, 'include')
|
| 144 |
+
return (
|
| 145 |
+
_get_python_inc_posix_python(plat_specific)
|
| 146 |
+
or _extant(_get_python_inc_from_config(plat_specific, spec_prefix))
|
| 147 |
+
or _get_python_inc_posix_prefix(prefix)
|
| 148 |
+
)
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def _get_python_inc_posix_python(plat_specific):
|
| 152 |
+
"""
|
| 153 |
+
Assume the executable is in the build directory. The
|
| 154 |
+
pyconfig.h file should be in the same directory. Since
|
| 155 |
+
the build directory may not be the source directory,
|
| 156 |
+
use "srcdir" from the makefile to find the "Include"
|
| 157 |
+
directory.
|
| 158 |
+
"""
|
| 159 |
+
if not python_build:
|
| 160 |
+
return
|
| 161 |
+
if plat_specific:
|
| 162 |
+
return _sys_home or project_base
|
| 163 |
+
incdir = os.path.join(get_config_var('srcdir'), 'Include')
|
| 164 |
+
return os.path.normpath(incdir)
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def _get_python_inc_from_config(plat_specific, spec_prefix):
|
| 168 |
+
"""
|
| 169 |
+
If no prefix was explicitly specified, provide the include
|
| 170 |
+
directory from the config vars. Useful when
|
| 171 |
+
cross-compiling, since the config vars may come from
|
| 172 |
+
the host
|
| 173 |
+
platform Python installation, while the current Python
|
| 174 |
+
executable is from the build platform installation.
|
| 175 |
+
|
| 176 |
+
>>> monkeypatch = getfixture('monkeypatch')
|
| 177 |
+
>>> gpifc = _get_python_inc_from_config
|
| 178 |
+
>>> monkeypatch.setitem(gpifc.__globals__, 'get_config_var', str.lower)
|
| 179 |
+
>>> gpifc(False, '/usr/bin/')
|
| 180 |
+
>>> gpifc(False, '')
|
| 181 |
+
>>> gpifc(False, None)
|
| 182 |
+
'includepy'
|
| 183 |
+
>>> gpifc(True, None)
|
| 184 |
+
'confincludepy'
|
| 185 |
+
"""
|
| 186 |
+
if spec_prefix is None:
|
| 187 |
+
return get_config_var('CONF' * plat_specific + 'INCLUDEPY')
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
def _get_python_inc_posix_prefix(prefix):
|
| 191 |
+
implementation = 'pypy' if IS_PYPY else 'python'
|
| 192 |
+
python_dir = implementation + get_python_version() + build_flags
|
| 193 |
+
return os.path.join(prefix, "include", python_dir)
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def _get_python_inc_nt(prefix, spec_prefix, plat_specific):
|
| 197 |
+
if python_build:
|
| 198 |
+
# Include both the include and PC dir to ensure we can find
|
| 199 |
+
# pyconfig.h
|
| 200 |
+
return (
|
| 201 |
+
os.path.join(prefix, "include")
|
| 202 |
+
+ os.path.pathsep
|
| 203 |
+
+ os.path.join(prefix, "PC")
|
| 204 |
+
)
|
| 205 |
+
return os.path.join(prefix, "include")
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
# allow this behavior to be monkey-patched. Ref pypa/distutils#2.
|
| 209 |
+
def _posix_lib(standard_lib, libpython, early_prefix, prefix):
|
| 210 |
+
if standard_lib:
|
| 211 |
+
return libpython
|
| 212 |
+
else:
|
| 213 |
+
return os.path.join(libpython, "site-packages")
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
|
| 217 |
+
"""Return the directory containing the Python library (standard or
|
| 218 |
+
site additions).
|
| 219 |
+
|
| 220 |
+
If 'plat_specific' is true, return the directory containing
|
| 221 |
+
platform-specific modules, i.e. any module from a non-pure-Python
|
| 222 |
+
module distribution; otherwise, return the platform-shared library
|
| 223 |
+
directory. If 'standard_lib' is true, return the directory
|
| 224 |
+
containing standard Python library modules; otherwise, return the
|
| 225 |
+
directory for site-specific modules.
|
| 226 |
+
|
| 227 |
+
If 'prefix' is supplied, use it instead of sys.base_prefix or
|
| 228 |
+
sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
|
| 229 |
+
"""
|
| 230 |
+
|
| 231 |
+
if IS_PYPY and sys.version_info < (3, 8):
|
| 232 |
+
# PyPy-specific schema
|
| 233 |
+
if prefix is None:
|
| 234 |
+
prefix = PREFIX
|
| 235 |
+
if standard_lib:
|
| 236 |
+
return os.path.join(prefix, "lib-python", sys.version[0])
|
| 237 |
+
return os.path.join(prefix, 'site-packages')
|
| 238 |
+
|
| 239 |
+
early_prefix = prefix
|
| 240 |
+
|
| 241 |
+
if prefix is None:
|
| 242 |
+
if standard_lib:
|
| 243 |
+
prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
|
| 244 |
+
else:
|
| 245 |
+
prefix = plat_specific and EXEC_PREFIX or PREFIX
|
| 246 |
+
|
| 247 |
+
if os.name == "posix":
|
| 248 |
+
if plat_specific or standard_lib:
|
| 249 |
+
# Platform-specific modules (any module from a non-pure-Python
|
| 250 |
+
# module distribution) or standard Python library modules.
|
| 251 |
+
libdir = getattr(sys, "platlibdir", "lib")
|
| 252 |
+
else:
|
| 253 |
+
# Pure Python
|
| 254 |
+
libdir = "lib"
|
| 255 |
+
implementation = 'pypy' if IS_PYPY else 'python'
|
| 256 |
+
libpython = os.path.join(prefix, libdir, implementation + get_python_version())
|
| 257 |
+
return _posix_lib(standard_lib, libpython, early_prefix, prefix)
|
| 258 |
+
elif os.name == "nt":
|
| 259 |
+
if standard_lib:
|
| 260 |
+
return os.path.join(prefix, "Lib")
|
| 261 |
+
else:
|
| 262 |
+
return os.path.join(prefix, "Lib", "site-packages")
|
| 263 |
+
else:
|
| 264 |
+
raise DistutilsPlatformError(
|
| 265 |
+
"I don't know where Python installs its library "
|
| 266 |
+
"on platform '%s'" % os.name
|
| 267 |
+
)
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
def customize_compiler(compiler): # noqa: C901
|
| 271 |
+
"""Do any platform-specific customization of a CCompiler instance.
|
| 272 |
+
|
| 273 |
+
Mainly needed on Unix, so we can plug in the information that
|
| 274 |
+
varies across Unices and is stored in Python's Makefile.
|
| 275 |
+
"""
|
| 276 |
+
if compiler.compiler_type == "unix":
|
| 277 |
+
if sys.platform == "darwin":
|
| 278 |
+
# Perform first-time customization of compiler-related
|
| 279 |
+
# config vars on OS X now that we know we need a compiler.
|
| 280 |
+
# This is primarily to support Pythons from binary
|
| 281 |
+
# installers. The kind and paths to build tools on
|
| 282 |
+
# the user system may vary significantly from the system
|
| 283 |
+
# that Python itself was built on. Also the user OS
|
| 284 |
+
# version and build tools may not support the same set
|
| 285 |
+
# of CPU architectures for universal builds.
|
| 286 |
+
global _config_vars
|
| 287 |
+
# Use get_config_var() to ensure _config_vars is initialized.
|
| 288 |
+
if not get_config_var('CUSTOMIZED_OSX_COMPILER'):
|
| 289 |
+
import _osx_support
|
| 290 |
+
|
| 291 |
+
_osx_support.customize_compiler(_config_vars)
|
| 292 |
+
_config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
|
| 293 |
+
|
| 294 |
+
(
|
| 295 |
+
cc,
|
| 296 |
+
cxx,
|
| 297 |
+
cflags,
|
| 298 |
+
ccshared,
|
| 299 |
+
ldshared,
|
| 300 |
+
shlib_suffix,
|
| 301 |
+
ar,
|
| 302 |
+
ar_flags,
|
| 303 |
+
) = get_config_vars(
|
| 304 |
+
'CC',
|
| 305 |
+
'CXX',
|
| 306 |
+
'CFLAGS',
|
| 307 |
+
'CCSHARED',
|
| 308 |
+
'LDSHARED',
|
| 309 |
+
'SHLIB_SUFFIX',
|
| 310 |
+
'AR',
|
| 311 |
+
'ARFLAGS',
|
| 312 |
+
)
|
| 313 |
+
|
| 314 |
+
if 'CC' in os.environ:
|
| 315 |
+
newcc = os.environ['CC']
|
| 316 |
+
if 'LDSHARED' not in os.environ and ldshared.startswith(cc):
|
| 317 |
+
# If CC is overridden, use that as the default
|
| 318 |
+
# command for LDSHARED as well
|
| 319 |
+
ldshared = newcc + ldshared[len(cc) :]
|
| 320 |
+
cc = newcc
|
| 321 |
+
if 'CXX' in os.environ:
|
| 322 |
+
cxx = os.environ['CXX']
|
| 323 |
+
if 'LDSHARED' in os.environ:
|
| 324 |
+
ldshared = os.environ['LDSHARED']
|
| 325 |
+
if 'CPP' in os.environ:
|
| 326 |
+
cpp = os.environ['CPP']
|
| 327 |
+
else:
|
| 328 |
+
cpp = cc + " -E" # not always
|
| 329 |
+
if 'LDFLAGS' in os.environ:
|
| 330 |
+
ldshared = ldshared + ' ' + os.environ['LDFLAGS']
|
| 331 |
+
if 'CFLAGS' in os.environ:
|
| 332 |
+
cflags = cflags + ' ' + os.environ['CFLAGS']
|
| 333 |
+
ldshared = ldshared + ' ' + os.environ['CFLAGS']
|
| 334 |
+
if 'CPPFLAGS' in os.environ:
|
| 335 |
+
cpp = cpp + ' ' + os.environ['CPPFLAGS']
|
| 336 |
+
cflags = cflags + ' ' + os.environ['CPPFLAGS']
|
| 337 |
+
ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
|
| 338 |
+
if 'AR' in os.environ:
|
| 339 |
+
ar = os.environ['AR']
|
| 340 |
+
if 'ARFLAGS' in os.environ:
|
| 341 |
+
archiver = ar + ' ' + os.environ['ARFLAGS']
|
| 342 |
+
else:
|
| 343 |
+
archiver = ar + ' ' + ar_flags
|
| 344 |
+
|
| 345 |
+
cc_cmd = cc + ' ' + cflags
|
| 346 |
+
compiler.set_executables(
|
| 347 |
+
preprocessor=cpp,
|
| 348 |
+
compiler=cc_cmd,
|
| 349 |
+
compiler_so=cc_cmd + ' ' + ccshared,
|
| 350 |
+
compiler_cxx=cxx,
|
| 351 |
+
linker_so=ldshared,
|
| 352 |
+
linker_exe=cc,
|
| 353 |
+
archiver=archiver,
|
| 354 |
+
)
|
| 355 |
+
|
| 356 |
+
if 'RANLIB' in os.environ and compiler.executables.get('ranlib', None):
|
| 357 |
+
compiler.set_executables(ranlib=os.environ['RANLIB'])
|
| 358 |
+
|
| 359 |
+
compiler.shared_lib_extension = shlib_suffix
|
| 360 |
+
|
| 361 |
+
|
| 362 |
+
def get_config_h_filename():
|
| 363 |
+
"""Return full pathname of installed pyconfig.h file."""
|
| 364 |
+
if python_build:
|
| 365 |
+
if os.name == "nt":
|
| 366 |
+
inc_dir = os.path.join(_sys_home or project_base, "PC")
|
| 367 |
+
else:
|
| 368 |
+
inc_dir = _sys_home or project_base
|
| 369 |
+
return os.path.join(inc_dir, 'pyconfig.h')
|
| 370 |
+
else:
|
| 371 |
+
return sysconfig.get_config_h_filename()
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
def get_makefile_filename():
|
| 375 |
+
"""Return full pathname of installed Makefile from the Python build."""
|
| 376 |
+
return sysconfig.get_makefile_filename()
|
| 377 |
+
|
| 378 |
+
|
| 379 |
+
def parse_config_h(fp, g=None):
|
| 380 |
+
"""Parse a config.h-style file.
|
| 381 |
+
|
| 382 |
+
A dictionary containing name/value pairs is returned. If an
|
| 383 |
+
optional dictionary is passed in as the second argument, it is
|
| 384 |
+
used instead of a new dictionary.
|
| 385 |
+
"""
|
| 386 |
+
return sysconfig.parse_config_h(fp, vars=g)
|
| 387 |
+
|
| 388 |
+
|
| 389 |
+
# Regexes needed for parsing Makefile (and similar syntaxes,
|
| 390 |
+
# like old-style Setup files).
|
| 391 |
+
_variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
|
| 392 |
+
_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
|
| 393 |
+
_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
|
| 394 |
+
|
| 395 |
+
|
| 396 |
+
def parse_makefile(fn, g=None): # noqa: C901
|
| 397 |
+
"""Parse a Makefile-style file.
|
| 398 |
+
|
| 399 |
+
A dictionary containing name/value pairs is returned. If an
|
| 400 |
+
optional dictionary is passed in as the second argument, it is
|
| 401 |
+
used instead of a new dictionary.
|
| 402 |
+
"""
|
| 403 |
+
from distutils.text_file import TextFile
|
| 404 |
+
|
| 405 |
+
fp = TextFile(
|
| 406 |
+
fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape"
|
| 407 |
+
)
|
| 408 |
+
|
| 409 |
+
if g is None:
|
| 410 |
+
g = {}
|
| 411 |
+
done = {}
|
| 412 |
+
notdone = {}
|
| 413 |
+
|
| 414 |
+
while True:
|
| 415 |
+
line = fp.readline()
|
| 416 |
+
if line is None: # eof
|
| 417 |
+
break
|
| 418 |
+
m = _variable_rx.match(line)
|
| 419 |
+
if m:
|
| 420 |
+
n, v = m.group(1, 2)
|
| 421 |
+
v = v.strip()
|
| 422 |
+
# `$$' is a literal `$' in make
|
| 423 |
+
tmpv = v.replace('$$', '')
|
| 424 |
+
|
| 425 |
+
if "$" in tmpv:
|
| 426 |
+
notdone[n] = v
|
| 427 |
+
else:
|
| 428 |
+
try:
|
| 429 |
+
v = int(v)
|
| 430 |
+
except ValueError:
|
| 431 |
+
# insert literal `$'
|
| 432 |
+
done[n] = v.replace('$$', '$')
|
| 433 |
+
else:
|
| 434 |
+
done[n] = v
|
| 435 |
+
|
| 436 |
+
# Variables with a 'PY_' prefix in the makefile. These need to
|
| 437 |
+
# be made available without that prefix through sysconfig.
|
| 438 |
+
# Special care is needed to ensure that variable expansion works, even
|
| 439 |
+
# if the expansion uses the name without a prefix.
|
| 440 |
+
renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')
|
| 441 |
+
|
| 442 |
+
# do variable interpolation here
|
| 443 |
+
while notdone:
|
| 444 |
+
for name in list(notdone):
|
| 445 |
+
value = notdone[name]
|
| 446 |
+
m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
|
| 447 |
+
if m:
|
| 448 |
+
n = m.group(1)
|
| 449 |
+
found = True
|
| 450 |
+
if n in done:
|
| 451 |
+
item = str(done[n])
|
| 452 |
+
elif n in notdone:
|
| 453 |
+
# get it on a subsequent round
|
| 454 |
+
found = False
|
| 455 |
+
elif n in os.environ:
|
| 456 |
+
# do it like make: fall back to environment
|
| 457 |
+
item = os.environ[n]
|
| 458 |
+
|
| 459 |
+
elif n in renamed_variables:
|
| 460 |
+
if name.startswith('PY_') and name[3:] in renamed_variables:
|
| 461 |
+
item = ""
|
| 462 |
+
|
| 463 |
+
elif 'PY_' + n in notdone:
|
| 464 |
+
found = False
|
| 465 |
+
|
| 466 |
+
else:
|
| 467 |
+
item = str(done['PY_' + n])
|
| 468 |
+
else:
|
| 469 |
+
done[n] = item = ""
|
| 470 |
+
if found:
|
| 471 |
+
after = value[m.end() :]
|
| 472 |
+
value = value[: m.start()] + item + after
|
| 473 |
+
if "$" in after:
|
| 474 |
+
notdone[name] = value
|
| 475 |
+
else:
|
| 476 |
+
try:
|
| 477 |
+
value = int(value)
|
| 478 |
+
except ValueError:
|
| 479 |
+
done[name] = value.strip()
|
| 480 |
+
else:
|
| 481 |
+
done[name] = value
|
| 482 |
+
del notdone[name]
|
| 483 |
+
|
| 484 |
+
if name.startswith('PY_') and name[3:] in renamed_variables:
|
| 485 |
+
name = name[3:]
|
| 486 |
+
if name not in done:
|
| 487 |
+
done[name] = value
|
| 488 |
+
else:
|
| 489 |
+
# bogus variable reference; just drop it since we can't deal
|
| 490 |
+
del notdone[name]
|
| 491 |
+
|
| 492 |
+
fp.close()
|
| 493 |
+
|
| 494 |
+
# strip spurious spaces
|
| 495 |
+
for k, v in done.items():
|
| 496 |
+
if isinstance(v, str):
|
| 497 |
+
done[k] = v.strip()
|
| 498 |
+
|
| 499 |
+
# save the results in the global dictionary
|
| 500 |
+
g.update(done)
|
| 501 |
+
return g
|
| 502 |
+
|
| 503 |
+
|
| 504 |
+
def expand_makefile_vars(s, vars):
    """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
    'string' according to 'vars' (a dictionary mapping variable names to
    values).  Variables not present in 'vars' are silently expanded to the
    empty string.  The variable values in 'vars' should not contain further
    variable expansions; if 'vars' is the output of 'parse_makefile()',
    you're fine.  Returns a variable-expanded version of 's'.
    """

    # This algorithm does multiple expansion, so if vars['foo'] contains
    # "${bar}", it will expand ${foo} to ${bar}, and then expand
    # ${bar}... and so forth.  This is fine as long as 'vars' comes from
    # 'parse_makefile()', which takes care of such expansions eagerly,
    # according to make's variable expansion semantics.

    while True:
        m = _findvar1_rx.search(s) or _findvar2_rx.search(s)
        if m:
            (beg, end) = m.span()
            # Default to '' for variables missing from 'vars', as the
            # docstring promises; a bare vars.get(name) would return
            # None and make the concatenation raise TypeError.
            s = s[0:beg] + vars.get(m.group(1), '') + s[end:]
        else:
            break
    return s
|
| 527 |
+
|
| 528 |
+
|
| 529 |
+
# Module-level cache for get_config_vars(); populated lazily on first call.
_config_vars = None
|
| 530 |
+
|
| 531 |
+
|
| 532 |
+
def get_config_vars(*args):
    """With no arguments, return a dictionary of all configuration
    variables relevant for the current platform.  Generally this includes
    everything needed to build extensions and install both pure modules and
    extensions.  On Unix, this means every variable defined in Python's
    installed Makefile; on Windows it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    """
    global _config_vars
    if _config_vars is None:
        # Populate the cache once from the stdlib, then patch in the
        # EXT_SUFFIX compatibility shim.
        _config_vars = dict(sysconfig.get_config_vars())
        py39compat.add_ext_suffix(_config_vars)

    if not args:
        return _config_vars
    return [_config_vars.get(name) for name in args]
|
| 548 |
+
|
| 549 |
+
|
| 550 |
+
def get_config_var(name):
    """Return the value of a single variable using the dictionary
    returned by 'get_config_vars()'.  Equivalent to
    get_config_vars().get(name)
    """
    if name == 'SO':
        # Kept for backward compatibility; 'SO' was renamed long ago.
        import warnings

        warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, stacklevel=2)
    return get_config_vars().get(name)
|
.venv/Lib/site-packages/setuptools/_distutils/text_file.py
ADDED
|
@@ -0,0 +1,286 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""text_file
|
| 2 |
+
|
| 3 |
+
provides the TextFile class, which gives an interface to text files
|
| 4 |
+
that (optionally) takes care of stripping comments, ignoring blank
|
| 5 |
+
lines, and joining lines with backslashes."""
|
| 6 |
+
|
| 7 |
+
import sys
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TextFile:
    """Provides a file-like object that takes care of all the things you
    commonly want to do when processing a text file that has some
    line-by-line syntax: strip comments (as long as "#" is your
    comment character), skip blank lines, join adjacent lines by
    escaping the newline (ie. backslash at end of line), strip
    leading and/or trailing whitespace.  All of these are optional
    and independently controllable.

    Provides a 'warn()' method so you can generate warning messages that
    report physical line number, even if the logical line in question
    spans multiple physical lines.  Also provides 'unreadline()' for
    implementing line-at-a-time lookahead.

    Constructor is called as:

        TextFile (filename=None, file=None, **options)

    It bombs (RuntimeError) if both 'filename' and 'file' are None;
    'filename' should be a string, and 'file' a file object (or
    something that provides 'readline()' and 'close()' methods).  It is
    recommended that you supply at least 'filename', so that TextFile
    can include it in warning messages.  If 'file' is not supplied,
    TextFile creates its own using 'io.open()'.

    The options are all boolean, and affect the value returned by
    'readline()':
      strip_comments [default: true]
        strip from "#" to end-of-line, as well as any whitespace
        leading up to the "#" -- unless it is escaped by a backslash
      lstrip_ws [default: false]
        strip leading whitespace from each line before returning it
      rstrip_ws [default: true]
        strip trailing whitespace (including line terminator!) from
        each line before returning it
      skip_blanks [default: true]
        skip lines that are empty *after* stripping comments and
        whitespace.  (If both lstrip_ws and rstrip_ws are false,
        then some lines may consist of solely whitespace: these will
        *not* be skipped, even if 'skip_blanks' is true.)
      join_lines [default: false]
        if a backslash is the last non-newline character on a line
        after stripping comments and whitespace, join the following line
        to it to form one "logical line"; if N consecutive lines end
        with a backslash, then N+1 physical lines will be joined to
        form one logical line.
      collapse_join [default: false]
        strip leading whitespace from lines that are joined to their
        predecessor; only matters if (join_lines and not lstrip_ws)
      errors [default: 'strict']
        error handler used to decode the file content

    Note that since 'rstrip_ws' can strip the trailing newline, the
    semantics of 'readline()' must differ from those of the builtin file
    object's 'readline()' method!  In particular, 'readline()' returns
    None for end-of-file: an empty string might just be a blank line (or
    an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is
    not."""

    # Per-option defaults; each key also becomes an instance attribute
    # in __init__.
    default_options = {
        'strip_comments': 1,
        'skip_blanks': 1,
        'lstrip_ws': 0,
        'rstrip_ws': 1,
        'join_lines': 0,
        'collapse_join': 0,
        'errors': 'strict',
    }

    def __init__(self, filename=None, file=None, **options):
        """Construct a new TextFile object.  At least one of 'filename'
        (a string) and 'file' (a file-like object) must be supplied.
        The keyword argument options are described above and affect
        the values returned by 'readline()'."""
        if filename is None and file is None:
            raise RuntimeError(
                "you must supply either or both of 'filename' and 'file'"
            )

        # set values for all options -- either from client option hash
        # or fallback to default_options
        for opt in self.default_options.keys():
            if opt in options:
                setattr(self, opt, options[opt])
            else:
                setattr(self, opt, self.default_options[opt])

        # sanity check client option hash
        for opt in options.keys():
            if opt not in self.default_options:
                raise KeyError("invalid TextFile option '%s'" % opt)

        if file is None:
            self.open(filename)
        else:
            self.filename = filename
            self.file = file
            self.current_line = 0  # assuming that file is at BOF!

        # 'linebuf' is a stack of lines that will be emptied before we
        # actually read from the file; it's only populated by an
        # 'unreadline()' operation
        self.linebuf = []

    def open(self, filename):
        """Open a new file named 'filename'.  This overrides both the
        'filename' and 'file' arguments to the constructor."""
        self.filename = filename
        # 'errors' controls decode error handling; encoding is the
        # platform default (matches historical distutils behavior).
        self.file = open(self.filename, errors=self.errors)
        self.current_line = 0

    def close(self):
        """Close the current file and forget everything we know about it
        (filename, current line number)."""
        file = self.file
        self.file = None
        self.filename = None
        self.current_line = None
        file.close()

    def gen_error(self, msg, line=None):
        """Build an error/warning string of the form
        "<filename>, line(s) N: <msg>"; 'line' may be an int or a
        (start, end) pair and defaults to the current line."""
        outmsg = []
        if line is None:
            line = self.current_line
        outmsg.append(self.filename + ", ")
        if isinstance(line, (list, tuple)):
            outmsg.append("lines %d-%d: " % tuple(line))
        else:
            outmsg.append("line %d: " % line)
        outmsg.append(str(msg))
        return "".join(outmsg)

    def error(self, msg, line=None):
        """Raise ValueError with a message tied to the current (or given)
        physical line(s)."""
        raise ValueError("error: " + self.gen_error(msg, line))

    def warn(self, msg, line=None):
        """Print (to stderr) a warning message tied to the current logical
        line in the current file.  If the current logical line in the
        file spans multiple physical lines, the warning refers to the
        whole range, eg. "lines 3-5".  If 'line' supplied, it overrides
        the current line number; it may be a list or tuple to indicate a
        range of physical lines, or an integer for a single physical
        line."""
        sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n")

    def readline(self):  # noqa: C901
        """Read and return a single logical line from the current file (or
        from an internal buffer if lines have previously been "unread"
        with 'unreadline()').  If the 'join_lines' option is true, this
        may involve reading multiple physical lines concatenated into a
        single string.  Updates the current line number, so calling
        'warn()' after 'readline()' emits a warning about the physical
        line(s) just read.  Returns None on end-of-file, since the empty
        string can occur if 'rstrip_ws' is true but 'strip_blanks' is
        not."""
        # If any "unread" lines waiting in 'linebuf', return the top
        # one.  (We don't actually buffer read-ahead data -- lines only
        # get put in 'linebuf' if the client explicitly does an
        # 'unreadline()'.
        if self.linebuf:
            line = self.linebuf[-1]
            del self.linebuf[-1]
            return line

        # Accumulates backslash-continued physical lines when
        # 'join_lines' is enabled.
        buildup_line = ''

        while True:
            # read the line, make it None if EOF
            line = self.file.readline()
            if line == '':
                line = None

            if self.strip_comments and line:
                # Look for the first "#" in the line.  If none, never
                # mind.  If we find one and it's the first character, or
                # is not preceded by "\", then it starts a comment --
                # strip the comment, strip whitespace before it, and
                # carry on.  Otherwise, it's just an escaped "#", so
                # unescape it (and any other escaped "#"'s that might be
                # lurking in there) and otherwise leave the line alone.

                pos = line.find("#")
                if pos == -1:  # no "#" -- no comments
                    pass

                # It's definitely a comment -- either "#" is the first
                # character, or it's elsewhere and unescaped.
                elif pos == 0 or line[pos - 1] != "\\":
                    # Have to preserve the trailing newline, because it's
                    # the job of a later step (rstrip_ws) to remove it --
                    # and if rstrip_ws is false, we'd better preserve it!
                    # (NB. this means that if the final line is all comment
                    # and has no trailing newline, we will think that it's
                    # EOF; I think that's OK.)
                    eol = (line[-1] == '\n') and '\n' or ''
                    line = line[0:pos] + eol

                    # If all that's left is whitespace, then skip line
                    # *now*, before we try to join it to 'buildup_line' --
                    # that way constructs like
                    #   hello \\
                    #   # comment that should be ignored
                    #   there
                    # result in "hello there".
                    if line.strip() == "":
                        continue
                else:  # it's an escaped "#"
                    line = line.replace("\\#", "#")

            # did previous line end with a backslash? then accumulate
            if self.join_lines and buildup_line:
                # oops: end of file
                if line is None:
                    self.warn("continuation line immediately precedes " "end-of-file")
                    return buildup_line

                if self.collapse_join:
                    line = line.lstrip()
                line = buildup_line + line

                # careful: pay attention to line number when incrementing it
                # (a list [start, end] marks a multi-physical-line span)
                if isinstance(self.current_line, list):
                    self.current_line[1] = self.current_line[1] + 1
                else:
                    self.current_line = [self.current_line, self.current_line + 1]
            # just an ordinary line, read it as usual
            else:
                if line is None:  # eof
                    return None

                # still have to be careful about incrementing the line number!
                if isinstance(self.current_line, list):
                    self.current_line = self.current_line[1] + 1
                else:
                    self.current_line = self.current_line + 1

            # strip whitespace however the client wants (leading and
            # trailing, or one or the other, or neither)
            if self.lstrip_ws and self.rstrip_ws:
                line = line.strip()
            elif self.lstrip_ws:
                line = line.lstrip()
            elif self.rstrip_ws:
                line = line.rstrip()

            # blank line (whether we rstrip'ed or not)? skip to next line
            # if appropriate
            if line in ('', '\n') and self.skip_blanks:
                continue

            if self.join_lines:
                if line[-1] == '\\':
                    buildup_line = line[:-1]
                    continue

                if line[-2:] == '\\\n':
                    buildup_line = line[0:-2] + '\n'
                    continue

            # well, I guess there's some actual content there: return it
            return line

    def readlines(self):
        """Read and return the list of all logical lines remaining in the
        current file."""
        lines = []
        while True:
            line = self.readline()
            if line is None:
                return lines
            lines.append(line)

    def unreadline(self, line):
        """Push 'line' (a string) onto an internal buffer that will be
        checked by future 'readline()' calls.  Handy for implementing
        a parser with line-at-a-time lookahead."""
        self.linebuf.append(line)
|
.venv/Lib/site-packages/setuptools/_distutils/unixccompiler.py
ADDED
|
@@ -0,0 +1,400 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.unixccompiler
|
| 2 |
+
|
| 3 |
+
Contains the UnixCCompiler class, a subclass of CCompiler that handles
|
| 4 |
+
the "typical" Unix-style command-line C compiler:
|
| 5 |
+
* macros defined with -Dname[=value]
|
| 6 |
+
* macros undefined with -Uname
|
| 7 |
+
* include search directories specified with -Idir
|
| 8 |
+
* libraries specified with -lllib
|
| 9 |
+
* library search directories specified with -Ldir
|
| 10 |
+
* compile handled by 'cc' (or similar) executable with -c option:
|
| 11 |
+
compiles .c to .o
|
| 12 |
+
* link static library handled by 'ar' command (possibly with 'ranlib')
|
| 13 |
+
* link shared library handled by 'cc -shared'
|
| 14 |
+
"""
|
| 15 |
+
|
| 16 |
+
import os
|
| 17 |
+
import sys
|
| 18 |
+
import re
|
| 19 |
+
import shlex
|
| 20 |
+
import itertools
|
| 21 |
+
|
| 22 |
+
from . import sysconfig
|
| 23 |
+
from .dep_util import newer
|
| 24 |
+
from .ccompiler import CCompiler, gen_preprocess_options, gen_lib_options
|
| 25 |
+
from .errors import DistutilsExecError, CompileError, LibError, LinkError
|
| 26 |
+
from ._log import log
|
| 27 |
+
from ._macos_compat import compiler_fixup
|
| 28 |
+
|
| 29 |
+
# XXX Things not currently handled:
|
| 30 |
+
# * optimization/debug/warning flags; we just use whatever's in Python's
|
| 31 |
+
# Makefile and live with it. Is this adequate? If not, we might
|
| 32 |
+
# have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
|
| 33 |
+
# SunCCompiler, and I suspect down that road lies madness.
|
| 34 |
+
# * even if we don't know a warning flag from an optimization flag,
|
| 35 |
+
# we need some way for outsiders to feed preprocessor/compiler/linker
|
| 36 |
+
# flags in to us -- eg. a sysadmin might want to mandate certain flags
|
| 37 |
+
# via a site config file, or a user might want to set something for
|
| 38 |
+
# compiling this module distribution only via the setup.py command
|
| 39 |
+
# line, whatever. As long as these options come from something on the
|
| 40 |
+
# current system, they can be as system-dependent as they like, and we
|
| 41 |
+
# should just happily stuff them into the preprocessor/compiler/linker
|
| 42 |
+
# options and carry on.
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def _split_env(cmd):
|
| 46 |
+
"""
|
| 47 |
+
For macOS, split command into 'env' portion (if any)
|
| 48 |
+
and the rest of the linker command.
|
| 49 |
+
|
| 50 |
+
>>> _split_env(['a', 'b', 'c'])
|
| 51 |
+
([], ['a', 'b', 'c'])
|
| 52 |
+
>>> _split_env(['/usr/bin/env', 'A=3', 'gcc'])
|
| 53 |
+
(['/usr/bin/env', 'A=3'], ['gcc'])
|
| 54 |
+
"""
|
| 55 |
+
pivot = 0
|
| 56 |
+
if os.path.basename(cmd[0]) == "env":
|
| 57 |
+
pivot = 1
|
| 58 |
+
while '=' in cmd[pivot]:
|
| 59 |
+
pivot += 1
|
| 60 |
+
return cmd[:pivot], cmd[pivot:]
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def _split_aix(cmd):
|
| 64 |
+
"""
|
| 65 |
+
AIX platforms prefix the compiler with the ld_so_aix
|
| 66 |
+
script, so split that from the linker command.
|
| 67 |
+
|
| 68 |
+
>>> _split_aix(['a', 'b', 'c'])
|
| 69 |
+
([], ['a', 'b', 'c'])
|
| 70 |
+
>>> _split_aix(['/bin/foo/ld_so_aix', 'gcc'])
|
| 71 |
+
(['/bin/foo/ld_so_aix'], ['gcc'])
|
| 72 |
+
"""
|
| 73 |
+
pivot = os.path.basename(cmd[0]) == 'ld_so_aix'
|
| 74 |
+
return cmd[:pivot], cmd[pivot:]
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def _linker_params(linker_cmd, compiler_cmd):
|
| 78 |
+
"""
|
| 79 |
+
The linker command usually begins with the compiler
|
| 80 |
+
command (possibly multiple elements), followed by zero or more
|
| 81 |
+
params for shared library building.
|
| 82 |
+
|
| 83 |
+
If the LDSHARED env variable overrides the linker command,
|
| 84 |
+
however, the commands may not match.
|
| 85 |
+
|
| 86 |
+
Return the best guess of the linker parameters by stripping
|
| 87 |
+
the linker command. If the compiler command does not
|
| 88 |
+
match the linker command, assume the linker command is
|
| 89 |
+
just the first element.
|
| 90 |
+
|
| 91 |
+
>>> _linker_params('gcc foo bar'.split(), ['gcc'])
|
| 92 |
+
['foo', 'bar']
|
| 93 |
+
>>> _linker_params('gcc foo bar'.split(), ['other'])
|
| 94 |
+
['foo', 'bar']
|
| 95 |
+
>>> _linker_params('ccache gcc foo bar'.split(), 'ccache gcc'.split())
|
| 96 |
+
['foo', 'bar']
|
| 97 |
+
>>> _linker_params(['gcc'], ['gcc'])
|
| 98 |
+
[]
|
| 99 |
+
"""
|
| 100 |
+
c_len = len(compiler_cmd)
|
| 101 |
+
pivot = c_len if linker_cmd[:c_len] == compiler_cmd else 1
|
| 102 |
+
return linker_cmd[pivot:]
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
class UnixCCompiler(CCompiler):
    # Identifier used by ccompiler's compiler-class registry.
    compiler_type = 'unix'

    # These are used by CCompiler in two places: the constructor sets
    # instance attributes 'preprocessor', 'compiler', etc. from them, and
    # 'set_executable()' allows any of these to be set.  The defaults here
    # are pretty generic; they will probably have to be set by an outsider
    # (eg. using information discovered by the sysconfig about building
    # Python extensions).
    executables = {
        'preprocessor': None,
        'compiler': ["cc"],
        'compiler_so': ["cc"],
        'compiler_cxx': ["cc"],
        'linker_so': ["cc", "-shared"],
        'linker_exe': ["cc"],
        'archiver': ["ar", "-cr"],
        'ranlib': None,
    }

    # macOS archives need ranlib so the static library's table of
    # contents stays current.
    if sys.platform[:6] == "darwin":
        executables['ranlib'] = ["ranlib"]

    # Needed for the filename generation methods provided by the base
    # class, CCompiler.  NB. whoever instantiates/uses a particular
    # UnixCCompiler instance should set 'shared_lib_ext' -- we set a
    # reasonable common default here, but it's not necessarily used on all
    # Unices!

    src_extensions = [".c", ".C", ".cc", ".cxx", ".cpp", ".m"]
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".so"
    dylib_lib_extension = ".dylib"
    xcode_stub_lib_extension = ".tbd"
    static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
    xcode_stub_lib_format = dylib_lib_format
    # Cygwin executables carry a Windows-style suffix.
    if sys.platform == "cygwin":
        exe_extension = ".exe"
|
| 144 |
+
|
| 145 |
+
    def preprocess(
        self,
        source,
        output_file=None,
        macros=None,
        include_dirs=None,
        extra_preargs=None,
        extra_postargs=None,
    ):
        """Run the preprocessor on 'source', writing to 'output_file'
        (or stdout if 'output_file' is None).  Skips the run entirely
        when the output already exists and is newer than the source,
        unless 'self.force' is set.  Raises CompileError on failure."""
        fixed_args = self._fix_compile_args(None, macros, include_dirs)
        ignore, macros, include_dirs = fixed_args
        pp_opts = gen_preprocess_options(macros, include_dirs)
        pp_args = self.preprocessor + pp_opts
        if output_file:
            pp_args.extend(['-o', output_file])
        if extra_preargs:
            # prepend, so caller-supplied pre-args come before everything
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)

        # reasons to preprocess:
        # - force is indicated
        # - output is directed to stdout
        # - source file is newer than the target
        preprocess = self.force or output_file is None or newer(source, output_file)
        if not preprocess:
            return

        if output_file:
            self.mkpath(os.path.dirname(output_file))

        try:
            self.spawn(pp_args)
        except DistutilsExecError as msg:
            raise CompileError(msg)
|
| 181 |
+
|
| 182 |
+
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
|
| 183 |
+
compiler_so = compiler_fixup(self.compiler_so, cc_args + extra_postargs)
|
| 184 |
+
try:
|
| 185 |
+
self.spawn(compiler_so + cc_args + [src, '-o', obj] + extra_postargs)
|
| 186 |
+
except DistutilsExecError as msg:
|
| 187 |
+
raise CompileError(msg)
|
| 188 |
+
|
| 189 |
+
    def create_static_lib(
        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
    ):
        """Archive 'objects' into a static library named from
        'output_libname', running ranlib afterwards when configured.
        Skips the work if the library is already up to date.
        Raises LibError if ranlib fails."""
        objects, output_dir = self._fix_object_args(objects, output_dir)

        output_filename = self.library_filename(output_libname, output_dir=output_dir)

        if self._need_link(objects, output_filename):
            self.mkpath(os.path.dirname(output_filename))
            self.spawn(self.archiver + [output_filename] + objects + self.objects)

            # Not many Unices required ranlib anymore -- SunOS 4.x is, I
            # think the only major Unix that does.  Maybe we need some
            # platform intelligence here to skip ranlib if it's not
            # needed -- or maybe Python's configure script took care of
            # it for us, hence the check for leading colon.
            if self.ranlib:
                try:
                    self.spawn(self.ranlib + [output_filename])
                except DistutilsExecError as msg:
                    raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)
|
| 212 |
+
|
| 213 |
+
    def link(
        self,
        target_desc,
        objects,
        output_filename,
        output_dir=None,
        libraries=None,
        library_dirs=None,
        runtime_library_dirs=None,
        export_symbols=None,
        debug=0,
        extra_preargs=None,
        extra_postargs=None,
        build_temp=None,
        target_lang=None,
    ):
        """Link 'objects' into 'output_filename' (an executable or a
        shared object, per 'target_desc').  For C++ targets the compiler
        portion of the linker command is swapped for 'compiler_cxx'.
        Skips the link when the output is up to date; raises LinkError
        if the spawned linker fails."""
        objects, output_dir = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
        libraries, library_dirs, runtime_library_dirs = fixed_args

        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries)
        if not isinstance(output_dir, (str, type(None))):
            raise TypeError("'output_dir' must be a string or None")
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            ld_args = objects + self.objects + lib_opts + ['-o', output_filename]
            if debug:
                ld_args[:0] = ['-g']
            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)
            self.mkpath(os.path.dirname(output_filename))
            try:
                # Select a linker based on context: linker_exe when
                # building an executable or linker_so (with shared options)
                # when building a shared library.
                building_exe = target_desc == CCompiler.EXECUTABLE
                linker = (self.linker_exe if building_exe else self.linker_so)[:]

                if target_lang == "c++" and self.compiler_cxx:
                    # Replace the C compiler portion of the linker command
                    # with the C++ compiler, preserving any env/AIX prefix
                    # and the trailing linker-only parameters.
                    env, linker_ne = _split_env(linker)
                    aix, linker_na = _split_aix(linker_ne)
                    _, compiler_cxx_ne = _split_env(self.compiler_cxx)
                    _, linker_exe_ne = _split_env(self.linker_exe)

                    params = _linker_params(linker_na, linker_exe_ne)
                    linker = env + aix + compiler_cxx_ne + params

                linker = compiler_fixup(linker, ld_args)

                self.spawn(linker + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)
|
| 271 |
+
|
| 272 |
+
# -- Miscellaneous methods -----------------------------------------
|
| 273 |
+
# These are all used by the 'gen_lib_options() function, in
|
| 274 |
+
# ccompiler.py.
|
| 275 |
+
|
| 276 |
+
def library_dir_option(self, dir):
|
| 277 |
+
return "-L" + dir
|
| 278 |
+
|
| 279 |
+
def _is_gcc(self):
|
| 280 |
+
cc_var = sysconfig.get_config_var("CC")
|
| 281 |
+
compiler = os.path.basename(shlex.split(cc_var)[0])
|
| 282 |
+
return "gcc" in compiler or "g++" in compiler
|
| 283 |
+
|
| 284 |
+
def runtime_library_dir_option(self, dir):
    """Return the linker flag(s) that embed *dir* as a runtime search path.

    The correct spelling is highly platform- and linker-specific and
    cannot be derived from the Python configuration data, so this is
    admittedly a hack; see Python bug #445902 for the history.
    """
    if sys.platform.startswith("darwin"):
        from distutils.util import get_macosx_target_ver, split_version

        target = get_macosx_target_ver()
        if target and split_version(target) >= [10, 5]:
            return "-Wl,-rpath," + dir
        # -rpath is unsupported before macOS 10.5; fall back to a plain -L.
        return "-L" + dir
    if sys.platform.startswith("freebsd"):
        return "-Wl,-rpath=" + dir
    if sys.platform.startswith("hp-ux"):
        shared_opt = "-Wl,+s" if self._is_gcc() else "+s"
        return [shared_opt, "-L" + dir]

    # Everywhere else, `-Wl` is the presumed way to forward an option to
    # the linker and `-R` sets an RPATH; GNU ld additionally needs
    # --enable-new-dtags to emit a RUNPATH instead of just an RPATH.
    if sysconfig.get_config_var("GNULD") == "yes":
        return "-Wl,--enable-new-dtags,-R" + dir
    return "-Wl,-R" + dir
|
| 323 |
+
|
| 324 |
+
def library_option(self, lib):
    """Return the compiler flag that links against library *lib*."""
    return f"-l{lib}"
|
| 326 |
+
|
| 327 |
+
@staticmethod
|
| 328 |
+
def _library_root(dir):
|
| 329 |
+
"""
|
| 330 |
+
macOS users can specify an alternate SDK using'-isysroot'.
|
| 331 |
+
Calculate the SDK root if it is specified.
|
| 332 |
+
|
| 333 |
+
Note that, as of Xcode 7, Apple SDKs may contain textual stub
|
| 334 |
+
libraries with .tbd extensions rather than the normal .dylib
|
| 335 |
+
shared libraries installed in /. The Apple compiler tool
|
| 336 |
+
chain handles this transparently but it can cause problems
|
| 337 |
+
for programs that are being built with an SDK and searching
|
| 338 |
+
for specific libraries. Callers of find_library_file need to
|
| 339 |
+
keep in mind that the base filename of the returned SDK library
|
| 340 |
+
file might have a different extension from that of the library
|
| 341 |
+
file installed on the running system, for example:
|
| 342 |
+
/Applications/Xcode.app/Contents/Developer/Platforms/
|
| 343 |
+
MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/
|
| 344 |
+
usr/lib/libedit.tbd
|
| 345 |
+
vs
|
| 346 |
+
/usr/lib/libedit.dylib
|
| 347 |
+
"""
|
| 348 |
+
cflags = sysconfig.get_config_var('CFLAGS')
|
| 349 |
+
match = re.search(r'-isysroot\s*(\S+)', cflags)
|
| 350 |
+
|
| 351 |
+
apply_root = (
|
| 352 |
+
sys.platform == 'darwin'
|
| 353 |
+
and match
|
| 354 |
+
and (
|
| 355 |
+
dir.startswith('/System/')
|
| 356 |
+
or (dir.startswith('/usr/') and not dir.startswith('/usr/local/'))
|
| 357 |
+
)
|
| 358 |
+
)
|
| 359 |
+
|
| 360 |
+
return os.path.join(match.group(1), dir[1:]) if apply_root else dir
|
| 361 |
+
|
| 362 |
+
def find_library_file(self, dirs, lib, debug=0):
    r"""
    Return the path of the first existing library file for *lib* under
    any of *dirs*, or None when nothing matches.

    Second-guess the linker with not much hard data to go on: GCC
    seems to prefer the shared library, so assume that *all* Unix C
    compilers do, ignoring even GCC's "-static" option.

    >>> compiler = UnixCCompiler()
    >>> compiler._library_root = lambda dir: dir
    >>> monkeypatch = getfixture('monkeypatch')
    >>> monkeypatch.setattr(os.path, 'exists', lambda d: 'existing' in d)
    >>> dirs = ('/foo/bar/missing', '/foo/bar/existing')
    >>> compiler.find_library_file(dirs, 'abc').replace('\\', '/')
    '/foo/bar/existing/libabc.dylib'
    >>> compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/')
    '/foo/bar/existing/libabc.dylib'
    >>> monkeypatch.setattr(os.path, 'exists',
    ...     lambda d: 'existing' in d and '.a' in d)
    >>> compiler.find_library_file(dirs, 'abc').replace('\\', '/')
    '/foo/bar/existing/libabc.a'
    >>> compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/')
    '/foo/bar/existing/libabc.a'
    """
    # Candidate filenames in preference order (dylib first, static last).
    names = [
        self.library_filename(lib, lib_type=kind)
        for kind in ('dylib', 'xcode_stub', 'shared', 'static')
    ]

    for dir in dirs:
        root = self._library_root(dir)
        for name in names:
            candidate = os.path.join(root, name)
            if os.path.exists(candidate):
                return candidate

    # Not found in any directory.
    return None
|
.venv/Lib/site-packages/setuptools/_distutils/util.py
ADDED
|
@@ -0,0 +1,513 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.util
|
| 2 |
+
|
| 3 |
+
Miscellaneous utility functions -- anything that doesn't fit into
|
| 4 |
+
one of the other *util.py modules.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import importlib.util
|
| 8 |
+
import os
|
| 9 |
+
import re
|
| 10 |
+
import string
|
| 11 |
+
import subprocess
|
| 12 |
+
import sys
|
| 13 |
+
import sysconfig
|
| 14 |
+
import functools
|
| 15 |
+
|
| 16 |
+
from .errors import DistutilsPlatformError, DistutilsByteCompileError
|
| 17 |
+
from .dep_util import newer
|
| 18 |
+
from .spawn import spawn
|
| 19 |
+
from ._log import log
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def get_host_platform():
    """
    Return a string identifying the current platform, used to
    distinguish platform-specific build directories and built
    distributions.
    """
    # This function originally exposed the platform tags that Python 3.9
    # defines, even on older interpreters, after distutils was split out.
    # It now delegates to stdlib sysconfig but keeps those shims.
    if sys.version_info < (3, 8) and os.name == 'nt':
        banner = sys.version.lower()
        if '(arm)' in banner:
            return 'win-arm32'
        if '(arm64)' in banner:
            return 'win-arm64'

    if sys.version_info < (3, 9) and os.name == "posix" and hasattr(os, 'uname'):
        osname, _host, release, version, _machine = os.uname()
        if osname.startswith("aix"):
            from .py38compat import aix_platform

            return aix_platform(osname, version, release)

    return sysconfig.get_platform()
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def get_platform():
    """Return the platform being built for.

    On Windows this honours the Visual Studio cross-compilation
    environment (VSCMD_ARG_TGT_ARCH); elsewhere it is the host platform.
    """
    if os.name != 'nt':
        return get_host_platform()

    plat_by_target = {
        'x86': 'win32',
        'x64': 'win-amd64',
        'arm': 'win-arm32',
        'arm64': 'win-arm64',
    }
    cross_target = os.environ.get('VSCMD_ARG_TGT_ARCH')
    return plat_by_target.get(cross_target) or get_host_platform()
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
if sys.platform == 'darwin':
|
| 65 |
+
_syscfg_macosx_ver = None # cache the version pulled from sysconfig
|
| 66 |
+
MACOSX_VERSION_VAR = 'MACOSX_DEPLOYMENT_TARGET'
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def _clear_cached_macosx_ver():
|
| 70 |
+
"""For testing only. Do not call."""
|
| 71 |
+
global _syscfg_macosx_ver
|
| 72 |
+
_syscfg_macosx_ver = None
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def get_macosx_target_ver_from_syscfg():
    """Get the version of macOS latched in the Python interpreter configuration.
    Returns the version as a string or None if can't obtain one. Cached."""
    global _syscfg_macosx_ver
    if _syscfg_macosx_ver is None:
        from distutils import sysconfig

        configured = sysconfig.get_config_var(MACOSX_VERSION_VAR) or ''
        # Only latch a non-empty value; '' keeps the cache unset.
        if configured:
            _syscfg_macosx_ver = configured
    return _syscfg_macosx_ver
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def get_macosx_target_ver():
    """Return the version of macOS for which we are building.

    The target version defaults to the version in sysconfig latched at time
    the Python interpreter was built, unless overridden by an environment
    variable. If neither source has a value, then None is returned"""

    syscfg_ver = get_macosx_target_ver_from_syscfg()
    env_ver = os.environ.get(MACOSX_VERSION_VAR)

    if not env_ver:
        return syscfg_ver

    # Validate the override against the interpreter's own deployment
    # target, when both are known.  The build's target must not drop
    # below 10.3 if the interpreter was built for 10.3 or later:
    # extension modules would otherwise be built with incompatible
    # settings, notably LDSHARED's '-undefined dynamic_lookup', which
    # only works on >= 10.3.
    incompatible = (
        syscfg_ver
        and split_version(syscfg_ver) >= [10, 3]
        and split_version(env_ver) < [10, 3]
    )
    if incompatible:
        my_msg = (
            '$' + MACOSX_VERSION_VAR + ' mismatch: '
            'now "%s" but "%s" during configure; '
            'must use 10.3 or later' % (env_ver, syscfg_ver)
        )
        raise DistutilsPlatformError(my_msg)
    return env_ver
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def split_version(s):
    """Convert a dot-separated string into a list of numbers for comparisons"""
    return list(map(int, s.split('.')))
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and reassembled with the current directory
    separator.  Needed because filenames in the setup script are always
    supplied in Unix style and have to be converted to the local
    convention before we can actually use them in the filesystem.
    Raises ValueError on non-Unix-ish systems if 'pathname' either
    starts or ends with a slash.
    """
    if os.sep == '/' or not pathname:
        return pathname
    if pathname.startswith('/'):
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname.endswith('/'):
        raise ValueError("path '%s' cannot end with '/'" % pathname)

    # Drop '.' components; an all-dots path collapses to the current dir.
    components = [part for part in pathname.split('/') if part != '.']
    return os.path.join(*components) if components else os.curdir
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
# convert_path ()
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def change_root(new_root, pathname):
    """Return 'pathname' with 'new_root' prepended.

    If 'pathname' is relative, this is equivalent to
    os.path.join(new_root, pathname).  Otherwise, it requires making
    'pathname' relative and then joining the two, which is tricky on
    DOS/Windows and Mac OS.

    Raises DistutilsPlatformError for platforms other than posix/nt.
    """
    if os.name == 'posix':
        if not os.path.isabs(pathname):
            return os.path.join(new_root, pathname)
        # Strip the leading '/' so the join lands under new_root.
        return os.path.join(new_root, pathname[1:])

    if os.name == 'nt':
        (drive, path) = os.path.splitdrive(pathname)
        # Guard against an empty drive-relative path: the previous
        # 'path[0]' indexing raised IndexError for inputs like 'C:' or ''.
        if path.startswith('\\'):
            path = path[1:]
        return os.path.join(new_root, path)

    raise DistutilsPlatformError(f"nothing known about platform '{os.name}'")
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
@functools.lru_cache()
def check_environ():
    """Ensure that 'os.environ' has all the environment variables we
    guarantee that users can use in config files, command-line options,
    etc.  Currently this includes:
      HOME - user's home directory (Unix only)
      PLAT - description of the current platform, including hardware
             and OS (see 'get_platform()')
    """
    # lru_cache ensures the (idempotent) environment mutation below is
    # only attempted once per process.
    if os.name == 'posix' and 'HOME' not in os.environ:
        try:
            import pwd

            # Index 5 of a pwd entry is pw_dir, the user's home directory.
            os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]
        except (ImportError, KeyError):
            # bpo-10496: if the current user identifier doesn't exist in the
            # password database, do nothing
            pass

    if 'PLAT' not in os.environ:
        os.environ['PLAT'] = get_platform()
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def subst_vars(s, local_vars):
    """
    Perform variable substitution on 's'.

    Variables are indicated by format-style braces ("{var}") and are
    replaced by the corresponding value from 'local_vars', falling back
    to 'os.environ'.  'os.environ' is first checked/augmented to
    guarantee that it contains certain values: see 'check_environ()'.
    Raise ValueError for any variable found in neither source.
    """
    check_environ()
    mapping = dict(os.environ)
    mapping.update({name: str(value) for name, value in local_vars.items()})
    try:
        return _subst_compat(s).format_map(mapping)
    except KeyError as var:
        raise ValueError(f"invalid variable {var}")
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
def _subst_compat(s):
|
| 218 |
+
"""
|
| 219 |
+
Replace shell/Perl-style variable substitution with
|
| 220 |
+
format-style. For compatibility.
|
| 221 |
+
"""
|
| 222 |
+
|
| 223 |
+
def _subst(match):
|
| 224 |
+
return f'{{{match.group(1)}}}'
|
| 225 |
+
|
| 226 |
+
repl = re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s)
|
| 227 |
+
if repl != s:
|
| 228 |
+
import warnings
|
| 229 |
+
|
| 230 |
+
warnings.warn(
|
| 231 |
+
"shell/Perl-style substitutions are deprecated",
|
| 232 |
+
DeprecationWarning,
|
| 233 |
+
)
|
| 234 |
+
return repl
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
def grok_environment_error(exc, prefix="error: "):
    """Kept for backward compatibility only.

    Used to try clever things with EnvironmentErrors, but nowadays
    str(exception) produces good messages on its own.
    """
    return f"{prefix}{exc}"
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
# Needed by 'split_quoted()'
|
| 245 |
+
_wordchars_re = _squote_re = _dquote_re = None
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
def _init_regex():
|
| 249 |
+
global _wordchars_re, _squote_re, _dquote_re
|
| 250 |
+
_wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
|
| 251 |
+
_squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
|
| 252 |
+
_dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
def split_quoted(s):
    """Split a string up according to Unix shell-like rules for quotes and
    backslashes. In short: words are delimited by spaces, as long as those
    spaces are not escaped by a backslash, or inside a quoted string.
    Single and double quotes are equivalent, and the quote characters can
    be backslash-escaped. The backslash is stripped from any two-character
    escape sequence, leaving only the escaped character. The quote
    characters are stripped from any quoted string. Returns a list of
    words.
    """

    # This is a nice algorithm for splitting up a single string, since it
    # doesn't require character-by-character examination. It was a little
    # bit of a brain-bender to get it working right, though...
    if _wordchars_re is None:
        _init_regex()

    s = s.strip()
    words = []
    # 's' is consumed destructively below: escape/quote characters are
    # sliced out of it in place, and 'pos' tracks how far the current
    # word has already been scanned.
    pos = 0

    while s:
        m = _wordchars_re.match(s, pos)
        end = m.end()
        if end == len(s):
            words.append(s[:end])
            break

        if s[end] in string.whitespace:
            # unescaped, unquoted whitespace: now
            # we definitely have a word delimiter
            words.append(s[:end])
            s = s[end:].lstrip()
            pos = 0

        elif s[end] == '\\':
            # preserve whatever is being escaped;
            # will become part of the current word
            s = s[:end] + s[end + 1 :]
            pos = end + 1

        else:
            if s[end] == "'":  # slurp singly-quoted string
                m = _squote_re.match(s, end)
            elif s[end] == '"':  # slurp doubly-quoted string
                m = _dquote_re.match(s, end)
            else:
                raise RuntimeError("this can't happen (bad char '%c')" % s[end])

            if m is None:
                raise ValueError("bad string (mismatched %s quotes?)" % s[end])

            # Strip both quote characters out of 's'; continue scanning
            # two positions before the old match end to account for them.
            (beg, end) = m.span()
            s = s[:beg] + s[beg + 1 : end - 1] + s[end:]
            pos = m.end() - 2

        if pos >= len(s):
            words.append(s)
            break

    return words
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
# split_quoted ()
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
def execute(func, args, msg=None, verbose=0, dry_run=0):
    """Perform some action that affects the outside world (eg. by
    writing to the filesystem).  Such actions are special because they
    are disabled by the 'dry_run' flag.  This helper takes care of that
    bureaucracy: supply the function to call and an argument tuple for
    it (to embody the "external action" being performed), and an
    optional message to print.
    """
    if msg is None:
        msg = f"{func.__name__}{args!r}"
        if msg.endswith(',)'):  # correct for singleton tuple
            msg = msg[:-2] + ')'

    log.info(msg)
    if not dry_run:
        func(*args)
|
| 338 |
+
|
| 339 |
+
|
| 340 |
+
def strtobool(val):
    """Convert a string representation of truth to true (1) or false (0).

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'.  Raises ValueError if
    'val' is anything else.
    """
    truthy = {'y', 'yes', 't', 'true', 'on', '1'}
    falsy = {'n', 'no', 'f', 'false', 'off', '0'}
    val = val.lower()
    if val in truthy:
        return 1
    if val in falsy:
        return 0
    raise ValueError("invalid truth value {!r}".format(val))
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
def byte_compile(  # noqa: C901
    py_files,
    optimize=0,
    force=0,
    prefix=None,
    base_dir=None,
    verbose=1,
    dry_run=0,
    direct=None,
):
    """Byte-compile a collection of Python source files to .pyc
    files in a __pycache__ subdirectory.  'py_files' is a list
    of files to compile; any files that don't end in ".py" are silently
    skipped.  'optimize' must be one of the following:
      0 - don't optimize
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out to use direct compilation or not (see
    the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.

    Raises DistutilsByteCompileError when bytecode writing is disabled.
    """

    # nothing is done if sys.dont_write_bytecode is True
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = __debug__ and optimize == 0

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        try:
            from tempfile import mkstemp

            (script_fd, script_name) = mkstemp(".py")
        except ImportError:
            # NOTE(review): mktemp is race-prone; this fallback should be
            # unreachable on any supported Python (mkstemp always exists).
            from tempfile import mktemp

            (script_fd, script_name) = None, mktemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")

            with script:
                script.write(
                    """\
from distutils.util import byte_compile
files = [
"""
                )

                # XXX would be nice to write absolute filenames, just for
                # safety's sake (script should be more robust in the face of
                # chdir'ing before running it).  But this requires abspath'ing
                # 'prefix' as well, and that breaks the hack in build_lib's
                # 'byte_compile()' method that carefully tacks on a trailing
                # slash (os.sep really) to make sure the prefix here is "just
                # right".  This whole prefix business is rather delicate -- the
                # problem is that it's really a directory, but I'm treating it
                # as a dumb string, so trailing slashes and so forth matter.

                script.write(",\n".join(map(repr, py_files)) + "]\n")
                script.write(
                    """
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
"""
                    % (optimize, force, prefix, base_dir, verbose)
                )

        # Re-run ourselves in a child interpreter carrying the requested
        # optimization flags; the script calls byte_compile(direct=1).
        cmd = [sys.executable]
        cmd.extend(subprocess._optim_args_from_interpreter_flags())
        cmd.append(script_name)
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name,), "removing %s" % script_name, dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            if optimize >= 0:
                opt = '' if optimize == 0 else optimize
                cfile = importlib.util.cache_from_source(file, optimization=opt)
            else:
                cfile = importlib.util.cache_from_source(file)
            dfile = file
            if prefix:
                if file[: len(prefix)] != prefix:
                    raise ValueError(
                        "invalid prefix: filename %r doesn't start with %r"
                        % (file, prefix)
                    )
                dfile = dfile[len(prefix) :]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            if direct:
                # Only recompile when forced or when the source is newer
                # than the cached bytecode.
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s", file, cfile_base)
|
| 505 |
+
|
| 506 |
+
|
| 507 |
+
def rfc822_escape(header):
    """Return a version of the string escaped for inclusion in an
    RFC-822 header: each newline is followed by 8 spaces of
    continuation indent.
    """
    continuation = '\n' + ' ' * 8
    return continuation.join(header.split('\n'))
|
.venv/Lib/site-packages/setuptools/_distutils/version.py
ADDED
|
@@ -0,0 +1,357 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# distutils/version.py
|
| 3 |
+
#
|
| 4 |
+
# Implements multiple version numbering conventions for the
|
| 5 |
+
# Python Module Distribution Utilities.
|
| 6 |
+
#
|
| 7 |
+
# $Id$
|
| 8 |
+
#
|
| 9 |
+
|
| 10 |
+
"""Provides classes to represent module version numbers (one class for
|
| 11 |
+
each style of version numbering). There are currently two such classes
|
| 12 |
+
implemented: StrictVersion and LooseVersion.
|
| 13 |
+
|
| 14 |
+
Every version number class implements the following interface:
|
| 15 |
+
* the 'parse' method takes a string and parses it to some internal
|
| 16 |
+
representation; if the string is an invalid version number,
|
| 17 |
+
'parse' raises a ValueError exception
|
| 18 |
+
* the class constructor takes an optional string argument which,
|
| 19 |
+
if supplied, is passed to 'parse'
|
| 20 |
+
* __str__ reconstructs the string that was passed to 'parse' (or
|
| 21 |
+
an equivalent string -- ie. one that will generate an equivalent
|
| 22 |
+
version number instance)
|
| 23 |
+
* __repr__ generates Python code to recreate the version number instance
|
| 24 |
+
* _cmp compares the current instance with either another instance
|
| 25 |
+
of the same class or a string (which will be parsed to an instance
|
| 26 |
+
of the same class, thus must follow the same rules)
|
| 27 |
+
"""
|
| 28 |
+
|
| 29 |
+
import re
|
| 30 |
+
import warnings
|
| 31 |
+
import contextlib
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
@contextlib.contextmanager
def suppress_known_deprecation():
    """Record (rather than surface) the known Version deprecation warning.

    Yields the list of warnings captured inside the block, so callers can
    inspect what was emitted.
    """
    with warnings.catch_warnings(record=True) as caught:
        # Positional form of filterwarnings(action, message, category).
        warnings.filterwarnings(
            'default',
            "distutils Version classes are deprecated.",
            DeprecationWarning,
        )
        yield caught
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class Version:
|
| 46 |
+
"""Abstract base class for version numbering classes. Just provides
|
| 47 |
+
constructor (__init__) and reproducer (__repr__), because those
|
| 48 |
+
seem to be the same for all version numbering classes; and route
|
| 49 |
+
rich comparisons to _cmp.
|
| 50 |
+
"""
|
| 51 |
+
|
| 52 |
+
def __init__(self, vstring=None):
|
| 53 |
+
if vstring:
|
| 54 |
+
self.parse(vstring)
|
| 55 |
+
warnings.warn(
|
| 56 |
+
"distutils Version classes are deprecated. "
|
| 57 |
+
"Use packaging.version instead.",
|
| 58 |
+
DeprecationWarning,
|
| 59 |
+
stacklevel=2,
|
| 60 |
+
)
|
| 61 |
+
|
| 62 |
+
def __repr__(self):
|
| 63 |
+
return "{} ('{}')".format(self.__class__.__name__, str(self))
|
| 64 |
+
|
| 65 |
+
def __eq__(self, other):
|
| 66 |
+
c = self._cmp(other)
|
| 67 |
+
if c is NotImplemented:
|
| 68 |
+
return c
|
| 69 |
+
return c == 0
|
| 70 |
+
|
| 71 |
+
def __lt__(self, other):
|
| 72 |
+
c = self._cmp(other)
|
| 73 |
+
if c is NotImplemented:
|
| 74 |
+
return c
|
| 75 |
+
return c < 0
|
| 76 |
+
|
| 77 |
+
def __le__(self, other):
|
| 78 |
+
c = self._cmp(other)
|
| 79 |
+
if c is NotImplemented:
|
| 80 |
+
return c
|
| 81 |
+
return c <= 0
|
| 82 |
+
|
| 83 |
+
def __gt__(self, other):
|
| 84 |
+
c = self._cmp(other)
|
| 85 |
+
if c is NotImplemented:
|
| 86 |
+
return c
|
| 87 |
+
return c > 0
|
| 88 |
+
|
| 89 |
+
def __ge__(self, other):
|
| 90 |
+
c = self._cmp(other)
|
| 91 |
+
if c is NotImplemented:
|
| 92 |
+
return c
|
| 93 |
+
return c >= 0
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
# Interface for version-number classes -- must be implemented
|
| 97 |
+
# by the following classes (the concrete ones -- Version should
|
| 98 |
+
# be treated as an abstract class).
|
| 99 |
+
# __init__ (string) - create and take same action as 'parse'
|
| 100 |
+
# (string parameter is optional)
|
| 101 |
+
# parse (string) - convert a string representation to whatever
|
| 102 |
+
# internal representation is appropriate for
|
| 103 |
+
# this style of version numbering
|
| 104 |
+
# __str__ (self) - convert back to a string; should be very similar
|
| 105 |
+
# (if not identical to) the string supplied to parse
|
| 106 |
+
# __repr__ (self) - generate Python code to recreate
|
| 107 |
+
# the instance
|
| 108 |
+
# _cmp (self, other) - compare two version numbers ('other' may
|
| 109 |
+
# be an unparsed version string, or another
|
| 110 |
+
# instance of your version class)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
class StrictVersion(Version):

    """Version numbering for anal retentives and software idealists.
    Implements the standard interface for version number classes as
    described above.  A version number consists of two or three
    dot-separated numeric components, with an optional "pre-release" tag
    on the end.  The pre-release tag consists of the letter 'a' or 'b'
    followed by a number.  If the numeric components of two version
    numbers are equal, then one with a pre-release tag will always
    be deemed earlier (lesser) than one without.

    The following are valid version numbers (shown in the order that
    would be obtained by sorting according to the supplied cmp function):

        0.4       0.4.0  (these two are equivalent)
        0.4.1
        0.5a1
        0.5b3
        0.5
        0.9.6
        1.0
        1.0.4a3
        1.0.4b1
        1.0.4

    The following are examples of invalid version numbers:

        1
        2.7.2.2
        1.3.a4
        1.3pl1
        1.3c4

    The rationale for this version numbering system will be explained
    in the distutils documentation.
    """

    # MAJOR.MINOR[.PATCH][{a|b}N]; re.VERBOSE makes the embedded spaces
    # insignificant, re.ASCII restricts \d to 0-9.
    version_re = re.compile(
        r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$', re.VERBOSE | re.ASCII
    )

    def parse(self, vstring):
        """Parse *vstring* into ``self.version`` (a 3-tuple of ints) and
        ``self.prerelease`` (``(letter, number)`` or ``None``).

        Raises ValueError when the string does not match ``version_re``.
        """
        match = self.version_re.match(vstring)
        if not match:
            raise ValueError("invalid version number '%s'" % vstring)

        # Groups 3 is the whole optional ".patch", 5 the whole "aN"/"bN".
        (major, minor, patch, prerelease, prerelease_num) = match.group(1, 2, 4, 5, 6)

        if patch:
            self.version = tuple(map(int, [major, minor, patch]))
        else:
            # Missing patch component normalizes to 0, so "0.4" == "0.4.0".
            self.version = tuple(map(int, [major, minor])) + (0,)

        if prerelease:
            # prerelease[0] is the 'a'/'b' letter.
            self.prerelease = (prerelease[0], int(prerelease_num))
        else:
            self.prerelease = None

    def __str__(self):
        # Omit a zero patch component to mirror common input forms.
        if self.version[2] == 0:
            vstring = '.'.join(map(str, self.version[0:2]))
        else:
            vstring = '.'.join(map(str, self.version))

        if self.prerelease:
            vstring = vstring + self.prerelease[0] + str(self.prerelease[1])

        return vstring

    def _cmp(self, other):  # noqa: C901
        """Return -1/0/1 comparing self to *other* (str or StrictVersion)."""
        if isinstance(other, str):
            # Parsing the string builds a StrictVersion, which would emit
            # the deprecation warning again; keep that quiet here.
            with suppress_known_deprecation():
                other = StrictVersion(other)
        elif not isinstance(other, StrictVersion):
            return NotImplemented

        if self.version != other.version:
            # numeric versions don't match
            # prerelease stuff doesn't matter
            if self.version < other.version:
                return -1
            else:
                return 1

        # have to compare prerelease
        # case 1: neither has prerelease; they're equal
        # case 2: self has prerelease, other doesn't; other is greater
        # case 3: self doesn't have prerelease, other does: self is greater
        # case 4: both have prerelease: must compare them!

        if not self.prerelease and not other.prerelease:
            return 0
        elif self.prerelease and not other.prerelease:
            return -1
        elif not self.prerelease and other.prerelease:
            return 1
        elif self.prerelease and other.prerelease:
            # ('a', n) < ('b', m) lexicographically, matching the intended
            # alpha-before-beta ordering.
            if self.prerelease == other.prerelease:
                return 0
            elif self.prerelease < other.prerelease:
                return -1
            else:
                return 1
        else:
            assert False, "never get here"
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
# end class StrictVersion
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
# The rules according to Greg Stein:
|
| 224 |
+
# 1) a version number has 1 or more numbers separated by a period or by
|
| 225 |
+
# sequences of letters. If only periods, then these are compared
|
| 226 |
+
# left-to-right to determine an ordering.
|
| 227 |
+
# 2) sequences of letters are part of the tuple for comparison and are
|
| 228 |
+
# compared lexicographically
|
| 229 |
+
# 3) recognize the numeric components may have leading zeroes
|
| 230 |
+
#
|
| 231 |
+
# The LooseVersion class below implements these rules: a version number
|
| 232 |
+
# string is split up into a tuple of integer and string components, and
|
| 233 |
+
# comparison is a simple tuple comparison. This means that version
|
| 234 |
+
# numbers behave in a predictable and obvious way, but a way that might
|
| 235 |
+
# not necessarily be how people *want* version numbers to behave. There
|
| 236 |
+
# wouldn't be a problem if people could stick to purely numeric version
|
| 237 |
+
# numbers: just split on period and compare the numbers as tuples.
|
| 238 |
+
# However, people insist on putting letters into their version numbers;
|
| 239 |
+
# the most common purpose seems to be:
|
| 240 |
+
# - indicating a "pre-release" version
|
| 241 |
+
# ('alpha', 'beta', 'a', 'b', 'pre', 'p')
|
| 242 |
+
# - indicating a post-release patch ('p', 'pl', 'patch')
|
| 243 |
+
# but of course this can't cover all version number schemes, and there's
|
| 244 |
+
# no way to know what a programmer means without asking him.
|
| 245 |
+
#
|
| 246 |
+
# The problem is what to do with letters (and other non-numeric
|
| 247 |
+
# characters) in a version number. The current implementation does the
|
| 248 |
+
# obvious and predictable thing: keep them as strings and compare
|
| 249 |
+
# lexically within a tuple comparison. This has the desired effect if
|
| 250 |
+
# an appended letter sequence implies something "post-release":
|
| 251 |
+
# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002".
|
| 252 |
+
#
|
| 253 |
+
# However, if letters in a version number imply a pre-release version,
|
| 254 |
+
# the "obvious" thing isn't correct. Eg. you would expect that
|
| 255 |
+
# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison
|
| 256 |
+
# implemented here, this just isn't so.
|
| 257 |
+
#
|
| 258 |
+
# Two possible solutions come to mind. The first is to tie the
|
| 259 |
+
# comparison algorithm to a particular set of semantic rules, as has
|
| 260 |
+
# been done in the StrictVersion class above. This works great as long
|
| 261 |
+
# as everyone can go along with bondage and discipline. Hopefully a
|
| 262 |
+
# (large) subset of Python module programmers will agree that the
|
| 263 |
+
# particular flavour of bondage and discipline provided by StrictVersion
|
| 264 |
+
# provides enough benefit to be worth using, and will submit their
|
| 265 |
+
# version numbering scheme to its domination. The free-thinking
|
| 266 |
+
# anarchists in the lot will never give in, though, and something needs
|
| 267 |
+
# to be done to accommodate them.
|
| 268 |
+
#
|
| 269 |
+
# Perhaps a "moderately strict" version class could be implemented that
|
| 270 |
+
# lets almost anything slide (syntactically), and makes some heuristic
|
| 271 |
+
# assumptions about non-digits in version number strings. This could
|
| 272 |
+
# sink into special-case-hell, though; if I was as talented and
|
| 273 |
+
# idiosyncratic as Larry Wall, I'd go ahead and implement a class that
|
| 274 |
+
# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is
|
| 275 |
+
# just as happy dealing with things like "2g6" and "1.13++". I don't
|
| 276 |
+
# think I'm smart enough to do it right though.
|
| 277 |
+
#
|
| 278 |
+
# In any case, I've coded the test suite for this module (see
|
| 279 |
+
# ../test/test_version.py) specifically to fail on things like comparing
|
| 280 |
+
# "1.2a2" and "1.2". That's not because the *code* is doing anything
|
| 281 |
+
# wrong, it's because the simple, obvious design doesn't match my
|
| 282 |
+
# complicated, hairy expectations for real-world version numbers. It
|
| 283 |
+
# would be a snap to fix the test suite to say, "Yep, LooseVersion does
|
| 284 |
+
# the Right Thing" (ie. the code matches the conception). But I'd rather
|
| 285 |
+
# have a conception that matches common notions about version numbers.
|
| 286 |
+
|
| 287 |
+
|
| 288 |
+
class LooseVersion(Version):
    """Permissive version numbering for anarchists and software realists.

    A version string is split into a sequence of numeric and alphabetic
    components; comparison is plain sequence comparison (numbers compare
    numerically, letters lexically).  Any string at all is accepted —
    there is no such thing as an invalid LooseVersion — though the
    resulting ordering may not always match human expectations.
    """

    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)

    def parse(self, vstring):
        # The parsed form is lossy, so keep the raw string around for
        # __str__ to echo back.
        self.vstring = vstring
        parts = []
        for chunk in self.component_re.split(vstring):
            # Drop empty split artifacts and the '.' separators.
            if not chunk or chunk == '.':
                continue
            try:
                parts.append(int(chunk))
            except ValueError:
                # Non-numeric run: keep as a string component.
                parts.append(chunk)
        self.version = parts

    def __str__(self):
        return self.vstring

    def __repr__(self):
        return "LooseVersion ('%s')" % str(self)

    def _cmp(self, other):
        """Return -1/0/1 comparing self to *other* (str or LooseVersion)."""
        if isinstance(other, str):
            other = LooseVersion(other)
        elif not isinstance(other, LooseVersion):
            return NotImplemented

        mine, theirs = self.version, other.version
        if mine == theirs:
            return 0
        if mine < theirs:
            return -1
        if mine > theirs:
            return 1
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
# end class LooseVersion
|
.venv/Lib/site-packages/setuptools/archive_util.py
ADDED
|
@@ -0,0 +1,216 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utilities for extracting common archive formats"""
|
| 2 |
+
|
| 3 |
+
import zipfile
|
| 4 |
+
import tarfile
|
| 5 |
+
import os
|
| 6 |
+
import shutil
|
| 7 |
+
import posixpath
|
| 8 |
+
import contextlib
|
| 9 |
+
from distutils.errors import DistutilsError
|
| 10 |
+
|
| 11 |
+
from ._path import ensure_directory
|
| 12 |
+
|
| 13 |
+
__all__ = [
|
| 14 |
+
"unpack_archive",
|
| 15 |
+
"unpack_zipfile",
|
| 16 |
+
"unpack_tarfile",
|
| 17 |
+
"default_filter",
|
| 18 |
+
"UnrecognizedFormat",
|
| 19 |
+
"extraction_drivers",
|
| 20 |
+
"unpack_directory",
|
| 21 |
+
]
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class UnrecognizedFormat(DistutilsError):
    """Couldn't recognize the archive type"""
    # Raised by the unpack_* drivers when `filename` is not in a format the
    # driver understands; unpack_archive() treats it as "try the next driver".
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def default_filter(src, dst):
    """The default progress/filter callback; returns the proposed destination
    path unchanged, i.e. extracts every entry to its computed location.
    """
    return dst
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def unpack_archive(filename, extract_dir, progress_filter=default_filter, drivers=None):
    """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``.

    `progress_filter` is a callback taking the archive-internal source path
    ('/'-separated) and the proposed filesystem destination; it returns the
    destination to use (possibly altered) or ``None``/falsy to skip that
    entry.  It can therefore report progress, filter entries, or redirect
    extraction paths.

    `drivers`, if supplied, must be a non-empty sequence of functions with
    the same signature as this function (minus `drivers`) that raise
    ``UnrecognizedFormat`` when they cannot handle the archive.  Each driver
    is tried in turn until one succeeds; if all fail, ``UnrecognizedFormat``
    is raised.  Without `drivers`, the module-level ``extraction_drivers``
    sequence is used (directory, then zip, then tar).
    """
    candidates = drivers if drivers else extraction_drivers
    for driver in candidates:
        try:
            driver(filename, extract_dir, progress_filter)
        except UnrecognizedFormat:
            # This driver cannot handle the format; try the next one.
            continue
        return
    raise UnrecognizedFormat("Not a recognized archive type: %s" % filename)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def unpack_directory(filename, extract_dir, progress_filter=default_filter):
    """ "Unpack" a directory, using the same interface as for archives

    Raises ``UnrecognizedFormat`` if `filename` is not a directory
    """
    if not os.path.isdir(filename):
        raise UnrecognizedFormat("%s is not a directory" % filename)

    # Maps each directory to (archive-style '/'-separated source prefix,
    # filesystem destination dir).  os.walk is top-down by default, so a
    # directory is registered here by its parent's iteration before the
    # walk descends into it.
    paths = {
        filename: ('', extract_dir),
    }
    for base, dirs, files in os.walk(filename):
        src, dst = paths[base]
        for d in dirs:
            paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
        for f in files:
            target = os.path.join(dst, f)
            # Let the filter rename or veto the file (falsy => skip).
            target = progress_filter(src + f, target)
            if not target:
                # skip non-files
                continue
            ensure_directory(target)
            f = os.path.join(base, f)
            shutil.copyfile(f, target)
            # Copy permissions/timestamps as well as contents.
            shutil.copystat(f, target)
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack zip `filename` to `extract_dir`.

    Raises ``UnrecognizedFormat`` when ``zipfile.is_zipfile`` rejects
    `filename`.  See ``unpack_archive()`` for the `progress_filter`
    semantics.
    """
    if not zipfile.is_zipfile(filename):
        raise UnrecognizedFormat("%s is not a zip file" % (filename,))

    # Delegate the per-entry work to the shared helper.
    with zipfile.ZipFile(filename) as archive:
        _unpack_zipfile_obj(archive, extract_dir, progress_filter)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def _unpack_zipfile_obj(zipfile_obj, extract_dir, progress_filter=default_filter):
    """Internal/private API used by other parts of setuptools.
    Similar to ``unpack_zipfile``, but receives an already opened :obj:`zipfile.ZipFile`
    object instead of a filename.
    """
    for info in zipfile_obj.infolist():
        name = info.filename

        # don't extract absolute paths or ones with .. in them
        # NOTE(review): sanitization assumes '/'-separated entry names;
        # entries using '\' separators would bypass this check — confirm
        # inputs are well-formed zips.
        if name.startswith('/') or '..' in name.split('/'):
            continue

        target = os.path.join(extract_dir, *name.split('/'))
        # Falsy return from the filter means "skip this entry".
        target = progress_filter(name, target)
        if not target:
            continue
        if name.endswith('/'):
            # directory
            ensure_directory(target)
        else:
            # file
            ensure_directory(target)
            data = zipfile_obj.read(info.filename)
            with open(target, 'wb') as f:
                f.write(data)
        # The high 16 bits of external_attr carry Unix mode bits (when the
        # archive recorded them); zero means "not recorded", so skip chmod.
        unix_attributes = info.external_attr >> 16
        if unix_attributes:
            os.chmod(target, unix_attributes)
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def _resolve_tar_file_or_dir(tar_obj, tar_member_obj):
|
| 138 |
+
"""Resolve any links and extract link targets as normal files."""
|
| 139 |
+
while tar_member_obj is not None and (
|
| 140 |
+
tar_member_obj.islnk() or tar_member_obj.issym()
|
| 141 |
+
):
|
| 142 |
+
linkpath = tar_member_obj.linkname
|
| 143 |
+
if tar_member_obj.issym():
|
| 144 |
+
base = posixpath.dirname(tar_member_obj.name)
|
| 145 |
+
linkpath = posixpath.join(base, linkpath)
|
| 146 |
+
linkpath = posixpath.normpath(linkpath)
|
| 147 |
+
tar_member_obj = tar_obj._getmember(linkpath)
|
| 148 |
+
|
| 149 |
+
is_file_or_dir = tar_member_obj is not None and (
|
| 150 |
+
tar_member_obj.isfile() or tar_member_obj.isdir()
|
| 151 |
+
)
|
| 152 |
+
if is_file_or_dir:
|
| 153 |
+
return tar_member_obj
|
| 154 |
+
|
| 155 |
+
raise LookupError('Got unknown file type')
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
def _iter_open_tar(tar_obj, extract_dir, progress_filter):
    """Yield ``(member, destination)`` pairs for the safe members of a tar.

    Unsafe names (absolute, or containing ``..``) are skipped, links are
    resolved to their underlying file/dir, and entries the filter rejects
    are dropped.  The tar object is closed when iteration completes.
    """
    # Disable ownership changes entirely during extraction.
    tar_obj.chown = lambda *args: None

    with contextlib.closing(tar_obj):
        for entry in tar_obj:
            name = entry.name
            parts = name.split('/')
            # Refuse absolute paths and parent-directory escapes.
            if name.startswith('/') or '..' in parts:
                continue

            destination = os.path.join(extract_dir, *parts)

            try:
                entry = _resolve_tar_file_or_dir(tar_obj, entry)
            except LookupError:
                # Unresolvable link or special file: skip it.
                continue

            final = progress_filter(name, destination)
            if not final:
                continue

            # Drop a single trailing separator so directory destinations
            # are uniform.
            if final.endswith(os.sep):
                final = final[:-1]

            yield entry, final
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
    by ``tarfile.open()``).  See ``unpack_archive()`` for an explanation
    of the `progress_filter` argument.
    """
    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError as e:
        raise UnrecognizedFormat(
            "%s is not a compressed or uncompressed tar file" % (filename,)
        ) from e

    # _iter_open_tar sanitizes names, resolves links, applies the filter,
    # and closes tarobj when exhausted.
    for member, final_dst in _iter_open_tar(
        tarobj,
        extract_dir,
        progress_filter,
    ):
        try:
            # XXX Ugh
            # _extract_member is a private tarfile API; used to extract to
            # an already-filtered destination path.
            tarobj._extract_member(member, final_dst)
        except tarfile.ExtractError:
            # chown/chmod/mkfifo/mknode/makedev failed
            # Best-effort: metadata/special-file failures are not fatal.
            pass

    return True
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
# Default driver sequence tried, in order, by unpack_archive().
extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
|
.venv/Lib/site-packages/setuptools/build_meta.py
ADDED
|
@@ -0,0 +1,534 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A PEP 517 interface to setuptools
|
| 2 |
+
|
| 3 |
+
Previously, when a user or a command line tool (let's call it a "frontend")
|
| 4 |
+
needed to make a request of setuptools to take a certain action, for
|
| 5 |
+
example, generating a list of installation requirements, the frontend would
|
| 6 |
+
would call "setup.py egg_info" or "setup.py bdist_wheel" on the command line.
|
| 7 |
+
|
| 8 |
+
PEP 517 defines a different method of interfacing with setuptools. Rather
|
| 9 |
+
than calling "setup.py" directly, the frontend should:
|
| 10 |
+
|
| 11 |
+
1. Set the current directory to the directory with a setup.py file
|
| 12 |
+
2. Import this module into a safe python interpreter (one in which
|
| 13 |
+
setuptools can potentially set global variables or crash hard).
|
| 14 |
+
3. Call one of the functions defined in PEP 517.
|
| 15 |
+
|
| 16 |
+
What each function does is defined in PEP 517. However, here is a "casual"
|
| 17 |
+
definition of the functions (this definition should not be relied on for
|
| 18 |
+
bug reports or API stability):
|
| 19 |
+
|
| 20 |
+
- `build_wheel`: build a wheel in the folder and return the basename
|
| 21 |
+
- `get_requires_for_build_wheel`: get the `setup_requires` to build
|
| 22 |
+
- `prepare_metadata_for_build_wheel`: get the `install_requires`
|
| 23 |
+
- `build_sdist`: build an sdist in the folder and return the basename
|
| 24 |
+
- `get_requires_for_build_sdist`: get the `setup_requires` to build
|
| 25 |
+
|
| 26 |
+
Again, this is not a formal definition! Just a "taste" of the module.
|
| 27 |
+
"""
|
| 28 |
+
|
| 29 |
+
import io
|
| 30 |
+
import os
|
| 31 |
+
import shlex
|
| 32 |
+
import sys
|
| 33 |
+
import tokenize
|
| 34 |
+
import shutil
|
| 35 |
+
import contextlib
|
| 36 |
+
import tempfile
|
| 37 |
+
import warnings
|
| 38 |
+
from pathlib import Path
|
| 39 |
+
from typing import Dict, Iterator, List, Optional, Union
|
| 40 |
+
|
| 41 |
+
import setuptools
|
| 42 |
+
import distutils
|
| 43 |
+
from . import errors
|
| 44 |
+
from ._path import same_path
|
| 45 |
+
from ._reqs import parse_strings
|
| 46 |
+
from .warnings import SetuptoolsDeprecationWarning
|
| 47 |
+
from distutils.util import strtobool
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
__all__ = [
|
| 51 |
+
'get_requires_for_build_sdist',
|
| 52 |
+
'get_requires_for_build_wheel',
|
| 53 |
+
'prepare_metadata_for_build_wheel',
|
| 54 |
+
'build_wheel',
|
| 55 |
+
'build_sdist',
|
| 56 |
+
'get_requires_for_build_editable',
|
| 57 |
+
'prepare_metadata_for_build_editable',
|
| 58 |
+
'build_editable',
|
| 59 |
+
'__legacy__',
|
| 60 |
+
'SetupRequirementsError',
|
| 61 |
+
]
|
| 62 |
+
|
| 63 |
+
SETUPTOOLS_ENABLE_FEATURES = os.getenv("SETUPTOOLS_ENABLE_FEATURES", "").lower()
|
| 64 |
+
LEGACY_EDITABLE = "legacy-editable" in SETUPTOOLS_ENABLE_FEATURES.replace("_", "-")
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class SetupRequirementsError(BaseException):
    """Carries the ``setup_requires`` specifiers out of a ``setup()`` run.

    NOTE(review): subclassing BaseException (not Exception) appears
    deliberate, presumably so generic ``except Exception`` handlers in
    setup scripts do not swallow it — confirm against upstream history.
    """

    def __init__(self, specifiers):
        # The raw requirement specifier strings requested by the project.
        self.specifiers = specifiers
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
class Distribution(setuptools.dist.Distribution):
    # Distribution subclass used while probing build requirements: instead of
    # actually downloading eggs, it aborts the setup() run by raising
    # SetupRequirementsError carrying the parsed specifier list.
    def fetch_build_eggs(self, specifiers):
        specifier_list = list(parse_strings(specifiers))

        raise SetupRequirementsError(specifier_list)

    @classmethod
    @contextlib.contextmanager
    def patch(cls):
        """
        Replace
        distutils.core.Distribution with this class
        for the duration of this context.
        """
        orig = distutils.core.Distribution
        distutils.core.Distribution = cls
        try:
            yield
        finally:
            # Always restore the original class, even if the body raised.
            distutils.core.Distribution = orig
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
@contextlib.contextmanager
def no_install_setup_requires():
    """Temporarily disable installing setup_requires

    Under PEP 517 the backend only *reports* build dependencies; the
    frontend is responsible for installing them.  So while acting as a
    backend, setuptools must not attempt the installation itself.
    """
    saved = setuptools._install_setup_requires
    setuptools._install_setup_requires = lambda attrs: None
    try:
        yield
    finally:
        # Restore the real hook even if the body raised.
        setuptools._install_setup_requires = saved
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def _get_immediate_subdirectories(a_dir):
|
| 111 |
+
return [
|
| 112 |
+
name for name in os.listdir(a_dir) if os.path.isdir(os.path.join(a_dir, name))
|
| 113 |
+
]
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def _file_with_extension(directory, extension):
    """Return the single file name in *directory* ending with *extension*.

    Raises ``ValueError`` when there is not exactly one match (``extension``
    may also be a tuple of suffixes, as accepted by ``str.endswith``).
    """
    candidates = [name for name in os.listdir(directory) if name.endswith(extension)]
    if len(candidates) != 1:
        raise ValueError(
            'No distribution was found. Ensure that `setup.py` '
            'is not empty and that it calls `setup()`.'
        )
    return candidates[0]
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def _open_setup_script(setup_script):
    """Open *setup_script* for reading; supply a stub if the file is missing."""
    if not os.path.exists(setup_script):
        # Projects without a setup.py get a minimal default one in-memory.
        return io.StringIO("from setuptools import setup; setup()")

    # tokenize.open honours PEP 263 encoding declarations; the getattr
    # fallback to plain open is kept for exact behavioral parity.
    return getattr(tokenize, 'open', open)(setup_script)
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
@contextlib.contextmanager
def suppress_known_deprecation():
    """Silence the warning emitted for ``setup.py install`` while active."""
    with warnings.catch_warnings():
        # Message is matched as a regex prefix by filterwarnings.
        warnings.filterwarnings('ignore', 'setup.py install is deprecated')
        yield
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
# Type of the ``config_settings`` mapping received from PEP 517 frontends.
_ConfigSettings = Optional[Dict[str, Union[str, List[str], None]]]
"""
Currently the user can run::

    pip install -e . --config-settings key=value
    python -m build -C--key=value -C key=value

- pip will pass both key and value as strings and overwriting repeated keys
  (pypa/pip#11059).
- build will accumulate values associated with repeated keys in a list.
  It will also accept keys with no associated value.
  This means that an option passed by build can be ``str | list[str] | None``.
- PEP 517 specifies that ``config_settings`` is an optional dict.
"""
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
class _ConfigSettingsTranslator:
    """Translate ``config_settings`` into distutils-style command arguments.
    Only a limited number of options is currently supported.
    """

    # See pypa/setuptools#1928 pypa/setuptools#2491

    def _get_config(self, key: str, config_settings: _ConfigSettings) -> List[str]:
        """
        Get the value of a specific key in ``config_settings`` as a list of strings.

        >>> fn = _ConfigSettingsTranslator()._get_config
        >>> fn("--global-option", None)
        []
        >>> fn("--global-option", {})
        []
        >>> fn("--global-option", {'--global-option': 'foo'})
        ['foo']
        >>> fn("--global-option", {'--global-option': ['foo']})
        ['foo']
        >>> fn("--global-option", {'--global-option': 'foo'})
        ['foo']
        >>> fn("--global-option", {'--global-option': 'foo bar'})
        ['foo', 'bar']
        """
        # config_settings may be None (PEP 517 allows omitting it entirely).
        cfg = config_settings or {}
        opts = cfg.get(key) or []
        # A plain string is split shell-style; a list is passed through as-is.
        return shlex.split(opts) if isinstance(opts, str) else opts

    def _valid_global_options(self):
        """Global options accepted by setuptools (e.g. quiet or verbose)."""
        # Each entry of global_options starts with (long_name, short_name);
        # flatten both spellings into one flat set, dropping empty entries.
        options = (opt[:2] for opt in setuptools.dist.Distribution.global_options)
        return {flag for long_and_short in options for flag in long_and_short if flag}

    def _global_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
        """
        Let the user specify ``verbose`` or ``quiet`` + escape hatch via
        ``--global-option``.
        Note: ``-v``, ``-vv``, ``-vvv`` have similar effects in setuptools,
        so we just have to cover the basic scenario ``-v``.

        >>> fn = _ConfigSettingsTranslator()._global_args
        >>> list(fn(None))
        []
        >>> list(fn({"verbose": "False"}))
        ['-q']
        >>> list(fn({"verbose": "1"}))
        ['-v']
        >>> list(fn({"--verbose": None}))
        ['-v']
        >>> list(fn({"verbose": "true", "--global-option": "-q --no-user-cfg"}))
        ['-v', '-q', '--no-user-cfg']
        >>> list(fn({"--quiet": None}))
        ['-q']
        """
        cfg = config_settings or {}
        falsey = {"false", "no", "0", "off"}
        # A key given with no value (None) counts as "enabled" ("1").
        if "verbose" in cfg or "--verbose" in cfg:
            level = str(cfg.get("verbose") or cfg.get("--verbose") or "1")
            yield ("-q" if level.lower() in falsey else "-v")
        if "quiet" in cfg or "--quiet" in cfg:
            level = str(cfg.get("quiet") or cfg.get("--quiet") or "1")
            yield ("-v" if level.lower() in falsey else "-q")

        # Only pass through --global-option values setuptools itself accepts.
        valid = self._valid_global_options()
        args = self._get_config("--global-option", config_settings)
        yield from (arg for arg in args if arg.strip("-") in valid)

    def __dist_info_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
        """
        The ``dist_info`` command accepts ``tag-date`` and ``tag-build``.

        .. warning::
           We cannot use this yet as it requires the ``sdist`` and ``bdist_wheel``
           commands run in ``build_sdist`` and ``build_wheel`` to reuse the egg-info
           directory created in ``prepare_metadata_for_build_wheel``.

        >>> fn = _ConfigSettingsTranslator()._ConfigSettingsTranslator__dist_info_args
        >>> list(fn(None))
        []
        >>> list(fn({"tag-date": "False"}))
        ['--no-date']
        >>> list(fn({"tag-date": None}))
        ['--no-date']
        >>> list(fn({"tag-date": "true", "tag-build": ".a"}))
        ['--tag-date', '--tag-build', '.a']
        """
        cfg = config_settings or {}
        if "tag-date" in cfg:
            # A bare key (value None) is treated as "false" here.
            val = strtobool(str(cfg["tag-date"] or "false"))
            yield ("--tag-date" if val else "--no-date")
        if "tag-build" in cfg:
            yield from ["--tag-build", str(cfg["tag-build"])]

    def _editable_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
        """
        The ``editable_wheel`` command accepts ``editable-mode=strict``.

        >>> fn = _ConfigSettingsTranslator()._editable_args
        >>> list(fn(None))
        []
        >>> list(fn({"editable-mode": "strict"}))
        ['--mode', 'strict']
        """
        cfg = config_settings or {}
        # Both dashed and underscored spellings of the key are accepted.
        mode = cfg.get("editable-mode") or cfg.get("editable_mode")
        if not mode:
            return
        yield from ["--mode", str(mode)]

    def _arbitrary_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
        """
        Users may expect to pass arbitrary lists of arguments to a command
        via "--global-option" (example provided in PEP 517 of a "escape hatch").

        >>> fn = _ConfigSettingsTranslator()._arbitrary_args
        >>> list(fn(None))
        []
        >>> list(fn({}))
        []
        >>> list(fn({'--build-option': 'foo'}))
        ['foo']
        >>> list(fn({'--build-option': ['foo']}))
        ['foo']
        >>> list(fn({'--build-option': 'foo'}))
        ['foo']
        >>> list(fn({'--build-option': 'foo bar'}))
        ['foo', 'bar']
        >>> warnings.simplefilter('error', SetuptoolsDeprecationWarning)
        >>> list(fn({'--global-option': 'foo'}))  # doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        SetuptoolsDeprecationWarning: ...arguments given via `--global-option`...
        """
        args = self._get_config("--global-option", config_settings)
        global_opts = self._valid_global_options()
        bad_args = []

        # Non-global flags are still yielded (backwards compatibility),
        # but collected so a deprecation warning can be emitted below.
        for arg in args:
            if arg.strip("-") not in global_opts:
                bad_args.append(arg)
                yield arg

        yield from self._get_config("--build-option", config_settings)

        if bad_args:
            SetuptoolsDeprecationWarning.emit(
                "Incompatible `config_settings` passed to build backend.",
                f"""
                The arguments {bad_args!r} were given via `--global-option`.
                Please use `--build-option` instead,
                `--global-option` is reserved for flags like `--verbose` or `--quiet`.
                """,
                due_date=(2023, 9, 26),  # Warning introduced in v64.0.1, 11/Aug/2022.
            )
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
class _BuildMetaBackend(_ConfigSettingsTranslator):
    """Implementation of the PEP 517 (and optionally PEP 660) hooks."""

    def _get_build_requires(self, config_settings, requirements):
        # Run egg_info under the patched Distribution: fetch_build_eggs then
        # raises SetupRequirementsError carrying setup_requires specifiers.
        sys.argv = [
            *sys.argv[:1],
            *self._global_args(config_settings),
            "egg_info",
            *self._arbitrary_args(config_settings),
        ]
        try:
            with Distribution.patch():
                self.run_setup()
        except SetupRequirementsError as e:
            requirements += e.specifiers

        return requirements

    def run_setup(self, setup_script='setup.py'):
        # Note that we can reuse our build directory between calls
        # Correctness comes first, then optimization later
        __file__ = os.path.abspath(setup_script)
        __name__ = '__main__'

        with _open_setup_script(__file__) as f:
            # NOTE(review): raw strings mean this replaces the literal text
            # "\r\n" with "\n", NOT CRLF line endings — longstanding quirk;
            # confirm upstream intent before changing.
            code = f.read().replace(r'\r\n', r'\n')

        try:
            # Executed with this frame's locals so the script sees the
            # __file__/__name__ bindings above.
            exec(code, locals())
        except SystemExit as e:
            if e.code:
                raise
            # We ignore exit code indicating success
            SetuptoolsDeprecationWarning.emit(
                "Running `setup.py` directly as CLI tool is deprecated.",
                "Please avoid using `sys.exit(0)` or similar statements "
                "that don't fit in the paradigm of a configuration file.",
                see_url="https://blog.ganssle.io/articles/2021/10/"
                "setup-py-deprecated.html",
            )

    def get_requires_for_build_wheel(self, config_settings=None):
        # Building a wheel always additionally requires the 'wheel' package.
        return self._get_build_requires(config_settings, requirements=['wheel'])

    def get_requires_for_build_sdist(self, config_settings=None):
        return self._get_build_requires(config_settings, requirements=[])

    def _bubble_up_info_directory(self, metadata_directory: str, suffix: str) -> str:
        """
        PEP 517 requires that the .dist-info directory be placed in the
        metadata_directory. To comply, we MUST copy the directory to the root.

        Returns the basename of the info directory, e.g. `proj-0.0.0.dist-info`.
        """
        info_dir = self._find_info_directory(metadata_directory, suffix)
        if not same_path(info_dir.parent, metadata_directory):
            shutil.move(str(info_dir), metadata_directory)
            # PEP 517 allow other files and dirs to exist in metadata_directory
        return info_dir.name

    def _find_info_directory(self, metadata_directory: str, suffix: str) -> Path:
        # Walk top-down; descend only while there is a single, non-matching
        # child directory, otherwise assert a unique *suffix* match.
        for parent, dirs, _ in os.walk(metadata_directory):
            candidates = [f for f in dirs if f.endswith(suffix)]

            if len(candidates) != 0 or len(dirs) != 1:
                assert len(candidates) == 1, f"Multiple {suffix} directories found"
                return Path(parent, candidates[0])

        msg = f"No {suffix} directory found in {metadata_directory}"
        raise errors.InternalError(msg)

    def prepare_metadata_for_build_wheel(
        self, metadata_directory, config_settings=None
    ):
        # dist_info writes the .dist-info (keeping .egg-info) into
        # metadata_directory; both are then moved to its root.
        sys.argv = [
            *sys.argv[:1],
            *self._global_args(config_settings),
            "dist_info",
            "--output-dir",
            metadata_directory,
            "--keep-egg-info",
        ]
        with no_install_setup_requires():
            self.run_setup()

        self._bubble_up_info_directory(metadata_directory, ".egg-info")
        return self._bubble_up_info_directory(metadata_directory, ".dist-info")

    def _build_with_temp_dir(
        self, setup_command, result_extension, result_directory, config_settings
    ):
        result_directory = os.path.abspath(result_directory)

        # Build in a temporary directory, then copy to the target.
        os.makedirs(result_directory, exist_ok=True)
        temp_opts = {"prefix": ".tmp-", "dir": result_directory}
        with tempfile.TemporaryDirectory(**temp_opts) as tmp_dist_dir:
            sys.argv = [
                *sys.argv[:1],
                *self._global_args(config_settings),
                *setup_command,
                "--dist-dir",
                tmp_dist_dir,
                *self._arbitrary_args(config_settings),
            ]
            with no_install_setup_requires():
                self.run_setup()

            result_basename = _file_with_extension(tmp_dist_dir, result_extension)
            result_path = os.path.join(result_directory, result_basename)
            if os.path.exists(result_path):
                # os.rename will fail overwriting on non-Unix.
                os.remove(result_path)
            os.rename(os.path.join(tmp_dist_dir, result_basename), result_path)

        return result_basename

    def build_wheel(
        self, wheel_directory, config_settings=None, metadata_directory=None
    ):
        with suppress_known_deprecation():
            return self._build_with_temp_dir(
                ['bdist_wheel'], '.whl', wheel_directory, config_settings
            )

    def build_sdist(self, sdist_directory, config_settings=None):
        return self._build_with_temp_dir(
            ['sdist', '--formats', 'gztar'], '.tar.gz', sdist_directory, config_settings
        )

    def _get_dist_info_dir(self, metadata_directory: Optional[str]) -> Optional[str]:
        # Locate the single *.dist-info dir previously produced by
        # prepare_metadata_for_build_wheel (or None when not provided).
        if not metadata_directory:
            return None
        dist_info_candidates = list(Path(metadata_directory).glob("*.dist-info"))
        assert len(dist_info_candidates) <= 1
        return str(dist_info_candidates[0]) if dist_info_candidates else None

    # The PEP 660 hooks are only defined when the "legacy-editable"
    # feature flag is NOT set (evaluated once at class-creation time).
    if not LEGACY_EDITABLE:
        # PEP660 hooks:
        # build_editable
        # get_requires_for_build_editable
        # prepare_metadata_for_build_editable
        def build_editable(
            self, wheel_directory, config_settings=None, metadata_directory=None
        ):
            # XXX can or should we hide our editable_wheel command normally?
            info_dir = self._get_dist_info_dir(metadata_directory)
            opts = ["--dist-info-dir", info_dir] if info_dir else []
            cmd = ["editable_wheel", *opts, *self._editable_args(config_settings)]
            with suppress_known_deprecation():
                return self._build_with_temp_dir(
                    cmd, ".whl", wheel_directory, config_settings
                )

        def get_requires_for_build_editable(self, config_settings=None):
            return self.get_requires_for_build_wheel(config_settings)

        def prepare_metadata_for_build_editable(
            self, metadata_directory, config_settings=None
        ):
            return self.prepare_metadata_for_build_wheel(
                metadata_directory, config_settings
            )
|
| 476 |
+
|
| 477 |
+
|
| 478 |
+
class _BuildMetaLegacyBackend(_BuildMetaBackend):
    """Compatibility backend for setuptools.

    A variant of setuptools.build_meta that tries to stay compatible with
    pre-PEP 517 modes of invocation.  It exists as a temporary bridge
    between the old and the new packaging mechanisms and will eventually
    be removed.
    """

    def run_setup(self, setup_script='setup.py'):
        """Execute *setup_script* with legacy-friendly sys.path/sys.argv."""
        # Scripts may assume their own directory is importable, as if it
        # were on PYTHONPATH (pypa/setuptools#1642).
        original_path = list(sys.path)
        script_dir = os.path.dirname(os.path.abspath(setup_script))
        if script_dir not in sys.path:
            sys.path.insert(0, script_dir)

        # Some setup.py scripts (e.g. in pygame and numpy) locate their
        # source tree via sys.argv[0] and expect it to be the script path.
        original_argv0 = sys.argv[0]
        sys.argv[0] = setup_script

        try:
            super().run_setup(setup_script=setup_script)
        finally:
            # PEP 517 frontends should call each hook in a fresh subprocess,
            # but restore the globals anyway so the manipulation does not
            # leak past this call within the same process.
            sys.path[:] = original_path
            sys.argv[0] = original_argv0
|
| 516 |
+
|
| 517 |
+
|
| 518 |
+
# The primary backend
_BACKEND = _BuildMetaBackend()

# PEP 517 mandatory hooks, re-exported as module-level callables.
get_requires_for_build_wheel = _BACKEND.get_requires_for_build_wheel
get_requires_for_build_sdist = _BACKEND.get_requires_for_build_sdist
prepare_metadata_for_build_wheel = _BACKEND.prepare_metadata_for_build_wheel
build_wheel = _BACKEND.build_wheel
build_sdist = _BACKEND.build_sdist

# PEP 660 (editable install) hooks are only exposed when the
# "legacy-editable" feature flag is not set in the environment.
if not LEGACY_EDITABLE:
    get_requires_for_build_editable = _BACKEND.get_requires_for_build_editable
    prepare_metadata_for_build_editable = _BACKEND.prepare_metadata_for_build_editable
    build_editable = _BACKEND.build_editable


# The legacy backend
__legacy__ = _BuildMetaLegacyBackend()
|
.venv/Lib/site-packages/setuptools/command/__init__.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from distutils.command.bdist import bdist
import sys

# Register the 'egg' format with distutils' bdist command so that
# ``setup.py bdist --formats=egg`` dispatches to bdist_egg.
if 'egg' not in bdist.format_commands:
    try:
        # Newer distutils: ``format_commands`` supports item assignment
        # (format name -> (command name, description)).
        bdist.format_commands['egg'] = ('bdist_egg', "Python .egg file")
    except TypeError:
        # For backward compatibility with older distutils (stdlib),
        # where ``format_commands`` is a plain list and the mapping
        # lives in the separate ``format_command`` dict.
        bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
        bdist.format_commands.append('egg')

# Avoid leaking these names as part of the package namespace.
del bdist, sys
|
.venv/Lib/site-packages/setuptools/command/_requirestxt.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Helper code used to generate ``requires.txt`` files in the egg-info directory.
|
| 2 |
+
|
| 3 |
+
The ``requires.txt`` file has an specific format:
|
| 4 |
+
- Environment markers need to be part of the section headers and
|
| 5 |
+
should not be part of the requirement spec itself.
|
| 6 |
+
|
| 7 |
+
See https://setuptools.pypa.io/en/latest/deprecated/python_eggs.html#requires-txt
|
| 8 |
+
"""
|
| 9 |
+
import io
|
| 10 |
+
from collections import defaultdict
|
| 11 |
+
from itertools import filterfalse
|
| 12 |
+
from typing import Dict, List, Tuple, Mapping, TypeVar
|
| 13 |
+
|
| 14 |
+
from ..extern.jaraco.text import yield_lines
|
| 15 |
+
from ..extern.packaging.requirements import Requirement
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
# dict can work as an ordered set (insertion-ordered keys; values unused)
_T = TypeVar("_T")
_Ordered = Dict[_T, None]  # type alias documenting the "ordered set" intent
_ordered = dict
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def _prepare(
    install_requires: Dict[str, Requirement],
    extras_require: Mapping[str, Dict[str, Requirement]],
) -> Tuple[List[str], Dict[str, List[str]]]:
    """Rewrite ``install_requires``/``extras_require`` for ``requires.txt``.

    Environment markers are moved out of individual requirement specs and
    into section headers, as the ``requires.txt`` format demands.
    """
    converted = _convert_extras_requirements(extras_require)
    return _move_install_requirements_markers(install_requires, converted)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def _convert_extras_requirements(
    extras_require: Dict[str, Dict[str, Requirement]],
) -> Mapping[str, _Ordered[Requirement]]:
    """
    Convert requirements in `extras_require` of the form
    `"extra": ["barbazquux; {marker}"]` to
    `"extra:{marker}": ["barbazquux"]`.
    """
    converted: Mapping[str, _Ordered[Requirement]] = defaultdict(dict)
    for section, requirements in extras_require.items():
        converted[section]  # touch the key so empty sections are preserved
        for requirement in requirements.values():
            # Requirements with markers land in a "section:marker" bucket.
            converted[section + _suffix_for(requirement)].setdefault(requirement)
    return converted
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def _move_install_requirements_markers(
    install_requires: Dict[str, Requirement],
    extras_require: Mapping[str, _Ordered[Requirement]],
) -> Tuple[List[str], Dict[str, List[str]]]:
    """
    Split ``install_requires`` into marker-free requirements (kept as-is)
    and marker-carrying ones, which move into ``extras_require`` sections —
    the ``requires.txt`` format keeps environment markers in section
    headers, never in the requirement spec itself.
    """
    requirements = install_requires.values()

    # Simple (marker-free) requirements stay under install_requires.
    plain = [str(req) for req in requirements if _no_marker(req)]

    # Marker-carrying requirements move into ":{marker}" sections.
    for req in requirements:
        if not _no_marker(req):
            extras_require[':' + str(req.marker)].setdefault(req)

    expanded = {
        # dict.fromkeys keeps first-seen order while de-duplicating.
        section: list(dict.fromkeys(str(_clean_req(req)) for req in reqs))
        for section, reqs in extras_require.items()
    }

    return plain, expanded
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def _suffix_for(req):
    """Return the 'extras_require' section suffix for a given requirement."""
    return f":{req.marker}" if req.marker else ""
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def _clean_req(req):
    """Return a marker-free copy of *req* (the original is left untouched)."""
    stripped = Requirement(str(req))  # copy before mutating
    stripped.marker = None
    return stripped
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
def _no_marker(req):
    """Return True when *req* carries no environment marker."""
    return not req.marker
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def _write_requirements(stream, reqs):
    """Write each requirement from *reqs*, newline-terminated, to *stream*."""
    stream.writelines(line + '\n' for line in yield_lines(reqs or ()))
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def write_requirements(cmd, basename, filename):
    """Write the ``requires.txt`` egg-info file for *cmd*'s distribution.

    Marker-free install requirements are emitted first, followed by one
    ``[section]`` block per (sorted) extra/marker section.
    """
    meta = cmd.distribution.metadata
    data = io.StringIO()
    install_requires, extras_require = _prepare(
        meta._normalized_install_requires, meta._normalized_extras_require
    )
    _write_requirements(data, install_requires)
    for extra in sorted(extras_require):
        # f-string replaces the fragile `'\n[{extra}]\n'.format(**vars())`
        # idiom, which depended on the local variable name.
        data.write(f'\n[{extra}]\n')
        _write_requirements(data, extras_require[extra])
    cmd.write_or_delete_file("requirements", filename, data.getvalue())
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def write_setup_requirements(cmd, basename, filename):
    """Write the ``setup-requirements`` egg-info file for *cmd*'s distribution."""
    buffer = io.StringIO()
    _write_requirements(buffer, cmd.distribution.setup_requires)
    cmd.write_or_delete_file("setup-requirements", filename, buffer.getvalue())
|
.venv/Lib/site-packages/setuptools/command/editable_wheel.py
ADDED
|
@@ -0,0 +1,894 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Create a wheel that, when installed, will make the source package 'editable'
|
| 3 |
+
(add it to the interpreter's path, including metadata) per PEP 660. Replaces
|
| 4 |
+
'setup.py develop'.
|
| 5 |
+
|
| 6 |
+
.. note::
|
| 7 |
+
One of the mechanisms briefly mentioned in PEP 660 to implement editable installs is
|
| 8 |
+
to create a separated directory inside ``build`` and use a .pth file to point to that
|
| 9 |
+
directory. In the context of this file such directory is referred as
|
| 10 |
+
*auxiliary build directory* or ``auxiliary_dir``.
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
import logging
|
| 14 |
+
import io
|
| 15 |
+
import os
|
| 16 |
+
import shutil
|
| 17 |
+
import sys
|
| 18 |
+
import traceback
|
| 19 |
+
from contextlib import suppress
|
| 20 |
+
from enum import Enum
|
| 21 |
+
from inspect import cleandoc
|
| 22 |
+
from itertools import chain
|
| 23 |
+
from pathlib import Path
|
| 24 |
+
from tempfile import TemporaryDirectory
|
| 25 |
+
from typing import (
|
| 26 |
+
TYPE_CHECKING,
|
| 27 |
+
Dict,
|
| 28 |
+
Iterable,
|
| 29 |
+
Iterator,
|
| 30 |
+
List,
|
| 31 |
+
Mapping,
|
| 32 |
+
Optional,
|
| 33 |
+
Tuple,
|
| 34 |
+
TypeVar,
|
| 35 |
+
Union,
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
+
from .. import (
|
| 39 |
+
Command,
|
| 40 |
+
_normalization,
|
| 41 |
+
_path,
|
| 42 |
+
errors,
|
| 43 |
+
namespaces,
|
| 44 |
+
)
|
| 45 |
+
from ..discovery import find_package_path
|
| 46 |
+
from ..dist import Distribution
|
| 47 |
+
from ..warnings import (
|
| 48 |
+
InformationOnly,
|
| 49 |
+
SetuptoolsDeprecationWarning,
|
| 50 |
+
SetuptoolsWarning,
|
| 51 |
+
)
|
| 52 |
+
from .build_py import build_py as build_py_cls
|
| 53 |
+
|
| 54 |
+
if TYPE_CHECKING:
|
| 55 |
+
from wheel.wheelfile import WheelFile # noqa
|
| 56 |
+
|
| 57 |
+
if sys.version_info >= (3, 8):
|
| 58 |
+
from typing import Protocol
|
| 59 |
+
elif TYPE_CHECKING:
|
| 60 |
+
from typing_extensions import Protocol
|
| 61 |
+
else:
|
| 62 |
+
from abc import ABC as Protocol
|
| 63 |
+
|
| 64 |
+
_Path = Union[str, Path]
|
| 65 |
+
_P = TypeVar("_P", bound=_Path)
|
| 66 |
+
_logger = logging.getLogger(__name__)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class _EditableMode(Enum):
|
| 70 |
+
"""
|
| 71 |
+
Possible editable installation modes:
|
| 72 |
+
`lenient` (new files automatically added to the package - DEFAULT);
|
| 73 |
+
`strict` (requires a new installation when files are added/removed); or
|
| 74 |
+
`compat` (attempts to emulate `python setup.py develop` - DEPRECATED).
|
| 75 |
+
"""
|
| 76 |
+
|
| 77 |
+
STRICT = "strict"
|
| 78 |
+
LENIENT = "lenient"
|
| 79 |
+
COMPAT = "compat" # TODO: Remove `compat` after Dec/2022.
|
| 80 |
+
|
| 81 |
+
@classmethod
|
| 82 |
+
def convert(cls, mode: Optional[str]) -> "_EditableMode":
|
| 83 |
+
if not mode:
|
| 84 |
+
return _EditableMode.LENIENT # default
|
| 85 |
+
|
| 86 |
+
_mode = mode.upper()
|
| 87 |
+
if _mode not in _EditableMode.__members__:
|
| 88 |
+
raise errors.OptionError(f"Invalid editable mode: {mode!r}. Try: 'strict'.")
|
| 89 |
+
|
| 90 |
+
if _mode == "COMPAT":
|
| 91 |
+
SetuptoolsDeprecationWarning.emit(
|
| 92 |
+
"Compat editable installs",
|
| 93 |
+
"""
|
| 94 |
+
The 'compat' editable mode is transitional and will be removed
|
| 95 |
+
in future versions of `setuptools`.
|
| 96 |
+
Please adapt your code accordingly to use either the 'strict' or the
|
| 97 |
+
'lenient' modes.
|
| 98 |
+
""",
|
| 99 |
+
see_docs="userguide/development_mode.html",
|
| 100 |
+
# TODO: define due_date
|
| 101 |
+
# There is a series of shortcomings with the available editable install
|
| 102 |
+
# methods, and they are very controversial. This is something that still
|
| 103 |
+
# needs work.
|
| 104 |
+
# Moreover, `pip` is still hiding this warning, so users are not aware.
|
| 105 |
+
)
|
| 106 |
+
|
| 107 |
+
return _EditableMode[_mode]
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
# Warning texts appended by the strategies' ``__enter__`` methods when an
# editable install is performed (see ``_StaticPth``/``_LinkTree``/``_TopLevelFinder``).
_STRICT_WARNING = """
New or renamed files may not be automatically picked up without a new installation.
"""

_LENIENT_WARNING = """
Options like `package-data`, `include/exclude-package-data` or
`packages.find.exclude/include` may have no effect.
"""
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
class editable_wheel(Command):
    """Build 'editable' wheel for development.
    This command is private and reserved for internal use of setuptools,
    users should rely on ``setuptools.build_meta`` APIs.
    """

    description = "DO NOT CALL DIRECTLY, INTERNAL ONLY: create PEP 660 editable wheel"

    user_options = [
        ("dist-dir=", "d", "directory to put final built distributions in"),
        ("dist-info-dir=", "I", "path to a pre-build .dist-info directory"),
        # The ``_EditableMode`` docstring doubles as the ``--mode`` help text.
        ("mode=", None, cleandoc(_EditableMode.__doc__ or "")),
    ]

    def initialize_options(self):
        self.dist_dir = None
        self.dist_info_dir = None
        self.project_dir = None
        self.mode = None

    def finalize_options(self):
        dist = self.distribution
        # Default to the project root and its ``dist`` folder when not given.
        self.project_dir = dist.src_root or os.curdir
        self.package_dir = dist.package_dir or {}
        self.dist_dir = Path(self.dist_dir or os.path.join(self.project_dir, "dist"))

    def run(self):
        """Create the editable wheel; on failure, print the traceback plus
        debugging tips and re-raise."""
        try:
            self.dist_dir.mkdir(exist_ok=True)
            self._ensure_dist_info()

            # Add missing dist_info files
            self.reinitialize_command("bdist_wheel")
            bdist_wheel = self.get_finalized_command("bdist_wheel")
            bdist_wheel.write_wheelfile(self.dist_info_dir)

            self._create_wheel_file(bdist_wheel)
        except Exception:
            traceback.print_exc()
            project = self.distribution.name or self.distribution.get_name()
            _DebuggingTips.emit(project=project)
            raise

    def _ensure_dist_info(self):
        """Run ``dist_info`` when no pre-built ``.dist-info`` directory was
        provided; otherwise sanity-check the directory we were given."""
        if self.dist_info_dir is None:
            dist_info = self.reinitialize_command("dist_info")
            dist_info.output_dir = self.dist_dir
            dist_info.ensure_finalized()
            dist_info.run()
            self.dist_info_dir = dist_info.dist_info_dir
        else:
            assert str(self.dist_info_dir).endswith(".dist-info")
            assert Path(self.dist_info_dir, "METADATA").exists()

    def _install_namespaces(self, installation_dir, pth_prefix):
        # XXX: Only required to support the deprecated namespace practice
        dist = self.distribution
        if not dist.namespace_packages:
            return

        src_root = Path(self.project_dir, self.package_dir.get("", ".")).resolve()
        installer = _NamespaceInstaller(dist, installation_dir, pth_prefix, src_root)
        installer.install_namespaces()

    def _find_egg_info_dir(self) -> Optional[str]:
        """Return the first ``*.egg-info`` sibling of the dist-info dir, if any."""
        parent_dir = Path(self.dist_info_dir).parent if self.dist_info_dir else Path()
        candidates = map(str, parent_dir.glob("*.egg-info"))
        return next(candidates, None)

    def _configure_build(
        self, name: str, unpacked_wheel: _Path, build_lib: _Path, tmp_dir: _Path
    ):
        """Configure commands to behave in the following ways:

        - Build commands can write to ``build_lib`` if they really want to...
          (but this folder is expected to be ignored and modules are expected to live
          in the project directory...)
        - Binary extensions should be built in-place (editable_mode = True)
        - Data/header/script files are not part of the "editable" specification
          so they are written directly to the unpacked_wheel directory.
        """
        # Non-editable files (data, headers, scripts) are written directly to the
        # unpacked_wheel

        dist = self.distribution
        wheel = str(unpacked_wheel)
        build_lib = str(build_lib)
        data = str(Path(unpacked_wheel, f"{name}.data", "data"))
        headers = str(Path(unpacked_wheel, f"{name}.data", "headers"))
        scripts = str(Path(unpacked_wheel, f"{name}.data", "scripts"))

        # egg-info may be generated again to create a manifest (used for package data)
        egg_info = dist.reinitialize_command("egg_info", reinit_subcommands=True)
        egg_info.egg_base = str(tmp_dir)
        egg_info.ignore_egg_info_in_manifest = True

        build = dist.reinitialize_command("build", reinit_subcommands=True)
        install = dist.reinitialize_command("install", reinit_subcommands=True)

        # Redirect every build/install target: modules to the throwaway
        # build_lib / the unpacked wheel, categories into ``<name>.data``.
        build.build_platlib = build.build_purelib = build.build_lib = build_lib
        install.install_purelib = install.install_platlib = install.install_lib = wheel
        install.install_scripts = build.build_scripts = scripts
        install.install_headers = headers
        install.install_data = data

        # Console/GUI entry-point scripts come from the wheel metadata instead.
        install_scripts = dist.get_command_obj("install_scripts")
        install_scripts.no_ep = True

        build.build_temp = str(tmp_dir)

        build_py = dist.get_command_obj("build_py")
        build_py.compile = False
        build_py.existing_egg_info_dir = self._find_egg_info_dir()

        self._set_editable_mode()

        build.ensure_finalized()
        install.ensure_finalized()

    def _set_editable_mode(self):
        """Set the ``editable_mode`` flag in the build sub-commands"""
        dist = self.distribution
        build = dist.get_command_obj("build")
        for cmd_name in build.get_sub_commands():
            cmd = dist.get_command_obj(cmd_name)
            if hasattr(cmd, "editable_mode"):
                cmd.editable_mode = True
            elif hasattr(cmd, "inplace"):
                cmd.inplace = True  # backward compatibility with distutils

    def _collect_build_outputs(self) -> Tuple[List[str], Dict[str, str]]:
        """Gather ``get_outputs``/``get_output_mapping`` results from every
        build sub-command that provides them."""
        files: List[str] = []
        mapping: Dict[str, str] = {}
        build = self.get_finalized_command("build")

        for cmd_name in build.get_sub_commands():
            cmd = self.get_finalized_command(cmd_name)
            if hasattr(cmd, "get_outputs"):
                files.extend(cmd.get_outputs() or [])
            if hasattr(cmd, "get_output_mapping"):
                mapping.update(cmd.get_output_mapping() or {})

        return files, mapping

    def _run_build_commands(
        self, dist_name: str, unpacked_wheel: _Path, build_lib: _Path, tmp_dir: _Path
    ) -> Tuple[List[str], Dict[str, str]]:
        """Configure and run the build, then install the non-editable
        categories (headers/scripts/data) directly into the unpacked wheel."""
        self._configure_build(dist_name, unpacked_wheel, build_lib, tmp_dir)
        self._run_build_subcommands()
        files, mapping = self._collect_build_outputs()
        self._run_install("headers")
        self._run_install("scripts")
        self._run_install("data")
        return files, mapping

    def _run_build_subcommands(self):
        """
        Issue #3501 indicates that some plugins/customizations might rely on:

        1. ``build_py`` not running
        2. ``build_py`` always copying files to ``build_lib``

        However both these assumptions may be false in editable_wheel.
        This method implements a temporary workaround to support the ecosystem
        while the implementations catch up.
        """
        # TODO: Once plugins/customisations had the chance to catch up, replace
        # `self._run_build_subcommands()` with `self.run_command("build")`.
        # Also remove _safely_run, TestCustomBuildPy. Suggested date: Aug/2023.
        build: Command = self.get_finalized_command("build")
        for name in build.get_sub_commands():
            cmd = self.get_finalized_command(name)
            if name == "build_py" and type(cmd) != build_py_cls:
                # Third-party build_py subclass: tolerate failures (see docstring).
                self._safely_run(name)
            else:
                self.run_command(name)

    def _safely_run(self, cmd_name: str):
        """Run *cmd_name*, demoting any exception to a deprecation warning."""
        try:
            return self.run_command(cmd_name)
        except Exception:
            SetuptoolsDeprecationWarning.emit(
                "Customization incompatible with editable install",
                f"""
                {traceback.format_exc()}

                If you are seeing this warning it is very likely that a setuptools
                plugin or customization overrides the `{cmd_name}` command, without
                taking into consideration how editable installs run build steps
                starting from setuptools v64.0.0.

                Plugin authors and developers relying on custom build steps are
                encouraged to update their `{cmd_name}` implementation considering the
                information about editable installs in
                https://setuptools.pypa.io/en/latest/userguide/extension.html.

                For the time being `setuptools` will silence this error and ignore
                the faulty command, but this behaviour will change in future versions.
                """,
                # TODO: define due_date
                # There is a series of shortcomings with the available editable install
                # methods, and they are very controversial. This is something that still
                # needs work.
            )

    def _create_wheel_file(self, bdist_wheel):
        """Assemble the final ``.whl``: copy the dist-info, run the build in
        throwaway dirs, apply the chosen editable strategy, and zip it up."""
        from wheel.wheelfile import WheelFile

        dist_info = self.get_finalized_command("dist_info")
        dist_name = dist_info.name
        tag = "-".join(bdist_wheel.get_tag())
        build_tag = "0.editable"  # According to PEP 427 needs to start with digit
        archive_name = f"{dist_name}-{build_tag}-{tag}.whl"
        wheel_path = Path(self.dist_dir, archive_name)
        if wheel_path.exists():
            wheel_path.unlink()

        unpacked_wheel = TemporaryDirectory(suffix=archive_name)
        build_lib = TemporaryDirectory(suffix=".build-lib")
        build_tmp = TemporaryDirectory(suffix=".build-temp")

        with unpacked_wheel as unpacked, build_lib as lib, build_tmp as tmp:
            unpacked_dist_info = Path(unpacked, Path(self.dist_info_dir).name)
            shutil.copytree(self.dist_info_dir, unpacked_dist_info)
            self._install_namespaces(unpacked, dist_name)
            files, mapping = self._run_build_commands(dist_name, unpacked, lib, tmp)
            strategy = self._select_strategy(dist_name, tag, lib)
            with strategy, WheelFile(wheel_path, "w") as wheel_obj:
                strategy(wheel_obj, files, mapping)
                wheel_obj.write_files(unpacked)

        return wheel_path

    def _run_install(self, category: str):
        """Run ``install_<category>`` only when the distribution has such files."""
        has_category = getattr(self.distribution, f"has_{category}", None)
        if has_category and has_category():
            _logger.info(f"Installing {category} as non editable")
            self.run_command(f"install_{category}")

    def _select_strategy(
        self,
        name: str,
        tag: str,
        build_lib: _Path,
    ) -> "EditableStrategy":
        """Decides which strategy to use to implement an editable installation."""
        build_name = f"__editable__.{name}-{tag}"
        project_dir = Path(self.project_dir)
        mode = _EditableMode.convert(self.mode)

        if mode is _EditableMode.STRICT:
            # Strict: expose only a link tree under ``build/`` so added/removed
            # files require a re-install.
            auxiliary_dir = _empty_dir(Path(self.project_dir, "build", build_name))
            return _LinkTree(self.distribution, name, auxiliary_dir, build_lib)

        packages = _find_packages(self.distribution)
        has_simple_layout = _simple_layout(packages, self.package_dir, project_dir)
        is_compat_mode = mode is _EditableMode.COMPAT
        if set(self.package_dir) == {""} and has_simple_layout or is_compat_mode:
            # src-layout(ish) is relatively safe for a simple pth file
            src_dir = self.package_dir.get("", ".")
            return _StaticPth(self.distribution, name, [Path(project_dir, src_dir)])

        # Use a MetaPathFinder to avoid adding accidental top-level packages/modules
        return _TopLevelFinder(self.distribution, name)
|
| 384 |
+
|
| 385 |
+
|
| 386 |
+
class EditableStrategy(Protocol):
    """Structural interface shared by the editable-install strategies
    (``_StaticPth``, ``_LinkTree``, ``_TopLevelFinder``).

    A strategy is used as a context manager (so it can log messages around
    the build) and is called with the wheel being written plus the collected
    build outputs (see ``editable_wheel._create_wheel_file``).
    """

    def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
        """Write the editable-install artifacts (e.g. ``.pth`` files) into *wheel*."""
        ...

    def __enter__(self):
        ...

    def __exit__(self, _exc_type, _exc_value, _traceback):
        ...
|
| 395 |
+
|
| 396 |
+
|
| 397 |
+
class _StaticPth:
    """Simplest editable strategy: write a ``.pth`` file whose lines extend
    ``sys.path`` with the given directories.
    """

    def __init__(self, dist: Distribution, name: str, path_entries: List[Path]):
        self.dist = dist
        self.name = name
        self.path_entries = path_entries

    def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
        # One absolute directory per line, with a trailing newline.
        resolved = [str(entry.resolve()) for entry in self.path_entries]
        contents = _encode_pth("\n".join(resolved) + "\n")
        wheel.writestr(f"__editable__.{self.name}.pth", contents)

    def __enter__(self):
        msg = f"""
        Editable install will be performed using .pth file to extend `sys.path` with:
        {list(map(os.fspath, self.path_entries))!r}
        """
        _logger.warning(msg + _LENIENT_WARNING)
        return self

    def __exit__(self, _exc_type, _exc_value, _traceback):
        ...
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
class _LinkTree(_StaticPth):
    """
    Creates a ``.pth`` file that points to a link tree in the ``auxiliary_dir``.

    This strategy will only link files (not dirs), so it can be implemented in
    any OS, even if that means using hardlinks instead of symlinks.

    By collocating ``auxiliary_dir`` and the original source code, limitations
    with hardlinks should be avoided.
    """

    def __init__(
        self,
        dist: Distribution,
        name: str,
        auxiliary_dir: _Path,
        build_lib: _Path,
    ):
        self.auxiliary_dir = Path(auxiliary_dir)
        self.build_lib = Path(build_lib).resolve()
        # Reuse ``build_py``'s ``copy_file`` — it accepts the ``link=``
        # argument used by ``_create_file`` below.
        self._file = dist.get_command_obj("build_py").copy_file
        super().__init__(dist, name, [self.auxiliary_dir])

    def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
        # Materialize the link tree first, then delegate the ``.pth`` writing
        # (pointing at ``auxiliary_dir``) to ``_StaticPth``.
        self._create_links(files, mapping)
        super().__call__(wheel, files, mapping)

    def _normalize_output(self, file: str) -> Optional[str]:
        # Return *file* as a posix-style path relative to ``build_lib``;
        # files that live outside ``build_lib`` are normalized to ``None``
        # (the ``relative_to`` call raises ValueError, which is suppressed).
        with suppress(ValueError):
            path = Path(file).resolve().relative_to(self.build_lib)
            return str(path).replace(os.sep, '/')
        return None

    def _create_file(self, relative_output: str, src_file: str, link=None):
        # ``link`` is forwarded to distutils' ``copy_file`` ("sym"/"hard"/None).
        dest = self.auxiliary_dir / relative_output
        if not dest.parent.is_dir():
            dest.parent.mkdir(parents=True)
        self._file(src_file, dest, link=link)

    def _create_links(self, outputs, output_mapping):
        self.auxiliary_dir.mkdir(parents=True, exist_ok=True)
        # Prefer symlinks; fall back to hardlinks where they are unsupported.
        link_type = "sym" if _can_symlink_files(self.auxiliary_dir) else "hard"
        mappings = {self._normalize_output(k): v for k, v in output_mapping.items()}
        mappings.pop(None, None)  # remove files that are not relative to build_lib

        for output in outputs:
            relative = self._normalize_output(output)
            if relative and relative not in mappings:
                # Outputs without a source mapping are plainly copied (link=None).
                self._create_file(relative, output)

        for relative, src in mappings.items():
            self._create_file(relative, src, link=link_type)

    def __enter__(self):
        msg = "Strict editable install will be performed using a link tree.\n"
        _logger.warning(msg + _STRICT_WARNING)
        return self

    def __exit__(self, _exc_type, _exc_value, _traceback):
        msg = f"""\n
        Strict editable installation performed using the auxiliary directory:
            {self.auxiliary_dir}

        Please be careful to not remove this directory, otherwise you might not be able
        to import/use your package.
        """
        InformationOnly.emit("Editable installation.", msg)
|
| 488 |
+
|
| 489 |
+
|
| 490 |
+
class _TopLevelFinder:
    """Editable strategy that installs a generated ``MetaPathFinder`` module
    plus a ``.pth`` file that activates it, instead of extending ``sys.path``.
    """

    def __init__(self, dist: Distribution, name: str):
        self.dist = dist
        self.name = name

    def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
        src_root = self.dist.src_root or os.curdir
        # All importable top-level names: packages plus top-level modules.
        top_level = chain(_find_packages(self.dist), _find_top_level_modules(self.dist))
        package_dir = self.dist.package_dir or {}
        roots = _find_package_roots(top_level, package_dir, src_root)

        # PEP 420 namespaces found on disk, plus "virtual" ones implied by
        # ``package_dir`` mappings without a real directory.
        namespaces_: Dict[str, List[str]] = dict(
            chain(
                _find_namespaces(self.dist.packages or [], roots),
                ((ns, []) for ns in _find_virtual_namespaces(roots)),
            )
        )

        legacy_namespaces = {
            pkg: find_package_path(pkg, roots, self.dist.src_root or "")
            for pkg in self.dist.namespace_packages or []
        }

        mapping = {**roots, **legacy_namespaces}
        # ^-- We need to explicitly add the legacy_namespaces to the mapping to be
        # able to import their modules even if another package sharing the same
        # namespace is installed in a conventional (non-editable) way.

        name = f"__editable__.{self.name}.finder"
        finder = _normalization.safe_identifier(name)
        content = bytes(_finder_template(name, mapping, namespaces_), "utf-8")
        wheel.writestr(f"{finder}.py", content)

        content = _encode_pth(f"import {finder}; {finder}.install()")
        wheel.writestr(f"__editable__.{self.name}.pth", content)

    def __enter__(self):
        msg = "Editable install will be performed using a meta path finder.\n"
        _logger.warning(msg + _LENIENT_WARNING)
        return self

    def __exit__(self, _exc_type, _exc_value, _traceback):
        msg = """\n
        Please be careful with folders in your working directory with the same
        name as your package as they may take precedence during imports.
        """
        InformationOnly.emit("Editable installation.", msg)
|
| 537 |
+
|
| 538 |
+
|
| 539 |
+
def _encode_pth(content: str) -> bytes:
|
| 540 |
+
""".pth files are always read with 'locale' encoding, the recommendation
|
| 541 |
+
from the cpython core developers is to write them as ``open(path, "w")``
|
| 542 |
+
and ignore warnings (see python/cpython#77102, pypa/setuptools#3937).
|
| 543 |
+
This function tries to simulate this behaviour without having to create an
|
| 544 |
+
actual file, in a way that supports a range of active Python versions.
|
| 545 |
+
(There seems to be some variety in the way different version of Python handle
|
| 546 |
+
``encoding=None``, not all of them use ``locale.getpreferredencoding(False)``).
|
| 547 |
+
"""
|
| 548 |
+
encoding = "locale" if sys.version_info >= (3, 10) else None
|
| 549 |
+
with io.BytesIO() as buffer:
|
| 550 |
+
wrapper = io.TextIOWrapper(buffer, encoding)
|
| 551 |
+
wrapper.write(content)
|
| 552 |
+
wrapper.flush()
|
| 553 |
+
buffer.seek(0)
|
| 554 |
+
return buffer.read()
|
| 555 |
+
|
| 556 |
+
|
| 557 |
+
def _can_symlink_files(base_dir: Path) -> bool:
    """Probe whether symlinks can be created inside *base_dir*.

    Creates a scratch file in a temporary directory under *base_dir* and tries
    to symlink it.  Returns ``True`` on success; otherwise verifies that hard
    links work and returns ``False``.  If neither link type is available,
    raises ``LinksNotSupported``.
    """
    with TemporaryDirectory(dir=str(base_dir.resolve())) as tmp:
        path1, path2 = Path(tmp, "file1.txt"), Path(tmp, "file2.txt")
        path1.write_text("file1", encoding="utf-8")
        # ``os.symlink`` may be missing (AttributeError), unsupported on the
        # platform (NotImplementedError), or denied by the OS/FS (OSError).
        with suppress(AttributeError, NotImplementedError, OSError):
            os.symlink(path1, path2)
            if path2.is_symlink() and path2.read_text(encoding="utf-8") == "file1":
                return True

        try:
            os.link(path1, path2)  # Ensure hard links can be created
        except Exception as ex:
            msg = (
                "File system does not seem to support either symlinks or hard links. "
                "Strict editable installs require one of them to be supported."
            )
            raise LinksNotSupported(msg) from ex
        return False
|
| 575 |
+
|
| 576 |
+
|
| 577 |
+
def _simple_layout(
    packages: Iterable[str], package_dir: Dict[str, str], project_dir: Path
) -> bool:
    """Return ``True`` if:
    - all packages are contained by the same parent directory, **and**
    - all packages become importable if the parent directory is added to ``sys.path``.

    >>> _simple_layout(['a'], {"": "src"}, "/tmp/myproj")
    True
    >>> _simple_layout(['a', 'a.b'], {"": "src"}, "/tmp/myproj")
    True
    >>> _simple_layout(['a', 'a.b'], {}, "/tmp/myproj")
    True
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"": "src"}, "/tmp/myproj")
    True
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "a", "b": "b"}, ".")
    True
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "_a", "b": "_b"}, ".")
    False
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "_a"}, "/tmp/myproj")
    False
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a.a1.a2": "_a2"}, ".")
    False
    >>> _simple_layout(['a', 'a.b'], {"": "src", "a.b": "_ab"}, "/tmp/myproj")
    False
    >>> # Special cases, no packages yet:
    >>> _simple_layout([], {"": "src"}, "/tmp/myproj")
    True
    >>> _simple_layout([], {"a": "_a", "": "src"}, "/tmp/myproj")
    False
    """
    layout = {pkg: find_package_path(pkg, package_dir, project_dir) for pkg in packages}
    if not layout:
        # No packages discovered yet: "simple" only when package_dir has at
        # most the root ("") mapping.
        # NOTE(review): ``{}`` here is an empty *dict*, so ``set() == {}`` is
        # False and an empty ``package_dir`` yields False — confirm intended.
        return set(package_dir) in ({}, {""})
    # All packages must share one common parent, and each package's dotted
    # name must map onto the matching sub-path of that parent.
    parent = os.path.commonpath([_parent_path(k, v) for k, v in layout.items()])
    return all(
        _path.same_path(Path(parent, *key.split('.')), value)
        for key, value in layout.items()
    )
|
| 616 |
+
|
| 617 |
+
|
| 618 |
+
def _parent_path(pkg, pkg_path):
|
| 619 |
+
"""Infer the parent path containing a package, that if added to ``sys.path`` would
|
| 620 |
+
allow importing that package.
|
| 621 |
+
When ``pkg`` is directly mapped into a directory with a different name, return its
|
| 622 |
+
own path.
|
| 623 |
+
>>> _parent_path("a", "src/a")
|
| 624 |
+
'src'
|
| 625 |
+
>>> _parent_path("b", "src/c")
|
| 626 |
+
'src/c'
|
| 627 |
+
"""
|
| 628 |
+
parent = pkg_path[: -len(pkg)] if pkg_path.endswith(pkg) else pkg_path
|
| 629 |
+
return parent.rstrip("/" + os.sep)
|
| 630 |
+
|
| 631 |
+
|
| 632 |
+
def _find_packages(dist: Distribution) -> Iterator[str]:
|
| 633 |
+
yield from iter(dist.packages or [])
|
| 634 |
+
|
| 635 |
+
py_modules = dist.py_modules or []
|
| 636 |
+
nested_modules = [mod for mod in py_modules if "." in mod]
|
| 637 |
+
if dist.ext_package:
|
| 638 |
+
yield dist.ext_package
|
| 639 |
+
else:
|
| 640 |
+
ext_modules = dist.ext_modules or []
|
| 641 |
+
nested_modules += [x.name for x in ext_modules if "." in x.name]
|
| 642 |
+
|
| 643 |
+
for module in nested_modules:
|
| 644 |
+
package, _, _ = module.rpartition(".")
|
| 645 |
+
yield package
|
| 646 |
+
|
| 647 |
+
|
| 648 |
+
def _find_top_level_modules(dist: Distribution) -> Iterator[str]:
|
| 649 |
+
py_modules = dist.py_modules or []
|
| 650 |
+
yield from (mod for mod in py_modules if "." not in mod)
|
| 651 |
+
|
| 652 |
+
if not dist.ext_package:
|
| 653 |
+
ext_modules = dist.ext_modules or []
|
| 654 |
+
yield from (x.name for x in ext_modules if "." not in x.name)
|
| 655 |
+
|
| 656 |
+
|
| 657 |
+
def _find_package_roots(
    packages: Iterable[str],
    package_dir: Mapping[str, str],
    src_root: _Path,
) -> Dict[str, str]:
    """Map each package name to the absolute directory it lives in, then drop
    packages whose directory is already covered by a parent package."""
    pkg_roots: Dict[str, str] = {}
    for pkg in sorted(packages):
        pkg_roots[pkg] = _absolute_root(find_package_path(pkg, package_dir, src_root))

    return _remove_nested(pkg_roots)
|
| 668 |
+
|
| 669 |
+
|
| 670 |
+
def _absolute_root(path: _Path) -> str:
|
| 671 |
+
"""Works for packages and top-level modules"""
|
| 672 |
+
path_ = Path(path)
|
| 673 |
+
parent = path_.parent
|
| 674 |
+
|
| 675 |
+
if path_.exists():
|
| 676 |
+
return str(path_.resolve())
|
| 677 |
+
else:
|
| 678 |
+
return str(parent.resolve() / path_.name)
|
| 679 |
+
|
| 680 |
+
|
| 681 |
+
def _find_virtual_namespaces(pkg_roots: Dict[str, str]) -> Iterator[str]:
    """By carefully designing ``package_dir``, it is possible to implement the logical
    structure of PEP 420 in a package without the corresponding directories.

    Moreover a parent package can be purposefully/accidentally skipped in the discovery
    phase (e.g. ``find_packages(include=["mypkg.*"])``, when ``mypkg.foo`` is included
    by ``mypkg`` itself is not).
    We consider this case to also be a virtual namespace (ignoring the original
    directory) to emulate a non-editable installation.

    This function will try to find these kinds of namespaces.
    """
    for pkg in pkg_roots:
        if "." not in pkg:
            continue
        segments = pkg.split(".")
        # Walk the ancestors from the most specific to the least specific.
        for depth in range(len(segments) - 1, 0, -1):
            ancestor = ".".join(segments[:depth])
            ancestor_path = Path(find_package_path(ancestor, pkg_roots, ""))
            if ancestor not in pkg_roots or not ancestor_path.exists():
                # ancestor not in pkg_roots ==> purposefully/accidentally skipped
                yield ancestor
|
| 703 |
+
|
| 704 |
+
|
| 705 |
+
def _find_namespaces(
    packages: List[str], pkg_roots: Dict[str, str]
) -> Iterator[Tuple[str, List[str]]]:
    """Yield ``(package, [path])`` pairs for PEP 420 namespace packages:
    directories that exist on disk but carry no ``__init__.py``."""
    for pkg in packages:
        path = find_package_path(pkg, pkg_roots, "")
        pkg_dir = Path(path)
        if pkg_dir.exists() and not (pkg_dir / "__init__.py").exists():
            yield (pkg, [path])
|
| 712 |
+
|
| 713 |
+
|
| 714 |
+
def _remove_nested(pkg_roots: Dict[str, str]) -> Dict[str, str]:
    """Return a copy of *pkg_roots* without entries that are nested (both
    logically and on disk) inside another listed package."""
    pruned = dict(pkg_roots.copy())

    for pkg, path in reversed(list(pkg_roots.items())):
        nested = any(
            pkg != other and _is_nested(pkg, path, other, other_path)
            for other, other_path in pkg_roots.items()
        )
        if nested:
            pruned.pop(pkg)

    return pruned
|
| 725 |
+
|
| 726 |
+
|
| 727 |
+
def _is_nested(pkg: str, pkg_path: str, parent: str, parent_path: str) -> bool:
    """
    Return ``True`` if ``pkg`` is nested inside ``parent`` both logically and in the
    file system.
    >>> _is_nested("a.b", "path/a/b", "a", "path/a")
    True
    >>> _is_nested("a.b", "path/a/b", "a", "otherpath/a")
    False
    >>> _is_nested("a.b", "path/a/b", "c", "path/c")
    False
    >>> _is_nested("a.a", "path/a/a", "a", "path/a")
    True
    >>> _is_nested("b.a", "path/b/a", "a", "path/a")
    False
    """
    if not pkg.startswith(parent):
        return False
    # Logical nesting confirmed; now check that the directory layout matches.
    suffix = pkg.replace(parent, "", 1).strip(".").split(".")
    expected_path = _path.normpath(Path(parent_path, *suffix))
    return _path.normpath(pkg_path) == expected_path
|
| 747 |
+
|
| 748 |
+
|
| 749 |
+
def _empty_dir(dir_: _P) -> _P:
|
| 750 |
+
"""Create a directory ensured to be empty. Existing files may be removed."""
|
| 751 |
+
shutil.rmtree(dir_, ignore_errors=True)
|
| 752 |
+
os.makedirs(dir_)
|
| 753 |
+
return dir_
|
| 754 |
+
|
| 755 |
+
|
| 756 |
+
class _NamespaceInstaller(namespaces.Installer):
    """Adapt ``namespaces.Installer`` to write the legacy namespace-package
    support file into the unpacked wheel directory (used only for the
    deprecated ``namespace_packages`` option — see
    ``editable_wheel._install_namespaces``).
    """

    def __init__(self, distribution, installation_dir, editable_name, src_root):
        self.distribution = distribution
        self.src_root = src_root
        self.installation_dir = installation_dir
        self.editable_name = editable_name
        self.outputs = []
        # NOTE(review): ``outputs``/``dry_run`` appear to be attributes consumed
        # by the unseen base class — confirm against ``setuptools.namespaces``.
        self.dry_run = False

    def _get_nspkg_file(self):
        """Installation target."""
        # ``nspkg_ext`` is provided by the base class.
        return os.path.join(self.installation_dir, self.editable_name + self.nspkg_ext)

    def _get_root(self):
        """Where the modules/packages should be loaded from."""
        return repr(str(self.src_root))
|
| 772 |
+
|
| 773 |
+
|
| 774 |
+
_FINDER_TEMPLATE = """\
|
| 775 |
+
import sys
|
| 776 |
+
from importlib.machinery import ModuleSpec, PathFinder
|
| 777 |
+
from importlib.machinery import all_suffixes as module_suffixes
|
| 778 |
+
from importlib.util import spec_from_file_location
|
| 779 |
+
from itertools import chain
|
| 780 |
+
from pathlib import Path
|
| 781 |
+
|
| 782 |
+
MAPPING = {mapping!r}
|
| 783 |
+
NAMESPACES = {namespaces!r}
|
| 784 |
+
PATH_PLACEHOLDER = {name!r} + ".__path_hook__"
|
| 785 |
+
|
| 786 |
+
|
| 787 |
+
class _EditableFinder: # MetaPathFinder
|
| 788 |
+
@classmethod
|
| 789 |
+
def find_spec(cls, fullname, path=None, target=None):
|
| 790 |
+
extra_path = []
|
| 791 |
+
|
| 792 |
+
# Top-level packages and modules (we know these exist in the FS)
|
| 793 |
+
if fullname in MAPPING:
|
| 794 |
+
pkg_path = MAPPING[fullname]
|
| 795 |
+
return cls._find_spec(fullname, Path(pkg_path))
|
| 796 |
+
|
| 797 |
+
# Handle immediate children modules (required for namespaces to work)
|
| 798 |
+
# To avoid problems with case sensitivity in the file system we delegate
|
| 799 |
+
# to the importlib.machinery implementation.
|
| 800 |
+
parent, _, child = fullname.rpartition(".")
|
| 801 |
+
if parent and parent in MAPPING:
|
| 802 |
+
return PathFinder.find_spec(fullname, path=[MAPPING[parent], *extra_path])
|
| 803 |
+
|
| 804 |
+
# Other levels of nesting should be handled automatically by importlib
|
| 805 |
+
# using the parent path.
|
| 806 |
+
return None
|
| 807 |
+
|
| 808 |
+
@classmethod
|
| 809 |
+
def _find_spec(cls, fullname, candidate_path):
|
| 810 |
+
init = candidate_path / "__init__.py"
|
| 811 |
+
candidates = (candidate_path.with_suffix(x) for x in module_suffixes())
|
| 812 |
+
for candidate in chain([init], candidates):
|
| 813 |
+
if candidate.exists():
|
| 814 |
+
return spec_from_file_location(fullname, candidate)
|
| 815 |
+
|
| 816 |
+
|
| 817 |
+
class _EditableNamespaceFinder: # PathEntryFinder
|
| 818 |
+
@classmethod
|
| 819 |
+
def _path_hook(cls, path):
|
| 820 |
+
if path == PATH_PLACEHOLDER:
|
| 821 |
+
return cls
|
| 822 |
+
raise ImportError
|
| 823 |
+
|
| 824 |
+
@classmethod
|
| 825 |
+
def _paths(cls, fullname):
|
| 826 |
+
# Ensure __path__ is not empty for the spec to be considered a namespace.
|
| 827 |
+
return NAMESPACES[fullname] or MAPPING.get(fullname) or [PATH_PLACEHOLDER]
|
| 828 |
+
|
| 829 |
+
@classmethod
|
| 830 |
+
def find_spec(cls, fullname, target=None):
|
| 831 |
+
if fullname in NAMESPACES:
|
| 832 |
+
spec = ModuleSpec(fullname, None, is_package=True)
|
| 833 |
+
spec.submodule_search_locations = cls._paths(fullname)
|
| 834 |
+
return spec
|
| 835 |
+
return None
|
| 836 |
+
|
| 837 |
+
@classmethod
|
| 838 |
+
def find_module(cls, fullname):
|
| 839 |
+
return None
|
| 840 |
+
|
| 841 |
+
|
| 842 |
+
def install():
|
| 843 |
+
if not any(finder == _EditableFinder for finder in sys.meta_path):
|
| 844 |
+
sys.meta_path.append(_EditableFinder)
|
| 845 |
+
|
| 846 |
+
if not NAMESPACES:
|
| 847 |
+
return
|
| 848 |
+
|
| 849 |
+
if not any(hook == _EditableNamespaceFinder._path_hook for hook in sys.path_hooks):
|
| 850 |
+
# PathEntryFinder is needed to create NamespaceSpec without private APIS
|
| 851 |
+
sys.path_hooks.append(_EditableNamespaceFinder._path_hook)
|
| 852 |
+
if PATH_PLACEHOLDER not in sys.path:
|
| 853 |
+
sys.path.append(PATH_PLACEHOLDER) # Used just to trigger the path hook
|
| 854 |
+
"""
|
| 855 |
+
|
| 856 |
+
|
| 857 |
+
def _finder_template(
    name: str, mapping: Mapping[str, str], namespaces: Dict[str, List[str]]
) -> str:
    """Render the source code for the ``MetaPathFinder`` and ``PathEntryFinder``."""
    # Sort entries so the generated module is deterministic across builds.
    ordered = dict(sorted(mapping.items(), key=lambda item: item[0]))
    return _FINDER_TEMPLATE.format(name=name, mapping=ordered, namespaces=namespaces)
|
| 865 |
+
|
| 866 |
+
|
| 867 |
+
# NOTE(review): presumably raised by the link-based installation strategies --
# the raising call sites are outside this excerpt; confirm before relying on it.
class LinksNotSupported(errors.FileError):
    """File system does not seem to support either symlinks or hard links."""
|
| 869 |
+
|
| 870 |
+
|
| 871 |
+
class _DebuggingTips(SetuptoolsWarning):
    # Warning emitted when an editable installation fails; the text points the
    # user at generic troubleshooting steps before filing an issue.
    # ``{project}`` is substituted when the warning is emitted.
    _SUMMARY = "Problem in editable installation."
    _DETAILS = """
    An error happened while installing `{project}` in editable mode.

    The following steps are recommended to help debug this problem:

    - Try to install the project normally, without using the editable mode.
      Does the error still persist?
      (If it does, try fixing the problem before attempting the editable mode).
    - If you are using binary extensions, make sure you have all OS-level
      dependencies installed (e.g. compilers, toolchains, binary libraries, ...).
    - Try the latest version of setuptools (maybe the error was already fixed).
    - If you (or your project dependencies) are using any setuptools extension
      or customization, make sure they support the editable mode.

    After following the steps above, if the problem still persists and
    you think this is related to how setuptools handles editable installations,
    please submit a reproducible example
    (see https://stackoverflow.com/help/minimal-reproducible-example) to:

        https://github.com/pypa/setuptools/issues
    """
    _SEE_DOCS = "userguide/development_mode.html"
|
.venv/Lib/site-packages/setuptools/command/egg_info.py
ADDED
|
@@ -0,0 +1,735 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""setuptools.command.egg_info
|
| 2 |
+
|
| 3 |
+
Create a distribution's .egg-info directory and contents"""
|
| 4 |
+
|
| 5 |
+
from distutils.filelist import FileList as _FileList
|
| 6 |
+
from distutils.errors import DistutilsInternalError
|
| 7 |
+
from distutils.util import convert_path
|
| 8 |
+
from distutils import log
|
| 9 |
+
import distutils.errors
|
| 10 |
+
import distutils.filelist
|
| 11 |
+
import functools
|
| 12 |
+
import os
|
| 13 |
+
import re
|
| 14 |
+
import sys
|
| 15 |
+
import time
|
| 16 |
+
import collections
|
| 17 |
+
|
| 18 |
+
from .._importlib import metadata
|
| 19 |
+
from .. import _entry_points, _normalization
|
| 20 |
+
from . import _requirestxt
|
| 21 |
+
|
| 22 |
+
from setuptools import Command
|
| 23 |
+
from setuptools.command.sdist import sdist
|
| 24 |
+
from setuptools.command.sdist import walk_revctrl
|
| 25 |
+
from setuptools.command.setopt import edit_config
|
| 26 |
+
from setuptools.command import bdist_egg
|
| 27 |
+
import setuptools.unicode_utils as unicode_utils
|
| 28 |
+
from setuptools.glob import glob
|
| 29 |
+
|
| 30 |
+
from setuptools.extern import packaging
|
| 31 |
+
from ..warnings import SetuptoolsDeprecationWarning
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
# Interpreter's "major.minor" version string (e.g. "3.11"); used as the
# default ``py_version`` when computing egg basenames.
PY_MAJOR = '{}.{}'.format(*sys.version_info)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
    """
    Translate a file path glob like '*.txt' in to a regular expression.
    This differs from fnmatch.translate which allows wildcards to match
    directory separators. It also knows about '**/' which matches any number of
    directories.

    Returns a compiled regular expression object that matches a full path
    (the pattern is anchored at the end with ``\\Z``).
    """
    pat = ''

    # This will split on '/' within [character classes]. This is deliberate.
    chunks = glob.split(os.path.sep)

    sep = re.escape(os.sep)
    # A single path-name character: anything except the directory separator.
    valid_char = '[^%s]' % (sep,)

    for c, chunk in enumerate(chunks):
        last_chunk = c == len(chunks) - 1

        # Chunks that are a literal ** are globstars. They match anything.
        if chunk == '**':
            if last_chunk:
                # Match anything if this is the last component
                pat += '.*'
            else:
                # Match '(name/)*'
                pat += '(?:%s+%s)*' % (valid_char, sep)
            continue  # Break here as the whole path component has been handled

        # Find any special characters in the remainder
        i = 0
        chunk_len = len(chunk)
        while i < chunk_len:
            char = chunk[i]
            if char == '*':
                # Match any number of name characters
                pat += valid_char + '*'
            elif char == '?':
                # Match a name character
                pat += valid_char
            elif char == '[':
                # Character class
                inner_i = i + 1
                # Skip initial !/] chars
                if inner_i < chunk_len and chunk[inner_i] == '!':
                    inner_i = inner_i + 1
                if inner_i < chunk_len and chunk[inner_i] == ']':
                    inner_i = inner_i + 1

                # Loop till the closing ] is found
                while inner_i < chunk_len and chunk[inner_i] != ']':
                    inner_i = inner_i + 1

                if inner_i >= chunk_len:
                    # Got to the end of the string without finding a closing ]
                    # Do not treat this as a matching group, but as a literal [
                    pat += re.escape(char)
                else:
                    # Grab the insides of the [brackets]
                    inner = chunk[i + 1 : inner_i]
                    char_class = ''

                    # Class negation
                    if inner[0] == '!':
                        char_class = '^'
                        inner = inner[1:]

                    char_class += re.escape(inner)
                    pat += '[%s]' % (char_class,)

                    # Skip to the end ]
                    i = inner_i
            else:
                pat += re.escape(char)
            i += 1

        # Join each chunk with the dir separator
        if not last_chunk:
            pat += sep

    pat += r'\Z'
    return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
class InfoCommon:
    """Shared helpers for egg-info style commands: safe name + version tagging."""

    tag_build = None
    tag_date = None

    @property
    def name(self):
        """Normalized (safe) project name."""
        return _normalization.safe_name(self.distribution.get_name())

    def tagged_version(self):
        """Project version with the configured build/date tags applied."""
        with_tags = self._maybe_tag(self.distribution.get_version())
        return _normalization.best_effort_version(with_tags)

    def _maybe_tag(self, version):
        """
        egg_info may be called more than once for a distribution,
        in which case the version string already contains all tags.
        """
        if self.vtags and self._already_tagged(version):
            return version
        return version + self.vtags

    def _already_tagged(self, version: str) -> bool:
        # Depending on their format, tags may change with version normalization.
        # So in addition to the regular tags, we have to search for the
        # normalized ones.
        tags = self.vtags
        return version.endswith(tags) or version.endswith(self._safe_tags())

    def _safe_tags(self) -> str:
        # Rely on best_effort_version pretending the tags belong to version 0,
        # then discard the leading "0" (the fake version number).
        return _normalization.best_effort_version(f"0{self.vtags}")[1:]

    def tags(self) -> str:
        """Concatenation of the build tag and (optionally) a date stamp."""
        parts = []
        if self.tag_build:
            parts.append(self.tag_build)
        if self.tag_date:
            parts.append(time.strftime("%Y%m%d"))
        return ''.join(parts)

    vtags = property(tags)
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
class egg_info(InfoCommon, Command):
    """Create and populate a distribution's ``.egg-info`` directory."""

    description = "create a distribution's .egg-info directory"

    user_options = [
        (
            'egg-base=',
            'e',
            "directory containing .egg-info directories"
            " (default: top of the source tree)",
        ),
        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
        ('no-date', 'D', "Don't include date stamp [default]"),
    ]

    boolean_options = ['tag-date']
    negative_opt = {
        'no-date': 'tag-date',
    }

    def initialize_options(self):
        self.egg_base = None
        self.egg_name = None
        self.egg_info = None
        self.egg_version = None
        self.ignore_egg_info_in_manifest = False

    ####################################
    # allow the 'tag_svn_revision' to be detected and
    # set, supporting sdists built on older Setuptools.
    @property
    def tag_svn_revision(self):
        pass

    @tag_svn_revision.setter
    def tag_svn_revision(self, value):
        pass

    ####################################

    def save_version_info(self, filename):
        """
        Materialize the value of date into the
        build tag. Install build keys in a deterministic order
        to avoid arbitrary reordering on subsequent builds.
        """
        egg_info = collections.OrderedDict()
        # follow the order these keys would have been added
        # when PYTHONHASHSEED=0
        egg_info['tag_build'] = self.tags()
        egg_info['tag_date'] = 0
        edit_config(filename, dict(egg_info=egg_info))

    def finalize_options(self):
        # Note: we need to capture the current value returned
        # by `self.tagged_version()`, so we can later update
        # `self.distribution.metadata.version` without
        # repercussions.
        self.egg_name = self.name
        self.egg_version = self.tagged_version()
        parsed_version = packaging.version.Version(self.egg_version)

        try:
            # Validate name+version by round-tripping through a Requirement;
            # "===" pins non-PEP-440 versions.
            is_version = isinstance(parsed_version, packaging.version.Version)
            spec = "%s==%s" if is_version else "%s===%s"
            packaging.requirements.Requirement(spec % (self.egg_name, self.egg_version))
        except ValueError as e:
            raise distutils.errors.DistutilsOptionError(
                "Invalid distribution name or version syntax: %s-%s"
                % (self.egg_name, self.egg_version)
            ) from e

        if self.egg_base is None:
            dirs = self.distribution.package_dir
            self.egg_base = (dirs or {}).get('', os.curdir)

        self.ensure_dirname('egg_base')
        self.egg_info = _normalization.filename_component(self.egg_name) + '.egg-info'
        if self.egg_base != os.curdir:
            self.egg_info = os.path.join(self.egg_base, self.egg_info)

        # Set package version for the benefit of dumber commands
        # (e.g. sdist, bdist_wininst, etc.)
        #
        self.distribution.metadata.version = self.egg_version

        # If we bootstrapped around the lack of a PKG-INFO, as might be the
        # case in a fresh checkout, make sure that any special tags get added
        # to the version info
        #
        pd = self.distribution._patched_dist
        key = getattr(pd, "key", None) or getattr(pd, "name", None)
        if pd is not None and key == self.egg_name.lower():
            pd._version = self.egg_version
            pd._parsed_version = packaging.version.Version(self.egg_version)
            self.distribution._patched_dist = None

    def _get_egg_basename(self, py_version=PY_MAJOR, platform=None):
        """Compute filename of the output egg. Private API."""
        return _egg_basename(self.egg_name, self.egg_version, py_version, platform)

    def write_or_delete_file(self, what, filename, data, force=False):
        """Write `data` to `filename` or delete if empty

        If `data` is non-empty, this routine is the same as ``write_file()``.
        If `data` is empty but not ``None``, this is the same as calling
        ``delete_file(filename)``. If `data` is ``None``, then this is a no-op
        unless `filename` exists, in which case a warning is issued about the
        orphaned file (if `force` is false), or deleted (if `force` is true).
        """
        if data:
            self.write_file(what, filename, data)
        elif os.path.exists(filename):
            if data is None and not force:
                log.warn("%s not set in setup(), but %s exists", what, filename)
                return
            else:
                self.delete_file(filename)

    def write_file(self, what, filename, data):
        """Write `data` to `filename` (if not a dry run) after announcing it

        `what` is used in a log message to identify what is being written
        to the file.
        """
        log.info("writing %s to %s", what, filename)
        data = data.encode("utf-8")
        if not self.dry_run:
            # Use a context manager so the handle is closed even if the write
            # fails (the previous open/write/close sequence leaked on error).
            with open(filename, 'wb') as f:
                f.write(data)

    def delete_file(self, filename):
        """Delete `filename` (if not a dry run) after announcing it"""
        log.info("deleting %s", filename)
        if not self.dry_run:
            os.unlink(filename)

    def run(self):
        self.mkpath(self.egg_info)
        try:
            # Touch the directory so consumers can detect staleness.
            os.utime(self.egg_info, None)
        except OSError as e:
            msg = f"Cannot update time stamp of directory '{self.egg_info}'"
            raise distutils.errors.DistutilsFileError(msg) from e
        for ep in metadata.entry_points(group='egg_info.writers'):
            writer = ep.load()
            writer(self, ep.name, os.path.join(self.egg_info, ep.name))

        # Get rid of native_libs.txt if it was put there by older bdist_egg
        nl = os.path.join(self.egg_info, "native_libs.txt")
        if os.path.exists(nl):
            self.delete_file(nl)

        self.find_sources()

    def find_sources(self):
        """Generate SOURCES.txt manifest file"""
        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
        mm = manifest_maker(self.distribution)
        mm.ignore_egg_info_dir = self.ignore_egg_info_in_manifest
        mm.manifest = manifest_filename
        mm.run()
        self.filelist = mm.filelist
|
| 328 |
+
|
| 329 |
+
|
| 330 |
+
class FileList(_FileList):
    # Implementations of the various MANIFEST.in commands
    # Extends distutils' FileList with recursive globs, path-safety filtering
    # and optional exclusion of ``.egg-info`` directories.

    def __init__(self, warn=None, debug_print=None, ignore_egg_info_dir=False):
        super().__init__(warn, debug_print)
        self.ignore_egg_info_dir = ignore_egg_info_dir

    def process_template_line(self, line):
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        action_map = {
            'include': self.include,
            'exclude': self.exclude,
            'global-include': self.global_include,
            'global-exclude': self.global_exclude,
            'recursive-include': functools.partial(
                self.recursive_include,
                dir,
            ),
            'recursive-exclude': functools.partial(
                self.recursive_exclude,
                dir,
            ),
            'graft': self.graft,
            'prune': self.prune,
        }
        # Warning templates used when an action matched nothing.
        log_map = {
            'include': "warning: no files found matching '%s'",
            'exclude': ("warning: no previously-included files found " "matching '%s'"),
            'global-include': (
                "warning: no files found matching '%s' " "anywhere in distribution"
            ),
            'global-exclude': (
                "warning: no previously-included files matching "
                "'%s' found anywhere in distribution"
            ),
            'recursive-include': (
                "warning: no files found matching '%s' " "under directory '%s'"
            ),
            'recursive-exclude': (
                "warning: no previously-included files matching "
                "'%s' found under directory '%s'"
            ),
            'graft': "warning: no directories found matching '%s'",
            'prune': "no previously-included directories found matching '%s'",
        }

        try:
            process_action = action_map[action]
        except KeyError:
            # _parse_template_line already validated the action name.
            raise DistutilsInternalError(
                "this cannot happen: invalid action '{action!s}'".format(action=action),
            )

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.

        action_is_recursive = action.startswith('recursive-')
        if action in {'graft', 'prune'}:
            patterns = [dir_pattern]
        extra_log_args = (dir,) if action_is_recursive else ()
        log_tmpl = log_map[action]

        self.debug_print(
            ' '.join(
                [action] + ([dir] if action_is_recursive else []) + patterns,
            )
        )
        for pattern in patterns:
            if not process_action(pattern):
                log.warn(log_tmpl, pattern, *extra_log_args)

    def _remove_files(self, predicate):
        """
        Remove all files from the file list that match the predicate.
        Return True if any matching files were removed
        """
        found = False
        # Iterate backwards so deletion does not shift unvisited indices.
        for i in range(len(self.files) - 1, -1, -1):
            if predicate(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                found = True
        return found

    def include(self, pattern):
        """Include files that match 'pattern'."""
        found = [f for f in glob(pattern) if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def exclude(self, pattern):
        """Exclude files that match 'pattern'."""
        match = translate_pattern(pattern)
        return self._remove_files(match.match)

    def recursive_include(self, dir, pattern):
        """
        Include all files anywhere in 'dir/' that match the pattern.
        """
        full_pattern = os.path.join(dir, '**', pattern)
        found = [f for f in glob(full_pattern, recursive=True) if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def recursive_exclude(self, dir, pattern):
        """
        Exclude any file anywhere in 'dir/' that match the pattern.
        """
        match = translate_pattern(os.path.join(dir, '**', pattern))
        return self._remove_files(match.match)

    def graft(self, dir):
        """Include all files from 'dir/'."""
        found = [
            item
            for match_dir in glob(dir)
            for item in distutils.filelist.findall(match_dir)
        ]
        self.extend(found)
        return bool(found)

    def prune(self, dir):
        """Filter out files from 'dir/'."""
        match = translate_pattern(os.path.join(dir, '**'))
        return self._remove_files(match.match)

    def global_include(self, pattern):
        """
        Include all files anywhere in the current directory that match the
        pattern. This is very inefficient on large file trees.
        """
        if self.allfiles is None:
            self.findall()
        match = translate_pattern(os.path.join('**', pattern))
        found = [f for f in self.allfiles if match.match(f)]
        self.extend(found)
        return bool(found)

    def global_exclude(self, pattern):
        """
        Exclude all files anywhere that match the pattern.
        """
        match = translate_pattern(os.path.join('**', pattern))
        return self._remove_files(match.match)

    def append(self, item):
        """Append a single path after normalizing and safety-checking it."""
        if item.endswith('\r'):  # Fix older sdists built on Windows
            item = item[:-1]
        path = convert_path(item)

        if self._safe_path(path):
            self.files.append(path)

    def extend(self, paths):
        """Append every path in ``paths`` that passes the safety check."""
        self.files.extend(filter(self._safe_path, paths))

    def _repair(self):
        """
        Replace self.files with only safe paths

        Because some owners of FileList manipulate the underlying
        ``files`` attribute directly, this method must be called to
        repair those paths.
        """
        self.files = list(filter(self._safe_path, self.files))

    def _safe_path(self, path):
        # Returns True for an acceptable path; False (or implicitly None,
        # also falsy) for paths that are undecodable, unencodable, filtered
        # egg-info entries, or simply non-existent.
        enc_warn = "'%s' not %s encodable -- skipping"

        # To avoid accidental trans-codings errors, first to unicode
        u_path = unicode_utils.filesys_decode(path)
        if u_path is None:
            log.warn("'%s' in unexpected encoding -- skipping" % path)
            return False

        # Must ensure utf-8 encodability
        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
        if utf8_path is None:
            log.warn(enc_warn, path, 'utf-8')
            return False

        try:
            # ignore egg-info paths
            is_egg_info = ".egg-info" in u_path or b".egg-info" in utf8_path
            if self.ignore_egg_info_dir and is_egg_info:
                return False
            # accept is either way checks out
            if os.path.exists(u_path) or os.path.exists(utf8_path):
                return True
        # this will catch any encode errors decoding u_path
        except UnicodeEncodeError:
            log.warn(enc_warn, path, sys.getfilesystemencoding())
| 530 |
+
|
| 531 |
+
class manifest_maker(sdist):
    # An ``sdist`` subclass whose only job is to (re)generate the manifest:
    # options are forced so that it always rebuilds and never produces an
    # actual source distribution.
    template = "MANIFEST.in"

    def initialize_options(self):
        # Force defaults, pruning and a manifest rebuild; manifest_only
        # ensures no distribution archive is created.
        self.use_defaults = 1
        self.prune = 1
        self.manifest_only = 1
        self.force_manifest = 1
        self.ignore_egg_info_dir = False

    def finalize_options(self):
        # Nothing to finalize; the options above are fixed.
        pass

    def run(self):
        """Collect files from every source and write the manifest file."""
        self.filelist = FileList(ignore_egg_info_dir=self.ignore_egg_info_dir)
        if not os.path.exists(self.manifest):
            self.write_manifest()  # it must exist so it'll get in the list
        self.add_defaults()
        if os.path.exists(self.template):
            self.read_template()
        self.add_license_files()
        self._add_referenced_files()
        self.prune_file_list()
        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()

    def _manifest_normalize(self, path):
        # Decode to text and normalize to '/' separators so manifest
        # entries are POSIX-style on every platform.
        path = unicode_utils.filesys_decode(path)
        return path.replace(os.sep, '/')

    def write_manifest(self):
        """
        Write the file list in 'self.filelist' to the manifest file
        named by 'self.manifest'.
        """
        # Other commands may have written unsafe paths directly into
        # ``filelist.files``; repair before writing.
        self.filelist._repair()

        # Now _repairs should encodability, but not unicode
        files = [self._manifest_normalize(f) for f in self.filelist.files]
        msg = "writing manifest file '%s'" % self.manifest
        self.execute(write_file, (self.manifest, files), msg)

    def warn(self, msg):
        # Forward to sdist.warn unless the message is known noise.
        if not self._should_suppress_warning(msg):
            sdist.warn(self, msg)

    @staticmethod
    def _should_suppress_warning(msg):
        """
        suppress missing-file warnings from sdist
        """
        return re.match(r"standard file .*not found", msg)

    def add_defaults(self):
        """Add standard files plus the template, manifest and VCS files."""
        sdist.add_defaults(self)
        self.filelist.append(self.template)
        self.filelist.append(self.manifest)
        rcfiles = list(walk_revctrl())
        if rcfiles:
            self.filelist.extend(rcfiles)
        elif os.path.exists(self.manifest):
            # No revision-control files found: reuse the existing manifest.
            self.read_manifest()

        if os.path.exists("setup.py"):
            # setup.py should be included by default, even if it's not
            # the script called to create the sdist
            self.filelist.append("setup.py")

        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist.graft(ei_cmd.egg_info)

    def add_license_files(self):
        # Include every file listed in the distribution's license_files.
        license_files = self.distribution.metadata.license_files or []
        for lf in license_files:
            log.info("adding license file '%s'", lf)
        self.filelist.extend(license_files)

    def _add_referenced_files(self):
        """Add files referenced by the config (e.g. `file:` directive) to filelist"""
        referenced = getattr(self.distribution, '_referenced_files', [])
        # ^-- fallback if dist comes from distutils or is a custom class
        for rf in referenced:
            log.debug("adding file referenced by config '%s'", rf)
        self.filelist.extend(referenced)

    def prune_file_list(self):
        """Drop build artifacts and VCS control directories from the list."""
        build = self.get_finalized_command('build')
        base_dir = self.distribution.get_fullname()
        self.filelist.prune(build.build_base)
        self.filelist.prune(base_dir)
        sep = re.escape(os.sep)
        self.filelist.exclude_pattern(
            r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=1
        )

    def _safe_data_files(self, build_py):
        """
        The parent class implementation of this method
        (``sdist``) will try to include data files, which
        might cause recursion problems when
        ``include_package_data=True``.

        Therefore, avoid triggering any attempt of
        analyzing/building the manifest again.
        """
        if hasattr(build_py, 'get_data_files_without_manifest'):
            return build_py.get_data_files_without_manifest()

        SetuptoolsDeprecationWarning.emit(
            "`build_py` command does not inherit from setuptools' `build_py`.",
            """
            Custom 'build_py' does not implement 'get_data_files_without_manifest'.
            Please extend command classes from setuptools instead of distutils.
            """,
            see_url="https://peps.python.org/pep-0632/",
            # due_date not defined yet, old projects might still do it?
        )
        return build_py.get_data_files()
|
| 650 |
+
|
| 651 |
+
|
| 652 |
+
def write_file(filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it, one per line.
    """
    # Join with "\n" and write in binary mode so the manifest is always
    # POSIX-style, regardless of the platform's native line endings.
    # The entries are assumed to have been vetted for utf-8 encodability.
    payload = "\n".join(contents).encode("utf-8")
    with open(filename, "wb") as stream:
        stream.write(payload)
|
| 663 |
+
|
| 664 |
+
|
| 665 |
+
def write_pkg_info(cmd, basename, filename):
    """Write PKG-INFO into the egg-info directory and the zip-safety flag.

    The distribution metadata's name/version are temporarily swapped for
    the egg's name/version while writing, then restored.
    """
    log.info("writing %s", filename)
    if cmd.dry_run:
        return

    metadata = cmd.distribution.metadata
    original = (metadata.name, metadata.version)
    metadata.name, metadata.version = cmd.egg_name, cmd.egg_version
    try:
        # write unescaped data to PKG-INFO, so older pkg_resources
        # can still parse it
        metadata.write_pkg_info(cmd.egg_info)
    finally:
        # Restore the metadata even if writing failed.
        metadata.name, metadata.version = original

    safe = getattr(cmd.distribution, 'zip_safe', None)
    bdist_egg.write_safety_flag(cmd.egg_info, safe)
|
| 682 |
+
|
| 683 |
+
|
| 684 |
+
def warn_depends_obsolete(cmd, basename, filename):
    """Intentionally a no-op.

    Unused: left to avoid errors when updating (from source) from <= 67.8.
    Old installations have a .dist-info directory with the entry-point
    ``depends.txt = setuptools.command.egg_info:warn_depends_obsolete``.
    This may trigger errors when running the first egg_info in build_meta.
    TODO: Remove this function in a version sufficiently > 68.
    """
|
| 692 |
+
|
| 693 |
+
|
| 694 |
+
# Export API used in entry_points
# NOTE(review): per the comment above, these aliases appear to be referenced
# by name from entry-point declarations, so they must stay importable from
# this module under these exact names — confirm before renaming/removing.
write_requirements = _requirestxt.write_requirements
write_setup_requirements = _requirestxt.write_setup_requirements
|
| 697 |
+
|
| 698 |
+
|
| 699 |
+
def write_toplevel_names(cmd, basename, filename):
    """Write the sorted, de-duplicated set of top-level names, one per line."""
    # Keep only the first dotted component of each distributed name.
    top_level = {
        name.split('.', 1)[0]
        for name in cmd.distribution.iter_distribution_names()
    }
    body = '\n'.join(sorted(top_level)) + '\n'
    cmd.write_file("top-level names", filename, body)
|
| 704 |
+
|
| 705 |
+
|
| 706 |
+
def overwrite_arg(cmd, basename, filename):
    # Same as write_arg, but always overwrite the target (force=True).
    write_arg(cmd, basename, filename, True)
|
| 708 |
+
|
| 709 |
+
|
| 710 |
+
def write_arg(cmd, basename, filename, force=False):
    """Write (or delete) the distribution attribute named after *basename*.

    The attribute name is *basename* without its extension; when the
    attribute is absent/None the file is deleted instead of written.
    """
    attr_name, _ext = os.path.splitext(basename)
    lines = getattr(cmd.distribution, attr_name, None)
    payload = None if lines is None else '\n'.join(lines) + '\n'
    cmd.write_or_delete_file(attr_name, filename, payload, force)
|
| 716 |
+
|
| 717 |
+
|
| 718 |
+
def write_entries(cmd, basename, filename):
    """Render the distribution's entry points and write them to *filename*."""
    entry_points = _entry_points.load(cmd.distribution.entry_points)
    rendered = _entry_points.render(entry_points)
    # Always overwrite (force=True).
    cmd.write_or_delete_file('entry points', filename, rendered, True)
|
| 722 |
+
|
| 723 |
+
|
| 724 |
+
def _egg_basename(egg_name, egg_version, py_version=None, platform=None):
    """Compute filename of the output egg. Private API.

    Shape: ``{name}-{version}-py{pyver}`` with an optional ``-{platform}``
    suffix; name and version are escaped for filename use.
    """
    components = [
        _normalization.filename_component(egg_name),
        _normalization.filename_component(egg_version),
    ]
    base = "-".join(components) + f"-py{py_version or PY_MAJOR}"
    return f"{base}-{platform}" if platform else base
|
| 732 |
+
|
| 733 |
+
|
| 734 |
+
# Marker subclass so egg_info-specific deprecations can be filtered
# separately from other SetuptoolsDeprecationWarning instances.
class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
    """Deprecated behavior warning for EggInfo, bypassing suppression."""
|
.venv/Lib/site-packages/setuptools/command/install.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from distutils.errors import DistutilsArgError
|
| 2 |
+
import inspect
|
| 3 |
+
import glob
|
| 4 |
+
import platform
|
| 5 |
+
import distutils.command.install as orig
|
| 6 |
+
|
| 7 |
+
import setuptools
|
| 8 |
+
from ..warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
|
| 9 |
+
|
| 10 |
+
# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
|
| 11 |
+
# now. See https://github.com/pypa/setuptools/issues/199/
|
| 12 |
+
_install = orig.install
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class install(orig.install):
    """Use easy_install to install the package, w/dependencies"""

    user_options = orig.install.user_options + [
        ('old-and-unmanageable', None, "Try not to use this!"),
        (
            'single-version-externally-managed',
            None,
            "used by system package builders to create 'flat' eggs",
        ),
    ]
    boolean_options = orig.install.boolean_options + [
        'old-and-unmanageable',
        'single-version-externally-managed',
    ]
    # Sub-commands added on top of distutils' install; the predicate
    # lambdas always run them.
    new_commands = [
        ('install_egg_info', lambda self: True),
        ('install_scripts', lambda self: True),
    ]
    _nc = dict(new_commands)

    def initialize_options(self):
        """Emit the setup.py-install deprecation warning, then initialize."""
        SetuptoolsDeprecationWarning.emit(
            "setup.py install is deprecated.",
            """
            Please avoid running ``setup.py`` directly.
            Instead, use pypa/build, pypa/installer or other
            standards-based tools.
            """,
            see_url="https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html",
            # TODO: Document how to bootstrap setuptools without install
            # (e.g. by unziping the wheel file)
            # and then add a due_date to this warning.
        )

        orig.install.initialize_options(self)
        self.old_and_unmanageable = None
        self.single_version_externally_managed = None

    def finalize_options(self):
        # --root implies single-version-externally-managed; an explicit
        # single-version-externally-managed requires --record or --root.
        orig.install.finalize_options(self)
        if self.root:
            self.single_version_externally_managed = True
        elif self.single_version_externally_managed:
            if not self.root and not self.record:
                raise DistutilsArgError(
                    "You must specify --record or --root when building system"
                    " packages"
                )

    def handle_extra_path(self):
        if self.root or self.single_version_externally_managed:
            # explicit backward-compatibility mode, allow extra_path to work
            return orig.install.handle_extra_path(self)

        # Ignore extra_path when installing an egg (or being run by another
        # command without --root or --single-version-externally-managed
        self.path_file = None
        self.extra_dirs = ''

    def run(self):
        """Dispatch between old-style install and the egg-based install."""
        # Explicit request for old-style install? Just do it
        if self.old_and_unmanageable or self.single_version_externally_managed:
            return orig.install.run(self)

        if not self._called_from_setup(inspect.currentframe()):
            # Run in backward-compatibility mode to support bdist_* commands.
            orig.install.run(self)
        else:
            self.do_egg_install()

    @staticmethod
    def _called_from_setup(run_frame):
        """
        Attempt to detect whether run() was called from setup() or by another
        command. If called by setup(), the parent caller will be the
        'run_command' method in 'distutils.dist', and *its* caller will be
        the 'run_commands' method. If called any other way, the
        immediate caller *might* be 'run_command', but it won't have been
        called by 'run_commands'. Return True in that case or if a call stack
        is unavailable. Return False otherwise.
        """
        if run_frame is None:
            # No frame support (e.g. IronPython without -X:Frames).
            msg = "Call stack not available. bdist_* commands may fail."
            SetuptoolsWarning.emit(msg)
            if platform.python_implementation() == 'IronPython':
                msg = "For best results, pass -X:Frames to enable call stack."
                SetuptoolsWarning.emit(msg)
            return True

        frames = inspect.getouterframes(run_frame)
        for frame in frames[2:4]:
            (caller,) = frame[:1]
            info = inspect.getframeinfo(caller)
            caller_module = caller.f_globals.get('__name__', '')

            if caller_module == "setuptools.dist" and info.function == "run_command":
                # Starting from v61.0.0 setuptools overwrites dist.run_command
                continue

            return caller_module == 'distutils.dist' and info.function == 'run_commands'

    def do_egg_install(self):
        """Build a bdist_egg and install it via the easy_install command."""
        easy_install = self.distribution.get_command_class('easy_install')

        cmd = easy_install(
            self.distribution,
            args="x",
            root=self.root,
            record=self.record,
        )
        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
        cmd.always_copy_from = '.'  # make sure local-dir eggs get installed

        # pick up setup-dir .egg files only: no .egg-info
        cmd.package_index.scan(glob.glob('*.egg'))

        self.run_command('bdist_egg')
        args = [self.distribution.get_command_obj('bdist_egg').egg_output]

        if setuptools.bootstrap_install_from:
            # Bootstrap self-installation of setuptools
            args.insert(0, setuptools.bootstrap_install_from)

        cmd.args = args
        cmd.run(show_deprecation=False)
        setuptools.bootstrap_install_from = None
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
# XXX Python 3.1 doesn't see _nc if this is inside the class
# Keep distutils' sub-commands except those overridden here (listed in
# install._nc), then append the setuptools-specific ones.
install.sub_commands = [
    cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc
] + install.new_commands
|
.venv/Lib/site-packages/setuptools/command/install_egg_info.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from distutils import log, dir_util
|
| 2 |
+
import os
|
| 3 |
+
|
| 4 |
+
from setuptools import Command
|
| 5 |
+
from setuptools import namespaces
|
| 6 |
+
from setuptools.archive_util import unpack_archive
|
| 7 |
+
from .._path import ensure_directory
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class install_egg_info(namespaces.Installer, Command):
    """Install an .egg-info directory for the package"""

    description = "Install an .egg-info directory for the package"

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        self.install_dir = None

    def finalize_options(self):
        # Default install_dir to install_lib's; derive the target path from
        # the egg_info command's computed egg basename.
        self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
        ei_cmd = self.get_finalized_command("egg_info")
        basename = f"{ei_cmd._get_egg_basename()}.egg-info"
        self.source = ei_cmd.egg_info
        self.target = os.path.join(self.install_dir, basename)
        self.outputs = []

    def run(self):
        """Replace any existing target .egg-info, then copy the fresh one."""
        self.run_command('egg_info')
        # Remove a pre-existing target: whole tree if it is a real
        # directory, single unlink otherwise (file or symlink).
        if os.path.isdir(self.target) and not os.path.islink(self.target):
            dir_util.remove_tree(self.target, dry_run=self.dry_run)
        elif os.path.exists(self.target):
            self.execute(os.unlink, (self.target,), "Removing " + self.target)
        if not self.dry_run:
            ensure_directory(self.target)
        self.execute(self.copytree, (), "Copying %s to %s" % (self.source, self.target))
        self.install_namespaces()

    def get_outputs(self):
        # Paths recorded by copytree's skimmer callback.
        return self.outputs

    def copytree(self):
        # Copy the .egg-info tree to site-packages
        def skimmer(src, dst):
            # filter out source-control directories; note that 'src' is always
            # a '/'-separated path, regardless of platform. 'dst' is a
            # platform-specific path.
            for skip in '.svn/', 'CVS/':
                if src.startswith(skip) or '/' + skip in src:
                    return None
            self.outputs.append(dst)
            log.debug("Copying %s to %s", src, dst)
            return dst

        unpack_archive(self.source, self.target, skimmer)
|
.venv/Lib/site-packages/setuptools/command/install_lib.py
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
from itertools import product, starmap
|
| 4 |
+
import distutils.command.install_lib as orig
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class install_lib(orig.install_lib):
    """Don't add compiled flags to filenames of non-Python files"""

    def run(self):
        """Build, install, and byte-compile the installed files."""
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)

    def get_exclusions(self):
        """
        Return a collections.Sized collections.Container of paths to be
        excluded for single_version_externally_managed installations.
        """
        # Every namespace package plus each of its ancestors...
        all_packages = (
            pkg
            for ns_pkg in self._get_SVEM_NSPs()
            for pkg in self._all_packages(ns_pkg)
        )

        # ...crossed with every bytecode/__init__ filename to exclude.
        excl_specs = product(all_packages, self._gen_exclusion_paths())
        return set(starmap(self._exclude_pkg_path, excl_specs))

    def _exclude_pkg_path(self, pkg, exclusion_path):
        """
        Given a package name and exclusion path within that package,
        compute the full exclusion path.
        """
        parts = pkg.split('.') + [exclusion_path]
        return os.path.join(self.install_dir, *parts)

    @staticmethod
    def _all_packages(pkg_name):
        """
        Yield the dotted package name and each of its ancestors.

        >>> list(install_lib._all_packages('foo.bar.baz'))
        ['foo.bar.baz', 'foo.bar', 'foo']
        """
        while pkg_name:
            yield pkg_name
            pkg_name, sep, child = pkg_name.rpartition('.')

    def _get_SVEM_NSPs(self):
        """
        Get namespace packages (list) but only for
        single_version_externally_managed installations and empty otherwise.
        """
        # TODO: is it necessary to short-circuit here? i.e. what's the cost
        # if get_finalized_command is called even when namespace_packages is
        # False?
        if not self.distribution.namespace_packages:
            return []

        install_cmd = self.get_finalized_command('install')
        svem = install_cmd.single_version_externally_managed

        return self.distribution.namespace_packages if svem else []

    @staticmethod
    def _gen_exclusion_paths():
        """
        Generate file paths to be excluded for namespace packages (bytecode
        cache files).
        """
        # always exclude the package module itself
        yield '__init__.py'

        yield '__init__.pyc'
        yield '__init__.pyo'

        if not hasattr(sys, 'implementation'):
            return

        base = os.path.join('__pycache__', '__init__.' + sys.implementation.cache_tag)
        yield base + '.pyc'
        yield base + '.pyo'
        yield base + '.opt-1.pyc'
        yield base + '.opt-2.pyc'

    def copy_tree(
        self,
        infile,
        outfile,
        preserve_mode=1,
        preserve_times=1,
        preserve_symlinks=0,
        level=1,
    ):
        # This override only supports the default preservation flags.
        assert preserve_mode and preserve_times and not preserve_symlinks
        exclude = self.get_exclusions()

        if not exclude:
            return orig.install_lib.copy_tree(self, infile, outfile)

        # Exclude namespace package __init__.py* files from the output

        from setuptools.archive_util import unpack_directory
        from distutils import log

        outfiles = []

        def pf(src, dst):
            # Progress filter: skip excluded destinations, record the rest.
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)", dst)
                return False

            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst

        unpack_directory(infile, outfile, pf)
        return outfiles

    def get_outputs(self):
        # Parent's outputs minus any excluded namespace-package files.
        outputs = orig.install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs
|
.venv/Lib/site-packages/setuptools/command/install_scripts.py
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from distutils import log
|
| 2 |
+
import distutils.command.install_scripts as orig
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
|
| 6 |
+
from .._path import ensure_directory
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class install_scripts(orig.install_scripts):
    """Do normal script install, plus any egg_info wrapper scripts"""

    def initialize_options(self):
        orig.install_scripts.initialize_options(self)
        # When true, entry-point wrapper scripts are not written (used when
        # installing into an .egg file).
        self.no_ep = False

    def run(self):
        """Install declared scripts, then entry-point wrapper scripts."""
        self.run_command("egg_info")
        if self.distribution.scripts:
            orig.install_scripts.run(self)  # run first to set up self.outfiles
        else:
            self.outfiles = []
        if self.no_ep:
            # don't install entry point scripts into .egg file!
            return
        self._install_ep_scripts()

    def _install_ep_scripts(self):
        """Generate and write wrapper scripts for the entry points."""
        # Delay import side-effects
        from pkg_resources import Distribution, PathMetadata
        from . import easy_install as ei

        ei_cmd = self.get_finalized_command("egg_info")
        dist = Distribution(
            ei_cmd.egg_base,
            PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
            ei_cmd.egg_name,
            ei_cmd.egg_version,
        )
        bs_cmd = self.get_finalized_command('build_scripts')
        exec_param = getattr(bs_cmd, 'executable', None)
        writer = ei.ScriptWriter
        if exec_param == sys.executable:
            # In case the path to the Python executable contains a space, wrap
            # it so it's not split up.
            exec_param = [exec_param]
        # resolve the writer to the environment
        writer = writer.best()
        cmd = writer.command_spec_class.best().from_param(exec_param)
        for args in writer.get_args(dist, cmd.as_header()):
            self.write_script(*args)

    def write_script(self, script_name, contents, mode="t", *ignored):
        """Write an executable file to the scripts directory"""
        from setuptools.command.easy_install import chmod, current_umask

        log.info("Installing %s script to %s", script_name, self.install_dir)
        target = os.path.join(self.install_dir, script_name)
        self.outfiles.append(target)

        mask = current_umask()
        if not self.dry_run:
            ensure_directory(target)
            # Use a context manager so the handle is closed even if the
            # write raises (the previous open/write/close leaked on error).
            with open(target, "w" + mode) as f:
                f.write(contents)
            chmod(target, 0o777 - mask)
|
.venv/Lib/site-packages/setuptools/command/launcher manifest.xml
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
| 2 |
+
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
|
| 3 |
+
<assemblyIdentity version="1.0.0.0"
|
| 4 |
+
processorArchitecture="X86"
|
| 5 |
+
name="%(name)s"
|
| 6 |
+
type="win32"/>
|
| 7 |
+
<!-- Identify the application security requirements. -->
|
| 8 |
+
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
|
| 9 |
+
<security>
|
| 10 |
+
<requestedPrivileges>
|
| 11 |
+
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
|
| 12 |
+
</requestedPrivileges>
|
| 13 |
+
</security>
|
| 14 |
+
</trustInfo>
|
| 15 |
+
</assembly>
|
.venv/Lib/site-packages/setuptools/command/register.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from distutils import log
|
| 2 |
+
import distutils.command.register as orig
|
| 3 |
+
|
| 4 |
+
from setuptools.errors import RemovedCommandError
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class register(orig.register):
    """Formerly used to register packages on PyPI."""

    def run(self):
        """Always fail: registration was removed in favor of twine."""
        msg = (
            "The register command has been removed, use twine to upload "
            "instead (https://pypi.org/p/twine)"
        )
        self.announce("ERROR: " + msg, log.ERROR)
        raise RemovedCommandError(msg)
|
.venv/Lib/site-packages/setuptools/command/rotate.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from distutils.util import convert_path
|
| 2 |
+
from distutils import log
|
| 3 |
+
from distutils.errors import DistutilsOptionError
|
| 4 |
+
import os
|
| 5 |
+
import shutil
|
| 6 |
+
|
| 7 |
+
from setuptools import Command
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class rotate(Command):
    """Delete older distributions"""

    description = "delete older distributions, keeping N newest files"
    user_options = [
        ('match=', 'm', "patterns to match (required)"),
        ('dist-dir=', 'd', "directory where the distributions are"),
        ('keep=', 'k', "number of matching distributions to keep"),
    ]

    boolean_options = []

    def initialize_options(self):
        self.match = None
        self.dist_dir = None
        self.keep = None

    def finalize_options(self):
        """Validate --match/--keep and default --dist-dir from bdist."""
        if self.match is None:
            raise DistutilsOptionError(
                "Must specify one or more (comma-separated) match patterns "
                "(e.g. '.zip' or '.egg')"
            )
        if self.keep is None:
            raise DistutilsOptionError("Must specify number of files to keep")
        try:
            self.keep = int(self.keep)
        except ValueError as e:
            raise DistutilsOptionError("--keep must be an integer") from e
        if isinstance(self.match, str):
            # Split the comma-separated pattern list into individual paths.
            self.match = [convert_path(p.strip()) for p in self.match.split(',')]
        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))

    def run(self):
        """Delete all but the newest ``keep`` files for each pattern."""
        self.run_command("egg_info")
        from glob import glob

        for pattern in self.match:
            # Match distributions of this project only.
            pattern = self.distribution.get_name() + '*' + pattern
            files = glob(os.path.join(self.dist_dir, pattern))
            # Sort newest-first by modification time.
            files = [(os.path.getmtime(f), f) for f in files]
            files.sort()
            files.reverse()

            log.info("%d file(s) matching %s", len(files), pattern)
            files = files[self.keep :]
            for t, f in files:
                log.info("Deleting %s", f)
                if not self.dry_run:
                    if os.path.isdir(f):
                        shutil.rmtree(f)
                    else:
                        os.unlink(f)
|
.venv/Lib/site-packages/setuptools/command/saveopts.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from setuptools.command.setopt import edit_config, option_base
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
class saveopts(option_base):
    """Save command-line options to a file"""

    description = "save supplied options to setup.cfg or other config file"

    def run(self):
        """Persist every command-line-sourced option to the config file."""
        distribution = self.distribution
        collected = {}

        for command in distribution.command_options:
            if command == 'saveopts':
                # Never persist this command's own options.
                continue

            option_dict = distribution.get_option_dict(command)
            for option, (source, value) in option_dict.items():
                # Only keep options that actually came from the command line.
                if source == "command line":
                    collected.setdefault(command, {})[option] = value

        edit_config(self.filename, collected, self.dry_run)
|
.venv/Lib/site-packages/setuptools/command/sdist.py
ADDED
|
@@ -0,0 +1,215 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from distutils import log
|
| 2 |
+
import distutils.command.sdist as orig
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
import io
|
| 6 |
+
import contextlib
|
| 7 |
+
from itertools import chain
|
| 8 |
+
|
| 9 |
+
from .._importlib import metadata
|
| 10 |
+
from .build import _ORIGINAL_SUBCOMMANDS
|
| 11 |
+
|
| 12 |
+
_default_revctrl = list


def walk_revctrl(dirname=''):
    """Find all files under revision control"""
    # Each 'setuptools.file_finders' entry point yields the files that a
    # particular VCS plugin knows about under `dirname`.
    for ep in metadata.entry_points(group='setuptools.file_finders'):
        yield from ep.load()(dirname)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class sdist(orig.sdist):
    """Smart sdist that finds anything supported by revision control"""

    user_options = [
        ('formats=', None, "formats for source distribution (comma-separated list)"),
        (
            'keep-temp',
            'k',
            "keep the distribution tree around after creating " + "archive file(s)",
        ),
        (
            'dist-dir=',
            'd',
            "directory to put the source distribution archive(s) in " "[default: dist]",
        ),
        (
            'owner=',
            'u',
            "Owner name used when creating a tar file [default: current user]",
        ),
        (
            'group=',
            'g',
            "Group name used when creating a tar file [default: current group]",
        ),
    ]

    # Disable distutils' negative-option aliases; this command defines none.
    negative_opt = {}

    README_EXTENSIONS = ['', '.rst', '.txt', '.md']
    READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS)

    def run(self):
        """Build the file list via egg_info, run sub-commands and create
        the source archive(s)."""
        self.run_command('egg_info')
        ei_cmd = self.get_finalized_command('egg_info')
        # egg_info already computed the complete manifest (including files
        # reported by revision-control plugins); reuse it directly.
        self.filelist = ei_cmd.filelist
        self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
        self.check_readme()

        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        self.make_distribution()

        # Record the produced archives on the distribution object so other
        # commands can discover them; avoid duplicate entries.
        dist_files = getattr(self.distribution, 'dist_files', [])
        for file in self.archive_files:
            data = ('sdist', '', file)
            if data not in dist_files:
                dist_files.append(data)

    def initialize_options(self):
        orig.sdist.initialize_options(self)

        self._default_to_gztar()

    def _default_to_gztar(self):
        # only needed on Python prior to 3.6.
        if sys.version_info >= (3, 6, 0, 'beta', 1):
            return
        self.formats = ['gztar']

    def make_distribution(self):
        """
        Workaround for #516
        """
        with self._remove_os_link():
            orig.sdist.make_distribution(self)

    @staticmethod
    @contextlib.contextmanager
    def _remove_os_link():
        """
        In a context, remove and restore os.link if it exists
        """

        # Sentinel distinguishing "os.link absent" from any real value.
        class NoValue:
            pass

        orig_val = getattr(os, 'link', NoValue)
        try:
            del os.link
        except Exception:
            pass
        try:
            yield
        finally:
            if orig_val is not NoValue:
                setattr(os, 'link', orig_val)

    def add_defaults(self):
        super().add_defaults()
        self._add_defaults_build_sub_commands()

    def _add_defaults_optional(self):
        super()._add_defaults_optional()
        # pyproject.toml always belongs in the sdist when present.
        if os.path.isfile('pyproject.toml'):
            self.filelist.append('pyproject.toml')

    def _add_defaults_python(self):
        """getting python files"""
        if self.distribution.has_pure_modules():
            build_py = self.get_finalized_command('build_py')
            self.filelist.extend(build_py.get_source_files())
            self._add_data_files(self._safe_data_files(build_py))

    def _add_defaults_build_sub_commands(self):
        """Include source files reported by any custom build sub-commands."""
        build = self.get_finalized_command("build")
        missing_cmds = set(build.get_sub_commands()) - _ORIGINAL_SUBCOMMANDS
        # ^-- the original built-in sub-commands are already handled by default.
        cmds = (self.get_finalized_command(c) for c in missing_cmds)
        files = (c.get_source_files() for c in cmds if hasattr(c, "get_source_files"))
        self.filelist.extend(chain.from_iterable(files))

    def _safe_data_files(self, build_py):
        """
        Since the ``sdist`` class is also used to compute the MANIFEST
        (via :obj:`setuptools.command.egg_info.manifest_maker`),
        there might be recursion problems when trying to obtain the list of
        data_files and ``include_package_data=True`` (which in turn depends on
        the files included in the MANIFEST).

        To avoid that, ``manifest_maker`` should be able to overwrite this
        method and avoid recursive attempts to build/analyze the MANIFEST.
        """
        return build_py.data_files

    def _add_data_files(self, data_files):
        """
        Add data files as found in build_py.data_files.
        """
        self.filelist.extend(
            os.path.join(src_dir, name)
            for _, src_dir, _, filenames in data_files
            for name in filenames
        )

    def _add_defaults_data_files(self):
        try:
            super()._add_defaults_data_files()
        except TypeError:
            log.warn("data_files contains unexpected objects")

    def check_readme(self):
        """Warn (do not fail) when no README file is present."""
        for f in self.READMES:
            if os.path.exists(f):
                return
        else:
            self.warn(
                "standard file not found: should have one of " + ', '.join(self.READMES)
            )

    def make_release_tree(self, base_dir, files):
        orig.sdist.make_release_tree(self, base_dir, files)

        # Save any egg_info command line options used to create this sdist
        dest = os.path.join(base_dir, 'setup.cfg')
        if hasattr(os, 'link') and os.path.exists(dest):
            # unlink and re-copy, since it might be hard-linked, and
            # we don't want to change the source version
            os.unlink(dest)
            self.copy_file('setup.cfg', dest)

        self.get_finalized_command('egg_info').save_version_info(dest)

    def _manifest_is_not_generated(self):
        # check for special comment used in 2.7.1 and higher
        if not os.path.isfile(self.manifest):
            return False

        with io.open(self.manifest, 'rb') as fp:
            first_line = fp.readline()
            return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode()

    def read_manifest(self):
        """Read the manifest file (named by 'self.manifest') and use it to
        fill in 'self.filelist', the list of files to include in the source
        distribution.
        """
        log.info("reading manifest file '%s'", self.manifest)
        # Use a context manager so the file handle is closed even if a
        # decode error or filelist.append raises (the previous open()/close()
        # pair leaked the handle on any exception).
        with open(self.manifest, 'rb') as manifest:
            for line in manifest:
                # The manifest must contain UTF-8. See #303.
                try:
                    line = line.decode('UTF-8')
                except UnicodeDecodeError:
                    log.warn("%r not UTF-8 decodable -- skipping" % line)
                    continue
                # ignore comments and blank lines
                line = line.strip()
                if line.startswith('#') or not line:
                    continue
                self.filelist.append(line)
|
.venv/Lib/site-packages/setuptools/command/setopt.py
ADDED
|
@@ -0,0 +1,138 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from distutils.util import convert_path
|
| 2 |
+
from distutils import log
|
| 3 |
+
from distutils.errors import DistutilsOptionError
|
| 4 |
+
import distutils
|
| 5 |
+
import os
|
| 6 |
+
import configparser
|
| 7 |
+
|
| 8 |
+
from setuptools import Command
|
| 9 |
+
|
| 10 |
+
__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def config_file(kind="local"):
    """Get the filename of the distutils, local, global, or per-user config

    `kind` must be one of "local", "global", or "user"
    """
    if kind == 'local':
        return 'setup.cfg'
    if kind == 'global':
        # Site-wide config lives next to the distutils package itself.
        return os.path.join(os.path.dirname(distutils.__file__), 'distutils.cfg')
    if kind == 'user':
        # POSIX convention is a dot-file; other platforms drop the dot.
        dot = '.' if os.name == 'posix' else ''
        return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
    raise ValueError("config_file() type must be 'local', 'global', or 'user'", kind)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def edit_config(filename, settings, dry_run=False):
    """Edit a configuration file to include `settings`

    `settings` is a dictionary of dictionaries or ``None`` values, keyed by
    command/section name. A ``None`` value means to delete the entire section,
    while a dictionary lists settings to be changed or deleted in that section.
    A setting of ``None`` means to delete that setting.
    """
    log.debug("Reading configuration from %s", filename)
    opts = configparser.RawConfigParser()
    # Preserve option-name case exactly as written.
    opts.optionxform = lambda x: x
    opts.read([filename])

    for section, options in settings.items():
        if options is None:
            log.info("Deleting section [%s] from %s", section, filename)
            opts.remove_section(section)
            continue

        if not opts.has_section(section):
            log.debug("Adding new section [%s] to %s", section, filename)
            opts.add_section(section)

        for option, value in options.items():
            if value is None:
                log.debug("Deleting %s.%s from %s", section, option, filename)
                opts.remove_option(section, option)
                # Drop the section entirely once its last option is gone.
                if not opts.options(section):
                    log.info(
                        "Deleting empty [%s] section from %s", section, filename
                    )
                    opts.remove_section(section)
            else:
                log.debug(
                    "Setting %s.%s to %r in %s", section, option, value, filename
                )
                opts.set(section, option, value)

    log.info("Writing %s", filename)
    if dry_run:
        return
    with open(filename, 'w') as f:
        opts.write(f)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class option_base(Command):
    """Abstract base class for commands that mess with config files"""

    user_options = [
        ('global-config', 'g', "save options to the site-wide distutils.cfg file"),
        ('user-config', 'u', "save options to the current user's pydistutils.cfg file"),
        ('filename=', 'f', "configuration file to use (default=setup.cfg)"),
    ]

    boolean_options = [
        'global-config',
        'user-config',
    ]

    def initialize_options(self):
        # All targets start unset; finalize_options resolves exactly one.
        self.global_config = None
        self.user_config = None
        self.filename = None

    def finalize_options(self):
        # Collect every explicitly requested config file, defaulting to the
        # local setup.cfg when none was given; more than one is an error.
        targets = []
        if self.global_config:
            targets.append(config_file('global'))
        if self.user_config:
            targets.append(config_file('user'))
        if self.filename is not None:
            targets.append(self.filename)
        if not targets:
            targets.append(config_file('local'))
        if len(targets) > 1:
            raise DistutilsOptionError(
                "Must specify only one configuration file option", targets
            )
        (self.filename,) = targets
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
class setopt(option_base):
    """Save command-line options to a file"""

    description = "set an option in setup.cfg or another config file"

    user_options = [
        ('command=', 'c', 'command to set an option for'),
        ('option=', 'o', 'option to set'),
        ('set-value=', 's', 'value of the option'),
        ('remove', 'r', 'remove (unset) the value'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        super().initialize_options()
        self.command = None
        self.option = None
        self.set_value = None
        self.remove = None

    def finalize_options(self):
        super().finalize_options()
        if self.command is None or self.option is None:
            raise DistutilsOptionError("Must specify --command *and* --option")
        if self.set_value is None and not self.remove:
            raise DistutilsOptionError("Must specify --set-value or --remove")

    def run(self):
        # Command-line option names use dashes; config files use underscores.
        config_option = self.option.replace('-', '_')
        edit_config(
            self.filename,
            {self.command: {config_option: self.set_value}},
            self.dry_run,
        )
|