Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- llava/lib/python3.10/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-310.pyc +3 -0
- llava/lib/python3.10/site-packages/setuptools/config/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/setuptools/config/__pycache__/_apply_pyprojecttoml.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/setuptools/config/__pycache__/expand.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/setuptools/config/__pycache__/pyprojecttoml.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/setuptools/config/__pycache__/setupcfg.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/setuptools/config/_apply_pyprojecttoml.py +488 -0
- llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/NOTICE +438 -0
- llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/error_reporting.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/extra_validations.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_exceptions.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_validations.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/formats.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/error_reporting.py +336 -0
- llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/extra_validations.py +52 -0
- llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py +51 -0
- llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_validations.py +0 -0
- llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/formats.py +375 -0
- llava/lib/python3.10/site-packages/setuptools/config/expand.py +452 -0
- llava/lib/python3.10/site-packages/setuptools/tests/__init__.py +13 -0
- llava/lib/python3.10/site-packages/setuptools/tests/fixtures.py +157 -0
- llava/lib/python3.10/site-packages/setuptools/tests/mod_with_constant.py +1 -0
- llava/lib/python3.10/site-packages/setuptools/tests/namespaces.py +90 -0
- llava/lib/python3.10/site-packages/setuptools/tests/script-with-bom.py +1 -0
- llava/lib/python3.10/site-packages/setuptools/tests/server.py +86 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_archive_util.py +36 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_bdist_egg.py +73 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_build.py +33 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_build_meta.py +970 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_build_py.py +480 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_core_metadata.py +577 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_depends.py +15 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_develop.py +175 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_dist.py +278 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_dist_info.py +210 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_easy_install.py +1472 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_editable_install.py +1289 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_glob.py +45 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_logging.py +76 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_namespaces.py +138 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_packageindex.py +267 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_sandbox.py +134 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_sdist.py +972 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_setopt.py +40 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_shutil_wrapper.py +23 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_unicode_utils.py +10 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_warnings.py +106 -0
- llava/lib/python3.10/site-packages/setuptools/tests/test_wheel.py +714 -0
.gitattributes
CHANGED
|
@@ -1378,3 +1378,4 @@ minigpt2/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc
|
|
| 1378 |
minigpt2/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1379 |
minigpt2/lib/python3.10/site-packages/pillow.libs/libbrotlicommon-3ecfe81c.so.1 filter=lfs diff=lfs merge=lfs -text
|
| 1380 |
videochat2/lib/python3.10/site-packages/torch/lib/libtorch_python.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 1378 |
minigpt2/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1379 |
minigpt2/lib/python3.10/site-packages/pillow.libs/libbrotlicommon-3ecfe81c.so.1 filter=lfs diff=lfs merge=lfs -text
|
| 1380 |
videochat2/lib/python3.10/site-packages/torch/lib/libtorch_python.so filter=lfs diff=lfs merge=lfs -text
|
| 1381 |
+
llava/lib/python3.10/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
llava/lib/python3.10/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-310.pyc
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:a70c0b759cab68fd858f44b8f9de49ef062362cabd0ea44a15df585b507d32e9
|
| 3 |
+
size 100370
|
llava/lib/python3.10/site-packages/setuptools/config/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.61 kB). View file
|
|
|
llava/lib/python3.10/site-packages/setuptools/config/__pycache__/_apply_pyprojecttoml.cpython-310.pyc
ADDED
|
Binary file (16.9 kB). View file
|
|
|
llava/lib/python3.10/site-packages/setuptools/config/__pycache__/expand.cpython-310.pyc
ADDED
|
Binary file (18.2 kB). View file
|
|
|
llava/lib/python3.10/site-packages/setuptools/config/__pycache__/pyprojecttoml.cpython-310.pyc
ADDED
|
Binary file (16 kB). View file
|
|
|
llava/lib/python3.10/site-packages/setuptools/config/__pycache__/setupcfg.cpython-310.pyc
ADDED
|
Binary file (24.2 kB). View file
|
|
|
llava/lib/python3.10/site-packages/setuptools/config/_apply_pyprojecttoml.py
ADDED
|
@@ -0,0 +1,488 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Translation layer between pyproject config and setuptools distribution and
|
| 2 |
+
metadata objects.
|
| 3 |
+
|
| 4 |
+
The distribution and metadata objects are modeled after (an old version of)
|
| 5 |
+
core metadata, therefore configs in the format specified for ``pyproject.toml``
|
| 6 |
+
need to be processed before being applied.
|
| 7 |
+
|
| 8 |
+
**PRIVATE MODULE**: API reserved for setuptools internal usage only.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from __future__ import annotations
|
| 12 |
+
|
| 13 |
+
import logging
|
| 14 |
+
import os
|
| 15 |
+
from collections.abc import Mapping
|
| 16 |
+
from email.headerregistry import Address
|
| 17 |
+
from functools import partial, reduce
|
| 18 |
+
from inspect import cleandoc
|
| 19 |
+
from itertools import chain
|
| 20 |
+
from types import MappingProxyType
|
| 21 |
+
from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union
|
| 22 |
+
|
| 23 |
+
from .. import _static
|
| 24 |
+
from .._path import StrPath
|
| 25 |
+
from ..errors import RemovedConfigError
|
| 26 |
+
from ..extension import Extension
|
| 27 |
+
from ..warnings import SetuptoolsWarning
|
| 28 |
+
|
| 29 |
+
if TYPE_CHECKING:
|
| 30 |
+
from typing_extensions import TypeAlias
|
| 31 |
+
|
| 32 |
+
from setuptools._importlib import metadata
|
| 33 |
+
from setuptools.dist import Distribution
|
| 34 |
+
|
| 35 |
+
from distutils.dist import _OptionsList # Comes from typeshed
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
EMPTY: Mapping = MappingProxyType({}) # Immutable dict-like
|
| 39 |
+
_ProjectReadmeValue: TypeAlias = Union[str, dict[str, str]]
|
| 40 |
+
_Correspondence: TypeAlias = Callable[["Distribution", Any, Union[StrPath, None]], None]
|
| 41 |
+
_T = TypeVar("_T")
|
| 42 |
+
|
| 43 |
+
_logger = logging.getLogger(__name__)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def apply(dist: Distribution, config: dict, filename: StrPath) -> Distribution:
|
| 47 |
+
"""Apply configuration dict read with :func:`read_configuration`"""
|
| 48 |
+
|
| 49 |
+
if not config:
|
| 50 |
+
return dist # short-circuit unrelated pyproject.toml file
|
| 51 |
+
|
| 52 |
+
root_dir = os.path.dirname(filename) or "."
|
| 53 |
+
|
| 54 |
+
_apply_project_table(dist, config, root_dir)
|
| 55 |
+
_apply_tool_table(dist, config, filename)
|
| 56 |
+
|
| 57 |
+
current_directory = os.getcwd()
|
| 58 |
+
os.chdir(root_dir)
|
| 59 |
+
try:
|
| 60 |
+
dist._finalize_requires()
|
| 61 |
+
dist._finalize_license_files()
|
| 62 |
+
finally:
|
| 63 |
+
os.chdir(current_directory)
|
| 64 |
+
|
| 65 |
+
return dist
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def _apply_project_table(dist: Distribution, config: dict, root_dir: StrPath):
|
| 69 |
+
orig_config = config.get("project", {})
|
| 70 |
+
if not orig_config:
|
| 71 |
+
return # short-circuit
|
| 72 |
+
|
| 73 |
+
project_table = {k: _static.attempt_conversion(v) for k, v in orig_config.items()}
|
| 74 |
+
_handle_missing_dynamic(dist, project_table)
|
| 75 |
+
_unify_entry_points(project_table)
|
| 76 |
+
|
| 77 |
+
for field, value in project_table.items():
|
| 78 |
+
norm_key = json_compatible_key(field)
|
| 79 |
+
corresp = PYPROJECT_CORRESPONDENCE.get(norm_key, norm_key)
|
| 80 |
+
if callable(corresp):
|
| 81 |
+
corresp(dist, value, root_dir)
|
| 82 |
+
else:
|
| 83 |
+
_set_config(dist, corresp, value)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def _apply_tool_table(dist: Distribution, config: dict, filename: StrPath):
|
| 87 |
+
tool_table = config.get("tool", {}).get("setuptools", {})
|
| 88 |
+
if not tool_table:
|
| 89 |
+
return # short-circuit
|
| 90 |
+
|
| 91 |
+
for field, value in tool_table.items():
|
| 92 |
+
norm_key = json_compatible_key(field)
|
| 93 |
+
|
| 94 |
+
if norm_key in TOOL_TABLE_REMOVALS:
|
| 95 |
+
suggestion = cleandoc(TOOL_TABLE_REMOVALS[norm_key])
|
| 96 |
+
msg = f"""
|
| 97 |
+
The parameter `tool.setuptools.{field}` was long deprecated
|
| 98 |
+
and has been removed from `pyproject.toml`.
|
| 99 |
+
"""
|
| 100 |
+
raise RemovedConfigError("\n".join([cleandoc(msg), suggestion]))
|
| 101 |
+
|
| 102 |
+
norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key)
|
| 103 |
+
corresp = TOOL_TABLE_CORRESPONDENCE.get(norm_key, norm_key)
|
| 104 |
+
if callable(corresp):
|
| 105 |
+
corresp(dist, value)
|
| 106 |
+
else:
|
| 107 |
+
_set_config(dist, corresp, value)
|
| 108 |
+
|
| 109 |
+
_copy_command_options(config, dist, filename)
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def _handle_missing_dynamic(dist: Distribution, project_table: dict):
|
| 113 |
+
"""Be temporarily forgiving with ``dynamic`` fields not listed in ``dynamic``"""
|
| 114 |
+
dynamic = set(project_table.get("dynamic", []))
|
| 115 |
+
for field, getter in _PREVIOUSLY_DEFINED.items():
|
| 116 |
+
if not (field in project_table or field in dynamic):
|
| 117 |
+
value = getter(dist)
|
| 118 |
+
if value:
|
| 119 |
+
_MissingDynamic.emit(field=field, value=value)
|
| 120 |
+
project_table[field] = _RESET_PREVIOUSLY_DEFINED.get(field)
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
def json_compatible_key(key: str) -> str:
|
| 124 |
+
"""As defined in :pep:`566#json-compatible-metadata`"""
|
| 125 |
+
return key.lower().replace("-", "_")
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def _set_config(dist: Distribution, field: str, value: Any):
|
| 129 |
+
val = _PREPROCESS.get(field, _noop)(dist, value)
|
| 130 |
+
setter = getattr(dist.metadata, f"set_{field}", None)
|
| 131 |
+
if setter:
|
| 132 |
+
setter(val)
|
| 133 |
+
elif hasattr(dist.metadata, field) or field in SETUPTOOLS_PATCHES:
|
| 134 |
+
setattr(dist.metadata, field, val)
|
| 135 |
+
else:
|
| 136 |
+
setattr(dist, field, val)
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
_CONTENT_TYPES = {
|
| 140 |
+
".md": "text/markdown",
|
| 141 |
+
".rst": "text/x-rst",
|
| 142 |
+
".txt": "text/plain",
|
| 143 |
+
}
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def _guess_content_type(file: str) -> str | None:
|
| 147 |
+
_, ext = os.path.splitext(file.lower())
|
| 148 |
+
if not ext:
|
| 149 |
+
return None
|
| 150 |
+
|
| 151 |
+
if ext in _CONTENT_TYPES:
|
| 152 |
+
return _static.Str(_CONTENT_TYPES[ext])
|
| 153 |
+
|
| 154 |
+
valid = ", ".join(f"{k} ({v})" for k, v in _CONTENT_TYPES.items())
|
| 155 |
+
msg = f"only the following file extensions are recognized: {valid}."
|
| 156 |
+
raise ValueError(f"Undefined content type for {file}, {msg}")
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
def _long_description(
|
| 160 |
+
dist: Distribution, val: _ProjectReadmeValue, root_dir: StrPath | None
|
| 161 |
+
):
|
| 162 |
+
from setuptools.config import expand
|
| 163 |
+
|
| 164 |
+
file: str | tuple[()]
|
| 165 |
+
if isinstance(val, str):
|
| 166 |
+
file = val
|
| 167 |
+
text = expand.read_files(file, root_dir)
|
| 168 |
+
ctype = _guess_content_type(file)
|
| 169 |
+
else:
|
| 170 |
+
file = val.get("file") or ()
|
| 171 |
+
text = val.get("text") or expand.read_files(file, root_dir)
|
| 172 |
+
ctype = val["content-type"]
|
| 173 |
+
|
| 174 |
+
# XXX: Is it completely safe to assume static?
|
| 175 |
+
_set_config(dist, "long_description", _static.Str(text))
|
| 176 |
+
|
| 177 |
+
if ctype:
|
| 178 |
+
_set_config(dist, "long_description_content_type", _static.Str(ctype))
|
| 179 |
+
|
| 180 |
+
if file:
|
| 181 |
+
dist._referenced_files.add(file)
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
def _license(dist: Distribution, val: dict, root_dir: StrPath | None):
|
| 185 |
+
from setuptools.config import expand
|
| 186 |
+
|
| 187 |
+
if "file" in val:
|
| 188 |
+
# XXX: Is it completely safe to assume static?
|
| 189 |
+
value = expand.read_files([val["file"]], root_dir)
|
| 190 |
+
_set_config(dist, "license", _static.Str(value))
|
| 191 |
+
dist._referenced_files.add(val["file"])
|
| 192 |
+
else:
|
| 193 |
+
_set_config(dist, "license", _static.Str(val["text"]))
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def _people(dist: Distribution, val: list[dict], _root_dir: StrPath | None, kind: str):
|
| 197 |
+
field = []
|
| 198 |
+
email_field = []
|
| 199 |
+
for person in val:
|
| 200 |
+
if "name" not in person:
|
| 201 |
+
email_field.append(person["email"])
|
| 202 |
+
elif "email" not in person:
|
| 203 |
+
field.append(person["name"])
|
| 204 |
+
else:
|
| 205 |
+
addr = Address(display_name=person["name"], addr_spec=person["email"])
|
| 206 |
+
email_field.append(str(addr))
|
| 207 |
+
|
| 208 |
+
if field:
|
| 209 |
+
_set_config(dist, kind, _static.Str(", ".join(field)))
|
| 210 |
+
if email_field:
|
| 211 |
+
_set_config(dist, f"{kind}_email", _static.Str(", ".join(email_field)))
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def _project_urls(dist: Distribution, val: dict, _root_dir: StrPath | None):
|
| 215 |
+
_set_config(dist, "project_urls", val)
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
def _python_requires(dist: Distribution, val: str, _root_dir: StrPath | None):
|
| 219 |
+
_set_config(dist, "python_requires", _static.SpecifierSet(val))
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
def _dependencies(dist: Distribution, val: list, _root_dir: StrPath | None):
|
| 223 |
+
if getattr(dist, "install_requires", []):
|
| 224 |
+
msg = "`install_requires` overwritten in `pyproject.toml` (dependencies)"
|
| 225 |
+
SetuptoolsWarning.emit(msg)
|
| 226 |
+
dist.install_requires = val
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
def _optional_dependencies(dist: Distribution, val: dict, _root_dir: StrPath | None):
|
| 230 |
+
if getattr(dist, "extras_require", None):
|
| 231 |
+
msg = "`extras_require` overwritten in `pyproject.toml` (optional-dependencies)"
|
| 232 |
+
SetuptoolsWarning.emit(msg)
|
| 233 |
+
dist.extras_require = val
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
def _ext_modules(dist: Distribution, val: list[dict]) -> list[Extension]:
|
| 237 |
+
existing = dist.ext_modules or []
|
| 238 |
+
args = ({k.replace("-", "_"): v for k, v in x.items()} for x in val)
|
| 239 |
+
new = [Extension(**kw) for kw in args]
|
| 240 |
+
return [*existing, *new]
|
| 241 |
+
|
| 242 |
+
|
| 243 |
+
def _noop(_dist: Distribution, val: _T) -> _T:
|
| 244 |
+
return val
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
def _identity(val: _T) -> _T:
|
| 248 |
+
return val
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
def _unify_entry_points(project_table: dict):
|
| 252 |
+
project = project_table
|
| 253 |
+
given = project.pop("entry-points", project.pop("entry_points", {}))
|
| 254 |
+
entry_points = dict(given) # Avoid problems with static
|
| 255 |
+
renaming = {"scripts": "console_scripts", "gui_scripts": "gui_scripts"}
|
| 256 |
+
for key, value in list(project.items()): # eager to allow modifications
|
| 257 |
+
norm_key = json_compatible_key(key)
|
| 258 |
+
if norm_key in renaming:
|
| 259 |
+
# Don't skip even if value is empty (reason: reset missing `dynamic`)
|
| 260 |
+
entry_points[renaming[norm_key]] = project.pop(key)
|
| 261 |
+
|
| 262 |
+
if entry_points:
|
| 263 |
+
project["entry-points"] = {
|
| 264 |
+
name: [f"{k} = {v}" for k, v in group.items()]
|
| 265 |
+
for name, group in entry_points.items()
|
| 266 |
+
if group # now we can skip empty groups
|
| 267 |
+
}
|
| 268 |
+
# Sometimes this will set `project["entry-points"] = {}`, and that is
|
| 269 |
+
# intentional (for resetting configurations that are missing `dynamic`).
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def _copy_command_options(pyproject: dict, dist: Distribution, filename: StrPath):
|
| 273 |
+
tool_table = pyproject.get("tool", {})
|
| 274 |
+
cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {})
|
| 275 |
+
valid_options = _valid_command_options(cmdclass)
|
| 276 |
+
|
| 277 |
+
cmd_opts = dist.command_options
|
| 278 |
+
for cmd, config in pyproject.get("tool", {}).get("distutils", {}).items():
|
| 279 |
+
cmd = json_compatible_key(cmd)
|
| 280 |
+
valid = valid_options.get(cmd, set())
|
| 281 |
+
cmd_opts.setdefault(cmd, {})
|
| 282 |
+
for key, value in config.items():
|
| 283 |
+
key = json_compatible_key(key)
|
| 284 |
+
cmd_opts[cmd][key] = (str(filename), value)
|
| 285 |
+
if key not in valid:
|
| 286 |
+
# To avoid removing options that are specified dynamically we
|
| 287 |
+
# just log a warn...
|
| 288 |
+
_logger.warning(f"Command option {cmd}.{key} is not defined")
|
| 289 |
+
|
| 290 |
+
|
| 291 |
+
def _valid_command_options(cmdclass: Mapping = EMPTY) -> dict[str, set[str]]:
|
| 292 |
+
from setuptools.dist import Distribution
|
| 293 |
+
|
| 294 |
+
from .._importlib import metadata
|
| 295 |
+
|
| 296 |
+
valid_options = {"global": _normalise_cmd_options(Distribution.global_options)}
|
| 297 |
+
|
| 298 |
+
unloaded_entry_points = metadata.entry_points(group='distutils.commands')
|
| 299 |
+
loaded_entry_points = (_load_ep(ep) for ep in unloaded_entry_points)
|
| 300 |
+
entry_points = (ep for ep in loaded_entry_points if ep)
|
| 301 |
+
for cmd, cmd_class in chain(entry_points, cmdclass.items()):
|
| 302 |
+
opts = valid_options.get(cmd, set())
|
| 303 |
+
opts = opts | _normalise_cmd_options(getattr(cmd_class, "user_options", []))
|
| 304 |
+
valid_options[cmd] = opts
|
| 305 |
+
|
| 306 |
+
return valid_options
|
| 307 |
+
|
| 308 |
+
|
| 309 |
+
def _load_ep(ep: metadata.EntryPoint) -> tuple[str, type] | None:
|
| 310 |
+
if ep.value.startswith("wheel.bdist_wheel"):
|
| 311 |
+
# Ignore deprecated entrypoint from wheel and avoid warning pypa/wheel#631
|
| 312 |
+
# TODO: remove check when `bdist_wheel` has been fully removed from pypa/wheel
|
| 313 |
+
return None
|
| 314 |
+
|
| 315 |
+
# Ignore all the errors
|
| 316 |
+
try:
|
| 317 |
+
return (ep.name, ep.load())
|
| 318 |
+
except Exception as ex:
|
| 319 |
+
msg = f"{ex.__class__.__name__} while trying to load entry-point {ep.name}"
|
| 320 |
+
_logger.warning(f"{msg}: {ex}")
|
| 321 |
+
return None
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
def _normalise_cmd_option_key(name: str) -> str:
|
| 325 |
+
return json_compatible_key(name).strip("_=")
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
def _normalise_cmd_options(desc: _OptionsList) -> set[str]:
|
| 329 |
+
return {_normalise_cmd_option_key(fancy_option[0]) for fancy_option in desc}
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
def _get_previous_entrypoints(dist: Distribution) -> dict[str, list]:
|
| 333 |
+
ignore = ("console_scripts", "gui_scripts")
|
| 334 |
+
value = getattr(dist, "entry_points", None) or {}
|
| 335 |
+
return {k: v for k, v in value.items() if k not in ignore}
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
def _get_previous_scripts(dist: Distribution) -> list | None:
|
| 339 |
+
value = getattr(dist, "entry_points", None) or {}
|
| 340 |
+
return value.get("console_scripts")
|
| 341 |
+
|
| 342 |
+
|
| 343 |
+
def _get_previous_gui_scripts(dist: Distribution) -> list | None:
|
| 344 |
+
value = getattr(dist, "entry_points", None) or {}
|
| 345 |
+
return value.get("gui_scripts")
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
def _set_static_list_metadata(attr: str, dist: Distribution, val: list) -> None:
|
| 349 |
+
"""Apply distutils metadata validation but preserve "static" behaviour"""
|
| 350 |
+
meta = dist.metadata
|
| 351 |
+
setter, getter = getattr(meta, f"set_{attr}"), getattr(meta, f"get_{attr}")
|
| 352 |
+
setter(val)
|
| 353 |
+
setattr(meta, attr, _static.List(getter()))
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
def _attrgetter(attr):
|
| 357 |
+
"""
|
| 358 |
+
Similar to ``operator.attrgetter`` but returns None if ``attr`` is not found
|
| 359 |
+
>>> from types import SimpleNamespace
|
| 360 |
+
>>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13))
|
| 361 |
+
>>> _attrgetter("a")(obj)
|
| 362 |
+
42
|
| 363 |
+
>>> _attrgetter("b.c")(obj)
|
| 364 |
+
13
|
| 365 |
+
>>> _attrgetter("d")(obj) is None
|
| 366 |
+
True
|
| 367 |
+
"""
|
| 368 |
+
return partial(reduce, lambda acc, x: getattr(acc, x, None), attr.split("."))
|
| 369 |
+
|
| 370 |
+
|
| 371 |
+
def _some_attrgetter(*items):
|
| 372 |
+
"""
|
| 373 |
+
Return the first "truth-y" attribute or None
|
| 374 |
+
>>> from types import SimpleNamespace
|
| 375 |
+
>>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13))
|
| 376 |
+
>>> _some_attrgetter("d", "a", "b.c")(obj)
|
| 377 |
+
42
|
| 378 |
+
>>> _some_attrgetter("d", "e", "b.c", "a")(obj)
|
| 379 |
+
13
|
| 380 |
+
>>> _some_attrgetter("d", "e", "f")(obj) is None
|
| 381 |
+
True
|
| 382 |
+
"""
|
| 383 |
+
|
| 384 |
+
def _acessor(obj):
|
| 385 |
+
values = (_attrgetter(i)(obj) for i in items)
|
| 386 |
+
return next((i for i in values if i is not None), None)
|
| 387 |
+
|
| 388 |
+
return _acessor
|
| 389 |
+
|
| 390 |
+
|
| 391 |
+
PYPROJECT_CORRESPONDENCE: dict[str, _Correspondence] = {
|
| 392 |
+
"readme": _long_description,
|
| 393 |
+
"license": _license,
|
| 394 |
+
"authors": partial(_people, kind="author"),
|
| 395 |
+
"maintainers": partial(_people, kind="maintainer"),
|
| 396 |
+
"urls": _project_urls,
|
| 397 |
+
"dependencies": _dependencies,
|
| 398 |
+
"optional_dependencies": _optional_dependencies,
|
| 399 |
+
"requires_python": _python_requires,
|
| 400 |
+
}
|
| 401 |
+
|
| 402 |
+
TOOL_TABLE_RENAMES = {"script_files": "scripts"}
|
| 403 |
+
TOOL_TABLE_REMOVALS = {
|
| 404 |
+
"namespace_packages": """
|
| 405 |
+
Please migrate to implicit native namespaces instead.
|
| 406 |
+
See https://packaging.python.org/en/latest/guides/packaging-namespace-packages/.
|
| 407 |
+
""",
|
| 408 |
+
}
|
| 409 |
+
TOOL_TABLE_CORRESPONDENCE = {
|
| 410 |
+
# Fields with corresponding core metadata need to be marked as static:
|
| 411 |
+
"obsoletes": partial(_set_static_list_metadata, "obsoletes"),
|
| 412 |
+
"provides": partial(_set_static_list_metadata, "provides"),
|
| 413 |
+
"platforms": partial(_set_static_list_metadata, "platforms"),
|
| 414 |
+
}
|
| 415 |
+
|
| 416 |
+
SETUPTOOLS_PATCHES = {
|
| 417 |
+
"long_description_content_type",
|
| 418 |
+
"project_urls",
|
| 419 |
+
"provides_extras",
|
| 420 |
+
"license_file",
|
| 421 |
+
"license_files",
|
| 422 |
+
}
|
| 423 |
+
|
| 424 |
+
_PREPROCESS = {
|
| 425 |
+
"ext_modules": _ext_modules,
|
| 426 |
+
}
|
| 427 |
+
|
| 428 |
+
_PREVIOUSLY_DEFINED = {
|
| 429 |
+
"name": _attrgetter("metadata.name"),
|
| 430 |
+
"version": _attrgetter("metadata.version"),
|
| 431 |
+
"description": _attrgetter("metadata.description"),
|
| 432 |
+
"readme": _attrgetter("metadata.long_description"),
|
| 433 |
+
"requires-python": _some_attrgetter("python_requires", "metadata.python_requires"),
|
| 434 |
+
"license": _attrgetter("metadata.license"),
|
| 435 |
+
"authors": _some_attrgetter("metadata.author", "metadata.author_email"),
|
| 436 |
+
"maintainers": _some_attrgetter("metadata.maintainer", "metadata.maintainer_email"),
|
| 437 |
+
"keywords": _attrgetter("metadata.keywords"),
|
| 438 |
+
"classifiers": _attrgetter("metadata.classifiers"),
|
| 439 |
+
"urls": _attrgetter("metadata.project_urls"),
|
| 440 |
+
"entry-points": _get_previous_entrypoints,
|
| 441 |
+
"scripts": _get_previous_scripts,
|
| 442 |
+
"gui-scripts": _get_previous_gui_scripts,
|
| 443 |
+
"dependencies": _attrgetter("install_requires"),
|
| 444 |
+
"optional-dependencies": _attrgetter("extras_require"),
|
| 445 |
+
}
|
| 446 |
+
|
| 447 |
+
|
| 448 |
+
_RESET_PREVIOUSLY_DEFINED: dict = {
|
| 449 |
+
# Fix improper setting: given in `setup.py`, but not listed in `dynamic`
|
| 450 |
+
# dict: pyproject name => value to which reset
|
| 451 |
+
"license": _static.EMPTY_DICT,
|
| 452 |
+
"authors": _static.EMPTY_LIST,
|
| 453 |
+
"maintainers": _static.EMPTY_LIST,
|
| 454 |
+
"keywords": _static.EMPTY_LIST,
|
| 455 |
+
"classifiers": _static.EMPTY_LIST,
|
| 456 |
+
"urls": _static.EMPTY_DICT,
|
| 457 |
+
"entry-points": _static.EMPTY_DICT,
|
| 458 |
+
"scripts": _static.EMPTY_DICT,
|
| 459 |
+
"gui-scripts": _static.EMPTY_DICT,
|
| 460 |
+
"dependencies": _static.EMPTY_LIST,
|
| 461 |
+
"optional-dependencies": _static.EMPTY_DICT,
|
| 462 |
+
}
|
| 463 |
+
|
| 464 |
+
|
| 465 |
+
class _MissingDynamic(SetuptoolsWarning):
|
| 466 |
+
_SUMMARY = "`{field}` defined outside of `pyproject.toml` is ignored."
|
| 467 |
+
|
| 468 |
+
_DETAILS = """
|
| 469 |
+
The following seems to be defined outside of `pyproject.toml`:
|
| 470 |
+
|
| 471 |
+
`{field} = {value!r}`
|
| 472 |
+
|
| 473 |
+
According to the spec (see the link below), however, setuptools CANNOT
|
| 474 |
+
consider this value unless `{field}` is listed as `dynamic`.
|
| 475 |
+
|
| 476 |
+
https://packaging.python.org/en/latest/specifications/pyproject-toml/#declaring-project-metadata-the-project-table
|
| 477 |
+
|
| 478 |
+
To prevent this problem, you can list `{field}` under `dynamic` or alternatively
|
| 479 |
+
remove the `[project]` table from your file and rely entirely on other means of
|
| 480 |
+
configuration.
|
| 481 |
+
"""
|
| 482 |
+
# TODO: Consider removing this check in the future?
|
| 483 |
+
# There is a trade-off here between improving "debug-ability" and the cost
|
| 484 |
+
# of running/testing/maintaining these unnecessary checks...
|
| 485 |
+
|
| 486 |
+
@classmethod
|
| 487 |
+
def details(cls, field: str, value: Any) -> str:
|
| 488 |
+
return cls._DETAILS.format(field=field, value=value)
|
llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/NOTICE
ADDED
|
@@ -0,0 +1,438 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
The code contained in this directory was automatically generated using the
|
| 2 |
+
following command:
|
| 3 |
+
|
| 4 |
+
python -m validate_pyproject.pre_compile --output-dir=setuptools/config/_validate_pyproject --enable-plugins setuptools distutils --very-verbose -t distutils=setuptools/config/distutils.schema.json -t setuptools=setuptools/config/setuptools.schema.json
|
| 5 |
+
|
| 6 |
+
Please avoid changing it manually.
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
You can report issues or suggest changes directly to `validate-pyproject`
|
| 10 |
+
(or to the relevant plugin repository)
|
| 11 |
+
|
| 12 |
+
- https://github.com/abravalheri/validate-pyproject/issues
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
***
|
| 16 |
+
|
| 17 |
+
The following files include code from opensource projects
|
| 18 |
+
(either as direct copies or modified versions):
|
| 19 |
+
|
| 20 |
+
- `fastjsonschema_exceptions.py`:
|
| 21 |
+
- project: `fastjsonschema` - licensed under BSD-3-Clause
|
| 22 |
+
(https://github.com/horejsek/python-fastjsonschema)
|
| 23 |
+
- `extra_validations.py` and `format.py`, `error_reporting.py`:
|
| 24 |
+
- project: `validate-pyproject` - licensed under MPL-2.0
|
| 25 |
+
(https://github.com/abravalheri/validate-pyproject)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
Additionally the following files are automatically generated by tools provided
|
| 29 |
+
by the same projects:
|
| 30 |
+
|
| 31 |
+
- `__init__.py`
|
| 32 |
+
- `fastjsonschema_validations.py`
|
| 33 |
+
|
| 34 |
+
The relevant copyright notes and licenses are included below.
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
***
|
| 38 |
+
|
| 39 |
+
`fastjsonschema`
|
| 40 |
+
================
|
| 41 |
+
|
| 42 |
+
Copyright (c) 2018, Michal Horejsek
|
| 43 |
+
All rights reserved.
|
| 44 |
+
|
| 45 |
+
Redistribution and use in source and binary forms, with or without modification,
|
| 46 |
+
are permitted provided that the following conditions are met:
|
| 47 |
+
|
| 48 |
+
Redistributions of source code must retain the above copyright notice, this
|
| 49 |
+
list of conditions and the following disclaimer.
|
| 50 |
+
|
| 51 |
+
Redistributions in binary form must reproduce the above copyright notice, this
|
| 52 |
+
list of conditions and the following disclaimer in the documentation and/or
|
| 53 |
+
other materials provided with the distribution.
|
| 54 |
+
|
| 55 |
+
Neither the name of the {organization} nor the names of its
|
| 56 |
+
contributors may be used to endorse or promote products derived from
|
| 57 |
+
this software without specific prior written permission.
|
| 58 |
+
|
| 59 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
| 60 |
+
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
| 61 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 62 |
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
| 63 |
+
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
| 64 |
+
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
| 65 |
+
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
| 66 |
+
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 67 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
| 68 |
+
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
***
|
| 73 |
+
|
| 74 |
+
`validate-pyproject`
|
| 75 |
+
====================
|
| 76 |
+
|
| 77 |
+
Mozilla Public License, version 2.0
|
| 78 |
+
|
| 79 |
+
1. Definitions
|
| 80 |
+
|
| 81 |
+
1.1. "Contributor"
|
| 82 |
+
|
| 83 |
+
means each individual or legal entity that creates, contributes to the
|
| 84 |
+
creation of, or owns Covered Software.
|
| 85 |
+
|
| 86 |
+
1.2. "Contributor Version"
|
| 87 |
+
|
| 88 |
+
means the combination of the Contributions of others (if any) used by a
|
| 89 |
+
Contributor and that particular Contributor's Contribution.
|
| 90 |
+
|
| 91 |
+
1.3. "Contribution"
|
| 92 |
+
|
| 93 |
+
means Covered Software of a particular Contributor.
|
| 94 |
+
|
| 95 |
+
1.4. "Covered Software"
|
| 96 |
+
|
| 97 |
+
means Source Code Form to which the initial Contributor has attached the
|
| 98 |
+
notice in Exhibit A, the Executable Form of such Source Code Form, and
|
| 99 |
+
Modifications of such Source Code Form, in each case including portions
|
| 100 |
+
thereof.
|
| 101 |
+
|
| 102 |
+
1.5. "Incompatible With Secondary Licenses"
|
| 103 |
+
means
|
| 104 |
+
|
| 105 |
+
a. that the initial Contributor has attached the notice described in
|
| 106 |
+
Exhibit B to the Covered Software; or
|
| 107 |
+
|
| 108 |
+
b. that the Covered Software was made available under the terms of
|
| 109 |
+
version 1.1 or earlier of the License, but not also under the terms of
|
| 110 |
+
a Secondary License.
|
| 111 |
+
|
| 112 |
+
1.6. "Executable Form"
|
| 113 |
+
|
| 114 |
+
means any form of the work other than Source Code Form.
|
| 115 |
+
|
| 116 |
+
1.7. "Larger Work"
|
| 117 |
+
|
| 118 |
+
means a work that combines Covered Software with other material, in a
|
| 119 |
+
separate file or files, that is not Covered Software.
|
| 120 |
+
|
| 121 |
+
1.8. "License"
|
| 122 |
+
|
| 123 |
+
means this document.
|
| 124 |
+
|
| 125 |
+
1.9. "Licensable"
|
| 126 |
+
|
| 127 |
+
means having the right to grant, to the maximum extent possible, whether
|
| 128 |
+
at the time of the initial grant or subsequently, any and all of the
|
| 129 |
+
rights conveyed by this License.
|
| 130 |
+
|
| 131 |
+
1.10. "Modifications"
|
| 132 |
+
|
| 133 |
+
means any of the following:
|
| 134 |
+
|
| 135 |
+
a. any file in Source Code Form that results from an addition to,
|
| 136 |
+
deletion from, or modification of the contents of Covered Software; or
|
| 137 |
+
|
| 138 |
+
b. any new file in Source Code Form that contains any Covered Software.
|
| 139 |
+
|
| 140 |
+
1.11. "Patent Claims" of a Contributor
|
| 141 |
+
|
| 142 |
+
means any patent claim(s), including without limitation, method,
|
| 143 |
+
process, and apparatus claims, in any patent Licensable by such
|
| 144 |
+
Contributor that would be infringed, but for the grant of the License,
|
| 145 |
+
by the making, using, selling, offering for sale, having made, import,
|
| 146 |
+
or transfer of either its Contributions or its Contributor Version.
|
| 147 |
+
|
| 148 |
+
1.12. "Secondary License"
|
| 149 |
+
|
| 150 |
+
means either the GNU General Public License, Version 2.0, the GNU Lesser
|
| 151 |
+
General Public License, Version 2.1, the GNU Affero General Public
|
| 152 |
+
License, Version 3.0, or any later versions of those licenses.
|
| 153 |
+
|
| 154 |
+
1.13. "Source Code Form"
|
| 155 |
+
|
| 156 |
+
means the form of the work preferred for making modifications.
|
| 157 |
+
|
| 158 |
+
1.14. "You" (or "Your")
|
| 159 |
+
|
| 160 |
+
means an individual or a legal entity exercising rights under this
|
| 161 |
+
License. For legal entities, "You" includes any entity that controls, is
|
| 162 |
+
controlled by, or is under common control with You. For purposes of this
|
| 163 |
+
definition, "control" means (a) the power, direct or indirect, to cause
|
| 164 |
+
the direction or management of such entity, whether by contract or
|
| 165 |
+
otherwise, or (b) ownership of more than fifty percent (50%) of the
|
| 166 |
+
outstanding shares or beneficial ownership of such entity.
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
2. License Grants and Conditions
|
| 170 |
+
|
| 171 |
+
2.1. Grants
|
| 172 |
+
|
| 173 |
+
Each Contributor hereby grants You a world-wide, royalty-free,
|
| 174 |
+
non-exclusive license:
|
| 175 |
+
|
| 176 |
+
a. under intellectual property rights (other than patent or trademark)
|
| 177 |
+
Licensable by such Contributor to use, reproduce, make available,
|
| 178 |
+
modify, display, perform, distribute, and otherwise exploit its
|
| 179 |
+
Contributions, either on an unmodified basis, with Modifications, or
|
| 180 |
+
as part of a Larger Work; and
|
| 181 |
+
|
| 182 |
+
b. under Patent Claims of such Contributor to make, use, sell, offer for
|
| 183 |
+
sale, have made, import, and otherwise transfer either its
|
| 184 |
+
Contributions or its Contributor Version.
|
| 185 |
+
|
| 186 |
+
2.2. Effective Date
|
| 187 |
+
|
| 188 |
+
The licenses granted in Section 2.1 with respect to any Contribution
|
| 189 |
+
become effective for each Contribution on the date the Contributor first
|
| 190 |
+
distributes such Contribution.
|
| 191 |
+
|
| 192 |
+
2.3. Limitations on Grant Scope
|
| 193 |
+
|
| 194 |
+
The licenses granted in this Section 2 are the only rights granted under
|
| 195 |
+
this License. No additional rights or licenses will be implied from the
|
| 196 |
+
distribution or licensing of Covered Software under this License.
|
| 197 |
+
Notwithstanding Section 2.1(b) above, no patent license is granted by a
|
| 198 |
+
Contributor:
|
| 199 |
+
|
| 200 |
+
a. for any code that a Contributor has removed from Covered Software; or
|
| 201 |
+
|
| 202 |
+
b. for infringements caused by: (i) Your and any other third party's
|
| 203 |
+
modifications of Covered Software, or (ii) the combination of its
|
| 204 |
+
Contributions with other software (except as part of its Contributor
|
| 205 |
+
Version); or
|
| 206 |
+
|
| 207 |
+
c. under Patent Claims infringed by Covered Software in the absence of
|
| 208 |
+
its Contributions.
|
| 209 |
+
|
| 210 |
+
This License does not grant any rights in the trademarks, service marks,
|
| 211 |
+
or logos of any Contributor (except as may be necessary to comply with
|
| 212 |
+
the notice requirements in Section 3.4).
|
| 213 |
+
|
| 214 |
+
2.4. Subsequent Licenses
|
| 215 |
+
|
| 216 |
+
No Contributor makes additional grants as a result of Your choice to
|
| 217 |
+
distribute the Covered Software under a subsequent version of this
|
| 218 |
+
License (see Section 10.2) or under the terms of a Secondary License (if
|
| 219 |
+
permitted under the terms of Section 3.3).
|
| 220 |
+
|
| 221 |
+
2.5. Representation
|
| 222 |
+
|
| 223 |
+
Each Contributor represents that the Contributor believes its
|
| 224 |
+
Contributions are its original creation(s) or it has sufficient rights to
|
| 225 |
+
grant the rights to its Contributions conveyed by this License.
|
| 226 |
+
|
| 227 |
+
2.6. Fair Use
|
| 228 |
+
|
| 229 |
+
This License is not intended to limit any rights You have under
|
| 230 |
+
applicable copyright doctrines of fair use, fair dealing, or other
|
| 231 |
+
equivalents.
|
| 232 |
+
|
| 233 |
+
2.7. Conditions
|
| 234 |
+
|
| 235 |
+
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
|
| 236 |
+
Section 2.1.
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
3. Responsibilities
|
| 240 |
+
|
| 241 |
+
3.1. Distribution of Source Form
|
| 242 |
+
|
| 243 |
+
All distribution of Covered Software in Source Code Form, including any
|
| 244 |
+
Modifications that You create or to which You contribute, must be under
|
| 245 |
+
the terms of this License. You must inform recipients that the Source
|
| 246 |
+
Code Form of the Covered Software is governed by the terms of this
|
| 247 |
+
License, and how they can obtain a copy of this License. You may not
|
| 248 |
+
attempt to alter or restrict the recipients' rights in the Source Code
|
| 249 |
+
Form.
|
| 250 |
+
|
| 251 |
+
3.2. Distribution of Executable Form
|
| 252 |
+
|
| 253 |
+
If You distribute Covered Software in Executable Form then:
|
| 254 |
+
|
| 255 |
+
a. such Covered Software must also be made available in Source Code Form,
|
| 256 |
+
as described in Section 3.1, and You must inform recipients of the
|
| 257 |
+
Executable Form how they can obtain a copy of such Source Code Form by
|
| 258 |
+
reasonable means in a timely manner, at a charge no more than the cost
|
| 259 |
+
of distribution to the recipient; and
|
| 260 |
+
|
| 261 |
+
b. You may distribute such Executable Form under the terms of this
|
| 262 |
+
License, or sublicense it under different terms, provided that the
|
| 263 |
+
license for the Executable Form does not attempt to limit or alter the
|
| 264 |
+
recipients' rights in the Source Code Form under this License.
|
| 265 |
+
|
| 266 |
+
3.3. Distribution of a Larger Work
|
| 267 |
+
|
| 268 |
+
You may create and distribute a Larger Work under terms of Your choice,
|
| 269 |
+
provided that You also comply with the requirements of this License for
|
| 270 |
+
the Covered Software. If the Larger Work is a combination of Covered
|
| 271 |
+
Software with a work governed by one or more Secondary Licenses, and the
|
| 272 |
+
Covered Software is not Incompatible With Secondary Licenses, this
|
| 273 |
+
License permits You to additionally distribute such Covered Software
|
| 274 |
+
under the terms of such Secondary License(s), so that the recipient of
|
| 275 |
+
the Larger Work may, at their option, further distribute the Covered
|
| 276 |
+
Software under the terms of either this License or such Secondary
|
| 277 |
+
License(s).
|
| 278 |
+
|
| 279 |
+
3.4. Notices
|
| 280 |
+
|
| 281 |
+
You may not remove or alter the substance of any license notices
|
| 282 |
+
(including copyright notices, patent notices, disclaimers of warranty, or
|
| 283 |
+
limitations of liability) contained within the Source Code Form of the
|
| 284 |
+
Covered Software, except that You may alter any license notices to the
|
| 285 |
+
extent required to remedy known factual inaccuracies.
|
| 286 |
+
|
| 287 |
+
3.5. Application of Additional Terms
|
| 288 |
+
|
| 289 |
+
You may choose to offer, and to charge a fee for, warranty, support,
|
| 290 |
+
indemnity or liability obligations to one or more recipients of Covered
|
| 291 |
+
Software. However, You may do so only on Your own behalf, and not on
|
| 292 |
+
behalf of any Contributor. You must make it absolutely clear that any
|
| 293 |
+
such warranty, support, indemnity, or liability obligation is offered by
|
| 294 |
+
You alone, and You hereby agree to indemnify every Contributor for any
|
| 295 |
+
liability incurred by such Contributor as a result of warranty, support,
|
| 296 |
+
indemnity or liability terms You offer. You may include additional
|
| 297 |
+
disclaimers of warranty and limitations of liability specific to any
|
| 298 |
+
jurisdiction.
|
| 299 |
+
|
| 300 |
+
4. Inability to Comply Due to Statute or Regulation
|
| 301 |
+
|
| 302 |
+
If it is impossible for You to comply with any of the terms of this License
|
| 303 |
+
with respect to some or all of the Covered Software due to statute,
|
| 304 |
+
judicial order, or regulation then You must: (a) comply with the terms of
|
| 305 |
+
this License to the maximum extent possible; and (b) describe the
|
| 306 |
+
limitations and the code they affect. Such description must be placed in a
|
| 307 |
+
text file included with all distributions of the Covered Software under
|
| 308 |
+
this License. Except to the extent prohibited by statute or regulation,
|
| 309 |
+
such description must be sufficiently detailed for a recipient of ordinary
|
| 310 |
+
skill to be able to understand it.
|
| 311 |
+
|
| 312 |
+
5. Termination
|
| 313 |
+
|
| 314 |
+
5.1. The rights granted under this License will terminate automatically if You
|
| 315 |
+
fail to comply with any of its terms. However, if You become compliant,
|
| 316 |
+
then the rights granted under this License from a particular Contributor
|
| 317 |
+
are reinstated (a) provisionally, unless and until such Contributor
|
| 318 |
+
explicitly and finally terminates Your grants, and (b) on an ongoing
|
| 319 |
+
basis, if such Contributor fails to notify You of the non-compliance by
|
| 320 |
+
some reasonable means prior to 60 days after You have come back into
|
| 321 |
+
compliance. Moreover, Your grants from a particular Contributor are
|
| 322 |
+
reinstated on an ongoing basis if such Contributor notifies You of the
|
| 323 |
+
non-compliance by some reasonable means, this is the first time You have
|
| 324 |
+
received notice of non-compliance with this License from such
|
| 325 |
+
Contributor, and You become compliant prior to 30 days after Your receipt
|
| 326 |
+
of the notice.
|
| 327 |
+
|
| 328 |
+
5.2. If You initiate litigation against any entity by asserting a patent
|
| 329 |
+
infringement claim (excluding declaratory judgment actions,
|
| 330 |
+
counter-claims, and cross-claims) alleging that a Contributor Version
|
| 331 |
+
directly or indirectly infringes any patent, then the rights granted to
|
| 332 |
+
You by any and all Contributors for the Covered Software under Section
|
| 333 |
+
2.1 of this License shall terminate.
|
| 334 |
+
|
| 335 |
+
5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
|
| 336 |
+
license agreements (excluding distributors and resellers) which have been
|
| 337 |
+
validly granted by You or Your distributors under this License prior to
|
| 338 |
+
termination shall survive termination.
|
| 339 |
+
|
| 340 |
+
6. Disclaimer of Warranty
|
| 341 |
+
|
| 342 |
+
Covered Software is provided under this License on an "as is" basis,
|
| 343 |
+
without warranty of any kind, either expressed, implied, or statutory,
|
| 344 |
+
including, without limitation, warranties that the Covered Software is free
|
| 345 |
+
of defects, merchantable, fit for a particular purpose or non-infringing.
|
| 346 |
+
The entire risk as to the quality and performance of the Covered Software
|
| 347 |
+
is with You. Should any Covered Software prove defective in any respect,
|
| 348 |
+
You (not any Contributor) assume the cost of any necessary servicing,
|
| 349 |
+
repair, or correction. This disclaimer of warranty constitutes an essential
|
| 350 |
+
part of this License. No use of any Covered Software is authorized under
|
| 351 |
+
this License except under this disclaimer.
|
| 352 |
+
|
| 353 |
+
7. Limitation of Liability
|
| 354 |
+
|
| 355 |
+
Under no circumstances and under no legal theory, whether tort (including
|
| 356 |
+
negligence), contract, or otherwise, shall any Contributor, or anyone who
|
| 357 |
+
distributes Covered Software as permitted above, be liable to You for any
|
| 358 |
+
direct, indirect, special, incidental, or consequential damages of any
|
| 359 |
+
character including, without limitation, damages for lost profits, loss of
|
| 360 |
+
goodwill, work stoppage, computer failure or malfunction, or any and all
|
| 361 |
+
other commercial damages or losses, even if such party shall have been
|
| 362 |
+
informed of the possibility of such damages. This limitation of liability
|
| 363 |
+
shall not apply to liability for death or personal injury resulting from
|
| 364 |
+
such party's negligence to the extent applicable law prohibits such
|
| 365 |
+
limitation. Some jurisdictions do not allow the exclusion or limitation of
|
| 366 |
+
incidental or consequential damages, so this exclusion and limitation may
|
| 367 |
+
not apply to You.
|
| 368 |
+
|
| 369 |
+
8. Litigation
|
| 370 |
+
|
| 371 |
+
Any litigation relating to this License may be brought only in the courts
|
| 372 |
+
of a jurisdiction where the defendant maintains its principal place of
|
| 373 |
+
business and such litigation shall be governed by laws of that
|
| 374 |
+
jurisdiction, without reference to its conflict-of-law provisions. Nothing
|
| 375 |
+
in this Section shall prevent a party's ability to bring cross-claims or
|
| 376 |
+
counter-claims.
|
| 377 |
+
|
| 378 |
+
9. Miscellaneous
|
| 379 |
+
|
| 380 |
+
This License represents the complete agreement concerning the subject
|
| 381 |
+
matter hereof. If any provision of this License is held to be
|
| 382 |
+
unenforceable, such provision shall be reformed only to the extent
|
| 383 |
+
necessary to make it enforceable. Any law or regulation which provides that
|
| 384 |
+
the language of a contract shall be construed against the drafter shall not
|
| 385 |
+
be used to construe this License against a Contributor.
|
| 386 |
+
|
| 387 |
+
|
| 388 |
+
10. Versions of the License
|
| 389 |
+
|
| 390 |
+
10.1. New Versions
|
| 391 |
+
|
| 392 |
+
Mozilla Foundation is the license steward. Except as provided in Section
|
| 393 |
+
10.3, no one other than the license steward has the right to modify or
|
| 394 |
+
publish new versions of this License. Each version will be given a
|
| 395 |
+
distinguishing version number.
|
| 396 |
+
|
| 397 |
+
10.2. Effect of New Versions
|
| 398 |
+
|
| 399 |
+
You may distribute the Covered Software under the terms of the version
|
| 400 |
+
of the License under which You originally received the Covered Software,
|
| 401 |
+
or under the terms of any subsequent version published by the license
|
| 402 |
+
steward.
|
| 403 |
+
|
| 404 |
+
10.3. Modified Versions
|
| 405 |
+
|
| 406 |
+
If you create software not governed by this License, and you want to
|
| 407 |
+
create a new license for such software, you may create and use a
|
| 408 |
+
modified version of this License if you rename the license and remove
|
| 409 |
+
any references to the name of the license steward (except to note that
|
| 410 |
+
such modified license differs from this License).
|
| 411 |
+
|
| 412 |
+
10.4. Distributing Source Code Form that is Incompatible With Secondary
|
| 413 |
+
Licenses If You choose to distribute Source Code Form that is
|
| 414 |
+
Incompatible With Secondary Licenses under the terms of this version of
|
| 415 |
+
the License, the notice described in Exhibit B of this License must be
|
| 416 |
+
attached.
|
| 417 |
+
|
| 418 |
+
Exhibit A - Source Code Form License Notice
|
| 419 |
+
|
| 420 |
+
This Source Code Form is subject to the
|
| 421 |
+
terms of the Mozilla Public License, v.
|
| 422 |
+
2.0. If a copy of the MPL was not
|
| 423 |
+
distributed with this file, You can
|
| 424 |
+
obtain one at
|
| 425 |
+
https://mozilla.org/MPL/2.0/.
|
| 426 |
+
|
| 427 |
+
If it is not possible or desirable to put the notice in a particular file,
|
| 428 |
+
then You may include the notice in a location (such as a LICENSE file in a
|
| 429 |
+
relevant directory) where a recipient would be likely to look for such a
|
| 430 |
+
notice.
|
| 431 |
+
|
| 432 |
+
You may add additional accurate notices of copyright ownership.
|
| 433 |
+
|
| 434 |
+
Exhibit B - "Incompatible With Secondary Licenses" Notice
|
| 435 |
+
|
| 436 |
+
This Source Code Form is "Incompatible
|
| 437 |
+
With Secondary Licenses", as defined by
|
| 438 |
+
the Mozilla Public License, v. 2.0.
|
llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.49 kB). View file
|
|
|
llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/error_reporting.cpython-310.pyc
ADDED
|
Binary file (12 kB). View file
|
|
|
llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/extra_validations.cpython-310.pyc
ADDED
|
Binary file (1.58 kB). View file
|
|
|
llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_exceptions.cpython-310.pyc
ADDED
|
Binary file (2.42 kB). View file
|
|
|
llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_validations.cpython-310.pyc
ADDED
|
Binary file (85.8 kB). View file
|
|
|
llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/formats.cpython-310.pyc
ADDED
|
Binary file (12.5 kB). View file
|
|
|
llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/error_reporting.py
ADDED
|
@@ -0,0 +1,336 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import json
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
import typing
|
| 7 |
+
from contextlib import contextmanager
|
| 8 |
+
from textwrap import indent, wrap
|
| 9 |
+
from typing import Any, Dict, Generator, Iterator, List, Optional, Sequence, Union
|
| 10 |
+
|
| 11 |
+
from .fastjsonschema_exceptions import JsonSchemaValueException
|
| 12 |
+
|
| 13 |
+
if typing.TYPE_CHECKING:
|
| 14 |
+
import sys
|
| 15 |
+
|
| 16 |
+
if sys.version_info < (3, 11):
|
| 17 |
+
from typing_extensions import Self
|
| 18 |
+
else:
|
| 19 |
+
from typing import Self
|
| 20 |
+
|
| 21 |
+
_logger = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
_MESSAGE_REPLACEMENTS = {
|
| 24 |
+
"must be named by propertyName definition": "keys must be named by",
|
| 25 |
+
"one of contains definition": "at least one item that matches",
|
| 26 |
+
" same as const definition:": "",
|
| 27 |
+
"only specified items": "only items matching the definition",
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
_SKIP_DETAILS = (
|
| 31 |
+
"must not be empty",
|
| 32 |
+
"is always invalid",
|
| 33 |
+
"must not be there",
|
| 34 |
+
)
|
| 35 |
+
|
| 36 |
+
_NEED_DETAILS = {"anyOf", "oneOf", "allOf", "contains", "propertyNames", "not", "items"}
|
| 37 |
+
|
| 38 |
+
_CAMEL_CASE_SPLITTER = re.compile(r"\W+|([A-Z][^A-Z\W]*)")
|
| 39 |
+
_IDENTIFIER = re.compile(r"^[\w_]+$", re.I)
|
| 40 |
+
|
| 41 |
+
_TOML_JARGON = {
|
| 42 |
+
"object": "table",
|
| 43 |
+
"property": "key",
|
| 44 |
+
"properties": "keys",
|
| 45 |
+
"property names": "keys",
|
| 46 |
+
}
|
| 47 |
+
|
| 48 |
+
_FORMATS_HELP = """
|
| 49 |
+
For more details about `format` see
|
| 50 |
+
https://validate-pyproject.readthedocs.io/en/latest/api/validate_pyproject.formats.html
|
| 51 |
+
"""
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class ValidationError(JsonSchemaValueException):
|
| 55 |
+
"""Report violations of a given JSON schema.
|
| 56 |
+
|
| 57 |
+
This class extends :exc:`~fastjsonschema.JsonSchemaValueException`
|
| 58 |
+
by adding the following properties:
|
| 59 |
+
|
| 60 |
+
- ``summary``: an improved version of the ``JsonSchemaValueException`` error message
|
| 61 |
+
with only the necessary information)
|
| 62 |
+
|
| 63 |
+
- ``details``: more contextual information about the error like the failing schema
|
| 64 |
+
itself and the value that violates the schema.
|
| 65 |
+
|
| 66 |
+
Depending on the level of the verbosity of the ``logging`` configuration
|
| 67 |
+
the exception message will be only ``summary`` (default) or a combination of
|
| 68 |
+
``summary`` and ``details`` (when the logging level is set to :obj:`logging.DEBUG`).
|
| 69 |
+
"""
|
| 70 |
+
|
| 71 |
+
summary = ""
|
| 72 |
+
details = ""
|
| 73 |
+
_original_message = ""
|
| 74 |
+
|
| 75 |
+
@classmethod
|
| 76 |
+
def _from_jsonschema(cls, ex: JsonSchemaValueException) -> "Self":
|
| 77 |
+
formatter = _ErrorFormatting(ex)
|
| 78 |
+
obj = cls(str(formatter), ex.value, formatter.name, ex.definition, ex.rule)
|
| 79 |
+
debug_code = os.getenv("JSONSCHEMA_DEBUG_CODE_GENERATION", "false").lower()
|
| 80 |
+
if debug_code != "false": # pragma: no cover
|
| 81 |
+
obj.__cause__, obj.__traceback__ = ex.__cause__, ex.__traceback__
|
| 82 |
+
obj._original_message = ex.message
|
| 83 |
+
obj.summary = formatter.summary
|
| 84 |
+
obj.details = formatter.details
|
| 85 |
+
return obj
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
@contextmanager
def detailed_errors() -> Generator[None, None, None]:
    """Context manager that re-raises any ``JsonSchemaValueException`` as the
    richer ``ValidationError`` (``from None`` suppresses the noisy original
    exception context).
    """
    try:
        yield
    except JsonSchemaValueException as ex:
        raise ValidationError._from_jsonschema(ex) from None
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
class _ErrorFormatting:
    """Lazily derive a ``summary`` and ``details`` text from a raw
    ``JsonSchemaValueException`` (both are computed on first access and cached).
    """

    def __init__(self, ex: JsonSchemaValueException):
        self.ex = ex
        # Display name with the leading "data." stripped and wrapped in backticks
        self.name = f"`{self._simplify_name(ex.name)}`"
        self._original_message: str = self.ex.message.replace(ex.name, self.name)
        self._summary = ""
        self._details = ""

    def __str__(self) -> str:
        # Only show the verbose details when the logger is at DEBUG level
        if _logger.getEffectiveLevel() <= logging.DEBUG and self.details:
            return f"{self.summary}\n\n{self.details}"

        return self.summary

    @property
    def summary(self) -> str:
        # Cached lazy computation
        if not self._summary:
            self._summary = self._expand_summary()

        return self._summary

    @property
    def details(self) -> str:
        # Cached lazy computation
        if not self._details:
            self._details = self._expand_details()

        return self._details

    @staticmethod
    def _simplify_name(name: str) -> str:
        """Strip the leading ``data.`` prefix fastjsonschema adds to paths."""
        x = len("data.")
        return name[x:] if name.startswith("data.") else name

    def _expand_summary(self) -> str:
        """Produce the short message: jargon replaced, optionally followed by
        a rendered snippet of the offending schema rule.
        """
        msg = self._original_message

        for bad, repl in _MESSAGE_REPLACEMENTS.items():
            msg = msg.replace(bad, repl)

        # Some rules are self-explanatory; skip the schema dump for those
        if any(substring in msg for substring in _SKIP_DETAILS):
            return msg

        schema = self.ex.rule_definition
        if self.ex.rule in _NEED_DETAILS and schema:
            summary = _SummaryWriter(_TOML_JARGON)
            return f"{msg}:\n\n{indent(summary(schema), '    ')}"

        return msg

    def _expand_details(self) -> str:
        """Produce the long message: description, given value, offending rule
        and the JSON definition (plus a `format` help epilog when relevant).
        """
        optional = []
        definition = self.ex.definition or {}
        # NOTE(review): ``pop`` mutates the exception's definition dict in place
        # so the description does not show up twice in the JSON dump below.
        desc_lines = definition.pop("$$description", [])
        desc = definition.pop("description", None) or " ".join(desc_lines)
        if desc:
            description = "\n".join(
                wrap(
                    desc,
                    width=80,
                    initial_indent="    ",
                    subsequent_indent="    ",
                    break_long_words=False,
                )
            )
            optional.append(f"DESCRIPTION:\n{description}")
        schema = json.dumps(definition, indent=4)
        value = json.dumps(self.ex.value, indent=4)
        defaults = [
            f"GIVEN VALUE:\n{indent(value, '    ')}",
            f"OFFENDING RULE: {self.ex.rule!r}",
            f"DEFINITION:\n{indent(schema, '    ')}",
        ]
        msg = "\n\n".join(optional + defaults)
        epilog = f"\n{_FORMATS_HELP}" if "format" in msg.lower() else ""
        return msg + epilog
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
class _SummaryWriter:
    """Render a JSON Schema fragment as indented, human-readable text,
    translating schema keywords (and optionally TOML jargon) to plain English.
    Instances are callable: ``writer(schema)`` returns the rendered string.
    """

    # Schema keys that never add value to the summary output
    _IGNORE = frozenset(("description", "default", "title", "examples"))

    def __init__(self, jargon: Optional[Dict[str, str]] = None):
        # Optional keyword -> user-facing term translation (e.g. TOML jargon)
        self.jargon: Dict[str, str] = jargon or {}
        # Clarify confusing terms
        self._terms = {
            "anyOf": "at least one of the following",
            "oneOf": "exactly one of the following",
            "allOf": "all of the following",
            "not": "(*NOT* the following)",
            "prefixItems": f"{self._jargon('items')} (in order)",
            "items": "items",
            "contains": "contains at least one of",
            "propertyNames": (
                f"non-predefined acceptable {self._jargon('property names')}"
            ),
            "patternProperties": f"{self._jargon('properties')} named via pattern",
            "const": "predefined value",
            "enum": "one of",
        }
        # Attributes that indicate that the definition is easy and can be done
        # inline (e.g. string and number)
        self._guess_inline_defs = [
            "enum",
            "const",
            "maxLength",
            "minLength",
            "pattern",
            "format",
            "minimum",
            "maximum",
            "exclusiveMinimum",
            "exclusiveMaximum",
            "multipleOf",
        ]

    def _jargon(self, term: Union[str, List[str]]) -> Union[str, List[str]]:
        """Translate a term (or list of terms) through the jargon mapping."""
        if isinstance(term, list):
            return [self.jargon.get(t, t) for t in term]
        return self.jargon.get(term, term)

    def __call__(
        self,
        schema: Union[dict, List[dict]],
        prefix: str = "",
        *,
        _path: Sequence[str] = (),
    ) -> str:
        """Render *schema*; ``_path`` tracks the position inside the schema so
        arbitrary property names can be told apart from schema keywords.
        """
        if isinstance(schema, list):
            return self._handle_list(schema, prefix, _path)

        filtered = self._filter_unecessary(schema, _path)
        simple = self._handle_simple_dict(filtered, _path)
        if simple:
            return f"{prefix}{simple}"

        child_prefix = self._child_prefix(prefix, "  ")
        item_prefix = self._child_prefix(prefix, "- ")
        indent = len(prefix) * " "
        with io.StringIO() as buffer:
            for i, (key, value) in enumerate(filtered.items()):
                child_path = [*_path, key]
                line_prefix = prefix if i == 0 else indent
                buffer.write(f"{line_prefix}{self._label(child_path)}:")
                # ^ just the first item should receive the complete prefix
                if isinstance(value, dict):
                    filtered = self._filter_unecessary(value, child_path)
                    simple = self._handle_simple_dict(filtered, child_path)
                    buffer.write(
                        f" {simple}"
                        if simple
                        else f"\n{self(value, child_prefix, _path=child_path)}"
                    )
                elif isinstance(value, list) and (
                    key != "type" or self._is_property(child_path)
                ):
                    children = self._handle_list(value, item_prefix, child_path)
                    sep = " " if children.startswith("[") else "\n"
                    buffer.write(f"{sep}{children}")
                else:
                    buffer.write(f" {self._value(value, child_path)}\n")
            return buffer.getvalue()

    def _is_unecessary(self, path: Sequence[str]) -> bool:
        """True when the schema entry at *path* should be hidden from output."""
        if self._is_property(path) or not path:  # empty path => instruction @ root
            return False
        key = path[-1]
        # Hide private/extension keys ("$..."/"_...") and the _IGNORE set
        return any(key.startswith(k) for k in "$_") or key in self._IGNORE

    def _filter_unecessary(
        self, schema: Dict[str, Any], path: Sequence[str]
    ) -> Dict[str, Any]:
        """Drop entries that ``_is_unecessary`` flags."""
        return {
            key: value
            for key, value in schema.items()
            if not self._is_unecessary([*path, key])
        }

    def _handle_simple_dict(self, value: dict, path: Sequence[str]) -> Optional[str]:
        """Render a flat/inline-able dict on one line, or None if it needs
        the full multi-line treatment.
        """
        inline = any(p in value for p in self._guess_inline_defs)
        simple = not any(isinstance(v, (list, dict)) for v in value.values())
        if inline or simple:
            return f"{{{', '.join(self._inline_attrs(value, path))}}}\n"
        return None

    def _handle_list(
        self, schemas: list, prefix: str = "", path: Sequence[str] = ()
    ) -> str:
        """Render a list of schemas: short scalar lists via ``repr``, anything
        else as "- " bullet items.
        """
        if self._is_unecessary(path):
            return ""

        repr_ = repr(schemas)
        if all(not isinstance(e, (dict, list)) for e in schemas) and len(repr_) < 60:
            return f"{repr_}\n"

        item_prefix = self._child_prefix(prefix, "- ")
        return "".join(
            self(v, item_prefix, _path=[*path, f"[{i}]"]) for i, v in enumerate(schemas)
        )

    def _is_property(self, path: Sequence[str]) -> bool:
        """Check if the given path can correspond to an arbitrarily named property"""
        counter = 0
        for key in path[-2::-1]:
            if key not in {"properties", "patternProperties"}:
                break
            counter += 1

        # If the counter if even, the path correspond to a JSON Schema keyword
        # otherwise it can be any arbitrary string naming a property
        return counter % 2 == 1

    def _label(self, path: Sequence[str]) -> str:
        """Human-readable label for the last path element."""
        *parents, key = path
        if not self._is_property(path):
            norm_key = _separate_terms(key)
            return self._terms.get(key) or " ".join(self._jargon(norm_key))

        if parents[-1] == "patternProperties":
            return f"(regex {key!r})"
        return repr(key)  # property name

    def _value(self, value: Any, path: Sequence[str]) -> str:
        """Render a scalar schema value (translating ``type`` through jargon)."""
        if path[-1] == "type" and not self._is_property(path):
            type_ = self._jargon(value)
            return f"[{', '.join(type_)}]" if isinstance(type_, list) else type_
        return repr(value)

    def _inline_attrs(self, schema: dict, path: Sequence[str]) -> Iterator[str]:
        """Yield ``label: value`` pairs for one-line dict rendering."""
        for key, value in schema.items():
            child_path = [*path, key]
            yield f"{self._label(child_path)}: {self._value(value, child_path)}"

    def _child_prefix(self, parent_prefix: str, child_prefix: str) -> str:
        """Indent a child one level deeper than its parent prefix."""
        return len(parent_prefix) * " " + child_prefix
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
def _separate_terms(word: str) -> List[str]:
    """Split *word* at camel-case and separator boundaries, lowercased.

    >>> _separate_terms("FooBar-foo")
    ['foo', 'bar', 'foo']
    """
    terms: List[str] = []
    for piece in _CAMEL_CASE_SPLITTER.split(word):
        if piece:
            terms.append(piece.lower())
    return terms
|
llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/extra_validations.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""The purpose of this module is implement PEP 621 validations that are
|
| 2 |
+
difficult to express as a JSON Schema (or that are not supported by the current
|
| 3 |
+
JSON Schema library).
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from inspect import cleandoc
|
| 7 |
+
from typing import Mapping, TypeVar
|
| 8 |
+
|
| 9 |
+
from .error_reporting import ValidationError
|
| 10 |
+
|
| 11 |
+
T = TypeVar("T", bound=Mapping)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class RedefiningStaticFieldAsDynamic(ValidationError):
    # PEP 621 requirement quoted verbatim; reused both as the class docstring
    # and inside the error's schema-like ``definition`` payload.
    _DESC = """According to PEP 621:

    Build back-ends MUST raise an error if the metadata specifies a field
    statically as well as being listed in dynamic.
    """
    __doc__ = _DESC
    # Canonical reference for the ``project.dynamic`` specification
    _URL = (
        "https://packaging.python.org/en/latest/specifications/"
        "pyproject-toml/#dynamic"
    )
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def validate_project_dynamic(pyproject: T) -> T:
    """Ensure no ``project`` field is given statically while also being
    listed under ``project.dynamic`` (forbidden by PEP 621).

    Returns *pyproject* unchanged when valid; raises
    :class:`RedefiningStaticFieldAsDynamic` otherwise.
    """
    project_table = pyproject.get("project", {})
    dynamic = project_table.get("dynamic", [])

    # First dynamic entry that also appears statically (None when valid)
    field = next((f for f in dynamic if f in project_table), None)
    if field is not None:
        raise RedefiningStaticFieldAsDynamic(
            message=f"You cannot provide a value for `project.{field}` and "
            "list it under `project.dynamic` at the same time",
            value={
                field: project_table[field],
                "...": " # ...",
                "dynamic": dynamic,
            },
            name=f"data.project.{field}",
            definition={
                "description": cleandoc(RedefiningStaticFieldAsDynamic._DESC),
                "see": RedefiningStaticFieldAsDynamic._URL,
            },
            rule="PEP 621",
        )

    return pyproject


# Hook list consumed by the generated validator
EXTRA_VALIDATIONS = (validate_project_dynamic,)
|
llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
# Splits an error "name" such as ``data.prop[0]`` into its path components.
SPLIT_RE = re.compile(r'[\.\[\]]+')
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class JsonSchemaException(ValueError):
    """
    Base exception of ``fastjsonschema`` library.
    """
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class JsonSchemaValueException(JsonSchemaException):
    """
    Exception raised by validation function. Available properties:

     * ``message`` containing human-readable information what is wrong (e.g. ``data.property[index] must be smaller than or equal to 42``),
     * invalid ``value`` (e.g. ``60``),
     * ``name`` of a path in the data structure (e.g. ``data.property[index]``),
     * ``path`` as an array in the data structure (e.g. ``['data', 'property', 'index']``),
     * the whole ``definition`` which the ``value`` has to fulfil (e.g. ``{'type': 'number', 'maximum': 42}``),
     * ``rule`` which the ``value`` is breaking (e.g. ``maximum``)
     * and ``rule_definition`` (e.g. ``42``).

    .. versionchanged:: 2.14.0
        Added all extra properties.
    """

    def __init__(self, message, value=None, name=None, definition=None, rule=None):
        super().__init__(message)
        # Keep every piece of context as a plain attribute for callers
        self.message = message
        self.value = value
        self.name = name
        self.definition = definition
        self.rule = rule

    @property
    def path(self):
        # Break the dotted/bracketed name into its non-empty components
        parts = SPLIT_RE.split(self.name)
        return [part for part in parts if part != '']

    @property
    def rule_definition(self):
        # Only meaningful when both the rule and its enclosing definition exist
        if self.rule and self.definition:
            return self.definition.get(self.rule)
        return None
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class JsonSchemaDefinitionException(JsonSchemaException):
    """
    Exception raised by generator of validation function.
    """
|
llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/formats.py
ADDED
|
@@ -0,0 +1,375 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
The functions in this module are used to validate schemas with the
|
| 3 |
+
`format JSON Schema keyword
|
| 4 |
+
<https://json-schema.org/understanding-json-schema/reference/string#format>`_.
|
| 5 |
+
|
| 6 |
+
The correspondence is given by replacing the ``_`` character in the name of the
|
| 7 |
+
function with a ``-`` to obtain the format name and vice versa.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import builtins
|
| 11 |
+
import logging
|
| 12 |
+
import os
|
| 13 |
+
import re
|
| 14 |
+
import string
|
| 15 |
+
import typing
|
| 16 |
+
from itertools import chain as _chain
|
| 17 |
+
|
| 18 |
+
if typing.TYPE_CHECKING:
|
| 19 |
+
from typing_extensions import Literal
|
| 20 |
+
|
| 21 |
+
_logger = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
# -------------------------------------------------------------------------------------
|
| 24 |
+
# PEP 440
|
| 25 |
+
|
| 26 |
+
# Grammar for PEP 440 version identifiers (verbose regex, see re.X below).
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

VERSION_REGEX = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.X | re.I)


def pep440(version: str) -> bool:
    """See :ref:`PyPA's version specification <pypa:version-specifiers>`
    (initially introduced in :pep:`440`).
    """
    return bool(VERSION_REGEX.match(version))
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
# -------------------------------------------------------------------------------------
|
| 68 |
+
# PEP 508
|
| 69 |
+
|
| 70 |
+
# PEP 508 project names: alphanumeric start/end, dots/underscores/dashes inside
PEP508_IDENTIFIER_PATTERN = r"([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])"
PEP508_IDENTIFIER_REGEX = re.compile(f"^{PEP508_IDENTIFIER_PATTERN}$", re.I)


def pep508_identifier(name: str) -> bool:
    """See :ref:`PyPA's name specification <pypa:name-format>`
    (initially introduced in :pep:`508#names`).
    """
    return bool(PEP508_IDENTIFIER_REGEX.match(name))
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
try:
    try:
        # Preferred: a real installation of ``packaging``
        from packaging import requirements as _req
    except ImportError:  # pragma: no cover
        # let's try setuptools vendored version
        from setuptools._vendor.packaging import (  # type: ignore[no-redef]
            requirements as _req,
        )

    def pep508(value: str) -> bool:
        """See :ref:`PyPA's dependency specifiers <pypa:dependency-specifiers>`
        (initially introduced in :pep:`508`).
        """
        try:
            _req.Requirement(value)
            return True
        except _req.InvalidRequirement:
            return False

except ImportError:  # pragma: no cover
    # No ``packaging`` available at all: warn once and accept everything,
    # deferring real validation to later stages (e.g. upload to PyPI).
    _logger.warning(
        "Could not find an installation of `packaging`. Requirements, dependencies and "
        "versions might not be validated. "
        "To enforce validation, please install `packaging`."
    )

    def pep508(value: str) -> bool:
        # Fallback stub: no validation possible without ``packaging``
        return True
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def pep508_versionspec(value: str) -> bool:
    """Expression that can be used to specify/lock versions (including ranges)
    See ``versionspec`` in :ref:`PyPA's dependency specifiers
    <pypa:dependency-specifiers>` (initially introduced in :pep:`508`).
    """
    # In PEP 508 conditional markers (";"), extras ("]") and URL specs ("@")
    # are not part of the versionspec grammar.
    if ";" in value or "]" in value or "@" in value:
        return False
    # Pretend there is a dependency called `requirement` carrying this version
    # spec, so the full pep508 validator can be reused.
    return pep508(f"requirement{value}")
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
# -------------------------------------------------------------------------------------
|
| 127 |
+
# PEP 517
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def pep517_backend_reference(value: str) -> bool:
    """See PyPA's specification for defining build-backend references
    introduced in :pep:`517#source-trees`.

    This is similar to an entry-point reference (e.g., ``package.module:object``).
    """
    module, _, obj = value.partition(":")
    # Validate every dotted segment of both the module and the object path
    segments = [part.strip() for part in module.split(".") + obj.split(".")]
    return all(python_identifier(seg) for seg in segments if seg)
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
# -------------------------------------------------------------------------------------
|
| 142 |
+
# Classifiers - PEP 301
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def _download_classifiers() -> str:
    """Fetch the plain-text list of trove classifiers from PyPI (network I/O)."""
    import ssl
    from email.message import Message
    from urllib.request import urlopen

    url = "https://pypi.org/pypi?:action=list_classifiers"
    context = ssl.create_default_context()
    with urlopen(url, context=context) as response:  # noqa: S310 (audit URLs)
        # Use email.Message purely as a convenient header/charset parser
        headers = Message()
        headers["content_type"] = response.getheader("content-type", "text/plain")
        return response.read().decode(headers.get_param("charset", "utf-8"))  # type: ignore[no-any-return]
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
class _TroveClassifier:
    """The ``trove_classifiers`` package is the official way of validating classifiers,
    however this package might not be always available.
    As a workaround we can still download a list from PyPI.
    We also don't want to be over strict about it, so simply skipping silently is an
    option (classifiers will be validated anyway during the upload to PyPI).
    """

    # None => not downloaded yet; False => download failed/disabled;
    # set => cached classifier names
    downloaded: typing.Union[None, "Literal[False]", typing.Set[str]]

    def __init__(self) -> None:
        self.downloaded = None
        self._skip_download = False
        # None => not cached yet
        # False => cache not available
        self.__name__ = "trove_classifier"  # Emulate a public function

    def _disable_download(self) -> None:
        # This is a private API. Only setuptools has the consent of using it.
        self._skip_download = True

    def __call__(self, value: str) -> bool:
        """Validate *value* against the (lazily downloaded) classifier list;
        always accepts when the list is unavailable or downloads are disabled.
        """
        if self.downloaded is False or self._skip_download is True:
            return True

        if os.getenv("NO_NETWORK") or os.getenv("VALIDATE_PYPROJECT_NO_NETWORK"):
            self.downloaded = False
            msg = (
                "Install ``trove-classifiers`` to ensure proper validation. "
                "Skipping download of classifiers list from PyPI (NO_NETWORK)."
            )
            _logger.debug(msg)
            return True

        if self.downloaded is None:
            msg = (
                "Install ``trove-classifiers`` to ensure proper validation. "
                "Meanwhile a list of classifiers will be downloaded from PyPI."
            )
            _logger.debug(msg)
            try:
                self.downloaded = set(_download_classifiers().splitlines())
            except Exception:
                # Best-effort: network problems must not break validation
                self.downloaded = False
                _logger.debug("Problem with download, skipping validation")
                return True

        # "Private ::" classifiers are always accepted (never uploaded to PyPI)
        return value in self.downloaded or value.lower().startswith("private ::")
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
try:
    # Preferred path: the official ``trove-classifiers`` package
    from trove_classifiers import classifiers as _trove_classifiers

    def trove_classifier(value: str) -> bool:
        """See https://pypi.org/classifiers/"""
        return value in _trove_classifiers or value.lower().startswith("private ::")

except ImportError:  # pragma: no cover
    # Fallback: download-on-demand (or skip) behaviour
    trove_classifier = _TroveClassifier()
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
# -------------------------------------------------------------------------------------
|
| 220 |
+
# Stub packages - PEP 561
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
def pep561_stub_name(value: str) -> bool:
    """Name of a directory containing type stubs.
    It must follow the name scheme ``<package>-stubs`` as defined in
    :pep:`561#stub-only-packages`.
    """
    suffix = "-stubs"
    top, *children = value.split(".")
    if not top.endswith(suffix):
        return False
    # Strip the suffix and validate the remainder as a normal module path
    base = top[: -len(suffix)]
    return python_module_name(".".join([base, *children]))
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
# -------------------------------------------------------------------------------------
|
| 235 |
+
# Non-PEP related
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
def url(value: str) -> bool:
    """Valid URL (validation uses :obj:`urllib.parse`).
    For maximum compatibility please make sure to include a ``scheme`` prefix
    in your URL (e.g. ``http://``).
    """
    from urllib.parse import urlparse

    try:
        parts = urlparse(value)
        if not parts.scheme:
            _logger.warning(
                "For maximum compatibility please make sure to include a "
                "`scheme` prefix in your URL (e.g. 'http://'). "
                f"Given value: {value}"
            )
            # Retry with an assumed http:// prefix, unless the value looks
            # like a bare path or an email-ish string.
            looks_like_path = value.startswith(("/", "\\")) or "@" in value
            if not looks_like_path:
                parts = urlparse(f"http://{value}")

        return bool(parts.scheme and parts.netloc)
    except Exception:
        # urlparse can raise on pathological input; treat as invalid
        return False
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
# https://packaging.python.org/specifications/entry-points/
ENTRYPOINT_PATTERN = r"[^\[\s=]([^=]*[^\s=])?"
ENTRYPOINT_REGEX = re.compile(f"^{ENTRYPOINT_PATTERN}$", re.I)
# NOTE(review): "RECOMMEDED" is a typo (should be RECOMMENDED) but the names
# are kept as-is for backward compatibility with existing importers.
RECOMMEDED_ENTRYPOINT_PATTERN = r"[\w.-]+"
RECOMMEDED_ENTRYPOINT_REGEX = re.compile(f"^{RECOMMEDED_ENTRYPOINT_PATTERN}$", re.I)
ENTRYPOINT_GROUP_PATTERN = r"\w+(\.\w+)*"
ENTRYPOINT_GROUP_REGEX = re.compile(f"^{ENTRYPOINT_GROUP_PATTERN}$", re.I)
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
def python_identifier(value: str) -> bool:
    """Can be used as identifier in Python.
    (Validation uses :obj:`str.isidentifier`).
    """
    return str.isidentifier(value)
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
def python_qualified_identifier(value: str) -> bool:
    """
    Python "dotted identifier", i.e. a sequence of :obj:`python_identifier`
    concatenated with ``"."`` (e.g.: ``package.module.submodule``).
    """
    # Leading/trailing dots are rejected outright
    if value[:1] == "." or value[-1:] == ".":
        return False
    segments = value.split(".")
    return all(python_identifier(seg) for seg in segments)
|
| 285 |
+
|
| 286 |
+
|
| 287 |
+
def python_module_name(value: str) -> bool:
    """Module name that can be used in an ``import``-statement in Python.
    See :obj:`python_qualified_identifier`.
    """
    # Currently a pure alias; kept separate so the format name maps 1:1
    return python_qualified_identifier(value)
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
def python_module_name_relaxed(value: str) -> bool:
    """Similar to :obj:`python_module_name`, but relaxed to also accept
    dash characters (``-``) and cover special cases like ``pip-run``.

    It is recommended, however, that beginners avoid dash characters,
    as they require advanced knowledge about Python internals.

    The following are disallowed:

    * names starting/ending in dashes,
    * names ending in ``-stubs`` (potentially collide with :obj:`pep561_stub_name`).
    """
    edge_dash = value[:1] == "-" or value[-1:] == "-"
    if edge_dash or value.endswith("-stubs"):
        # "-stubs" is reserved by PEP 561 stub-only packages
        return False
    return python_module_name(value.replace("-", "_"))
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
def python_entrypoint_group(value: str) -> bool:
    """See ``Data model > group`` in the :ref:`PyPA's entry-points specification
    <pypa:entry-points>`.
    """
    return bool(ENTRYPOINT_GROUP_REGEX.match(value))
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def python_entrypoint_name(value: str) -> bool:
    """See ``Data model > name`` in the :ref:`PyPA's entry-points specification
    <pypa:entry-points>`.
    """
    if not ENTRYPOINT_REGEX.match(value):
        return False
    if not RECOMMEDED_ENTRYPOINT_REGEX.match(value):
        # Allowed by the spec, but warn when the name strays from the
        # recommended ``[\w.-]+`` pattern.
        msg = f"Entry point `{value}` does not follow recommended pattern: "
        msg += RECOMMEDED_ENTRYPOINT_PATTERN
        _logger.warning(msg)
    return True
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
def python_entrypoint_reference(value: str) -> bool:
    """Reference to a Python object using in the format::

        importable.module:object.attr

    See ``Data model >object reference`` in the :ref:`PyPA's entry-points specification
    <pypa:entry-points>`.
    """
    module, _, rest = value.partition(":")
    if "[" in rest:
        obj, _, extras_ = rest.partition("[")
        # BUGFIX: ``extras_.strip()[-1]`` raised IndexError for inputs ending
        # in a bare "[" (empty extras segment); ``endswith`` rejects them safely.
        if not extras_.strip().endswith("]"):
            return False
        extras = (x.strip() for x in extras_.strip(string.whitespace + "[]").split(","))
        if not all(pep508_identifier(e) for e in extras):
            return False
        _logger.warning(f"`{value}` - using extras for entry points is not recommended")
    else:
        obj = rest

    module_parts = module.split(".")
    # Only validate the object path when a ":" part was actually given
    identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts
    return all(python_identifier(i.strip()) for i in identifiers)
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
def uint8(value: builtins.int) -> bool:
    r"""Unsigned 8-bit integer (:math:`0 \leq x < 2^8`)"""
    return 0 <= value < 0x100
|
| 361 |
+
|
| 362 |
+
|
| 363 |
+
def uint16(value: builtins.int) -> bool:
    r"""Unsigned 16-bit integer (:math:`0 \leq x < 2^{16}`)"""
    # Same as ``0 <= value < 2**16``, expressed as the negation of the bounds.
    return not (value < 0 or value >= 1 << 16)
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
def uint(value: builtins.int) -> bool:
    r"""Unsigned 64-bit integer (:math:`0 \leq x < 2^{64}`)"""
    # Same as ``0 <= value < 2**64``, expressed as the negation of the bounds.
    return not (value < 0 or value >= 1 << 64)
|
| 371 |
+
|
| 372 |
+
|
| 373 |
+
def int(value: builtins.int) -> bool:
    r"""Signed 64-bit integer (:math:`-2^{63} \leq x < 2^{63}`)"""
    # NOTE: intentionally shadows the builtin -- this module maps JSON-schema
    # format names to validators, and the format is called "int".
    bound = 1 << 63
    return not (value < -bound or value >= bound)
|
llava/lib/python3.10/site-packages/setuptools/config/expand.py
ADDED
|
@@ -0,0 +1,452 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utility functions to expand configuration directives or special values
|
| 2 |
+
(such glob patterns).
|
| 3 |
+
|
| 4 |
+
We can split the process of interpreting configuration files into 2 steps:
|
| 5 |
+
|
| 6 |
+
1. The parsing the file contents from strings to value objects
|
| 7 |
+
that can be understand by Python (for example a string with a comma
|
| 8 |
+
separated list of keywords into an actual Python list of strings).
|
| 9 |
+
|
| 10 |
+
2. The expansion (or post-processing) of these values according to the
|
| 11 |
+
semantics ``setuptools`` assign to them (for example a configuration field
|
| 12 |
+
with the ``file:`` directive should be expanded from a list of file paths to
|
| 13 |
+
a single string with the contents of those files concatenated)
|
| 14 |
+
|
| 15 |
+
This module focus on the second step, and therefore allow sharing the expansion
|
| 16 |
+
functions among several configuration file formats.
|
| 17 |
+
|
| 18 |
+
**PRIVATE MODULE**: API reserved for setuptools internal usage only.
|
| 19 |
+
"""
|
| 20 |
+
|
| 21 |
+
from __future__ import annotations
|
| 22 |
+
|
| 23 |
+
import ast
|
| 24 |
+
import importlib
|
| 25 |
+
import os
|
| 26 |
+
import pathlib
|
| 27 |
+
import sys
|
| 28 |
+
from collections.abc import Iterable, Iterator, Mapping
|
| 29 |
+
from configparser import ConfigParser
|
| 30 |
+
from glob import iglob
|
| 31 |
+
from importlib.machinery import ModuleSpec, all_suffixes
|
| 32 |
+
from itertools import chain
|
| 33 |
+
from pathlib import Path
|
| 34 |
+
from types import ModuleType, TracebackType
|
| 35 |
+
from typing import TYPE_CHECKING, Any, Callable, TypeVar
|
| 36 |
+
|
| 37 |
+
from .. import _static
|
| 38 |
+
from .._path import StrPath, same_path as _same_path
|
| 39 |
+
from ..discovery import find_package_path
|
| 40 |
+
from ..warnings import SetuptoolsWarning
|
| 41 |
+
|
| 42 |
+
from distutils.errors import DistutilsOptionError
|
| 43 |
+
|
| 44 |
+
if TYPE_CHECKING:
|
| 45 |
+
from typing_extensions import Self
|
| 46 |
+
|
| 47 |
+
from setuptools.dist import Distribution
|
| 48 |
+
|
| 49 |
+
# Generic type parameters used by :class:`LazyMappingProxy` below.
_K = TypeVar("_K")
_V_co = TypeVar("_V_co", covariant=True)  # covariant: values are only ever read
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class StaticModule:
    """Proxy to a module object that avoids executing arbitrary code."""

    def __init__(self, name: str, spec: ModuleSpec) -> None:
        # Parse the source text instead of importing it, so that no
        # module-level code is ever executed.
        # ``spec.origin`` may be ``None``; ``Path`` will raise in that case.
        self.name = name
        self.spec = spec
        self.module = ast.parse(pathlib.Path(spec.origin).read_bytes())  # type: ignore[arg-type] # Let it raise an error on None

    def _find_assignments(self) -> Iterator[tuple[ast.AST, ast.AST]]:
        """Yield ``(target, value)`` for every top-level (annotated) assignment."""
        for node in self.module.body:
            if isinstance(node, ast.Assign):
                for target in node.targets:
                    yield (target, node.value)
            elif isinstance(node, ast.AnnAssign) and node.value:
                yield (node.target, node.value)

    def __getattr__(self, attr: str):
        """Attempt to load an attribute "statically", via :func:`ast.literal_eval`."""
        try:
            matches = (
                value
                for target, value in self._find_assignments()
                if isinstance(target, ast.Name) and target.id == attr
            )
            return ast.literal_eval(next(matches))
        except Exception as e:
            raise AttributeError(f"{self.name} has no attribute {attr}") from e
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def glob_relative(
    patterns: Iterable[str], root_dir: StrPath | None = None
) -> list[str]:
    """Expand the list of glob patterns, but preserving relative paths.

    :param list[str] patterns: List of glob patterns
    :param str root_dir: Path to which globs should be relative
        (current directory by default)
    :rtype: list
    """
    special = {'*', '?', '[', ']', '{', '}'}
    base = root_dir or os.getcwd()
    results = []

    for pattern in patterns:
        if special.isdisjoint(pattern):
            # No glob character: keep the value as-is, normalised to "/".
            results.append(os.path.relpath(pattern, base).replace(os.sep, "/"))
            continue
        # Expand the pattern while keeping the matches *relative* to ``base``.
        absolute_pattern = os.path.abspath(os.path.join(base, pattern))
        matches = iglob(absolute_pattern, recursive=True)
        results.extend(
            sorted(os.path.relpath(m, base).replace(os.sep, "/") for m in matches)
        )

    return results
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def read_files(
    filepaths: StrPath | Iterable[StrPath], root_dir: StrPath | None = None
) -> str:
    """Return the content of the files concatenated using ``\n`` as str

    This function is sandboxed and won't reach anything outside ``root_dir``

    (By default ``root_dir`` is the current directory).
    """
    from more_itertools import always_iterable

    base = os.path.abspath(root_dir or os.getcwd())
    candidates = (os.path.join(base, p) for p in always_iterable(filepaths))
    # ``_assert_local`` raises for anything that escapes ``base``.
    contents = [
        _read_file(existing)
        for existing in _filter_existing_files(candidates)
        if _assert_local(existing, base)
    ]
    return '\n'.join(contents)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def _filter_existing_files(filepaths: Iterable[StrPath]) -> Iterator[StrPath]:
    """Yield only the paths that exist; emit a warning for each missing one."""
    for candidate in filepaths:
        if not os.path.isfile(candidate):
            SetuptoolsWarning.emit(f"File {candidate!r} cannot be found")
            continue
        yield candidate
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def _read_file(filepath: bytes | StrPath) -> str:
|
| 142 |
+
with open(filepath, encoding='utf-8') as f:
|
| 143 |
+
return f.read()
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def _assert_local(filepath: StrPath, root_dir: str):
|
| 147 |
+
if Path(os.path.abspath(root_dir)) not in Path(os.path.abspath(filepath)).parents:
|
| 148 |
+
msg = f"Cannot access {filepath!r} (or anything outside {root_dir!r})"
|
| 149 |
+
raise DistutilsOptionError(msg)
|
| 150 |
+
|
| 151 |
+
return True
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def read_attr(
    attr_desc: str,
    package_dir: Mapping[str, str] | None = None,
    root_dir: StrPath | None = None,
) -> Any:
    """Reads the value of an attribute from a module.

    This function will try to read the attributed statically first
    (via :func:`ast.literal_eval`), and only evaluate the module if it fails.

    Examples:
        read_attr("package.attr")
        read_attr("package.module.attr")

    :param str attr_desc: Dot-separated string describing how to reach the
        attribute (see examples above)
    :param dict[str, str] package_dir: Mapping of package names to their
        location in disk (represented by paths relative to ``root_dir``).
    :param str root_dir: Path to directory containing all the packages in
        ``package_dir`` (current directory by default).
    :rtype: str
    """
    root_dir = root_dir or os.getcwd()
    # Last dotted segment is the attribute; the rest is the module path.
    *module_parts, attr_name = attr_desc.strip().split('.')
    module_name = '.'.join(module_parts) or '__init__'
    module_path = _find_module(module_name, package_dir, root_dir)
    spec = _find_spec(module_name, module_path)

    try:
        value = getattr(StaticModule(module_name, spec), attr_name)
        # XXX: Is marking as static contents coming from modules too optimistic?
        return _static.attempt_conversion(value)
    except Exception:
        # Static analysis failed: fall back to actually executing the module.
        return getattr(_load_spec(spec, module_name), attr_name)
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
def _find_spec(module_name: str, module_path: StrPath | None) -> ModuleSpec:
|
| 195 |
+
spec = importlib.util.spec_from_file_location(module_name, module_path)
|
| 196 |
+
spec = spec or importlib.util.find_spec(module_name)
|
| 197 |
+
|
| 198 |
+
if spec is None:
|
| 199 |
+
raise ModuleNotFoundError(module_name)
|
| 200 |
+
|
| 201 |
+
return spec
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType:
|
| 205 |
+
name = getattr(spec, "__name__", module_name)
|
| 206 |
+
if name in sys.modules:
|
| 207 |
+
return sys.modules[name]
|
| 208 |
+
module = importlib.util.module_from_spec(spec)
|
| 209 |
+
sys.modules[name] = module # cache (it also ensures `==` works on loaded items)
|
| 210 |
+
assert spec.loader is not None
|
| 211 |
+
spec.loader.exec_module(module)
|
| 212 |
+
return module
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
def _find_module(
    module_name: str, package_dir: Mapping[str, str] | None, root_dir: StrPath
) -> str | None:
    """Find the path to the module named ``module_name``,
    considering the ``package_dir`` in the build configuration and ``root_dir``.

    >>> tmp = getfixture('tmpdir')
    >>> _ = tmp.ensure("a/b/c.py")
    >>> _ = tmp.ensure("a/b/d/__init__.py")
    >>> r = lambda x: x.replace(str(tmp), "tmp").replace(os.sep, "/")
    >>> r(_find_module("a.b.c", None, tmp))
    'tmp/a/b/c.py'
    >>> r(_find_module("f.g.h", {"": "1", "f": "2", "f.g": "3", "f.g.h": "a/b/d"}, tmp))
    'tmp/a/b/d/__init__.py'
    """
    base = find_package_path(module_name, package_dir or {}, root_dir)
    # Try each recognised suffix, first as a plain module file,
    # then as a package ``__init__``.
    for ext in all_suffixes():
        for candidate in (f"{base}{ext}", os.path.join(base, f"__init__{ext}")):
            if os.path.isfile(candidate):
                return candidate
    return None
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
def resolve_class(
    qualified_class_name: str,
    package_dir: Mapping[str, str] | None = None,
    root_dir: StrPath | None = None,
) -> Callable:
    """Given a qualified class name, return the associated class object"""
    root_dir = root_dir or os.getcwd()
    # Split ``pkg.module.Class`` into module path and class name at the last dot.
    idx = qualified_class_name.rfind('.')
    pkg_name = qualified_class_name[:idx]
    class_name = qualified_class_name[idx + 1 :]

    module_path = _find_module(pkg_name, package_dir, root_dir)
    module = _load_spec(_find_spec(pkg_name, module_path), pkg_name)
    return getattr(module, class_name)
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
def cmdclass(
    values: dict[str, str],
    package_dir: Mapping[str, str] | None = None,
    root_dir: StrPath | None = None,
) -> dict[str, Callable]:
    """Given a dictionary mapping command names to strings for qualified class
    names, apply :func:`resolve_class` to the dict values.
    """
    resolved: dict[str, Callable] = {}
    for command, qualified_name in values.items():
        resolved[command] = resolve_class(qualified_name, package_dir, root_dir)
    return resolved
|
| 263 |
+
|
| 264 |
+
|
| 265 |
+
def find_packages(
    *,
    namespaces=True,
    fill_package_dir: dict[str, str] | None = None,
    root_dir: StrPath | None = None,
    **kwargs,
) -> list[str]:
    """Works similarly to :func:`setuptools.find_packages`, but with all
    arguments given as keyword arguments. Moreover, ``where`` can be given
    as a list (the results will be simply concatenated).

    When the additional keyword argument ``namespaces`` is ``True``, it will
    behave like :func:`setuptools.find_namespace_packages`` (i.e. include
    implicit namespaces as per :pep:`420`).

    The ``where`` argument will be considered relative to ``root_dir`` (or the current
    working directory when ``root_dir`` is not given).

    If the ``fill_package_dir`` argument is passed, this function will consider it as a
    similar data structure to the ``package_dir`` configuration parameter add fill-in
    any missing package location.

    :rtype: list
    """
    from more_itertools import always_iterable, unique_everseen

    from setuptools.discovery import construct_package_dir

    # check "not namespaces" first due to python/mypy#6232
    if not namespaces:
        from setuptools.discovery import PackageFinder
    else:
        from setuptools.discovery import PEP420PackageFinder as PackageFinder

    root_dir = root_dir or os.curdir
    where = kwargs.pop('where', ['.'])
    packages: list[str] = []
    # Mutated in place below so callers can observe the discovered locations.
    fill_package_dir = {} if fill_package_dir is None else fill_package_dir
    # ``where`` may be a single string or an iterable; deduplicate while
    # preserving order.
    search = list(unique_everseen(always_iterable(where)))

    # A single custom search dir (that is neither "." nor the project root)
    # acts as the default package root ("" key in ``package_dir``).
    if len(search) == 1 and all(not _same_path(search[0], x) for x in (".", root_dir)):
        fill_package_dir.setdefault("", search[0])

    for path in search:
        package_path = _nest_path(root_dir, path)
        pkgs = PackageFinder.find(package_path, **kwargs)
        packages.extend(pkgs)
        # Record where the found packages live, unless they are already covered
        # by the default ("") entry or located directly at the project root.
        if pkgs and not (
            fill_package_dir.get("") == path or os.path.samefile(package_path, root_dir)
        ):
            fill_package_dir.update(construct_package_dir(pkgs, path))

    return packages
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def _nest_path(parent: StrPath, path: StrPath) -> str:
|
| 321 |
+
path = parent if path in {".", ""} else os.path.join(parent, path)
|
| 322 |
+
return os.path.normpath(path)
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
def version(value: Callable | Iterable[str | int] | str) -> str:
    """When getting the version directly from an attribute,
    it should be normalised to string.
    """
    resolved = value() if callable(value) else value

    if isinstance(resolved, str):
        return resolved
    # Iterables of segments (e.g. ``(1, 0, "dev")``) are joined with dots.
    if hasattr(resolved, '__iter__'):
        return '.'.join(str(segment) for segment in resolved)
    return f'{resolved}'
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
def canonic_package_data(package_data: dict) -> dict:
    """Normalise the ``"*"`` wildcard key to ``""`` (meaning "any package")."""
    try:
        package_data[""] = package_data.pop("*")
    except KeyError:
        pass
    return package_data
|
| 342 |
+
|
| 343 |
+
|
| 344 |
+
def canonic_data_files(
    data_files: list | dict, root_dir: StrPath | None = None
) -> list[tuple[str, list[str]]]:
    """For compatibility with ``setup.py``, ``data_files`` should be a list
    of pairs instead of a dict.

    This function also expands glob patterns.
    """
    if isinstance(data_files, list):
        # Already in the ``setup.py`` shape; nothing to normalise.
        return data_files

    pairs = []
    for dest, patterns in data_files.items():
        pairs.append((dest, glob_relative(patterns, root_dir)))
    return pairs
|
| 359 |
+
|
| 360 |
+
|
| 361 |
+
def entry_points(
    text: str, text_source: str = "entry-points"
) -> dict[str, dict[str, str]]:
    """Given the contents of entry-points file,
    process it into a 2-level dictionary (``dict[str, dict[str, str]]``).
    The first level keys are entry-point groups, the second level keys are
    entry-point names, and the second level values are references to objects
    (that correspond to the entry-point value).
    """
    # ``default_section=None`` relies on undocumented behaviour,
    # see python/typeshed#12700
    parser = ConfigParser(default_section=None, delimiters=("=",))  # type: ignore[call-overload]
    parser.optionxform = str  # preserve the case of entry-point names
    parser.read_string(text, text_source)
    # Iterating ``sections()`` skips the (empty) default section entirely.
    return {section: dict(parser.items(section)) for section in parser.sections()}
|
| 377 |
+
|
| 378 |
+
|
| 379 |
+
class EnsurePackagesDiscovered:
    """Some expand functions require all the packages to already be discovered before
    they run, e.g. :func:`read_attr`, :func:`resolve_class`, :func:`cmdclass`.

    Therefore in some cases we will need to run autodiscovery during the evaluation of
    the configuration. However, it is better to postpone calling package discovery as
    much as possible, because some parameters can influence it (e.g. ``package_dir``),
    and those might not have been processed yet.
    """

    def __init__(self, distribution: Distribution) -> None:
        self._dist = distribution
        self._called = False  # discovery runs at most once

    def __call__(self):
        """Trigger the automatic package discovery, if it is still necessary."""
        if self._called:
            return
        self._called = True
        self._dist.set_defaults(name=False)  # Skip name, we can still be parsing

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ):
        if self._called:
            self._dist.set_defaults.analyse_name()  # Now we can set a default name

    def _get_package_dir(self) -> Mapping[str, str]:
        # Make sure discovery ran before reading ``package_dir``.
        self()
        pkg_dir = self._dist.package_dir
        return {} if pkg_dir is None else pkg_dir

    @property
    def package_dir(self) -> Mapping[str, str]:
        """Proxy to ``package_dir`` that may trigger auto-discovery when used."""
        return LazyMappingProxy(self._get_package_dir)
|
| 420 |
+
|
| 421 |
+
|
| 422 |
+
class LazyMappingProxy(Mapping[_K, _V_co]):
    """Mapping proxy that delays resolving the target object, until really needed.

    >>> def obtain_mapping():
    ...     print("Running expensive function!")
    ...     return {"key": "value", "other key": "other value"}
    >>> mapping = LazyMappingProxy(obtain_mapping)
    >>> mapping["key"]
    Running expensive function!
    'value'
    >>> mapping["other key"]
    'other value'
    """

    def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V_co]]) -> None:
        self._factory = obtain_mapping_value
        self._cache: Mapping[_K, _V_co] | None = None

    def _resolved(self) -> Mapping[_K, _V_co]:
        # Call the factory at most once; reuse the result afterwards.
        if self._cache is None:
            self._cache = self._factory()
        return self._cache

    def __getitem__(self, key: _K) -> _V_co:
        return self._resolved()[key]

    def __iter__(self) -> Iterator[_K]:
        return iter(self._resolved())

    def __len__(self) -> int:
        return len(self._resolved())
|
llava/lib/python3.10/site-packages/setuptools/tests/__init__.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import locale
|
| 2 |
+
import sys
|
| 3 |
+
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
__all__ = ['fail_on_ascii']

# ``locale.getencoding`` only exists on Python >= 3.11.
locale_encoding = (
    locale.getencoding()
    if sys.version_info >= (3, 11)
    else locale.getpreferredencoding(False)
)
# ASCII-only locales break some tests; mark them as expected failures there.
is_ascii = locale_encoding == 'ANSI_X3.4-1968'
fail_on_ascii = pytest.mark.xfail(is_ascii, reason="Test fails in this locale")
|
llava/lib/python3.10/site-packages/setuptools/tests/fixtures.py
ADDED
|
@@ -0,0 +1,157 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import os
|
| 3 |
+
import subprocess
|
| 4 |
+
import sys
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
|
| 7 |
+
import path
|
| 8 |
+
import pytest
|
| 9 |
+
|
| 10 |
+
from . import contexts, environment
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
@pytest.fixture
def user_override(monkeypatch):
    """
    Override site.USER_BASE and site.USER_SITE with temporary directories in
    a context.
    """
    with contexts.tempdir() as user_base, contexts.tempdir() as user_site:
        monkeypatch.setattr('site.USER_BASE', user_base)
        monkeypatch.setattr('site.USER_SITE', user_site)
        # Restore the user-site setting once the test is done.
        with contexts.save_user_site_setting():
            yield
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
@pytest.fixture
def tmpdir_cwd(tmpdir):
    """Run the test with ``tmpdir`` as the current working directory."""
    with tmpdir.as_cwd() as original_cwd:
        yield original_cwd
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@pytest.fixture(autouse=True, scope="session")
def workaround_xdist_376(request):
    """
    Workaround pytest-dev/pytest-xdist#376

    ``pytest-xdist`` tends to inject '' into ``sys.path``,
    which may break certain isolation expectations.
    Remove the entry so the import
    machinery behaves the same irrespective of xdist.
    """
    if request.config.pluginmanager.has_plugin('xdist'):
        # '' may or may not be present; ignore the ValueError if it is not.
        with contextlib.suppress(ValueError):
            sys.path.remove('')
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
@pytest.fixture
def sample_project(tmp_path):
    """
    Clone the 'sampleproject' and return a path to it.
    """
    clone_cmd = ['git', 'clone', 'https://github.com/pypa/sampleproject']
    try:
        subprocess.check_call(clone_cmd, cwd=str(tmp_path))
    except Exception:
        # No network / no git: the dependent tests simply cannot run.
        pytest.skip("Unable to clone sampleproject")
    return tmp_path / 'sampleproject'
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
# sdist and wheel artifacts should be stable across a round of tests
|
| 64 |
+
# so we can build them once per session and use the files as "readonly"
|
| 65 |
+
|
| 66 |
+
# In the case of setuptools, building the wheel without sdist may cause
|
| 67 |
+
# it to contain the `build` directory, and therefore create situations with
|
| 68 |
+
# `setuptools/build/lib/build/lib/...`. To avoid that, build both artifacts at once.
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def _build_distributions(tmp_path_factory, request):
    """Build (or reuse) the setuptools sdist + wheel pair for this session.

    Returns a ``(sdist_path, wheel_path)`` tuple.  Artifacts are produced at
    most once per session inside a locked, shared temporary directory.
    """
    with contexts.session_locked_tmp_dir(
        request, tmp_path_factory, "dist_build"
    ) as tmp:  # pragma: no cover
        # Reuse artifacts built earlier in the session (e.g. by another worker).
        sdist = next(tmp.glob("*.tar.gz"), None)
        wheel = next(tmp.glob("*.whl"), None)
        if sdist and wheel:
            return (sdist, wheel)

        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
        assert not Path(request.config.rootdir, "build/lib/build").exists()

        subprocess.check_output([
            sys.executable,
            "-m",
            "build",
            "--outdir",
            str(tmp),
            str(request.config.rootdir),
        ])

        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
        assert not Path(request.config.rootdir, "build/lib/build").exists()

        return next(tmp.glob("*.tar.gz")), next(tmp.glob("*.whl"))
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
@pytest.fixture(scope="session")
def setuptools_sdist(tmp_path_factory, request):
    """Path to a setuptools sdist: a pre-built one, or one built per session."""
    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")
    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
        return Path(prebuilt).resolve()

    return _build_distributions(tmp_path_factory, request)[0]
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
@pytest.fixture(scope="session")
def setuptools_wheel(tmp_path_factory, request):
    """Path to a setuptools wheel: a pre-built one, or one built per session."""
    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")
    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
        return Path(prebuilt).resolve()

    return _build_distributions(tmp_path_factory, request)[1]
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
@pytest.fixture
def venv(tmp_path, setuptools_wheel):
    """Virtual env with the version of setuptools under test installed"""
    env = environment.VirtualEnv()
    env.root = path.Path(tmp_path / 'venv')
    # Deliberately skip the bundled setuptools; the wheel under test is
    # installed instead (via ``env.req`` below).
    env.create_opts = ['--no-setuptools', '--wheel=bundle']
    # TODO: Use `--no-wheel` when setuptools implements its own bdist_wheel
    env.req = str(setuptools_wheel)
    # In some environments (eg. downstream distro packaging),
    # where tox isn't used to run tests and PYTHONPATH is set to point to
    # a specific setuptools codebase, PYTHONPATH will leak into the spawned
    # processes.
    # env.create() should install the just created setuptools
    # wheel, but it doesn't if it finds another existing matching setuptools
    # installation present on PYTHONPATH:
    # `setuptools is already installed with the same version as the provided
    # wheel. Use --force-reinstall to force an installation of the wheel.`
    # This prevents leaking PYTHONPATH to the created environment.
    with contexts.environment(PYTHONPATH=None):
        return env.create()
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
@pytest.fixture
def venv_without_setuptools(tmp_path):
    """Virtual env without any version of setuptools installed"""
    clean_env = environment.VirtualEnv()
    clean_env.root = path.Path(tmp_path / 'venv_without_setuptools')
    clean_env.create_opts = ['--no-setuptools', '--no-wheel']
    clean_env.ensure_env()
    return clean_env
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
@pytest.fixture
def bare_venv(tmp_path):
    """Virtual env without any common packages installed"""
    empty_env = environment.VirtualEnv()
    empty_env.root = path.Path(tmp_path / 'bare_venv')
    empty_env.create_opts = ['--no-setuptools', '--no-pip', '--no-wheel', '--no-seed']
    empty_env.ensure_env()
    return empty_env
|
llava/lib/python3.10/site-packages/setuptools/tests/mod_with_constant.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Module-level constant; presumably read via ``attr:`` directives / ``read_attr``
# in the test suite -- TODO confirm against callers.
value = 'three, sir!'
|
llava/lib/python3.10/site-packages/setuptools/tests/namespaces.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import ast
|
| 2 |
+
import json
|
| 3 |
+
import textwrap
|
| 4 |
+
from pathlib import Path
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def iter_namespace_pkgs(namespace):
    """Yield every dotted prefix of ``namespace`` (e.g. ``a``, ``a.b``, ``a.b.c``)."""
    accumulated = []
    for segment in namespace.split("."):
        accumulated.append(segment)
        yield ".".join(accumulated)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def build_namespace_package(tmpdir, name, version="1.0", impl="pkg_resources"):
|
| 14 |
+
src_dir = tmpdir / name
|
| 15 |
+
src_dir.mkdir()
|
| 16 |
+
setup_py = src_dir / 'setup.py'
|
| 17 |
+
namespace, _, rest = name.rpartition('.')
|
| 18 |
+
namespaces = list(iter_namespace_pkgs(namespace))
|
| 19 |
+
setup_args = {
|
| 20 |
+
"name": name,
|
| 21 |
+
"version": version,
|
| 22 |
+
"packages": namespaces,
|
| 23 |
+
}
|
| 24 |
+
|
| 25 |
+
if impl == "pkg_resources":
|
| 26 |
+
tmpl = '__import__("pkg_resources").declare_namespace(__name__)'
|
| 27 |
+
setup_args["namespace_packages"] = namespaces
|
| 28 |
+
elif impl == "pkgutil":
|
| 29 |
+
tmpl = '__path__ = __import__("pkgutil").extend_path(__path__, __name__)'
|
| 30 |
+
else:
|
| 31 |
+
raise ValueError(f"Cannot recognise {impl=} when creating namespaces")
|
| 32 |
+
|
| 33 |
+
args = json.dumps(setup_args, indent=4)
|
| 34 |
+
assert ast.literal_eval(args) # ensure it is valid Python
|
| 35 |
+
|
| 36 |
+
script = textwrap.dedent(
|
| 37 |
+
"""\
|
| 38 |
+
import setuptools
|
| 39 |
+
args = {args}
|
| 40 |
+
setuptools.setup(**args)
|
| 41 |
+
"""
|
| 42 |
+
).format(args=args)
|
| 43 |
+
setup_py.write_text(script, encoding='utf-8')
|
| 44 |
+
|
| 45 |
+
ns_pkg_dir = Path(src_dir, namespace.replace(".", "/"))
|
| 46 |
+
ns_pkg_dir.mkdir(parents=True)
|
| 47 |
+
|
| 48 |
+
for ns in namespaces:
|
| 49 |
+
pkg_init = src_dir / ns.replace(".", "/") / '__init__.py'
|
| 50 |
+
pkg_init.write_text(tmpl, encoding='utf-8')
|
| 51 |
+
|
| 52 |
+
pkg_mod = ns_pkg_dir / (rest + '.py')
|
| 53 |
+
some_functionality = 'name = {rest!r}'.format(**locals())
|
| 54 |
+
pkg_mod.write_text(some_functionality, encoding='utf-8')
|
| 55 |
+
return src_dir
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def build_pep420_namespace_package(tmpdir, name):
|
| 59 |
+
src_dir = tmpdir / name
|
| 60 |
+
src_dir.mkdir()
|
| 61 |
+
pyproject = src_dir / "pyproject.toml"
|
| 62 |
+
namespace, _, rest = name.rpartition(".")
|
| 63 |
+
script = f"""\
|
| 64 |
+
[build-system]
|
| 65 |
+
requires = ["setuptools"]
|
| 66 |
+
build-backend = "setuptools.build_meta"
|
| 67 |
+
|
| 68 |
+
[project]
|
| 69 |
+
name = "{name}"
|
| 70 |
+
version = "3.14159"
|
| 71 |
+
"""
|
| 72 |
+
pyproject.write_text(textwrap.dedent(script), encoding='utf-8')
|
| 73 |
+
ns_pkg_dir = Path(src_dir, namespace.replace(".", "/"))
|
| 74 |
+
ns_pkg_dir.mkdir(parents=True)
|
| 75 |
+
pkg_mod = ns_pkg_dir / (rest + ".py")
|
| 76 |
+
some_functionality = f"name = {rest!r}"
|
| 77 |
+
pkg_mod.write_text(some_functionality, encoding='utf-8')
|
| 78 |
+
return src_dir
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def make_site_dir(target):
|
| 82 |
+
"""
|
| 83 |
+
Add a sitecustomize.py module in target to cause
|
| 84 |
+
target to be added to site dirs such that .pth files
|
| 85 |
+
are processed there.
|
| 86 |
+
"""
|
| 87 |
+
sc = target / 'sitecustomize.py'
|
| 88 |
+
target_str = str(target)
|
| 89 |
+
tmpl = '__import__("site").addsitedir({target_str!r})'
|
| 90 |
+
sc.write_text(tmpl.format(**locals()), encoding='utf-8')
|
llava/lib/python3.10/site-packages/setuptools/tests/script-with-bom.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
result = 'passed'
|
llava/lib/python3.10/site-packages/setuptools/tests/server.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Basic http server for tests to simulate PyPI or custom indexes"""
|
| 2 |
+
|
| 3 |
+
import http.server
|
| 4 |
+
import os
|
| 5 |
+
import threading
|
| 6 |
+
import time
|
| 7 |
+
import urllib.parse
|
| 8 |
+
import urllib.request
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class IndexServer(http.server.HTTPServer):
|
| 12 |
+
"""Basic single-threaded http server simulating a package index
|
| 13 |
+
|
| 14 |
+
You can use this server in unittest like this::
|
| 15 |
+
s = IndexServer()
|
| 16 |
+
s.start()
|
| 17 |
+
index_url = s.base_url() + 'mytestindex'
|
| 18 |
+
# do some test requests to the index
|
| 19 |
+
# The index files should be located in setuptools/tests/indexes
|
| 20 |
+
s.stop()
|
| 21 |
+
"""
|
| 22 |
+
|
| 23 |
+
def __init__(
|
| 24 |
+
self,
|
| 25 |
+
server_address=('', 0),
|
| 26 |
+
RequestHandlerClass=http.server.SimpleHTTPRequestHandler,
|
| 27 |
+
):
|
| 28 |
+
http.server.HTTPServer.__init__(self, server_address, RequestHandlerClass)
|
| 29 |
+
self._run = True
|
| 30 |
+
|
| 31 |
+
def start(self):
|
| 32 |
+
self.thread = threading.Thread(target=self.serve_forever)
|
| 33 |
+
self.thread.start()
|
| 34 |
+
|
| 35 |
+
def stop(self):
|
| 36 |
+
"Stop the server"
|
| 37 |
+
|
| 38 |
+
# Let the server finish the last request and wait for a new one.
|
| 39 |
+
time.sleep(0.1)
|
| 40 |
+
|
| 41 |
+
self.shutdown()
|
| 42 |
+
self.thread.join()
|
| 43 |
+
self.socket.close()
|
| 44 |
+
|
| 45 |
+
def base_url(self):
|
| 46 |
+
port = self.server_port
|
| 47 |
+
return f'http://127.0.0.1:{port}/setuptools/tests/indexes/'
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class RequestRecorder(http.server.BaseHTTPRequestHandler):
|
| 51 |
+
def do_GET(self):
|
| 52 |
+
requests = vars(self.server).setdefault('requests', [])
|
| 53 |
+
requests.append(self)
|
| 54 |
+
self.send_response(200, 'OK')
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class MockServer(http.server.HTTPServer, threading.Thread):
|
| 58 |
+
"""
|
| 59 |
+
A simple HTTP Server that records the requests made to it.
|
| 60 |
+
"""
|
| 61 |
+
|
| 62 |
+
def __init__(self, server_address=('', 0), RequestHandlerClass=RequestRecorder):
|
| 63 |
+
http.server.HTTPServer.__init__(self, server_address, RequestHandlerClass)
|
| 64 |
+
threading.Thread.__init__(self)
|
| 65 |
+
self.daemon = True
|
| 66 |
+
self.requests = []
|
| 67 |
+
|
| 68 |
+
def run(self):
|
| 69 |
+
self.serve_forever()
|
| 70 |
+
|
| 71 |
+
@property
|
| 72 |
+
def netloc(self):
|
| 73 |
+
return f'localhost:{self.server_port}'
|
| 74 |
+
|
| 75 |
+
@property
|
| 76 |
+
def url(self):
|
| 77 |
+
return f'http://{self.netloc}/'
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def path_to_url(path, authority=None):
|
| 81 |
+
"""Convert a path to a file: URL."""
|
| 82 |
+
path = os.path.normpath(os.path.abspath(path))
|
| 83 |
+
base = 'file:'
|
| 84 |
+
if authority is not None:
|
| 85 |
+
base += '//' + authority
|
| 86 |
+
return urllib.parse.urljoin(base, urllib.request.pathname2url(path))
|
llava/lib/python3.10/site-packages/setuptools/tests/test_archive_util.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import tarfile
|
| 3 |
+
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
from setuptools import archive_util
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@pytest.fixture
|
| 10 |
+
def tarfile_with_unicode(tmpdir):
|
| 11 |
+
"""
|
| 12 |
+
Create a tarfile containing only a file whose name is
|
| 13 |
+
a zero byte file called testimäge.png.
|
| 14 |
+
"""
|
| 15 |
+
tarobj = io.BytesIO()
|
| 16 |
+
|
| 17 |
+
with tarfile.open(fileobj=tarobj, mode="w:gz") as tgz:
|
| 18 |
+
data = b""
|
| 19 |
+
|
| 20 |
+
filename = "testimäge.png"
|
| 21 |
+
|
| 22 |
+
t = tarfile.TarInfo(filename)
|
| 23 |
+
t.size = len(data)
|
| 24 |
+
|
| 25 |
+
tgz.addfile(t, io.BytesIO(data))
|
| 26 |
+
|
| 27 |
+
target = tmpdir / 'unicode-pkg-1.0.tar.gz'
|
| 28 |
+
with open(str(target), mode='wb') as tf:
|
| 29 |
+
tf.write(tarobj.getvalue())
|
| 30 |
+
return str(target)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@pytest.mark.xfail(reason="#710 and #712")
|
| 34 |
+
def test_unicode_files(tarfile_with_unicode, tmpdir):
|
| 35 |
+
target = tmpdir / 'out'
|
| 36 |
+
archive_util.unpack_archive(tarfile_with_unicode, str(target))
|
llava/lib/python3.10/site-packages/setuptools/tests/test_bdist_egg.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""develop tests"""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
import zipfile
|
| 6 |
+
|
| 7 |
+
import pytest
|
| 8 |
+
|
| 9 |
+
from setuptools.dist import Distribution
|
| 10 |
+
|
| 11 |
+
from . import contexts
|
| 12 |
+
|
| 13 |
+
SETUP_PY = """\
|
| 14 |
+
from setuptools import setup
|
| 15 |
+
|
| 16 |
+
setup(py_modules=['hi'])
|
| 17 |
+
"""
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@pytest.fixture
|
| 21 |
+
def setup_context(tmpdir):
|
| 22 |
+
with (tmpdir / 'setup.py').open('w') as f:
|
| 23 |
+
f.write(SETUP_PY)
|
| 24 |
+
with (tmpdir / 'hi.py').open('w') as f:
|
| 25 |
+
f.write('1\n')
|
| 26 |
+
with tmpdir.as_cwd():
|
| 27 |
+
yield tmpdir
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class Test:
|
| 31 |
+
@pytest.mark.usefixtures("user_override")
|
| 32 |
+
@pytest.mark.usefixtures("setup_context")
|
| 33 |
+
def test_bdist_egg(self):
|
| 34 |
+
dist = Distribution(
|
| 35 |
+
dict(
|
| 36 |
+
script_name='setup.py',
|
| 37 |
+
script_args=['bdist_egg'],
|
| 38 |
+
name='foo',
|
| 39 |
+
py_modules=['hi'],
|
| 40 |
+
)
|
| 41 |
+
)
|
| 42 |
+
os.makedirs(os.path.join('build', 'src'))
|
| 43 |
+
with contexts.quiet():
|
| 44 |
+
dist.parse_command_line()
|
| 45 |
+
dist.run_commands()
|
| 46 |
+
|
| 47 |
+
# let's see if we got our egg link at the right place
|
| 48 |
+
[content] = os.listdir('dist')
|
| 49 |
+
assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content)
|
| 50 |
+
|
| 51 |
+
@pytest.mark.xfail(
|
| 52 |
+
os.environ.get('PYTHONDONTWRITEBYTECODE', False),
|
| 53 |
+
reason="Byte code disabled",
|
| 54 |
+
)
|
| 55 |
+
@pytest.mark.usefixtures("user_override")
|
| 56 |
+
@pytest.mark.usefixtures("setup_context")
|
| 57 |
+
def test_exclude_source_files(self):
|
| 58 |
+
dist = Distribution(
|
| 59 |
+
dict(
|
| 60 |
+
script_name='setup.py',
|
| 61 |
+
script_args=['bdist_egg', '--exclude-source-files'],
|
| 62 |
+
py_modules=['hi'],
|
| 63 |
+
)
|
| 64 |
+
)
|
| 65 |
+
with contexts.quiet():
|
| 66 |
+
dist.parse_command_line()
|
| 67 |
+
dist.run_commands()
|
| 68 |
+
[dist_name] = os.listdir('dist')
|
| 69 |
+
dist_filename = os.path.join('dist', dist_name)
|
| 70 |
+
zip = zipfile.ZipFile(dist_filename)
|
| 71 |
+
names = list(zi.filename for zi in zip.filelist)
|
| 72 |
+
assert 'hi.pyc' in names
|
| 73 |
+
assert 'hi.py' not in names
|
llava/lib/python3.10/site-packages/setuptools/tests/test_build.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from setuptools import Command
|
| 2 |
+
from setuptools.command.build import build
|
| 3 |
+
from setuptools.dist import Distribution
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
|
| 7 |
+
"""
|
| 8 |
+
Check that the setuptools Distribution uses the
|
| 9 |
+
setuptools specific build object.
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
dist = Distribution(
|
| 13 |
+
dict(
|
| 14 |
+
script_name='setup.py',
|
| 15 |
+
script_args=['build'],
|
| 16 |
+
packages=[],
|
| 17 |
+
package_data={'': ['path/*']},
|
| 18 |
+
)
|
| 19 |
+
)
|
| 20 |
+
assert isinstance(dist.get_command_obj("build"), build)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class Subcommand(Command):
|
| 24 |
+
"""Dummy command to be used in tests"""
|
| 25 |
+
|
| 26 |
+
def initialize_options(self):
|
| 27 |
+
pass
|
| 28 |
+
|
| 29 |
+
def finalize_options(self):
|
| 30 |
+
pass
|
| 31 |
+
|
| 32 |
+
def run(self):
|
| 33 |
+
raise NotImplementedError("just to check if the command runs")
|
llava/lib/python3.10/site-packages/setuptools/tests/test_build_meta.py
ADDED
|
@@ -0,0 +1,970 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import importlib
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
import shutil
|
| 6 |
+
import signal
|
| 7 |
+
import sys
|
| 8 |
+
import tarfile
|
| 9 |
+
from concurrent import futures
|
| 10 |
+
from pathlib import Path
|
| 11 |
+
from typing import Any, Callable
|
| 12 |
+
from zipfile import ZipFile
|
| 13 |
+
|
| 14 |
+
import pytest
|
| 15 |
+
from jaraco import path
|
| 16 |
+
from packaging.requirements import Requirement
|
| 17 |
+
|
| 18 |
+
from .textwrap import DALS
|
| 19 |
+
|
| 20 |
+
SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
TIMEOUT = int(os.getenv("TIMEOUT_BACKEND_TEST", "180")) # in seconds
|
| 24 |
+
IS_PYPY = '__pypy__' in sys.builtin_module_names
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
pytestmark = pytest.mark.skipif(
|
| 28 |
+
sys.platform == "win32" and IS_PYPY,
|
| 29 |
+
reason="The combination of PyPy + Windows + pytest-xdist + ProcessPoolExecutor "
|
| 30 |
+
"is flaky and problematic",
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class BuildBackendBase:
|
| 35 |
+
def __init__(self, cwd='.', env=None, backend_name='setuptools.build_meta'):
|
| 36 |
+
self.cwd = cwd
|
| 37 |
+
self.env = env or {}
|
| 38 |
+
self.backend_name = backend_name
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class BuildBackend(BuildBackendBase):
|
| 42 |
+
"""PEP 517 Build Backend"""
|
| 43 |
+
|
| 44 |
+
def __init__(self, *args, **kwargs):
|
| 45 |
+
super().__init__(*args, **kwargs)
|
| 46 |
+
self.pool = futures.ProcessPoolExecutor(max_workers=1)
|
| 47 |
+
|
| 48 |
+
def __getattr__(self, name: str) -> Callable[..., Any]:
|
| 49 |
+
"""Handles arbitrary function invocations on the build backend."""
|
| 50 |
+
|
| 51 |
+
def method(*args, **kw):
|
| 52 |
+
root = os.path.abspath(self.cwd)
|
| 53 |
+
caller = BuildBackendCaller(root, self.env, self.backend_name)
|
| 54 |
+
pid = None
|
| 55 |
+
try:
|
| 56 |
+
pid = self.pool.submit(os.getpid).result(TIMEOUT)
|
| 57 |
+
return self.pool.submit(caller, name, *args, **kw).result(TIMEOUT)
|
| 58 |
+
except futures.TimeoutError:
|
| 59 |
+
self.pool.shutdown(wait=False) # doesn't stop already running processes
|
| 60 |
+
self._kill(pid)
|
| 61 |
+
pytest.xfail(f"Backend did not respond before timeout ({TIMEOUT} s)")
|
| 62 |
+
except (futures.process.BrokenProcessPool, MemoryError, OSError):
|
| 63 |
+
if IS_PYPY:
|
| 64 |
+
pytest.xfail("PyPy frequently fails tests with ProcessPoolExector")
|
| 65 |
+
raise
|
| 66 |
+
|
| 67 |
+
return method
|
| 68 |
+
|
| 69 |
+
def _kill(self, pid):
|
| 70 |
+
if pid is None:
|
| 71 |
+
return
|
| 72 |
+
with contextlib.suppress(ProcessLookupError, OSError):
|
| 73 |
+
os.kill(pid, signal.SIGTERM if os.name == "nt" else signal.SIGKILL)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class BuildBackendCaller(BuildBackendBase):
|
| 77 |
+
def __init__(self, *args, **kwargs):
|
| 78 |
+
super().__init__(*args, **kwargs)
|
| 79 |
+
|
| 80 |
+
(self.backend_name, _, self.backend_obj) = self.backend_name.partition(':')
|
| 81 |
+
|
| 82 |
+
def __call__(self, name, *args, **kw):
|
| 83 |
+
"""Handles arbitrary function invocations on the build backend."""
|
| 84 |
+
os.chdir(self.cwd)
|
| 85 |
+
os.environ.update(self.env)
|
| 86 |
+
mod = importlib.import_module(self.backend_name)
|
| 87 |
+
|
| 88 |
+
if self.backend_obj:
|
| 89 |
+
backend = getattr(mod, self.backend_obj)
|
| 90 |
+
else:
|
| 91 |
+
backend = mod
|
| 92 |
+
|
| 93 |
+
return getattr(backend, name)(*args, **kw)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
defns = [
|
| 97 |
+
{ # simple setup.py script
|
| 98 |
+
'setup.py': DALS(
|
| 99 |
+
"""
|
| 100 |
+
__import__('setuptools').setup(
|
| 101 |
+
name='foo',
|
| 102 |
+
version='0.0.0',
|
| 103 |
+
py_modules=['hello'],
|
| 104 |
+
setup_requires=['six'],
|
| 105 |
+
)
|
| 106 |
+
"""
|
| 107 |
+
),
|
| 108 |
+
'hello.py': DALS(
|
| 109 |
+
"""
|
| 110 |
+
def run():
|
| 111 |
+
print('hello')
|
| 112 |
+
"""
|
| 113 |
+
),
|
| 114 |
+
},
|
| 115 |
+
{ # setup.py that relies on __name__
|
| 116 |
+
'setup.py': DALS(
|
| 117 |
+
"""
|
| 118 |
+
assert __name__ == '__main__'
|
| 119 |
+
__import__('setuptools').setup(
|
| 120 |
+
name='foo',
|
| 121 |
+
version='0.0.0',
|
| 122 |
+
py_modules=['hello'],
|
| 123 |
+
setup_requires=['six'],
|
| 124 |
+
)
|
| 125 |
+
"""
|
| 126 |
+
),
|
| 127 |
+
'hello.py': DALS(
|
| 128 |
+
"""
|
| 129 |
+
def run():
|
| 130 |
+
print('hello')
|
| 131 |
+
"""
|
| 132 |
+
),
|
| 133 |
+
},
|
| 134 |
+
{ # setup.py script that runs arbitrary code
|
| 135 |
+
'setup.py': DALS(
|
| 136 |
+
"""
|
| 137 |
+
variable = True
|
| 138 |
+
def function():
|
| 139 |
+
return variable
|
| 140 |
+
assert variable
|
| 141 |
+
__import__('setuptools').setup(
|
| 142 |
+
name='foo',
|
| 143 |
+
version='0.0.0',
|
| 144 |
+
py_modules=['hello'],
|
| 145 |
+
setup_requires=['six'],
|
| 146 |
+
)
|
| 147 |
+
"""
|
| 148 |
+
),
|
| 149 |
+
'hello.py': DALS(
|
| 150 |
+
"""
|
| 151 |
+
def run():
|
| 152 |
+
print('hello')
|
| 153 |
+
"""
|
| 154 |
+
),
|
| 155 |
+
},
|
| 156 |
+
{ # setup.py script that constructs temp files to be included in the distribution
|
| 157 |
+
'setup.py': DALS(
|
| 158 |
+
"""
|
| 159 |
+
# Some packages construct files on the fly, include them in the package,
|
| 160 |
+
# and immediately remove them after `setup()` (e.g. pybind11==2.9.1).
|
| 161 |
+
# Therefore, we cannot use `distutils.core.run_setup(..., stop_after=...)`
|
| 162 |
+
# to obtain a distribution object first, and then run the distutils
|
| 163 |
+
# commands later, because these files will be removed in the meantime.
|
| 164 |
+
|
| 165 |
+
with open('world.py', 'w', encoding="utf-8") as f:
|
| 166 |
+
f.write('x = 42')
|
| 167 |
+
|
| 168 |
+
try:
|
| 169 |
+
__import__('setuptools').setup(
|
| 170 |
+
name='foo',
|
| 171 |
+
version='0.0.0',
|
| 172 |
+
py_modules=['world'],
|
| 173 |
+
setup_requires=['six'],
|
| 174 |
+
)
|
| 175 |
+
finally:
|
| 176 |
+
# Some packages will clean temporary files
|
| 177 |
+
__import__('os').unlink('world.py')
|
| 178 |
+
"""
|
| 179 |
+
),
|
| 180 |
+
},
|
| 181 |
+
{ # setup.cfg only
|
| 182 |
+
'setup.cfg': DALS(
|
| 183 |
+
"""
|
| 184 |
+
[metadata]
|
| 185 |
+
name = foo
|
| 186 |
+
version = 0.0.0
|
| 187 |
+
|
| 188 |
+
[options]
|
| 189 |
+
py_modules=hello
|
| 190 |
+
setup_requires=six
|
| 191 |
+
"""
|
| 192 |
+
),
|
| 193 |
+
'hello.py': DALS(
|
| 194 |
+
"""
|
| 195 |
+
def run():
|
| 196 |
+
print('hello')
|
| 197 |
+
"""
|
| 198 |
+
),
|
| 199 |
+
},
|
| 200 |
+
{ # setup.cfg and setup.py
|
| 201 |
+
'setup.cfg': DALS(
|
| 202 |
+
"""
|
| 203 |
+
[metadata]
|
| 204 |
+
name = foo
|
| 205 |
+
version = 0.0.0
|
| 206 |
+
|
| 207 |
+
[options]
|
| 208 |
+
py_modules=hello
|
| 209 |
+
setup_requires=six
|
| 210 |
+
"""
|
| 211 |
+
),
|
| 212 |
+
'setup.py': "__import__('setuptools').setup()",
|
| 213 |
+
'hello.py': DALS(
|
| 214 |
+
"""
|
| 215 |
+
def run():
|
| 216 |
+
print('hello')
|
| 217 |
+
"""
|
| 218 |
+
),
|
| 219 |
+
},
|
| 220 |
+
]
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
class TestBuildMetaBackend:
|
| 224 |
+
backend_name = 'setuptools.build_meta'
|
| 225 |
+
|
| 226 |
+
def get_build_backend(self):
|
| 227 |
+
return BuildBackend(backend_name=self.backend_name)
|
| 228 |
+
|
| 229 |
+
@pytest.fixture(params=defns)
|
| 230 |
+
def build_backend(self, tmpdir, request):
|
| 231 |
+
path.build(request.param, prefix=str(tmpdir))
|
| 232 |
+
with tmpdir.as_cwd():
|
| 233 |
+
yield self.get_build_backend()
|
| 234 |
+
|
| 235 |
+
def test_get_requires_for_build_wheel(self, build_backend):
|
| 236 |
+
actual = build_backend.get_requires_for_build_wheel()
|
| 237 |
+
expected = ['six']
|
| 238 |
+
assert sorted(actual) == sorted(expected)
|
| 239 |
+
|
| 240 |
+
def test_get_requires_for_build_sdist(self, build_backend):
|
| 241 |
+
actual = build_backend.get_requires_for_build_sdist()
|
| 242 |
+
expected = ['six']
|
| 243 |
+
assert sorted(actual) == sorted(expected)
|
| 244 |
+
|
| 245 |
+
def test_build_wheel(self, build_backend):
|
| 246 |
+
dist_dir = os.path.abspath('pip-wheel')
|
| 247 |
+
os.makedirs(dist_dir)
|
| 248 |
+
wheel_name = build_backend.build_wheel(dist_dir)
|
| 249 |
+
|
| 250 |
+
wheel_file = os.path.join(dist_dir, wheel_name)
|
| 251 |
+
assert os.path.isfile(wheel_file)
|
| 252 |
+
|
| 253 |
+
# Temporary files should be removed
|
| 254 |
+
assert not os.path.isfile('world.py')
|
| 255 |
+
|
| 256 |
+
with ZipFile(wheel_file) as zipfile:
|
| 257 |
+
wheel_contents = set(zipfile.namelist())
|
| 258 |
+
|
| 259 |
+
# Each one of the examples have a single module
|
| 260 |
+
# that should be included in the distribution
|
| 261 |
+
python_scripts = (f for f in wheel_contents if f.endswith('.py'))
|
| 262 |
+
modules = [f for f in python_scripts if not f.endswith('setup.py')]
|
| 263 |
+
assert len(modules) == 1
|
| 264 |
+
|
| 265 |
+
@pytest.mark.parametrize('build_type', ('wheel', 'sdist'))
|
| 266 |
+
def test_build_with_existing_file_present(self, build_type, tmpdir_cwd):
|
| 267 |
+
# Building a sdist/wheel should still succeed if there's
|
| 268 |
+
# already a sdist/wheel in the destination directory.
|
| 269 |
+
files = {
|
| 270 |
+
'setup.py': "from setuptools import setup\nsetup()",
|
| 271 |
+
'VERSION': "0.0.1",
|
| 272 |
+
'setup.cfg': DALS(
|
| 273 |
+
"""
|
| 274 |
+
[metadata]
|
| 275 |
+
name = foo
|
| 276 |
+
version = file: VERSION
|
| 277 |
+
"""
|
| 278 |
+
),
|
| 279 |
+
'pyproject.toml': DALS(
|
| 280 |
+
"""
|
| 281 |
+
[build-system]
|
| 282 |
+
requires = ["setuptools", "wheel"]
|
| 283 |
+
build-backend = "setuptools.build_meta"
|
| 284 |
+
"""
|
| 285 |
+
),
|
| 286 |
+
}
|
| 287 |
+
|
| 288 |
+
path.build(files)
|
| 289 |
+
|
| 290 |
+
dist_dir = os.path.abspath('preexisting-' + build_type)
|
| 291 |
+
|
| 292 |
+
build_backend = self.get_build_backend()
|
| 293 |
+
build_method = getattr(build_backend, 'build_' + build_type)
|
| 294 |
+
|
| 295 |
+
# Build a first sdist/wheel.
|
| 296 |
+
# Note: this also check the destination directory is
|
| 297 |
+
# successfully created if it does not exist already.
|
| 298 |
+
first_result = build_method(dist_dir)
|
| 299 |
+
|
| 300 |
+
# Change version.
|
| 301 |
+
with open("VERSION", "wt", encoding="utf-8") as version_file:
|
| 302 |
+
version_file.write("0.0.2")
|
| 303 |
+
|
| 304 |
+
# Build a *second* sdist/wheel.
|
| 305 |
+
second_result = build_method(dist_dir)
|
| 306 |
+
|
| 307 |
+
assert os.path.isfile(os.path.join(dist_dir, first_result))
|
| 308 |
+
assert first_result != second_result
|
| 309 |
+
|
| 310 |
+
# And if rebuilding the exact same sdist/wheel?
|
| 311 |
+
open(os.path.join(dist_dir, second_result), 'wb').close()
|
| 312 |
+
third_result = build_method(dist_dir)
|
| 313 |
+
assert third_result == second_result
|
| 314 |
+
assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0
|
| 315 |
+
|
| 316 |
+
@pytest.mark.parametrize("setup_script", [None, SETUP_SCRIPT_STUB])
|
| 317 |
+
def test_build_with_pyproject_config(self, tmpdir, setup_script):
|
| 318 |
+
files = {
|
| 319 |
+
'pyproject.toml': DALS(
|
| 320 |
+
"""
|
| 321 |
+
[build-system]
|
| 322 |
+
requires = ["setuptools", "wheel"]
|
| 323 |
+
build-backend = "setuptools.build_meta"
|
| 324 |
+
|
| 325 |
+
[project]
|
| 326 |
+
name = "foo"
|
| 327 |
+
license = {text = "MIT"}
|
| 328 |
+
description = "This is a Python package"
|
| 329 |
+
dynamic = ["version", "readme"]
|
| 330 |
+
classifiers = [
|
| 331 |
+
"Development Status :: 5 - Production/Stable",
|
| 332 |
+
"Intended Audience :: Developers"
|
| 333 |
+
]
|
| 334 |
+
urls = {Homepage = "http://github.com"}
|
| 335 |
+
dependencies = [
|
| 336 |
+
"appdirs",
|
| 337 |
+
]
|
| 338 |
+
|
| 339 |
+
[project.optional-dependencies]
|
| 340 |
+
all = [
|
| 341 |
+
"tomli>=1",
|
| 342 |
+
"pyscaffold>=4,<5",
|
| 343 |
+
'importlib; python_version == "2.6"',
|
| 344 |
+
]
|
| 345 |
+
|
| 346 |
+
[project.scripts]
|
| 347 |
+
foo = "foo.cli:main"
|
| 348 |
+
|
| 349 |
+
[tool.setuptools]
|
| 350 |
+
zip-safe = false
|
| 351 |
+
package-dir = {"" = "src"}
|
| 352 |
+
packages = {find = {where = ["src"]}}
|
| 353 |
+
license-files = ["LICENSE*"]
|
| 354 |
+
|
| 355 |
+
[tool.setuptools.dynamic]
|
| 356 |
+
version = {attr = "foo.__version__"}
|
| 357 |
+
readme = {file = "README.rst"}
|
| 358 |
+
|
| 359 |
+
[tool.distutils.sdist]
|
| 360 |
+
formats = "gztar"
|
| 361 |
+
"""
|
| 362 |
+
),
|
| 363 |
+
"MANIFEST.in": DALS(
|
| 364 |
+
"""
|
| 365 |
+
global-include *.py *.txt
|
| 366 |
+
global-exclude *.py[cod]
|
| 367 |
+
"""
|
| 368 |
+
),
|
| 369 |
+
"README.rst": "This is a ``README``",
|
| 370 |
+
"LICENSE.txt": "---- placeholder MIT license ----",
|
| 371 |
+
"src": {
|
| 372 |
+
"foo": {
|
| 373 |
+
"__init__.py": "__version__ = '0.1'",
|
| 374 |
+
"__init__.pyi": "__version__: str",
|
| 375 |
+
"cli.py": "def main(): print('hello world')",
|
| 376 |
+
"data.txt": "def main(): print('hello world')",
|
| 377 |
+
"py.typed": "",
|
| 378 |
+
}
|
| 379 |
+
},
|
| 380 |
+
}
|
| 381 |
+
if setup_script:
|
| 382 |
+
files["setup.py"] = setup_script
|
| 383 |
+
|
| 384 |
+
build_backend = self.get_build_backend()
|
| 385 |
+
with tmpdir.as_cwd():
|
| 386 |
+
path.build(files)
|
| 387 |
+
sdist_path = build_backend.build_sdist("temp")
|
| 388 |
+
wheel_file = build_backend.build_wheel("temp")
|
| 389 |
+
|
| 390 |
+
with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
|
| 391 |
+
sdist_contents = set(tar.getnames())
|
| 392 |
+
|
| 393 |
+
with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
|
| 394 |
+
wheel_contents = set(zipfile.namelist())
|
| 395 |
+
metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8")
|
| 396 |
+
license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8")
|
| 397 |
+
epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8")
|
| 398 |
+
|
| 399 |
+
assert sdist_contents - {"foo-0.1/setup.py"} == {
|
| 400 |
+
'foo-0.1',
|
| 401 |
+
'foo-0.1/LICENSE.txt',
|
| 402 |
+
'foo-0.1/MANIFEST.in',
|
| 403 |
+
'foo-0.1/PKG-INFO',
|
| 404 |
+
'foo-0.1/README.rst',
|
| 405 |
+
'foo-0.1/pyproject.toml',
|
| 406 |
+
'foo-0.1/setup.cfg',
|
| 407 |
+
'foo-0.1/src',
|
| 408 |
+
'foo-0.1/src/foo',
|
| 409 |
+
'foo-0.1/src/foo/__init__.py',
|
| 410 |
+
'foo-0.1/src/foo/__init__.pyi',
|
| 411 |
+
'foo-0.1/src/foo/cli.py',
|
| 412 |
+
'foo-0.1/src/foo/data.txt',
|
| 413 |
+
'foo-0.1/src/foo/py.typed',
|
| 414 |
+
'foo-0.1/src/foo.egg-info',
|
| 415 |
+
'foo-0.1/src/foo.egg-info/PKG-INFO',
|
| 416 |
+
'foo-0.1/src/foo.egg-info/SOURCES.txt',
|
| 417 |
+
'foo-0.1/src/foo.egg-info/dependency_links.txt',
|
| 418 |
+
'foo-0.1/src/foo.egg-info/entry_points.txt',
|
| 419 |
+
'foo-0.1/src/foo.egg-info/requires.txt',
|
| 420 |
+
'foo-0.1/src/foo.egg-info/top_level.txt',
|
| 421 |
+
'foo-0.1/src/foo.egg-info/not-zip-safe',
|
| 422 |
+
}
|
| 423 |
+
assert wheel_contents == {
|
| 424 |
+
"foo/__init__.py",
|
| 425 |
+
"foo/__init__.pyi", # include type information by default
|
| 426 |
+
"foo/cli.py",
|
| 427 |
+
"foo/data.txt", # include_package_data defaults to True
|
| 428 |
+
"foo/py.typed", # include type information by default
|
| 429 |
+
"foo-0.1.dist-info/LICENSE.txt",
|
| 430 |
+
"foo-0.1.dist-info/METADATA",
|
| 431 |
+
"foo-0.1.dist-info/WHEEL",
|
| 432 |
+
"foo-0.1.dist-info/entry_points.txt",
|
| 433 |
+
"foo-0.1.dist-info/top_level.txt",
|
| 434 |
+
"foo-0.1.dist-info/RECORD",
|
| 435 |
+
}
|
| 436 |
+
assert license == "---- placeholder MIT license ----"
|
| 437 |
+
|
| 438 |
+
for line in (
|
| 439 |
+
"Summary: This is a Python package",
|
| 440 |
+
"License: MIT",
|
| 441 |
+
"Classifier: Intended Audience :: Developers",
|
| 442 |
+
"Requires-Dist: appdirs",
|
| 443 |
+
"Requires-Dist: " + str(Requirement('tomli>=1 ; extra == "all"')),
|
| 444 |
+
"Requires-Dist: "
|
| 445 |
+
+ str(Requirement('importlib; python_version=="2.6" and extra =="all"')),
|
| 446 |
+
):
|
| 447 |
+
assert line in metadata, (line, metadata)
|
| 448 |
+
|
| 449 |
+
assert metadata.strip().endswith("This is a ``README``")
|
| 450 |
+
assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main"
|
| 451 |
+
|
| 452 |
+
def test_static_metadata_in_pyproject_config(self, tmpdir):
|
| 453 |
+
# Make sure static metadata in pyproject.toml is not overwritten by setup.py
|
| 454 |
+
# as required by PEP 621
|
| 455 |
+
files = {
|
| 456 |
+
'pyproject.toml': DALS(
|
| 457 |
+
"""
|
| 458 |
+
[build-system]
|
| 459 |
+
requires = ["setuptools", "wheel"]
|
| 460 |
+
build-backend = "setuptools.build_meta"
|
| 461 |
+
|
| 462 |
+
[project]
|
| 463 |
+
name = "foo"
|
| 464 |
+
description = "This is a Python package"
|
| 465 |
+
version = "42"
|
| 466 |
+
dependencies = ["six"]
|
| 467 |
+
"""
|
| 468 |
+
),
|
| 469 |
+
'hello.py': DALS(
|
| 470 |
+
"""
|
| 471 |
+
def run():
|
| 472 |
+
print('hello')
|
| 473 |
+
"""
|
| 474 |
+
),
|
| 475 |
+
'setup.py': DALS(
|
| 476 |
+
"""
|
| 477 |
+
__import__('setuptools').setup(
|
| 478 |
+
name='bar',
|
| 479 |
+
version='13',
|
| 480 |
+
)
|
| 481 |
+
"""
|
| 482 |
+
),
|
| 483 |
+
}
|
| 484 |
+
build_backend = self.get_build_backend()
|
| 485 |
+
with tmpdir.as_cwd():
|
| 486 |
+
path.build(files)
|
| 487 |
+
sdist_path = build_backend.build_sdist("temp")
|
| 488 |
+
wheel_file = build_backend.build_wheel("temp")
|
| 489 |
+
|
| 490 |
+
assert (tmpdir / "temp/foo-42.tar.gz").exists()
|
| 491 |
+
assert (tmpdir / "temp/foo-42-py3-none-any.whl").exists()
|
| 492 |
+
assert not (tmpdir / "temp/bar-13.tar.gz").exists()
|
| 493 |
+
assert not (tmpdir / "temp/bar-42.tar.gz").exists()
|
| 494 |
+
assert not (tmpdir / "temp/foo-13.tar.gz").exists()
|
| 495 |
+
assert not (tmpdir / "temp/bar-13-py3-none-any.whl").exists()
|
| 496 |
+
assert not (tmpdir / "temp/bar-42-py3-none-any.whl").exists()
|
| 497 |
+
assert not (tmpdir / "temp/foo-13-py3-none-any.whl").exists()
|
| 498 |
+
|
| 499 |
+
with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
|
| 500 |
+
pkg_info = str(tar.extractfile('foo-42/PKG-INFO').read(), "utf-8")
|
| 501 |
+
members = tar.getnames()
|
| 502 |
+
assert "bar-13/PKG-INFO" not in members
|
| 503 |
+
|
| 504 |
+
with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
|
| 505 |
+
metadata = str(zipfile.read("foo-42.dist-info/METADATA"), "utf-8")
|
| 506 |
+
members = zipfile.namelist()
|
| 507 |
+
assert "bar-13.dist-info/METADATA" not in members
|
| 508 |
+
|
| 509 |
+
for file in pkg_info, metadata:
|
| 510 |
+
for line in ("Name: foo", "Version: 42"):
|
| 511 |
+
assert line in file
|
| 512 |
+
for line in ("Name: bar", "Version: 13"):
|
| 513 |
+
assert line not in file
|
| 514 |
+
|
| 515 |
+
def test_build_sdist(self, build_backend):
|
| 516 |
+
dist_dir = os.path.abspath('pip-sdist')
|
| 517 |
+
os.makedirs(dist_dir)
|
| 518 |
+
sdist_name = build_backend.build_sdist(dist_dir)
|
| 519 |
+
|
| 520 |
+
assert os.path.isfile(os.path.join(dist_dir, sdist_name))
|
| 521 |
+
|
| 522 |
+
def test_prepare_metadata_for_build_wheel(self, build_backend):
|
| 523 |
+
dist_dir = os.path.abspath('pip-dist-info')
|
| 524 |
+
os.makedirs(dist_dir)
|
| 525 |
+
|
| 526 |
+
dist_info = build_backend.prepare_metadata_for_build_wheel(dist_dir)
|
| 527 |
+
|
| 528 |
+
assert os.path.isfile(os.path.join(dist_dir, dist_info, 'METADATA'))
|
| 529 |
+
|
| 530 |
+
def test_prepare_metadata_inplace(self, build_backend):
|
| 531 |
+
"""
|
| 532 |
+
Some users might pass metadata_directory pre-populated with `.tox` or `.venv`.
|
| 533 |
+
See issue #3523.
|
| 534 |
+
"""
|
| 535 |
+
for pre_existing in [
|
| 536 |
+
".tox/python/lib/python3.10/site-packages/attrs-22.1.0.dist-info",
|
| 537 |
+
".tox/python/lib/python3.10/site-packages/autocommand-2.2.1.dist-info",
|
| 538 |
+
".nox/python/lib/python3.10/site-packages/build-0.8.0.dist-info",
|
| 539 |
+
".venv/python3.10/site-packages/click-8.1.3.dist-info",
|
| 540 |
+
"venv/python3.10/site-packages/distlib-0.3.5.dist-info",
|
| 541 |
+
"env/python3.10/site-packages/docutils-0.19.dist-info",
|
| 542 |
+
]:
|
| 543 |
+
os.makedirs(pre_existing, exist_ok=True)
|
| 544 |
+
dist_info = build_backend.prepare_metadata_for_build_wheel(".")
|
| 545 |
+
assert os.path.isfile(os.path.join(dist_info, 'METADATA'))
|
| 546 |
+
|
| 547 |
+
def test_build_sdist_explicit_dist(self, build_backend):
|
| 548 |
+
# explicitly specifying the dist folder should work
|
| 549 |
+
# the folder sdist_directory and the ``--dist-dir`` can be the same
|
| 550 |
+
dist_dir = os.path.abspath('dist')
|
| 551 |
+
sdist_name = build_backend.build_sdist(dist_dir)
|
| 552 |
+
assert os.path.isfile(os.path.join(dist_dir, sdist_name))
|
| 553 |
+
|
| 554 |
+
def test_build_sdist_version_change(self, build_backend):
|
| 555 |
+
sdist_into_directory = os.path.abspath("out_sdist")
|
| 556 |
+
os.makedirs(sdist_into_directory)
|
| 557 |
+
|
| 558 |
+
sdist_name = build_backend.build_sdist(sdist_into_directory)
|
| 559 |
+
assert os.path.isfile(os.path.join(sdist_into_directory, sdist_name))
|
| 560 |
+
|
| 561 |
+
# if the setup.py changes subsequent call of the build meta
|
| 562 |
+
# should still succeed, given the
|
| 563 |
+
# sdist_directory the frontend specifies is empty
|
| 564 |
+
setup_loc = os.path.abspath("setup.py")
|
| 565 |
+
if not os.path.exists(setup_loc):
|
| 566 |
+
setup_loc = os.path.abspath("setup.cfg")
|
| 567 |
+
|
| 568 |
+
with open(setup_loc, 'rt', encoding="utf-8") as file_handler:
|
| 569 |
+
content = file_handler.read()
|
| 570 |
+
with open(setup_loc, 'wt', encoding="utf-8") as file_handler:
|
| 571 |
+
file_handler.write(content.replace("version='0.0.0'", "version='0.0.1'"))
|
| 572 |
+
|
| 573 |
+
shutil.rmtree(sdist_into_directory)
|
| 574 |
+
os.makedirs(sdist_into_directory)
|
| 575 |
+
|
| 576 |
+
sdist_name = build_backend.build_sdist("out_sdist")
|
| 577 |
+
assert os.path.isfile(os.path.join(os.path.abspath("out_sdist"), sdist_name))
|
| 578 |
+
|
| 579 |
+
def test_build_sdist_pyproject_toml_exists(self, tmpdir_cwd):
|
| 580 |
+
files = {
|
| 581 |
+
'setup.py': DALS(
|
| 582 |
+
"""
|
| 583 |
+
__import__('setuptools').setup(
|
| 584 |
+
name='foo',
|
| 585 |
+
version='0.0.0',
|
| 586 |
+
py_modules=['hello']
|
| 587 |
+
)"""
|
| 588 |
+
),
|
| 589 |
+
'hello.py': '',
|
| 590 |
+
'pyproject.toml': DALS(
|
| 591 |
+
"""
|
| 592 |
+
[build-system]
|
| 593 |
+
requires = ["setuptools", "wheel"]
|
| 594 |
+
build-backend = "setuptools.build_meta"
|
| 595 |
+
"""
|
| 596 |
+
),
|
| 597 |
+
}
|
| 598 |
+
path.build(files)
|
| 599 |
+
build_backend = self.get_build_backend()
|
| 600 |
+
targz_path = build_backend.build_sdist("temp")
|
| 601 |
+
with tarfile.open(os.path.join("temp", targz_path)) as tar:
|
| 602 |
+
assert any('pyproject.toml' in name for name in tar.getnames())
|
| 603 |
+
|
| 604 |
+
def test_build_sdist_setup_py_exists(self, tmpdir_cwd):
|
| 605 |
+
# If build_sdist is called from a script other than setup.py,
|
| 606 |
+
# ensure setup.py is included
|
| 607 |
+
path.build(defns[0])
|
| 608 |
+
|
| 609 |
+
build_backend = self.get_build_backend()
|
| 610 |
+
targz_path = build_backend.build_sdist("temp")
|
| 611 |
+
with tarfile.open(os.path.join("temp", targz_path)) as tar:
|
| 612 |
+
assert any('setup.py' in name for name in tar.getnames())
|
| 613 |
+
|
| 614 |
+
def test_build_sdist_setup_py_manifest_excluded(self, tmpdir_cwd):
|
| 615 |
+
# Ensure that MANIFEST.in can exclude setup.py
|
| 616 |
+
files = {
|
| 617 |
+
'setup.py': DALS(
|
| 618 |
+
"""
|
| 619 |
+
__import__('setuptools').setup(
|
| 620 |
+
name='foo',
|
| 621 |
+
version='0.0.0',
|
| 622 |
+
py_modules=['hello']
|
| 623 |
+
)"""
|
| 624 |
+
),
|
| 625 |
+
'hello.py': '',
|
| 626 |
+
'MANIFEST.in': DALS(
|
| 627 |
+
"""
|
| 628 |
+
exclude setup.py
|
| 629 |
+
"""
|
| 630 |
+
),
|
| 631 |
+
}
|
| 632 |
+
|
| 633 |
+
path.build(files)
|
| 634 |
+
|
| 635 |
+
build_backend = self.get_build_backend()
|
| 636 |
+
targz_path = build_backend.build_sdist("temp")
|
| 637 |
+
with tarfile.open(os.path.join("temp", targz_path)) as tar:
|
| 638 |
+
assert not any('setup.py' in name for name in tar.getnames())
|
| 639 |
+
|
| 640 |
+
def test_build_sdist_builds_targz_even_if_zip_indicated(self, tmpdir_cwd):
|
| 641 |
+
files = {
|
| 642 |
+
'setup.py': DALS(
|
| 643 |
+
"""
|
| 644 |
+
__import__('setuptools').setup(
|
| 645 |
+
name='foo',
|
| 646 |
+
version='0.0.0',
|
| 647 |
+
py_modules=['hello']
|
| 648 |
+
)"""
|
| 649 |
+
),
|
| 650 |
+
'hello.py': '',
|
| 651 |
+
'setup.cfg': DALS(
|
| 652 |
+
"""
|
| 653 |
+
[sdist]
|
| 654 |
+
formats=zip
|
| 655 |
+
"""
|
| 656 |
+
),
|
| 657 |
+
}
|
| 658 |
+
|
| 659 |
+
path.build(files)
|
| 660 |
+
|
| 661 |
+
build_backend = self.get_build_backend()
|
| 662 |
+
build_backend.build_sdist("temp")
|
| 663 |
+
|
| 664 |
+
_relative_path_import_files = {
|
| 665 |
+
'setup.py': DALS(
|
| 666 |
+
"""
|
| 667 |
+
__import__('setuptools').setup(
|
| 668 |
+
name='foo',
|
| 669 |
+
version=__import__('hello').__version__,
|
| 670 |
+
py_modules=['hello']
|
| 671 |
+
)"""
|
| 672 |
+
),
|
| 673 |
+
'hello.py': '__version__ = "0.0.0"',
|
| 674 |
+
'setup.cfg': DALS(
|
| 675 |
+
"""
|
| 676 |
+
[sdist]
|
| 677 |
+
formats=zip
|
| 678 |
+
"""
|
| 679 |
+
),
|
| 680 |
+
}
|
| 681 |
+
|
| 682 |
+
def test_build_sdist_relative_path_import(self, tmpdir_cwd):
|
| 683 |
+
path.build(self._relative_path_import_files)
|
| 684 |
+
build_backend = self.get_build_backend()
|
| 685 |
+
with pytest.raises(ImportError, match="^No module named 'hello'$"):
|
| 686 |
+
build_backend.build_sdist("temp")
|
| 687 |
+
|
| 688 |
+
_simple_pyproject_example = {
|
| 689 |
+
"pyproject.toml": DALS(
|
| 690 |
+
"""
|
| 691 |
+
[project]
|
| 692 |
+
name = "proj"
|
| 693 |
+
version = "42"
|
| 694 |
+
"""
|
| 695 |
+
),
|
| 696 |
+
"src": {"proj": {"__init__.py": ""}},
|
| 697 |
+
}
|
| 698 |
+
|
| 699 |
+
def _assert_link_tree(self, parent_dir):
|
| 700 |
+
"""All files in the directory should be either links or hard links"""
|
| 701 |
+
files = list(Path(parent_dir).glob("**/*"))
|
| 702 |
+
assert files # Should not be empty
|
| 703 |
+
for file in files:
|
| 704 |
+
assert file.is_symlink() or os.stat(file).st_nlink > 0
|
| 705 |
+
|
| 706 |
+
def test_editable_without_config_settings(self, tmpdir_cwd):
|
| 707 |
+
"""
|
| 708 |
+
Sanity check to ensure tests with --mode=strict are different from the ones
|
| 709 |
+
without --mode.
|
| 710 |
+
|
| 711 |
+
--mode=strict should create a local directory with a package tree.
|
| 712 |
+
The directory should not get created otherwise.
|
| 713 |
+
"""
|
| 714 |
+
path.build(self._simple_pyproject_example)
|
| 715 |
+
build_backend = self.get_build_backend()
|
| 716 |
+
assert not Path("build").exists()
|
| 717 |
+
build_backend.build_editable("temp")
|
| 718 |
+
assert not Path("build").exists()
|
| 719 |
+
|
| 720 |
+
def test_build_wheel_inplace(self, tmpdir_cwd):
|
| 721 |
+
config_settings = {"--build-option": ["build_ext", "--inplace"]}
|
| 722 |
+
path.build(self._simple_pyproject_example)
|
| 723 |
+
build_backend = self.get_build_backend()
|
| 724 |
+
assert not Path("build").exists()
|
| 725 |
+
Path("build").mkdir()
|
| 726 |
+
build_backend.prepare_metadata_for_build_wheel("build", config_settings)
|
| 727 |
+
build_backend.build_wheel("build", config_settings)
|
| 728 |
+
assert Path("build/proj-42-py3-none-any.whl").exists()
|
| 729 |
+
|
| 730 |
+
@pytest.mark.parametrize("config_settings", [{"editable-mode": "strict"}])
|
| 731 |
+
def test_editable_with_config_settings(self, tmpdir_cwd, config_settings):
|
| 732 |
+
path.build({**self._simple_pyproject_example, '_meta': {}})
|
| 733 |
+
assert not Path("build").exists()
|
| 734 |
+
build_backend = self.get_build_backend()
|
| 735 |
+
build_backend.prepare_metadata_for_build_editable("_meta", config_settings)
|
| 736 |
+
build_backend.build_editable("temp", config_settings, "_meta")
|
| 737 |
+
self._assert_link_tree(next(Path("build").glob("__editable__.*")))
|
| 738 |
+
|
| 739 |
+
@pytest.mark.parametrize(
|
| 740 |
+
("setup_literal", "requirements"),
|
| 741 |
+
[
|
| 742 |
+
("'foo'", ['foo']),
|
| 743 |
+
("['foo']", ['foo']),
|
| 744 |
+
(r"'foo\n'", ['foo']),
|
| 745 |
+
(r"'foo\n\n'", ['foo']),
|
| 746 |
+
("['foo', 'bar']", ['foo', 'bar']),
|
| 747 |
+
(r"'# Has a comment line\nfoo'", ['foo']),
|
| 748 |
+
(r"'foo # Has an inline comment'", ['foo']),
|
| 749 |
+
(r"'foo \\\n >=3.0'", ['foo>=3.0']),
|
| 750 |
+
(r"'foo\nbar'", ['foo', 'bar']),
|
| 751 |
+
(r"'foo\nbar\n'", ['foo', 'bar']),
|
| 752 |
+
(r"['foo\n', 'bar\n']", ['foo', 'bar']),
|
| 753 |
+
],
|
| 754 |
+
)
|
| 755 |
+
@pytest.mark.parametrize('use_wheel', [True, False])
|
| 756 |
+
def test_setup_requires(self, setup_literal, requirements, use_wheel, tmpdir_cwd):
|
| 757 |
+
files = {
|
| 758 |
+
'setup.py': DALS(
|
| 759 |
+
"""
|
| 760 |
+
from setuptools import setup
|
| 761 |
+
|
| 762 |
+
setup(
|
| 763 |
+
name="qux",
|
| 764 |
+
version="0.0.0",
|
| 765 |
+
py_modules=["hello"],
|
| 766 |
+
setup_requires={setup_literal},
|
| 767 |
+
)
|
| 768 |
+
"""
|
| 769 |
+
).format(setup_literal=setup_literal),
|
| 770 |
+
'hello.py': DALS(
|
| 771 |
+
"""
|
| 772 |
+
def run():
|
| 773 |
+
print('hello')
|
| 774 |
+
"""
|
| 775 |
+
),
|
| 776 |
+
}
|
| 777 |
+
|
| 778 |
+
path.build(files)
|
| 779 |
+
|
| 780 |
+
build_backend = self.get_build_backend()
|
| 781 |
+
|
| 782 |
+
if use_wheel:
|
| 783 |
+
get_requires = build_backend.get_requires_for_build_wheel
|
| 784 |
+
else:
|
| 785 |
+
get_requires = build_backend.get_requires_for_build_sdist
|
| 786 |
+
|
| 787 |
+
# Ensure that the build requirements are properly parsed
|
| 788 |
+
expected = sorted(requirements)
|
| 789 |
+
actual = get_requires()
|
| 790 |
+
|
| 791 |
+
assert expected == sorted(actual)
|
| 792 |
+
|
| 793 |
+
def test_setup_requires_with_auto_discovery(self, tmpdir_cwd):
|
| 794 |
+
# Make sure patches introduced to retrieve setup_requires don't accidentally
|
| 795 |
+
# activate auto-discovery and cause problems due to the incomplete set of
|
| 796 |
+
# attributes passed to MinimalDistribution
|
| 797 |
+
files = {
|
| 798 |
+
'pyproject.toml': DALS(
|
| 799 |
+
"""
|
| 800 |
+
[project]
|
| 801 |
+
name = "proj"
|
| 802 |
+
version = "42"
|
| 803 |
+
"""
|
| 804 |
+
),
|
| 805 |
+
"setup.py": DALS(
|
| 806 |
+
"""
|
| 807 |
+
__import__('setuptools').setup(
|
| 808 |
+
setup_requires=["foo"],
|
| 809 |
+
py_modules = ["hello", "world"]
|
| 810 |
+
)
|
| 811 |
+
"""
|
| 812 |
+
),
|
| 813 |
+
'hello.py': "'hello'",
|
| 814 |
+
'world.py': "'world'",
|
| 815 |
+
}
|
| 816 |
+
path.build(files)
|
| 817 |
+
build_backend = self.get_build_backend()
|
| 818 |
+
setup_requires = build_backend.get_requires_for_build_wheel()
|
| 819 |
+
assert setup_requires == ["foo"]
|
| 820 |
+
|
| 821 |
+
def test_dont_install_setup_requires(self, tmpdir_cwd):
|
| 822 |
+
files = {
|
| 823 |
+
'setup.py': DALS(
|
| 824 |
+
"""
|
| 825 |
+
from setuptools import setup
|
| 826 |
+
|
| 827 |
+
setup(
|
| 828 |
+
name="qux",
|
| 829 |
+
version="0.0.0",
|
| 830 |
+
py_modules=["hello"],
|
| 831 |
+
setup_requires=["does-not-exist >99"],
|
| 832 |
+
)
|
| 833 |
+
"""
|
| 834 |
+
),
|
| 835 |
+
'hello.py': DALS(
|
| 836 |
+
"""
|
| 837 |
+
def run():
|
| 838 |
+
print('hello')
|
| 839 |
+
"""
|
| 840 |
+
),
|
| 841 |
+
}
|
| 842 |
+
|
| 843 |
+
path.build(files)
|
| 844 |
+
|
| 845 |
+
build_backend = self.get_build_backend()
|
| 846 |
+
|
| 847 |
+
dist_dir = os.path.abspath('pip-dist-info')
|
| 848 |
+
os.makedirs(dist_dir)
|
| 849 |
+
|
| 850 |
+
# does-not-exist can't be satisfied, so if it attempts to install
|
| 851 |
+
# setup_requires, it will fail.
|
| 852 |
+
build_backend.prepare_metadata_for_build_wheel(dist_dir)
|
| 853 |
+
|
| 854 |
+
_sys_argv_0_passthrough = {
|
| 855 |
+
'setup.py': DALS(
|
| 856 |
+
"""
|
| 857 |
+
import os
|
| 858 |
+
import sys
|
| 859 |
+
|
| 860 |
+
__import__('setuptools').setup(
|
| 861 |
+
name='foo',
|
| 862 |
+
version='0.0.0',
|
| 863 |
+
)
|
| 864 |
+
|
| 865 |
+
sys_argv = os.path.abspath(sys.argv[0])
|
| 866 |
+
file_path = os.path.abspath('setup.py')
|
| 867 |
+
assert sys_argv == file_path
|
| 868 |
+
"""
|
| 869 |
+
)
|
| 870 |
+
}
|
| 871 |
+
|
| 872 |
+
def test_sys_argv_passthrough(self, tmpdir_cwd):
|
| 873 |
+
path.build(self._sys_argv_0_passthrough)
|
| 874 |
+
build_backend = self.get_build_backend()
|
| 875 |
+
with pytest.raises(AssertionError):
|
| 876 |
+
build_backend.build_sdist("temp")
|
| 877 |
+
|
| 878 |
+
_setup_py_file_abspath = {
|
| 879 |
+
'setup.py': DALS(
|
| 880 |
+
"""
|
| 881 |
+
import os
|
| 882 |
+
assert os.path.isabs(__file__)
|
| 883 |
+
__import__('setuptools').setup(
|
| 884 |
+
name='foo',
|
| 885 |
+
version='0.0.0',
|
| 886 |
+
py_modules=['hello'],
|
| 887 |
+
setup_requires=['six'],
|
| 888 |
+
)
|
| 889 |
+
"""
|
| 890 |
+
)
|
| 891 |
+
}
|
| 892 |
+
|
| 893 |
+
def test_setup_py_file_abspath(self, tmpdir_cwd):
|
| 894 |
+
path.build(self._setup_py_file_abspath)
|
| 895 |
+
build_backend = self.get_build_backend()
|
| 896 |
+
build_backend.build_sdist("temp")
|
| 897 |
+
|
| 898 |
+
@pytest.mark.parametrize('build_hook', ('build_sdist', 'build_wheel'))
|
| 899 |
+
def test_build_with_empty_setuppy(self, build_backend, build_hook):
|
| 900 |
+
files = {'setup.py': ''}
|
| 901 |
+
path.build(files)
|
| 902 |
+
|
| 903 |
+
msg = re.escape('No distribution was found.')
|
| 904 |
+
with pytest.raises(ValueError, match=msg):
|
| 905 |
+
getattr(build_backend, build_hook)("temp")
|
| 906 |
+
|
| 907 |
+
|
| 908 |
+
class TestBuildMetaLegacyBackend(TestBuildMetaBackend):
|
| 909 |
+
backend_name = 'setuptools.build_meta:__legacy__'
|
| 910 |
+
|
| 911 |
+
# build_meta_legacy-specific tests
|
| 912 |
+
def test_build_sdist_relative_path_import(self, tmpdir_cwd):
|
| 913 |
+
# This must fail in build_meta, but must pass in build_meta_legacy
|
| 914 |
+
path.build(self._relative_path_import_files)
|
| 915 |
+
|
| 916 |
+
build_backend = self.get_build_backend()
|
| 917 |
+
build_backend.build_sdist("temp")
|
| 918 |
+
|
| 919 |
+
def test_sys_argv_passthrough(self, tmpdir_cwd):
|
| 920 |
+
path.build(self._sys_argv_0_passthrough)
|
| 921 |
+
|
| 922 |
+
build_backend = self.get_build_backend()
|
| 923 |
+
build_backend.build_sdist("temp")
|
| 924 |
+
|
| 925 |
+
|
| 926 |
+
def test_legacy_editable_install(venv, tmpdir, tmpdir_cwd):
|
| 927 |
+
pyproject = """
|
| 928 |
+
[build-system]
|
| 929 |
+
requires = ["setuptools"]
|
| 930 |
+
build-backend = "setuptools.build_meta"
|
| 931 |
+
[project]
|
| 932 |
+
name = "myproj"
|
| 933 |
+
version = "42"
|
| 934 |
+
"""
|
| 935 |
+
path.build({"pyproject.toml": DALS(pyproject), "mymod.py": ""})
|
| 936 |
+
|
| 937 |
+
# First: sanity check
|
| 938 |
+
cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
|
| 939 |
+
output = venv.run(cmd, cwd=tmpdir).lower()
|
| 940 |
+
assert "running setup.py develop for myproj" not in output
|
| 941 |
+
assert "created wheel for myproj" in output
|
| 942 |
+
|
| 943 |
+
# Then: real test
|
| 944 |
+
env = {**os.environ, "SETUPTOOLS_ENABLE_FEATURES": "legacy-editable"}
|
| 945 |
+
cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
|
| 946 |
+
output = venv.run(cmd, cwd=tmpdir, env=env).lower()
|
| 947 |
+
assert "running setup.py develop for myproj" in output
|
| 948 |
+
|
| 949 |
+
|
| 950 |
+
@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
|
| 951 |
+
def test_sys_exit_0_in_setuppy(monkeypatch, tmp_path):
|
| 952 |
+
"""Setuptools should be resilient to setup.py with ``sys.exit(0)`` (#3973)."""
|
| 953 |
+
monkeypatch.chdir(tmp_path)
|
| 954 |
+
setuppy = """
|
| 955 |
+
import sys, setuptools
|
| 956 |
+
setuptools.setup(name='foo', version='0.0.0')
|
| 957 |
+
sys.exit(0)
|
| 958 |
+
"""
|
| 959 |
+
(tmp_path / "setup.py").write_text(DALS(setuppy), encoding="utf-8")
|
| 960 |
+
backend = BuildBackend(backend_name="setuptools.build_meta")
|
| 961 |
+
assert backend.get_requires_for_build_wheel() == []
|
| 962 |
+
|
| 963 |
+
|
| 964 |
+
def test_system_exit_in_setuppy(monkeypatch, tmp_path):
|
| 965 |
+
monkeypatch.chdir(tmp_path)
|
| 966 |
+
setuppy = "import sys; sys.exit('some error')"
|
| 967 |
+
(tmp_path / "setup.py").write_text(setuppy, encoding="utf-8")
|
| 968 |
+
with pytest.raises(SystemExit, match="some error"):
|
| 969 |
+
backend = BuildBackend(backend_name="setuptools.build_meta")
|
| 970 |
+
backend.get_requires_for_build_wheel()
|
llava/lib/python3.10/site-packages/setuptools/tests/test_build_py.py
ADDED
|
@@ -0,0 +1,480 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import shutil
|
| 3 |
+
import stat
|
| 4 |
+
import warnings
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from unittest.mock import Mock
|
| 7 |
+
|
| 8 |
+
import jaraco.path
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
from setuptools import SetuptoolsDeprecationWarning
|
| 12 |
+
from setuptools.dist import Distribution
|
| 13 |
+
|
| 14 |
+
from .textwrap import DALS
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def test_directories_in_package_data_glob(tmpdir_cwd):
|
| 18 |
+
"""
|
| 19 |
+
Directories matching the glob in package_data should
|
| 20 |
+
not be included in the package data.
|
| 21 |
+
|
| 22 |
+
Regression test for #261.
|
| 23 |
+
"""
|
| 24 |
+
dist = Distribution(
|
| 25 |
+
dict(
|
| 26 |
+
script_name='setup.py',
|
| 27 |
+
script_args=['build_py'],
|
| 28 |
+
packages=[''],
|
| 29 |
+
package_data={'': ['path/*']},
|
| 30 |
+
)
|
| 31 |
+
)
|
| 32 |
+
os.makedirs('path/subpath')
|
| 33 |
+
dist.parse_command_line()
|
| 34 |
+
dist.run_commands()
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def test_recursive_in_package_data_glob(tmpdir_cwd):
|
| 38 |
+
"""
|
| 39 |
+
Files matching recursive globs (**) in package_data should
|
| 40 |
+
be included in the package data.
|
| 41 |
+
|
| 42 |
+
#1806
|
| 43 |
+
"""
|
| 44 |
+
dist = Distribution(
|
| 45 |
+
dict(
|
| 46 |
+
script_name='setup.py',
|
| 47 |
+
script_args=['build_py'],
|
| 48 |
+
packages=[''],
|
| 49 |
+
package_data={'': ['path/**/data']},
|
| 50 |
+
)
|
| 51 |
+
)
|
| 52 |
+
os.makedirs('path/subpath/subsubpath')
|
| 53 |
+
open('path/subpath/subsubpath/data', 'wb').close()
|
| 54 |
+
|
| 55 |
+
dist.parse_command_line()
|
| 56 |
+
dist.run_commands()
|
| 57 |
+
|
| 58 |
+
assert stat.S_ISREG(os.stat('build/lib/path/subpath/subsubpath/data').st_mode), (
|
| 59 |
+
"File is not included"
|
| 60 |
+
)
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def test_read_only(tmpdir_cwd):
|
| 64 |
+
"""
|
| 65 |
+
Ensure read-only flag is not preserved in copy
|
| 66 |
+
for package modules and package data, as that
|
| 67 |
+
causes problems with deleting read-only files on
|
| 68 |
+
Windows.
|
| 69 |
+
|
| 70 |
+
#1451
|
| 71 |
+
"""
|
| 72 |
+
dist = Distribution(
|
| 73 |
+
dict(
|
| 74 |
+
script_name='setup.py',
|
| 75 |
+
script_args=['build_py'],
|
| 76 |
+
packages=['pkg'],
|
| 77 |
+
package_data={'pkg': ['data.dat']},
|
| 78 |
+
)
|
| 79 |
+
)
|
| 80 |
+
os.makedirs('pkg')
|
| 81 |
+
open('pkg/__init__.py', 'wb').close()
|
| 82 |
+
open('pkg/data.dat', 'wb').close()
|
| 83 |
+
os.chmod('pkg/__init__.py', stat.S_IREAD)
|
| 84 |
+
os.chmod('pkg/data.dat', stat.S_IREAD)
|
| 85 |
+
dist.parse_command_line()
|
| 86 |
+
dist.run_commands()
|
| 87 |
+
shutil.rmtree('build')
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
@pytest.mark.xfail(
|
| 91 |
+
'platform.system() == "Windows"',
|
| 92 |
+
reason="On Windows, files do not have executable bits",
|
| 93 |
+
raises=AssertionError,
|
| 94 |
+
strict=True,
|
| 95 |
+
)
|
| 96 |
+
def test_executable_data(tmpdir_cwd):
|
| 97 |
+
"""
|
| 98 |
+
Ensure executable bit is preserved in copy for
|
| 99 |
+
package data, as users rely on it for scripts.
|
| 100 |
+
|
| 101 |
+
#2041
|
| 102 |
+
"""
|
| 103 |
+
dist = Distribution(
|
| 104 |
+
dict(
|
| 105 |
+
script_name='setup.py',
|
| 106 |
+
script_args=['build_py'],
|
| 107 |
+
packages=['pkg'],
|
| 108 |
+
package_data={'pkg': ['run-me']},
|
| 109 |
+
)
|
| 110 |
+
)
|
| 111 |
+
os.makedirs('pkg')
|
| 112 |
+
open('pkg/__init__.py', 'wb').close()
|
| 113 |
+
open('pkg/run-me', 'wb').close()
|
| 114 |
+
os.chmod('pkg/run-me', 0o700)
|
| 115 |
+
|
| 116 |
+
dist.parse_command_line()
|
| 117 |
+
dist.run_commands()
|
| 118 |
+
|
| 119 |
+
assert os.stat('build/lib/pkg/run-me').st_mode & stat.S_IEXEC, (
|
| 120 |
+
"Script is not executable"
|
| 121 |
+
)
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
EXAMPLE_WITH_MANIFEST = {
|
| 125 |
+
"setup.cfg": DALS(
|
| 126 |
+
"""
|
| 127 |
+
[metadata]
|
| 128 |
+
name = mypkg
|
| 129 |
+
version = 42
|
| 130 |
+
|
| 131 |
+
[options]
|
| 132 |
+
include_package_data = True
|
| 133 |
+
packages = find:
|
| 134 |
+
|
| 135 |
+
[options.packages.find]
|
| 136 |
+
exclude = *.tests*
|
| 137 |
+
"""
|
| 138 |
+
),
|
| 139 |
+
"mypkg": {
|
| 140 |
+
"__init__.py": "",
|
| 141 |
+
"resource_file.txt": "",
|
| 142 |
+
"tests": {
|
| 143 |
+
"__init__.py": "",
|
| 144 |
+
"test_mypkg.py": "",
|
| 145 |
+
"test_file.txt": "",
|
| 146 |
+
},
|
| 147 |
+
},
|
| 148 |
+
"MANIFEST.in": DALS(
|
| 149 |
+
"""
|
| 150 |
+
global-include *.py *.txt
|
| 151 |
+
global-exclude *.py[cod]
|
| 152 |
+
prune dist
|
| 153 |
+
prune build
|
| 154 |
+
prune *.egg-info
|
| 155 |
+
"""
|
| 156 |
+
),
|
| 157 |
+
}
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def test_excluded_subpackages(tmpdir_cwd):
|
| 161 |
+
jaraco.path.build(EXAMPLE_WITH_MANIFEST)
|
| 162 |
+
dist = Distribution({"script_name": "%PEP 517%"})
|
| 163 |
+
dist.parse_config_files()
|
| 164 |
+
|
| 165 |
+
build_py = dist.get_command_obj("build_py")
|
| 166 |
+
|
| 167 |
+
msg = r"Python recognizes 'mypkg\.tests' as an importable package"
|
| 168 |
+
with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
|
| 169 |
+
# TODO: To fix #3260 we need some transition period to deprecate the
|
| 170 |
+
# existing behavior of `include_package_data`. After the transition, we
|
| 171 |
+
# should remove the warning and fix the behaviour.
|
| 172 |
+
|
| 173 |
+
if os.getenv("SETUPTOOLS_USE_DISTUTILS") == "stdlib":
|
| 174 |
+
# pytest.warns reset the warning filter temporarily
|
| 175 |
+
# https://github.com/pytest-dev/pytest/issues/4011#issuecomment-423494810
|
| 176 |
+
warnings.filterwarnings(
|
| 177 |
+
"ignore",
|
| 178 |
+
"'encoding' argument not specified",
|
| 179 |
+
module="distutils.text_file",
|
| 180 |
+
# This warning is already fixed in pypa/distutils but not in stdlib
|
| 181 |
+
)
|
| 182 |
+
|
| 183 |
+
build_py.finalize_options()
|
| 184 |
+
build_py.run()
|
| 185 |
+
|
| 186 |
+
build_dir = Path(dist.get_command_obj("build_py").build_lib)
|
| 187 |
+
assert (build_dir / "mypkg/__init__.py").exists()
|
| 188 |
+
assert (build_dir / "mypkg/resource_file.txt").exists()
|
| 189 |
+
|
| 190 |
+
# Setuptools is configured to ignore `mypkg.tests`, therefore the following
|
| 191 |
+
# files/dirs should not be included in the distribution.
|
| 192 |
+
for f in [
|
| 193 |
+
"mypkg/tests/__init__.py",
|
| 194 |
+
"mypkg/tests/test_mypkg.py",
|
| 195 |
+
"mypkg/tests/test_file.txt",
|
| 196 |
+
"mypkg/tests",
|
| 197 |
+
]:
|
| 198 |
+
with pytest.raises(AssertionError):
|
| 199 |
+
# TODO: Enforce the following assertion once #3260 is fixed
|
| 200 |
+
# (remove context manager and the following xfail).
|
| 201 |
+
assert not (build_dir / f).exists()
|
| 202 |
+
|
| 203 |
+
pytest.xfail("#3260")
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
def test_existing_egg_info(tmpdir_cwd, monkeypatch):
    """When provided with the ``existing_egg_info_dir`` attribute, build_py should not
    attempt to run egg_info again.
    """
    # == Pre-condition ==
    # Generate an egg-info dir
    jaraco.path.build(EXAMPLE_WITH_MANIFEST)
    dist = Distribution({"script_name": "%PEP 517%"})
    dist.parse_config_files()
    assert dist.include_package_data

    egg_info = dist.get_command_obj("egg_info")
    dist.run_command("egg_info")
    egg_info_dir = next(Path(egg_info.egg_base).glob("*.egg-info"))
    assert egg_info_dir.is_dir()

    # == Setup ==
    build_py = dist.get_command_obj("build_py")
    build_py.finalize_options()
    egg_info = dist.get_command_obj("egg_info")
    # Wrap egg_info.run so we can observe whether build_py triggers it.
    egg_info_run = Mock(side_effect=egg_info.run)
    monkeypatch.setattr(egg_info, "run", egg_info_run)

    # == Remove caches ==
    # egg_info is called when build_py looks for data_files, which gets cached.
    # We need to ensure it is not cached yet, otherwise it may impact on the tests
    build_py.__dict__.pop('data_files', None)
    dist.reinitialize_command(egg_info)

    # == Sanity check ==
    # Ensure that if existing_egg_info is not given, build_py attempts to run egg_info
    build_py.existing_egg_info_dir = None
    build_py.run()
    egg_info_run.assert_called()

    # == Remove caches ==
    egg_info_run.reset_mock()
    build_py.__dict__.pop('data_files', None)
    dist.reinitialize_command(egg_info)

    # == Actual test ==
    # Ensure that if existing_egg_info_dir is given, egg_info doesn't run
    build_py.existing_egg_info_dir = egg_info_dir
    build_py.run()
    egg_info_run.assert_not_called()
    assert build_py.data_files

    # Make sure the list of outputs is actually OK.
    # NOTE: previously this was a lazy ``map`` object, which made the
    # ``assert outputs`` truthiness check vacuous (map objects are always
    # truthy, even when empty); materialize it so the check is meaningful.
    outputs = [x.replace(os.sep, "/") for x in build_py.get_outputs()]
    assert outputs
    example = str(Path(build_py.build_lib, "mypkg/__init__.py")).replace(os.sep, "/")
    assert example in outputs
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
# Project layout exercising arbitrary ``package-dir`` mappings:
# ``mypkg`` resolves via the ``"" = "src"`` root mapping, while
# ``mypkg.sub2`` and ``mypkg.sub2.nested`` are remapped to directories
# whose names differ from the package names (``_sub2`` and ``other``).
EXAMPLE_ARBITRARY_MAPPING = {
    "pyproject.toml": DALS(
        """
        [project]
        name = "mypkg"
        version = "42"

        [tool.setuptools]
        packages = ["mypkg", "mypkg.sub1", "mypkg.sub2", "mypkg.sub2.nested"]

        [tool.setuptools.package-dir]
        "" = "src"
        "mypkg.sub2" = "src/mypkg/_sub2"
        "mypkg.sub2.nested" = "other"
        """
    ),
    "src": {
        "mypkg": {
            "__init__.py": "",
            "resource_file.txt": "",
            "sub1": {
                "__init__.py": "",
                "mod1.py": "",
            },
            # Source dir for the remapped ``mypkg.sub2`` package.
            "_sub2": {
                "mod2.py": "",
            },
        },
    },
    # Source dir for the remapped ``mypkg.sub2.nested`` package.
    "other": {
        "__init__.py": "",
        "mod3.py": "",
    },
    "MANIFEST.in": DALS(
        """
        global-include *.py *.txt
        global-exclude *.py[cod]
        """
    ),
}
|
| 301 |
+
|
| 302 |
+
|
| 303 |
+
def test_get_outputs(tmpdir_cwd):
    """``get_outputs``/``get_output_mapping`` must honour arbitrary package-dir maps.

    Uses :data:`EXAMPLE_ARBITRARY_MAPPING`, where package names and source
    directories intentionally differ (``_sub2`` -> ``mypkg.sub2``,
    ``other`` -> ``mypkg.sub2.nested``).
    """
    jaraco.path.build(EXAMPLE_ARBITRARY_MAPPING)
    dist = Distribution({"script_name": "%test%"})
    dist.parse_config_files()

    build_py = dist.get_command_obj("build_py")
    # Editable installs rely on these APIs, so exercise them in editable mode.
    build_py.editable_mode = True
    build_py.ensure_finalized()
    # Normalize path separators so the assertions work on Windows too.
    build_lib = build_py.build_lib.replace(os.sep, "/")
    outputs = {x.replace(os.sep, "/") for x in build_py.get_outputs()}
    assert outputs == {
        f"{build_lib}/mypkg/__init__.py",
        f"{build_lib}/mypkg/resource_file.txt",
        f"{build_lib}/mypkg/sub1/__init__.py",
        f"{build_lib}/mypkg/sub1/mod1.py",
        f"{build_lib}/mypkg/sub2/mod2.py",
        f"{build_lib}/mypkg/sub2/nested/__init__.py",
        f"{build_lib}/mypkg/sub2/nested/mod3.py",
    }
    # The mapping goes from build-tree path to the original source path.
    mapping = {
        k.replace(os.sep, "/"): v.replace(os.sep, "/")
        for k, v in build_py.get_output_mapping().items()
    }
    assert mapping == {
        f"{build_lib}/mypkg/__init__.py": "src/mypkg/__init__.py",
        f"{build_lib}/mypkg/resource_file.txt": "src/mypkg/resource_file.txt",
        f"{build_lib}/mypkg/sub1/__init__.py": "src/mypkg/sub1/__init__.py",
        f"{build_lib}/mypkg/sub1/mod1.py": "src/mypkg/sub1/mod1.py",
        f"{build_lib}/mypkg/sub2/mod2.py": "src/mypkg/_sub2/mod2.py",
        f"{build_lib}/mypkg/sub2/nested/__init__.py": "other/__init__.py",
        f"{build_lib}/mypkg/sub2/nested/mod3.py": "other/mod3.py",
    }
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
class TestTypeInfoFiles:
    """Type information (``py.typed``, ``*.pyi``) should ship in the build by default."""

    # pyproject.toml variants controlling include/exclude-package-data behaviour.
    PYPROJECTS = {
        "default_pyproject": DALS(
            """
            [project]
            name = "foo"
            version = "1"
            """
        ),
        "dont_include_package_data": DALS(
            """
            [project]
            name = "foo"
            version = "1"

            [tool.setuptools]
            include-package-data = false
            """
        ),
        "exclude_type_info": DALS(
            """
            [project]
            name = "foo"
            version = "1"

            [tool.setuptools]
            include-package-data = false

            [tool.setuptools.exclude-package-data]
            "*" = ["py.typed", "*.pyi"]
            """
        ),
    }

    # Each example maps a directory layout to the type files expected in the build.
    EXAMPLES = {
        "simple_namespace": {
            "directory_structure": {
                "foo": {
                    "bar.pyi": "",
                    "py.typed": "",
                    "__init__.py": "",
                }
            },
            "expected_type_files": {"foo/bar.pyi", "foo/py.typed"},
        },
        "nested_inside_namespace": {
            "directory_structure": {
                "foo": {
                    "bar": {
                        "py.typed": "",
                        "mod.pyi": "",
                    }
                }
            },
            "expected_type_files": {"foo/bar/mod.pyi", "foo/bar/py.typed"},
        },
        "namespace_nested_inside_regular": {
            "directory_structure": {
                "foo": {
                    "namespace": {
                        "foo.pyi": "",
                    },
                    "__init__.pyi": "",
                    "py.typed": "",
                }
            },
            "expected_type_files": {
                "foo/namespace/foo.pyi",
                "foo/__init__.pyi",
                "foo/py.typed",
            },
        },
    }

    @pytest.mark.parametrize(
        "pyproject",
        [
            "default_pyproject",
            pytest.param(
                "dont_include_package_data",
                marks=pytest.mark.xfail(reason="pypa/setuptools#4350"),
            ),
        ],
    )
    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example):
        # Type files should be picked up without any explicit configuration.
        structure = {
            **self.EXAMPLES[example]["directory_structure"],
            "pyproject.toml": self.PYPROJECTS[pyproject],
        }
        expected_type_files = self.EXAMPLES[example]["expected_type_files"]
        jaraco.path.build(structure)

        build_py = get_finalized_build_py()
        outputs = get_outputs(build_py)
        assert expected_type_files <= outputs

    @pytest.mark.parametrize("pyproject", ["exclude_type_info"])
    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_type_files_can_be_excluded(self, tmpdir_cwd, pyproject, example):
        # Users can still opt out via ``exclude-package-data``.
        structure = {
            **self.EXAMPLES[example]["directory_structure"],
            "pyproject.toml": self.PYPROJECTS[pyproject],
        }
        expected_type_files = self.EXAMPLES[example]["expected_type_files"]
        jaraco.path.build(structure)

        build_py = get_finalized_build_py()
        outputs = get_outputs(build_py)
        assert expected_type_files.isdisjoint(outputs)

    def test_stub_only_package(self, tmpdir_cwd):
        # Stub-only packages (``<pkg>-stubs``) must also be included.
        structure = {
            "pyproject.toml": DALS(
                """
                [project]
                name = "foo-stubs"
                version = "1"
                """
            ),
            "foo-stubs": {"__init__.pyi": "", "bar.pyi": ""},
        }
        expected_type_files = {"foo-stubs/__init__.pyi", "foo-stubs/bar.pyi"}
        jaraco.path.build(structure)

        build_py = get_finalized_build_py()
        outputs = get_outputs(build_py)
        assert expected_type_files <= outputs
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
def get_finalized_build_py(script_name="%build_py-test%"):
    """Build a Distribution from on-disk config and return its finalized build_py command."""
    distribution = Distribution({"script_name": script_name})
    distribution.parse_config_files()
    command = distribution.get_command_obj("build_py")
    command.finalize_options()
    return command
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
def get_outputs(build_py):
    """Return build_py outputs as POSIX-style paths relative to the build dir."""
    root = Path(build_py.build_lib)
    relative_paths = (os.path.relpath(entry, root) for entry in build_py.get_outputs())
    return {rel.replace(os.sep, "/") for rel in relative_paths}
|
llava/lib/python3.10/site-packages/setuptools/tests/test_core_metadata.py
ADDED
|
@@ -0,0 +1,577 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import functools
|
| 4 |
+
import importlib
|
| 5 |
+
import io
|
| 6 |
+
from email import message_from_string
|
| 7 |
+
from email.generator import Generator
|
| 8 |
+
from email.message import EmailMessage, Message
|
| 9 |
+
from email.parser import Parser
|
| 10 |
+
from email.policy import EmailPolicy
|
| 11 |
+
from inspect import cleandoc
|
| 12 |
+
from pathlib import Path
|
| 13 |
+
from unittest.mock import Mock
|
| 14 |
+
|
| 15 |
+
import pytest
|
| 16 |
+
from packaging.metadata import Metadata
|
| 17 |
+
from packaging.requirements import Requirement
|
| 18 |
+
|
| 19 |
+
from setuptools import _reqs, sic
|
| 20 |
+
from setuptools._core_metadata import rfc822_escape, rfc822_unescape
|
| 21 |
+
from setuptools.command.egg_info import egg_info, write_requirements
|
| 22 |
+
from setuptools.config import expand, setupcfg
|
| 23 |
+
from setuptools.dist import Distribution
|
| 24 |
+
|
| 25 |
+
from .config.downloads import retrieve_file, urls_from_file
|
| 26 |
+
|
| 27 |
+
# Minimal distribution metadata shared by several test cases in this module.
EXAMPLE_BASE_INFO = {
    "name": "package",
    "version": "0.0.1",
    "author": "Foo Bar",
    "author_email": "foo@bar.net",
    "long_description": "Long\ndescription",
    "description": "Short description",
    "keywords": ["one", "two"],
}
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@pytest.mark.parametrize(
    ("content", "result"),
    (
        pytest.param(
            "Just a single line",
            None,
            id="single_line",
        ),
        pytest.param(
            "Multiline\nText\nwithout\nextra indents\n",
            None,
            id="multiline",
        ),
        pytest.param(
            "Multiline\n With\n\nadditional\n  indentation",
            None,
            id="multiline_with_indentation",
        ),
        pytest.param(
            "  Leading whitespace",
            "Leading whitespace",
            id="remove_leading_whitespace",
        ),
        pytest.param(
            "  Leading whitespace\nIn\n    Multiline comment",
            "Leading whitespace\nIn\n    Multiline comment",
            id="remove_leading_whitespace_multiline",
        ),
    ),
)
def test_rfc822_unescape(content, result):
    """``rfc822_unescape`` must invert ``rfc822_escape``.

    ``result is None`` marks cases where the round-trip is expected to be
    lossless; otherwise ``result`` is the expected (leading-whitespace
    stripped) output.
    """
    assert (result or content) == rfc822_unescape(rfc822_escape(content))
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def __read_test_cases():
    """Build ``(label, attrs)`` pairs spanning several core-metadata versions."""
    base = EXAMPLE_BASE_INFO

    # Every case starts from the base example and overrides selected fields.
    params = functools.partial(dict, base)

    return [
        ('Metadata version 1.0', params()),
        (
            'Metadata Version 1.0: Short long description',
            params(
                long_description='Short long description',
            ),
        ),
        (
            'Metadata version 1.1: Classifiers',
            params(
                classifiers=[
                    'Programming Language :: Python :: 3',
                    'Programming Language :: Python :: 3.7',
                    'License :: OSI Approved :: MIT License',
                ],
            ),
        ),
        (
            'Metadata version 1.1: Download URL',
            params(
                download_url='https://example.com',
            ),
        ),
        (
            'Metadata Version 1.2: Requires-Python',
            params(
                python_requires='>=3.7',
            ),
        ),
        pytest.param(
            'Metadata Version 1.2: Project-Url',
            params(project_urls=dict(Foo='https://example.bar')),
            marks=pytest.mark.xfail(
                reason="Issue #1578: project_urls not read",
            ),
        ),
        (
            'Metadata Version 2.1: Long Description Content Type',
            params(
                long_description_content_type='text/x-rst; charset=UTF-8',
            ),
        ),
        (
            'License',
            params(
                license='MIT',
            ),
        ),
        (
            'License multiline',
            params(
                license='This is a long license \nover multiple lines',
            ),
        ),
        pytest.param(
            'Metadata Version 2.1: Provides Extra',
            params(provides_extras=['foo', 'bar']),
            marks=pytest.mark.xfail(reason="provides_extras not read"),
        ),
        (
            'Missing author',
            dict(
                name='foo',
                version='1.0.0',
                author_email='snorri@sturluson.name',
            ),
        ),
        (
            'Missing author e-mail',
            dict(
                name='foo',
                version='1.0.0',
                author='Snorri Sturluson',
            ),
        ),
        (
            'Missing author and e-mail',
            dict(
                name='foo',
                version='1.0.0',
            ),
        ),
        (
            'Bypass normalized version',
            dict(
                name='foo',
                # ``sic`` prevents setuptools from normalizing the version string.
                version=sic('1.0.0a'),
            ),
        ),
    ]
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
@pytest.mark.parametrize(("name", "attrs"), __read_test_cases())
|
| 171 |
+
def test_read_metadata(name, attrs):
|
| 172 |
+
dist = Distribution(attrs)
|
| 173 |
+
metadata_out = dist.metadata
|
| 174 |
+
dist_class = metadata_out.__class__
|
| 175 |
+
|
| 176 |
+
# Write to PKG_INFO and then load into a new metadata object
|
| 177 |
+
PKG_INFO = io.StringIO()
|
| 178 |
+
|
| 179 |
+
metadata_out.write_pkg_file(PKG_INFO)
|
| 180 |
+
PKG_INFO.seek(0)
|
| 181 |
+
pkg_info = PKG_INFO.read()
|
| 182 |
+
assert _valid_metadata(pkg_info)
|
| 183 |
+
|
| 184 |
+
PKG_INFO.seek(0)
|
| 185 |
+
metadata_in = dist_class()
|
| 186 |
+
metadata_in.read_pkg_file(PKG_INFO)
|
| 187 |
+
|
| 188 |
+
tested_attrs = [
|
| 189 |
+
('name', dist_class.get_name),
|
| 190 |
+
('version', dist_class.get_version),
|
| 191 |
+
('author', dist_class.get_contact),
|
| 192 |
+
('author_email', dist_class.get_contact_email),
|
| 193 |
+
('metadata_version', dist_class.get_metadata_version),
|
| 194 |
+
('provides', dist_class.get_provides),
|
| 195 |
+
('description', dist_class.get_description),
|
| 196 |
+
('long_description', dist_class.get_long_description),
|
| 197 |
+
('download_url', dist_class.get_download_url),
|
| 198 |
+
('keywords', dist_class.get_keywords),
|
| 199 |
+
('platforms', dist_class.get_platforms),
|
| 200 |
+
('obsoletes', dist_class.get_obsoletes),
|
| 201 |
+
('requires', dist_class.get_requires),
|
| 202 |
+
('classifiers', dist_class.get_classifiers),
|
| 203 |
+
('project_urls', lambda s: getattr(s, 'project_urls', {})),
|
| 204 |
+
('provides_extras', lambda s: getattr(s, 'provides_extras', {})),
|
| 205 |
+
]
|
| 206 |
+
|
| 207 |
+
for attr, getter in tested_attrs:
|
| 208 |
+
assert getter(metadata_in) == getter(metadata_out)
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def __maintainer_test_cases():
    """Return ``(label, attrs)`` pairs covering author/maintainer combinations."""
    base = {"name": "package", "version": "1.0", "description": "xxx"}

    def extended(extra):
        # Fresh dict per case so test cases cannot interfere with each other.
        return {**base, **extra}

    author = {'author': 'Author Name'}
    author_mail = {'author': 'Author Name', 'author_email': 'author@name.com'}
    maintainer = {'maintainer': 'Maintainer Name'}
    maintainer_mail = {
        'maintainer': 'Maintainer Name',
        'maintainer_email': 'maintainer@name.com',
    }

    return [
        ('No author, no maintainer', dict(base)),
        ('Author (no e-mail), no maintainer', extended(author)),
        ('Author (e-mail), no maintainer', extended(author_mail)),
        ('No author, maintainer (no e-mail)', extended(maintainer)),
        ('No author, maintainer (e-mail)', extended(maintainer_mail)),
        (
            'Author (no e-mail), Maintainer (no-email)',
            extended({**author, **maintainer}),
        ),
        (
            'Author (e-mail), Maintainer (e-mail)',
            extended({**author_mail, **maintainer_mail}),
        ),
        (
            'No author (e-mail), no maintainer (e-mail)',
            extended({
                'author_email': 'author@name.com',
                'maintainer_email': 'maintainer@name.com',
            }),
        ),
        ('Author unicode', extended({'author': '鉄沢寛'})),
        ('Maintainer unicode', extended({'maintainer': 'Jan Łukasiewicz'})),
    ]
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
@pytest.mark.parametrize(("name", "attrs"), __maintainer_test_cases())
|
| 280 |
+
def test_maintainer_author(name, attrs, tmpdir):
|
| 281 |
+
tested_keys = {
|
| 282 |
+
'author': 'Author',
|
| 283 |
+
'author_email': 'Author-email',
|
| 284 |
+
'maintainer': 'Maintainer',
|
| 285 |
+
'maintainer_email': 'Maintainer-email',
|
| 286 |
+
}
|
| 287 |
+
|
| 288 |
+
# Generate a PKG-INFO file
|
| 289 |
+
dist = Distribution(attrs)
|
| 290 |
+
fn = tmpdir.mkdir('pkg_info')
|
| 291 |
+
fn_s = str(fn)
|
| 292 |
+
|
| 293 |
+
dist.metadata.write_pkg_info(fn_s)
|
| 294 |
+
|
| 295 |
+
with open(str(fn.join('PKG-INFO')), 'r', encoding='utf-8') as f:
|
| 296 |
+
pkg_info = f.read()
|
| 297 |
+
|
| 298 |
+
assert _valid_metadata(pkg_info)
|
| 299 |
+
|
| 300 |
+
# Drop blank lines and strip lines from default description
|
| 301 |
+
raw_pkg_lines = pkg_info.splitlines()
|
| 302 |
+
pkg_lines = list(filter(None, raw_pkg_lines[:-2]))
|
| 303 |
+
|
| 304 |
+
pkg_lines_set = set(pkg_lines)
|
| 305 |
+
|
| 306 |
+
# Duplicate lines should not be generated
|
| 307 |
+
assert len(pkg_lines) == len(pkg_lines_set)
|
| 308 |
+
|
| 309 |
+
for fkey, dkey in tested_keys.items():
|
| 310 |
+
val = attrs.get(dkey, None)
|
| 311 |
+
if val is None:
|
| 312 |
+
for line in pkg_lines:
|
| 313 |
+
assert not line.startswith(fkey + ':')
|
| 314 |
+
else:
|
| 315 |
+
line = f'{fkey}: {val}'
|
| 316 |
+
assert line in pkg_lines_set
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
class TestParityWithMetadataFromPyPaWheel:
    """Compare setuptools-generated PKG-INFO against METADATA from pypa/wheel."""

    def base_example(self):
        # NOTE: requirement strings deliberately use messy formatting
        # (markers, extras, line continuations, inline comments) to stress
        # the requirement parsing/normalization.
        attrs = dict(
            **EXAMPLE_BASE_INFO,
            # Example with complex requirement definition
            python_requires=">=3.8",
            install_requires="""
            packaging==23.2
            more-itertools==8.8.0; extra == "other"
            jaraco.text==3.7.0
            importlib-resources==5.10.2; python_version<"3.8"
            importlib-metadata==6.0.0 ; python_version<"3.8"
            colorama>=0.4.4; sys_platform == "win32"
            """,
            extras_require={
                "testing": """
                pytest >= 6
                pytest-checkdocs >= 2.4
                tomli ; \\
                        # Using stdlib when possible
                        python_version < "3.11"
                ini2toml[lite]>=0.9
                """,
                "other": [],
            },
        )
        # Generate a PKG-INFO file using setuptools
        return Distribution(attrs)

    def test_requires_dist(self, tmp_path):
        dist = self.base_example()
        pkg_info = _get_pkginfo(dist)
        assert _valid_metadata(pkg_info)

        # Ensure Requires-Dist is present
        expected = [
            'Metadata-Version:',
            'Requires-Python: >=3.8',
            'Provides-Extra: other',
            'Provides-Extra: testing',
            'Requires-Dist: tomli; python_version < "3.11" and extra == "testing"',
            'Requires-Dist: more-itertools==8.8.0; extra == "other"',
            'Requires-Dist: ini2toml[lite]>=0.9; extra == "testing"',
        ]
        for line in expected:
            assert line in pkg_info

    HERE = Path(__file__).parent
    # Real-world setup.cfg examples downloaded for parity checks.
    EXAMPLES_FILE = HERE / "config/setupcfg_examples.txt"

    @pytest.fixture(params=[None, *urls_from_file(EXAMPLES_FILE)])
    def dist(self, request, monkeypatch, tmp_path):
        """Example of distribution with arbitrary configuration"""
        monkeypatch.chdir(tmp_path)
        # Stub out file/attribute reads so the example configs do not need
        # their project files on disk.
        monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.42"))
        monkeypatch.setattr(expand, "read_files", Mock(return_value="hello world"))
        if request.param is None:
            yield self.base_example()
        else:
            # Real-world usage
            config = retrieve_file(request.param)
            yield setupcfg.apply_configuration(Distribution({}), config)

    @pytest.mark.uses_network
    def test_equivalent_output(self, tmp_path, dist):
        """Ensure output from setuptools is equivalent to the one from `pypa/wheel`"""
        # Generate a METADATA file using pypa/wheel for comparison
        wheel_metadata = importlib.import_module("wheel.metadata")
        pkginfo_to_metadata = getattr(wheel_metadata, "pkginfo_to_metadata", None)

        if pkginfo_to_metadata is None:  # pragma: nocover
            pytest.xfail(
                "wheel.metadata.pkginfo_to_metadata is undefined, "
                "(this is likely to be caused by API changes in pypa/wheel"
            )

        # Generate an simplified "egg-info" dir for pypa/wheel to convert
        pkg_info = _get_pkginfo(dist)
        egg_info_dir = tmp_path / "pkg.egg-info"
        egg_info_dir.mkdir(parents=True)
        (egg_info_dir / "PKG-INFO").write_text(pkg_info, encoding="utf-8")
        write_requirements(egg_info(dist), egg_info_dir, egg_info_dir / "requires.txt")

        # Get pypa/wheel generated METADATA but normalize requirements formatting
        metadata_msg = pkginfo_to_metadata(egg_info_dir, egg_info_dir / "PKG-INFO")
        metadata_str = _normalize_metadata(metadata_msg)
        pkg_info_msg = message_from_string(pkg_info)
        pkg_info_str = _normalize_metadata(pkg_info_msg)

        # Compare setuptools PKG-INFO x pypa/wheel METADATA
        assert metadata_str == pkg_info_str

        # Make sure it parses/serializes well in pypa/wheel
        _assert_roundtrip_message(pkg_info)
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
class TestPEP643:
    """PEP 643: fields not listed as ``Dynamic`` must be treated as static."""

    # Equivalent fully-static configurations in both supported formats.
    STATIC_CONFIG = {
        "setup.cfg": cleandoc(
            """
            [metadata]
            name = package
            version = 0.0.1
            author = Foo Bar
            author_email = foo@bar.net
            long_description = Long
                description
            description = Short description
            keywords = one, two
            platforms = abcd
            [options]
            install_requires = requests
            """
        ),
        "pyproject.toml": cleandoc(
            """
            [project]
            name = "package"
            version = "0.0.1"
            authors = [
                {name = "Foo Bar", email = "foo@bar.net"}
            ]
            description = "Short description"
            readme = {text = "Long\\ndescription", content-type = "text/plain"}
            keywords = ["one", "two"]
            dependencies = ["requests"]
            [tool.setuptools]
            provides = ["abcd"]
            obsoletes = ["abcd"]
            """
        ),
    }

    @pytest.mark.parametrize("file", STATIC_CONFIG.keys())
    def test_static_config_has_no_dynamic(self, file, tmpdir_cwd):
        # A fully-static config must not emit any Dynamic field at all.
        Path(file).write_text(self.STATIC_CONFIG[file], encoding="utf-8")
        metadata = _get_metadata()
        assert metadata.get_all("Dynamic") is None
        assert metadata.get_all("dynamic") is None

    @pytest.mark.parametrize("file", STATIC_CONFIG.keys())
    @pytest.mark.parametrize(
        "fields",
        [
            # Single dynamic field
            {"requires-python": ("python_requires", ">=3.12")},
            {"author-email": ("author_email", "snoopy@peanuts.com")},
            {"keywords": ("keywords", ["hello", "world"])},
            {"platform": ("platforms", ["abcd"])},
            # Multiple dynamic fields
            {
                "summary": ("description", "hello world"),
                "description": ("long_description", "bla bla bla bla"),
                "requires-dist": ("install_requires", ["hello-world"]),
            },
        ],
    )
    def test_modified_fields_marked_as_dynamic(self, file, fields, tmpdir_cwd):
        # We start with a static config
        Path(file).write_text(self.STATIC_CONFIG[file], encoding="utf-8")
        dist = _makedist()

        # ... but then we simulate the effects of a plugin modifying the distribution
        for attr, value in fields.values():
            # `dist` and `dist.metadata` are complicated...
            # Some attributes work when set on `dist`, others on `dist.metadata`...
            # Here we set in both just in case (this also avoids calling `_finalize_*`)
            setattr(dist, attr, value)
            setattr(dist.metadata, attr, value)

        # Then we should be able to list the modified fields as Dynamic
        metadata = _get_metadata(dist)
        assert set(metadata.get_all("Dynamic")) == set(fields)
|
| 492 |
+
|
| 493 |
+
|
| 494 |
+
def _makedist(**attrs):
    """Return a Distribution built from *attrs* with on-disk config files applied."""
    distribution = Distribution(attrs)
    distribution.parse_config_files()
    return distribution
|
| 498 |
+
|
| 499 |
+
|
| 500 |
+
def _assert_roundtrip_message(metadata: str) -> None:
    """Emulate the way wheel.bdist_wheel parses and regenerates the message,
    then ensures the metadata generated by setuptools is compatible.
    """
    source = io.StringIO(metadata)
    try:
        msg = Parser(EmailMessage).parse(source)
    finally:
        source.close()

    # Same policy pypa/wheel uses when serializing METADATA.
    serialization_policy = EmailPolicy(
        utf8=True,
        mangle_from_=False,
        max_line_length=0,
    )
    sink = io.BytesIO()
    try:
        wrapper = io.TextIOWrapper(sink, encoding="utf-8")
        Generator(wrapper, policy=serialization_policy).flatten(msg)
        wrapper.flush()
        regenerated = sink.getvalue()
    finally:
        sink.close()

    original = bytes(metadata, "utf-8")
    # Normalise newlines to avoid test errors on Windows:
    assert b"\n".join(regenerated.splitlines()) == b"\n".join(original.splitlines())
|
| 523 |
+
|
| 524 |
+
|
| 525 |
+
def _normalize_metadata(msg: Message) -> str:
|
| 526 |
+
"""Allow equivalent metadata to be compared directly"""
|
| 527 |
+
# The main challenge regards the requirements and extras.
|
| 528 |
+
# Both setuptools and wheel already apply some level of normalization
|
| 529 |
+
# but they differ regarding which character is chosen, according to the
|
| 530 |
+
# following spec it should be "-":
|
| 531 |
+
# https://packaging.python.org/en/latest/specifications/name-normalization/
|
| 532 |
+
|
| 533 |
+
# Related issues:
|
| 534 |
+
# https://github.com/pypa/packaging/issues/845
|
| 535 |
+
# https://github.com/pypa/packaging/issues/644#issuecomment-2429813968
|
| 536 |
+
|
| 537 |
+
extras = {x.replace("_", "-"): x for x in msg.get_all("Provides-Extra", [])}
|
| 538 |
+
reqs = [
|
| 539 |
+
_normalize_req(req, extras)
|
| 540 |
+
for req in _reqs.parse(msg.get_all("Requires-Dist", []))
|
| 541 |
+
]
|
| 542 |
+
del msg["Requires-Dist"]
|
| 543 |
+
del msg["Provides-Extra"]
|
| 544 |
+
|
| 545 |
+
# Ensure consistent ord
|
| 546 |
+
for req in sorted(reqs):
|
| 547 |
+
msg["Requires-Dist"] = req
|
| 548 |
+
for extra in sorted(extras):
|
| 549 |
+
msg["Provides-Extra"] = extra
|
| 550 |
+
|
| 551 |
+
# TODO: Handle lack of PEP 643 implementation in pypa/wheel?
|
| 552 |
+
del msg["Metadata-Version"]
|
| 553 |
+
|
| 554 |
+
return msg.as_string()
|
| 555 |
+
|
| 556 |
+
|
| 557 |
+
def _normalize_req(req: Requirement, extras: dict[str, str]) -> str:
|
| 558 |
+
"""Allow equivalent requirement objects to be compared directly"""
|
| 559 |
+
as_str = str(req).replace(req.name, req.name.replace("_", "-"))
|
| 560 |
+
for norm, orig in extras.items():
|
| 561 |
+
as_str = as_str.replace(orig, norm)
|
| 562 |
+
return as_str
|
| 563 |
+
|
| 564 |
+
|
| 565 |
+
def _get_pkginfo(dist: Distribution):
|
| 566 |
+
with io.StringIO() as fp:
|
| 567 |
+
dist.metadata.write_pkg_file(fp)
|
| 568 |
+
return fp.getvalue()
|
| 569 |
+
|
| 570 |
+
|
| 571 |
+
def _get_metadata(dist: Distribution | None = None):
|
| 572 |
+
return message_from_string(_get_pkginfo(dist or _makedist()))
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
def _valid_metadata(text: str) -> bool:
|
| 576 |
+
metadata = Metadata.from_email(text, validate=True) # can raise exceptions
|
| 577 |
+
return metadata is not None
|
llava/lib/python3.10/site-packages/setuptools/tests/test_depends.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
|
| 3 |
+
from setuptools import depends
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestGetModuleConstant:
|
| 7 |
+
def test_basic(self):
|
| 8 |
+
"""
|
| 9 |
+
Invoke get_module_constant on a module in
|
| 10 |
+
the test package.
|
| 11 |
+
"""
|
| 12 |
+
mod_name = 'setuptools.tests.mod_with_constant'
|
| 13 |
+
val = depends.get_module_constant(mod_name, 'value')
|
| 14 |
+
assert val == 'three, sir!'
|
| 15 |
+
assert 'setuptools.tests.mod_with_constant' not in sys.modules
|
llava/lib/python3.10/site-packages/setuptools/tests/test_develop.py
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""develop tests"""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import pathlib
|
| 5 |
+
import platform
|
| 6 |
+
import subprocess
|
| 7 |
+
import sys
|
| 8 |
+
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
from setuptools._path import paths_on_pythonpath
|
| 12 |
+
from setuptools.command.develop import develop
|
| 13 |
+
from setuptools.dist import Distribution
|
| 14 |
+
|
| 15 |
+
from . import contexts, namespaces
|
| 16 |
+
|
| 17 |
+
SETUP_PY = """\
|
| 18 |
+
from setuptools import setup
|
| 19 |
+
|
| 20 |
+
setup(name='foo',
|
| 21 |
+
packages=['foo'],
|
| 22 |
+
)
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
INIT_PY = """print "foo"
|
| 26 |
+
"""
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
@pytest.fixture
|
| 30 |
+
def temp_user(monkeypatch):
|
| 31 |
+
with contexts.tempdir() as user_base:
|
| 32 |
+
with contexts.tempdir() as user_site:
|
| 33 |
+
monkeypatch.setattr('site.USER_BASE', user_base)
|
| 34 |
+
monkeypatch.setattr('site.USER_SITE', user_site)
|
| 35 |
+
yield
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@pytest.fixture
|
| 39 |
+
def test_env(tmpdir, temp_user):
|
| 40 |
+
target = tmpdir
|
| 41 |
+
foo = target.mkdir('foo')
|
| 42 |
+
setup = target / 'setup.py'
|
| 43 |
+
if setup.isfile():
|
| 44 |
+
raise ValueError(dir(target))
|
| 45 |
+
with setup.open('w') as f:
|
| 46 |
+
f.write(SETUP_PY)
|
| 47 |
+
init = foo / '__init__.py'
|
| 48 |
+
with init.open('w') as f:
|
| 49 |
+
f.write(INIT_PY)
|
| 50 |
+
with target.as_cwd():
|
| 51 |
+
yield target
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class TestDevelop:
|
| 55 |
+
in_virtualenv = hasattr(sys, 'real_prefix')
|
| 56 |
+
in_venv = hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix
|
| 57 |
+
|
| 58 |
+
def test_console_scripts(self, tmpdir):
|
| 59 |
+
"""
|
| 60 |
+
Test that console scripts are installed and that they reference
|
| 61 |
+
only the project by name and not the current version.
|
| 62 |
+
"""
|
| 63 |
+
pytest.skip(
|
| 64 |
+
"TODO: needs a fixture to cause 'develop' "
|
| 65 |
+
"to be invoked without mutating environment."
|
| 66 |
+
)
|
| 67 |
+
settings = dict(
|
| 68 |
+
name='foo',
|
| 69 |
+
packages=['foo'],
|
| 70 |
+
version='0.0',
|
| 71 |
+
entry_points={
|
| 72 |
+
'console_scripts': [
|
| 73 |
+
'foocmd = foo:foo',
|
| 74 |
+
],
|
| 75 |
+
},
|
| 76 |
+
)
|
| 77 |
+
dist = Distribution(settings)
|
| 78 |
+
dist.script_name = 'setup.py'
|
| 79 |
+
cmd = develop(dist)
|
| 80 |
+
cmd.ensure_finalized()
|
| 81 |
+
cmd.install_dir = tmpdir
|
| 82 |
+
cmd.run()
|
| 83 |
+
# assert '0.0' not in foocmd_text
|
| 84 |
+
|
| 85 |
+
@pytest.mark.xfail(reason="legacy behavior retained for compatibility #4167")
|
| 86 |
+
def test_egg_link_filename(self):
|
| 87 |
+
settings = dict(
|
| 88 |
+
name='Foo $$$ Bar_baz-bing',
|
| 89 |
+
)
|
| 90 |
+
dist = Distribution(settings)
|
| 91 |
+
cmd = develop(dist)
|
| 92 |
+
cmd.ensure_finalized()
|
| 93 |
+
link = pathlib.Path(cmd.egg_link)
|
| 94 |
+
assert link.suffix == '.egg-link'
|
| 95 |
+
assert link.stem == 'Foo_Bar_baz_bing'
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class TestResolver:
|
| 99 |
+
"""
|
| 100 |
+
TODO: These tests were written with a minimal understanding
|
| 101 |
+
of what _resolve_setup_path is intending to do. Come up with
|
| 102 |
+
more meaningful cases that look like real-world scenarios.
|
| 103 |
+
"""
|
| 104 |
+
|
| 105 |
+
def test_resolve_setup_path_cwd(self):
|
| 106 |
+
assert develop._resolve_setup_path('.', '.', '.') == '.'
|
| 107 |
+
|
| 108 |
+
def test_resolve_setup_path_one_dir(self):
|
| 109 |
+
assert develop._resolve_setup_path('pkgs', '.', 'pkgs') == '../'
|
| 110 |
+
|
| 111 |
+
def test_resolve_setup_path_one_dir_trailing_slash(self):
|
| 112 |
+
assert develop._resolve_setup_path('pkgs/', '.', 'pkgs') == '../'
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class TestNamespaces:
|
| 116 |
+
@staticmethod
|
| 117 |
+
def install_develop(src_dir, target):
|
| 118 |
+
develop_cmd = [
|
| 119 |
+
sys.executable,
|
| 120 |
+
'setup.py',
|
| 121 |
+
'develop',
|
| 122 |
+
'--install-dir',
|
| 123 |
+
str(target),
|
| 124 |
+
]
|
| 125 |
+
with src_dir.as_cwd():
|
| 126 |
+
with paths_on_pythonpath([str(target)]):
|
| 127 |
+
subprocess.check_call(develop_cmd)
|
| 128 |
+
|
| 129 |
+
@pytest.mark.skipif(
|
| 130 |
+
bool(os.environ.get("APPVEYOR")),
|
| 131 |
+
reason="https://github.com/pypa/setuptools/issues/851",
|
| 132 |
+
)
|
| 133 |
+
@pytest.mark.skipif(
|
| 134 |
+
platform.python_implementation() == 'PyPy',
|
| 135 |
+
reason="https://github.com/pypa/setuptools/issues/1202",
|
| 136 |
+
)
|
| 137 |
+
def test_namespace_package_importable(self, tmpdir):
|
| 138 |
+
"""
|
| 139 |
+
Installing two packages sharing the same namespace, one installed
|
| 140 |
+
naturally using pip or `--single-version-externally-managed`
|
| 141 |
+
and the other installed using `develop` should leave the namespace
|
| 142 |
+
in tact and both packages reachable by import.
|
| 143 |
+
"""
|
| 144 |
+
pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
|
| 145 |
+
pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
|
| 146 |
+
target = tmpdir / 'packages'
|
| 147 |
+
# use pip to install to the target directory
|
| 148 |
+
install_cmd = [
|
| 149 |
+
sys.executable,
|
| 150 |
+
'-m',
|
| 151 |
+
'pip',
|
| 152 |
+
'install',
|
| 153 |
+
str(pkg_A),
|
| 154 |
+
'-t',
|
| 155 |
+
str(target),
|
| 156 |
+
]
|
| 157 |
+
subprocess.check_call(install_cmd)
|
| 158 |
+
self.install_develop(pkg_B, target)
|
| 159 |
+
namespaces.make_site_dir(target)
|
| 160 |
+
try_import = [
|
| 161 |
+
sys.executable,
|
| 162 |
+
'-c',
|
| 163 |
+
'import myns.pkgA; import myns.pkgB',
|
| 164 |
+
]
|
| 165 |
+
with paths_on_pythonpath([str(target)]):
|
| 166 |
+
subprocess.check_call(try_import)
|
| 167 |
+
|
| 168 |
+
# additionally ensure that pkg_resources import works
|
| 169 |
+
pkg_resources_imp = [
|
| 170 |
+
sys.executable,
|
| 171 |
+
'-c',
|
| 172 |
+
'import pkg_resources',
|
| 173 |
+
]
|
| 174 |
+
with paths_on_pythonpath([str(target)]):
|
| 175 |
+
subprocess.check_call(pkg_resources_imp)
|
llava/lib/python3.10/site-packages/setuptools/tests/test_dist.py
ADDED
|
@@ -0,0 +1,278 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import re
|
| 3 |
+
import urllib.parse
|
| 4 |
+
import urllib.request
|
| 5 |
+
|
| 6 |
+
import pytest
|
| 7 |
+
|
| 8 |
+
from setuptools import Distribution
|
| 9 |
+
from setuptools.dist import check_package_data, check_specifier
|
| 10 |
+
|
| 11 |
+
from .test_easy_install import make_nspkg_sdist
|
| 12 |
+
from .test_find_packages import ensure_files
|
| 13 |
+
from .textwrap import DALS
|
| 14 |
+
|
| 15 |
+
from distutils.errors import DistutilsSetupError
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def test_dist_fetch_build_egg(tmpdir):
|
| 19 |
+
"""
|
| 20 |
+
Check multiple calls to `Distribution.fetch_build_egg` work as expected.
|
| 21 |
+
"""
|
| 22 |
+
index = tmpdir.mkdir('index')
|
| 23 |
+
index_url = urllib.parse.urljoin('file://', urllib.request.pathname2url(str(index)))
|
| 24 |
+
|
| 25 |
+
def sdist_with_index(distname, version):
|
| 26 |
+
dist_dir = index.mkdir(distname)
|
| 27 |
+
dist_sdist = f'{distname}-{version}.tar.gz'
|
| 28 |
+
make_nspkg_sdist(str(dist_dir.join(dist_sdist)), distname, version)
|
| 29 |
+
with dist_dir.join('index.html').open('w') as fp:
|
| 30 |
+
fp.write(
|
| 31 |
+
DALS(
|
| 32 |
+
"""
|
| 33 |
+
<!DOCTYPE html><html><body>
|
| 34 |
+
<a href="{dist_sdist}" rel="internal">{dist_sdist}</a><br/>
|
| 35 |
+
</body></html>
|
| 36 |
+
"""
|
| 37 |
+
).format(dist_sdist=dist_sdist)
|
| 38 |
+
)
|
| 39 |
+
|
| 40 |
+
sdist_with_index('barbazquux', '3.2.0')
|
| 41 |
+
sdist_with_index('barbazquux-runner', '2.11.1')
|
| 42 |
+
with tmpdir.join('setup.cfg').open('w') as fp:
|
| 43 |
+
fp.write(
|
| 44 |
+
DALS(
|
| 45 |
+
"""
|
| 46 |
+
[easy_install]
|
| 47 |
+
index_url = {index_url}
|
| 48 |
+
"""
|
| 49 |
+
).format(index_url=index_url)
|
| 50 |
+
)
|
| 51 |
+
reqs = """
|
| 52 |
+
barbazquux-runner
|
| 53 |
+
barbazquux
|
| 54 |
+
""".split()
|
| 55 |
+
with tmpdir.as_cwd():
|
| 56 |
+
dist = Distribution()
|
| 57 |
+
dist.parse_config_files()
|
| 58 |
+
resolved_dists = [dist.fetch_build_egg(r) for r in reqs]
|
| 59 |
+
assert [dist.key for dist in resolved_dists if dist] == reqs
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
EXAMPLE_BASE_INFO = dict(
|
| 63 |
+
name="package",
|
| 64 |
+
version="0.0.1",
|
| 65 |
+
author="Foo Bar",
|
| 66 |
+
author_email="foo@bar.net",
|
| 67 |
+
long_description="Long\ndescription",
|
| 68 |
+
description="Short description",
|
| 69 |
+
keywords=["one", "two"],
|
| 70 |
+
)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def test_provides_extras_deterministic_order():
|
| 74 |
+
attrs = dict(extras_require=dict(a=['foo'], b=['bar']))
|
| 75 |
+
dist = Distribution(attrs)
|
| 76 |
+
assert list(dist.metadata.provides_extras) == ['a', 'b']
|
| 77 |
+
attrs['extras_require'] = dict(reversed(attrs['extras_require'].items()))
|
| 78 |
+
dist = Distribution(attrs)
|
| 79 |
+
assert list(dist.metadata.provides_extras) == ['b', 'a']
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
CHECK_PACKAGE_DATA_TESTS = (
|
| 83 |
+
# Valid.
|
| 84 |
+
(
|
| 85 |
+
{
|
| 86 |
+
'': ['*.txt', '*.rst'],
|
| 87 |
+
'hello': ['*.msg'],
|
| 88 |
+
},
|
| 89 |
+
None,
|
| 90 |
+
),
|
| 91 |
+
# Not a dictionary.
|
| 92 |
+
(
|
| 93 |
+
(
|
| 94 |
+
('', ['*.txt', '*.rst']),
|
| 95 |
+
('hello', ['*.msg']),
|
| 96 |
+
),
|
| 97 |
+
(
|
| 98 |
+
"'package_data' must be a dictionary mapping package"
|
| 99 |
+
" names to lists of string wildcard patterns"
|
| 100 |
+
),
|
| 101 |
+
),
|
| 102 |
+
# Invalid key type.
|
| 103 |
+
(
|
| 104 |
+
{
|
| 105 |
+
400: ['*.txt', '*.rst'],
|
| 106 |
+
},
|
| 107 |
+
("keys of 'package_data' dict must be strings (got 400)"),
|
| 108 |
+
),
|
| 109 |
+
# Invalid value type.
|
| 110 |
+
(
|
| 111 |
+
{
|
| 112 |
+
'hello': '*.msg',
|
| 113 |
+
},
|
| 114 |
+
(
|
| 115 |
+
"\"values of 'package_data' dict\" must be of type <tuple[str, ...] | list[str]>"
|
| 116 |
+
" (got '*.msg')"
|
| 117 |
+
),
|
| 118 |
+
),
|
| 119 |
+
# Invalid value type (generators are single use)
|
| 120 |
+
(
|
| 121 |
+
{
|
| 122 |
+
'hello': (x for x in "generator"),
|
| 123 |
+
},
|
| 124 |
+
(
|
| 125 |
+
"\"values of 'package_data' dict\" must be of type <tuple[str, ...] | list[str]>"
|
| 126 |
+
" (got <generator object"
|
| 127 |
+
),
|
| 128 |
+
),
|
| 129 |
+
)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
@pytest.mark.parametrize(('package_data', 'expected_message'), CHECK_PACKAGE_DATA_TESTS)
|
| 133 |
+
def test_check_package_data(package_data, expected_message):
|
| 134 |
+
if expected_message is None:
|
| 135 |
+
assert check_package_data(None, 'package_data', package_data) is None
|
| 136 |
+
else:
|
| 137 |
+
with pytest.raises(DistutilsSetupError, match=re.escape(expected_message)):
|
| 138 |
+
check_package_data(None, 'package_data', package_data)
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def test_check_specifier():
|
| 142 |
+
# valid specifier value
|
| 143 |
+
attrs = {'name': 'foo', 'python_requires': '>=3.0, !=3.1'}
|
| 144 |
+
dist = Distribution(attrs)
|
| 145 |
+
check_specifier(dist, attrs, attrs['python_requires'])
|
| 146 |
+
|
| 147 |
+
attrs = {'name': 'foo', 'python_requires': ['>=3.0', '!=3.1']}
|
| 148 |
+
dist = Distribution(attrs)
|
| 149 |
+
check_specifier(dist, attrs, attrs['python_requires'])
|
| 150 |
+
|
| 151 |
+
# invalid specifier value
|
| 152 |
+
attrs = {'name': 'foo', 'python_requires': '>=invalid-version'}
|
| 153 |
+
with pytest.raises(DistutilsSetupError):
|
| 154 |
+
dist = Distribution(attrs)
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def test_metadata_name():
|
| 158 |
+
with pytest.raises(DistutilsSetupError, match='missing.*name'):
|
| 159 |
+
Distribution()._validate_metadata()
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
@pytest.mark.parametrize(
|
| 163 |
+
('dist_name', 'py_module'),
|
| 164 |
+
[
|
| 165 |
+
("my.pkg", "my_pkg"),
|
| 166 |
+
("my-pkg", "my_pkg"),
|
| 167 |
+
("my_pkg", "my_pkg"),
|
| 168 |
+
("pkg", "pkg"),
|
| 169 |
+
],
|
| 170 |
+
)
|
| 171 |
+
def test_dist_default_py_modules(tmp_path, dist_name, py_module):
|
| 172 |
+
(tmp_path / f"{py_module}.py").touch()
|
| 173 |
+
|
| 174 |
+
(tmp_path / "setup.py").touch()
|
| 175 |
+
(tmp_path / "noxfile.py").touch()
|
| 176 |
+
# ^-- make sure common tool files are ignored
|
| 177 |
+
|
| 178 |
+
attrs = {**EXAMPLE_BASE_INFO, "name": dist_name, "src_root": str(tmp_path)}
|
| 179 |
+
# Find `py_modules` corresponding to dist_name if not given
|
| 180 |
+
dist = Distribution(attrs)
|
| 181 |
+
dist.set_defaults()
|
| 182 |
+
assert dist.py_modules == [py_module]
|
| 183 |
+
# When `py_modules` is given, don't do anything
|
| 184 |
+
dist = Distribution({**attrs, "py_modules": ["explicity_py_module"]})
|
| 185 |
+
dist.set_defaults()
|
| 186 |
+
assert dist.py_modules == ["explicity_py_module"]
|
| 187 |
+
# When `packages` is given, don't do anything
|
| 188 |
+
dist = Distribution({**attrs, "packages": ["explicity_package"]})
|
| 189 |
+
dist.set_defaults()
|
| 190 |
+
assert not dist.py_modules
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
@pytest.mark.parametrize(
|
| 194 |
+
('dist_name', 'package_dir', 'package_files', 'packages'),
|
| 195 |
+
[
|
| 196 |
+
("my.pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
|
| 197 |
+
("my-pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
|
| 198 |
+
("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
|
| 199 |
+
("my.pkg", None, ["my/pkg/__init__.py"], ["my", "my.pkg"]),
|
| 200 |
+
(
|
| 201 |
+
"my_pkg",
|
| 202 |
+
None,
|
| 203 |
+
["src/my_pkg/__init__.py", "src/my_pkg2/__init__.py"],
|
| 204 |
+
["my_pkg", "my_pkg2"],
|
| 205 |
+
),
|
| 206 |
+
(
|
| 207 |
+
"my_pkg",
|
| 208 |
+
{"pkg": "lib", "pkg2": "lib2"},
|
| 209 |
+
["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
|
| 210 |
+
["pkg", "pkg.nested", "pkg2"],
|
| 211 |
+
),
|
| 212 |
+
],
|
| 213 |
+
)
|
| 214 |
+
def test_dist_default_packages(
|
| 215 |
+
tmp_path, dist_name, package_dir, package_files, packages
|
| 216 |
+
):
|
| 217 |
+
ensure_files(tmp_path, package_files)
|
| 218 |
+
|
| 219 |
+
(tmp_path / "setup.py").touch()
|
| 220 |
+
(tmp_path / "noxfile.py").touch()
|
| 221 |
+
# ^-- should not be included by default
|
| 222 |
+
|
| 223 |
+
attrs = {
|
| 224 |
+
**EXAMPLE_BASE_INFO,
|
| 225 |
+
"name": dist_name,
|
| 226 |
+
"src_root": str(tmp_path),
|
| 227 |
+
"package_dir": package_dir,
|
| 228 |
+
}
|
| 229 |
+
# Find `packages` either corresponding to dist_name or inside src
|
| 230 |
+
dist = Distribution(attrs)
|
| 231 |
+
dist.set_defaults()
|
| 232 |
+
assert not dist.py_modules
|
| 233 |
+
assert not dist.py_modules
|
| 234 |
+
assert set(dist.packages) == set(packages)
|
| 235 |
+
# When `py_modules` is given, don't do anything
|
| 236 |
+
dist = Distribution({**attrs, "py_modules": ["explicit_py_module"]})
|
| 237 |
+
dist.set_defaults()
|
| 238 |
+
assert not dist.packages
|
| 239 |
+
assert set(dist.py_modules) == {"explicit_py_module"}
|
| 240 |
+
# When `packages` is given, don't do anything
|
| 241 |
+
dist = Distribution({**attrs, "packages": ["explicit_package"]})
|
| 242 |
+
dist.set_defaults()
|
| 243 |
+
assert not dist.py_modules
|
| 244 |
+
assert set(dist.packages) == {"explicit_package"}
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
@pytest.mark.parametrize(
|
| 248 |
+
('dist_name', 'package_dir', 'package_files'),
|
| 249 |
+
[
|
| 250 |
+
("my.pkg.nested", None, ["my/pkg/nested/__init__.py"]),
|
| 251 |
+
("my.pkg", None, ["my/pkg/__init__.py", "my/pkg/file.py"]),
|
| 252 |
+
("my_pkg", None, ["my_pkg.py"]),
|
| 253 |
+
("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/nested/__init__.py"]),
|
| 254 |
+
("my_pkg", None, ["src/my_pkg/__init__.py", "src/my_pkg/nested/__init__.py"]),
|
| 255 |
+
(
|
| 256 |
+
"my_pkg",
|
| 257 |
+
{"my_pkg": "lib", "my_pkg.lib2": "lib2"},
|
| 258 |
+
["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
|
| 259 |
+
),
|
| 260 |
+
# Should not try to guess a name from multiple py_modules/packages
|
| 261 |
+
("UNKNOWN", None, ["src/mod1.py", "src/mod2.py"]),
|
| 262 |
+
("UNKNOWN", None, ["src/pkg1/__ini__.py", "src/pkg2/__init__.py"]),
|
| 263 |
+
],
|
| 264 |
+
)
|
| 265 |
+
def test_dist_default_name(tmp_path, dist_name, package_dir, package_files):
|
| 266 |
+
"""Make sure dist.name is discovered from packages/py_modules"""
|
| 267 |
+
ensure_files(tmp_path, package_files)
|
| 268 |
+
attrs = {
|
| 269 |
+
**EXAMPLE_BASE_INFO,
|
| 270 |
+
"src_root": "/".join(os.path.split(tmp_path)), # POSIX-style
|
| 271 |
+
"package_dir": package_dir,
|
| 272 |
+
}
|
| 273 |
+
del attrs["name"]
|
| 274 |
+
|
| 275 |
+
dist = Distribution(attrs)
|
| 276 |
+
dist.set_defaults()
|
| 277 |
+
assert dist.py_modules or dist.packages
|
| 278 |
+
assert dist.get_name() == dist_name
|
llava/lib/python3.10/site-packages/setuptools/tests/test_dist_info.py
ADDED
|
@@ -0,0 +1,210 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Test .dist-info style distributions."""
|
| 2 |
+
|
| 3 |
+
import pathlib
|
| 4 |
+
import re
|
| 5 |
+
import shutil
|
| 6 |
+
import subprocess
|
| 7 |
+
import sys
|
| 8 |
+
from functools import partial
|
| 9 |
+
|
| 10 |
+
import pytest
|
| 11 |
+
|
| 12 |
+
import pkg_resources
|
| 13 |
+
from setuptools.archive_util import unpack_archive
|
| 14 |
+
|
| 15 |
+
from .textwrap import DALS
|
| 16 |
+
|
| 17 |
+
read = partial(pathlib.Path.read_text, encoding="utf-8")
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class TestDistInfo:
|
| 21 |
+
metadata_base = DALS(
|
| 22 |
+
"""
|
| 23 |
+
Metadata-Version: 1.2
|
| 24 |
+
Requires-Dist: splort (==4)
|
| 25 |
+
Provides-Extra: baz
|
| 26 |
+
Requires-Dist: quux (>=1.1); extra == 'baz'
|
| 27 |
+
"""
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
@classmethod
|
| 31 |
+
def build_metadata(cls, **kwargs):
|
| 32 |
+
lines = ('{key}: {value}\n'.format(**locals()) for key, value in kwargs.items())
|
| 33 |
+
return cls.metadata_base + ''.join(lines)
|
| 34 |
+
|
| 35 |
+
@pytest.fixture
|
| 36 |
+
def metadata(self, tmpdir):
|
| 37 |
+
dist_info_name = 'VersionedDistribution-2.718.dist-info'
|
| 38 |
+
versioned = tmpdir / dist_info_name
|
| 39 |
+
versioned.mkdir()
|
| 40 |
+
filename = versioned / 'METADATA'
|
| 41 |
+
content = self.build_metadata(
|
| 42 |
+
Name='VersionedDistribution',
|
| 43 |
+
)
|
| 44 |
+
filename.write_text(content, encoding='utf-8')
|
| 45 |
+
|
| 46 |
+
dist_info_name = 'UnversionedDistribution.dist-info'
|
| 47 |
+
unversioned = tmpdir / dist_info_name
|
| 48 |
+
unversioned.mkdir()
|
| 49 |
+
filename = unversioned / 'METADATA'
|
| 50 |
+
content = self.build_metadata(
|
| 51 |
+
Name='UnversionedDistribution',
|
| 52 |
+
Version='0.3',
|
| 53 |
+
)
|
| 54 |
+
filename.write_text(content, encoding='utf-8')
|
| 55 |
+
|
| 56 |
+
return str(tmpdir)
|
| 57 |
+
|
| 58 |
+
def test_distinfo(self, metadata):
|
| 59 |
+
dists = dict(
|
| 60 |
+
(d.project_name, d) for d in pkg_resources.find_distributions(metadata)
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
assert len(dists) == 2, dists
|
| 64 |
+
|
| 65 |
+
unversioned = dists['UnversionedDistribution']
|
| 66 |
+
versioned = dists['VersionedDistribution']
|
| 67 |
+
|
| 68 |
+
assert versioned.version == '2.718' # from filename
|
| 69 |
+
assert unversioned.version == '0.3' # from METADATA
|
| 70 |
+
|
| 71 |
+
def test_conditional_dependencies(self, metadata):
|
| 72 |
+
specs = 'splort==4', 'quux>=1.1'
|
| 73 |
+
requires = list(map(pkg_resources.Requirement.parse, specs))
|
| 74 |
+
|
| 75 |
+
for d in pkg_resources.find_distributions(metadata):
|
| 76 |
+
assert d.requires() == requires[:1]
|
| 77 |
+
assert d.requires(extras=('baz',)) == [
|
| 78 |
+
requires[0],
|
| 79 |
+
pkg_resources.Requirement.parse('quux>=1.1;extra=="baz"'),
|
| 80 |
+
]
|
| 81 |
+
assert d.extras == ['baz']
|
| 82 |
+
|
| 83 |
+
def test_invalid_version(self, tmp_path):
|
| 84 |
+
"""
|
| 85 |
+
Supplying an invalid version crashes dist_info.
|
| 86 |
+
"""
|
| 87 |
+
config = "[metadata]\nname=proj\nversion=42\n[egg_info]\ntag_build=invalid!!!\n"
|
| 88 |
+
(tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
|
| 89 |
+
msg = re.compile("invalid version", re.M | re.I)
|
| 90 |
+
proc = run_command_inner("dist_info", cwd=tmp_path, check=False)
|
| 91 |
+
assert proc.returncode
|
| 92 |
+
assert msg.search(proc.stdout)
|
| 93 |
+
assert not list(tmp_path.glob("*.dist-info"))
|
| 94 |
+
|
| 95 |
+
def test_tag_arguments(self, tmp_path):
|
| 96 |
+
config = """
|
| 97 |
+
[metadata]
|
| 98 |
+
name=proj
|
| 99 |
+
version=42
|
| 100 |
+
[egg_info]
|
| 101 |
+
tag_date=1
|
| 102 |
+
tag_build=.post
|
| 103 |
+
"""
|
| 104 |
+
(tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
|
| 105 |
+
|
| 106 |
+
print(run_command("dist_info", "--no-date", cwd=tmp_path))
|
| 107 |
+
dist_info = next(tmp_path.glob("*.dist-info"))
|
| 108 |
+
assert dist_info.name.startswith("proj-42")
|
| 109 |
+
shutil.rmtree(dist_info)
|
| 110 |
+
|
| 111 |
+
print(run_command("dist_info", "--tag-build", ".a", cwd=tmp_path))
|
| 112 |
+
dist_info = next(tmp_path.glob("*.dist-info"))
|
| 113 |
+
assert dist_info.name.startswith("proj-42a")
|
| 114 |
+
|
| 115 |
+
@pytest.mark.parametrize("keep_egg_info", (False, True))
|
| 116 |
+
def test_output_dir(self, tmp_path, keep_egg_info):
|
| 117 |
+
config = "[metadata]\nname=proj\nversion=42\n"
|
| 118 |
+
(tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
|
| 119 |
+
out = tmp_path / "__out"
|
| 120 |
+
out.mkdir()
|
| 121 |
+
opts = ["--keep-egg-info"] if keep_egg_info else []
|
| 122 |
+
run_command("dist_info", "--output-dir", out, *opts, cwd=tmp_path)
|
| 123 |
+
assert len(list(out.glob("*.dist-info"))) == 1
|
| 124 |
+
assert len(list(tmp_path.glob("*.dist-info"))) == 0
|
| 125 |
+
expected_egg_info = int(keep_egg_info)
|
| 126 |
+
assert len(list(out.glob("*.egg-info"))) == expected_egg_info
|
| 127 |
+
assert len(list(tmp_path.glob("*.egg-info"))) == 0
|
| 128 |
+
assert len(list(out.glob("*.__bkp__"))) == 0
|
| 129 |
+
assert len(list(tmp_path.glob("*.__bkp__"))) == 0
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
class TestWheelCompatibility:
|
| 133 |
+
"""Make sure the .dist-info directory produced with the ``dist_info`` command
|
| 134 |
+
is the same as the one produced by ``bdist_wheel``.
|
| 135 |
+
"""
|
| 136 |
+
|
| 137 |
+
SETUPCFG = DALS(
|
| 138 |
+
"""
|
| 139 |
+
[metadata]
|
| 140 |
+
name = {name}
|
| 141 |
+
version = {version}
|
| 142 |
+
|
| 143 |
+
[options]
|
| 144 |
+
install_requires =
|
| 145 |
+
foo>=12; sys_platform != "linux"
|
| 146 |
+
|
| 147 |
+
[options.extras_require]
|
| 148 |
+
test = pytest
|
| 149 |
+
|
| 150 |
+
[options.entry_points]
|
| 151 |
+
console_scripts =
|
| 152 |
+
executable-name = my_package.module:function
|
| 153 |
+
discover =
|
| 154 |
+
myproj = my_package.other_module:function
|
| 155 |
+
"""
|
| 156 |
+
)
|
| 157 |
+
|
| 158 |
+
EGG_INFO_OPTS = [
|
| 159 |
+
# Related: #3088 #2872
|
| 160 |
+
("", ""),
|
| 161 |
+
(".post", "[egg_info]\ntag_build = post\n"),
|
| 162 |
+
(".post", "[egg_info]\ntag_build = .post\n"),
|
| 163 |
+
(".post", "[egg_info]\ntag_build = post\ntag_date = 1\n"),
|
| 164 |
+
(".dev", "[egg_info]\ntag_build = .dev\n"),
|
| 165 |
+
(".dev", "[egg_info]\ntag_build = .dev\ntag_date = 1\n"),
|
| 166 |
+
("a1", "[egg_info]\ntag_build = .a1\n"),
|
| 167 |
+
("+local", "[egg_info]\ntag_build = +local\n"),
|
| 168 |
+
]
|
| 169 |
+
|
| 170 |
+
@pytest.mark.parametrize("name", "my-proj my_proj my.proj My.Proj".split())
|
| 171 |
+
@pytest.mark.parametrize("version", ["0.42.13"])
|
| 172 |
+
@pytest.mark.parametrize(("suffix", "cfg"), EGG_INFO_OPTS)
|
| 173 |
+
def test_dist_info_is_the_same_as_in_wheel(
|
| 174 |
+
self, name, version, tmp_path, suffix, cfg
|
| 175 |
+
):
|
| 176 |
+
config = self.SETUPCFG.format(name=name, version=version) + cfg
|
| 177 |
+
|
| 178 |
+
for i in "dir_wheel", "dir_dist":
|
| 179 |
+
(tmp_path / i).mkdir()
|
| 180 |
+
(tmp_path / i / "setup.cfg").write_text(config, encoding="utf-8")
|
| 181 |
+
|
| 182 |
+
run_command("bdist_wheel", cwd=tmp_path / "dir_wheel")
|
| 183 |
+
wheel = next(tmp_path.glob("dir_wheel/dist/*.whl"))
|
| 184 |
+
unpack_archive(wheel, tmp_path / "unpack")
|
| 185 |
+
wheel_dist_info = next(tmp_path.glob("unpack/*.dist-info"))
|
| 186 |
+
|
| 187 |
+
run_command("dist_info", cwd=tmp_path / "dir_dist")
|
| 188 |
+
dist_info = next(tmp_path.glob("dir_dist/*.dist-info"))
|
| 189 |
+
|
| 190 |
+
assert dist_info.name == wheel_dist_info.name
|
| 191 |
+
assert dist_info.name.startswith(f"{name.replace('-', '_')}-{version}{suffix}")
|
| 192 |
+
for file in "METADATA", "entry_points.txt":
|
| 193 |
+
assert read(dist_info / file) == read(wheel_dist_info / file)
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def run_command_inner(*cmd, **kwargs):
|
| 197 |
+
opts = {
|
| 198 |
+
"stderr": subprocess.STDOUT,
|
| 199 |
+
"stdout": subprocess.PIPE,
|
| 200 |
+
"text": True,
|
| 201 |
+
"encoding": "utf-8",
|
| 202 |
+
"check": True,
|
| 203 |
+
**kwargs,
|
| 204 |
+
}
|
| 205 |
+
cmd = [sys.executable, "-c", "__import__('setuptools').setup()", *map(str, cmd)]
|
| 206 |
+
return subprocess.run(cmd, **opts)
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
def run_command(*args, **kwargs):
|
| 210 |
+
return run_command_inner(*args, **kwargs).stdout
|
llava/lib/python3.10/site-packages/setuptools/tests/test_easy_install.py
ADDED
|
@@ -0,0 +1,1472 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Easy install Tests"""
|
| 2 |
+
|
| 3 |
+
import contextlib
|
| 4 |
+
import io
|
| 5 |
+
import itertools
|
| 6 |
+
import logging
|
| 7 |
+
import os
|
| 8 |
+
import pathlib
|
| 9 |
+
import re
|
| 10 |
+
import site
|
| 11 |
+
import subprocess
|
| 12 |
+
import sys
|
| 13 |
+
import tarfile
|
| 14 |
+
import tempfile
|
| 15 |
+
import time
|
| 16 |
+
import warnings
|
| 17 |
+
import zipfile
|
| 18 |
+
from pathlib import Path
|
| 19 |
+
from typing import NamedTuple
|
| 20 |
+
from unittest import mock
|
| 21 |
+
|
| 22 |
+
import pytest
|
| 23 |
+
from jaraco import path
|
| 24 |
+
|
| 25 |
+
import pkg_resources
|
| 26 |
+
import setuptools.command.easy_install as ei
|
| 27 |
+
from pkg_resources import Distribution as PRDistribution, normalize_path, working_set
|
| 28 |
+
from setuptools import sandbox
|
| 29 |
+
from setuptools.command.easy_install import PthDistributions
|
| 30 |
+
from setuptools.dist import Distribution
|
| 31 |
+
from setuptools.sandbox import run_setup
|
| 32 |
+
from setuptools.tests import fail_on_ascii
|
| 33 |
+
from setuptools.tests.server import MockServer, path_to_url
|
| 34 |
+
|
| 35 |
+
from . import contexts
|
| 36 |
+
from .textwrap import DALS
|
| 37 |
+
|
| 38 |
+
import distutils.errors
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
@pytest.fixture(autouse=True)
|
| 42 |
+
def pip_disable_index(monkeypatch):
|
| 43 |
+
"""
|
| 44 |
+
Important: Disable the default index for pip to avoid
|
| 45 |
+
querying packages in the index and potentially resolving
|
| 46 |
+
and installing packages there.
|
| 47 |
+
"""
|
| 48 |
+
monkeypatch.setenv('PIP_NO_INDEX', 'true')
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class FakeDist:
|
| 52 |
+
def get_entry_map(self, group):
|
| 53 |
+
if group != 'console_scripts':
|
| 54 |
+
return {}
|
| 55 |
+
return {'name': 'ep'}
|
| 56 |
+
|
| 57 |
+
def as_requirement(self):
|
| 58 |
+
return 'spec'
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
SETUP_PY = DALS(
|
| 62 |
+
"""
|
| 63 |
+
from setuptools import setup
|
| 64 |
+
|
| 65 |
+
setup()
|
| 66 |
+
"""
|
| 67 |
+
)
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class TestEasyInstallTest:
|
| 71 |
+
def test_get_script_args(self):
|
| 72 |
+
header = ei.CommandSpec.best().from_environment().as_header()
|
| 73 |
+
dist = FakeDist()
|
| 74 |
+
args = next(ei.ScriptWriter.get_args(dist))
|
| 75 |
+
_name, script = itertools.islice(args, 2)
|
| 76 |
+
assert script.startswith(header)
|
| 77 |
+
assert "'spec'" in script
|
| 78 |
+
assert "'console_scripts'" in script
|
| 79 |
+
assert "'name'" in script
|
| 80 |
+
assert re.search('^# EASY-INSTALL-ENTRY-SCRIPT', script, flags=re.MULTILINE)
|
| 81 |
+
|
| 82 |
+
def test_no_find_links(self):
|
| 83 |
+
# new option '--no-find-links', that blocks find-links added at
|
| 84 |
+
# the project level
|
| 85 |
+
dist = Distribution()
|
| 86 |
+
cmd = ei.easy_install(dist)
|
| 87 |
+
cmd.check_pth_processing = lambda: True
|
| 88 |
+
cmd.no_find_links = True
|
| 89 |
+
cmd.find_links = ['link1', 'link2']
|
| 90 |
+
cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
|
| 91 |
+
cmd.args = ['ok']
|
| 92 |
+
cmd.ensure_finalized()
|
| 93 |
+
assert cmd.package_index.scanned_urls == {}
|
| 94 |
+
|
| 95 |
+
# let's try without it (default behavior)
|
| 96 |
+
cmd = ei.easy_install(dist)
|
| 97 |
+
cmd.check_pth_processing = lambda: True
|
| 98 |
+
cmd.find_links = ['link1', 'link2']
|
| 99 |
+
cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
|
| 100 |
+
cmd.args = ['ok']
|
| 101 |
+
cmd.ensure_finalized()
|
| 102 |
+
keys = sorted(cmd.package_index.scanned_urls.keys())
|
| 103 |
+
assert keys == ['link1', 'link2']
|
| 104 |
+
|
| 105 |
+
def test_write_exception(self):
|
| 106 |
+
"""
|
| 107 |
+
Test that `cant_write_to_target` is rendered as a DistutilsError.
|
| 108 |
+
"""
|
| 109 |
+
dist = Distribution()
|
| 110 |
+
cmd = ei.easy_install(dist)
|
| 111 |
+
cmd.install_dir = os.getcwd()
|
| 112 |
+
with pytest.raises(distutils.errors.DistutilsError):
|
| 113 |
+
cmd.cant_write_to_target()
|
| 114 |
+
|
| 115 |
+
def test_all_site_dirs(self, monkeypatch):
|
| 116 |
+
"""
|
| 117 |
+
get_site_dirs should always return site dirs reported by
|
| 118 |
+
site.getsitepackages.
|
| 119 |
+
"""
|
| 120 |
+
path = normalize_path('/setuptools/test/site-packages')
|
| 121 |
+
|
| 122 |
+
def mock_gsp():
|
| 123 |
+
return [path]
|
| 124 |
+
|
| 125 |
+
monkeypatch.setattr(site, 'getsitepackages', mock_gsp, raising=False)
|
| 126 |
+
assert path in ei.get_site_dirs()
|
| 127 |
+
|
| 128 |
+
def test_all_site_dirs_works_without_getsitepackages(self, monkeypatch):
|
| 129 |
+
monkeypatch.delattr(site, 'getsitepackages', raising=False)
|
| 130 |
+
assert ei.get_site_dirs()
|
| 131 |
+
|
| 132 |
+
@pytest.fixture
|
| 133 |
+
def sdist_unicode(self, tmpdir):
|
| 134 |
+
files = [
|
| 135 |
+
(
|
| 136 |
+
'setup.py',
|
| 137 |
+
DALS(
|
| 138 |
+
"""
|
| 139 |
+
import setuptools
|
| 140 |
+
setuptools.setup(
|
| 141 |
+
name="setuptools-test-unicode",
|
| 142 |
+
version="1.0",
|
| 143 |
+
packages=["mypkg"],
|
| 144 |
+
include_package_data=True,
|
| 145 |
+
)
|
| 146 |
+
"""
|
| 147 |
+
),
|
| 148 |
+
),
|
| 149 |
+
(
|
| 150 |
+
'mypkg/__init__.py',
|
| 151 |
+
"",
|
| 152 |
+
),
|
| 153 |
+
(
|
| 154 |
+
'mypkg/☃.txt',
|
| 155 |
+
"",
|
| 156 |
+
),
|
| 157 |
+
]
|
| 158 |
+
sdist_name = 'setuptools-test-unicode-1.0.zip'
|
| 159 |
+
sdist = tmpdir / sdist_name
|
| 160 |
+
# can't use make_sdist, because the issue only occurs
|
| 161 |
+
# with zip sdists.
|
| 162 |
+
sdist_zip = zipfile.ZipFile(str(sdist), 'w')
|
| 163 |
+
for filename, content in files:
|
| 164 |
+
sdist_zip.writestr(filename, content)
|
| 165 |
+
sdist_zip.close()
|
| 166 |
+
return str(sdist)
|
| 167 |
+
|
| 168 |
+
@fail_on_ascii
|
| 169 |
+
def test_unicode_filename_in_sdist(self, sdist_unicode, tmpdir, monkeypatch):
|
| 170 |
+
"""
|
| 171 |
+
The install command should execute correctly even if
|
| 172 |
+
the package has unicode filenames.
|
| 173 |
+
"""
|
| 174 |
+
dist = Distribution({'script_args': ['easy_install']})
|
| 175 |
+
target = (tmpdir / 'target').ensure_dir()
|
| 176 |
+
cmd = ei.easy_install(
|
| 177 |
+
dist,
|
| 178 |
+
install_dir=str(target),
|
| 179 |
+
args=['x'],
|
| 180 |
+
)
|
| 181 |
+
monkeypatch.setitem(os.environ, 'PYTHONPATH', str(target))
|
| 182 |
+
cmd.ensure_finalized()
|
| 183 |
+
cmd.easy_install(sdist_unicode)
|
| 184 |
+
|
| 185 |
+
@pytest.fixture
|
| 186 |
+
def sdist_unicode_in_script(self, tmpdir):
|
| 187 |
+
files = [
|
| 188 |
+
(
|
| 189 |
+
"setup.py",
|
| 190 |
+
DALS(
|
| 191 |
+
"""
|
| 192 |
+
import setuptools
|
| 193 |
+
setuptools.setup(
|
| 194 |
+
name="setuptools-test-unicode",
|
| 195 |
+
version="1.0",
|
| 196 |
+
packages=["mypkg"],
|
| 197 |
+
include_package_data=True,
|
| 198 |
+
scripts=['mypkg/unicode_in_script'],
|
| 199 |
+
)
|
| 200 |
+
"""
|
| 201 |
+
),
|
| 202 |
+
),
|
| 203 |
+
("mypkg/__init__.py", ""),
|
| 204 |
+
(
|
| 205 |
+
"mypkg/unicode_in_script",
|
| 206 |
+
DALS(
|
| 207 |
+
"""
|
| 208 |
+
#!/bin/sh
|
| 209 |
+
# á
|
| 210 |
+
|
| 211 |
+
non_python_fn() {
|
| 212 |
+
}
|
| 213 |
+
"""
|
| 214 |
+
),
|
| 215 |
+
),
|
| 216 |
+
]
|
| 217 |
+
sdist_name = "setuptools-test-unicode-script-1.0.zip"
|
| 218 |
+
sdist = tmpdir / sdist_name
|
| 219 |
+
# can't use make_sdist, because the issue only occurs
|
| 220 |
+
# with zip sdists.
|
| 221 |
+
sdist_zip = zipfile.ZipFile(str(sdist), "w")
|
| 222 |
+
for filename, content in files:
|
| 223 |
+
sdist_zip.writestr(filename, content.encode('utf-8'))
|
| 224 |
+
sdist_zip.close()
|
| 225 |
+
return str(sdist)
|
| 226 |
+
|
| 227 |
+
@fail_on_ascii
|
| 228 |
+
def test_unicode_content_in_sdist(
|
| 229 |
+
self, sdist_unicode_in_script, tmpdir, monkeypatch
|
| 230 |
+
):
|
| 231 |
+
"""
|
| 232 |
+
The install command should execute correctly even if
|
| 233 |
+
the package has unicode in scripts.
|
| 234 |
+
"""
|
| 235 |
+
dist = Distribution({"script_args": ["easy_install"]})
|
| 236 |
+
target = (tmpdir / "target").ensure_dir()
|
| 237 |
+
cmd = ei.easy_install(dist, install_dir=str(target), args=["x"])
|
| 238 |
+
monkeypatch.setitem(os.environ, "PYTHONPATH", str(target))
|
| 239 |
+
cmd.ensure_finalized()
|
| 240 |
+
cmd.easy_install(sdist_unicode_in_script)
|
| 241 |
+
|
| 242 |
+
@pytest.fixture
|
| 243 |
+
def sdist_script(self, tmpdir):
|
| 244 |
+
files = [
|
| 245 |
+
(
|
| 246 |
+
'setup.py',
|
| 247 |
+
DALS(
|
| 248 |
+
"""
|
| 249 |
+
import setuptools
|
| 250 |
+
setuptools.setup(
|
| 251 |
+
name="setuptools-test-script",
|
| 252 |
+
version="1.0",
|
| 253 |
+
scripts=["mypkg_script"],
|
| 254 |
+
)
|
| 255 |
+
"""
|
| 256 |
+
),
|
| 257 |
+
),
|
| 258 |
+
(
|
| 259 |
+
'mypkg_script',
|
| 260 |
+
DALS(
|
| 261 |
+
"""
|
| 262 |
+
#/usr/bin/python
|
| 263 |
+
print('mypkg_script')
|
| 264 |
+
"""
|
| 265 |
+
),
|
| 266 |
+
),
|
| 267 |
+
]
|
| 268 |
+
sdist_name = 'setuptools-test-script-1.0.zip'
|
| 269 |
+
sdist = str(tmpdir / sdist_name)
|
| 270 |
+
make_sdist(sdist, files)
|
| 271 |
+
return sdist
|
| 272 |
+
|
| 273 |
+
@pytest.mark.skipif(
|
| 274 |
+
not sys.platform.startswith('linux'), reason="Test can only be run on Linux"
|
| 275 |
+
)
|
| 276 |
+
def test_script_install(self, sdist_script, tmpdir, monkeypatch):
|
| 277 |
+
"""
|
| 278 |
+
Check scripts are installed.
|
| 279 |
+
"""
|
| 280 |
+
dist = Distribution({'script_args': ['easy_install']})
|
| 281 |
+
target = (tmpdir / 'target').ensure_dir()
|
| 282 |
+
cmd = ei.easy_install(
|
| 283 |
+
dist,
|
| 284 |
+
install_dir=str(target),
|
| 285 |
+
args=['x'],
|
| 286 |
+
)
|
| 287 |
+
monkeypatch.setitem(os.environ, 'PYTHONPATH', str(target))
|
| 288 |
+
cmd.ensure_finalized()
|
| 289 |
+
cmd.easy_install(sdist_script)
|
| 290 |
+
assert (target / 'mypkg_script').exists()
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
@pytest.mark.filterwarnings('ignore:Unbuilt egg')
|
| 294 |
+
class TestPTHFileWriter:
|
| 295 |
+
def test_add_from_cwd_site_sets_dirty(self):
|
| 296 |
+
"""a pth file manager should set dirty
|
| 297 |
+
if a distribution is in site but also the cwd
|
| 298 |
+
"""
|
| 299 |
+
pth = PthDistributions('does-not_exist', [os.getcwd()])
|
| 300 |
+
assert not pth.dirty
|
| 301 |
+
pth.add(PRDistribution(os.getcwd()))
|
| 302 |
+
assert pth.dirty
|
| 303 |
+
|
| 304 |
+
def test_add_from_site_is_ignored(self):
|
| 305 |
+
location = '/test/location/does-not-have-to-exist'
|
| 306 |
+
# PthDistributions expects all locations to be normalized
|
| 307 |
+
location = pkg_resources.normalize_path(location)
|
| 308 |
+
pth = PthDistributions(
|
| 309 |
+
'does-not_exist',
|
| 310 |
+
[
|
| 311 |
+
location,
|
| 312 |
+
],
|
| 313 |
+
)
|
| 314 |
+
assert not pth.dirty
|
| 315 |
+
pth.add(PRDistribution(location))
|
| 316 |
+
assert not pth.dirty
|
| 317 |
+
|
| 318 |
+
def test_many_pth_distributions_merge_together(self, tmpdir):
|
| 319 |
+
"""
|
| 320 |
+
If the pth file is modified under the hood, then PthDistribution
|
| 321 |
+
will refresh its content before saving, merging contents when
|
| 322 |
+
necessary.
|
| 323 |
+
"""
|
| 324 |
+
# putting the pth file in a dedicated sub-folder,
|
| 325 |
+
pth_subdir = tmpdir.join("pth_subdir")
|
| 326 |
+
pth_subdir.mkdir()
|
| 327 |
+
pth_path = str(pth_subdir.join("file1.pth"))
|
| 328 |
+
pth1 = PthDistributions(pth_path)
|
| 329 |
+
pth2 = PthDistributions(pth_path)
|
| 330 |
+
assert pth1.paths == pth2.paths == [], (
|
| 331 |
+
"unless there would be some default added at some point"
|
| 332 |
+
)
|
| 333 |
+
# and so putting the src_subdir in folder distinct than the pth one,
|
| 334 |
+
# so to keep it absolute by PthDistributions
|
| 335 |
+
new_src_path = tmpdir.join("src_subdir")
|
| 336 |
+
new_src_path.mkdir() # must exist to be accounted
|
| 337 |
+
new_src_path_str = str(new_src_path)
|
| 338 |
+
pth1.paths.append(new_src_path_str)
|
| 339 |
+
pth1.save()
|
| 340 |
+
assert pth1.paths, (
|
| 341 |
+
"the new_src_path added must still be present/valid in pth1 after save"
|
| 342 |
+
)
|
| 343 |
+
# now,
|
| 344 |
+
assert new_src_path_str not in pth2.paths, (
|
| 345 |
+
"right before we save the entry should still not be present"
|
| 346 |
+
)
|
| 347 |
+
pth2.save()
|
| 348 |
+
assert new_src_path_str in pth2.paths, (
|
| 349 |
+
"the new_src_path entry should have been added by pth2 with its save() call"
|
| 350 |
+
)
|
| 351 |
+
assert pth2.paths[-1] == new_src_path, (
|
| 352 |
+
"and it should match exactly on the last entry actually "
|
| 353 |
+
"given we append to it in save()"
|
| 354 |
+
)
|
| 355 |
+
# finally,
|
| 356 |
+
assert PthDistributions(pth_path).paths == pth2.paths, (
|
| 357 |
+
"and we should have the exact same list at the end "
|
| 358 |
+
"with a fresh PthDistributions instance"
|
| 359 |
+
)
|
| 360 |
+
|
| 361 |
+
|
| 362 |
+
@pytest.fixture
|
| 363 |
+
def setup_context(tmpdir):
|
| 364 |
+
with (tmpdir / 'setup.py').open('w', encoding="utf-8") as f:
|
| 365 |
+
f.write(SETUP_PY)
|
| 366 |
+
with tmpdir.as_cwd():
|
| 367 |
+
yield tmpdir
|
| 368 |
+
|
| 369 |
+
|
| 370 |
+
@pytest.mark.usefixtures("user_override")
|
| 371 |
+
@pytest.mark.usefixtures("setup_context")
|
| 372 |
+
class TestUserInstallTest:
|
| 373 |
+
# prevent check that site-packages is writable. easy_install
|
| 374 |
+
# shouldn't be writing to system site-packages during finalize
|
| 375 |
+
# options, but while it does, bypass the behavior.
|
| 376 |
+
prev_sp_write = mock.patch(
|
| 377 |
+
'setuptools.command.easy_install.easy_install.check_site_dir',
|
| 378 |
+
mock.Mock(),
|
| 379 |
+
)
|
| 380 |
+
|
| 381 |
+
# simulate setuptools installed in user site packages
|
| 382 |
+
@mock.patch('setuptools.command.easy_install.__file__', site.USER_SITE)
|
| 383 |
+
@mock.patch('site.ENABLE_USER_SITE', True)
|
| 384 |
+
@prev_sp_write
|
| 385 |
+
def test_user_install_not_implied_user_site_enabled(self):
|
| 386 |
+
self.assert_not_user_site()
|
| 387 |
+
|
| 388 |
+
@mock.patch('site.ENABLE_USER_SITE', False)
|
| 389 |
+
@prev_sp_write
|
| 390 |
+
def test_user_install_not_implied_user_site_disabled(self):
|
| 391 |
+
self.assert_not_user_site()
|
| 392 |
+
|
| 393 |
+
@staticmethod
|
| 394 |
+
def assert_not_user_site():
|
| 395 |
+
# create a finalized easy_install command
|
| 396 |
+
dist = Distribution()
|
| 397 |
+
dist.script_name = 'setup.py'
|
| 398 |
+
cmd = ei.easy_install(dist)
|
| 399 |
+
cmd.args = ['py']
|
| 400 |
+
cmd.ensure_finalized()
|
| 401 |
+
assert not cmd.user, 'user should not be implied'
|
| 402 |
+
|
| 403 |
+
def test_multiproc_atexit(self):
|
| 404 |
+
pytest.importorskip('multiprocessing')
|
| 405 |
+
|
| 406 |
+
log = logging.getLogger('test_easy_install')
|
| 407 |
+
logging.basicConfig(level=logging.INFO, stream=sys.stderr)
|
| 408 |
+
log.info('this should not break')
|
| 409 |
+
|
| 410 |
+
@pytest.fixture
|
| 411 |
+
def foo_package(self, tmpdir):
|
| 412 |
+
egg_file = tmpdir / 'foo-1.0.egg-info'
|
| 413 |
+
with egg_file.open('w') as f:
|
| 414 |
+
f.write('Name: foo\n')
|
| 415 |
+
return str(tmpdir)
|
| 416 |
+
|
| 417 |
+
@pytest.fixture
|
| 418 |
+
def install_target(self, tmpdir):
|
| 419 |
+
target = str(tmpdir)
|
| 420 |
+
with mock.patch('sys.path', sys.path + [target]):
|
| 421 |
+
python_path = os.path.pathsep.join(sys.path)
|
| 422 |
+
with mock.patch.dict(os.environ, PYTHONPATH=python_path):
|
| 423 |
+
yield target
|
| 424 |
+
|
| 425 |
+
def test_local_index(self, foo_package, install_target):
|
| 426 |
+
"""
|
| 427 |
+
The local index must be used when easy_install locates installed
|
| 428 |
+
packages.
|
| 429 |
+
"""
|
| 430 |
+
dist = Distribution()
|
| 431 |
+
dist.script_name = 'setup.py'
|
| 432 |
+
cmd = ei.easy_install(dist)
|
| 433 |
+
cmd.install_dir = install_target
|
| 434 |
+
cmd.args = ['foo']
|
| 435 |
+
cmd.ensure_finalized()
|
| 436 |
+
cmd.local_index.scan([foo_package])
|
| 437 |
+
res = cmd.easy_install('foo')
|
| 438 |
+
actual = os.path.normcase(os.path.realpath(res.location))
|
| 439 |
+
expected = os.path.normcase(os.path.realpath(foo_package))
|
| 440 |
+
assert actual == expected
|
| 441 |
+
|
| 442 |
+
@contextlib.contextmanager
|
| 443 |
+
def user_install_setup_context(self, *args, **kwargs):
|
| 444 |
+
"""
|
| 445 |
+
Wrap sandbox.setup_context to patch easy_install in that context to
|
| 446 |
+
appear as user-installed.
|
| 447 |
+
"""
|
| 448 |
+
with self.orig_context(*args, **kwargs):
|
| 449 |
+
import setuptools.command.easy_install as ei
|
| 450 |
+
|
| 451 |
+
ei.__file__ = site.USER_SITE
|
| 452 |
+
yield
|
| 453 |
+
|
| 454 |
+
def patched_setup_context(self):
|
| 455 |
+
self.orig_context = sandbox.setup_context
|
| 456 |
+
|
| 457 |
+
return mock.patch(
|
| 458 |
+
'setuptools.sandbox.setup_context',
|
| 459 |
+
self.user_install_setup_context,
|
| 460 |
+
)
|
| 461 |
+
|
| 462 |
+
|
| 463 |
+
@pytest.fixture
|
| 464 |
+
def distutils_package():
|
| 465 |
+
distutils_setup_py = SETUP_PY.replace(
|
| 466 |
+
'from setuptools import setup',
|
| 467 |
+
'from distutils.core import setup',
|
| 468 |
+
)
|
| 469 |
+
with contexts.tempdir(cd=os.chdir):
|
| 470 |
+
with open('setup.py', 'w', encoding="utf-8") as f:
|
| 471 |
+
f.write(distutils_setup_py)
|
| 472 |
+
yield
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
@pytest.mark.usefixtures("distutils_package")
|
| 476 |
+
class TestDistutilsPackage:
|
| 477 |
+
def test_bdist_egg_available_on_distutils_pkg(self):
|
| 478 |
+
run_setup('setup.py', ['bdist_egg'])
|
| 479 |
+
|
| 480 |
+
|
| 481 |
+
@pytest.fixture
|
| 482 |
+
def mock_index():
|
| 483 |
+
# set up a server which will simulate an alternate package index.
|
| 484 |
+
p_index = MockServer()
|
| 485 |
+
if p_index.server_port == 0:
|
| 486 |
+
# Some platforms (Jython) don't find a port to which to bind,
|
| 487 |
+
# so skip test for them.
|
| 488 |
+
pytest.skip("could not find a valid port")
|
| 489 |
+
p_index.start()
|
| 490 |
+
return p_index
|
| 491 |
+
|
| 492 |
+
|
| 493 |
+
class TestInstallRequires:
|
| 494 |
+
def test_setup_install_includes_dependencies(self, tmp_path, mock_index):
|
| 495 |
+
"""
|
| 496 |
+
When ``python setup.py install`` is called directly, it will use easy_install
|
| 497 |
+
to fetch dependencies.
|
| 498 |
+
"""
|
| 499 |
+
# TODO: Remove these tests once `setup.py install` is completely removed
|
| 500 |
+
project_root = tmp_path / "project"
|
| 501 |
+
project_root.mkdir(exist_ok=True)
|
| 502 |
+
install_root = tmp_path / "install"
|
| 503 |
+
install_root.mkdir(exist_ok=True)
|
| 504 |
+
|
| 505 |
+
self.create_project(project_root)
|
| 506 |
+
cmd = [
|
| 507 |
+
sys.executable,
|
| 508 |
+
'-c',
|
| 509 |
+
'__import__("setuptools").setup()',
|
| 510 |
+
'install',
|
| 511 |
+
'--install-base',
|
| 512 |
+
str(install_root),
|
| 513 |
+
'--install-lib',
|
| 514 |
+
str(install_root),
|
| 515 |
+
'--install-headers',
|
| 516 |
+
str(install_root),
|
| 517 |
+
'--install-scripts',
|
| 518 |
+
str(install_root),
|
| 519 |
+
'--install-data',
|
| 520 |
+
str(install_root),
|
| 521 |
+
'--install-purelib',
|
| 522 |
+
str(install_root),
|
| 523 |
+
'--install-platlib',
|
| 524 |
+
str(install_root),
|
| 525 |
+
]
|
| 526 |
+
env = {**os.environ, "__EASYINSTALL_INDEX": mock_index.url}
|
| 527 |
+
cp = subprocess.run(
|
| 528 |
+
cmd,
|
| 529 |
+
cwd=str(project_root),
|
| 530 |
+
env=env,
|
| 531 |
+
stdout=subprocess.PIPE,
|
| 532 |
+
stderr=subprocess.STDOUT,
|
| 533 |
+
text=True,
|
| 534 |
+
encoding="utf-8",
|
| 535 |
+
)
|
| 536 |
+
assert cp.returncode != 0
|
| 537 |
+
try:
|
| 538 |
+
assert '/does-not-exist/' in {r.path for r in mock_index.requests}
|
| 539 |
+
assert next(
|
| 540 |
+
line
|
| 541 |
+
for line in cp.stdout.splitlines()
|
| 542 |
+
if "not find suitable distribution for" in line
|
| 543 |
+
and "does-not-exist" in line
|
| 544 |
+
)
|
| 545 |
+
except Exception:
|
| 546 |
+
if "failed to get random numbers" in cp.stdout:
|
| 547 |
+
pytest.xfail(f"{sys.platform} failure - {cp.stdout}")
|
| 548 |
+
raise
|
| 549 |
+
|
| 550 |
+
def create_project(self, root):
|
| 551 |
+
config = """
|
| 552 |
+
[metadata]
|
| 553 |
+
name = project
|
| 554 |
+
version = 42
|
| 555 |
+
|
| 556 |
+
[options]
|
| 557 |
+
install_requires = does-not-exist
|
| 558 |
+
py_modules = mod
|
| 559 |
+
"""
|
| 560 |
+
(root / 'setup.cfg').write_text(DALS(config), encoding="utf-8")
|
| 561 |
+
(root / 'mod.py').touch()
|
| 562 |
+
|
| 563 |
+
|
| 564 |
+
class TestSetupRequires:
|
| 565 |
+
def test_setup_requires_honors_fetch_params(self, mock_index, monkeypatch):
|
| 566 |
+
"""
|
| 567 |
+
When easy_install installs a source distribution which specifies
|
| 568 |
+
setup_requires, it should honor the fetch parameters (such as
|
| 569 |
+
index-url, and find-links).
|
| 570 |
+
"""
|
| 571 |
+
monkeypatch.setenv('PIP_RETRIES', '0')
|
| 572 |
+
monkeypatch.setenv('PIP_TIMEOUT', '0')
|
| 573 |
+
monkeypatch.setenv('PIP_NO_INDEX', 'false')
|
| 574 |
+
with contexts.quiet():
|
| 575 |
+
# create an sdist that has a build-time dependency.
|
| 576 |
+
with TestSetupRequires.create_sdist() as dist_file:
|
| 577 |
+
with contexts.tempdir() as temp_install_dir:
|
| 578 |
+
with contexts.environment(PYTHONPATH=temp_install_dir):
|
| 579 |
+
cmd = [
|
| 580 |
+
sys.executable,
|
| 581 |
+
'-c',
|
| 582 |
+
'__import__("setuptools").setup()',
|
| 583 |
+
'easy_install',
|
| 584 |
+
'--index-url',
|
| 585 |
+
mock_index.url,
|
| 586 |
+
'--exclude-scripts',
|
| 587 |
+
'--install-dir',
|
| 588 |
+
temp_install_dir,
|
| 589 |
+
dist_file,
|
| 590 |
+
]
|
| 591 |
+
subprocess.Popen(cmd).wait()
|
| 592 |
+
# there should have been one requests to the server
|
| 593 |
+
assert [r.path for r in mock_index.requests] == ['/does-not-exist/']
|
| 594 |
+
|
| 595 |
+
@staticmethod
|
| 596 |
+
@contextlib.contextmanager
|
| 597 |
+
def create_sdist():
|
| 598 |
+
"""
|
| 599 |
+
Return an sdist with a setup_requires dependency (of something that
|
| 600 |
+
doesn't exist)
|
| 601 |
+
"""
|
| 602 |
+
with contexts.tempdir() as dir:
|
| 603 |
+
dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz')
|
| 604 |
+
make_sdist(
|
| 605 |
+
dist_path,
|
| 606 |
+
[
|
| 607 |
+
(
|
| 608 |
+
'setup.py',
|
| 609 |
+
DALS(
|
| 610 |
+
"""
|
| 611 |
+
import setuptools
|
| 612 |
+
setuptools.setup(
|
| 613 |
+
name="setuptools-test-fetcher",
|
| 614 |
+
version="1.0",
|
| 615 |
+
setup_requires = ['does-not-exist'],
|
| 616 |
+
)
|
| 617 |
+
"""
|
| 618 |
+
),
|
| 619 |
+
),
|
| 620 |
+
('setup.cfg', ''),
|
| 621 |
+
],
|
| 622 |
+
)
|
| 623 |
+
yield dist_path
|
| 624 |
+
|
| 625 |
+
use_setup_cfg = (
|
| 626 |
+
(),
|
| 627 |
+
('dependency_links',),
|
| 628 |
+
('setup_requires',),
|
| 629 |
+
('dependency_links', 'setup_requires'),
|
| 630 |
+
)
|
| 631 |
+
|
| 632 |
+
@pytest.mark.parametrize('use_setup_cfg', use_setup_cfg)
|
| 633 |
+
def test_setup_requires_overrides_version_conflict(self, use_setup_cfg):
|
| 634 |
+
"""
|
| 635 |
+
Regression test for distribution issue 323:
|
| 636 |
+
https://bitbucket.org/tarek/distribute/issues/323
|
| 637 |
+
|
| 638 |
+
Ensures that a distribution's setup_requires requirements can still be
|
| 639 |
+
installed and used locally even if a conflicting version of that
|
| 640 |
+
requirement is already on the path.
|
| 641 |
+
"""
|
| 642 |
+
|
| 643 |
+
fake_dist = PRDistribution(
|
| 644 |
+
'does-not-matter', project_name='foobar', version='0.0'
|
| 645 |
+
)
|
| 646 |
+
working_set.add(fake_dist)
|
| 647 |
+
|
| 648 |
+
with contexts.save_pkg_resources_state():
|
| 649 |
+
with contexts.tempdir() as temp_dir:
|
| 650 |
+
test_pkg = create_setup_requires_package(
|
| 651 |
+
temp_dir, use_setup_cfg=use_setup_cfg
|
| 652 |
+
)
|
| 653 |
+
test_setup_py = os.path.join(test_pkg, 'setup.py')
|
| 654 |
+
with contexts.quiet() as (stdout, _stderr):
|
| 655 |
+
# Don't even need to install the package, just
|
| 656 |
+
# running the setup.py at all is sufficient
|
| 657 |
+
run_setup(test_setup_py, ['--name'])
|
| 658 |
+
|
| 659 |
+
lines = stdout.readlines()
|
| 660 |
+
assert len(lines) > 0
|
| 661 |
+
assert lines[-1].strip() == 'test_pkg'
|
| 662 |
+
|
| 663 |
+
@pytest.mark.parametrize('use_setup_cfg', use_setup_cfg)
|
| 664 |
+
def test_setup_requires_override_nspkg(self, use_setup_cfg):
|
| 665 |
+
"""
|
| 666 |
+
Like ``test_setup_requires_overrides_version_conflict`` but where the
|
| 667 |
+
``setup_requires`` package is part of a namespace package that has
|
| 668 |
+
*already* been imported.
|
| 669 |
+
"""
|
| 670 |
+
|
| 671 |
+
with contexts.save_pkg_resources_state():
|
| 672 |
+
with contexts.tempdir() as temp_dir:
|
| 673 |
+
foobar_1_archive = os.path.join(temp_dir, 'foo.bar-0.1.tar.gz')
|
| 674 |
+
make_nspkg_sdist(foobar_1_archive, 'foo.bar', '0.1')
|
| 675 |
+
# Now actually go ahead an extract to the temp dir and add the
|
| 676 |
+
# extracted path to sys.path so foo.bar v0.1 is importable
|
| 677 |
+
foobar_1_dir = os.path.join(temp_dir, 'foo.bar-0.1')
|
| 678 |
+
os.mkdir(foobar_1_dir)
|
| 679 |
+
with tarfile.open(foobar_1_archive) as tf:
|
| 680 |
+
tf.extraction_filter = lambda member, path: member
|
| 681 |
+
tf.extractall(foobar_1_dir)
|
| 682 |
+
sys.path.insert(1, foobar_1_dir)
|
| 683 |
+
|
| 684 |
+
dist = PRDistribution(
|
| 685 |
+
foobar_1_dir, project_name='foo.bar', version='0.1'
|
| 686 |
+
)
|
| 687 |
+
working_set.add(dist)
|
| 688 |
+
|
| 689 |
+
template = DALS(
|
| 690 |
+
"""\
|
| 691 |
+
import foo # Even with foo imported first the
|
| 692 |
+
# setup_requires package should override
|
| 693 |
+
import setuptools
|
| 694 |
+
setuptools.setup(**%r)
|
| 695 |
+
|
| 696 |
+
if not (hasattr(foo, '__path__') and
|
| 697 |
+
len(foo.__path__) == 2):
|
| 698 |
+
print('FAIL')
|
| 699 |
+
|
| 700 |
+
if 'foo.bar-0.2' not in foo.__path__[0]:
|
| 701 |
+
print('FAIL')
|
| 702 |
+
"""
|
| 703 |
+
)
|
| 704 |
+
|
| 705 |
+
test_pkg = create_setup_requires_package(
|
| 706 |
+
temp_dir,
|
| 707 |
+
'foo.bar',
|
| 708 |
+
'0.2',
|
| 709 |
+
make_nspkg_sdist,
|
| 710 |
+
template,
|
| 711 |
+
use_setup_cfg=use_setup_cfg,
|
| 712 |
+
)
|
| 713 |
+
|
| 714 |
+
test_setup_py = os.path.join(test_pkg, 'setup.py')
|
| 715 |
+
|
| 716 |
+
with contexts.quiet() as (stdout, _stderr):
|
| 717 |
+
try:
|
| 718 |
+
# Don't even need to install the package, just
|
| 719 |
+
# running the setup.py at all is sufficient
|
| 720 |
+
run_setup(test_setup_py, ['--name'])
|
| 721 |
+
except pkg_resources.VersionConflict:
|
| 722 |
+
self.fail(
|
| 723 |
+
'Installing setup.py requirements caused a VersionConflict'
|
| 724 |
+
)
|
| 725 |
+
|
| 726 |
+
assert 'FAIL' not in stdout.getvalue()
|
| 727 |
+
lines = stdout.readlines()
|
| 728 |
+
assert len(lines) > 0
|
| 729 |
+
assert lines[-1].strip() == 'test_pkg'
|
| 730 |
+
|
| 731 |
+
@pytest.mark.parametrize('use_setup_cfg', use_setup_cfg)
|
| 732 |
+
def test_setup_requires_with_attr_version(self, use_setup_cfg):
|
| 733 |
+
def make_dependency_sdist(dist_path, distname, version):
|
| 734 |
+
files = [
|
| 735 |
+
(
|
| 736 |
+
'setup.py',
|
| 737 |
+
DALS(
|
| 738 |
+
f"""
|
| 739 |
+
import setuptools
|
| 740 |
+
setuptools.setup(
|
| 741 |
+
name={distname!r},
|
| 742 |
+
version={version!r},
|
| 743 |
+
py_modules=[{distname!r}],
|
| 744 |
+
)
|
| 745 |
+
"""
|
| 746 |
+
),
|
| 747 |
+
),
|
| 748 |
+
(
|
| 749 |
+
distname + '.py',
|
| 750 |
+
DALS(
|
| 751 |
+
"""
|
| 752 |
+
version = 42
|
| 753 |
+
"""
|
| 754 |
+
),
|
| 755 |
+
),
|
| 756 |
+
]
|
| 757 |
+
make_sdist(dist_path, files)
|
| 758 |
+
|
| 759 |
+
with contexts.save_pkg_resources_state():
|
| 760 |
+
with contexts.tempdir() as temp_dir:
|
| 761 |
+
test_pkg = create_setup_requires_package(
|
| 762 |
+
temp_dir,
|
| 763 |
+
setup_attrs=dict(version='attr: foobar.version'),
|
| 764 |
+
make_package=make_dependency_sdist,
|
| 765 |
+
use_setup_cfg=use_setup_cfg + ('version',),
|
| 766 |
+
)
|
| 767 |
+
test_setup_py = os.path.join(test_pkg, 'setup.py')
|
| 768 |
+
with contexts.quiet() as (stdout, _stderr):
|
| 769 |
+
run_setup(test_setup_py, ['--version'])
|
| 770 |
+
lines = stdout.readlines()
|
| 771 |
+
assert len(lines) > 0
|
| 772 |
+
assert lines[-1].strip() == '42'
|
| 773 |
+
|
| 774 |
+
def test_setup_requires_honors_pip_env(self, mock_index, monkeypatch):
|
| 775 |
+
monkeypatch.setenv('PIP_RETRIES', '0')
|
| 776 |
+
monkeypatch.setenv('PIP_TIMEOUT', '0')
|
| 777 |
+
monkeypatch.setenv('PIP_NO_INDEX', 'false')
|
| 778 |
+
monkeypatch.setenv('PIP_INDEX_URL', mock_index.url)
|
| 779 |
+
with contexts.save_pkg_resources_state():
|
| 780 |
+
with contexts.tempdir() as temp_dir:
|
| 781 |
+
test_pkg = create_setup_requires_package(
|
| 782 |
+
temp_dir,
|
| 783 |
+
'python-xlib',
|
| 784 |
+
'0.19',
|
| 785 |
+
setup_attrs=dict(dependency_links=[]),
|
| 786 |
+
)
|
| 787 |
+
test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
|
| 788 |
+
with open(test_setup_cfg, 'w', encoding="utf-8") as fp:
|
| 789 |
+
fp.write(
|
| 790 |
+
DALS(
|
| 791 |
+
"""
|
| 792 |
+
[easy_install]
|
| 793 |
+
index_url = https://pypi.org/legacy/
|
| 794 |
+
"""
|
| 795 |
+
)
|
| 796 |
+
)
|
| 797 |
+
test_setup_py = os.path.join(test_pkg, 'setup.py')
|
| 798 |
+
with pytest.raises(distutils.errors.DistutilsError):
|
| 799 |
+
run_setup(test_setup_py, ['--version'])
|
| 800 |
+
assert len(mock_index.requests) == 1
|
| 801 |
+
assert mock_index.requests[0].path == '/python-xlib/'
|
| 802 |
+
|
| 803 |
+
def test_setup_requires_with_pep508_url(self, mock_index, monkeypatch):
|
| 804 |
+
monkeypatch.setenv('PIP_RETRIES', '0')
|
| 805 |
+
monkeypatch.setenv('PIP_TIMEOUT', '0')
|
| 806 |
+
monkeypatch.setenv('PIP_INDEX_URL', mock_index.url)
|
| 807 |
+
with contexts.save_pkg_resources_state():
|
| 808 |
+
with contexts.tempdir() as temp_dir:
|
| 809 |
+
dep_sdist = os.path.join(temp_dir, 'dep.tar.gz')
|
| 810 |
+
make_trivial_sdist(dep_sdist, 'dependency', '42')
|
| 811 |
+
dep_url = path_to_url(dep_sdist, authority='localhost')
|
| 812 |
+
test_pkg = create_setup_requires_package(
|
| 813 |
+
temp_dir,
|
| 814 |
+
# Ignored (overridden by setup_attrs)
|
| 815 |
+
'python-xlib',
|
| 816 |
+
'0.19',
|
| 817 |
+
setup_attrs=dict(setup_requires=f'dependency @ {dep_url}'),
|
| 818 |
+
)
|
| 819 |
+
test_setup_py = os.path.join(test_pkg, 'setup.py')
|
| 820 |
+
run_setup(test_setup_py, ['--version'])
|
| 821 |
+
assert len(mock_index.requests) == 0
|
| 822 |
+
|
| 823 |
+
def test_setup_requires_with_allow_hosts(self, mock_index):
|
| 824 |
+
"""The `allow-hosts` option in not supported anymore."""
|
| 825 |
+
files = {
|
| 826 |
+
'test_pkg': {
|
| 827 |
+
'setup.py': DALS(
|
| 828 |
+
"""
|
| 829 |
+
from setuptools import setup
|
| 830 |
+
setup(setup_requires='python-xlib')
|
| 831 |
+
"""
|
| 832 |
+
),
|
| 833 |
+
'setup.cfg': DALS(
|
| 834 |
+
"""
|
| 835 |
+
[easy_install]
|
| 836 |
+
allow_hosts = *
|
| 837 |
+
"""
|
| 838 |
+
),
|
| 839 |
+
}
|
| 840 |
+
}
|
| 841 |
+
with contexts.save_pkg_resources_state():
|
| 842 |
+
with contexts.tempdir() as temp_dir:
|
| 843 |
+
path.build(files, prefix=temp_dir)
|
| 844 |
+
setup_py = str(pathlib.Path(temp_dir, 'test_pkg', 'setup.py'))
|
| 845 |
+
with pytest.raises(distutils.errors.DistutilsError):
|
| 846 |
+
run_setup(setup_py, ['--version'])
|
| 847 |
+
assert len(mock_index.requests) == 0
|
| 848 |
+
|
| 849 |
+
def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir):
|
| 850 |
+
"""Check `python_requires` is honored."""
|
| 851 |
+
monkeypatch.setenv('PIP_RETRIES', '0')
|
| 852 |
+
monkeypatch.setenv('PIP_TIMEOUT', '0')
|
| 853 |
+
monkeypatch.setenv('PIP_NO_INDEX', '1')
|
| 854 |
+
monkeypatch.setenv('PIP_VERBOSE', '1')
|
| 855 |
+
dep_1_0_sdist = 'dep-1.0.tar.gz'
|
| 856 |
+
dep_1_0_url = path_to_url(str(tmpdir / dep_1_0_sdist))
|
| 857 |
+
dep_1_0_python_requires = '>=2.7'
|
| 858 |
+
make_python_requires_sdist(
|
| 859 |
+
str(tmpdir / dep_1_0_sdist), 'dep', '1.0', dep_1_0_python_requires
|
| 860 |
+
)
|
| 861 |
+
dep_2_0_sdist = 'dep-2.0.tar.gz'
|
| 862 |
+
dep_2_0_url = path_to_url(str(tmpdir / dep_2_0_sdist))
|
| 863 |
+
dep_2_0_python_requires = (
|
| 864 |
+
f'!={sys.version_info.major}.{sys.version_info.minor}.*'
|
| 865 |
+
)
|
| 866 |
+
make_python_requires_sdist(
|
| 867 |
+
str(tmpdir / dep_2_0_sdist), 'dep', '2.0', dep_2_0_python_requires
|
| 868 |
+
)
|
| 869 |
+
index = tmpdir / 'index.html'
|
| 870 |
+
index.write_text(
|
| 871 |
+
DALS(
|
| 872 |
+
"""
|
| 873 |
+
<!DOCTYPE html>
|
| 874 |
+
<html><head><title>Links for dep</title></head>
|
| 875 |
+
<body>
|
| 876 |
+
<h1>Links for dep</h1>
|
| 877 |
+
<a href="{dep_1_0_url}"\
|
| 878 |
+
data-requires-python="{dep_1_0_python_requires}">{dep_1_0_sdist}</a><br/>
|
| 879 |
+
<a href="{dep_2_0_url}"\
|
| 880 |
+
data-requires-python="{dep_2_0_python_requires}">{dep_2_0_sdist}</a><br/>
|
| 881 |
+
</body>
|
| 882 |
+
</html>
|
| 883 |
+
"""
|
| 884 |
+
).format(
|
| 885 |
+
dep_1_0_url=dep_1_0_url,
|
| 886 |
+
dep_1_0_sdist=dep_1_0_sdist,
|
| 887 |
+
dep_1_0_python_requires=dep_1_0_python_requires,
|
| 888 |
+
dep_2_0_url=dep_2_0_url,
|
| 889 |
+
dep_2_0_sdist=dep_2_0_sdist,
|
| 890 |
+
dep_2_0_python_requires=dep_2_0_python_requires,
|
| 891 |
+
),
|
| 892 |
+
'utf-8',
|
| 893 |
+
)
|
| 894 |
+
index_url = path_to_url(str(index))
|
| 895 |
+
with contexts.save_pkg_resources_state():
|
| 896 |
+
test_pkg = create_setup_requires_package(
|
| 897 |
+
str(tmpdir),
|
| 898 |
+
'python-xlib',
|
| 899 |
+
'0.19', # Ignored (overridden by setup_attrs).
|
| 900 |
+
setup_attrs=dict(setup_requires='dep', dependency_links=[index_url]),
|
| 901 |
+
)
|
| 902 |
+
test_setup_py = os.path.join(test_pkg, 'setup.py')
|
| 903 |
+
run_setup(test_setup_py, ['--version'])
|
| 904 |
+
eggs = list(
|
| 905 |
+
map(str, pkg_resources.find_distributions(os.path.join(test_pkg, '.eggs')))
|
| 906 |
+
)
|
| 907 |
+
assert eggs == ['dep 1.0']
|
| 908 |
+
|
| 909 |
+
@pytest.mark.parametrize('with_dependency_links_in_setup_py', (False, True))
|
| 910 |
+
def test_setup_requires_with_find_links_in_setup_cfg(
|
| 911 |
+
self, monkeypatch, with_dependency_links_in_setup_py
|
| 912 |
+
):
|
| 913 |
+
monkeypatch.setenv('PIP_RETRIES', '0')
|
| 914 |
+
monkeypatch.setenv('PIP_TIMEOUT', '0')
|
| 915 |
+
with contexts.save_pkg_resources_state():
|
| 916 |
+
with contexts.tempdir() as temp_dir:
|
| 917 |
+
make_trivial_sdist(
|
| 918 |
+
os.path.join(temp_dir, 'python-xlib-42.tar.gz'), 'python-xlib', '42'
|
| 919 |
+
)
|
| 920 |
+
test_pkg = os.path.join(temp_dir, 'test_pkg')
|
| 921 |
+
test_setup_py = os.path.join(test_pkg, 'setup.py')
|
| 922 |
+
test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
|
| 923 |
+
os.mkdir(test_pkg)
|
| 924 |
+
with open(test_setup_py, 'w', encoding="utf-8") as fp:
|
| 925 |
+
if with_dependency_links_in_setup_py:
|
| 926 |
+
dependency_links = [os.path.join(temp_dir, 'links')]
|
| 927 |
+
else:
|
| 928 |
+
dependency_links = []
|
| 929 |
+
fp.write(
|
| 930 |
+
DALS(
|
| 931 |
+
"""
|
| 932 |
+
from setuptools import installer, setup
|
| 933 |
+
setup(setup_requires='python-xlib==42',
|
| 934 |
+
dependency_links={dependency_links!r})
|
| 935 |
+
"""
|
| 936 |
+
).format(dependency_links=dependency_links)
|
| 937 |
+
)
|
| 938 |
+
with open(test_setup_cfg, 'w', encoding="utf-8") as fp:
|
| 939 |
+
fp.write(
|
| 940 |
+
DALS(
|
| 941 |
+
"""
|
| 942 |
+
[easy_install]
|
| 943 |
+
index_url = {index_url}
|
| 944 |
+
find_links = {find_links}
|
| 945 |
+
"""
|
| 946 |
+
).format(
|
| 947 |
+
index_url=os.path.join(temp_dir, 'index'),
|
| 948 |
+
find_links=temp_dir,
|
| 949 |
+
)
|
| 950 |
+
)
|
| 951 |
+
run_setup(test_setup_py, ['--version'])
|
| 952 |
+
|
| 953 |
+
def test_setup_requires_with_transitive_extra_dependency(self, monkeypatch):
|
| 954 |
+
"""
|
| 955 |
+
Use case: installing a package with a build dependency on
|
| 956 |
+
an already installed `dep[extra]`, which in turn depends
|
| 957 |
+
on `extra_dep` (whose is not already installed).
|
| 958 |
+
"""
|
| 959 |
+
with contexts.save_pkg_resources_state():
|
| 960 |
+
with contexts.tempdir() as temp_dir:
|
| 961 |
+
# Create source distribution for `extra_dep`.
|
| 962 |
+
make_trivial_sdist(
|
| 963 |
+
os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'), 'extra_dep', '1.0'
|
| 964 |
+
)
|
| 965 |
+
# Create source tree for `dep`.
|
| 966 |
+
dep_pkg = os.path.join(temp_dir, 'dep')
|
| 967 |
+
os.mkdir(dep_pkg)
|
| 968 |
+
path.build(
|
| 969 |
+
{
|
| 970 |
+
'setup.py': DALS(
|
| 971 |
+
"""
|
| 972 |
+
import setuptools
|
| 973 |
+
setuptools.setup(
|
| 974 |
+
name='dep', version='2.0',
|
| 975 |
+
extras_require={'extra': ['extra_dep']},
|
| 976 |
+
)
|
| 977 |
+
"""
|
| 978 |
+
),
|
| 979 |
+
'setup.cfg': '',
|
| 980 |
+
},
|
| 981 |
+
prefix=dep_pkg,
|
| 982 |
+
)
|
| 983 |
+
# "Install" dep.
|
| 984 |
+
run_setup(os.path.join(dep_pkg, 'setup.py'), ['dist_info'])
|
| 985 |
+
working_set.add_entry(dep_pkg)
|
| 986 |
+
# Create source tree for test package.
|
| 987 |
+
test_pkg = os.path.join(temp_dir, 'test_pkg')
|
| 988 |
+
test_setup_py = os.path.join(test_pkg, 'setup.py')
|
| 989 |
+
os.mkdir(test_pkg)
|
| 990 |
+
with open(test_setup_py, 'w', encoding="utf-8") as fp:
|
| 991 |
+
fp.write(
|
| 992 |
+
DALS(
|
| 993 |
+
"""
|
| 994 |
+
from setuptools import installer, setup
|
| 995 |
+
setup(setup_requires='dep[extra]')
|
| 996 |
+
"""
|
| 997 |
+
)
|
| 998 |
+
)
|
| 999 |
+
# Check...
|
| 1000 |
+
monkeypatch.setenv('PIP_FIND_LINKS', str(temp_dir))
|
| 1001 |
+
monkeypatch.setenv('PIP_NO_INDEX', '1')
|
| 1002 |
+
monkeypatch.setenv('PIP_RETRIES', '0')
|
| 1003 |
+
monkeypatch.setenv('PIP_TIMEOUT', '0')
|
| 1004 |
+
run_setup(test_setup_py, ['--version'])
|
| 1005 |
+
|
| 1006 |
+
def test_setup_requires_with_distutils_command_dep(self, monkeypatch):
|
| 1007 |
+
"""
|
| 1008 |
+
Use case: ensure build requirements' extras
|
| 1009 |
+
are properly installed and activated.
|
| 1010 |
+
"""
|
| 1011 |
+
with contexts.save_pkg_resources_state():
|
| 1012 |
+
with contexts.tempdir() as temp_dir:
|
| 1013 |
+
# Create source distribution for `extra_dep`.
|
| 1014 |
+
make_sdist(
|
| 1015 |
+
os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'),
|
| 1016 |
+
[
|
| 1017 |
+
(
|
| 1018 |
+
'setup.py',
|
| 1019 |
+
DALS(
|
| 1020 |
+
"""
|
| 1021 |
+
import setuptools
|
| 1022 |
+
setuptools.setup(
|
| 1023 |
+
name='extra_dep',
|
| 1024 |
+
version='1.0',
|
| 1025 |
+
py_modules=['extra_dep'],
|
| 1026 |
+
)
|
| 1027 |
+
"""
|
| 1028 |
+
),
|
| 1029 |
+
),
|
| 1030 |
+
('setup.cfg', ''),
|
| 1031 |
+
('extra_dep.py', ''),
|
| 1032 |
+
],
|
| 1033 |
+
)
|
| 1034 |
+
# Create source tree for `epdep`.
|
| 1035 |
+
dep_pkg = os.path.join(temp_dir, 'epdep')
|
| 1036 |
+
os.mkdir(dep_pkg)
|
| 1037 |
+
path.build(
|
| 1038 |
+
{
|
| 1039 |
+
'setup.py': DALS(
|
| 1040 |
+
"""
|
| 1041 |
+
import setuptools
|
| 1042 |
+
setuptools.setup(
|
| 1043 |
+
name='dep', version='2.0',
|
| 1044 |
+
py_modules=['epcmd'],
|
| 1045 |
+
extras_require={'extra': ['extra_dep']},
|
| 1046 |
+
entry_points='''
|
| 1047 |
+
[distutils.commands]
|
| 1048 |
+
epcmd = epcmd:epcmd [extra]
|
| 1049 |
+
''',
|
| 1050 |
+
)
|
| 1051 |
+
"""
|
| 1052 |
+
),
|
| 1053 |
+
'setup.cfg': '',
|
| 1054 |
+
'epcmd.py': DALS(
|
| 1055 |
+
"""
|
| 1056 |
+
from distutils.command.build_py import build_py
|
| 1057 |
+
|
| 1058 |
+
import extra_dep
|
| 1059 |
+
|
| 1060 |
+
class epcmd(build_py):
|
| 1061 |
+
pass
|
| 1062 |
+
"""
|
| 1063 |
+
),
|
| 1064 |
+
},
|
| 1065 |
+
prefix=dep_pkg,
|
| 1066 |
+
)
|
| 1067 |
+
# "Install" dep.
|
| 1068 |
+
run_setup(os.path.join(dep_pkg, 'setup.py'), ['dist_info'])
|
| 1069 |
+
working_set.add_entry(dep_pkg)
|
| 1070 |
+
# Create source tree for test package.
|
| 1071 |
+
test_pkg = os.path.join(temp_dir, 'test_pkg')
|
| 1072 |
+
test_setup_py = os.path.join(test_pkg, 'setup.py')
|
| 1073 |
+
os.mkdir(test_pkg)
|
| 1074 |
+
with open(test_setup_py, 'w', encoding="utf-8") as fp:
|
| 1075 |
+
fp.write(
|
| 1076 |
+
DALS(
|
| 1077 |
+
"""
|
| 1078 |
+
from setuptools import installer, setup
|
| 1079 |
+
setup(setup_requires='dep[extra]')
|
| 1080 |
+
"""
|
| 1081 |
+
)
|
| 1082 |
+
)
|
| 1083 |
+
# Check...
|
| 1084 |
+
monkeypatch.setenv('PIP_FIND_LINKS', str(temp_dir))
|
| 1085 |
+
monkeypatch.setenv('PIP_NO_INDEX', '1')
|
| 1086 |
+
monkeypatch.setenv('PIP_RETRIES', '0')
|
| 1087 |
+
monkeypatch.setenv('PIP_TIMEOUT', '0')
|
| 1088 |
+
run_setup(test_setup_py, ['epcmd'])
|
| 1089 |
+
|
| 1090 |
+
|
| 1091 |
+
def make_trivial_sdist(dist_path, distname, version):
|
| 1092 |
+
"""
|
| 1093 |
+
Create a simple sdist tarball at dist_path, containing just a simple
|
| 1094 |
+
setup.py.
|
| 1095 |
+
"""
|
| 1096 |
+
|
| 1097 |
+
make_sdist(
|
| 1098 |
+
dist_path,
|
| 1099 |
+
[
|
| 1100 |
+
(
|
| 1101 |
+
'setup.py',
|
| 1102 |
+
DALS(
|
| 1103 |
+
f"""\
|
| 1104 |
+
import setuptools
|
| 1105 |
+
setuptools.setup(
|
| 1106 |
+
name={distname!r},
|
| 1107 |
+
version={version!r}
|
| 1108 |
+
)
|
| 1109 |
+
"""
|
| 1110 |
+
),
|
| 1111 |
+
),
|
| 1112 |
+
('setup.cfg', ''),
|
| 1113 |
+
],
|
| 1114 |
+
)
|
| 1115 |
+
|
| 1116 |
+
|
| 1117 |
+
def make_nspkg_sdist(dist_path, distname, version):
|
| 1118 |
+
"""
|
| 1119 |
+
Make an sdist tarball with distname and version which also contains one
|
| 1120 |
+
package with the same name as distname. The top-level package is
|
| 1121 |
+
designated a namespace package).
|
| 1122 |
+
"""
|
| 1123 |
+
|
| 1124 |
+
parts = distname.split('.')
|
| 1125 |
+
nspackage = parts[0]
|
| 1126 |
+
|
| 1127 |
+
packages = ['.'.join(parts[:idx]) for idx in range(1, len(parts) + 1)]
|
| 1128 |
+
|
| 1129 |
+
setup_py = DALS(
|
| 1130 |
+
f"""\
|
| 1131 |
+
import setuptools
|
| 1132 |
+
setuptools.setup(
|
| 1133 |
+
name={distname!r},
|
| 1134 |
+
version={version!r},
|
| 1135 |
+
packages={packages!r},
|
| 1136 |
+
namespace_packages=[{nspackage!r}]
|
| 1137 |
+
)
|
| 1138 |
+
"""
|
| 1139 |
+
)
|
| 1140 |
+
|
| 1141 |
+
init = "__import__('pkg_resources').declare_namespace(__name__)"
|
| 1142 |
+
|
| 1143 |
+
files = [('setup.py', setup_py), (os.path.join(nspackage, '__init__.py'), init)]
|
| 1144 |
+
for package in packages[1:]:
|
| 1145 |
+
filename = os.path.join(*(package.split('.') + ['__init__.py']))
|
| 1146 |
+
files.append((filename, ''))
|
| 1147 |
+
|
| 1148 |
+
make_sdist(dist_path, files)
|
| 1149 |
+
|
| 1150 |
+
|
| 1151 |
+
def make_python_requires_sdist(dist_path, distname, version, python_requires):
|
| 1152 |
+
make_sdist(
|
| 1153 |
+
dist_path,
|
| 1154 |
+
[
|
| 1155 |
+
(
|
| 1156 |
+
'setup.py',
|
| 1157 |
+
DALS(
|
| 1158 |
+
"""\
|
| 1159 |
+
import setuptools
|
| 1160 |
+
setuptools.setup(
|
| 1161 |
+
name={name!r},
|
| 1162 |
+
version={version!r},
|
| 1163 |
+
python_requires={python_requires!r},
|
| 1164 |
+
)
|
| 1165 |
+
"""
|
| 1166 |
+
).format(
|
| 1167 |
+
name=distname, version=version, python_requires=python_requires
|
| 1168 |
+
),
|
| 1169 |
+
),
|
| 1170 |
+
('setup.cfg', ''),
|
| 1171 |
+
],
|
| 1172 |
+
)
|
| 1173 |
+
|
| 1174 |
+
|
| 1175 |
+
def make_sdist(dist_path, files):
|
| 1176 |
+
"""
|
| 1177 |
+
Create a simple sdist tarball at dist_path, containing the files
|
| 1178 |
+
listed in ``files`` as ``(filename, content)`` tuples.
|
| 1179 |
+
"""
|
| 1180 |
+
|
| 1181 |
+
# Distributions with only one file don't play well with pip.
|
| 1182 |
+
assert len(files) > 1
|
| 1183 |
+
with tarfile.open(dist_path, 'w:gz') as dist:
|
| 1184 |
+
for filename, content in files:
|
| 1185 |
+
file_bytes = io.BytesIO(content.encode('utf-8'))
|
| 1186 |
+
file_info = tarfile.TarInfo(name=filename)
|
| 1187 |
+
file_info.size = len(file_bytes.getvalue())
|
| 1188 |
+
file_info.mtime = int(time.time())
|
| 1189 |
+
dist.addfile(file_info, fileobj=file_bytes)
|
| 1190 |
+
|
| 1191 |
+
|
| 1192 |
+
def create_setup_requires_package(
|
| 1193 |
+
path,
|
| 1194 |
+
distname='foobar',
|
| 1195 |
+
version='0.1',
|
| 1196 |
+
make_package=make_trivial_sdist,
|
| 1197 |
+
setup_py_template=None,
|
| 1198 |
+
setup_attrs=None,
|
| 1199 |
+
use_setup_cfg=(),
|
| 1200 |
+
):
|
| 1201 |
+
"""Creates a source tree under path for a trivial test package that has a
|
| 1202 |
+
single requirement in setup_requires--a tarball for that requirement is
|
| 1203 |
+
also created and added to the dependency_links argument.
|
| 1204 |
+
|
| 1205 |
+
``distname`` and ``version`` refer to the name/version of the package that
|
| 1206 |
+
the test package requires via ``setup_requires``. The name of the test
|
| 1207 |
+
package itself is just 'test_pkg'.
|
| 1208 |
+
"""
|
| 1209 |
+
|
| 1210 |
+
test_setup_attrs = {
|
| 1211 |
+
'name': 'test_pkg',
|
| 1212 |
+
'version': '0.0',
|
| 1213 |
+
'setup_requires': [f'{distname}=={version}'],
|
| 1214 |
+
'dependency_links': [os.path.abspath(path)],
|
| 1215 |
+
}
|
| 1216 |
+
if setup_attrs:
|
| 1217 |
+
test_setup_attrs.update(setup_attrs)
|
| 1218 |
+
|
| 1219 |
+
test_pkg = os.path.join(path, 'test_pkg')
|
| 1220 |
+
os.mkdir(test_pkg)
|
| 1221 |
+
|
| 1222 |
+
# setup.cfg
|
| 1223 |
+
if use_setup_cfg:
|
| 1224 |
+
options = []
|
| 1225 |
+
metadata = []
|
| 1226 |
+
for name in use_setup_cfg:
|
| 1227 |
+
value = test_setup_attrs.pop(name)
|
| 1228 |
+
if name in 'name version'.split():
|
| 1229 |
+
section = metadata
|
| 1230 |
+
else:
|
| 1231 |
+
section = options
|
| 1232 |
+
if isinstance(value, (tuple, list)):
|
| 1233 |
+
value = ';'.join(value)
|
| 1234 |
+
section.append(f'{name}: {value}')
|
| 1235 |
+
test_setup_cfg_contents = DALS(
|
| 1236 |
+
"""
|
| 1237 |
+
[metadata]
|
| 1238 |
+
{metadata}
|
| 1239 |
+
[options]
|
| 1240 |
+
{options}
|
| 1241 |
+
"""
|
| 1242 |
+
).format(
|
| 1243 |
+
options='\n'.join(options),
|
| 1244 |
+
metadata='\n'.join(metadata),
|
| 1245 |
+
)
|
| 1246 |
+
else:
|
| 1247 |
+
test_setup_cfg_contents = ''
|
| 1248 |
+
with open(os.path.join(test_pkg, 'setup.cfg'), 'w', encoding="utf-8") as f:
|
| 1249 |
+
f.write(test_setup_cfg_contents)
|
| 1250 |
+
|
| 1251 |
+
# setup.py
|
| 1252 |
+
if setup_py_template is None:
|
| 1253 |
+
setup_py_template = DALS(
|
| 1254 |
+
"""\
|
| 1255 |
+
import setuptools
|
| 1256 |
+
setuptools.setup(**%r)
|
| 1257 |
+
"""
|
| 1258 |
+
)
|
| 1259 |
+
with open(os.path.join(test_pkg, 'setup.py'), 'w', encoding="utf-8") as f:
|
| 1260 |
+
f.write(setup_py_template % test_setup_attrs)
|
| 1261 |
+
|
| 1262 |
+
foobar_path = os.path.join(path, f'{distname}-{version}.tar.gz')
|
| 1263 |
+
make_package(foobar_path, distname, version)
|
| 1264 |
+
|
| 1265 |
+
return test_pkg
|
| 1266 |
+
|
| 1267 |
+
|
| 1268 |
+
@pytest.mark.skipif(
|
| 1269 |
+
sys.platform.startswith('java') and ei.is_sh(sys.executable),
|
| 1270 |
+
reason="Test cannot run under java when executable is sh",
|
| 1271 |
+
)
|
| 1272 |
+
class TestScriptHeader:
|
| 1273 |
+
non_ascii_exe = '/Users/José/bin/python'
|
| 1274 |
+
exe_with_spaces = r'C:\Program Files\Python36\python.exe'
|
| 1275 |
+
|
| 1276 |
+
def test_get_script_header(self):
|
| 1277 |
+
expected = f'#!{ei.nt_quote_arg(os.path.normpath(sys.executable))}\n'
|
| 1278 |
+
actual = ei.ScriptWriter.get_header('#!/usr/local/bin/python')
|
| 1279 |
+
assert actual == expected
|
| 1280 |
+
|
| 1281 |
+
def test_get_script_header_args(self):
|
| 1282 |
+
expected = f'#!{ei.nt_quote_arg(os.path.normpath(sys.executable))} -x\n'
|
| 1283 |
+
actual = ei.ScriptWriter.get_header('#!/usr/bin/python -x')
|
| 1284 |
+
assert actual == expected
|
| 1285 |
+
|
| 1286 |
+
def test_get_script_header_non_ascii_exe(self):
|
| 1287 |
+
actual = ei.ScriptWriter.get_header(
|
| 1288 |
+
'#!/usr/bin/python', executable=self.non_ascii_exe
|
| 1289 |
+
)
|
| 1290 |
+
expected = f'#!{self.non_ascii_exe} -x\n'
|
| 1291 |
+
assert actual == expected
|
| 1292 |
+
|
| 1293 |
+
def test_get_script_header_exe_with_spaces(self):
|
| 1294 |
+
actual = ei.ScriptWriter.get_header(
|
| 1295 |
+
'#!/usr/bin/python', executable='"' + self.exe_with_spaces + '"'
|
| 1296 |
+
)
|
| 1297 |
+
expected = f'#!"{self.exe_with_spaces}"\n'
|
| 1298 |
+
assert actual == expected
|
| 1299 |
+
|
| 1300 |
+
|
| 1301 |
+
class TestCommandSpec:
|
| 1302 |
+
def test_custom_launch_command(self):
|
| 1303 |
+
"""
|
| 1304 |
+
Show how a custom CommandSpec could be used to specify a #! executable
|
| 1305 |
+
which takes parameters.
|
| 1306 |
+
"""
|
| 1307 |
+
cmd = ei.CommandSpec(['/usr/bin/env', 'python3'])
|
| 1308 |
+
assert cmd.as_header() == '#!/usr/bin/env python3\n'
|
| 1309 |
+
|
| 1310 |
+
def test_from_param_for_CommandSpec_is_passthrough(self):
|
| 1311 |
+
"""
|
| 1312 |
+
from_param should return an instance of a CommandSpec
|
| 1313 |
+
"""
|
| 1314 |
+
cmd = ei.CommandSpec(['python'])
|
| 1315 |
+
cmd_new = ei.CommandSpec.from_param(cmd)
|
| 1316 |
+
assert cmd is cmd_new
|
| 1317 |
+
|
| 1318 |
+
@mock.patch('sys.executable', TestScriptHeader.exe_with_spaces)
|
| 1319 |
+
@mock.patch.dict(os.environ)
|
| 1320 |
+
def test_from_environment_with_spaces_in_executable(self):
|
| 1321 |
+
os.environ.pop('__PYVENV_LAUNCHER__', None)
|
| 1322 |
+
cmd = ei.CommandSpec.from_environment()
|
| 1323 |
+
assert len(cmd) == 1
|
| 1324 |
+
assert cmd.as_header().startswith('#!"')
|
| 1325 |
+
|
| 1326 |
+
def test_from_simple_string_uses_shlex(self):
|
| 1327 |
+
"""
|
| 1328 |
+
In order to support `executable = /usr/bin/env my-python`, make sure
|
| 1329 |
+
from_param invokes shlex on that input.
|
| 1330 |
+
"""
|
| 1331 |
+
cmd = ei.CommandSpec.from_param('/usr/bin/env my-python')
|
| 1332 |
+
assert len(cmd) == 2
|
| 1333 |
+
assert '"' not in cmd.as_header()
|
| 1334 |
+
|
| 1335 |
+
def test_from_param_raises_expected_error(self) -> None:
|
| 1336 |
+
"""
|
| 1337 |
+
from_param should raise its own TypeError when the argument's type is unsupported
|
| 1338 |
+
"""
|
| 1339 |
+
with pytest.raises(TypeError) as exc_info:
|
| 1340 |
+
ei.CommandSpec.from_param(object()) # type: ignore[arg-type] # We want a type error here
|
| 1341 |
+
assert (
|
| 1342 |
+
str(exc_info.value) == "Argument has an unsupported type <class 'object'>"
|
| 1343 |
+
), exc_info.value
|
| 1344 |
+
|
| 1345 |
+
|
| 1346 |
+
class TestWindowsScriptWriter:
|
| 1347 |
+
def test_header(self):
|
| 1348 |
+
hdr = ei.WindowsScriptWriter.get_header('')
|
| 1349 |
+
assert hdr.startswith('#!')
|
| 1350 |
+
assert hdr.endswith('\n')
|
| 1351 |
+
hdr = hdr.lstrip('#!')
|
| 1352 |
+
hdr = hdr.rstrip('\n')
|
| 1353 |
+
# header should not start with an escaped quote
|
| 1354 |
+
assert not hdr.startswith('\\"')
|
| 1355 |
+
|
| 1356 |
+
|
| 1357 |
+
class VersionStub(NamedTuple):
|
| 1358 |
+
major: int
|
| 1359 |
+
minor: int
|
| 1360 |
+
micro: int
|
| 1361 |
+
releaselevel: str
|
| 1362 |
+
serial: int
|
| 1363 |
+
|
| 1364 |
+
|
| 1365 |
+
def test_use_correct_python_version_string(tmpdir, tmpdir_cwd, monkeypatch):
|
| 1366 |
+
# In issue #3001, easy_install wrongly uses the `python3.1` directory
|
| 1367 |
+
# when the interpreter is `python3.10` and the `--user` option is given.
|
| 1368 |
+
# See pypa/setuptools#3001.
|
| 1369 |
+
dist = Distribution()
|
| 1370 |
+
cmd = dist.get_command_obj('easy_install')
|
| 1371 |
+
cmd.args = ['ok']
|
| 1372 |
+
cmd.optimize = 0
|
| 1373 |
+
cmd.user = True
|
| 1374 |
+
cmd.install_userbase = str(tmpdir)
|
| 1375 |
+
cmd.install_usersite = None
|
| 1376 |
+
install_cmd = dist.get_command_obj('install')
|
| 1377 |
+
install_cmd.install_userbase = str(tmpdir)
|
| 1378 |
+
install_cmd.install_usersite = None
|
| 1379 |
+
|
| 1380 |
+
with monkeypatch.context() as patch, warnings.catch_warnings():
|
| 1381 |
+
warnings.simplefilter("ignore")
|
| 1382 |
+
version = '3.10.1 (main, Dec 21 2021, 09:17:12) [GCC 10.2.1 20210110]'
|
| 1383 |
+
info = VersionStub(3, 10, 1, "final", 0)
|
| 1384 |
+
patch.setattr('site.ENABLE_USER_SITE', True)
|
| 1385 |
+
patch.setattr('sys.version', version)
|
| 1386 |
+
patch.setattr('sys.version_info', info)
|
| 1387 |
+
patch.setattr(cmd, 'create_home_path', mock.Mock())
|
| 1388 |
+
cmd.finalize_options()
|
| 1389 |
+
|
| 1390 |
+
name = "pypy" if hasattr(sys, 'pypy_version_info') else "python"
|
| 1391 |
+
install_dir = cmd.install_dir.lower()
|
| 1392 |
+
|
| 1393 |
+
# In some platforms (e.g. Windows), install_dir is mostly determined
|
| 1394 |
+
# via `sysconfig`, which define constants eagerly at module creation.
|
| 1395 |
+
# This means that monkeypatching `sys.version` to emulate 3.10 for testing
|
| 1396 |
+
# may have no effect.
|
| 1397 |
+
# The safest test here is to rely on the fact that 3.1 is no longer
|
| 1398 |
+
# supported/tested, and make sure that if 'python3.1' ever appears in the string
|
| 1399 |
+
# it is followed by another digit (e.g. 'python3.10').
|
| 1400 |
+
if re.search(name + r'3\.?1', install_dir):
|
| 1401 |
+
assert re.search(name + r'3\.?1\d', install_dir)
|
| 1402 |
+
|
| 1403 |
+
# The following "variables" are used for interpolation in distutils
|
| 1404 |
+
# installation schemes, so it should be fair to treat them as "semi-public",
|
| 1405 |
+
# or at least public enough so we can have a test to make sure they are correct
|
| 1406 |
+
assert cmd.config_vars['py_version'] == '3.10.1'
|
| 1407 |
+
assert cmd.config_vars['py_version_short'] == '3.10'
|
| 1408 |
+
assert cmd.config_vars['py_version_nodot'] == '310'
|
| 1409 |
+
|
| 1410 |
+
|
| 1411 |
+
@pytest.mark.xfail(
    sys.platform == "darwin",
    reason="https://github.com/pypa/setuptools/pull/4716#issuecomment-2447624418",
)
def test_editable_user_and_build_isolation(setup_context, monkeypatch, tmp_path):
    """`setup.py develop` should honor `--user` even under build isolation"""

    # == Arrange ==
    # Pretend that build isolation was enabled
    # e.g pip sets the environment variable PYTHONNOUSERSITE=1
    monkeypatch.setattr('site.ENABLE_USER_SITE', False)

    # Patching $HOME for 2 reasons:
    # 1. setuptools/command/easy_install.py:create_home_path
    #    tries creating directories in $HOME.
    #    Given::
    #        self.config_vars['DESTDIRS'] = (
    #            "/home/user/.pyenv/versions/3.9.10 "
    #            "/home/user/.pyenv/versions/3.9.10/lib "
    #            "/home/user/.pyenv/versions/3.9.10/lib/python3.9 "
    #            "/home/user/.pyenv/versions/3.9.10/lib/python3.9/lib-dynload")
    #    `create_home_path` will::
    #        makedirs(
    #            "/home/user/.pyenv/versions/3.9.10 "
    #            "/home/user/.pyenv/versions/3.9.10/lib "
    #            "/home/user/.pyenv/versions/3.9.10/lib/python3.9 "
    #            "/home/user/.pyenv/versions/3.9.10/lib/python3.9/lib-dynload")
    #
    # 2. We are going to force `site` to update site.USER_BASE and site.USER_SITE
    #    To point inside our new home
    # (USERPROFILE/APPDATA cover the Windows lookup paths for the user base.)
    monkeypatch.setenv('HOME', str(tmp_path / '.home'))
    monkeypatch.setenv('USERPROFILE', str(tmp_path / '.home'))
    monkeypatch.setenv('APPDATA', str(tmp_path / '.home'))
    # Resetting these to None forces site.getusersitepackages() to recompute
    # them from the (patched) environment above.
    monkeypatch.setattr('site.USER_BASE', None)
    monkeypatch.setattr('site.USER_SITE', None)
    user_site = Path(site.getusersitepackages())
    user_site.mkdir(parents=True, exist_ok=True)

    sys_prefix = tmp_path / '.sys_prefix'
    sys_prefix.mkdir(parents=True, exist_ok=True)
    monkeypatch.setattr('sys.prefix', str(sys_prefix))

    setup_script = (
        "__import__('setuptools').setup(name='aproj', version=42, packages=[])\n"
    )
    (tmp_path / "setup.py").write_text(setup_script, encoding="utf-8")

    # == Sanity check ==
    # Both candidate installation targets must start out empty.
    assert list(sys_prefix.glob("*")) == []
    assert list(user_site.glob("*")) == []

    # == Act ==
    run_setup('setup.py', ['develop', '--user'])

    # == Assert ==
    # Should not install to sys.prefix
    assert list(sys_prefix.glob("*")) == []
    # Should install to user site
    installed = {f.name for f in user_site.glob("*")}
    # sometimes easy-install.pth is created and sometimes not
    installed = installed - {"easy-install.pth"}
    assert installed == {'aproj.egg-link'}
|
llava/lib/python3.10/site-packages/setuptools/tests/test_editable_install.py
ADDED
|
@@ -0,0 +1,1289 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import platform
|
| 5 |
+
import stat
|
| 6 |
+
import subprocess
|
| 7 |
+
import sys
|
| 8 |
+
from copy import deepcopy
|
| 9 |
+
from importlib import import_module
|
| 10 |
+
from importlib.machinery import EXTENSION_SUFFIXES
|
| 11 |
+
from pathlib import Path
|
| 12 |
+
from textwrap import dedent
|
| 13 |
+
from typing import Any
|
| 14 |
+
from unittest.mock import Mock
|
| 15 |
+
from uuid import uuid4
|
| 16 |
+
|
| 17 |
+
import jaraco.envs
|
| 18 |
+
import jaraco.path
|
| 19 |
+
import pytest
|
| 20 |
+
from path import Path as _Path
|
| 21 |
+
|
| 22 |
+
from setuptools._importlib import resources as importlib_resources
|
| 23 |
+
from setuptools.command.editable_wheel import (
|
| 24 |
+
_DebuggingTips,
|
| 25 |
+
_encode_pth,
|
| 26 |
+
_find_namespaces,
|
| 27 |
+
_find_package_roots,
|
| 28 |
+
_find_virtual_namespaces,
|
| 29 |
+
_finder_template,
|
| 30 |
+
_LinkTree,
|
| 31 |
+
_TopLevelFinder,
|
| 32 |
+
editable_wheel,
|
| 33 |
+
)
|
| 34 |
+
from setuptools.dist import Distribution
|
| 35 |
+
from setuptools.extension import Extension
|
| 36 |
+
from setuptools.warnings import SetuptoolsDeprecationWarning
|
| 37 |
+
|
| 38 |
+
from . import contexts, namespaces
|
| 39 |
+
|
| 40 |
+
from distutils.core import run_setup
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
@pytest.fixture(params=["strict", "lenient"])
def editable_opts(request):
    """Parametrized fixture: extra pip options selecting the editable mode.

    Yields the ``--config-settings`` flags for strict mode, or no extra
    options for the (default) lenient mode.
    """
    strict_flags = ["--config-settings", "editable-mode=strict"]
    return strict_flags if request.param == "strict" else []
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
# In-memory project tree (consumed by jaraco.path.build) describing a small
# src-layout package used by several tests below.  Keys are file/directory
# names; string values are file contents.
EXAMPLE = {
    'pyproject.toml': dedent(
        """\
        [build-system]
        requires = ["setuptools"]
        build-backend = "setuptools.build_meta"

        [project]
        name = "mypkg"
        version = "3.14159"
        license = {text = "MIT"}
        description = "This is a Python package"
        dynamic = ["readme"]
        classifiers = [
            "Development Status :: 5 - Production/Stable",
            "Intended Audience :: Developers"
        ]
        urls = {Homepage = "https://github.com"}

        [tool.setuptools]
        package-dir = {"" = "src"}
        packages = {find = {where = ["src"]}}
        license-files = ["LICENSE*"]

        [tool.setuptools.dynamic]
        readme = {file = "README.rst"}

        [tool.distutils.egg_info]
        tag-build = ".post0"
        """
    ),
    "MANIFEST.in": dedent(
        """\
        global-include *.py *.txt
        global-exclude *.py[cod]
        prune dist
        prune build
        """
    ).strip(),
    "README.rst": "This is a ``README``",
    "LICENSE.txt": "---- placeholder MIT license ----",
    "src": {
        "mypkg": {
            "__init__.py": dedent(
                """\
                import sys
                from importlib.metadata import PackageNotFoundError, version

                try:
                    __version__ = version(__name__)
                except PackageNotFoundError:
                    __version__ = "unknown"
                """
            ),
            "__main__.py": dedent(
                """\
                from importlib.resources import read_text
                from . import __version__, __name__ as parent
                from .mod import x

                data = read_text(parent, "data.txt")
                print(__version__, data, x)
                """
            ),
            "mod.py": "x = ''",
            "data.txt": "Hello World",
        }
    },
}


# Minimal setup.py used when a test needs a legacy setup script alongside
# pyproject.toml.
SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
@pytest.mark.xfail(sys.platform == "darwin", reason="pypa/setuptools#4328")
@pytest.mark.parametrize(
    "files",
    [
        {**EXAMPLE, "setup.py": SETUP_SCRIPT_STUB},
        EXAMPLE,  # No setup.py script
    ],
)
def test_editable_with_pyproject(tmp_path, venv, files, editable_opts):
    """Editable install of a pyproject-based src-layout project: the package
    must be runnable, and edits to sources/data must be visible without
    reinstalling.
    """
    project = tmp_path / "mypkg"
    project.mkdir()
    jaraco.path.build(files, prefix=project)

    cmd = [
        "python",
        "-m",
        "pip",
        "install",
        "--no-build-isolation",  # required to force current version of setuptools
        "-e",
        str(project),
        *editable_opts,
    ]
    print(venv.run(cmd))

    cmd = ["python", "-m", "mypkg"]
    # ".post0" comes from [tool.distutils.egg_info] tag-build in EXAMPLE
    assert venv.run(cmd).strip() == "3.14159.post0 Hello World"

    # Modify sources in place; editable install should pick the changes up.
    (project / "src/mypkg/data.txt").write_text("foobar", encoding="utf-8")
    (project / "src/mypkg/mod.py").write_text("x = 42", encoding="utf-8")
    assert venv.run(cmd).strip() == "3.14159.post0 foobar 42"
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def test_editable_with_flat_layout(tmp_path, venv, editable_opts):
    """Editable-install a flat-layout project (one package plus one top-level
    module) and verify both are importable with their original values.
    """
    pyproject = dedent(
        """\
        [build-system]
        requires = ["setuptools", "wheel"]
        build-backend = "setuptools.build_meta"

        [project]
        name = "mypkg"
        version = "3.14159"

        [tool.setuptools]
        packages = ["pkg"]
        py-modules = ["mod"]
        """
    )
    project_files = {
        "mypkg": {
            "pyproject.toml": pyproject,
            "pkg": {"__init__.py": "a = 4"},
            "mod.py": "b = 2",
        },
    }
    jaraco.path.build(project_files, prefix=tmp_path)
    project = tmp_path / "mypkg"

    # --no-build-isolation is required to force the current version of setuptools
    install_cmd = [
        "python",
        "-m",
        "pip",
        "install",
        "--no-build-isolation",
        "-e",
        str(project),
        *editable_opts,
    ]
    print(venv.run(install_cmd))

    probe = ["python", "-c", "import pkg, mod; print(pkg.a, mod.b)"]
    assert venv.run(probe).strip() == "4 2"
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def test_editable_with_single_module(tmp_path, venv, editable_opts):
    """Editable-install a project consisting of a single top-level module."""
    pyproject = dedent(
        """\
        [build-system]
        requires = ["setuptools", "wheel"]
        build-backend = "setuptools.build_meta"

        [project]
        name = "mod"
        version = "3.14159"

        [tool.setuptools]
        py-modules = ["mod"]
        """
    )
    jaraco.path.build(
        {"mypkg": {"pyproject.toml": pyproject, "mod.py": "b = 2"}},
        prefix=tmp_path,
    )
    project = tmp_path / "mypkg"

    # --no-build-isolation is required to force the current version of setuptools
    install_cmd = [
        "python",
        "-m",
        "pip",
        "install",
        "--no-build-isolation",
        "-e",
        str(project),
        *editable_opts,
    ]
    print(venv.run(install_cmd))

    assert venv.run(["python", "-c", "import mod; print(mod.b)"]).strip() == "2"
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
class TestLegacyNamespaces:
    """Tests for legacy (pre-PEP 420) namespace packages in editable mode."""

    # legacy => pkg_resources.declare_namespace(...) + setup(namespace_packages=...)

    def test_nspkg_file_is_unique(self, tmp_path, monkeypatch):
        """Each installed legacy namespace package must get its own
        ``*-nspkg.pth`` file (no overwriting between distributions).
        """
        deprecation = pytest.warns(
            SetuptoolsDeprecationWarning, match=".*namespace_packages parameter.*"
        )
        installation_dir = tmp_path / ".installation_dir"
        installation_dir.mkdir()
        examples = (
            "myns.pkgA",
            "myns.pkgB",
            "myns.n.pkgA",
            "myns.n.pkgB",
        )

        for name in examples:
            pkg = namespaces.build_namespace_package(tmp_path, name, version="42")
            # Each iteration drives the editable_wheel command far enough to
            # write its nspkg .pth file into the shared installation_dir.
            with deprecation, monkeypatch.context() as ctx:
                ctx.chdir(pkg)
                dist = run_setup("setup.py", stop_after="config")
                cmd = editable_wheel(dist)
                cmd.finalize_options()
                editable_name = cmd.get_finalized_command("dist_info").name
                cmd._install_namespaces(installation_dir, editable_name)

        # One distinct -nspkg.pth file per example package.
        files = list(installation_dir.glob("*-nspkg.pth"))
        assert len(files) == len(examples)

    @pytest.mark.parametrize(
        "impl",
        (
            "pkg_resources",
            # "pkgutil", => does not work
        ),
    )
    @pytest.mark.parametrize("ns", ("myns.n",))
    def test_namespace_package_importable(
        self, venv, tmp_path, ns, impl, editable_opts
    ):
        """
        Installing two packages sharing the same namespace, one installed
        naturally using pip or `--single-version-externally-managed`
        and the other installed in editable mode should leave the namespace
        intact and both packages reachable by import.
        (Ported from test_develop).
        """
        build_system = """\
        [build-system]
        requires = ["setuptools"]
        build-backend = "setuptools.build_meta"
        """
        pkg_A = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgA", impl=impl)
        pkg_B = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgB", impl=impl)
        (pkg_A / "pyproject.toml").write_text(build_system, encoding="utf-8")
        (pkg_B / "pyproject.toml").write_text(build_system, encoding="utf-8")
        # use pip to install to the target directory
        opts = editable_opts[:]
        opts.append("--no-build-isolation")  # force current version of setuptools
        venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
        venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
        venv.run(["python", "-c", f"import {ns}.pkgA; import {ns}.pkgB"])
        # additionally ensure that pkg_resources import works
        venv.run(["python", "-c", "import pkg_resources"])
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
class TestPep420Namespaces:
    """Tests for PEP 420 (implicit) namespace packages in editable mode."""

    def test_namespace_package_importable(self, venv, tmp_path, editable_opts):
        """
        Installing two packages sharing the same namespace, one installed
        normally using pip and the other installed in editable mode
        should allow importing both packages.
        """
        pkg_A = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgA')
        pkg_B = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgB')
        # use pip to install to the target directory
        opts = editable_opts[:]
        opts.append("--no-build-isolation")  # force current version of setuptools
        venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
        venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
        venv.run(["python", "-c", "import myns.n.pkgA; import myns.n.pkgB"])

    def test_namespace_created_via_package_dir(self, venv, tmp_path, editable_opts):
        """Currently users can create a namespace by tweaking `package_dir`"""
        files = {
            "pkgA": {
                "pyproject.toml": dedent(
                    """\
                    [build-system]
                    requires = ["setuptools", "wheel"]
                    build-backend = "setuptools.build_meta"

                    [project]
                    name = "pkgA"
                    version = "3.14159"

                    [tool.setuptools]
                    package-dir = {"myns.n.pkgA" = "src"}
                    """
                ),
                "src": {"__init__.py": "a = 1"},
            },
        }
        jaraco.path.build(files, prefix=tmp_path)
        pkg_A = tmp_path / "pkgA"
        pkg_B = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgB')
        pkg_C = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgC')

        # use pip to install to the target directory
        opts = editable_opts[:]
        opts.append("--no-build-isolation")  # force current version of setuptools
        venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
        venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
        venv.run(["python", "-m", "pip", "install", "-e", str(pkg_C), *opts])
        venv.run(["python", "-c", "from myns.n import pkgA, pkgB, pkgC"])

    def test_namespace_accidental_config_in_lenient_mode(self, venv, tmp_path):
        """Sometimes users might specify an ``include`` pattern that ignores parent
        packages. In a normal installation this would ignore all modules inside the
        parent packages, and make them namespaces (reported in issue #3504),
        so the editable mode should preserve this behaviour.
        """
        files = {
            "pkgA": {
                "pyproject.toml": dedent(
                    """\
                    [build-system]
                    requires = ["setuptools", "wheel"]
                    build-backend = "setuptools.build_meta"

                    [project]
                    name = "pkgA"
                    version = "3.14159"

                    [tool.setuptools]
                    packages.find.include = ["mypkg.*"]
                    """
                ),
                "mypkg": {
                    "__init__.py": "",
                    "other.py": "b = 1",
                    "n": {
                        "__init__.py": "",
                        "pkgA.py": "a = 1",
                    },
                },
                "MANIFEST.in": EXAMPLE["MANIFEST.in"],
            },
        }
        jaraco.path.build(files, prefix=tmp_path)
        pkg_A = tmp_path / "pkgA"

        # use pip to install to the target directory
        opts = ["--no-build-isolation"]  # force current version of setuptools
        venv.run(["python", "-m", "pip", "-v", "install", "-e", str(pkg_A), *opts])
        # The nested module matched by the include pattern must be importable...
        out = venv.run(["python", "-c", "from mypkg.n import pkgA; print(pkgA.a)"])
        assert out.strip() == "1"
        # ...while the sibling module excluded by the pattern must NOT be.
        cmd = """\
        try:
            import mypkg.other
        except ImportError:
            print("mypkg.other not defined")
        """
        out = venv.run(["python", "-c", dedent(cmd)])
        assert "mypkg.other not defined" in out
|
| 400 |
+
|
| 401 |
+
|
| 402 |
+
def test_editable_with_prefix(tmp_path, sample_project, editable_opts):
    """
    Editable install to a prefix should be discoverable.
    """
    prefix = tmp_path / 'prefix'

    # figure out where pip will likely install the package
    # (mirror every site-packages dir under sys.prefix into the new prefix)
    site_packages_all = [
        prefix / Path(path).relative_to(sys.prefix)
        for path in sys.path
        if 'site-packages' in path and path.startswith(sys.prefix)
    ]

    for sp in site_packages_all:
        sp.mkdir(parents=True)

    # install workaround
    _addsitedirs(site_packages_all)

    env = dict(os.environ, PYTHONPATH=os.pathsep.join(map(str, site_packages_all)))
    cmd = [
        sys.executable,
        '-m',
        'pip',
        'install',
        '--editable',
        str(sample_project),
        '--prefix',
        str(prefix),
        '--no-build-isolation',
        *editable_opts,
    ]
    subprocess.check_call(cmd, env=env)

    # now run 'sample' with the prefix on the PYTHONPATH
    bin = 'Scripts' if platform.system() == 'Windows' else 'bin'
    exe = prefix / bin / 'sample'
    subprocess.check_call([exe], env=env)
|
| 440 |
+
|
| 441 |
+
|
| 442 |
+
class TestFinderTemplate:
|
| 443 |
+
"""This test focus in getting a particular implementation detail right.
|
| 444 |
+
If at some point in time the implementation is changed for something different,
|
| 445 |
+
this test can be modified or even excluded.
|
| 446 |
+
"""
|
| 447 |
+
|
| 448 |
+
def install_finder(self, finder):
|
| 449 |
+
loc = {}
|
| 450 |
+
exec(finder, loc, loc)
|
| 451 |
+
loc["install"]()
|
| 452 |
+
|
| 453 |
+
def test_packages(self, tmp_path):
|
| 454 |
+
files = {
|
| 455 |
+
"src1": {
|
| 456 |
+
"pkg1": {
|
| 457 |
+
"__init__.py": "",
|
| 458 |
+
"subpkg": {"mod1.py": "a = 42"},
|
| 459 |
+
},
|
| 460 |
+
},
|
| 461 |
+
"src2": {"mod2.py": "a = 43"},
|
| 462 |
+
}
|
| 463 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 464 |
+
|
| 465 |
+
mapping = {
|
| 466 |
+
"pkg1": str(tmp_path / "src1/pkg1"),
|
| 467 |
+
"mod2": str(tmp_path / "src2/mod2"),
|
| 468 |
+
}
|
| 469 |
+
template = _finder_template(str(uuid4()), mapping, {})
|
| 470 |
+
|
| 471 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 472 |
+
for mod in ("pkg1", "pkg1.subpkg", "pkg1.subpkg.mod1", "mod2"):
|
| 473 |
+
sys.modules.pop(mod, None)
|
| 474 |
+
|
| 475 |
+
self.install_finder(template)
|
| 476 |
+
mod1 = import_module("pkg1.subpkg.mod1")
|
| 477 |
+
mod2 = import_module("mod2")
|
| 478 |
+
subpkg = import_module("pkg1.subpkg")
|
| 479 |
+
|
| 480 |
+
assert mod1.a == 42
|
| 481 |
+
assert mod2.a == 43
|
| 482 |
+
expected = str((tmp_path / "src1/pkg1/subpkg").resolve())
|
| 483 |
+
assert_path(subpkg, expected)
|
| 484 |
+
|
| 485 |
+
def test_namespace(self, tmp_path):
|
| 486 |
+
files = {"pkg": {"__init__.py": "a = 13", "text.txt": "abc"}}
|
| 487 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 488 |
+
|
| 489 |
+
mapping = {"ns.othername": str(tmp_path / "pkg")}
|
| 490 |
+
namespaces = {"ns": []}
|
| 491 |
+
|
| 492 |
+
template = _finder_template(str(uuid4()), mapping, namespaces)
|
| 493 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 494 |
+
for mod in ("ns", "ns.othername"):
|
| 495 |
+
sys.modules.pop(mod, None)
|
| 496 |
+
|
| 497 |
+
self.install_finder(template)
|
| 498 |
+
pkg = import_module("ns.othername")
|
| 499 |
+
text = importlib_resources.files(pkg) / "text.txt"
|
| 500 |
+
|
| 501 |
+
expected = str((tmp_path / "pkg").resolve())
|
| 502 |
+
assert_path(pkg, expected)
|
| 503 |
+
assert pkg.a == 13
|
| 504 |
+
|
| 505 |
+
# Make sure resources can also be found
|
| 506 |
+
assert text.read_text(encoding="utf-8") == "abc"
|
| 507 |
+
|
| 508 |
+
def test_combine_namespaces(self, tmp_path):
|
| 509 |
+
files = {
|
| 510 |
+
"src1": {"ns": {"pkg1": {"__init__.py": "a = 13"}}},
|
| 511 |
+
"src2": {"ns": {"mod2.py": "b = 37"}},
|
| 512 |
+
}
|
| 513 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 514 |
+
|
| 515 |
+
mapping = {
|
| 516 |
+
"ns.pkgA": str(tmp_path / "src1/ns/pkg1"),
|
| 517 |
+
"ns": str(tmp_path / "src2/ns"),
|
| 518 |
+
}
|
| 519 |
+
namespaces_ = {"ns": [str(tmp_path / "src1"), str(tmp_path / "src2")]}
|
| 520 |
+
template = _finder_template(str(uuid4()), mapping, namespaces_)
|
| 521 |
+
|
| 522 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 523 |
+
for mod in ("ns", "ns.pkgA", "ns.mod2"):
|
| 524 |
+
sys.modules.pop(mod, None)
|
| 525 |
+
|
| 526 |
+
self.install_finder(template)
|
| 527 |
+
pkgA = import_module("ns.pkgA")
|
| 528 |
+
mod2 = import_module("ns.mod2")
|
| 529 |
+
|
| 530 |
+
expected = str((tmp_path / "src1/ns/pkg1").resolve())
|
| 531 |
+
assert_path(pkgA, expected)
|
| 532 |
+
assert pkgA.a == 13
|
| 533 |
+
assert mod2.b == 37
|
| 534 |
+
|
| 535 |
+
def test_combine_namespaces_nested(self, tmp_path):
|
| 536 |
+
"""
|
| 537 |
+
Users may attempt to combine namespace packages in a nested way via
|
| 538 |
+
``package_dir`` as shown in pypa/setuptools#4248.
|
| 539 |
+
"""
|
| 540 |
+
|
| 541 |
+
files = {
|
| 542 |
+
"src": {"my_package": {"my_module.py": "a = 13"}},
|
| 543 |
+
"src2": {"my_package2": {"my_module2.py": "b = 37"}},
|
| 544 |
+
}
|
| 545 |
+
|
| 546 |
+
stack = jaraco.path.DirectoryStack()
|
| 547 |
+
with stack.context(tmp_path):
|
| 548 |
+
jaraco.path.build(files)
|
| 549 |
+
attrs = {
|
| 550 |
+
"script_name": "%PEP 517%",
|
| 551 |
+
"package_dir": {
|
| 552 |
+
"different_name": "src/my_package",
|
| 553 |
+
"different_name.subpkg": "src2/my_package2",
|
| 554 |
+
},
|
| 555 |
+
"packages": ["different_name", "different_name.subpkg"],
|
| 556 |
+
}
|
| 557 |
+
dist = Distribution(attrs)
|
| 558 |
+
finder = _TopLevelFinder(dist, str(uuid4()))
|
| 559 |
+
code = next(v for k, v in finder.get_implementation() if k.endswith(".py"))
|
| 560 |
+
|
| 561 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 562 |
+
for mod in attrs["packages"]:
|
| 563 |
+
sys.modules.pop(mod, None)
|
| 564 |
+
|
| 565 |
+
self.install_finder(code)
|
| 566 |
+
mod1 = import_module("different_name.my_module")
|
| 567 |
+
mod2 = import_module("different_name.subpkg.my_module2")
|
| 568 |
+
|
| 569 |
+
expected = str((tmp_path / "src/my_package/my_module.py").resolve())
|
| 570 |
+
assert str(Path(mod1.__file__).resolve()) == expected
|
| 571 |
+
|
| 572 |
+
expected = str((tmp_path / "src2/my_package2/my_module2.py").resolve())
|
| 573 |
+
assert str(Path(mod2.__file__).resolve()) == expected
|
| 574 |
+
|
| 575 |
+
assert mod1.a == 13
|
| 576 |
+
assert mod2.b == 37
|
| 577 |
+
|
| 578 |
+
def test_dynamic_path_computation(self, tmp_path):
|
| 579 |
+
# Follows the example in PEP 420
|
| 580 |
+
files = {
|
| 581 |
+
"project1": {"parent": {"child": {"one.py": "x = 1"}}},
|
| 582 |
+
"project2": {"parent": {"child": {"two.py": "x = 2"}}},
|
| 583 |
+
"project3": {"parent": {"child": {"three.py": "x = 3"}}},
|
| 584 |
+
}
|
| 585 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 586 |
+
mapping = {}
|
| 587 |
+
namespaces_ = {"parent": [str(tmp_path / "project1/parent")]}
|
| 588 |
+
template = _finder_template(str(uuid4()), mapping, namespaces_)
|
| 589 |
+
|
| 590 |
+
mods = (f"parent.child.{name}" for name in ("one", "two", "three"))
|
| 591 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 592 |
+
for mod in ("parent", "parent.child", "parent.child", *mods):
|
| 593 |
+
sys.modules.pop(mod, None)
|
| 594 |
+
|
| 595 |
+
self.install_finder(template)
|
| 596 |
+
|
| 597 |
+
one = import_module("parent.child.one")
|
| 598 |
+
assert one.x == 1
|
| 599 |
+
|
| 600 |
+
with pytest.raises(ImportError):
|
| 601 |
+
import_module("parent.child.two")
|
| 602 |
+
|
| 603 |
+
sys.path.append(str(tmp_path / "project2"))
|
| 604 |
+
two = import_module("parent.child.two")
|
| 605 |
+
assert two.x == 2
|
| 606 |
+
|
| 607 |
+
with pytest.raises(ImportError):
|
| 608 |
+
import_module("parent.child.three")
|
| 609 |
+
|
| 610 |
+
sys.path.append(str(tmp_path / "project3"))
|
| 611 |
+
three = import_module("parent.child.three")
|
| 612 |
+
assert three.x == 3
|
| 613 |
+
|
| 614 |
+
def test_no_recursion(self, tmp_path):
|
| 615 |
+
# See issue #3550
|
| 616 |
+
files = {
|
| 617 |
+
"pkg": {
|
| 618 |
+
"__init__.py": "from . import pkg",
|
| 619 |
+
},
|
| 620 |
+
}
|
| 621 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 622 |
+
|
| 623 |
+
mapping = {
|
| 624 |
+
"pkg": str(tmp_path / "pkg"),
|
| 625 |
+
}
|
| 626 |
+
template = _finder_template(str(uuid4()), mapping, {})
|
| 627 |
+
|
| 628 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 629 |
+
sys.modules.pop("pkg", None)
|
| 630 |
+
|
| 631 |
+
self.install_finder(template)
|
| 632 |
+
with pytest.raises(ImportError, match="pkg"):
|
| 633 |
+
import_module("pkg")
|
| 634 |
+
|
| 635 |
+
def test_similar_name(self, tmp_path):
|
| 636 |
+
files = {
|
| 637 |
+
"foo": {
|
| 638 |
+
"__init__.py": "",
|
| 639 |
+
"bar": {
|
| 640 |
+
"__init__.py": "",
|
| 641 |
+
},
|
| 642 |
+
},
|
| 643 |
+
}
|
| 644 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 645 |
+
|
| 646 |
+
mapping = {
|
| 647 |
+
"foo": str(tmp_path / "foo"),
|
| 648 |
+
}
|
| 649 |
+
template = _finder_template(str(uuid4()), mapping, {})
|
| 650 |
+
|
| 651 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 652 |
+
sys.modules.pop("foo", None)
|
| 653 |
+
sys.modules.pop("foo.bar", None)
|
| 654 |
+
|
| 655 |
+
self.install_finder(template)
|
| 656 |
+
with pytest.raises(ImportError, match="foobar"):
|
| 657 |
+
import_module("foobar")
|
| 658 |
+
|
| 659 |
+
def test_case_sensitivity(self, tmp_path):
|
| 660 |
+
files = {
|
| 661 |
+
"foo": {
|
| 662 |
+
"__init__.py": "",
|
| 663 |
+
"lowercase.py": "x = 1",
|
| 664 |
+
"bar": {
|
| 665 |
+
"__init__.py": "",
|
| 666 |
+
"lowercase.py": "x = 2",
|
| 667 |
+
},
|
| 668 |
+
},
|
| 669 |
+
}
|
| 670 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 671 |
+
mapping = {
|
| 672 |
+
"foo": str(tmp_path / "foo"),
|
| 673 |
+
}
|
| 674 |
+
template = _finder_template(str(uuid4()), mapping, {})
|
| 675 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 676 |
+
sys.modules.pop("foo", None)
|
| 677 |
+
|
| 678 |
+
self.install_finder(template)
|
| 679 |
+
with pytest.raises(ImportError, match="'FOO'"):
|
| 680 |
+
import_module("FOO")
|
| 681 |
+
|
| 682 |
+
with pytest.raises(ImportError, match="'foo\\.LOWERCASE'"):
|
| 683 |
+
import_module("foo.LOWERCASE")
|
| 684 |
+
|
| 685 |
+
with pytest.raises(ImportError, match="'foo\\.bar\\.Lowercase'"):
|
| 686 |
+
import_module("foo.bar.Lowercase")
|
| 687 |
+
|
| 688 |
+
with pytest.raises(ImportError, match="'foo\\.BAR'"):
|
| 689 |
+
import_module("foo.BAR.lowercase")
|
| 690 |
+
|
| 691 |
+
with pytest.raises(ImportError, match="'FOO'"):
|
| 692 |
+
import_module("FOO.bar.lowercase")
|
| 693 |
+
|
| 694 |
+
mod = import_module("foo.lowercase")
|
| 695 |
+
assert mod.x == 1
|
| 696 |
+
|
| 697 |
+
mod = import_module("foo.bar.lowercase")
|
| 698 |
+
assert mod.x == 2
|
| 699 |
+
|
| 700 |
+
def test_namespace_case_sensitivity(self, tmp_path):
|
| 701 |
+
files = {
|
| 702 |
+
"pkg": {
|
| 703 |
+
"__init__.py": "a = 13",
|
| 704 |
+
"foo": {
|
| 705 |
+
"__init__.py": "b = 37",
|
| 706 |
+
"bar.py": "c = 42",
|
| 707 |
+
},
|
| 708 |
+
},
|
| 709 |
+
}
|
| 710 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 711 |
+
|
| 712 |
+
mapping = {"ns.othername": str(tmp_path / "pkg")}
|
| 713 |
+
namespaces = {"ns": []}
|
| 714 |
+
|
| 715 |
+
template = _finder_template(str(uuid4()), mapping, namespaces)
|
| 716 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 717 |
+
for mod in ("ns", "ns.othername"):
|
| 718 |
+
sys.modules.pop(mod, None)
|
| 719 |
+
|
| 720 |
+
self.install_finder(template)
|
| 721 |
+
pkg = import_module("ns.othername")
|
| 722 |
+
expected = str((tmp_path / "pkg").resolve())
|
| 723 |
+
assert_path(pkg, expected)
|
| 724 |
+
assert pkg.a == 13
|
| 725 |
+
|
| 726 |
+
foo = import_module("ns.othername.foo")
|
| 727 |
+
assert foo.b == 37
|
| 728 |
+
|
| 729 |
+
bar = import_module("ns.othername.foo.bar")
|
| 730 |
+
assert bar.c == 42
|
| 731 |
+
|
| 732 |
+
with pytest.raises(ImportError, match="'NS'"):
|
| 733 |
+
import_module("NS.othername.foo")
|
| 734 |
+
|
| 735 |
+
with pytest.raises(ImportError, match="'ns\\.othername\\.FOO\\'"):
|
| 736 |
+
import_module("ns.othername.FOO")
|
| 737 |
+
|
| 738 |
+
with pytest.raises(ImportError, match="'ns\\.othername\\.foo\\.BAR\\'"):
|
| 739 |
+
import_module("ns.othername.foo.BAR")
|
| 740 |
+
|
| 741 |
+
def test_intermediate_packages(self, tmp_path):
|
| 742 |
+
"""
|
| 743 |
+
The finder should not import ``fullname`` if the intermediate segments
|
| 744 |
+
don't exist (see pypa/setuptools#4019).
|
| 745 |
+
"""
|
| 746 |
+
files = {
|
| 747 |
+
"src": {
|
| 748 |
+
"mypkg": {
|
| 749 |
+
"__init__.py": "",
|
| 750 |
+
"config.py": "a = 13",
|
| 751 |
+
"helloworld.py": "b = 13",
|
| 752 |
+
"components": {
|
| 753 |
+
"config.py": "a = 37",
|
| 754 |
+
},
|
| 755 |
+
},
|
| 756 |
+
}
|
| 757 |
+
}
|
| 758 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 759 |
+
|
| 760 |
+
mapping = {"mypkg": str(tmp_path / "src/mypkg")}
|
| 761 |
+
template = _finder_template(str(uuid4()), mapping, {})
|
| 762 |
+
|
| 763 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 764 |
+
for mod in (
|
| 765 |
+
"mypkg",
|
| 766 |
+
"mypkg.config",
|
| 767 |
+
"mypkg.helloworld",
|
| 768 |
+
"mypkg.components",
|
| 769 |
+
"mypkg.components.config",
|
| 770 |
+
"mypkg.components.helloworld",
|
| 771 |
+
):
|
| 772 |
+
sys.modules.pop(mod, None)
|
| 773 |
+
|
| 774 |
+
self.install_finder(template)
|
| 775 |
+
|
| 776 |
+
config = import_module("mypkg.components.config")
|
| 777 |
+
assert config.a == 37
|
| 778 |
+
|
| 779 |
+
helloworld = import_module("mypkg.helloworld")
|
| 780 |
+
assert helloworld.b == 13
|
| 781 |
+
|
| 782 |
+
with pytest.raises(ImportError):
|
| 783 |
+
import_module("mypkg.components.helloworld")
|
| 784 |
+
|
| 785 |
+
|
| 786 |
+
def test_pkg_roots(tmp_path):
|
| 787 |
+
"""This test focus in getting a particular implementation detail right.
|
| 788 |
+
If at some point in time the implementation is changed for something different,
|
| 789 |
+
this test can be modified or even excluded.
|
| 790 |
+
"""
|
| 791 |
+
files = {
|
| 792 |
+
"a": {"b": {"__init__.py": "ab = 1"}, "__init__.py": "a = 1"},
|
| 793 |
+
"d": {"__init__.py": "d = 1", "e": {"__init__.py": "de = 1"}},
|
| 794 |
+
"f": {"g": {"h": {"__init__.py": "fgh = 1"}}},
|
| 795 |
+
"other": {"__init__.py": "abc = 1"},
|
| 796 |
+
"another": {"__init__.py": "abcxyz = 1"},
|
| 797 |
+
"yet_another": {"__init__.py": "mnopq = 1"},
|
| 798 |
+
}
|
| 799 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 800 |
+
package_dir = {
|
| 801 |
+
"a.b.c": "other",
|
| 802 |
+
"a.b.c.x.y.z": "another",
|
| 803 |
+
"m.n.o.p.q": "yet_another",
|
| 804 |
+
}
|
| 805 |
+
packages = [
|
| 806 |
+
"a",
|
| 807 |
+
"a.b",
|
| 808 |
+
"a.b.c",
|
| 809 |
+
"a.b.c.x.y",
|
| 810 |
+
"a.b.c.x.y.z",
|
| 811 |
+
"d",
|
| 812 |
+
"d.e",
|
| 813 |
+
"f",
|
| 814 |
+
"f.g",
|
| 815 |
+
"f.g.h",
|
| 816 |
+
"m.n.o.p.q",
|
| 817 |
+
]
|
| 818 |
+
roots = _find_package_roots(packages, package_dir, tmp_path)
|
| 819 |
+
assert roots == {
|
| 820 |
+
"a": str(tmp_path / "a"),
|
| 821 |
+
"a.b.c": str(tmp_path / "other"),
|
| 822 |
+
"a.b.c.x.y.z": str(tmp_path / "another"),
|
| 823 |
+
"d": str(tmp_path / "d"),
|
| 824 |
+
"f": str(tmp_path / "f"),
|
| 825 |
+
"m.n.o.p.q": str(tmp_path / "yet_another"),
|
| 826 |
+
}
|
| 827 |
+
|
| 828 |
+
ns = set(dict(_find_namespaces(packages, roots)))
|
| 829 |
+
assert ns == {"f", "f.g"}
|
| 830 |
+
|
| 831 |
+
ns = set(_find_virtual_namespaces(roots))
|
| 832 |
+
assert ns == {"a.b", "a.b.c.x", "a.b.c.x.y", "m", "m.n", "m.n.o", "m.n.o.p"}
|
| 833 |
+
|
| 834 |
+
|
| 835 |
+
class TestOverallBehaviour:
|
| 836 |
+
PYPROJECT = """\
|
| 837 |
+
[build-system]
|
| 838 |
+
requires = ["setuptools"]
|
| 839 |
+
build-backend = "setuptools.build_meta"
|
| 840 |
+
|
| 841 |
+
[project]
|
| 842 |
+
name = "mypkg"
|
| 843 |
+
version = "3.14159"
|
| 844 |
+
"""
|
| 845 |
+
|
| 846 |
+
# Any: Would need a TypedDict. Keep it simple for tests
|
| 847 |
+
FLAT_LAYOUT: dict[str, Any] = {
|
| 848 |
+
"pyproject.toml": dedent(PYPROJECT),
|
| 849 |
+
"MANIFEST.in": EXAMPLE["MANIFEST.in"],
|
| 850 |
+
"otherfile.py": "",
|
| 851 |
+
"mypkg": {
|
| 852 |
+
"__init__.py": "",
|
| 853 |
+
"mod1.py": "var = 42",
|
| 854 |
+
"subpackage": {
|
| 855 |
+
"__init__.py": "",
|
| 856 |
+
"mod2.py": "var = 13",
|
| 857 |
+
"resource_file.txt": "resource 39",
|
| 858 |
+
},
|
| 859 |
+
},
|
| 860 |
+
}
|
| 861 |
+
|
| 862 |
+
EXAMPLES = {
|
| 863 |
+
"flat-layout": FLAT_LAYOUT,
|
| 864 |
+
"src-layout": {
|
| 865 |
+
"pyproject.toml": dedent(PYPROJECT),
|
| 866 |
+
"MANIFEST.in": EXAMPLE["MANIFEST.in"],
|
| 867 |
+
"otherfile.py": "",
|
| 868 |
+
"src": {"mypkg": FLAT_LAYOUT["mypkg"]},
|
| 869 |
+
},
|
| 870 |
+
"custom-layout": {
|
| 871 |
+
"pyproject.toml": dedent(PYPROJECT)
|
| 872 |
+
+ dedent(
|
| 873 |
+
"""\
|
| 874 |
+
[tool.setuptools]
|
| 875 |
+
packages = ["mypkg", "mypkg.subpackage"]
|
| 876 |
+
|
| 877 |
+
[tool.setuptools.package-dir]
|
| 878 |
+
"mypkg.subpackage" = "other"
|
| 879 |
+
"""
|
| 880 |
+
),
|
| 881 |
+
"MANIFEST.in": EXAMPLE["MANIFEST.in"],
|
| 882 |
+
"otherfile.py": "",
|
| 883 |
+
"mypkg": {
|
| 884 |
+
"__init__.py": "",
|
| 885 |
+
"mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"],
|
| 886 |
+
},
|
| 887 |
+
"other": FLAT_LAYOUT["mypkg"]["subpackage"],
|
| 888 |
+
},
|
| 889 |
+
"namespace": {
|
| 890 |
+
"pyproject.toml": dedent(PYPROJECT),
|
| 891 |
+
"MANIFEST.in": EXAMPLE["MANIFEST.in"],
|
| 892 |
+
"otherfile.py": "",
|
| 893 |
+
"src": {
|
| 894 |
+
"mypkg": {
|
| 895 |
+
"mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"],
|
| 896 |
+
"subpackage": FLAT_LAYOUT["mypkg"]["subpackage"],
|
| 897 |
+
},
|
| 898 |
+
},
|
| 899 |
+
},
|
| 900 |
+
}
|
| 901 |
+
|
| 902 |
+
@pytest.mark.xfail(sys.platform == "darwin", reason="pypa/setuptools#4328")
|
| 903 |
+
@pytest.mark.parametrize("layout", EXAMPLES.keys())
|
| 904 |
+
def test_editable_install(self, tmp_path, venv, layout, editable_opts):
|
| 905 |
+
project, _ = install_project(
|
| 906 |
+
"mypkg", venv, tmp_path, self.EXAMPLES[layout], *editable_opts
|
| 907 |
+
)
|
| 908 |
+
|
| 909 |
+
# Ensure stray files are not importable
|
| 910 |
+
cmd_import_error = """\
|
| 911 |
+
try:
|
| 912 |
+
import otherfile
|
| 913 |
+
except ImportError as ex:
|
| 914 |
+
print(ex)
|
| 915 |
+
"""
|
| 916 |
+
out = venv.run(["python", "-c", dedent(cmd_import_error)])
|
| 917 |
+
assert "No module named 'otherfile'" in out
|
| 918 |
+
|
| 919 |
+
# Ensure the modules are importable
|
| 920 |
+
cmd_get_vars = """\
|
| 921 |
+
import mypkg, mypkg.mod1, mypkg.subpackage.mod2
|
| 922 |
+
print(mypkg.mod1.var, mypkg.subpackage.mod2.var)
|
| 923 |
+
"""
|
| 924 |
+
out = venv.run(["python", "-c", dedent(cmd_get_vars)])
|
| 925 |
+
assert "42 13" in out
|
| 926 |
+
|
| 927 |
+
# Ensure resources are reachable
|
| 928 |
+
cmd_get_resource = """\
|
| 929 |
+
import mypkg.subpackage
|
| 930 |
+
from setuptools._importlib import resources as importlib_resources
|
| 931 |
+
text = importlib_resources.files(mypkg.subpackage) / "resource_file.txt"
|
| 932 |
+
print(text.read_text(encoding="utf-8"))
|
| 933 |
+
"""
|
| 934 |
+
out = venv.run(["python", "-c", dedent(cmd_get_resource)])
|
| 935 |
+
assert "resource 39" in out
|
| 936 |
+
|
| 937 |
+
# Ensure files are editable
|
| 938 |
+
mod1 = next(project.glob("**/mod1.py"))
|
| 939 |
+
mod2 = next(project.glob("**/mod2.py"))
|
| 940 |
+
resource_file = next(project.glob("**/resource_file.txt"))
|
| 941 |
+
|
| 942 |
+
mod1.write_text("var = 17", encoding="utf-8")
|
| 943 |
+
mod2.write_text("var = 781", encoding="utf-8")
|
| 944 |
+
resource_file.write_text("resource 374", encoding="utf-8")
|
| 945 |
+
|
| 946 |
+
out = venv.run(["python", "-c", dedent(cmd_get_vars)])
|
| 947 |
+
assert "42 13" not in out
|
| 948 |
+
assert "17 781" in out
|
| 949 |
+
|
| 950 |
+
out = venv.run(["python", "-c", dedent(cmd_get_resource)])
|
| 951 |
+
assert "resource 39" not in out
|
| 952 |
+
assert "resource 374" in out
|
| 953 |
+
|
| 954 |
+
|
| 955 |
+
class TestLinkTree:
|
| 956 |
+
FILES = deepcopy(TestOverallBehaviour.EXAMPLES["src-layout"])
|
| 957 |
+
FILES["pyproject.toml"] += dedent(
|
| 958 |
+
"""\
|
| 959 |
+
[tool.setuptools]
|
| 960 |
+
# Temporary workaround: both `include-package-data` and `package-data` configs
|
| 961 |
+
# can be removed after #3260 is fixed.
|
| 962 |
+
include-package-data = false
|
| 963 |
+
package-data = {"*" = ["*.txt"]}
|
| 964 |
+
|
| 965 |
+
[tool.setuptools.packages.find]
|
| 966 |
+
where = ["src"]
|
| 967 |
+
exclude = ["*.subpackage*"]
|
| 968 |
+
"""
|
| 969 |
+
)
|
| 970 |
+
FILES["src"]["mypkg"]["resource.not_in_manifest"] = "abc"
|
| 971 |
+
|
| 972 |
+
def test_generated_tree(self, tmp_path):
|
| 973 |
+
jaraco.path.build(self.FILES, prefix=tmp_path)
|
| 974 |
+
|
| 975 |
+
with _Path(tmp_path):
|
| 976 |
+
name = "mypkg-3.14159"
|
| 977 |
+
dist = Distribution({"script_name": "%PEP 517%"})
|
| 978 |
+
dist.parse_config_files()
|
| 979 |
+
|
| 980 |
+
wheel = Mock()
|
| 981 |
+
aux = tmp_path / ".aux"
|
| 982 |
+
build = tmp_path / ".build"
|
| 983 |
+
aux.mkdir()
|
| 984 |
+
build.mkdir()
|
| 985 |
+
|
| 986 |
+
build_py = dist.get_command_obj("build_py")
|
| 987 |
+
build_py.editable_mode = True
|
| 988 |
+
build_py.build_lib = str(build)
|
| 989 |
+
build_py.ensure_finalized()
|
| 990 |
+
outputs = build_py.get_outputs()
|
| 991 |
+
output_mapping = build_py.get_output_mapping()
|
| 992 |
+
|
| 993 |
+
make_tree = _LinkTree(dist, name, aux, build)
|
| 994 |
+
make_tree(wheel, outputs, output_mapping)
|
| 995 |
+
|
| 996 |
+
mod1 = next(aux.glob("**/mod1.py"))
|
| 997 |
+
expected = tmp_path / "src/mypkg/mod1.py"
|
| 998 |
+
assert_link_to(mod1, expected)
|
| 999 |
+
|
| 1000 |
+
assert next(aux.glob("**/subpackage"), None) is None
|
| 1001 |
+
assert next(aux.glob("**/mod2.py"), None) is None
|
| 1002 |
+
assert next(aux.glob("**/resource_file.txt"), None) is None
|
| 1003 |
+
|
| 1004 |
+
assert next(aux.glob("**/resource.not_in_manifest"), None) is None
|
| 1005 |
+
|
| 1006 |
+
def test_strict_install(self, tmp_path, venv):
|
| 1007 |
+
opts = ["--config-settings", "editable-mode=strict"]
|
| 1008 |
+
install_project("mypkg", venv, tmp_path, self.FILES, *opts)
|
| 1009 |
+
|
| 1010 |
+
out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
|
| 1011 |
+
assert "42" in out
|
| 1012 |
+
|
| 1013 |
+
# Ensure packages excluded from distribution are not importable
|
| 1014 |
+
cmd_import_error = """\
|
| 1015 |
+
try:
|
| 1016 |
+
from mypkg import subpackage
|
| 1017 |
+
except ImportError as ex:
|
| 1018 |
+
print(ex)
|
| 1019 |
+
"""
|
| 1020 |
+
out = venv.run(["python", "-c", dedent(cmd_import_error)])
|
| 1021 |
+
assert "cannot import name 'subpackage'" in out
|
| 1022 |
+
|
| 1023 |
+
# Ensure resource files excluded from distribution are not reachable
|
| 1024 |
+
cmd_get_resource = """\
|
| 1025 |
+
import mypkg
|
| 1026 |
+
from setuptools._importlib import resources as importlib_resources
|
| 1027 |
+
try:
|
| 1028 |
+
text = importlib_resources.files(mypkg) / "resource.not_in_manifest"
|
| 1029 |
+
print(text.read_text(encoding="utf-8"))
|
| 1030 |
+
except FileNotFoundError as ex:
|
| 1031 |
+
print(ex)
|
| 1032 |
+
"""
|
| 1033 |
+
out = venv.run(["python", "-c", dedent(cmd_get_resource)])
|
| 1034 |
+
assert "No such file or directory" in out
|
| 1035 |
+
assert "resource.not_in_manifest" in out
|
| 1036 |
+
|
| 1037 |
+
|
| 1038 |
+
@pytest.mark.filterwarnings("ignore:.*compat.*:setuptools.SetuptoolsDeprecationWarning")
|
| 1039 |
+
def test_compat_install(tmp_path, venv):
|
| 1040 |
+
# TODO: Remove `compat` after Dec/2022.
|
| 1041 |
+
opts = ["--config-settings", "editable-mode=compat"]
|
| 1042 |
+
files = TestOverallBehaviour.EXAMPLES["custom-layout"]
|
| 1043 |
+
install_project("mypkg", venv, tmp_path, files, *opts)
|
| 1044 |
+
|
| 1045 |
+
out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
|
| 1046 |
+
assert "42" in out
|
| 1047 |
+
|
| 1048 |
+
expected_path = comparable_path(str(tmp_path))
|
| 1049 |
+
|
| 1050 |
+
# Compatible behaviour will make spurious modules and excluded
|
| 1051 |
+
# files importable directly from the original path
|
| 1052 |
+
for cmd in (
|
| 1053 |
+
"import otherfile; print(otherfile)",
|
| 1054 |
+
"import other; print(other)",
|
| 1055 |
+
"import mypkg; print(mypkg)",
|
| 1056 |
+
):
|
| 1057 |
+
out = comparable_path(venv.run(["python", "-c", cmd]))
|
| 1058 |
+
assert expected_path in out
|
| 1059 |
+
|
| 1060 |
+
# Compatible behaviour will not consider custom mappings
|
| 1061 |
+
cmd = """\
|
| 1062 |
+
try:
|
| 1063 |
+
from mypkg import subpackage;
|
| 1064 |
+
except ImportError as ex:
|
| 1065 |
+
print(ex)
|
| 1066 |
+
"""
|
| 1067 |
+
out = venv.run(["python", "-c", dedent(cmd)])
|
| 1068 |
+
assert "cannot import name 'subpackage'" in out
|
| 1069 |
+
|
| 1070 |
+
|
| 1071 |
+
def test_pbr_integration(tmp_path, venv, editable_opts):
|
| 1072 |
+
"""Ensure editable installs work with pbr, issue #3500"""
|
| 1073 |
+
files = {
|
| 1074 |
+
"pyproject.toml": dedent(
|
| 1075 |
+
"""\
|
| 1076 |
+
[build-system]
|
| 1077 |
+
requires = ["setuptools"]
|
| 1078 |
+
build-backend = "setuptools.build_meta"
|
| 1079 |
+
"""
|
| 1080 |
+
),
|
| 1081 |
+
"setup.py": dedent(
|
| 1082 |
+
"""\
|
| 1083 |
+
__import__('setuptools').setup(
|
| 1084 |
+
pbr=True,
|
| 1085 |
+
setup_requires=["pbr"],
|
| 1086 |
+
)
|
| 1087 |
+
"""
|
| 1088 |
+
),
|
| 1089 |
+
"setup.cfg": dedent(
|
| 1090 |
+
"""\
|
| 1091 |
+
[metadata]
|
| 1092 |
+
name = mypkg
|
| 1093 |
+
|
| 1094 |
+
[files]
|
| 1095 |
+
packages =
|
| 1096 |
+
mypkg
|
| 1097 |
+
"""
|
| 1098 |
+
),
|
| 1099 |
+
"mypkg": {
|
| 1100 |
+
"__init__.py": "",
|
| 1101 |
+
"hello.py": "print('Hello world!')",
|
| 1102 |
+
},
|
| 1103 |
+
"other": {"test.txt": "Another file in here."},
|
| 1104 |
+
}
|
| 1105 |
+
venv.run(["python", "-m", "pip", "install", "pbr"])
|
| 1106 |
+
|
| 1107 |
+
with contexts.environment(PBR_VERSION="0.42"):
|
| 1108 |
+
install_project("mypkg", venv, tmp_path, files, *editable_opts)
|
| 1109 |
+
|
| 1110 |
+
out = venv.run(["python", "-c", "import mypkg.hello"])
|
| 1111 |
+
assert "Hello world!" in out
|
| 1112 |
+
|
| 1113 |
+
|
| 1114 |
+
class TestCustomBuildPy:
|
| 1115 |
+
"""
|
| 1116 |
+
Issue #3501 indicates that some plugins/customizations might rely on:
|
| 1117 |
+
|
| 1118 |
+
1. ``build_py`` not running
|
| 1119 |
+
2. ``build_py`` always copying files to ``build_lib``
|
| 1120 |
+
|
| 1121 |
+
During the transition period setuptools should prevent potential errors from
|
| 1122 |
+
happening due to those assumptions.
|
| 1123 |
+
"""
|
| 1124 |
+
|
| 1125 |
+
# TODO: Remove tests after _run_build_steps is removed.
|
| 1126 |
+
|
| 1127 |
+
FILES = {
|
| 1128 |
+
**TestOverallBehaviour.EXAMPLES["flat-layout"],
|
| 1129 |
+
"setup.py": dedent(
|
| 1130 |
+
"""\
|
| 1131 |
+
import pathlib
|
| 1132 |
+
from setuptools import setup
|
| 1133 |
+
from setuptools.command.build_py import build_py as orig
|
| 1134 |
+
|
| 1135 |
+
class my_build_py(orig):
|
| 1136 |
+
def run(self):
|
| 1137 |
+
super().run()
|
| 1138 |
+
raise ValueError("TEST_RAISE")
|
| 1139 |
+
|
| 1140 |
+
setup(cmdclass={"build_py": my_build_py})
|
| 1141 |
+
"""
|
| 1142 |
+
),
|
| 1143 |
+
}
|
| 1144 |
+
|
| 1145 |
+
def test_safeguarded_from_errors(self, tmp_path, venv):
|
| 1146 |
+
"""Ensure that errors in custom build_py are reported as warnings"""
|
| 1147 |
+
# Warnings should show up
|
| 1148 |
+
_, out = install_project("mypkg", venv, tmp_path, self.FILES)
|
| 1149 |
+
assert "SetuptoolsDeprecationWarning" in out
|
| 1150 |
+
assert "ValueError: TEST_RAISE" in out
|
| 1151 |
+
# but installation should be successful
|
| 1152 |
+
out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
|
| 1153 |
+
assert "42" in out
|
| 1154 |
+
|
| 1155 |
+
|
| 1156 |
+
class TestCustomBuildWheel:
|
| 1157 |
+
def install_custom_build_wheel(self, dist):
|
| 1158 |
+
bdist_wheel_cls = dist.get_command_class("bdist_wheel")
|
| 1159 |
+
|
| 1160 |
+
class MyBdistWheel(bdist_wheel_cls):
|
| 1161 |
+
def get_tag(self):
|
| 1162 |
+
# In issue #3513, we can see that some extensions may try to access
|
| 1163 |
+
# the `plat_name` property in bdist_wheel
|
| 1164 |
+
if self.plat_name.startswith("macosx-"):
|
| 1165 |
+
_ = "macOS platform"
|
| 1166 |
+
return super().get_tag()
|
| 1167 |
+
|
| 1168 |
+
dist.cmdclass["bdist_wheel"] = MyBdistWheel
|
| 1169 |
+
|
| 1170 |
+
def test_access_plat_name(self, tmpdir_cwd):
|
| 1171 |
+
# Even when a custom bdist_wheel tries to access plat_name the build should
|
| 1172 |
+
# be successful
|
| 1173 |
+
jaraco.path.build({"module.py": "x = 42"})
|
| 1174 |
+
dist = Distribution()
|
| 1175 |
+
dist.script_name = "setup.py"
|
| 1176 |
+
dist.set_defaults()
|
| 1177 |
+
self.install_custom_build_wheel(dist)
|
| 1178 |
+
cmd = editable_wheel(dist)
|
| 1179 |
+
cmd.ensure_finalized()
|
| 1180 |
+
cmd.run()
|
| 1181 |
+
wheel_file = str(next(Path().glob('dist/*.whl')))
|
| 1182 |
+
assert "editable" in wheel_file
|
| 1183 |
+
|
| 1184 |
+
|
| 1185 |
+
class TestCustomBuildExt:
|
| 1186 |
+
def install_custom_build_ext_distutils(self, dist):
|
| 1187 |
+
from distutils.command.build_ext import build_ext as build_ext_cls
|
| 1188 |
+
|
| 1189 |
+
class MyBuildExt(build_ext_cls):
|
| 1190 |
+
pass
|
| 1191 |
+
|
| 1192 |
+
dist.cmdclass["build_ext"] = MyBuildExt
|
| 1193 |
+
|
| 1194 |
+
@pytest.mark.skipif(
|
| 1195 |
+
sys.platform != "linux", reason="compilers may fail without correct setup"
|
| 1196 |
+
)
|
| 1197 |
+
def test_distutils_leave_inplace_files(self, tmpdir_cwd):
|
| 1198 |
+
jaraco.path.build({"module.c": ""})
|
| 1199 |
+
attrs = {
|
| 1200 |
+
"ext_modules": [Extension("module", ["module.c"])],
|
| 1201 |
+
}
|
| 1202 |
+
dist = Distribution(attrs)
|
| 1203 |
+
dist.script_name = "setup.py"
|
| 1204 |
+
dist.set_defaults()
|
| 1205 |
+
self.install_custom_build_ext_distutils(dist)
|
| 1206 |
+
cmd = editable_wheel(dist)
|
| 1207 |
+
cmd.ensure_finalized()
|
| 1208 |
+
cmd.run()
|
| 1209 |
+
wheel_file = str(next(Path().glob('dist/*.whl')))
|
| 1210 |
+
assert "editable" in wheel_file
|
| 1211 |
+
files = [p for p in Path().glob("module.*") if p.suffix != ".c"]
|
| 1212 |
+
assert len(files) == 1
|
| 1213 |
+
name = files[0].name
|
| 1214 |
+
assert any(name.endswith(ext) for ext in EXTENSION_SUFFIXES)
|
| 1215 |
+
|
| 1216 |
+
|
| 1217 |
+
def test_debugging_tips(tmpdir_cwd, monkeypatch):
|
| 1218 |
+
"""Make sure to display useful debugging tips to the user."""
|
| 1219 |
+
jaraco.path.build({"module.py": "x = 42"})
|
| 1220 |
+
dist = Distribution()
|
| 1221 |
+
dist.script_name = "setup.py"
|
| 1222 |
+
dist.set_defaults()
|
| 1223 |
+
cmd = editable_wheel(dist)
|
| 1224 |
+
cmd.ensure_finalized()
|
| 1225 |
+
|
| 1226 |
+
SimulatedErr = type("SimulatedErr", (Exception,), {})
|
| 1227 |
+
simulated_failure = Mock(side_effect=SimulatedErr())
|
| 1228 |
+
monkeypatch.setattr(cmd, "get_finalized_command", simulated_failure)
|
| 1229 |
+
|
| 1230 |
+
expected_msg = "following steps are recommended to help debug"
|
| 1231 |
+
with pytest.raises(SimulatedErr), pytest.warns(_DebuggingTips, match=expected_msg):
|
| 1232 |
+
cmd.run()
|
| 1233 |
+
|
| 1234 |
+
|
| 1235 |
+
@pytest.mark.filterwarnings("error")
|
| 1236 |
+
def test_encode_pth():
|
| 1237 |
+
"""Ensure _encode_pth function does not produce encoding warnings"""
|
| 1238 |
+
content = _encode_pth("tkmilan_ç_utf8") # no warnings (would be turned into errors)
|
| 1239 |
+
assert isinstance(content, bytes)
|
| 1240 |
+
|
| 1241 |
+
|
| 1242 |
+
def install_project(name, venv, tmp_path, files, *opts):
|
| 1243 |
+
project = tmp_path / name
|
| 1244 |
+
project.mkdir()
|
| 1245 |
+
jaraco.path.build(files, prefix=project)
|
| 1246 |
+
opts = [*opts, "--no-build-isolation"] # force current version of setuptools
|
| 1247 |
+
out = venv.run(
|
| 1248 |
+
["python", "-m", "pip", "-v", "install", "-e", str(project), *opts],
|
| 1249 |
+
stderr=subprocess.STDOUT,
|
| 1250 |
+
)
|
| 1251 |
+
return project, out
|
| 1252 |
+
|
| 1253 |
+
|
| 1254 |
+
def _addsitedirs(new_dirs):
|
| 1255 |
+
"""To use this function, it is necessary to insert new_dir in front of sys.path.
|
| 1256 |
+
The Python process will try to import a ``sitecustomize`` module on startup.
|
| 1257 |
+
If we manipulate sys.path/PYTHONPATH, we can force it to run our code,
|
| 1258 |
+
which invokes ``addsitedir`` and ensure ``.pth`` files are loaded.
|
| 1259 |
+
"""
|
| 1260 |
+
content = '\n'.join(
|
| 1261 |
+
("import site",)
|
| 1262 |
+
+ tuple(f"site.addsitedir({os.fspath(new_dir)!r})" for new_dir in new_dirs)
|
| 1263 |
+
)
|
| 1264 |
+
(new_dirs[0] / "sitecustomize.py").write_text(content, encoding="utf-8")
|
| 1265 |
+
|
| 1266 |
+
|
| 1267 |
+
# ---- Assertion Helpers ----
|
| 1268 |
+
|
| 1269 |
+
|
| 1270 |
+
def assert_path(pkg, expected):
|
| 1271 |
+
# __path__ is not guaranteed to exist, so we have to account for that
|
| 1272 |
+
if pkg.__path__:
|
| 1273 |
+
path = next(iter(pkg.__path__), None)
|
| 1274 |
+
if path:
|
| 1275 |
+
assert str(Path(path).resolve()) == expected
|
| 1276 |
+
|
| 1277 |
+
|
| 1278 |
+
def assert_link_to(file: Path, other: Path) -> None:
|
| 1279 |
+
if file.is_symlink():
|
| 1280 |
+
assert str(file.resolve()) == str(other.resolve())
|
| 1281 |
+
else:
|
| 1282 |
+
file_stat = file.stat()
|
| 1283 |
+
other_stat = other.stat()
|
| 1284 |
+
assert file_stat[stat.ST_INO] == other_stat[stat.ST_INO]
|
| 1285 |
+
assert file_stat[stat.ST_DEV] == other_stat[stat.ST_DEV]
|
| 1286 |
+
|
| 1287 |
+
|
| 1288 |
+
def comparable_path(str_with_path: str) -> str:
|
| 1289 |
+
return str_with_path.lower().replace(os.sep, "/").replace("//", "/")
|
llava/lib/python3.10/site-packages/setuptools/tests/test_glob.py
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
from jaraco import path
|
| 3 |
+
|
| 4 |
+
from setuptools.glob import glob
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
@pytest.mark.parametrize(
|
| 8 |
+
('tree', 'pattern', 'matches'),
|
| 9 |
+
(
|
| 10 |
+
('', b'', []),
|
| 11 |
+
('', '', []),
|
| 12 |
+
(
|
| 13 |
+
"""
|
| 14 |
+
appveyor.yml
|
| 15 |
+
CHANGES.rst
|
| 16 |
+
LICENSE
|
| 17 |
+
MANIFEST.in
|
| 18 |
+
pyproject.toml
|
| 19 |
+
README.rst
|
| 20 |
+
setup.cfg
|
| 21 |
+
setup.py
|
| 22 |
+
""",
|
| 23 |
+
'*.rst',
|
| 24 |
+
('CHANGES.rst', 'README.rst'),
|
| 25 |
+
),
|
| 26 |
+
(
|
| 27 |
+
"""
|
| 28 |
+
appveyor.yml
|
| 29 |
+
CHANGES.rst
|
| 30 |
+
LICENSE
|
| 31 |
+
MANIFEST.in
|
| 32 |
+
pyproject.toml
|
| 33 |
+
README.rst
|
| 34 |
+
setup.cfg
|
| 35 |
+
setup.py
|
| 36 |
+
""",
|
| 37 |
+
b'*.rst',
|
| 38 |
+
(b'CHANGES.rst', b'README.rst'),
|
| 39 |
+
),
|
| 40 |
+
),
|
| 41 |
+
)
|
| 42 |
+
def test_glob(monkeypatch, tmpdir, tree, pattern, matches):
|
| 43 |
+
monkeypatch.chdir(tmpdir)
|
| 44 |
+
path.build({name: '' for name in tree.split()})
|
| 45 |
+
assert list(sorted(glob(pattern))) == list(sorted(matches))
|
llava/lib/python3.10/site-packages/setuptools/tests/test_logging.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import inspect
|
| 3 |
+
import logging
|
| 4 |
+
import sys
|
| 5 |
+
|
| 6 |
+
import pytest
|
| 7 |
+
|
| 8 |
+
IS_PYPY = '__pypy__' in sys.builtin_module_names
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
setup_py = """\
|
| 12 |
+
from setuptools import setup
|
| 13 |
+
|
| 14 |
+
setup(
|
| 15 |
+
name="test_logging",
|
| 16 |
+
version="0.0"
|
| 17 |
+
)
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
@pytest.mark.parametrize(
|
| 22 |
+
('flag', 'expected_level'), [("--dry-run", "INFO"), ("--verbose", "DEBUG")]
|
| 23 |
+
)
|
| 24 |
+
def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level):
|
| 25 |
+
"""Make sure the correct verbosity level is set (issue #3038)"""
|
| 26 |
+
import setuptools # noqa: F401 # import setuptools to monkeypatch distutils
|
| 27 |
+
|
| 28 |
+
import distutils # <- load distutils after all the patches take place
|
| 29 |
+
|
| 30 |
+
logger = logging.Logger(__name__)
|
| 31 |
+
monkeypatch.setattr(logging, "root", logger)
|
| 32 |
+
unset_log_level = logger.getEffectiveLevel()
|
| 33 |
+
assert logging.getLevelName(unset_log_level) == "NOTSET"
|
| 34 |
+
|
| 35 |
+
setup_script = tmp_path / "setup.py"
|
| 36 |
+
setup_script.write_text(setup_py, encoding="utf-8")
|
| 37 |
+
dist = distutils.core.run_setup(setup_script, stop_after="init")
|
| 38 |
+
dist.script_args = [flag, "sdist"]
|
| 39 |
+
dist.parse_command_line() # <- where the log level is set
|
| 40 |
+
log_level = logger.getEffectiveLevel()
|
| 41 |
+
log_level_name = logging.getLevelName(log_level)
|
| 42 |
+
assert log_level_name == expected_level
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def flaky_on_pypy(func):
|
| 46 |
+
@functools.wraps(func)
|
| 47 |
+
def _func():
|
| 48 |
+
try:
|
| 49 |
+
func()
|
| 50 |
+
except AssertionError: # pragma: no cover
|
| 51 |
+
if IS_PYPY:
|
| 52 |
+
msg = "Flaky monkeypatch on PyPy (#4124)"
|
| 53 |
+
pytest.xfail(f"{msg}. Original discussion in #3707, #3709.")
|
| 54 |
+
raise
|
| 55 |
+
|
| 56 |
+
return _func
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
@flaky_on_pypy
def test_patching_does_not_cause_problems():
    # Ensure `dist.log` is only patched if necessary

    # Imports are deliberately local and ordered: _distutils_hack and
    # setuptools.logging must be loaded before distutils so that any
    # patching has already taken place when `dist` is imported.
    import _distutils_hack

    import setuptools.logging

    from distutils import dist

    setuptools.logging.configure()

    if _distutils_hack.enabled():
        # Modern logging infra, no problematic patching.
        assert dist.__file__ is None or "setuptools" in dist.__file__
        assert isinstance(dist.log, logging.Logger)
    else:
        # Stdlib distutils in use: dist.log remains the legacy log *module*.
        assert inspect.ismodule(dist.log)
|
llava/lib/python3.10/site-packages/setuptools/tests/test_namespaces.py
ADDED
|
@@ -0,0 +1,138 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import subprocess
|
| 2 |
+
import sys
|
| 3 |
+
|
| 4 |
+
from setuptools._path import paths_on_pythonpath
|
| 5 |
+
|
| 6 |
+
from . import namespaces
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class TestNamespaces:
    """Integration tests: pkg_resources-style namespace packages installed
    via pip into multiple targets must remain co-importable."""

    def test_mixed_site_and_non_site(self, tmpdir):
        """
        Installing two packages sharing the same namespace, one installed
        to a site dir and the other installed just to a path on PYTHONPATH
        should leave the namespace intact and both packages reachable by
        import.
        """
        # Build two source packages that share the `myns` namespace.
        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
        site_packages = tmpdir / 'site-packages'
        path_packages = tmpdir / 'path-packages'
        targets = site_packages, path_packages
        # use pip to install to the target directory
        install_cmd = [
            sys.executable,
            '-m',
            'pip.__main__',
            'install',
            str(pkg_A),
            '-t',
            str(site_packages),
        ]
        subprocess.check_call(install_cmd)
        # Promote the first target to a real site dir (processes .pth files).
        namespaces.make_site_dir(site_packages)
        install_cmd = [
            sys.executable,
            '-m',
            'pip.__main__',
            'install',
            str(pkg_B),
            '-t',
            str(path_packages),
        ]
        subprocess.check_call(install_cmd)
        # Both halves of the namespace must import in a fresh interpreter.
        try_import = [
            sys.executable,
            '-c',
            'import myns.pkgA; import myns.pkgB',
        ]
        with paths_on_pythonpath(map(str, targets)):
            subprocess.check_call(try_import)

    def test_pkg_resources_import(self, tmpdir):
        """
        Ensure that a namespace package doesn't break on import
        of pkg_resources.
        """
        pkg = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        target = tmpdir / 'packages'
        target.mkdir()
        install_cmd = [
            sys.executable,
            '-m',
            'pip',
            'install',
            '-t',
            str(target),
            str(pkg),
        ]
        # PYTHONPATH is active during the install as well, mirroring the
        # environment in which the regression originally appeared.
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(install_cmd)
        namespaces.make_site_dir(target)
        try_import = [
            sys.executable,
            '-c',
            'import pkg_resources',
        ]
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(try_import)

    def test_namespace_package_installed_and_cwd(self, tmpdir):
        """
        Installing a namespace packages but also having it in the current
        working directory, only one version should take precedence.
        """
        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        target = tmpdir / 'packages'
        # use pip to install to the target directory
        install_cmd = [
            sys.executable,
            '-m',
            'pip.__main__',
            'install',
            str(pkg_A),
            '-t',
            str(target),
        ]
        subprocess.check_call(install_cmd)
        namespaces.make_site_dir(target)

        # ensure that package imports and pkg_resources imports
        pkg_resources_imp = [
            sys.executable,
            '-c',
            'import pkg_resources; import myns.pkgA',
        ]
        # cwd=pkg_A puts the source checkout of the package on the path too.
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(pkg_resources_imp, cwd=str(pkg_A))

    def test_packages_in_the_same_namespace_installed_and_cwd(self, tmpdir):
        """
        Installing one namespace package and also have another in the same
        namespace in the current working directory, both of them must be
        importable.
        """
        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
        target = tmpdir / 'packages'
        # use pip to install to the target directory
        install_cmd = [
            sys.executable,
            '-m',
            'pip.__main__',
            'install',
            str(pkg_A),
            '-t',
            str(target),
        ]
        subprocess.check_call(install_cmd)
        namespaces.make_site_dir(target)

        # ensure that all packages import and pkg_resources imports
        pkg_resources_imp = [
            sys.executable,
            '-c',
            'import pkg_resources; import myns.pkgA; import myns.pkgB',
        ]
        # pkgA comes from the installed target, pkgB from the cwd checkout.
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(pkg_resources_imp, cwd=str(pkg_B))
|
llava/lib/python3.10/site-packages/setuptools/tests/test_packageindex.py
ADDED
|
@@ -0,0 +1,267 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import http.client
|
| 2 |
+
import re
|
| 3 |
+
import urllib.error
|
| 4 |
+
import urllib.request
|
| 5 |
+
from inspect import cleandoc
|
| 6 |
+
|
| 7 |
+
import pytest
|
| 8 |
+
|
| 9 |
+
import setuptools.package_index
|
| 10 |
+
|
| 11 |
+
import distutils.errors
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class TestPackageIndex:
    """URL validation, download dispatch and filename/fragment parsing in
    ``setuptools.package_index.PackageIndex``."""

    def test_regex(self):
        # PYPI_MD5 must recognize a PyPI-style pair of anchors: the package
        # link followed by an "MD5 hash" link.
        hash_url = 'http://other_url?:action=show_md5&'
        hash_url += 'digest=0123456789abcdef0123456789abcdef'
        doc = """
            <a href="http://some_url">Name</a>
            (<a title="MD5 hash"
            href="{hash_url}">md5</a>)
        """.lstrip().format(**locals())
        assert setuptools.package_index.PYPI_MD5.match(doc)

    def test_bad_url_bad_port(self):
        # Port 0 is never connectable; the raised error must mention the URL.
        index = setuptools.package_index.PackageIndex()
        url = 'http://127.0.0.1:0/nonesuch/test_package_index'
        with pytest.raises(Exception, match=re.escape(url)):
            v = index.open_url(url)
            # NOTE(review): unreachable when open_url raises as expected;
            # retained as a sanity check for a non-raising code path.
            assert isinstance(v, urllib.error.HTTPError)

    def test_bad_url_typo(self):
        # issue 16
        # easy_install inquant.contentmirror.plone breaks because of a typo
        # in its home URL
        index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))

        url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'

        with pytest.raises(Exception, match=re.escape(url)):
            v = index.open_url(url)
            # NOTE(review): unreachable when open_url raises; see above.
            assert isinstance(v, urllib.error.HTTPError)

    def test_bad_url_bad_status_line(self):
        index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))

        def _urlopen(*args):
            # Simulate a server replying with a malformed HTTP status line.
            raise http.client.BadStatusLine('line')

        index.opener = _urlopen
        url = 'http://example.com'
        with pytest.raises(Exception, match=r'line'):
            index.open_url(url)

    def test_bad_url_double_scheme(self):
        """
        A bad URL with a double scheme should raise a DistutilsError.
        """
        index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))

        # issue 20
        url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
        try:
            index.open_url(url)
        except distutils.errors.DistutilsError as error:
            # The wrapped message differs by platform/resolver; accept any
            # of the known name-resolution failure texts.
            msg = str(error)
            assert (
                'nonnumeric port' in msg
                or 'getaddrinfo failed' in msg
                or 'Name or service not known' in msg
            )
            return
        raise RuntimeError("Did not raise")

    def test_url_ok(self):
        index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))
        url = 'file:///tmp/test_package_index'
        # file: URLs are always acceptable regardless of the hosts whitelist.
        assert index.url_ok(url, True)

    def test_parse_bdist_wininst(self):
        # parse() -> (base name, python version or None, platform)
        parse = setuptools.package_index.parse_bdist_wininst

        actual = parse('reportlab-2.5.win32-py2.4.exe')
        expected = 'reportlab-2.5', '2.4', 'win32'
        assert actual == expected

        actual = parse('reportlab-2.5.win32.exe')
        expected = 'reportlab-2.5', None, 'win32'
        assert actual == expected

        actual = parse('reportlab-2.5.win-amd64-py2.7.exe')
        expected = 'reportlab-2.5', '2.7', 'win-amd64'
        assert actual == expected

        actual = parse('reportlab-2.5.win-amd64.exe')
        expected = 'reportlab-2.5', None, 'win-amd64'
        assert actual == expected

    def test__vcs_split_rev_from_url(self):
        """
        Test the basic usage of _vcs_split_rev_from_url
        """
        vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url
        url, rev = vsrfu('https://example.com/bar@2995')
        assert url == 'https://example.com/bar'
        assert rev == '2995'

    def test_local_index(self, tmpdir):
        """
        local_open should be able to read an index from the file system.
        """
        index_file = tmpdir / 'index.html'
        with index_file.open('w') as f:
            f.write('<div>content</div>')
        # A trailing slash makes local_open serve the directory listing
        # (which includes index.html contents).
        url = 'file:' + urllib.request.pathname2url(str(tmpdir)) + '/'
        res = setuptools.package_index.local_open(url)
        assert 'content' in res.read()

    def test_egg_fragment(self):
        """
        EGG fragments must comply to PEP 440
        """
        # Build a matrix of PEP 440 version strings: epoch x release x
        # (pre/post/dev) x local segment.  Each entry pairs the raw spelling
        # with its canonical form (local separators normalize to '.').
        epoch = [
            '',
            '1!',
        ]
        releases = [
            '0',
            '0.0',
            '0.0.0',
        ]
        pre = [
            'a0',
            'b0',
            'rc0',
        ]
        post = ['.post0']
        dev = [
            '.dev0',
        ]
        local = [
            ('', ''),
            ('+ubuntu.0', '+ubuntu.0'),
            ('+ubuntu-0', '+ubuntu.0'),
            ('+ubuntu_0', '+ubuntu.0'),
        ]
        versions = [
            [''.join([e, r, p, loc]) for loc in locs]
            for e in epoch
            for r in releases
            for p in sum([pre, post, dev], [''])
            for locs in local
        ]
        for v, vc in versions:
            dists = list(
                setuptools.package_index.distros_for_url(
                    'http://example.com/example-foo.zip#egg=example-foo-' + v
                )
            )
            # dists[0] is the bare filename interpretation (no version);
            # dists[1] carries the canonicalized egg-fragment version.
            assert dists[0].version == ''
            assert dists[1].version == vc

    def test_download_git_with_rev(self, tmp_path, fp):
        # `fp` (pytest-subprocess) fakes the git invocations: a clone
        # followed by a checkout of the requested revision.
        url = 'git+https://github.example/group/project@master#egg=foo'
        index = setuptools.package_index.PackageIndex()

        expected_dir = tmp_path / 'project@master'
        fp.register([
            'git',
            'clone',
            '--quiet',
            'https://github.example/group/project',
            expected_dir,
        ])
        fp.register(['git', '-C', expected_dir, 'checkout', '--quiet', 'master'])

        result = index.download(url, tmp_path)

        assert result == str(expected_dir)
        assert len(fp.calls) == 2

    def test_download_git_no_rev(self, tmp_path, fp):
        # Without an '@rev' suffix only the clone should be performed.
        url = 'git+https://github.example/group/project#egg=foo'
        index = setuptools.package_index.PackageIndex()

        expected_dir = tmp_path / 'project'
        fp.register([
            'git',
            'clone',
            '--quiet',
            'https://github.example/group/project',
            expected_dir,
        ])
        index.download(url, tmp_path)

    def test_download_svn(self, tmp_path):
        # SVN support was removed; a svn+ URL must fail loudly.
        url = 'svn+https://svn.example/project#egg=foo'
        index = setuptools.package_index.PackageIndex()

        msg = r".*SVN download is not supported.*"
        with pytest.raises(distutils.errors.DistutilsError, match=msg):
            index.download(url, tmp_path)
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
class TestContentCheckers:
    """Validation of ``HashChecker``: hash fragments parsed from URLs and
    checked against fed content."""

    @staticmethod
    def _checker_for(url):
        # One place to build a checker keeps each test to its essentials.
        return setuptools.package_index.HashChecker.from_url(url)

    def test_md5(self):
        validator = self._checker_for(
            'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478'
        )
        validator.feed('You should probably not be using MD5'.encode('ascii'))
        assert validator.hash.hexdigest() == 'f12895fdffbd45007040d2e44df98478'
        assert validator.is_valid()

    def test_other_fragment(self):
        "Content checks should succeed silently if no hash is present"
        validator = self._checker_for(
            'http://foo/bar#something%20completely%20different'
        )
        validator.feed('anything'.encode('ascii'))
        assert validator.is_valid()

    def test_blank_md5(self):
        "Content checks should succeed if a hash is empty"
        validator = self._checker_for('http://foo/bar#md5=')
        validator.feed('anything'.encode('ascii'))
        assert validator.is_valid()

    def test_get_hash_name_md5(self):
        validator = self._checker_for(
            'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478'
        )
        assert validator.hash_name == 'md5'

    def test_report(self):
        validator = self._checker_for(
            'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478'
        )
        rendered = validator.report(lambda x: x, 'My message about %s')
        assert rendered == 'My message about md5'
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
class TestPyPIConfig:
    def test_percent_in_password(self, tmp_home_dir):
        """A literal '%' in a .pypirc password must survive parsing
        (configparser interpolation would otherwise choke on it)."""
        rc_path = tmp_home_dir / '.pypirc'
        rc_path.write_text(
            cleandoc(
                """
                [pypi]
                repository=https://pypi.org
                username=jaraco
                password=pity%
                """
            ),
            encoding="utf-8",
        )
        config = setuptools.package_index.PyPIConfig()
        credentials = config.creds_by_repository['https://pypi.org']
        assert credentials.username == 'jaraco'
        assert credentials.password == 'pity%'
|
| 260 |
+
|
| 261 |
+
|
| 262 |
+
@pytest.mark.timeout(1)
def test_REL_DoS():
    """
    REL should not hang on a contrived attack string.
    """
    # Regression guard against catastrophic regex backtracking (ReDoS):
    # a 4096-space run after '< rel=' must complete within the 1s timeout.
    setuptools.package_index.REL.search('< rel=' + ' ' * 2**12)
|
llava/lib/python3.10/site-packages/setuptools/tests/test_sandbox.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""develop tests"""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import types
|
| 5 |
+
|
| 6 |
+
import pytest
|
| 7 |
+
|
| 8 |
+
import pkg_resources
|
| 9 |
+
import setuptools.sandbox
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class TestSandbox:
    """Tests for setuptools.sandbox: directory confinement and script
    execution quirks."""

    def test_devnull(self, tmpdir):
        """Writing to os.devnull must be permitted inside the sandbox."""
        with setuptools.sandbox.DirectorySandbox(str(tmpdir)):
            # Bug fix: the writer returned by _file_writer was previously
            # built but never invoked, so this test exercised nothing.
            # Calling it actually checks the sandbox's devnull exemption.
            self._file_writer(os.devnull)()

    @staticmethod
    def _file_writer(path):
        """Return a zero-arg callable that writes 'xxx' to *path*."""

        def do_write():
            with open(path, 'w', encoding="utf-8") as f:
                f.write('xxx')

        return do_write

    def test_setup_py_with_BOM(self):
        """
        It should be possible to execute a setup.py with a Byte Order Mark
        """
        # script-with-bom.py sets `result = 'passed'` when executed.
        target = pkg_resources.resource_filename(__name__, 'script-with-bom.py')
        namespace = types.ModuleType('namespace')
        setuptools.sandbox._execfile(target, vars(namespace))
        assert namespace.result == 'passed'

    def test_setup_py_with_CRLF(self, tmpdir):
        # _execfile must tolerate Windows (CRLF) line endings; success is
        # simply executing without raising.
        setup_py = tmpdir / 'setup.py'
        with setup_py.open('wb') as stream:
            stream.write(b'"degenerate script"\r\n')
        setuptools.sandbox._execfile(str(setup_py), globals())
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class TestExceptionSaver:
    """ExceptionSaver pickles an exception raised inside its context so it
    can be re-raised later via ``resume()`` (used across sandbox module
    save/restore boundaries)."""

    def test_exception_trapped(self):
        # The context manager swallows the exception at the `with` boundary.
        with setuptools.sandbox.ExceptionSaver():
            raise ValueError("details")

    def test_exception_resumed(self):
        with setuptools.sandbox.ExceptionSaver() as saved_exc:
            raise ValueError("details")

        # resume() re-raises an equivalent exception later, on demand.
        with pytest.raises(ValueError) as caught:
            saved_exc.resume()

        assert isinstance(caught.value, ValueError)
        assert str(caught.value) == 'details'

    def test_exception_reconstructed(self):
        orig_exc = ValueError("details")

        with setuptools.sandbox.ExceptionSaver() as saved_exc:
            raise orig_exc

        with pytest.raises(ValueError) as caught:
            saved_exc.resume()

        assert isinstance(caught.value, ValueError)
        # The resumed exception is a pickle round-trip copy, not the
        # original object.
        assert caught.value is not orig_exc

    def test_no_exception_passes_quietly(self):
        with setuptools.sandbox.ExceptionSaver() as saved_exc:
            pass

        # With nothing saved, resume() is a no-op.
        saved_exc.resume()

    def test_unpickleable_exception(self):
        class CantPickleThis(Exception):
            "This Exception is unpickleable because it's not in globals"

            def __repr__(self) -> str:
                return f'CantPickleThis{self.args!r}'

        with setuptools.sandbox.ExceptionSaver() as saved_exc:
            raise CantPickleThis('detail')

        # Unpickleable exceptions degrade to UnpickleableException whose
        # message is the repr of the original.
        with pytest.raises(setuptools.sandbox.UnpickleableException) as caught:
            saved_exc.resume()

        assert str(caught.value) == "CantPickleThis('detail',)"

    def test_unpickleable_exception_when_hiding_setuptools(self):
        """
        As revealed in #440, an infinite recursion can occur if an unpickleable
        exception while setuptools is hidden. Ensure this doesn't happen.
        """

        class ExceptionUnderTest(Exception):
            """
            An unpickleable exception (not in globals).
            """

        with pytest.raises(setuptools.sandbox.UnpickleableException) as caught:
            with setuptools.sandbox.save_modules():
                setuptools.sandbox.hide_setuptools()
                raise ExceptionUnderTest

        (msg,) = caught.value.args
        assert msg == 'ExceptionUnderTest()'

    def test_sandbox_violation_raised_hiding_setuptools(self, tmpdir):
        """
        When in a sandbox with setuptools hidden, a SandboxViolation
        should reflect a proper exception and not be wrapped in
        an UnpickleableException.
        """

        def write_file():
            "Trigger a SandboxViolation by writing outside the sandbox"
            with open('/etc/foo', 'w', encoding="utf-8"):
                pass

        with pytest.raises(setuptools.sandbox.SandboxViolation) as caught:
            with setuptools.sandbox.save_modules():
                setuptools.sandbox.hide_setuptools()
                with setuptools.sandbox.DirectorySandbox(str(tmpdir)):
                    write_file()

        # SandboxViolation carries (operation, args, kwargs) and renders
        # them all in its message.
        cmd, args, kwargs = caught.value.args
        assert cmd == 'open'
        assert args == ('/etc/foo', 'w')
        assert kwargs == {"encoding": "utf-8"}

        msg = str(caught.value)
        assert 'open' in msg
        assert "('/etc/foo', 'w')" in msg
        assert "{'encoding': 'utf-8'}" in msg
|
llava/lib/python3.10/site-packages/setuptools/tests/test_sdist.py
ADDED
|
@@ -0,0 +1,972 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""sdist tests"""
|
| 2 |
+
|
| 3 |
+
import contextlib
|
| 4 |
+
import io
|
| 5 |
+
import logging
|
| 6 |
+
import os
|
| 7 |
+
import pathlib
|
| 8 |
+
import sys
|
| 9 |
+
import tarfile
|
| 10 |
+
import tempfile
|
| 11 |
+
import unicodedata
|
| 12 |
+
from inspect import cleandoc
|
| 13 |
+
from pathlib import Path
|
| 14 |
+
from unittest import mock
|
| 15 |
+
|
| 16 |
+
import jaraco.path
|
| 17 |
+
import pytest
|
| 18 |
+
|
| 19 |
+
from setuptools import Command, SetuptoolsDeprecationWarning
|
| 20 |
+
from setuptools._importlib import metadata
|
| 21 |
+
from setuptools.command.egg_info import manifest_maker
|
| 22 |
+
from setuptools.command.sdist import sdist
|
| 23 |
+
from setuptools.dist import Distribution
|
| 24 |
+
from setuptools.extension import Extension
|
| 25 |
+
from setuptools.tests import fail_on_ascii
|
| 26 |
+
|
| 27 |
+
from .text import Filenames
|
| 28 |
+
|
| 29 |
+
import distutils
|
| 30 |
+
from distutils.core import run_setup
|
| 31 |
+
|
| 32 |
+
# Attributes of the sample project exercised throughout the sdist tests.
SETUP_ATTRS = {
    'name': 'sdist_test',
    'version': '0.0',
    'packages': ['sdist_test'],
    'package_data': {'sdist_test': ['*.txt']},
    'data_files': [("data", [os.path.join("d", "e.dat")])],
}

# A setup.py script forwarding SETUP_ATTRS verbatim to setuptools.setup().
SETUP_PY = f"""\
from setuptools import setup

setup(**{SETUP_ATTRS!r})
"""

# Sample C extension: one source file plus a header listed via `depends`.
EXTENSION = Extension(
    name="sdist_test.f",
    sources=[os.path.join("sdist_test", "f.c")],
    depends=[os.path.join("sdist_test", "f.h")],
)
# Every file belonging to EXTENSION, sources and headers alike.
EXTENSION_SOURCES = EXTENSION.sources + EXTENSION.depends
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
@contextlib.contextmanager
def quiet():
    """Silence stdout and stderr for the duration of the context.

    Both streams are swapped for throwaway StringIO buffers and restored
    unconditionally on exit, even if the body raises.
    """
    saved = (sys.stdout, sys.stderr)
    sys.stdout = io.StringIO()
    sys.stderr = io.StringIO()
    try:
        yield
    finally:
        sys.stdout, sys.stderr = saved
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
# Convert to POSIX path
|
| 65 |
+
def posix(path):
    """Convert an OS-specific path (str or bytes) to POSIX form.

    Replaces every occurrence of ``os.sep`` with '/'; bytes input gets a
    bytes result, str input a str result.
    """
    if isinstance(path, str):
        return path.replace(os.sep, '/')
    return path.replace(os.sep.encode('ascii'), b'/')
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
# HFS Plus uses decomposed UTF-8
def decompose(path):
    """Return *path* normalized to NFD (decomposed) form.

    ``str`` input is normalized directly.  ``bytes`` input is round-tripped
    through UTF-8; if it is not valid UTF-8 it is returned unchanged.
    """
    if isinstance(path, str):
        return unicodedata.normalize('NFD', path)
    try:
        decoded = path.decode('utf-8')
    except UnicodeError:
        return path  # Not UTF-8
    return unicodedata.normalize('NFD', decoded).encode('utf-8')
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def read_all_bytes(filename):
    """Return the complete binary contents of *filename*."""
    with open(filename, 'rb') as stream:
        return stream.read()
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def latin1_fail():
    """Return True when the filesystem rejects Latin-1 encoded filenames.

    Probes by creating and removing a temporary file whose suffix is the
    Latin-1 sample name; any failure means such filenames are unsupported.
    Used as the condition of ``fail_on_latin1_encoded_filenames`` below.
    """
    try:
        desc, filename = tempfile.mkstemp(suffix=Filenames.latin_1)
        os.close(desc)
        os.remove(filename)
    except Exception:
        return True
    # Previously fell off the end (implicit None); an explicit False is
    # clearer and equally falsy for the xfail condition.
    return False
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
# xfail marker for tests that need Latin-1 encodable filenames; the probe
# runs once at import time.
fail_on_latin1_encoded_filenames = pytest.mark.xfail(
    latin1_fail(),
    reason="System does not support latin-1 filenames",
)

# Some tests mutate shared state (cwd, files) and are unsafe under
# pytest-xdist parallel workers; the condition string is evaluated lazily
# by pytest.
skip_under_xdist = pytest.mark.skipif(
    "os.environ.get('PYTEST_XDIST_WORKER')",
    reason="pytest-dev/pytest-xdist#843",
)
# Skip when running against the stdlib distutils instead of the
# setuptools-vendored copy.
skip_under_stdlib_distutils = pytest.mark.skipif(
    not distutils.__package__.startswith('setuptools'),
    reason="the test is not supported with stdlib distutils",
)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def touch(path):
    """Create an empty file at *path* (str or bytes) and return the path.

    Truncates the file if it already exists (``'wb'`` mode).
    """
    # Use a context manager rather than open(...).close() so the handle is
    # released even if an interposed exception occurs.
    with open(path, 'wb'):
        pass
    return path
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def symlink_or_skip_test(src, dst):
    """Symlink *dst* -> *src*, or skip the current test when unsupported.

    Returns *dst* on success; on platforms without symlink support the
    test is skipped (``pytest.skip`` raises, so ``None`` is never really
    returned).
    """
    try:
        os.symlink(src, dst)
        return dst
    except (OSError, NotImplementedError):
        pytest.skip("symlink not supported in OS")
        return None
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
class TestSdistTest:
    """Behavioral tests for the ``sdist`` command on a synthetic project.

    The autouse ``source_dir`` fixture builds the project described by
    SETUP_ATTRS in a temp directory and chdirs into it, so every test runs
    against a fresh project root.
    """

    @pytest.fixture(autouse=True)
    def source_dir(self, tmpdir):
        """Create the sample project tree and yield its root as the cwd."""
        tmpdir = tmpdir / "project_root"
        tmpdir.mkdir()

        (tmpdir / 'setup.py').write_text(SETUP_PY, encoding='utf-8')

        # Set up the rest of the test package
        test_pkg = tmpdir / 'sdist_test'
        test_pkg.mkdir()
        data_folder = tmpdir / 'd'
        data_folder.mkdir()
        # *.rst was not included in package_data, so c.rst should not be
        # automatically added to the manifest when not under version control
        for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
            touch(test_pkg / fname)
        touch(data_folder / 'e.dat')
        # C sources are not included by default, but they will be,
        # if an extension module uses them as sources or depends
        for fname in EXTENSION_SOURCES:
            touch(tmpdir / fname)

        with tmpdir.as_cwd():
            yield tmpdir

    def assert_package_data_in_manifest(self, cmd):
        # Shared assertions: package_data/data_files entries are picked up,
        # while c.rst (unmatched by package_data globs) is not.
        manifest = cmd.filelist.files
        assert os.path.join('sdist_test', 'a.txt') in manifest
        assert os.path.join('sdist_test', 'b.txt') in manifest
        assert os.path.join('sdist_test', 'c.rst') not in manifest
        assert os.path.join('d', 'e.dat') in manifest

    def setup_with_extension(self):
        """Run ``sdist`` on the sample project with EXTENSION registered."""
        setup_attrs = {**SETUP_ATTRS, 'ext_modules': [EXTENSION]}

        dist = Distribution(setup_attrs)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()

        with quiet():
            cmd.run()

        return cmd

    def test_package_data_in_sdist(self):
        """Regression test for pull request #4: ensures that files listed in
        package_data are included in the manifest even if they're not added to
        version control.
        """

        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()

        with quiet():
            cmd.run()

        self.assert_package_data_in_manifest(cmd)

    def test_package_data_and_include_package_data_in_sdist(self):
        """
        Ensure package_data and include_package_data work
        together.
        """
        setup_attrs = {**SETUP_ATTRS, 'include_package_data': True}
        assert setup_attrs['package_data']

        dist = Distribution(setup_attrs)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()

        with quiet():
            cmd.run()

        self.assert_package_data_in_manifest(cmd)

    def test_extension_sources_in_sdist(self):
        """
        Ensure that the files listed in Extension.sources and Extension.depends
        are automatically included in the manifest.
        """
        cmd = self.setup_with_extension()
        self.assert_package_data_in_manifest(cmd)
        manifest = cmd.filelist.files
        for path in EXTENSION_SOURCES:
            assert path in manifest

    def test_missing_extension_sources(self):
        """
        Similar to test_extension_sources_in_sdist but the referenced files don't exist.
        Missing files should not be included in distribution (with no error raised).
        """
        for path in EXTENSION_SOURCES:
            os.remove(path)

        cmd = self.setup_with_extension()
        self.assert_package_data_in_manifest(cmd)
        manifest = cmd.filelist.files
        for path in EXTENSION_SOURCES:
            assert path not in manifest

    def test_symlinked_extension_sources(self):
        """
        Similar to test_extension_sources_in_sdist but the referenced files are
        instead symbolic links to project-local files. Referenced file paths
        should be included. Symlink targets themselves should NOT be included.
        """
        symlinked = []
        for path in EXTENSION_SOURCES:
            base, ext = os.path.splitext(path)
            # NOTE(review): ``ext`` keeps its leading dot, so targets end up
            # named e.g. ``f_target..c`` — harmless for this test.
            target = base + "_target." + ext

            os.rename(path, target)
            symlink_or_skip_test(os.path.basename(target), path)
            symlinked.append(target)

        cmd = self.setup_with_extension()
        self.assert_package_data_in_manifest(cmd)
        manifest = cmd.filelist.files
        for path in EXTENSION_SOURCES:
            assert path in manifest
        for path in symlinked:
            assert path not in manifest

    # Map of "why this depends-path is invalid" -> factory producing such a
    # path; used to parametrize test_invalid_extension_depends below.
    _INVALID_PATHS = {
        "must be relative": lambda: (
            os.path.abspath(os.path.join("sdist_test", "f.h"))
        ),
        "can't have `..` segments": lambda: (
            os.path.join("sdist_test", "..", "sdist_test", "f.h")
        ),
        "doesn't exist": lambda: (
            os.path.join("sdist_test", "this_file_does_not_exist.h")
        ),
        "must be inside the project root": lambda: (
            symlink_or_skip_test(
                touch(os.path.join("..", "outside_of_project_root.h")),
                "symlink.h",
            )
        ),
    }

    @skip_under_stdlib_distutils
    @pytest.mark.parametrize("reason", _INVALID_PATHS.keys())
    def test_invalid_extension_depends(self, reason, caplog):
        """
        Due to backwards compatibility reasons, `Extension.depends` should accept
        invalid/weird paths, but then ignore them when building a sdist.

        This test verifies that the source distribution is still built
        successfully with such paths, but that instead of adding these paths to
        the manifest, we emit an informational message, notifying the user that
        the invalid path won't be automatically included.
        """
        invalid_path = self._INVALID_PATHS[reason]()
        extension = Extension(
            name="sdist_test.f",
            sources=[],
            depends=[invalid_path],
        )
        setup_attrs = {**SETUP_ATTRS, 'ext_modules': [extension]}

        dist = Distribution(setup_attrs)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()

        with quiet(), caplog.at_level(logging.INFO):
            cmd.run()

        self.assert_package_data_in_manifest(cmd)
        manifest = cmd.filelist.files
        assert invalid_path not in manifest

        # Exactly one INFO record mentioning the path, and it must state
        # the reason the path was rejected.
        expected_message = [
            message
            for (logger, level, message) in caplog.record_tuples
            if (
                logger == "root"  #
                and level == logging.INFO  #
                and invalid_path in message  #
            )
        ]
        assert len(expected_message) == 1
        (expected_message,) = expected_message
        assert reason in expected_message

    def test_custom_build_py(self):
        """
        Ensure projects defining custom build_py don't break
        when creating sdists (issue #2849)
        """
        from distutils.command.build_py import build_py as OrigBuildPy

        using_custom_command_guard = mock.Mock()

        class CustomBuildPy(OrigBuildPy):
            """
            Some projects have custom commands inheriting from `distutils`
            """

            def get_data_files(self):
                using_custom_command_guard()
                return super().get_data_files()

        setup_attrs = {**SETUP_ATTRS, 'include_package_data': True}
        assert setup_attrs['package_data']

        dist = Distribution(setup_attrs)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()

        # Make sure we use the custom command
        cmd.cmdclass = {'build_py': CustomBuildPy}
        cmd.distribution.cmdclass = {'build_py': CustomBuildPy}
        assert cmd.distribution.get_command_class('build_py') == CustomBuildPy

        msg = "setuptools instead of distutils"
        with quiet(), pytest.warns(SetuptoolsDeprecationWarning, match=msg):
            cmd.run()

        using_custom_command_guard.assert_called()
        self.assert_package_data_in_manifest(cmd)

    def test_setup_py_exists(self):
        # The real setup.py (written by the fixture) must be in the manifest
        # even though script_name points elsewhere.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'foo.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()

        with quiet():
            cmd.run()

        manifest = cmd.filelist.files
        assert 'setup.py' in manifest

    def test_setup_py_missing(self):
        # Without a setup.py on disk, none should be listed in the manifest.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'foo.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()

        if os.path.exists("setup.py"):
            os.remove("setup.py")
        with quiet():
            cmd.run()

        manifest = cmd.filelist.files
        assert 'setup.py' not in manifest

    def test_setup_py_excluded(self):
        # An explicit MANIFEST.in exclusion must win over the default
        # inclusion of setup.py.
        with open("MANIFEST.in", "w", encoding="utf-8") as manifest_file:
            manifest_file.write("exclude setup.py")

        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'foo.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()

        with quiet():
            cmd.run()

        manifest = cmd.filelist.files
        assert 'setup.py' not in manifest

    def test_defaults_case_sensitivity(self, source_dir):
        """
        Make sure default files (README.*, etc.) are added in a case-sensitive
        way to avoid problems with packages built on Windows.
        """

        touch(source_dir / 'readme.rst')
        touch(source_dir / 'SETUP.cfg')

        dist = Distribution(SETUP_ATTRS)
        # the extension deliberately capitalized for this test
        # to make sure the actual filename (not capitalized) gets added
        # to the manifest
        dist.script_name = 'setup.PY'
        cmd = sdist(dist)
        cmd.ensure_finalized()

        with quiet():
            cmd.run()

        # lowercase all names so we can test in a
        # case-insensitive way to make sure the files
        # are not included.
        # NOTE(review): ``map`` yields a one-shot iterator; the first
        # ``not in`` check exhausts it, so the later checks scan an empty
        # iterator and pass vacuously — consider ``list(...)`` here.
        manifest = map(lambda x: x.lower(), cmd.filelist.files)
        assert 'readme.rst' not in manifest, manifest
        assert 'setup.py' not in manifest, manifest
        assert 'setup.cfg' not in manifest, manifest

    def test_exclude_dev_only_cache_folders(self, source_dir):
        included = {
            # Emulate problem in https://github.com/pypa/setuptools/issues/4601
            "MANIFEST.in": (
                "global-include LICEN[CS]E* COPYING* NOTICE* AUTHORS*\n"
                "global-include *.txt\n"
            ),
            # For the sake of being conservative and limiting unforeseen side-effects
            # we just exclude dev-only cache folders at the root of the repository:
            "test/.venv/lib/python3.9/site-packages/bar-2.dist-info/AUTHORS.rst": "",
            "src/.nox/py/lib/python3.12/site-packages/bar-2.dist-info/COPYING.txt": "",
            "doc/.tox/default/lib/python3.11/site-packages/foo-4.dist-info/LICENSE": "",
            # Let's test against false positives with similarly named files:
            ".venv-requirements.txt": "",
            ".tox-coveragerc.txt": "",
            ".noxy/coveragerc.txt": "",
        }

        excluded = {
            # .tox/.nox/.venv are well-know folders present at the root of Python repos
            # and therefore should be excluded
            ".tox/release/lib/python3.11/site-packages/foo-4.dist-info/LICENSE": "",
            ".nox/py/lib/python3.12/site-packages/bar-2.dist-info/COPYING.txt": "",
            ".venv/lib/python3.9/site-packages/bar-2.dist-info/AUTHORS.rst": "",
        }

        for file, content in {**excluded, **included}.items():
            Path(source_dir, file).parent.mkdir(parents=True, exist_ok=True)
            Path(source_dir, file).write_text(content, encoding="utf-8")

        cmd = self.setup_with_extension()
        self.assert_package_data_in_manifest(cmd)
        # Normalize separators so the expectations are OS-independent.
        manifest = {f.replace(os.sep, '/') for f in cmd.filelist.files}
        for path in excluded:
            assert os.path.exists(path)
            assert path not in manifest, (path, manifest)
        for path in included:
            assert os.path.exists(path)
            assert path in manifest, (path, manifest)

    @fail_on_ascii
    def test_manifest_is_written_with_utf8_encoding(self):
        # Test for #303.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        mm = manifest_maker(dist)
        mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
        os.mkdir('sdist_test.egg-info')

        # UTF-8 filename
        filename = os.path.join('sdist_test', 'smörbröd.py')

        # Must create the file or it will get stripped.
        touch(filename)

        # Add UTF-8 filename and write manifest
        with quiet():
            mm.run()
            mm.filelist.append(filename)
            mm.write_manifest()

        contents = read_all_bytes(mm.manifest)

        # The manifest should be UTF-8 encoded
        u_contents = contents.decode('UTF-8')

        # The manifest should contain the UTF-8 filename
        assert posix(filename) in u_contents

    @fail_on_ascii
    def test_write_manifest_allows_utf8_filenames(self):
        # Test for #303.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        mm = manifest_maker(dist)
        mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
        os.mkdir('sdist_test.egg-info')

        filename = os.path.join(b'sdist_test', Filenames.utf_8)

        # Must touch the file or risk removal
        touch(filename)

        # Add filename and write manifest
        with quiet():
            mm.run()
            u_filename = filename.decode('utf-8')
            mm.filelist.files.append(u_filename)
            # Re-write manifest
            mm.write_manifest()

        contents = read_all_bytes(mm.manifest)

        # The manifest should be UTF-8 encoded
        contents.decode('UTF-8')

        # The manifest should contain the UTF-8 filename
        assert posix(filename) in contents

        # The filelist should have been updated as well
        assert u_filename in mm.filelist.files

    @skip_under_xdist
    def test_write_manifest_skips_non_utf8_filenames(self):
        """
        Files that cannot be encoded to UTF-8 (specifically, those that
        weren't originally successfully decoded and have surrogate
        escapes) should be omitted from the manifest.
        See https://bitbucket.org/tarek/distribute/issue/303 for history.
        """
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        mm = manifest_maker(dist)
        mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
        os.mkdir('sdist_test.egg-info')

        # Latin-1 filename
        filename = os.path.join(b'sdist_test', Filenames.latin_1)

        # Add filename with surrogates and write manifest
        with quiet():
            mm.run()
            u_filename = filename.decode('utf-8', 'surrogateescape')
            mm.filelist.append(u_filename)
            # Re-write manifest
            mm.write_manifest()

        contents = read_all_bytes(mm.manifest)

        # The manifest should be UTF-8 encoded
        contents.decode('UTF-8')

        # The Latin-1 filename should have been skipped
        assert posix(filename) not in contents

        # The filelist should have been updated as well
        assert u_filename not in mm.filelist.files

    @fail_on_ascii
    def test_manifest_is_read_with_utf8_encoding(self):
        # Test for #303.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()

        # Create manifest
        with quiet():
            cmd.run()

        # Add UTF-8 filename to manifest
        filename = os.path.join(b'sdist_test', Filenames.utf_8)
        cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
        manifest = open(cmd.manifest, 'ab')
        manifest.write(b'\n' + filename)
        manifest.close()

        # The file must exist to be included in the filelist
        touch(filename)

        # Re-read manifest
        cmd.filelist.files = []
        with quiet():
            cmd.read_manifest()

        # The filelist should contain the UTF-8 filename
        filename = filename.decode('utf-8')
        assert filename in cmd.filelist.files

    @fail_on_latin1_encoded_filenames
    def test_read_manifest_skips_non_utf8_filenames(self):
        # Test for #303.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()

        # Create manifest
        with quiet():
            cmd.run()

        # Add Latin-1 filename to manifest
        filename = os.path.join(b'sdist_test', Filenames.latin_1)
        cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
        manifest = open(cmd.manifest, 'ab')
        manifest.write(b'\n' + filename)
        manifest.close()

        # The file must exist to be included in the filelist
        touch(filename)

        # Re-read manifest
        cmd.filelist.files = []
        with quiet():
            cmd.read_manifest()

        # The Latin-1 filename should have been skipped
        filename = filename.decode('latin-1')
        assert filename not in cmd.filelist.files

    @fail_on_ascii
    @fail_on_latin1_encoded_filenames
    def test_sdist_with_utf8_encoded_filename(self):
        # Test for #303.
        dist = Distribution(self.make_strings(SETUP_ATTRS))
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()

        filename = os.path.join(b'sdist_test', Filenames.utf_8)
        touch(filename)

        with quiet():
            cmd.run()

        if sys.platform == 'darwin':
            filename = decompose(filename)

        fs_enc = sys.getfilesystemencoding()

        if sys.platform == 'win32':
            if fs_enc == 'cp1252':
                # Python mangles the UTF-8 filename
                filename = filename.decode('cp1252')
                assert filename in cmd.filelist.files
            else:
                filename = filename.decode('mbcs')
                assert filename in cmd.filelist.files
        else:
            filename = filename.decode('utf-8')
            assert filename in cmd.filelist.files

    @classmethod
    def make_strings(cls, item):
        """Recursively convert every leaf of nested dicts/lists to ``str``."""
        if isinstance(item, dict):
            return {key: cls.make_strings(value) for key, value in item.items()}
        if isinstance(item, list):
            return list(map(cls.make_strings, item))
        return str(item)

    @fail_on_latin1_encoded_filenames
    @skip_under_xdist
    def test_sdist_with_latin1_encoded_filename(self):
        # Test for #303.
        dist = Distribution(self.make_strings(SETUP_ATTRS))
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()

        # Latin-1 filename
        filename = os.path.join(b'sdist_test', Filenames.latin_1)
        touch(filename)
        assert os.path.isfile(filename)

        with quiet():
            cmd.run()

        # not all windows systems have a default FS encoding of cp1252
        if sys.platform == 'win32':
            # Latin-1 is similar to Windows-1252 however
            # on mbcs filesys it is not in latin-1 encoding
            fs_enc = sys.getfilesystemencoding()
            if fs_enc != 'mbcs':
                fs_enc = 'latin-1'
            filename = filename.decode(fs_enc)

            assert filename in cmd.filelist.files
        else:
            # The Latin-1 filename should have been skipped
            filename = filename.decode('latin-1')
            assert filename not in cmd.filelist.files

    # Config snippets whose ``file:`` directives reference project files;
    # those referenced files must end up in the sdist automatically.
    _EXAMPLE_DIRECTIVES = {
        "setup.cfg - long_description and version": """
            [metadata]
            name = testing
            version = file: src/VERSION.txt
            license_files = DOWHATYOUWANT
            long_description = file: README.rst, USAGE.rst
            """,
        "pyproject.toml - static readme/license files and dynamic version": """
            [project]
            name = "testing"
            readme = "USAGE.rst"
            license = {file = "DOWHATYOUWANT"}
            dynamic = ["version"]
            [tool.setuptools.dynamic]
            version = {file = ["src/VERSION.txt"]}
            """,
        "pyproject.toml - directive with str instead of list": """
            [project]
            name = "testing"
            readme = "USAGE.rst"
            license = {file = "DOWHATYOUWANT"}
            dynamic = ["version"]
            [tool.setuptools.dynamic]
            version = {file = "src/VERSION.txt"}
            """,
    }

    @pytest.mark.parametrize("config", _EXAMPLE_DIRECTIVES.keys())
    def test_add_files_referenced_by_config_directives(self, source_dir, config):
        # The parametrized key encodes "<config filename> - <description>".
        config_file, _, _ = config.partition(" - ")
        config_text = self._EXAMPLE_DIRECTIVES[config]
        (source_dir / 'src').mkdir()
        (source_dir / 'src/VERSION.txt').write_text("0.42", encoding="utf-8")
        (source_dir / 'README.rst').write_text("hello world!", encoding="utf-8")
        (source_dir / 'USAGE.rst').write_text("hello world!", encoding="utf-8")
        (source_dir / 'DOWHATYOUWANT').write_text("hello world!", encoding="utf-8")
        (source_dir / config_file).write_text(config_text, encoding="utf-8")

        dist = Distribution({"packages": []})
        dist.script_name = 'setup.py'
        dist.parse_config_files()

        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()

        assert (
            'src/VERSION.txt' in cmd.filelist.files
            or 'src\\VERSION.txt' in cmd.filelist.files
        )
        assert 'USAGE.rst' in cmd.filelist.files
        assert 'DOWHATYOUWANT' in cmd.filelist.files
        assert '/' not in cmd.filelist.files
        assert '\\' not in cmd.filelist.files

    def test_pyproject_toml_in_sdist(self, source_dir):
        """
        Check if pyproject.toml is included in source distribution if present
        """
        touch(source_dir / 'pyproject.toml')
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        manifest = cmd.filelist.files
        assert 'pyproject.toml' in manifest

    def test_pyproject_toml_excluded(self, source_dir):
        """
        Check that pyproject.toml can excluded even if present
        """
        touch(source_dir / 'pyproject.toml')
        with open('MANIFEST.in', 'w', encoding="utf-8") as mts:
            print('exclude pyproject.toml', file=mts)
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        manifest = cmd.filelist.files
        assert 'pyproject.toml' not in manifest

    def test_build_subcommand_source_files(self, source_dir):
        """Custom build sub-commands can contribute files via get_source_files()."""
        touch(source_dir / '.myfile~')

        # Sanity check: without custom commands file list should not be affected
        dist = Distribution({**SETUP_ATTRS, "script_name": "setup.py"})
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        manifest = cmd.filelist.files
        assert '.myfile~' not in manifest

        # Test: custom command should be able to augment file list
        dist = Distribution({**SETUP_ATTRS, "script_name": "setup.py"})
        build = dist.get_command_obj("build")
        build.sub_commands = [*build.sub_commands, ("build_custom", None)]

        class build_custom(Command):
            def initialize_options(self): ...

            def finalize_options(self): ...

            def run(self): ...

            def get_source_files(self):
                return ['.myfile~']

        dist.cmdclass.update(build_custom=build_custom)

        cmd = sdist(dist)
        cmd.use_defaults = True
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        manifest = cmd.filelist.files
        assert '.myfile~' in manifest

    @pytest.mark.skipif("os.environ.get('SETUPTOOLS_USE_DISTUTILS') == 'stdlib'")
    def test_build_base_pathlib(self, source_dir):
        """
        Ensure if build_base is a pathlib.Path, the build still succeeds.
        """
        dist = Distribution({
            **SETUP_ATTRS,
            "script_name": "setup.py",
            "options": {"build": {"build_base": pathlib.Path('build')}},
        })
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
|
| 837 |
+
|
| 838 |
+
|
| 839 |
+
def test_default_revctrl():
    """
    When _default_revctrl was removed from the `setuptools.command.sdist`
    module in 10.0, it broke some systems which keep an old install of
    setuptools (Distribute) around. Those old versions require that the
    setuptools package continue to implement that interface, so this
    function provides that interface, stubbed. See #320 for details.

    This interface must be maintained until Ubuntu 12.04 is no longer
    supported (by Setuptools).
    """
    # Load the stub through the same entry-point machinery old installs use.
    (ep,) = metadata.EntryPoints._from_text(
        """
        [setuptools.file_finders]
        svn_cvs = setuptools.command.sdist:_default_revctrl
        """
    )
    res = ep.load()
    # The stub must at least be iterable (a file-finder yields paths).
    assert hasattr(res, '__iter__')
|
| 858 |
+
|
| 859 |
+
|
| 860 |
+
class TestRegressions:
|
| 861 |
+
"""
|
| 862 |
+
Can be removed/changed if the project decides to change how it handles symlinks
|
| 863 |
+
or external files.
|
| 864 |
+
"""
|
| 865 |
+
|
| 866 |
+
@staticmethod
|
| 867 |
+
def files_for_symlink_in_extension_depends(tmp_path, dep_path):
|
| 868 |
+
return {
|
| 869 |
+
"external": {
|
| 870 |
+
"dir": {"file.h": ""},
|
| 871 |
+
},
|
| 872 |
+
"project": {
|
| 873 |
+
"setup.py": cleandoc(
|
| 874 |
+
f"""
|
| 875 |
+
from setuptools import Extension, setup
|
| 876 |
+
setup(
|
| 877 |
+
name="myproj",
|
| 878 |
+
version="42",
|
| 879 |
+
ext_modules=[
|
| 880 |
+
Extension(
|
| 881 |
+
"hello", sources=["hello.pyx"],
|
| 882 |
+
depends=[{dep_path!r}]
|
| 883 |
+
)
|
| 884 |
+
],
|
| 885 |
+
)
|
| 886 |
+
"""
|
| 887 |
+
),
|
| 888 |
+
"hello.pyx": "",
|
| 889 |
+
"MANIFEST.in": "global-include *.h",
|
| 890 |
+
},
|
| 891 |
+
}
|
| 892 |
+
|
| 893 |
+
@pytest.mark.parametrize(
|
| 894 |
+
"dep_path", ("myheaders/dir/file.h", "myheaders/dir/../dir/file.h")
|
| 895 |
+
)
|
| 896 |
+
def test_symlink_in_extension_depends(self, monkeypatch, tmp_path, dep_path):
|
| 897 |
+
# Given a project with a symlinked dir and a "depends" targeting that dir
|
| 898 |
+
files = self.files_for_symlink_in_extension_depends(tmp_path, dep_path)
|
| 899 |
+
jaraco.path.build(files, prefix=str(tmp_path))
|
| 900 |
+
symlink_or_skip_test(tmp_path / "external", tmp_path / "project/myheaders")
|
| 901 |
+
|
| 902 |
+
# When `sdist` runs, there should be no error
|
| 903 |
+
members = run_sdist(monkeypatch, tmp_path / "project")
|
| 904 |
+
# and the sdist should contain the symlinked files
|
| 905 |
+
for expected in (
|
| 906 |
+
"myproj-42/hello.pyx",
|
| 907 |
+
"myproj-42/myheaders/dir/file.h",
|
| 908 |
+
):
|
| 909 |
+
assert expected in members
|
| 910 |
+
|
| 911 |
+
@staticmethod
|
| 912 |
+
def files_for_external_path_in_extension_depends(tmp_path, dep_path):
|
| 913 |
+
head, _, tail = dep_path.partition("$tmp_path$/")
|
| 914 |
+
dep_path = tmp_path / tail if tail else head
|
| 915 |
+
|
| 916 |
+
return {
|
| 917 |
+
"external": {
|
| 918 |
+
"dir": {"file.h": ""},
|
| 919 |
+
},
|
| 920 |
+
"project": {
|
| 921 |
+
"setup.py": cleandoc(
|
| 922 |
+
f"""
|
| 923 |
+
from setuptools import Extension, setup
|
| 924 |
+
setup(
|
| 925 |
+
name="myproj",
|
| 926 |
+
version="42",
|
| 927 |
+
ext_modules=[
|
| 928 |
+
Extension(
|
| 929 |
+
"hello", sources=["hello.pyx"],
|
| 930 |
+
depends=[{str(dep_path)!r}]
|
| 931 |
+
)
|
| 932 |
+
],
|
| 933 |
+
)
|
| 934 |
+
"""
|
| 935 |
+
),
|
| 936 |
+
"hello.pyx": "",
|
| 937 |
+
"MANIFEST.in": "global-include *.h",
|
| 938 |
+
},
|
| 939 |
+
}
|
| 940 |
+
|
| 941 |
+
@pytest.mark.parametrize(
|
| 942 |
+
"dep_path", ("$tmp_path$/external/dir/file.h", "../external/dir/file.h")
|
| 943 |
+
)
|
| 944 |
+
def test_external_path_in_extension_depends(self, monkeypatch, tmp_path, dep_path):
|
| 945 |
+
# Given a project with a "depends" targeting an external dir
|
| 946 |
+
files = self.files_for_external_path_in_extension_depends(tmp_path, dep_path)
|
| 947 |
+
jaraco.path.build(files, prefix=str(tmp_path))
|
| 948 |
+
# When `sdist` runs, there should be no error
|
| 949 |
+
members = run_sdist(monkeypatch, tmp_path / "project")
|
| 950 |
+
# and the sdist should not contain the external file
|
| 951 |
+
for name in members:
|
| 952 |
+
assert "file.h" not in name
|
| 953 |
+
|
| 954 |
+
|
| 955 |
+
def run_sdist(monkeypatch, project):
|
| 956 |
+
"""Given a project directory, run the sdist and return its contents"""
|
| 957 |
+
monkeypatch.chdir(project)
|
| 958 |
+
with quiet():
|
| 959 |
+
run_setup("setup.py", ["sdist"])
|
| 960 |
+
|
| 961 |
+
archive = next((project / "dist").glob("*.tar.gz"))
|
| 962 |
+
with tarfile.open(str(archive)) as tar:
|
| 963 |
+
return set(tar.getnames())
|
| 964 |
+
|
| 965 |
+
|
| 966 |
+
def test_sanity_check_setuptools_own_sdist(setuptools_sdist):
|
| 967 |
+
with tarfile.open(setuptools_sdist) as tar:
|
| 968 |
+
files = tar.getnames()
|
| 969 |
+
|
| 970 |
+
# setuptools sdist should not include the .tox folder
|
| 971 |
+
tox_files = [name for name in files if ".tox" in name]
|
| 972 |
+
assert len(tox_files) == 0, f"not empty {tox_files}"
|
llava/lib/python3.10/site-packages/setuptools/tests/test_setopt.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import configparser
|
| 2 |
+
|
| 3 |
+
from setuptools.command import setopt
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestEdit:
|
| 7 |
+
@staticmethod
|
| 8 |
+
def parse_config(filename):
|
| 9 |
+
parser = configparser.ConfigParser()
|
| 10 |
+
with open(filename, encoding='utf-8') as reader:
|
| 11 |
+
parser.read_file(reader)
|
| 12 |
+
return parser
|
| 13 |
+
|
| 14 |
+
@staticmethod
|
| 15 |
+
def write_text(file, content):
|
| 16 |
+
with open(file, 'wb') as strm:
|
| 17 |
+
strm.write(content.encode('utf-8'))
|
| 18 |
+
|
| 19 |
+
def test_utf8_encoding_retained(self, tmpdir):
|
| 20 |
+
"""
|
| 21 |
+
When editing a file, non-ASCII characters encoded in
|
| 22 |
+
UTF-8 should be retained.
|
| 23 |
+
"""
|
| 24 |
+
config = tmpdir.join('setup.cfg')
|
| 25 |
+
self.write_text(str(config), '[names]\njaraco=джарако')
|
| 26 |
+
setopt.edit_config(str(config), dict(names=dict(other='yes')))
|
| 27 |
+
parser = self.parse_config(str(config))
|
| 28 |
+
assert parser.get('names', 'jaraco') == 'джарако'
|
| 29 |
+
assert parser.get('names', 'other') == 'yes'
|
| 30 |
+
|
| 31 |
+
def test_case_retained(self, tmpdir):
|
| 32 |
+
"""
|
| 33 |
+
When editing a file, case of keys should be retained.
|
| 34 |
+
"""
|
| 35 |
+
config = tmpdir.join('setup.cfg')
|
| 36 |
+
self.write_text(str(config), '[names]\nFoO=bAr')
|
| 37 |
+
setopt.edit_config(str(config), dict(names=dict(oTher='yes')))
|
| 38 |
+
actual = config.read_text(encoding='ascii')
|
| 39 |
+
assert 'FoO' in actual
|
| 40 |
+
assert 'oTher' in actual
|
llava/lib/python3.10/site-packages/setuptools/tests/test_shutil_wrapper.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import stat
|
| 2 |
+
import sys
|
| 3 |
+
from unittest.mock import Mock
|
| 4 |
+
|
| 5 |
+
from setuptools import _shutil
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def test_rmtree_readonly(monkeypatch, tmp_path):
|
| 9 |
+
"""Verify onerr works as expected"""
|
| 10 |
+
|
| 11 |
+
tmp_dir = tmp_path / "with_readonly"
|
| 12 |
+
tmp_dir.mkdir()
|
| 13 |
+
some_file = tmp_dir.joinpath("file.txt")
|
| 14 |
+
some_file.touch()
|
| 15 |
+
some_file.chmod(stat.S_IREAD)
|
| 16 |
+
|
| 17 |
+
expected_count = 1 if sys.platform.startswith("win") else 0
|
| 18 |
+
chmod_fn = Mock(wraps=_shutil.attempt_chmod_verbose)
|
| 19 |
+
monkeypatch.setattr(_shutil, "attempt_chmod_verbose", chmod_fn)
|
| 20 |
+
|
| 21 |
+
_shutil.rmtree(tmp_dir)
|
| 22 |
+
assert chmod_fn.call_count == expected_count
|
| 23 |
+
assert not tmp_dir.is_dir()
|
llava/lib/python3.10/site-packages/setuptools/tests/test_unicode_utils.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from setuptools import unicode_utils
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def test_filesys_decode_fs_encoding_is_None(monkeypatch):
|
| 5 |
+
"""
|
| 6 |
+
Test filesys_decode does not raise TypeError when
|
| 7 |
+
getfilesystemencoding returns None.
|
| 8 |
+
"""
|
| 9 |
+
monkeypatch.setattr('sys.getfilesystemencoding', lambda: None)
|
| 10 |
+
unicode_utils.filesys_decode(b'test')
|
llava/lib/python3.10/site-packages/setuptools/tests/test_warnings.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from inspect import cleandoc
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
from setuptools.warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
|
| 6 |
+
|
| 7 |
+
_EXAMPLES = {
|
| 8 |
+
"default": dict(
|
| 9 |
+
args=("Hello {x}", "\n\t{target} {v:.1f}"),
|
| 10 |
+
kwargs={"x": 5, "v": 3, "target": "World"},
|
| 11 |
+
expected="""
|
| 12 |
+
Hello 5
|
| 13 |
+
!!
|
| 14 |
+
|
| 15 |
+
********************************************************************************
|
| 16 |
+
World 3.0
|
| 17 |
+
********************************************************************************
|
| 18 |
+
|
| 19 |
+
!!
|
| 20 |
+
""",
|
| 21 |
+
),
|
| 22 |
+
"futue_due_date": dict(
|
| 23 |
+
args=("Summary", "Lorem ipsum"),
|
| 24 |
+
kwargs={"due_date": (9999, 11, 22)},
|
| 25 |
+
expected="""
|
| 26 |
+
Summary
|
| 27 |
+
!!
|
| 28 |
+
|
| 29 |
+
********************************************************************************
|
| 30 |
+
Lorem ipsum
|
| 31 |
+
|
| 32 |
+
By 9999-Nov-22, you need to update your project and remove deprecated calls
|
| 33 |
+
or your builds will no longer be supported.
|
| 34 |
+
********************************************************************************
|
| 35 |
+
|
| 36 |
+
!!
|
| 37 |
+
""",
|
| 38 |
+
),
|
| 39 |
+
"past_due_date_with_docs": dict(
|
| 40 |
+
args=("Summary", "Lorem ipsum"),
|
| 41 |
+
kwargs={"due_date": (2000, 11, 22), "see_docs": "some_page.html"},
|
| 42 |
+
expected="""
|
| 43 |
+
Summary
|
| 44 |
+
!!
|
| 45 |
+
|
| 46 |
+
********************************************************************************
|
| 47 |
+
Lorem ipsum
|
| 48 |
+
|
| 49 |
+
This deprecation is overdue, please update your project and remove deprecated
|
| 50 |
+
calls to avoid build errors in the future.
|
| 51 |
+
|
| 52 |
+
See https://setuptools.pypa.io/en/latest/some_page.html for details.
|
| 53 |
+
********************************************************************************
|
| 54 |
+
|
| 55 |
+
!!
|
| 56 |
+
""",
|
| 57 |
+
),
|
| 58 |
+
}
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
@pytest.mark.parametrize("example_name", _EXAMPLES.keys())
|
| 62 |
+
def test_formatting(monkeypatch, example_name):
|
| 63 |
+
"""
|
| 64 |
+
It should automatically handle indentation, interpolation and things like due date.
|
| 65 |
+
"""
|
| 66 |
+
args = _EXAMPLES[example_name]["args"]
|
| 67 |
+
kwargs = _EXAMPLES[example_name]["kwargs"]
|
| 68 |
+
expected = _EXAMPLES[example_name]["expected"]
|
| 69 |
+
|
| 70 |
+
monkeypatch.setenv("SETUPTOOLS_ENFORCE_DEPRECATION", "false")
|
| 71 |
+
with pytest.warns(SetuptoolsWarning) as warn_info:
|
| 72 |
+
SetuptoolsWarning.emit(*args, **kwargs)
|
| 73 |
+
assert _get_message(warn_info) == cleandoc(expected)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def test_due_date_enforcement(monkeypatch):
|
| 77 |
+
class _MyDeprecation(SetuptoolsDeprecationWarning):
|
| 78 |
+
_SUMMARY = "Summary"
|
| 79 |
+
_DETAILS = "Lorem ipsum"
|
| 80 |
+
_DUE_DATE = (2000, 11, 22)
|
| 81 |
+
_SEE_DOCS = "some_page.html"
|
| 82 |
+
|
| 83 |
+
monkeypatch.setenv("SETUPTOOLS_ENFORCE_DEPRECATION", "true")
|
| 84 |
+
with pytest.raises(SetuptoolsDeprecationWarning) as exc_info:
|
| 85 |
+
_MyDeprecation.emit()
|
| 86 |
+
|
| 87 |
+
expected = """
|
| 88 |
+
Summary
|
| 89 |
+
!!
|
| 90 |
+
|
| 91 |
+
********************************************************************************
|
| 92 |
+
Lorem ipsum
|
| 93 |
+
|
| 94 |
+
This deprecation is overdue, please update your project and remove deprecated
|
| 95 |
+
calls to avoid build errors in the future.
|
| 96 |
+
|
| 97 |
+
See https://setuptools.pypa.io/en/latest/some_page.html for details.
|
| 98 |
+
********************************************************************************
|
| 99 |
+
|
| 100 |
+
!!
|
| 101 |
+
"""
|
| 102 |
+
assert str(exc_info.value) == cleandoc(expected)
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def _get_message(warn_info):
|
| 106 |
+
return next(warn.message.args[0] for warn in warn_info)
|
llava/lib/python3.10/site-packages/setuptools/tests/test_wheel.py
ADDED
|
@@ -0,0 +1,714 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""wheel tests"""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import contextlib
|
| 6 |
+
import glob
|
| 7 |
+
import inspect
|
| 8 |
+
import os
|
| 9 |
+
import pathlib
|
| 10 |
+
import shutil
|
| 11 |
+
import stat
|
| 12 |
+
import subprocess
|
| 13 |
+
import sys
|
| 14 |
+
import zipfile
|
| 15 |
+
from typing import Any
|
| 16 |
+
|
| 17 |
+
import pytest
|
| 18 |
+
from jaraco import path
|
| 19 |
+
from packaging.tags import parse_tag
|
| 20 |
+
from packaging.utils import canonicalize_name
|
| 21 |
+
|
| 22 |
+
from pkg_resources import PY_MAJOR, Distribution, PathMetadata
|
| 23 |
+
from setuptools.wheel import Wheel
|
| 24 |
+
|
| 25 |
+
from .contexts import tempdir
|
| 26 |
+
from .textwrap import DALS
|
| 27 |
+
|
| 28 |
+
from distutils.sysconfig import get_config_var
|
| 29 |
+
from distutils.util import get_platform
|
| 30 |
+
|
| 31 |
+
WHEEL_INFO_TESTS = (
|
| 32 |
+
('invalid.whl', ValueError),
|
| 33 |
+
(
|
| 34 |
+
'simplewheel-2.0-1-py2.py3-none-any.whl',
|
| 35 |
+
{
|
| 36 |
+
'project_name': 'simplewheel',
|
| 37 |
+
'version': '2.0',
|
| 38 |
+
'build': '1',
|
| 39 |
+
'py_version': 'py2.py3',
|
| 40 |
+
'abi': 'none',
|
| 41 |
+
'platform': 'any',
|
| 42 |
+
},
|
| 43 |
+
),
|
| 44 |
+
(
|
| 45 |
+
'simple.dist-0.1-py2.py3-none-any.whl',
|
| 46 |
+
{
|
| 47 |
+
'project_name': 'simple.dist',
|
| 48 |
+
'version': '0.1',
|
| 49 |
+
'build': None,
|
| 50 |
+
'py_version': 'py2.py3',
|
| 51 |
+
'abi': 'none',
|
| 52 |
+
'platform': 'any',
|
| 53 |
+
},
|
| 54 |
+
),
|
| 55 |
+
(
|
| 56 |
+
'example_pkg_a-1-py3-none-any.whl',
|
| 57 |
+
{
|
| 58 |
+
'project_name': 'example_pkg_a',
|
| 59 |
+
'version': '1',
|
| 60 |
+
'build': None,
|
| 61 |
+
'py_version': 'py3',
|
| 62 |
+
'abi': 'none',
|
| 63 |
+
'platform': 'any',
|
| 64 |
+
},
|
| 65 |
+
),
|
| 66 |
+
(
|
| 67 |
+
'PyQt5-5.9-5.9.1-cp35.cp36.cp37-abi3-manylinux1_x86_64.whl',
|
| 68 |
+
{
|
| 69 |
+
'project_name': 'PyQt5',
|
| 70 |
+
'version': '5.9',
|
| 71 |
+
'build': '5.9.1',
|
| 72 |
+
'py_version': 'cp35.cp36.cp37',
|
| 73 |
+
'abi': 'abi3',
|
| 74 |
+
'platform': 'manylinux1_x86_64',
|
| 75 |
+
},
|
| 76 |
+
),
|
| 77 |
+
)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
@pytest.mark.parametrize(
|
| 81 |
+
('filename', 'info'), WHEEL_INFO_TESTS, ids=[t[0] for t in WHEEL_INFO_TESTS]
|
| 82 |
+
)
|
| 83 |
+
def test_wheel_info(filename, info):
|
| 84 |
+
if inspect.isclass(info):
|
| 85 |
+
with pytest.raises(info):
|
| 86 |
+
Wheel(filename)
|
| 87 |
+
return
|
| 88 |
+
w = Wheel(filename)
|
| 89 |
+
assert {k: getattr(w, k) for k in info.keys()} == info
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
@contextlib.contextmanager
|
| 93 |
+
def build_wheel(extra_file_defs=None, **kwargs):
|
| 94 |
+
file_defs = {
|
| 95 |
+
'setup.py': (
|
| 96 |
+
DALS(
|
| 97 |
+
"""
|
| 98 |
+
# -*- coding: utf-8 -*-
|
| 99 |
+
from setuptools import setup
|
| 100 |
+
import setuptools
|
| 101 |
+
setup(**%r)
|
| 102 |
+
"""
|
| 103 |
+
)
|
| 104 |
+
% kwargs
|
| 105 |
+
).encode('utf-8'),
|
| 106 |
+
}
|
| 107 |
+
if extra_file_defs:
|
| 108 |
+
file_defs.update(extra_file_defs)
|
| 109 |
+
with tempdir() as source_dir:
|
| 110 |
+
path.build(file_defs, source_dir)
|
| 111 |
+
subprocess.check_call(
|
| 112 |
+
(sys.executable, 'setup.py', '-q', 'bdist_wheel'), cwd=source_dir
|
| 113 |
+
)
|
| 114 |
+
yield glob.glob(os.path.join(source_dir, 'dist', '*.whl'))[0]
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
def tree_set(root):
|
| 118 |
+
contents = set()
|
| 119 |
+
for dirpath, dirnames, filenames in os.walk(root):
|
| 120 |
+
for filename in filenames:
|
| 121 |
+
contents.add(os.path.join(os.path.relpath(dirpath, root), filename))
|
| 122 |
+
return contents
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def flatten_tree(tree):
|
| 126 |
+
"""Flatten nested dicts and lists into a full list of paths"""
|
| 127 |
+
output = set()
|
| 128 |
+
for node, contents in tree.items():
|
| 129 |
+
if isinstance(contents, dict):
|
| 130 |
+
contents = flatten_tree(contents)
|
| 131 |
+
|
| 132 |
+
for elem in contents:
|
| 133 |
+
if isinstance(elem, dict):
|
| 134 |
+
output |= {os.path.join(node, val) for val in flatten_tree(elem)}
|
| 135 |
+
else:
|
| 136 |
+
output.add(os.path.join(node, elem))
|
| 137 |
+
return output
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def format_install_tree(tree):
|
| 141 |
+
return {
|
| 142 |
+
x.format(
|
| 143 |
+
py_version=PY_MAJOR,
|
| 144 |
+
platform=get_platform(),
|
| 145 |
+
shlib_ext=get_config_var('EXT_SUFFIX') or get_config_var('SO'),
|
| 146 |
+
)
|
| 147 |
+
for x in tree
|
| 148 |
+
}
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def _check_wheel_install(
|
| 152 |
+
filename, install_dir, install_tree_includes, project_name, version, requires_txt
|
| 153 |
+
):
|
| 154 |
+
w = Wheel(filename)
|
| 155 |
+
egg_path = os.path.join(install_dir, w.egg_name())
|
| 156 |
+
w.install_as_egg(egg_path)
|
| 157 |
+
if install_tree_includes is not None:
|
| 158 |
+
install_tree = format_install_tree(install_tree_includes)
|
| 159 |
+
exp = tree_set(install_dir)
|
| 160 |
+
assert install_tree.issubset(exp), install_tree - exp
|
| 161 |
+
|
| 162 |
+
metadata = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO'))
|
| 163 |
+
dist = Distribution.from_filename(egg_path, metadata=metadata)
|
| 164 |
+
assert dist.project_name == project_name
|
| 165 |
+
assert dist.version == version
|
| 166 |
+
if requires_txt is None:
|
| 167 |
+
assert not dist.has_metadata('requires.txt')
|
| 168 |
+
else:
|
| 169 |
+
# Order must match to ensure reproducibility.
|
| 170 |
+
assert requires_txt == dist.get_metadata('requires.txt').lstrip()
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
class Record:
|
| 174 |
+
def __init__(self, id, **kwargs):
|
| 175 |
+
self._id = id
|
| 176 |
+
self._fields = kwargs
|
| 177 |
+
|
| 178 |
+
def __repr__(self) -> str:
|
| 179 |
+
return f'{self._id}(**{self._fields!r})'
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
# Using Any to avoid possible type union issues later in test
|
| 183 |
+
# making a TypedDict is not worth in a test and anonymous/inline TypedDict are experimental
|
| 184 |
+
# https://github.com/python/mypy/issues/9884
|
| 185 |
+
WHEEL_INSTALL_TESTS: tuple[dict[str, Any], ...] = (
|
| 186 |
+
dict(
|
| 187 |
+
id='basic',
|
| 188 |
+
file_defs={'foo': {'__init__.py': ''}},
|
| 189 |
+
setup_kwargs=dict(
|
| 190 |
+
packages=['foo'],
|
| 191 |
+
),
|
| 192 |
+
install_tree=flatten_tree({
|
| 193 |
+
'foo-1.0-py{py_version}.egg': {
|
| 194 |
+
'EGG-INFO': ['PKG-INFO', 'RECORD', 'WHEEL', 'top_level.txt'],
|
| 195 |
+
'foo': ['__init__.py'],
|
| 196 |
+
}
|
| 197 |
+
}),
|
| 198 |
+
),
|
| 199 |
+
dict(
|
| 200 |
+
id='utf-8',
|
| 201 |
+
setup_kwargs=dict(
|
| 202 |
+
description='Description accentuée',
|
| 203 |
+
),
|
| 204 |
+
),
|
| 205 |
+
dict(
|
| 206 |
+
id='data',
|
| 207 |
+
file_defs={
|
| 208 |
+
'data.txt': DALS(
|
| 209 |
+
"""
|
| 210 |
+
Some data...
|
| 211 |
+
"""
|
| 212 |
+
),
|
| 213 |
+
},
|
| 214 |
+
setup_kwargs=dict(
|
| 215 |
+
data_files=[('data_dir', ['data.txt'])],
|
| 216 |
+
),
|
| 217 |
+
install_tree=flatten_tree({
|
| 218 |
+
'foo-1.0-py{py_version}.egg': {
|
| 219 |
+
'EGG-INFO': ['PKG-INFO', 'RECORD', 'WHEEL', 'top_level.txt'],
|
| 220 |
+
'data_dir': ['data.txt'],
|
| 221 |
+
}
|
| 222 |
+
}),
|
| 223 |
+
),
|
| 224 |
+
dict(
|
| 225 |
+
id='extension',
|
| 226 |
+
file_defs={
|
| 227 |
+
'extension.c': DALS(
|
| 228 |
+
"""
|
| 229 |
+
#include "Python.h"
|
| 230 |
+
|
| 231 |
+
#if PY_MAJOR_VERSION >= 3
|
| 232 |
+
|
| 233 |
+
static struct PyModuleDef moduledef = {
|
| 234 |
+
PyModuleDef_HEAD_INIT,
|
| 235 |
+
"extension",
|
| 236 |
+
NULL,
|
| 237 |
+
0,
|
| 238 |
+
NULL,
|
| 239 |
+
NULL,
|
| 240 |
+
NULL,
|
| 241 |
+
NULL,
|
| 242 |
+
NULL
|
| 243 |
+
};
|
| 244 |
+
|
| 245 |
+
#define INITERROR return NULL
|
| 246 |
+
|
| 247 |
+
PyMODINIT_FUNC PyInit_extension(void)
|
| 248 |
+
|
| 249 |
+
#else
|
| 250 |
+
|
| 251 |
+
#define INITERROR return
|
| 252 |
+
|
| 253 |
+
void initextension(void)
|
| 254 |
+
|
| 255 |
+
#endif
|
| 256 |
+
{
|
| 257 |
+
#if PY_MAJOR_VERSION >= 3
|
| 258 |
+
PyObject *module = PyModule_Create(&moduledef);
|
| 259 |
+
#else
|
| 260 |
+
PyObject *module = Py_InitModule("extension", NULL);
|
| 261 |
+
#endif
|
| 262 |
+
if (module == NULL)
|
| 263 |
+
INITERROR;
|
| 264 |
+
#if PY_MAJOR_VERSION >= 3
|
| 265 |
+
return module;
|
| 266 |
+
#endif
|
| 267 |
+
}
|
| 268 |
+
"""
|
| 269 |
+
),
|
| 270 |
+
},
|
| 271 |
+
setup_kwargs=dict(
|
| 272 |
+
ext_modules=[
|
| 273 |
+
Record(
|
| 274 |
+
'setuptools.Extension', name='extension', sources=['extension.c']
|
| 275 |
+
)
|
| 276 |
+
],
|
| 277 |
+
),
|
| 278 |
+
install_tree=flatten_tree({
|
| 279 |
+
'foo-1.0-py{py_version}-{platform}.egg': [
|
| 280 |
+
'extension{shlib_ext}',
|
| 281 |
+
{
|
| 282 |
+
'EGG-INFO': [
|
| 283 |
+
'PKG-INFO',
|
| 284 |
+
'RECORD',
|
| 285 |
+
'WHEEL',
|
| 286 |
+
'top_level.txt',
|
| 287 |
+
]
|
| 288 |
+
},
|
| 289 |
+
]
|
| 290 |
+
}),
|
| 291 |
+
),
|
| 292 |
+
dict(
|
| 293 |
+
id='header',
|
| 294 |
+
file_defs={
|
| 295 |
+
'header.h': DALS(
|
| 296 |
+
"""
|
| 297 |
+
"""
|
| 298 |
+
),
|
| 299 |
+
},
|
| 300 |
+
setup_kwargs=dict(
|
| 301 |
+
headers=['header.h'],
|
| 302 |
+
),
|
| 303 |
+
install_tree=flatten_tree({
|
| 304 |
+
'foo-1.0-py{py_version}.egg': [
|
| 305 |
+
'header.h',
|
| 306 |
+
{
|
| 307 |
+
'EGG-INFO': [
|
| 308 |
+
'PKG-INFO',
|
| 309 |
+
'RECORD',
|
| 310 |
+
'WHEEL',
|
| 311 |
+
'top_level.txt',
|
| 312 |
+
]
|
| 313 |
+
},
|
| 314 |
+
]
|
| 315 |
+
}),
|
| 316 |
+
),
|
| 317 |
+
dict(
|
| 318 |
+
id='script',
|
| 319 |
+
file_defs={
|
| 320 |
+
'script.py': DALS(
|
| 321 |
+
"""
|
| 322 |
+
#/usr/bin/python
|
| 323 |
+
print('hello world!')
|
| 324 |
+
"""
|
| 325 |
+
),
|
| 326 |
+
'script.sh': DALS(
|
| 327 |
+
"""
|
| 328 |
+
#/bin/sh
|
| 329 |
+
echo 'hello world!'
|
| 330 |
+
"""
|
| 331 |
+
),
|
| 332 |
+
},
|
| 333 |
+
setup_kwargs=dict(
|
| 334 |
+
scripts=['script.py', 'script.sh'],
|
| 335 |
+
),
|
| 336 |
+
install_tree=flatten_tree({
|
| 337 |
+
'foo-1.0-py{py_version}.egg': {
|
| 338 |
+
'EGG-INFO': [
|
| 339 |
+
'PKG-INFO',
|
| 340 |
+
'RECORD',
|
| 341 |
+
'WHEEL',
|
| 342 |
+
'top_level.txt',
|
| 343 |
+
{'scripts': ['script.py', 'script.sh']},
|
| 344 |
+
]
|
| 345 |
+
}
|
| 346 |
+
}),
|
| 347 |
+
),
|
| 348 |
+
dict(
|
| 349 |
+
id='requires1',
|
| 350 |
+
install_requires='foobar==2.0',
|
| 351 |
+
install_tree=flatten_tree({
|
| 352 |
+
'foo-1.0-py{py_version}.egg': {
|
| 353 |
+
'EGG-INFO': [
|
| 354 |
+
'PKG-INFO',
|
| 355 |
+
'RECORD',
|
| 356 |
+
'WHEEL',
|
| 357 |
+
'requires.txt',
|
| 358 |
+
'top_level.txt',
|
| 359 |
+
]
|
| 360 |
+
}
|
| 361 |
+
}),
|
| 362 |
+
requires_txt=DALS(
|
| 363 |
+
"""
|
| 364 |
+
foobar==2.0
|
| 365 |
+
"""
|
| 366 |
+
),
|
| 367 |
+
),
|
| 368 |
+
dict(
|
| 369 |
+
id='requires2',
|
| 370 |
+
install_requires=f"""
|
| 371 |
+
bar
|
| 372 |
+
foo<=2.0; {sys.platform!r} in sys_platform
|
| 373 |
+
""",
|
| 374 |
+
requires_txt=DALS(
|
| 375 |
+
"""
|
| 376 |
+
bar
|
| 377 |
+
foo<=2.0
|
| 378 |
+
"""
|
| 379 |
+
),
|
| 380 |
+
),
|
| 381 |
+
dict(
|
| 382 |
+
id='requires3',
|
| 383 |
+
install_requires=f"""
|
| 384 |
+
bar; {sys.platform!r} != sys_platform
|
| 385 |
+
""",
|
| 386 |
+
),
|
| 387 |
+
dict(
|
| 388 |
+
id='requires4',
|
| 389 |
+
install_requires="""
|
| 390 |
+
foo
|
| 391 |
+
""",
|
| 392 |
+
extras_require={
|
| 393 |
+
'extra': 'foobar>3',
|
| 394 |
+
},
|
| 395 |
+
requires_txt=DALS(
|
| 396 |
+
"""
|
| 397 |
+
foo
|
| 398 |
+
|
| 399 |
+
[extra]
|
| 400 |
+
foobar>3
|
| 401 |
+
"""
|
| 402 |
+
),
|
| 403 |
+
),
|
| 404 |
+
dict(
|
| 405 |
+
id='requires5',
|
| 406 |
+
extras_require={
|
| 407 |
+
'extra': f'foobar; {sys.platform!r} != sys_platform',
|
| 408 |
+
},
|
| 409 |
+
requires_txt=DALS(
|
| 410 |
+
"""
|
| 411 |
+
[extra]
|
| 412 |
+
"""
|
| 413 |
+
),
|
| 414 |
+
),
|
| 415 |
+
dict(
|
| 416 |
+
id='requires_ensure_order',
|
| 417 |
+
install_requires="""
|
| 418 |
+
foo
|
| 419 |
+
bar
|
| 420 |
+
baz
|
| 421 |
+
qux
|
| 422 |
+
""",
|
| 423 |
+
extras_require={
|
| 424 |
+
'extra': """
|
| 425 |
+
foobar>3
|
| 426 |
+
barbaz>4
|
| 427 |
+
bazqux>5
|
| 428 |
+
quxzap>6
|
| 429 |
+
""",
|
| 430 |
+
},
|
| 431 |
+
requires_txt=DALS(
|
| 432 |
+
"""
|
| 433 |
+
foo
|
| 434 |
+
bar
|
| 435 |
+
baz
|
| 436 |
+
qux
|
| 437 |
+
|
| 438 |
+
[extra]
|
| 439 |
+
foobar>3
|
| 440 |
+
barbaz>4
|
| 441 |
+
bazqux>5
|
| 442 |
+
quxzap>6
|
| 443 |
+
"""
|
| 444 |
+
),
|
| 445 |
+
),
|
| 446 |
+
dict(
|
| 447 |
+
id='namespace_package',
|
| 448 |
+
file_defs={
|
| 449 |
+
'foo': {
|
| 450 |
+
'bar': {'__init__.py': ''},
|
| 451 |
+
},
|
| 452 |
+
},
|
| 453 |
+
setup_kwargs=dict(
|
| 454 |
+
namespace_packages=['foo'],
|
| 455 |
+
packages=['foo.bar'],
|
| 456 |
+
),
|
| 457 |
+
install_tree=flatten_tree({
|
| 458 |
+
'foo-1.0-py{py_version}.egg': [
|
| 459 |
+
'foo-1.0-py{py_version}-nspkg.pth',
|
| 460 |
+
{
|
| 461 |
+
'EGG-INFO': [
|
| 462 |
+
'PKG-INFO',
|
| 463 |
+
'RECORD',
|
| 464 |
+
'WHEEL',
|
| 465 |
+
'namespace_packages.txt',
|
| 466 |
+
'top_level.txt',
|
| 467 |
+
]
|
| 468 |
+
},
|
| 469 |
+
{
|
| 470 |
+
'foo': [
|
| 471 |
+
'__init__.py',
|
| 472 |
+
{'bar': ['__init__.py']},
|
| 473 |
+
]
|
| 474 |
+
},
|
| 475 |
+
]
|
| 476 |
+
}),
|
| 477 |
+
),
|
| 478 |
+
dict(
|
| 479 |
+
id='empty_namespace_package',
|
| 480 |
+
file_defs={
|
| 481 |
+
'foobar': {
|
| 482 |
+
'__init__.py': (
|
| 483 |
+
"__import__('pkg_resources').declare_namespace(__name__)"
|
| 484 |
+
)
|
| 485 |
+
},
|
| 486 |
+
},
|
| 487 |
+
setup_kwargs=dict(
|
| 488 |
+
namespace_packages=['foobar'],
|
| 489 |
+
packages=['foobar'],
|
| 490 |
+
),
|
| 491 |
+
install_tree=flatten_tree({
|
| 492 |
+
'foo-1.0-py{py_version}.egg': [
|
| 493 |
+
'foo-1.0-py{py_version}-nspkg.pth',
|
| 494 |
+
{
|
| 495 |
+
'EGG-INFO': [
|
| 496 |
+
'PKG-INFO',
|
| 497 |
+
'RECORD',
|
| 498 |
+
'WHEEL',
|
| 499 |
+
'namespace_packages.txt',
|
| 500 |
+
'top_level.txt',
|
| 501 |
+
]
|
| 502 |
+
},
|
| 503 |
+
{
|
| 504 |
+
'foobar': [
|
| 505 |
+
'__init__.py',
|
| 506 |
+
]
|
| 507 |
+
},
|
| 508 |
+
]
|
| 509 |
+
}),
|
| 510 |
+
),
|
| 511 |
+
dict(
|
| 512 |
+
id='data_in_package',
|
| 513 |
+
file_defs={
|
| 514 |
+
'foo': {
|
| 515 |
+
'__init__.py': '',
|
| 516 |
+
'data_dir': {
|
| 517 |
+
'data.txt': DALS(
|
| 518 |
+
"""
|
| 519 |
+
Some data...
|
| 520 |
+
"""
|
| 521 |
+
),
|
| 522 |
+
},
|
| 523 |
+
}
|
| 524 |
+
},
|
| 525 |
+
setup_kwargs=dict(
|
| 526 |
+
packages=['foo'],
|
| 527 |
+
data_files=[('foo/data_dir', ['foo/data_dir/data.txt'])],
|
| 528 |
+
),
|
| 529 |
+
install_tree=flatten_tree({
|
| 530 |
+
'foo-1.0-py{py_version}.egg': {
|
| 531 |
+
'EGG-INFO': [
|
| 532 |
+
'PKG-INFO',
|
| 533 |
+
'RECORD',
|
| 534 |
+
'WHEEL',
|
| 535 |
+
'top_level.txt',
|
| 536 |
+
],
|
| 537 |
+
'foo': [
|
| 538 |
+
'__init__.py',
|
| 539 |
+
{
|
| 540 |
+
'data_dir': [
|
| 541 |
+
'data.txt',
|
| 542 |
+
]
|
| 543 |
+
},
|
| 544 |
+
],
|
| 545 |
+
}
|
| 546 |
+
}),
|
| 547 |
+
),
|
| 548 |
+
)
|
| 549 |
+
|
| 550 |
+
|
| 551 |
+
@pytest.mark.parametrize(
|
| 552 |
+
'params',
|
| 553 |
+
WHEEL_INSTALL_TESTS,
|
| 554 |
+
ids=[params['id'] for params in WHEEL_INSTALL_TESTS],
|
| 555 |
+
)
|
| 556 |
+
def test_wheel_install(params):
|
| 557 |
+
project_name = params.get('name', 'foo')
|
| 558 |
+
version = params.get('version', '1.0')
|
| 559 |
+
install_requires = params.get('install_requires', [])
|
| 560 |
+
extras_require = params.get('extras_require', {})
|
| 561 |
+
requires_txt = params.get('requires_txt', None)
|
| 562 |
+
install_tree = params.get('install_tree')
|
| 563 |
+
file_defs = params.get('file_defs', {})
|
| 564 |
+
setup_kwargs = params.get('setup_kwargs', {})
|
| 565 |
+
with (
|
| 566 |
+
build_wheel(
|
| 567 |
+
name=project_name,
|
| 568 |
+
version=version,
|
| 569 |
+
install_requires=install_requires,
|
| 570 |
+
extras_require=extras_require,
|
| 571 |
+
extra_file_defs=file_defs,
|
| 572 |
+
**setup_kwargs,
|
| 573 |
+
) as filename,
|
| 574 |
+
tempdir() as install_dir,
|
| 575 |
+
):
|
| 576 |
+
_check_wheel_install(
|
| 577 |
+
filename, install_dir, install_tree, project_name, version, requires_txt
|
| 578 |
+
)
|
| 579 |
+
|
| 580 |
+
|
| 581 |
+
def test_wheel_install_pep_503():
    """A wheel renamed to its PEP 503 canonical project name still installs.

    The wheel is built as ``Foo_Bar`` and then the file is moved so its name
    uses the canonical form (``foo-bar``) before installation is checked.
    """
    original = 'Foo_Bar'  # PEP 503 canonicalized name is "foo-bar"
    canonical = canonicalize_name(original)
    release = '1.0'
    with (
        build_wheel(name=original, version=release) as built_path,
        tempdir() as target_dir,
    ):
        # Rename the wheel file on disk to carry the canonical project name.
        renamed_path = built_path.replace(original, canonical)
        shutil.move(built_path, renamed_path)
        _check_wheel_install(
            renamed_path,
            target_dir,
            None,
            canonical,
            release,
            None,
        )


def test_wheel_no_dist_dir():
    """Installing a wheel that contains no ``*.dist-info`` raises ValueError."""
    name, release = 'nodistinfo', '1.0'
    basename = f'{name}-{release}-py2.py3-none-any.whl'
    with tempdir() as build_dir:
        whl_path = os.path.join(build_dir, basename)
        # An empty zip archive: a structurally valid wheel file with no
        # metadata directory inside.
        zipfile.ZipFile(whl_path, 'w').close()
        with tempdir() as target_dir, pytest.raises(ValueError):
            _check_wheel_install(whl_path, target_dir, None, name, release, None)


def test_wheel_is_compatible(monkeypatch):
    """``Wheel.is_compatible`` is true when the wheel tag is a supported tag."""

    def fake_supported_tags():
        # Pretend the running interpreter supports exactly this tag set.
        tags = parse_tag('cp36-cp36m-manylinux1_x86_64')
        return {(tag.interpreter, tag.abi, tag.platform) for tag in tags}

    monkeypatch.setattr('setuptools.wheel._get_supported_tags', fake_supported_tags)
    wheel = Wheel('onnxruntime-0.1.2-cp36-cp36m-manylinux1_x86_64.whl')
    assert wheel.is_compatible()


def test_wheel_mode():
    """A script installed from a wheel keeps the file mode it was built with.

    Builds a wheel containing two scripts (one of which is chmod'ed to 0o777
    before packaging), installs it, and asserts the installed ``script.sh``
    retains mode 0o777 (skipped semantics on Windows, where file mode is
    not meaningful).
    """

    @contextlib.contextmanager
    def _build_script_wheel(extra_file_defs=None, **kwargs):
        # Local wheel builder that, unlike the module-level helper, chmods
        # script.sh before invoking bdist_wheel so the mode is recorded.
        tree = {
            'setup.py': (
                DALS(
                    """
                    # -*- coding: utf-8 -*-
                    from setuptools import setup
                    import setuptools
                    setup(**%r)
                    """
                )
                % kwargs
            ).encode('utf-8'),
        }
        if extra_file_defs:
            tree.update(extra_file_defs)
        with tempdir() as source_dir:
            path.build(tree, source_dir)
            script_path = pathlib.Path(source_dir) / "script.sh"
            os.chmod(script_path, 0o777)
            subprocess.check_call(
                (sys.executable, 'setup.py', '-q', 'bdist_wheel'), cwd=source_dir
            )
            yield glob.glob(os.path.join(source_dir, 'dist', '*.whl'))[0]

    scenario = dict(
        id='script',
        file_defs={
            'script.py': DALS(
                """
                #/usr/bin/python
                print('hello world!')
                """
            ),
            'script.sh': DALS(
                """
                #/bin/sh
                echo 'hello world!'
                """
            ),
        },
        setup_kwargs=dict(
            scripts=['script.py', 'script.sh'],
        ),
        install_tree=flatten_tree({
            'foo-1.0-py{py_version}.egg': {
                'EGG-INFO': [
                    'PKG-INFO',
                    'RECORD',
                    'WHEEL',
                    'top_level.txt',
                    {'scripts': ['script.py', 'script.sh']},
                ]
            }
        }),
    )

    name = scenario.get('name', 'foo')
    release = scenario.get('version', '1.0')
    expected_tree = scenario.get('install_tree')
    extra_files = scenario.get('file_defs', {})
    setup_extra = scenario.get('setup_kwargs', {})

    with (
        _build_script_wheel(
            name=name,
            version=release,
            install_requires=[],
            extras_require={},
            extra_file_defs=extra_files,
            **setup_extra,
        ) as wheel_path,
        tempdir() as target_dir,
    ):
        _check_wheel_install(
            wheel_path, target_dir, expected_tree, name, release, None
        )
        installed = Wheel(wheel_path)
        egg_base = pathlib.Path(target_dir) / installed.egg_name()
        script_sh = egg_base / "EGG-INFO" / "scripts" / "script.sh"
        assert script_sh.exists()
        if sys.platform != 'win32':
            # Editable file mode has no effect on Windows
            assert oct(stat.S_IMODE(script_sh.stat().st_mode)) == "0o777"
|