Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-310.pyc +3 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_validations.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py +51 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/config/expand.py +452 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/__init__.py +13 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/__pycache__/test_core_metadata.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/__init__.py +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/__pycache__/test_apply_pyprojecttoml.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/__pycache__/test_expand.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/__pycache__/test_pyprojecttoml.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/__pycache__/test_pyprojecttoml_dynamic_deps.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/__pycache__/test_setupcfg.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/downloads/__init__.py +59 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/downloads/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/downloads/__pycache__/preload.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/downloads/preload.py +18 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/setupcfg_examples.txt +22 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/test_apply_pyprojecttoml.py +539 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/test_expand.py +247 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/test_pyprojecttoml.py +396 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py +109 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/test_setupcfg.py +965 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/contexts.py +145 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/environment.py +95 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/fixtures.py +157 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/indexes/test_links_priority/external.html +3 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html +4 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/integration/__init__.py +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/integration/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/integration/__pycache__/helpers.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/integration/__pycache__/test_pip_install_sdist.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/integration/helpers.py +77 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/integration/test_pip_install_sdist.py +223 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/mod_with_constant.py +1 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/namespaces.py +90 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/script-with-bom.py +1 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/server.py +86 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_archive_util.py +36 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_bdist_deprecations.py +28 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_bdist_egg.py +73 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_bdist_wheel.py +623 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_build.py +33 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_build_clib.py +84 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_build_ext.py +293 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_build_meta.py +970 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_build_py.py +480 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_config_discovery.py +647 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_core_metadata.py +577 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_depends.py +15 -0
.gitattributes
CHANGED
|
@@ -84,3 +84,4 @@ evalkit_llava/bin/xzcat filter=lfs diff=lfs merge=lfs -text
|
|
| 84 |
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distlib/w64-arm.exe filter=lfs diff=lfs merge=lfs -text
|
| 85 |
evalkit_llava/lib/libz.so.1.2.13 filter=lfs diff=lfs merge=lfs -text
|
| 86 |
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 84 |
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distlib/w64-arm.exe filter=lfs diff=lfs merge=lfs -text
|
| 85 |
evalkit_llava/lib/libz.so.1.2.13 filter=lfs diff=lfs merge=lfs -text
|
| 86 |
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 87 |
+
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-310.pyc
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:61c07f125b633c6db4e3e454a870d8c1223f601862e8832e0b72c076305af08b
|
| 3 |
+
size 194435
|
evalkit_llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_validations.cpython-310.pyc
ADDED
|
Binary file (85.8 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
SPLIT_RE = re.compile(r'[\.\[\]]+')
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class JsonSchemaException(ValueError):
|
| 8 |
+
"""
|
| 9 |
+
Base exception of ``fastjsonschema`` library.
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class JsonSchemaValueException(JsonSchemaException):
|
| 14 |
+
"""
|
| 15 |
+
Exception raised by validation function. Available properties:
|
| 16 |
+
|
| 17 |
+
* ``message`` containing human-readable information what is wrong (e.g. ``data.property[index] must be smaller than or equal to 42``),
|
| 18 |
+
* invalid ``value`` (e.g. ``60``),
|
| 19 |
+
* ``name`` of a path in the data structure (e.g. ``data.property[index]``),
|
| 20 |
+
* ``path`` as an array in the data structure (e.g. ``['data', 'property', 'index']``),
|
| 21 |
+
* the whole ``definition`` which the ``value`` has to fulfil (e.g. ``{'type': 'number', 'maximum': 42}``),
|
| 22 |
+
* ``rule`` which the ``value`` is breaking (e.g. ``maximum``)
|
| 23 |
+
* and ``rule_definition`` (e.g. ``42``).
|
| 24 |
+
|
| 25 |
+
.. versionchanged:: 2.14.0
|
| 26 |
+
Added all extra properties.
|
| 27 |
+
"""
|
| 28 |
+
|
| 29 |
+
def __init__(self, message, value=None, name=None, definition=None, rule=None):
|
| 30 |
+
super().__init__(message)
|
| 31 |
+
self.message = message
|
| 32 |
+
self.value = value
|
| 33 |
+
self.name = name
|
| 34 |
+
self.definition = definition
|
| 35 |
+
self.rule = rule
|
| 36 |
+
|
| 37 |
+
@property
|
| 38 |
+
def path(self):
|
| 39 |
+
return [item for item in SPLIT_RE.split(self.name) if item != '']
|
| 40 |
+
|
| 41 |
+
@property
|
| 42 |
+
def rule_definition(self):
|
| 43 |
+
if not self.rule or not self.definition:
|
| 44 |
+
return None
|
| 45 |
+
return self.definition.get(self.rule)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class JsonSchemaDefinitionException(JsonSchemaException):
|
| 49 |
+
"""
|
| 50 |
+
Exception raised by generator of validation function.
|
| 51 |
+
"""
|
evalkit_llava/lib/python3.10/site-packages/setuptools/config/expand.py
ADDED
|
@@ -0,0 +1,452 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utility functions to expand configuration directives or special values
|
| 2 |
+
(such glob patterns).
|
| 3 |
+
|
| 4 |
+
We can split the process of interpreting configuration files into 2 steps:
|
| 5 |
+
|
| 6 |
+
1. The parsing the file contents from strings to value objects
|
| 7 |
+
that can be understand by Python (for example a string with a comma
|
| 8 |
+
separated list of keywords into an actual Python list of strings).
|
| 9 |
+
|
| 10 |
+
2. The expansion (or post-processing) of these values according to the
|
| 11 |
+
semantics ``setuptools`` assign to them (for example a configuration field
|
| 12 |
+
with the ``file:`` directive should be expanded from a list of file paths to
|
| 13 |
+
a single string with the contents of those files concatenated)
|
| 14 |
+
|
| 15 |
+
This module focus on the second step, and therefore allow sharing the expansion
|
| 16 |
+
functions among several configuration file formats.
|
| 17 |
+
|
| 18 |
+
**PRIVATE MODULE**: API reserved for setuptools internal usage only.
|
| 19 |
+
"""
|
| 20 |
+
|
| 21 |
+
from __future__ import annotations
|
| 22 |
+
|
| 23 |
+
import ast
|
| 24 |
+
import importlib
|
| 25 |
+
import os
|
| 26 |
+
import pathlib
|
| 27 |
+
import sys
|
| 28 |
+
from collections.abc import Iterable, Iterator, Mapping
|
| 29 |
+
from configparser import ConfigParser
|
| 30 |
+
from glob import iglob
|
| 31 |
+
from importlib.machinery import ModuleSpec, all_suffixes
|
| 32 |
+
from itertools import chain
|
| 33 |
+
from pathlib import Path
|
| 34 |
+
from types import ModuleType, TracebackType
|
| 35 |
+
from typing import TYPE_CHECKING, Any, Callable, TypeVar
|
| 36 |
+
|
| 37 |
+
from .. import _static
|
| 38 |
+
from .._path import StrPath, same_path as _same_path
|
| 39 |
+
from ..discovery import find_package_path
|
| 40 |
+
from ..warnings import SetuptoolsWarning
|
| 41 |
+
|
| 42 |
+
from distutils.errors import DistutilsOptionError
|
| 43 |
+
|
| 44 |
+
if TYPE_CHECKING:
|
| 45 |
+
from typing_extensions import Self
|
| 46 |
+
|
| 47 |
+
from setuptools.dist import Distribution
|
| 48 |
+
|
| 49 |
+
_K = TypeVar("_K")
|
| 50 |
+
_V_co = TypeVar("_V_co", covariant=True)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class StaticModule:
|
| 54 |
+
"""Proxy to a module object that avoids executing arbitrary code."""
|
| 55 |
+
|
| 56 |
+
def __init__(self, name: str, spec: ModuleSpec) -> None:
|
| 57 |
+
module = ast.parse(pathlib.Path(spec.origin).read_bytes()) # type: ignore[arg-type] # Let it raise an error on None
|
| 58 |
+
vars(self).update(locals())
|
| 59 |
+
del self.self
|
| 60 |
+
|
| 61 |
+
def _find_assignments(self) -> Iterator[tuple[ast.AST, ast.AST]]:
|
| 62 |
+
for statement in self.module.body:
|
| 63 |
+
if isinstance(statement, ast.Assign):
|
| 64 |
+
yield from ((target, statement.value) for target in statement.targets)
|
| 65 |
+
elif isinstance(statement, ast.AnnAssign) and statement.value:
|
| 66 |
+
yield (statement.target, statement.value)
|
| 67 |
+
|
| 68 |
+
def __getattr__(self, attr: str):
|
| 69 |
+
"""Attempt to load an attribute "statically", via :func:`ast.literal_eval`."""
|
| 70 |
+
try:
|
| 71 |
+
return next(
|
| 72 |
+
ast.literal_eval(value)
|
| 73 |
+
for target, value in self._find_assignments()
|
| 74 |
+
if isinstance(target, ast.Name) and target.id == attr
|
| 75 |
+
)
|
| 76 |
+
except Exception as e:
|
| 77 |
+
raise AttributeError(f"{self.name} has no attribute {attr}") from e
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def glob_relative(
|
| 81 |
+
patterns: Iterable[str], root_dir: StrPath | None = None
|
| 82 |
+
) -> list[str]:
|
| 83 |
+
"""Expand the list of glob patterns, but preserving relative paths.
|
| 84 |
+
|
| 85 |
+
:param list[str] patterns: List of glob patterns
|
| 86 |
+
:param str root_dir: Path to which globs should be relative
|
| 87 |
+
(current directory by default)
|
| 88 |
+
:rtype: list
|
| 89 |
+
"""
|
| 90 |
+
glob_characters = {'*', '?', '[', ']', '{', '}'}
|
| 91 |
+
expanded_values = []
|
| 92 |
+
root_dir = root_dir or os.getcwd()
|
| 93 |
+
for value in patterns:
|
| 94 |
+
# Has globby characters?
|
| 95 |
+
if any(char in value for char in glob_characters):
|
| 96 |
+
# then expand the glob pattern while keeping paths *relative*:
|
| 97 |
+
glob_path = os.path.abspath(os.path.join(root_dir, value))
|
| 98 |
+
expanded_values.extend(
|
| 99 |
+
sorted(
|
| 100 |
+
os.path.relpath(path, root_dir).replace(os.sep, "/")
|
| 101 |
+
for path in iglob(glob_path, recursive=True)
|
| 102 |
+
)
|
| 103 |
+
)
|
| 104 |
+
|
| 105 |
+
else:
|
| 106 |
+
# take the value as-is
|
| 107 |
+
path = os.path.relpath(value, root_dir).replace(os.sep, "/")
|
| 108 |
+
expanded_values.append(path)
|
| 109 |
+
|
| 110 |
+
return expanded_values
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def read_files(
|
| 114 |
+
filepaths: StrPath | Iterable[StrPath], root_dir: StrPath | None = None
|
| 115 |
+
) -> str:
|
| 116 |
+
"""Return the content of the files concatenated using ``\n`` as str
|
| 117 |
+
|
| 118 |
+
This function is sandboxed and won't reach anything outside ``root_dir``
|
| 119 |
+
|
| 120 |
+
(By default ``root_dir`` is the current directory).
|
| 121 |
+
"""
|
| 122 |
+
from more_itertools import always_iterable
|
| 123 |
+
|
| 124 |
+
root_dir = os.path.abspath(root_dir or os.getcwd())
|
| 125 |
+
_filepaths = (os.path.join(root_dir, path) for path in always_iterable(filepaths))
|
| 126 |
+
return '\n'.join(
|
| 127 |
+
_read_file(path)
|
| 128 |
+
for path in _filter_existing_files(_filepaths)
|
| 129 |
+
if _assert_local(path, root_dir)
|
| 130 |
+
)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def _filter_existing_files(filepaths: Iterable[StrPath]) -> Iterator[StrPath]:
|
| 134 |
+
for path in filepaths:
|
| 135 |
+
if os.path.isfile(path):
|
| 136 |
+
yield path
|
| 137 |
+
else:
|
| 138 |
+
SetuptoolsWarning.emit(f"File {path!r} cannot be found")
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def _read_file(filepath: bytes | StrPath) -> str:
|
| 142 |
+
with open(filepath, encoding='utf-8') as f:
|
| 143 |
+
return f.read()
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def _assert_local(filepath: StrPath, root_dir: str):
|
| 147 |
+
if Path(os.path.abspath(root_dir)) not in Path(os.path.abspath(filepath)).parents:
|
| 148 |
+
msg = f"Cannot access {filepath!r} (or anything outside {root_dir!r})"
|
| 149 |
+
raise DistutilsOptionError(msg)
|
| 150 |
+
|
| 151 |
+
return True
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def read_attr(
|
| 155 |
+
attr_desc: str,
|
| 156 |
+
package_dir: Mapping[str, str] | None = None,
|
| 157 |
+
root_dir: StrPath | None = None,
|
| 158 |
+
) -> Any:
|
| 159 |
+
"""Reads the value of an attribute from a module.
|
| 160 |
+
|
| 161 |
+
This function will try to read the attributed statically first
|
| 162 |
+
(via :func:`ast.literal_eval`), and only evaluate the module if it fails.
|
| 163 |
+
|
| 164 |
+
Examples:
|
| 165 |
+
read_attr("package.attr")
|
| 166 |
+
read_attr("package.module.attr")
|
| 167 |
+
|
| 168 |
+
:param str attr_desc: Dot-separated string describing how to reach the
|
| 169 |
+
attribute (see examples above)
|
| 170 |
+
:param dict[str, str] package_dir: Mapping of package names to their
|
| 171 |
+
location in disk (represented by paths relative to ``root_dir``).
|
| 172 |
+
:param str root_dir: Path to directory containing all the packages in
|
| 173 |
+
``package_dir`` (current directory by default).
|
| 174 |
+
:rtype: str
|
| 175 |
+
"""
|
| 176 |
+
root_dir = root_dir or os.getcwd()
|
| 177 |
+
attrs_path = attr_desc.strip().split('.')
|
| 178 |
+
attr_name = attrs_path.pop()
|
| 179 |
+
module_name = '.'.join(attrs_path)
|
| 180 |
+
module_name = module_name or '__init__'
|
| 181 |
+
path = _find_module(module_name, package_dir, root_dir)
|
| 182 |
+
spec = _find_spec(module_name, path)
|
| 183 |
+
|
| 184 |
+
try:
|
| 185 |
+
value = getattr(StaticModule(module_name, spec), attr_name)
|
| 186 |
+
# XXX: Is marking as static contents coming from modules too optimistic?
|
| 187 |
+
return _static.attempt_conversion(value)
|
| 188 |
+
except Exception:
|
| 189 |
+
# fallback to evaluate module
|
| 190 |
+
module = _load_spec(spec, module_name)
|
| 191 |
+
return getattr(module, attr_name)
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
def _find_spec(module_name: str, module_path: StrPath | None) -> ModuleSpec:
|
| 195 |
+
spec = importlib.util.spec_from_file_location(module_name, module_path)
|
| 196 |
+
spec = spec or importlib.util.find_spec(module_name)
|
| 197 |
+
|
| 198 |
+
if spec is None:
|
| 199 |
+
raise ModuleNotFoundError(module_name)
|
| 200 |
+
|
| 201 |
+
return spec
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType:
|
| 205 |
+
name = getattr(spec, "__name__", module_name)
|
| 206 |
+
if name in sys.modules:
|
| 207 |
+
return sys.modules[name]
|
| 208 |
+
module = importlib.util.module_from_spec(spec)
|
| 209 |
+
sys.modules[name] = module # cache (it also ensures `==` works on loaded items)
|
| 210 |
+
assert spec.loader is not None
|
| 211 |
+
spec.loader.exec_module(module)
|
| 212 |
+
return module
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
def _find_module(
|
| 216 |
+
module_name: str, package_dir: Mapping[str, str] | None, root_dir: StrPath
|
| 217 |
+
) -> str | None:
|
| 218 |
+
"""Find the path to the module named ``module_name``,
|
| 219 |
+
considering the ``package_dir`` in the build configuration and ``root_dir``.
|
| 220 |
+
|
| 221 |
+
>>> tmp = getfixture('tmpdir')
|
| 222 |
+
>>> _ = tmp.ensure("a/b/c.py")
|
| 223 |
+
>>> _ = tmp.ensure("a/b/d/__init__.py")
|
| 224 |
+
>>> r = lambda x: x.replace(str(tmp), "tmp").replace(os.sep, "/")
|
| 225 |
+
>>> r(_find_module("a.b.c", None, tmp))
|
| 226 |
+
'tmp/a/b/c.py'
|
| 227 |
+
>>> r(_find_module("f.g.h", {"": "1", "f": "2", "f.g": "3", "f.g.h": "a/b/d"}, tmp))
|
| 228 |
+
'tmp/a/b/d/__init__.py'
|
| 229 |
+
"""
|
| 230 |
+
path_start = find_package_path(module_name, package_dir or {}, root_dir)
|
| 231 |
+
candidates = chain.from_iterable(
|
| 232 |
+
(f"{path_start}{ext}", os.path.join(path_start, f"__init__{ext}"))
|
| 233 |
+
for ext in all_suffixes()
|
| 234 |
+
)
|
| 235 |
+
return next((x for x in candidates if os.path.isfile(x)), None)
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
def resolve_class(
|
| 239 |
+
qualified_class_name: str,
|
| 240 |
+
package_dir: Mapping[str, str] | None = None,
|
| 241 |
+
root_dir: StrPath | None = None,
|
| 242 |
+
) -> Callable:
|
| 243 |
+
"""Given a qualified class name, return the associated class object"""
|
| 244 |
+
root_dir = root_dir or os.getcwd()
|
| 245 |
+
idx = qualified_class_name.rfind('.')
|
| 246 |
+
class_name = qualified_class_name[idx + 1 :]
|
| 247 |
+
pkg_name = qualified_class_name[:idx]
|
| 248 |
+
|
| 249 |
+
path = _find_module(pkg_name, package_dir, root_dir)
|
| 250 |
+
module = _load_spec(_find_spec(pkg_name, path), pkg_name)
|
| 251 |
+
return getattr(module, class_name)
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
def cmdclass(
|
| 255 |
+
values: dict[str, str],
|
| 256 |
+
package_dir: Mapping[str, str] | None = None,
|
| 257 |
+
root_dir: StrPath | None = None,
|
| 258 |
+
) -> dict[str, Callable]:
|
| 259 |
+
"""Given a dictionary mapping command names to strings for qualified class
|
| 260 |
+
names, apply :func:`resolve_class` to the dict values.
|
| 261 |
+
"""
|
| 262 |
+
return {k: resolve_class(v, package_dir, root_dir) for k, v in values.items()}
|
| 263 |
+
|
| 264 |
+
|
| 265 |
+
def find_packages(
|
| 266 |
+
*,
|
| 267 |
+
namespaces=True,
|
| 268 |
+
fill_package_dir: dict[str, str] | None = None,
|
| 269 |
+
root_dir: StrPath | None = None,
|
| 270 |
+
**kwargs,
|
| 271 |
+
) -> list[str]:
|
| 272 |
+
"""Works similarly to :func:`setuptools.find_packages`, but with all
|
| 273 |
+
arguments given as keyword arguments. Moreover, ``where`` can be given
|
| 274 |
+
as a list (the results will be simply concatenated).
|
| 275 |
+
|
| 276 |
+
When the additional keyword argument ``namespaces`` is ``True``, it will
|
| 277 |
+
behave like :func:`setuptools.find_namespace_packages`` (i.e. include
|
| 278 |
+
implicit namespaces as per :pep:`420`).
|
| 279 |
+
|
| 280 |
+
The ``where`` argument will be considered relative to ``root_dir`` (or the current
|
| 281 |
+
working directory when ``root_dir`` is not given).
|
| 282 |
+
|
| 283 |
+
If the ``fill_package_dir`` argument is passed, this function will consider it as a
|
| 284 |
+
similar data structure to the ``package_dir`` configuration parameter add fill-in
|
| 285 |
+
any missing package location.
|
| 286 |
+
|
| 287 |
+
:rtype: list
|
| 288 |
+
"""
|
| 289 |
+
from more_itertools import always_iterable, unique_everseen
|
| 290 |
+
|
| 291 |
+
from setuptools.discovery import construct_package_dir
|
| 292 |
+
|
| 293 |
+
# check "not namespaces" first due to python/mypy#6232
|
| 294 |
+
if not namespaces:
|
| 295 |
+
from setuptools.discovery import PackageFinder
|
| 296 |
+
else:
|
| 297 |
+
from setuptools.discovery import PEP420PackageFinder as PackageFinder
|
| 298 |
+
|
| 299 |
+
root_dir = root_dir or os.curdir
|
| 300 |
+
where = kwargs.pop('where', ['.'])
|
| 301 |
+
packages: list[str] = []
|
| 302 |
+
fill_package_dir = {} if fill_package_dir is None else fill_package_dir
|
| 303 |
+
search = list(unique_everseen(always_iterable(where)))
|
| 304 |
+
|
| 305 |
+
if len(search) == 1 and all(not _same_path(search[0], x) for x in (".", root_dir)):
|
| 306 |
+
fill_package_dir.setdefault("", search[0])
|
| 307 |
+
|
| 308 |
+
for path in search:
|
| 309 |
+
package_path = _nest_path(root_dir, path)
|
| 310 |
+
pkgs = PackageFinder.find(package_path, **kwargs)
|
| 311 |
+
packages.extend(pkgs)
|
| 312 |
+
if pkgs and not (
|
| 313 |
+
fill_package_dir.get("") == path or os.path.samefile(package_path, root_dir)
|
| 314 |
+
):
|
| 315 |
+
fill_package_dir.update(construct_package_dir(pkgs, path))
|
| 316 |
+
|
| 317 |
+
return packages
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def _nest_path(parent: StrPath, path: StrPath) -> str:
|
| 321 |
+
path = parent if path in {".", ""} else os.path.join(parent, path)
|
| 322 |
+
return os.path.normpath(path)
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
def version(value: Callable | Iterable[str | int] | str) -> str:
|
| 326 |
+
"""When getting the version directly from an attribute,
|
| 327 |
+
it should be normalised to string.
|
| 328 |
+
"""
|
| 329 |
+
_value = value() if callable(value) else value
|
| 330 |
+
|
| 331 |
+
if isinstance(_value, str):
|
| 332 |
+
return _value
|
| 333 |
+
if hasattr(_value, '__iter__'):
|
| 334 |
+
return '.'.join(map(str, _value))
|
| 335 |
+
return f'{_value}'
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
def canonic_package_data(package_data: dict) -> dict:
|
| 339 |
+
if "*" in package_data:
|
| 340 |
+
package_data[""] = package_data.pop("*")
|
| 341 |
+
return package_data
|
| 342 |
+
|
| 343 |
+
|
| 344 |
+
def canonic_data_files(
|
| 345 |
+
data_files: list | dict, root_dir: StrPath | None = None
|
| 346 |
+
) -> list[tuple[str, list[str]]]:
|
| 347 |
+
"""For compatibility with ``setup.py``, ``data_files`` should be a list
|
| 348 |
+
of pairs instead of a dict.
|
| 349 |
+
|
| 350 |
+
This function also expands glob patterns.
|
| 351 |
+
"""
|
| 352 |
+
if isinstance(data_files, list):
|
| 353 |
+
return data_files
|
| 354 |
+
|
| 355 |
+
return [
|
| 356 |
+
(dest, glob_relative(patterns, root_dir))
|
| 357 |
+
for dest, patterns in data_files.items()
|
| 358 |
+
]
|
| 359 |
+
|
| 360 |
+
|
| 361 |
+
def entry_points(
|
| 362 |
+
text: str, text_source: str = "entry-points"
|
| 363 |
+
) -> dict[str, dict[str, str]]:
|
| 364 |
+
"""Given the contents of entry-points file,
|
| 365 |
+
process it into a 2-level dictionary (``dict[str, dict[str, str]]``).
|
| 366 |
+
The first level keys are entry-point groups, the second level keys are
|
| 367 |
+
entry-point names, and the second level values are references to objects
|
| 368 |
+
(that correspond to the entry-point value).
|
| 369 |
+
"""
|
| 370 |
+
# Using undocumented behaviour, see python/typeshed#12700
|
| 371 |
+
parser = ConfigParser(default_section=None, delimiters=("=",)) # type: ignore[call-overload]
|
| 372 |
+
parser.optionxform = str # case sensitive
|
| 373 |
+
parser.read_string(text, text_source)
|
| 374 |
+
groups = {k: dict(v.items()) for k, v in parser.items()}
|
| 375 |
+
groups.pop(parser.default_section, None)
|
| 376 |
+
return groups
|
| 377 |
+
|
| 378 |
+
|
| 379 |
+
class EnsurePackagesDiscovered:
|
| 380 |
+
"""Some expand functions require all the packages to already be discovered before
|
| 381 |
+
they run, e.g. :func:`read_attr`, :func:`resolve_class`, :func:`cmdclass`.
|
| 382 |
+
|
| 383 |
+
Therefore in some cases we will need to run autodiscovery during the evaluation of
|
| 384 |
+
the configuration. However, it is better to postpone calling package discovery as
|
| 385 |
+
much as possible, because some parameters can influence it (e.g. ``package_dir``),
|
| 386 |
+
and those might not have been processed yet.
|
| 387 |
+
"""
|
| 388 |
+
|
| 389 |
+
def __init__(self, distribution: Distribution) -> None:
|
| 390 |
+
self._dist = distribution
|
| 391 |
+
self._called = False
|
| 392 |
+
|
| 393 |
+
def __call__(self):
|
| 394 |
+
"""Trigger the automatic package discovery, if it is still necessary."""
|
| 395 |
+
if not self._called:
|
| 396 |
+
self._called = True
|
| 397 |
+
self._dist.set_defaults(name=False) # Skip name, we can still be parsing
|
| 398 |
+
|
| 399 |
+
def __enter__(self) -> Self:
|
| 400 |
+
return self
|
| 401 |
+
|
| 402 |
+
def __exit__(
|
| 403 |
+
self,
|
| 404 |
+
exc_type: type[BaseException] | None,
|
| 405 |
+
exc_value: BaseException | None,
|
| 406 |
+
traceback: TracebackType | None,
|
| 407 |
+
):
|
| 408 |
+
if self._called:
|
| 409 |
+
self._dist.set_defaults.analyse_name() # Now we can set a default name
|
| 410 |
+
|
| 411 |
+
def _get_package_dir(self) -> Mapping[str, str]:
|
| 412 |
+
self()
|
| 413 |
+
pkg_dir = self._dist.package_dir
|
| 414 |
+
return {} if pkg_dir is None else pkg_dir
|
| 415 |
+
|
| 416 |
+
@property
|
| 417 |
+
def package_dir(self) -> Mapping[str, str]:
|
| 418 |
+
"""Proxy to ``package_dir`` that may trigger auto-discovery when used."""
|
| 419 |
+
return LazyMappingProxy(self._get_package_dir)
|
| 420 |
+
|
| 421 |
+
|
| 422 |
+
class LazyMappingProxy(Mapping[_K, _V_co]):
|
| 423 |
+
"""Mapping proxy that delays resolving the target object, until really needed.
|
| 424 |
+
|
| 425 |
+
>>> def obtain_mapping():
|
| 426 |
+
... print("Running expensive function!")
|
| 427 |
+
... return {"key": "value", "other key": "other value"}
|
| 428 |
+
>>> mapping = LazyMappingProxy(obtain_mapping)
|
| 429 |
+
>>> mapping["key"]
|
| 430 |
+
Running expensive function!
|
| 431 |
+
'value'
|
| 432 |
+
>>> mapping["other key"]
|
| 433 |
+
'other value'
|
| 434 |
+
"""
|
| 435 |
+
|
| 436 |
+
def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V_co]]) -> None:
|
| 437 |
+
self._obtain = obtain_mapping_value
|
| 438 |
+
self._value: Mapping[_K, _V_co] | None = None
|
| 439 |
+
|
| 440 |
+
def _target(self) -> Mapping[_K, _V_co]:
|
| 441 |
+
if self._value is None:
|
| 442 |
+
self._value = self._obtain()
|
| 443 |
+
return self._value
|
| 444 |
+
|
| 445 |
+
def __getitem__(self, key: _K) -> _V_co:
|
| 446 |
+
return self._target()[key]
|
| 447 |
+
|
| 448 |
+
def __len__(self) -> int:
|
| 449 |
+
return len(self._target())
|
| 450 |
+
|
| 451 |
+
def __iter__(self) -> Iterator[_K]:
|
| 452 |
+
return iter(self._target())
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/__init__.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import locale
import sys

import pytest

__all__ = ['fail_on_ascii']

# Determine the encoding of the current locale.
# ``locale.getencoding`` was added in Python 3.11; earlier versions use
# ``getpreferredencoding(False)``.
if sys.version_info >= (3, 11):
    locale_encoding = locale.getencoding()
else:
    locale_encoding = locale.getpreferredencoding(False)
# 'ANSI_X3.4-1968' is a common alias for plain ASCII (e.g. glibc's "C" locale).
is_ascii = locale_encoding == 'ANSI_X3.4-1968'
# Marker for tests that are expected to fail when running under an ASCII locale.
fail_on_ascii = pytest.mark.xfail(is_ascii, reason="Test fails in this locale")
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/__pycache__/test_core_metadata.cpython-310.pyc
ADDED
|
Binary file (14.9 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/__init__.py
ADDED
|
File without changes
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/__pycache__/test_apply_pyprojecttoml.cpython-310.pyc
ADDED
|
Binary file (19.1 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/__pycache__/test_expand.cpython-310.pyc
ADDED
|
Binary file (7.79 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/__pycache__/test_pyprojecttoml.cpython-310.pyc
ADDED
|
Binary file (11.1 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/__pycache__/test_pyprojecttoml_dynamic_deps.cpython-310.pyc
ADDED
|
Binary file (3.37 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/__pycache__/test_setupcfg.cpython-310.pyc
ADDED
|
Binary file (27.9 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/downloads/__init__.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import re
|
| 4 |
+
import time
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from urllib.error import HTTPError
|
| 7 |
+
from urllib.request import urlopen
|
| 8 |
+
|
| 9 |
+
__all__ = ["DOWNLOAD_DIR", "retrieve_file", "output_file", "urls_from_file"]
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
NAME_REMOVE = ("http://", "https://", "github.com/", "/raw/")
DOWNLOAD_DIR = Path(__file__).parent


# ----------------------------------------------------------------------
# Please update ./preload.py accordingly when modifying this file
# ----------------------------------------------------------------------


def output_file(url: str, download_dir: Path = DOWNLOAD_DIR) -> Path:
    """Derive a deterministic local file path under *download_dir* for *url*.

    Scheme/host fragments listed in ``NAME_REMOVE`` are dropped and any
    remaining unsafe characters are collapsed into ``_``.
    """
    cleaned = url.strip()
    for fragment in NAME_REMOVE:
        cleaned = cleaned.replace(fragment, '').strip().strip('/:').strip()
    safe_name = re.sub(r"[^\-_\.\w\d]+", "_", cleaned)
    return Path(download_dir, safe_name)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR, wait: float = 5) -> Path:
    """Download *url* into *download_dir* unless it is already cached.

    Retries once after ``wait`` seconds on an ``HTTPError`` (e.g. transient
    rate limiting). Returns the local path in either case.
    """
    path = output_file(url, download_dir)
    if path.exists():
        # Cached from a previous run — skip the network round-trip entirely.
        print(f"Skipping {url} (already exists: {path})")
    else:
        download_dir.mkdir(exist_ok=True, parents=True)
        print(f"Downloading {url} to {path}")
        try:
            download(url, path)
        except HTTPError:
            time.sleep(wait)  # wait a few seconds and try again.
            download(url, path)
    return path
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def urls_from_file(list_file: Path) -> list[str]:
    """``list_file`` should be a text file where each line corresponds to a URL to
    download.

    Lines starting with ``#`` are treated as comments, and blank (or
    whitespace-only) lines are skipped — previously a blank line would be
    returned as an empty "URL" and break the downstream ``urlopen`` call.
    """
    print(f"file: {list_file}")
    content = list_file.read_text(encoding="utf-8")
    stripped = (line.strip() for line in content.splitlines())
    return [url for url in stripped if url and not url.startswith("#")]
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def download(url: str, dest: Path):
    """Fetch *url* and write the full response body to *dest* in binary mode."""
    with urlopen(url) as response:
        payload = response.read()

    with open(dest, "wb") as out:
        out.write(payload)

    # Sanity check: the file must exist after a successful write.
    assert Path(dest).exists()
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/downloads/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (2.15 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/downloads/__pycache__/preload.cpython-310.pyc
ADDED
|
Binary file (650 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/downloads/preload.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""This file can be used to preload files needed for testing.
|
| 2 |
+
|
| 3 |
+
For example you can use::
|
| 4 |
+
|
| 5 |
+
cd setuptools/tests/config
|
| 6 |
+
python -m downloads.preload setupcfg_examples.txt
|
| 7 |
+
|
| 8 |
+
to make sure the `setup.cfg` examples are downloaded before starting the tests.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import sys
|
| 12 |
+
from pathlib import Path
|
| 13 |
+
|
| 14 |
+
from . import retrieve_file, urls_from_file
|
| 15 |
+
|
| 16 |
+
if __name__ == "__main__":
|
| 17 |
+
urls = urls_from_file(Path(sys.argv[1]))
|
| 18 |
+
list(map(retrieve_file, urls))
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/setupcfg_examples.txt
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ====================================================================
|
| 2 |
+
# Some popular packages that use setup.cfg (and others not so popular)
|
| 3 |
+
# Reference: https://hugovk.github.io/top-pypi-packages/
|
| 4 |
+
# ====================================================================
|
| 5 |
+
https://github.com/pypa/setuptools/raw/52c990172fec37766b3566679724aa8bf70ae06d/setup.cfg
|
| 6 |
+
https://github.com/pypa/wheel/raw/0acd203cd896afec7f715aa2ff5980a403459a3b/setup.cfg
|
| 7 |
+
https://github.com/python/importlib_metadata/raw/2f05392ca980952a6960d82b2f2d2ea10aa53239/setup.cfg
|
| 8 |
+
https://github.com/jaraco/skeleton/raw/d9008b5c510cd6969127a6a2ab6f832edddef296/setup.cfg
|
| 9 |
+
https://github.com/jaraco/zipp/raw/700d3a96390e970b6b962823bfea78b4f7e1c537/setup.cfg
|
| 10 |
+
https://github.com/pallets/jinja/raw/7d72eb7fefb7dce065193967f31f805180508448/setup.cfg
|
| 11 |
+
https://github.com/tkem/cachetools/raw/2fd87a94b8d3861d80e9e4236cd480bfdd21c90d/setup.cfg
|
| 12 |
+
https://github.com/aio-libs/aiohttp/raw/5e0e6b7080f2408d5f1dd544c0e1cf88378b7b10/setup.cfg
|
| 13 |
+
https://github.com/pallets/flask/raw/9486b6cf57bd6a8a261f67091aca8ca78eeec1e3/setup.cfg
|
| 14 |
+
https://github.com/pallets/click/raw/6411f425fae545f42795665af4162006b36c5e4a/setup.cfg
|
| 15 |
+
https://github.com/sqlalchemy/sqlalchemy/raw/533f5718904b620be8d63f2474229945d6f8ba5d/setup.cfg
|
| 16 |
+
https://github.com/pytest-dev/pluggy/raw/461ef63291d13589c4e21aa182cd1529257e9a0a/setup.cfg
|
| 17 |
+
https://github.com/pytest-dev/pytest/raw/c7be96dae487edbd2f55b561b31b68afac1dabe6/setup.cfg
|
| 18 |
+
https://github.com/platformdirs/platformdirs/raw/7b7852128dd6f07511b618d6edea35046bd0c6ff/setup.cfg
|
| 19 |
+
https://github.com/pandas-dev/pandas/raw/bc17343f934a33dc231c8c74be95d8365537c376/setup.cfg
|
| 20 |
+
https://github.com/django/django/raw/4e249d11a6e56ca8feb4b055b681cec457ef3a3d/setup.cfg
|
| 21 |
+
https://github.com/pyscaffold/pyscaffold/raw/de7aa5dc059fbd04307419c667cc4961bc9df4b8/setup.cfg
|
| 22 |
+
https://github.com/pypa/virtualenv/raw/f92eda6e3da26a4d28c2663ffb85c4960bdb990c/setup.cfg
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/test_apply_pyprojecttoml.py
ADDED
|
@@ -0,0 +1,539 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Make sure that applying the configuration from pyproject.toml is equivalent to
|
| 2 |
+
applying a similar configuration from setup.cfg
|
| 3 |
+
|
| 4 |
+
To run these tests offline, please have a look on ``./downloads/preload.py``
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from __future__ import annotations
|
| 8 |
+
|
| 9 |
+
import io
|
| 10 |
+
import re
|
| 11 |
+
import tarfile
|
| 12 |
+
from inspect import cleandoc
|
| 13 |
+
from pathlib import Path
|
| 14 |
+
from unittest.mock import Mock
|
| 15 |
+
|
| 16 |
+
import pytest
|
| 17 |
+
from ini2toml.api import LiteTranslator
|
| 18 |
+
from packaging.metadata import Metadata
|
| 19 |
+
|
| 20 |
+
import setuptools # noqa: F401 # ensure monkey patch to metadata
|
| 21 |
+
from setuptools._static import is_static
|
| 22 |
+
from setuptools.command.egg_info import write_requirements
|
| 23 |
+
from setuptools.config import expand, pyprojecttoml, setupcfg
|
| 24 |
+
from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter
|
| 25 |
+
from setuptools.dist import Distribution
|
| 26 |
+
from setuptools.errors import RemovedConfigError
|
| 27 |
+
|
| 28 |
+
from .downloads import retrieve_file, urls_from_file
|
| 29 |
+
|
| 30 |
+
HERE = Path(__file__).parent
|
| 31 |
+
EXAMPLES_FILE = "setupcfg_examples.txt"
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def makedist(path, **attrs):
    """Build a ``Distribution`` rooted at *path*, merged with extra *attrs*."""
    options = {"src_root": path, **attrs}
    return Distribution(options)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE))
|
| 39 |
+
@pytest.mark.filterwarnings("ignore")
|
| 40 |
+
@pytest.mark.uses_network
|
| 41 |
+
def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path):
|
| 42 |
+
monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.0.1"))
|
| 43 |
+
setupcfg_example = retrieve_file(url)
|
| 44 |
+
pyproject_example = Path(tmp_path, "pyproject.toml")
|
| 45 |
+
setupcfg_text = setupcfg_example.read_text(encoding="utf-8")
|
| 46 |
+
toml_config = LiteTranslator().translate(setupcfg_text, "setup.cfg")
|
| 47 |
+
pyproject_example.write_text(toml_config, encoding="utf-8")
|
| 48 |
+
|
| 49 |
+
dist_toml = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject_example)
|
| 50 |
+
dist_cfg = setupcfg.apply_configuration(makedist(tmp_path), setupcfg_example)
|
| 51 |
+
|
| 52 |
+
pkg_info_toml = core_metadata(dist_toml)
|
| 53 |
+
pkg_info_cfg = core_metadata(dist_cfg)
|
| 54 |
+
assert pkg_info_toml == pkg_info_cfg
|
| 55 |
+
|
| 56 |
+
if any(getattr(d, "license_files", None) for d in (dist_toml, dist_cfg)):
|
| 57 |
+
assert set(dist_toml.license_files) == set(dist_cfg.license_files)
|
| 58 |
+
|
| 59 |
+
if any(getattr(d, "entry_points", None) for d in (dist_toml, dist_cfg)):
|
| 60 |
+
print(dist_cfg.entry_points)
|
| 61 |
+
ep_toml = {
|
| 62 |
+
(k, *sorted(i.replace(" ", "") for i in v))
|
| 63 |
+
for k, v in dist_toml.entry_points.items()
|
| 64 |
+
}
|
| 65 |
+
ep_cfg = {
|
| 66 |
+
(k, *sorted(i.replace(" ", "") for i in v))
|
| 67 |
+
for k, v in dist_cfg.entry_points.items()
|
| 68 |
+
}
|
| 69 |
+
assert ep_toml == ep_cfg
|
| 70 |
+
|
| 71 |
+
if any(getattr(d, "package_data", None) for d in (dist_toml, dist_cfg)):
|
| 72 |
+
pkg_data_toml = {(k, *sorted(v)) for k, v in dist_toml.package_data.items()}
|
| 73 |
+
pkg_data_cfg = {(k, *sorted(v)) for k, v in dist_cfg.package_data.items()}
|
| 74 |
+
assert pkg_data_toml == pkg_data_cfg
|
| 75 |
+
|
| 76 |
+
if any(getattr(d, "data_files", None) for d in (dist_toml, dist_cfg)):
|
| 77 |
+
data_files_toml = {(k, *sorted(v)) for k, v in dist_toml.data_files}
|
| 78 |
+
data_files_cfg = {(k, *sorted(v)) for k, v in dist_cfg.data_files}
|
| 79 |
+
assert data_files_toml == data_files_cfg
|
| 80 |
+
|
| 81 |
+
assert set(dist_toml.install_requires) == set(dist_cfg.install_requires)
|
| 82 |
+
if any(getattr(d, "extras_require", None) for d in (dist_toml, dist_cfg)):
|
| 83 |
+
extra_req_toml = {(k, *sorted(v)) for k, v in dist_toml.extras_require.items()}
|
| 84 |
+
extra_req_cfg = {(k, *sorted(v)) for k, v in dist_cfg.extras_require.items()}
|
| 85 |
+
assert extra_req_toml == extra_req_cfg
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
PEP621_EXAMPLE = """\
|
| 89 |
+
[project]
|
| 90 |
+
name = "spam"
|
| 91 |
+
version = "2020.0.0"
|
| 92 |
+
description = "Lovely Spam! Wonderful Spam!"
|
| 93 |
+
readme = "README.rst"
|
| 94 |
+
requires-python = ">=3.8"
|
| 95 |
+
license = {file = "LICENSE.txt"}
|
| 96 |
+
keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"]
|
| 97 |
+
authors = [
|
| 98 |
+
{email = "hi@pradyunsg.me"},
|
| 99 |
+
{name = "Tzu-Ping Chung"}
|
| 100 |
+
]
|
| 101 |
+
maintainers = [
|
| 102 |
+
{name = "Brett Cannon", email = "brett@python.org"},
|
| 103 |
+
{name = "John X. Ãørçeč", email = "john@utf8.org"},
|
| 104 |
+
{name = "Γαμα קּ 東", email = "gama@utf8.org"},
|
| 105 |
+
]
|
| 106 |
+
classifiers = [
|
| 107 |
+
"Development Status :: 4 - Beta",
|
| 108 |
+
"Programming Language :: Python"
|
| 109 |
+
]
|
| 110 |
+
|
| 111 |
+
dependencies = [
|
| 112 |
+
"httpx",
|
| 113 |
+
"gidgethub[httpx]>4.0.0",
|
| 114 |
+
"django>2.1; os_name != 'nt'",
|
| 115 |
+
"django>2.0; os_name == 'nt'"
|
| 116 |
+
]
|
| 117 |
+
|
| 118 |
+
[project.optional-dependencies]
|
| 119 |
+
test = [
|
| 120 |
+
"pytest < 5.0.0",
|
| 121 |
+
"pytest-cov[all]"
|
| 122 |
+
]
|
| 123 |
+
|
| 124 |
+
[project.urls]
|
| 125 |
+
homepage = "http://example.com"
|
| 126 |
+
documentation = "http://readthedocs.org"
|
| 127 |
+
repository = "http://github.com"
|
| 128 |
+
changelog = "http://github.com/me/spam/blob/master/CHANGELOG.md"
|
| 129 |
+
|
| 130 |
+
[project.scripts]
|
| 131 |
+
spam-cli = "spam:main_cli"
|
| 132 |
+
|
| 133 |
+
[project.gui-scripts]
|
| 134 |
+
spam-gui = "spam:main_gui"
|
| 135 |
+
|
| 136 |
+
[project.entry-points."spam.magical"]
|
| 137 |
+
tomatoes = "spam:main_tomatoes"
|
| 138 |
+
"""
|
| 139 |
+
|
| 140 |
+
PEP621_INTERNATIONAL_EMAIL_EXAMPLE = """\
|
| 141 |
+
[project]
|
| 142 |
+
name = "spam"
|
| 143 |
+
version = "2020.0.0"
|
| 144 |
+
authors = [
|
| 145 |
+
{email = "hi@pradyunsg.me"},
|
| 146 |
+
{name = "Tzu-Ping Chung"}
|
| 147 |
+
]
|
| 148 |
+
maintainers = [
|
| 149 |
+
{name = "Степан Бандера", email = "криївка@оун-упа.укр"},
|
| 150 |
+
]
|
| 151 |
+
"""
|
| 152 |
+
|
| 153 |
+
PEP621_EXAMPLE_SCRIPT = """
|
| 154 |
+
def main_cli(): pass
|
| 155 |
+
def main_gui(): pass
|
| 156 |
+
def main_tomatoes(): pass
|
| 157 |
+
"""
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def _pep621_example_project(
    tmp_path,
    readme="README.rst",
    pyproject_text=PEP621_EXAMPLE,
):
    """Materialise a PEP 621 example project under *tmp_path*.

    Writes ``pyproject.toml`` (with the readme entry retargeted to *readme*),
    the readme itself, a LICENSE stub, and the ``spam`` module the entry
    points reference. Returns the path to ``pyproject.toml``.
    """
    pyproject = tmp_path / "pyproject.toml"
    # Retarget the example's readme declaration at the requested file name.
    text = pyproject_text.replace('readme = "README.rst"', f'readme = "{readme}"')
    pyproject.write_text(text, encoding="utf-8")

    (tmp_path / readme).write_text("hello world", encoding="utf-8")
    (tmp_path / "LICENSE.txt").write_text("--- LICENSE stub ---", encoding="utf-8")
    (tmp_path / "spam.py").write_text(PEP621_EXAMPLE_SCRIPT, encoding="utf-8")
    return pyproject
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
def test_pep621_example(tmp_path):
    """Make sure the example in PEP 621 works"""
    pyproject = _pep621_example_project(tmp_path)
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    # ``license = {file = ...}`` loads the file's *contents* into the metadata,
    # while the file itself is also registered under ``license_files``.
    assert dist.metadata.license == "--- LICENSE stub ---"
    assert set(dist.metadata.license_files) == {"LICENSE.txt"}
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
@pytest.mark.parametrize(
    ("readme", "ctype"),
    [
        ("Readme.txt", "text/plain"),
        ("readme.md", "text/markdown"),
        ("text.rst", "text/x-rst"),
    ],
)
def test_readme_content_type(tmp_path, readme, ctype):
    """The readme file extension determines ``long_description_content_type``."""
    pyproject = _pep621_example_project(tmp_path, readme)
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    assert dist.metadata.long_description_content_type == ctype
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
def test_undefined_content_type(tmp_path):
    """An unrecognised readme extension (``.tex``) must raise an error."""
    pyproject = _pep621_example_project(tmp_path, "README.tex")
    with pytest.raises(ValueError, match="Undefined content type for README.tex"):
        pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
def test_no_explicit_content_type_for_missing_extension(tmp_path):
    """A readme with no extension leaves the content type unset (not an error)."""
    pyproject = _pep621_example_project(tmp_path, "README")
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    assert dist.metadata.long_description_content_type is None
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
@pytest.mark.parametrize(
    ("pyproject_text", "expected_maintainers_meta_value"),
    (
        pytest.param(
            PEP621_EXAMPLE,
            (
                'Brett Cannon <brett@python.org>, "John X. Ãørçeč" <john@utf8.org>, '
                'Γαμα קּ 東 <gama@utf8.org>'
            ),
            id='non-international-emails',
        ),
        pytest.param(
            PEP621_INTERNATIONAL_EMAIL_EXAMPLE,
            'Степан Бандера <криївка@оун-упа.укр>',
            marks=pytest.mark.xfail(
                reason="CPython's `email.headerregistry.Address` only supports "
                'RFC 5322, as of Nov 10, 2022 and latest Python 3.11.0',
                strict=True,
            ),
            id='international-email',
        ),
    ),
)
def test_utf8_maintainer_in_metadata(  # issue-3663
    expected_maintainers_meta_value,
    pyproject_text,
    tmp_path,
):
    """UTF-8 maintainer names survive both the in-memory metadata and the
    serialized PKG-INFO file."""
    pyproject = _pep621_example_project(
        tmp_path,
        "README",
        pyproject_text=pyproject_text,
    )
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    assert dist.metadata.maintainer_email == expected_maintainers_meta_value
    # Round-trip through ``write_pkg_file`` and check the rendered header.
    pkg_file = tmp_path / "PKG-FILE"
    with open(pkg_file, "w", encoding="utf-8") as fh:
        dist.metadata.write_pkg_file(fh)
    content = pkg_file.read_text(encoding="utf-8")
    assert f"Maintainer-email: {expected_maintainers_meta_value}" in content
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
class TestLicenseFiles:
    """Interaction between ``project.license`` and ``tool.setuptools.license-files``."""

    # TODO: After PEP 639 is accepted, we have to move the license-files
    # to the `project` table instead of `tool.setuptools`

    def base_pyproject(self, tmp_path, additional_text):
        # Start from the PEP 621 example and append extra TOML at the end.
        pyproject = _pep621_example_project(tmp_path, "README")
        text = pyproject.read_text(encoding="utf-8")

        # Sanity-check
        assert 'license = {file = "LICENSE.txt"}' in text
        assert "[tool.setuptools]" not in text

        text = f"{text}\n{additional_text}\n"
        pyproject.write_text(text, encoding="utf-8")
        return pyproject

    def test_both_license_and_license_files_defined(self, tmp_path):
        # Explicit ``license-files`` patterns take precedence over defaults.
        setuptools_config = '[tool.setuptools]\nlicense-files = ["_FILE*"]'
        pyproject = self.base_pyproject(tmp_path, setuptools_config)

        (tmp_path / "_FILE.txt").touch()
        (tmp_path / "_FILE.rst").touch()

        # Would normally match the `license_files` patterns, but we want to exclude it
        # by being explicit. On the other hand, contents should be added to `license`
        license = tmp_path / "LICENSE.txt"
        license.write_text("LicenseRef-Proprietary\n", encoding="utf-8")

        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
        assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"}
        assert dist.metadata.license == "LicenseRef-Proprietary\n"

    def test_default_patterns(self, tmp_path):
        # No explicit ``license-files``: default glob patterns should pick up
        # LICENCE*/COPYING*/AUTHORS*/NOTICE* files plus the base LICENSE.txt.
        setuptools_config = '[tool.setuptools]\nzip-safe = false'
        # ^ used just to trigger section validation
        pyproject = self.base_pyproject(tmp_path, setuptools_config)

        license_files = "LICENCE-a.html COPYING-abc.txt AUTHORS-xyz NOTICE,def".split()

        for fname in license_files:
            (tmp_path / fname).write_text(f"{fname}\n", encoding="utf-8")

        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
        assert (tmp_path / "LICENSE.txt").exists()  # from base example
        assert set(dist.metadata.license_files) == {*license_files, "LICENSE.txt"}
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
class TestPyModules:
    """Validation of ``tool.setuptools.py-modules`` names."""

    # https://github.com/pypa/setuptools/issues/4316

    def dist(self, name):
        # Build a minimal distribution whose config declares *name* as a
        # py-module, writing pyproject.toml into the current directory.
        toml_config = f"""
        [project]
        name = "test"
        version = "42.0"
        [tool.setuptools]
        py-modules = [{name!r}]
        """
        pyproject = Path("pyproject.toml")
        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
        return pyprojecttoml.apply_configuration(Distribution({}), pyproject)

    @pytest.mark.parametrize("module", ["pip-run", "abc-d.λ-xyz-e"])
    def test_valid_module_name(self, tmp_path, monkeypatch, module):
        # Dashes and non-ASCII identifier characters are accepted.
        monkeypatch.chdir(tmp_path)
        assert module in self.dist(module).py_modules

    @pytest.mark.parametrize("module", ["pip run", "-pip-run", "pip-run-stubs"])
    def test_invalid_module_name(self, tmp_path, monkeypatch, module):
        # Spaces, leading dashes, and ``-stubs`` suffixes are rejected.
        monkeypatch.chdir(tmp_path)
        with pytest.raises(ValueError, match="py-modules"):
            self.dist(module).py_modules
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
class TestExtModules:
    """``tool.setuptools.ext-modules`` (experimental) configuration."""

    def test_pyproject_sets_attribute(self, tmp_path, monkeypatch):
        monkeypatch.chdir(tmp_path)
        pyproject = Path("pyproject.toml")
        toml_config = """
        [project]
        name = "test"
        version = "42.0"
        [tool.setuptools]
        ext-modules = [
          {name = "my.ext", sources = ["hello.c", "world.c"]}
        ]
        """
        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
        # The feature is experimental, so applying it must emit a warning.
        with pytest.warns(pyprojecttoml._ExperimentalConfiguration):
            dist = pyprojecttoml.apply_configuration(Distribution({}), pyproject)
        assert len(dist.ext_modules) == 1
        assert dist.ext_modules[0].name == "my.ext"
        assert set(dist.ext_modules[0].sources) == {"hello.c", "world.c"}
|
| 347 |
+
|
| 348 |
+
|
| 349 |
+
class TestDeprecatedFields:
    """Configuration keys that have been removed must raise, not silently pass."""

    def test_namespace_packages(self, tmp_path):
        pyproject = tmp_path / "pyproject.toml"
        config = """
        [project]
        name = "myproj"
        version = "42"
        [tool.setuptools]
        namespace-packages = ["myproj.pkg"]
        """
        pyproject.write_text(cleandoc(config), encoding="utf-8")
        with pytest.raises(RemovedConfigError, match="namespace-packages"):
            pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
class TestPresetField:
    """Interplay between values pre-set on the ``Distribution`` (e.g. via
    ``setup.py``) and the ``dynamic`` list in ``pyproject.toml``."""

    def pyproject(self, tmp_path, dynamic, extra_content=""):
        # Write a minimal [project] table with the given ``dynamic`` entries;
        # a static version is added unless "version" itself is dynamic.
        content = f"[project]\nname = 'proj'\ndynamic = {dynamic!r}\n"
        if "version" not in dynamic:
            content += "version = '42'\n"
        file = tmp_path / "pyproject.toml"
        file.write_text(content + extra_content, encoding="utf-8")
        return file

    @pytest.mark.parametrize(
        ("attr", "field", "value"),
        [
            ("classifiers", "classifiers", ["Private :: Classifier"]),
            ("entry_points", "scripts", {"console_scripts": ["foobar=foobar:main"]}),
            ("entry_points", "gui-scripts", {"gui_scripts": ["bazquux=bazquux:main"]}),
            pytest.param(
                *("install_requires", "dependencies", ["six"]),
                marks=[
                    pytest.mark.filterwarnings("ignore:.*install_requires. overwritten")
                ],
            ),
        ],
    )
    def test_not_listed_in_dynamic(self, tmp_path, attr, field, value):
        """Setuptools cannot set a field if not listed in ``dynamic``"""
        pyproject = self.pyproject(tmp_path, [])
        dist = makedist(tmp_path, **{attr: value})
        msg = re.compile(f"defined outside of `pyproject.toml`:.*{field}", re.S)
        # A warning is emitted and the pre-set value is discarded.
        with pytest.warns(_MissingDynamic, match=msg):
            dist = pyprojecttoml.apply_configuration(dist, pyproject)

        dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist)
        assert not dist_value

    @pytest.mark.parametrize(
        ("attr", "field", "value"),
        [
            ("install_requires", "dependencies", []),
            ("extras_require", "optional-dependencies", {}),
            ("install_requires", "dependencies", ["six"]),
            ("classifiers", "classifiers", ["Private :: Classifier"]),
        ],
    )
    def test_listed_in_dynamic(self, tmp_path, attr, field, value):
        # When the field *is* declared dynamic, the pre-set value survives.
        pyproject = self.pyproject(tmp_path, [field])
        dist = makedist(tmp_path, **{attr: value})
        dist = pyprojecttoml.apply_configuration(dist, pyproject)
        dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist)
        assert dist_value == value

    def test_warning_overwritten_dependencies(self, tmp_path):
        # Static [project].dependencies replace any pre-set install_requires,
        # with a warning.
        src = "[project]\nname='pkg'\nversion='0.1'\ndependencies=['click']\n"
        pyproject = tmp_path / "pyproject.toml"
        pyproject.write_text(src, encoding="utf-8")
        dist = makedist(tmp_path, install_requires=["wheel"])
        with pytest.warns(match="`install_requires` overwritten"):
            dist = pyprojecttoml.apply_configuration(dist, pyproject)
        assert "wheel" not in dist.install_requires

    def test_optional_dependencies_dont_remove_env_markers(self, tmp_path):
        """
        Internally setuptools converts dependencies with markers to "extras".
        If ``install_requires`` is given by ``setup.py``, we have to ensure that
        applying ``optional-dependencies`` does not overwrite the mandatory
        dependencies with markers (see #3204).
        """
        # If setuptools replace its internal mechanism that uses `requires.txt`
        # this test has to be rewritten to adapt accordingly
        extra = "\n[project.optional-dependencies]\nfoo = ['bar>1']\n"
        pyproject = self.pyproject(tmp_path, ["dependencies"], extra)
        install_req = ['importlib-resources (>=3.0.0) ; python_version < "3.7"']
        dist = makedist(tmp_path, install_requires=install_req)
        dist = pyprojecttoml.apply_configuration(dist, pyproject)
        assert "foo" in dist.extras_require
        egg_info = dist.get_command_obj("egg_info")
        write_requirements(egg_info, tmp_path, tmp_path / "requires.txt")
        reqs = (tmp_path / "requires.txt").read_text(encoding="utf-8")
        assert "importlib-resources" in reqs
        assert "bar" in reqs
        assert ':python_version < "3.7"' in reqs

    @pytest.mark.parametrize(
        ("field", "group"),
        [("scripts", "console_scripts"), ("gui-scripts", "gui_scripts")],
    )
    @pytest.mark.filterwarnings("error")
    def test_scripts_dont_require_dynamic_entry_points(self, tmp_path, field, group):
        # Issue 3862
        pyproject = self.pyproject(tmp_path, [field])
        dist = makedist(tmp_path, entry_points={group: ["foobar=foobar:main"]})
        dist = pyprojecttoml.apply_configuration(dist, pyproject)
        assert group in dist.entry_points
|
| 456 |
+
|
| 457 |
+
|
| 458 |
+
class TestMeta:
    def test_example_file_in_sdist(self, setuptools_sdist):
        """Meta test to ensure tests can run from sdist"""
        # The examples list must be shipped inside the sdist tarball.
        with tarfile.open(setuptools_sdist) as tar:
            assert any(name.endswith(EXAMPLES_FILE) for name in tar.getnames())
|
| 463 |
+
|
| 464 |
+
|
| 465 |
+
class TestInteropCommandLineParsing:
    """pyproject.toml configuration must coexist with ``setup.py`` CLI parsing."""

    def test_version(self, tmp_path, monkeypatch, capsys):
        # See pypa/setuptools#4047
        # This test can be removed once the CLI interface of setup.py is removed
        monkeypatch.chdir(tmp_path)
        toml_config = """
        [project]
        name = "test"
        version = "42.0"
        """
        pyproject = Path(tmp_path, "pyproject.toml")
        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
        opts = {"script_args": ["--version"]}
        dist = pyprojecttoml.apply_configuration(Distribution(opts), pyproject)
        dist.parse_command_line()  # <-- there should be no exception here.
        captured = capsys.readouterr()
        assert "42.0" in captured.out
|
| 482 |
+
|
| 483 |
+
|
| 484 |
+
class TestStaticConfig:
|
| 485 |
+
def test_mark_static_fields(self, tmp_path, monkeypatch):
|
| 486 |
+
monkeypatch.chdir(tmp_path)
|
| 487 |
+
toml_config = """
|
| 488 |
+
[project]
|
| 489 |
+
name = "test"
|
| 490 |
+
version = "42.0"
|
| 491 |
+
dependencies = ["hello"]
|
| 492 |
+
keywords = ["world"]
|
| 493 |
+
classifiers = ["private :: hello world"]
|
| 494 |
+
[tool.setuptools]
|
| 495 |
+
obsoletes = ["abcd"]
|
| 496 |
+
provides = ["abcd"]
|
| 497 |
+
platforms = ["abcd"]
|
| 498 |
+
"""
|
| 499 |
+
pyproject = Path(tmp_path, "pyproject.toml")
|
| 500 |
+
pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
|
| 501 |
+
dist = pyprojecttoml.apply_configuration(Distribution({}), pyproject)
|
| 502 |
+
assert is_static(dist.install_requires)
|
| 503 |
+
assert is_static(dist.metadata.keywords)
|
| 504 |
+
assert is_static(dist.metadata.classifiers)
|
| 505 |
+
assert is_static(dist.metadata.obsoletes)
|
| 506 |
+
assert is_static(dist.metadata.provides)
|
| 507 |
+
assert is_static(dist.metadata.platforms)
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
# --- Auxiliary Functions ---
|
| 511 |
+
|
| 512 |
+
|
| 513 |
+
def core_metadata(dist) -> str:
|
| 514 |
+
with io.StringIO() as buffer:
|
| 515 |
+
dist.metadata.write_pkg_file(buffer)
|
| 516 |
+
pkg_file_txt = buffer.getvalue()
|
| 517 |
+
|
| 518 |
+
# Make sure core metadata is valid
|
| 519 |
+
Metadata.from_email(pkg_file_txt, validate=True) # can raise exceptions
|
| 520 |
+
|
| 521 |
+
skip_prefixes: tuple[str, ...] = ()
|
| 522 |
+
skip_lines = set()
|
| 523 |
+
# ---- DIFF NORMALISATION ----
|
| 524 |
+
# PEP 621 is very particular about author/maintainer metadata conversion, so skip
|
| 525 |
+
skip_prefixes += ("Author:", "Author-email:", "Maintainer:", "Maintainer-email:")
|
| 526 |
+
# May be redundant with Home-page
|
| 527 |
+
skip_prefixes += ("Project-URL: Homepage,", "Home-page:")
|
| 528 |
+
# May be missing in original (relying on default) but backfilled in the TOML
|
| 529 |
+
skip_prefixes += ("Description-Content-Type:",)
|
| 530 |
+
# Remove empty lines
|
| 531 |
+
skip_lines.add("")
|
| 532 |
+
|
| 533 |
+
result = []
|
| 534 |
+
for line in pkg_file_txt.splitlines():
|
| 535 |
+
if line.startswith(skip_prefixes) or line in skip_lines:
|
| 536 |
+
continue
|
| 537 |
+
result.append(line + "\n")
|
| 538 |
+
|
| 539 |
+
return "".join(result)
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/test_expand.py
ADDED
|
@@ -0,0 +1,247 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
from pathlib import Path
|
| 4 |
+
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
from setuptools._static import is_static
|
| 8 |
+
from setuptools.config import expand
|
| 9 |
+
from setuptools.discovery import find_package_path
|
| 10 |
+
|
| 11 |
+
from distutils.errors import DistutilsOptionError
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def write_files(files, root_dir):
|
| 15 |
+
for file, content in files.items():
|
| 16 |
+
path = root_dir / file
|
| 17 |
+
path.parent.mkdir(exist_ok=True, parents=True)
|
| 18 |
+
path.write_text(content, encoding="utf-8")
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def test_glob_relative(tmp_path, monkeypatch):
|
| 22 |
+
files = {
|
| 23 |
+
"dir1/dir2/dir3/file1.txt",
|
| 24 |
+
"dir1/dir2/file2.txt",
|
| 25 |
+
"dir1/file3.txt",
|
| 26 |
+
"a.ini",
|
| 27 |
+
"b.ini",
|
| 28 |
+
"dir1/c.ini",
|
| 29 |
+
"dir1/dir2/a.ini",
|
| 30 |
+
}
|
| 31 |
+
|
| 32 |
+
write_files({k: "" for k in files}, tmp_path)
|
| 33 |
+
patterns = ["**/*.txt", "[ab].*", "**/[ac].ini"]
|
| 34 |
+
monkeypatch.chdir(tmp_path)
|
| 35 |
+
assert set(expand.glob_relative(patterns)) == files
|
| 36 |
+
# Make sure the same APIs work outside cwd
|
| 37 |
+
assert set(expand.glob_relative(patterns, tmp_path)) == files
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def test_read_files(tmp_path, monkeypatch):
|
| 41 |
+
dir_ = tmp_path / "dir_"
|
| 42 |
+
(tmp_path / "_dir").mkdir(exist_ok=True)
|
| 43 |
+
(tmp_path / "a.txt").touch()
|
| 44 |
+
files = {"a.txt": "a", "dir1/b.txt": "b", "dir1/dir2/c.txt": "c"}
|
| 45 |
+
write_files(files, dir_)
|
| 46 |
+
|
| 47 |
+
secrets = Path(str(dir_) + "secrets")
|
| 48 |
+
secrets.mkdir(exist_ok=True)
|
| 49 |
+
write_files({"secrets.txt": "secret keys"}, secrets)
|
| 50 |
+
|
| 51 |
+
with monkeypatch.context() as m:
|
| 52 |
+
m.chdir(dir_)
|
| 53 |
+
assert expand.read_files(list(files)) == "a\nb\nc"
|
| 54 |
+
|
| 55 |
+
cannot_access_msg = r"Cannot access '.*\.\..a\.txt'"
|
| 56 |
+
with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
|
| 57 |
+
expand.read_files(["../a.txt"])
|
| 58 |
+
|
| 59 |
+
cannot_access_secrets_msg = r"Cannot access '.*secrets\.txt'"
|
| 60 |
+
with pytest.raises(DistutilsOptionError, match=cannot_access_secrets_msg):
|
| 61 |
+
expand.read_files(["../dir_secrets/secrets.txt"])
|
| 62 |
+
|
| 63 |
+
# Make sure the same APIs work outside cwd
|
| 64 |
+
assert expand.read_files(list(files), dir_) == "a\nb\nc"
|
| 65 |
+
with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
|
| 66 |
+
expand.read_files(["../a.txt"], dir_)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class TestReadAttr:
|
| 70 |
+
@pytest.mark.parametrize(
|
| 71 |
+
"example",
|
| 72 |
+
[
|
| 73 |
+
# No cookie means UTF-8:
|
| 74 |
+
b"__version__ = '\xc3\xa9'\nraise SystemExit(1)\n",
|
| 75 |
+
# If a cookie is present, honor it:
|
| 76 |
+
b"# -*- coding: utf-8 -*-\n__version__ = '\xc3\xa9'\nraise SystemExit(1)\n",
|
| 77 |
+
b"# -*- coding: latin1 -*-\n__version__ = '\xe9'\nraise SystemExit(1)\n",
|
| 78 |
+
],
|
| 79 |
+
)
|
| 80 |
+
def test_read_attr_encoding_cookie(self, example, tmp_path):
|
| 81 |
+
(tmp_path / "mod.py").write_bytes(example)
|
| 82 |
+
assert expand.read_attr('mod.__version__', root_dir=tmp_path) == 'é'
|
| 83 |
+
|
| 84 |
+
def test_read_attr(self, tmp_path, monkeypatch):
|
| 85 |
+
files = {
|
| 86 |
+
"pkg/__init__.py": "",
|
| 87 |
+
"pkg/sub/__init__.py": "VERSION = '0.1.1'",
|
| 88 |
+
"pkg/sub/mod.py": (
|
| 89 |
+
"VALUES = {'a': 0, 'b': {42}, 'c': (0, 1, 1)}\nraise SystemExit(1)"
|
| 90 |
+
),
|
| 91 |
+
}
|
| 92 |
+
write_files(files, tmp_path)
|
| 93 |
+
|
| 94 |
+
with monkeypatch.context() as m:
|
| 95 |
+
m.chdir(tmp_path)
|
| 96 |
+
# Make sure it can read the attr statically without evaluating the module
|
| 97 |
+
version = expand.read_attr('pkg.sub.VERSION')
|
| 98 |
+
values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'})
|
| 99 |
+
|
| 100 |
+
assert version == '0.1.1'
|
| 101 |
+
assert is_static(values)
|
| 102 |
+
|
| 103 |
+
assert values['a'] == 0
|
| 104 |
+
assert values['b'] == {42}
|
| 105 |
+
assert is_static(values)
|
| 106 |
+
|
| 107 |
+
# Make sure the same APIs work outside cwd
|
| 108 |
+
assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1'
|
| 109 |
+
values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}, tmp_path)
|
| 110 |
+
assert values['c'] == (0, 1, 1)
|
| 111 |
+
|
| 112 |
+
@pytest.mark.parametrize(
|
| 113 |
+
"example",
|
| 114 |
+
[
|
| 115 |
+
"VERSION: str\nVERSION = '0.1.1'\nraise SystemExit(1)\n",
|
| 116 |
+
"VERSION: str = '0.1.1'\nraise SystemExit(1)\n",
|
| 117 |
+
],
|
| 118 |
+
)
|
| 119 |
+
def test_read_annotated_attr(self, tmp_path, example):
|
| 120 |
+
files = {
|
| 121 |
+
"pkg/__init__.py": "",
|
| 122 |
+
"pkg/sub/__init__.py": example,
|
| 123 |
+
}
|
| 124 |
+
write_files(files, tmp_path)
|
| 125 |
+
# Make sure this attribute can be read statically
|
| 126 |
+
version = expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path)
|
| 127 |
+
assert version == '0.1.1'
|
| 128 |
+
assert is_static(version)
|
| 129 |
+
|
| 130 |
+
@pytest.mark.parametrize(
|
| 131 |
+
"example",
|
| 132 |
+
[
|
| 133 |
+
"VERSION = (lambda: '0.1.1')()\n",
|
| 134 |
+
"def fn(): return '0.1.1'\nVERSION = fn()\n",
|
| 135 |
+
"VERSION: str = (lambda: '0.1.1')()\n",
|
| 136 |
+
],
|
| 137 |
+
)
|
| 138 |
+
def test_read_dynamic_attr(self, tmp_path, monkeypatch, example):
|
| 139 |
+
files = {
|
| 140 |
+
"pkg/__init__.py": "",
|
| 141 |
+
"pkg/sub/__init__.py": example,
|
| 142 |
+
}
|
| 143 |
+
write_files(files, tmp_path)
|
| 144 |
+
monkeypatch.chdir(tmp_path)
|
| 145 |
+
version = expand.read_attr('pkg.sub.VERSION')
|
| 146 |
+
assert version == '0.1.1'
|
| 147 |
+
assert not is_static(version)
|
| 148 |
+
|
| 149 |
+
def test_import_order(self, tmp_path):
|
| 150 |
+
"""
|
| 151 |
+
Sometimes the import machinery will import the parent package of a nested
|
| 152 |
+
module, which triggers side-effects and might create problems (see issue #3176)
|
| 153 |
+
|
| 154 |
+
``read_attr`` should bypass these limitations by resolving modules statically
|
| 155 |
+
(via ast.literal_eval).
|
| 156 |
+
"""
|
| 157 |
+
files = {
|
| 158 |
+
"src/pkg/__init__.py": "from .main import func\nfrom .about import version",
|
| 159 |
+
"src/pkg/main.py": "import super_complicated_dep\ndef func(): return 42",
|
| 160 |
+
"src/pkg/about.py": "version = '42'",
|
| 161 |
+
}
|
| 162 |
+
write_files(files, tmp_path)
|
| 163 |
+
attr_desc = "pkg.about.version"
|
| 164 |
+
package_dir = {"": "src"}
|
| 165 |
+
# `import super_complicated_dep` should not run, otherwise the build fails
|
| 166 |
+
assert expand.read_attr(attr_desc, package_dir, tmp_path) == "42"
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
@pytest.mark.parametrize(
|
| 170 |
+
("package_dir", "file", "module", "return_value"),
|
| 171 |
+
[
|
| 172 |
+
({"": "src"}, "src/pkg/main.py", "pkg.main", 42),
|
| 173 |
+
({"pkg": "lib"}, "lib/main.py", "pkg.main", 13),
|
| 174 |
+
({}, "single_module.py", "single_module", 70),
|
| 175 |
+
({}, "flat_layout/pkg.py", "flat_layout.pkg", 836),
|
| 176 |
+
],
|
| 177 |
+
)
|
| 178 |
+
def test_resolve_class(monkeypatch, tmp_path, package_dir, file, module, return_value):
|
| 179 |
+
monkeypatch.setattr(sys, "modules", {}) # reproducibility
|
| 180 |
+
files = {file: f"class Custom:\n def testing(self): return {return_value}"}
|
| 181 |
+
write_files(files, tmp_path)
|
| 182 |
+
cls = expand.resolve_class(f"{module}.Custom", package_dir, tmp_path)
|
| 183 |
+
assert cls().testing() == return_value
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
@pytest.mark.parametrize(
|
| 187 |
+
("args", "pkgs"),
|
| 188 |
+
[
|
| 189 |
+
({"where": ["."], "namespaces": False}, {"pkg", "other"}),
|
| 190 |
+
({"where": [".", "dir1"], "namespaces": False}, {"pkg", "other", "dir2"}),
|
| 191 |
+
({"namespaces": True}, {"pkg", "other", "dir1", "dir1.dir2"}),
|
| 192 |
+
({}, {"pkg", "other", "dir1", "dir1.dir2"}), # default value for `namespaces`
|
| 193 |
+
],
|
| 194 |
+
)
|
| 195 |
+
def test_find_packages(tmp_path, args, pkgs):
|
| 196 |
+
files = {
|
| 197 |
+
"pkg/__init__.py",
|
| 198 |
+
"other/__init__.py",
|
| 199 |
+
"dir1/dir2/__init__.py",
|
| 200 |
+
}
|
| 201 |
+
write_files({k: "" for k in files}, tmp_path)
|
| 202 |
+
|
| 203 |
+
package_dir = {}
|
| 204 |
+
kwargs = {"root_dir": tmp_path, "fill_package_dir": package_dir, **args}
|
| 205 |
+
where = kwargs.get("where", ["."])
|
| 206 |
+
assert set(expand.find_packages(**kwargs)) == pkgs
|
| 207 |
+
for pkg in pkgs:
|
| 208 |
+
pkg_path = find_package_path(pkg, package_dir, tmp_path)
|
| 209 |
+
assert os.path.exists(pkg_path)
|
| 210 |
+
|
| 211 |
+
# Make sure the same APIs work outside cwd
|
| 212 |
+
where = [
|
| 213 |
+
str((tmp_path / p).resolve()).replace(os.sep, "/") # ensure posix-style paths
|
| 214 |
+
for p in args.pop("where", ["."])
|
| 215 |
+
]
|
| 216 |
+
|
| 217 |
+
assert set(expand.find_packages(where=where, **args)) == pkgs
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
@pytest.mark.parametrize(
|
| 221 |
+
("files", "where", "expected_package_dir"),
|
| 222 |
+
[
|
| 223 |
+
(["pkg1/__init__.py", "pkg1/other.py"], ["."], {}),
|
| 224 |
+
(["pkg1/__init__.py", "pkg2/__init__.py"], ["."], {}),
|
| 225 |
+
(["src/pkg1/__init__.py", "src/pkg1/other.py"], ["src"], {"": "src"}),
|
| 226 |
+
(["src/pkg1/__init__.py", "src/pkg2/__init__.py"], ["src"], {"": "src"}),
|
| 227 |
+
(
|
| 228 |
+
["src1/pkg1/__init__.py", "src2/pkg2/__init__.py"],
|
| 229 |
+
["src1", "src2"],
|
| 230 |
+
{"pkg1": "src1/pkg1", "pkg2": "src2/pkg2"},
|
| 231 |
+
),
|
| 232 |
+
(
|
| 233 |
+
["src/pkg1/__init__.py", "pkg2/__init__.py"],
|
| 234 |
+
["src", "."],
|
| 235 |
+
{"pkg1": "src/pkg1"},
|
| 236 |
+
),
|
| 237 |
+
],
|
| 238 |
+
)
|
| 239 |
+
def test_fill_package_dir(tmp_path, files, where, expected_package_dir):
|
| 240 |
+
write_files({k: "" for k in files}, tmp_path)
|
| 241 |
+
pkg_dir = {}
|
| 242 |
+
kwargs = {"root_dir": tmp_path, "fill_package_dir": pkg_dir, "namespaces": False}
|
| 243 |
+
pkgs = expand.find_packages(where=where, **kwargs)
|
| 244 |
+
assert set(pkg_dir.items()) == set(expected_package_dir.items())
|
| 245 |
+
for pkg in pkgs:
|
| 246 |
+
pkg_path = find_package_path(pkg, pkg_dir, tmp_path)
|
| 247 |
+
assert os.path.exists(pkg_path)
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/test_pyprojecttoml.py
ADDED
|
@@ -0,0 +1,396 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
from configparser import ConfigParser
|
| 3 |
+
from inspect import cleandoc
|
| 4 |
+
|
| 5 |
+
import jaraco.path
|
| 6 |
+
import pytest
|
| 7 |
+
import tomli_w
|
| 8 |
+
from path import Path
|
| 9 |
+
|
| 10 |
+
import setuptools # noqa: F401 # force distutils.core to be patched
|
| 11 |
+
from setuptools.config.pyprojecttoml import (
|
| 12 |
+
_ToolsTypoInMetadata,
|
| 13 |
+
apply_configuration,
|
| 14 |
+
expand_configuration,
|
| 15 |
+
read_configuration,
|
| 16 |
+
validate,
|
| 17 |
+
)
|
| 18 |
+
from setuptools.dist import Distribution
|
| 19 |
+
from setuptools.errors import OptionError
|
| 20 |
+
|
| 21 |
+
import distutils.core
|
| 22 |
+
|
| 23 |
+
EXAMPLE = """
|
| 24 |
+
[project]
|
| 25 |
+
name = "myproj"
|
| 26 |
+
keywords = ["some", "key", "words"]
|
| 27 |
+
dynamic = ["version", "readme"]
|
| 28 |
+
requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
| 29 |
+
dependencies = [
|
| 30 |
+
'importlib-metadata>=0.12;python_version<"3.8"',
|
| 31 |
+
'importlib-resources>=1.0;python_version<"3.7"',
|
| 32 |
+
'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"',
|
| 33 |
+
]
|
| 34 |
+
|
| 35 |
+
[project.optional-dependencies]
|
| 36 |
+
docs = [
|
| 37 |
+
"sphinx>=3",
|
| 38 |
+
"sphinx-argparse>=0.2.5",
|
| 39 |
+
"sphinx-rtd-theme>=0.4.3",
|
| 40 |
+
]
|
| 41 |
+
testing = [
|
| 42 |
+
"pytest>=1",
|
| 43 |
+
"coverage>=3,<5",
|
| 44 |
+
]
|
| 45 |
+
|
| 46 |
+
[project.scripts]
|
| 47 |
+
exec = "pkg.__main__:exec"
|
| 48 |
+
|
| 49 |
+
[build-system]
|
| 50 |
+
requires = ["setuptools", "wheel"]
|
| 51 |
+
build-backend = "setuptools.build_meta"
|
| 52 |
+
|
| 53 |
+
[tool.setuptools]
|
| 54 |
+
package-dir = {"" = "src"}
|
| 55 |
+
zip-safe = true
|
| 56 |
+
platforms = ["any"]
|
| 57 |
+
|
| 58 |
+
[tool.setuptools.packages.find]
|
| 59 |
+
where = ["src"]
|
| 60 |
+
|
| 61 |
+
[tool.setuptools.cmdclass]
|
| 62 |
+
sdist = "pkg.mod.CustomSdist"
|
| 63 |
+
|
| 64 |
+
[tool.setuptools.dynamic.version]
|
| 65 |
+
attr = "pkg.__version__.VERSION"
|
| 66 |
+
|
| 67 |
+
[tool.setuptools.dynamic.readme]
|
| 68 |
+
file = ["README.md"]
|
| 69 |
+
content-type = "text/markdown"
|
| 70 |
+
|
| 71 |
+
[tool.setuptools.package-data]
|
| 72 |
+
"*" = ["*.txt"]
|
| 73 |
+
|
| 74 |
+
[tool.setuptools.data-files]
|
| 75 |
+
"data" = ["_files/*.txt"]
|
| 76 |
+
|
| 77 |
+
[tool.distutils.sdist]
|
| 78 |
+
formats = "gztar"
|
| 79 |
+
|
| 80 |
+
[tool.distutils.bdist_wheel]
|
| 81 |
+
universal = true
|
| 82 |
+
"""
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def create_example(path, pkg_root):
|
| 86 |
+
files = {
|
| 87 |
+
"pyproject.toml": EXAMPLE,
|
| 88 |
+
"README.md": "hello world",
|
| 89 |
+
"_files": {
|
| 90 |
+
"file.txt": "",
|
| 91 |
+
},
|
| 92 |
+
}
|
| 93 |
+
packages = {
|
| 94 |
+
"pkg": {
|
| 95 |
+
"__init__.py": "",
|
| 96 |
+
"mod.py": "class CustomSdist: pass",
|
| 97 |
+
"__version__.py": "VERSION = (3, 10)",
|
| 98 |
+
"__main__.py": "def exec(): print('hello')",
|
| 99 |
+
},
|
| 100 |
+
}
|
| 101 |
+
|
| 102 |
+
assert pkg_root # Meta-test: cannot be empty string.
|
| 103 |
+
|
| 104 |
+
if pkg_root == ".":
|
| 105 |
+
files = {**files, **packages}
|
| 106 |
+
# skip other files: flat-layout will raise error for multi-package dist
|
| 107 |
+
else:
|
| 108 |
+
# Use this opportunity to ensure namespaces are discovered
|
| 109 |
+
files[pkg_root] = {**packages, "other": {"nested": {"__init__.py": ""}}}
|
| 110 |
+
|
| 111 |
+
jaraco.path.build(files, prefix=path)
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def verify_example(config, path, pkg_root):
|
| 115 |
+
pyproject = path / "pyproject.toml"
|
| 116 |
+
pyproject.write_text(tomli_w.dumps(config), encoding="utf-8")
|
| 117 |
+
expanded = expand_configuration(config, path)
|
| 118 |
+
expanded_project = expanded["project"]
|
| 119 |
+
assert read_configuration(pyproject, expand=True) == expanded
|
| 120 |
+
assert expanded_project["version"] == "3.10"
|
| 121 |
+
assert expanded_project["readme"]["text"] == "hello world"
|
| 122 |
+
assert "packages" in expanded["tool"]["setuptools"]
|
| 123 |
+
if pkg_root == ".":
|
| 124 |
+
# Auto-discovery will raise error for multi-package dist
|
| 125 |
+
assert set(expanded["tool"]["setuptools"]["packages"]) == {"pkg"}
|
| 126 |
+
else:
|
| 127 |
+
assert set(expanded["tool"]["setuptools"]["packages"]) == {
|
| 128 |
+
"pkg",
|
| 129 |
+
"other",
|
| 130 |
+
"other.nested",
|
| 131 |
+
}
|
| 132 |
+
assert expanded["tool"]["setuptools"]["include-package-data"] is True
|
| 133 |
+
assert "" in expanded["tool"]["setuptools"]["package-data"]
|
| 134 |
+
assert "*" not in expanded["tool"]["setuptools"]["package-data"]
|
| 135 |
+
assert expanded["tool"]["setuptools"]["data-files"] == [
|
| 136 |
+
("data", ["_files/file.txt"])
|
| 137 |
+
]
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def test_read_configuration(tmp_path):
|
| 141 |
+
create_example(tmp_path, "src")
|
| 142 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 143 |
+
|
| 144 |
+
config = read_configuration(pyproject, expand=False)
|
| 145 |
+
assert config["project"].get("version") is None
|
| 146 |
+
assert config["project"].get("readme") is None
|
| 147 |
+
|
| 148 |
+
verify_example(config, tmp_path, "src")
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
@pytest.mark.parametrize(
|
| 152 |
+
("pkg_root", "opts"),
|
| 153 |
+
[
|
| 154 |
+
(".", {}),
|
| 155 |
+
("src", {}),
|
| 156 |
+
("lib", {"packages": {"find": {"where": ["lib"]}}}),
|
| 157 |
+
],
|
| 158 |
+
)
|
| 159 |
+
def test_discovered_package_dir_with_attr_directive_in_config(tmp_path, pkg_root, opts):
|
| 160 |
+
create_example(tmp_path, pkg_root)
|
| 161 |
+
|
| 162 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 163 |
+
|
| 164 |
+
config = read_configuration(pyproject, expand=False)
|
| 165 |
+
assert config["project"].get("version") is None
|
| 166 |
+
assert config["project"].get("readme") is None
|
| 167 |
+
config["tool"]["setuptools"].pop("packages", None)
|
| 168 |
+
config["tool"]["setuptools"].pop("package-dir", None)
|
| 169 |
+
|
| 170 |
+
config["tool"]["setuptools"].update(opts)
|
| 171 |
+
verify_example(config, tmp_path, pkg_root)
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
ENTRY_POINTS = {
|
| 175 |
+
"console_scripts": {"a": "mod.a:func"},
|
| 176 |
+
"gui_scripts": {"b": "mod.b:func"},
|
| 177 |
+
"other": {"c": "mod.c:func [extra]"},
|
| 178 |
+
}
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
class TestEntryPoints:
|
| 182 |
+
def write_entry_points(self, tmp_path):
|
| 183 |
+
entry_points = ConfigParser()
|
| 184 |
+
entry_points.read_dict(ENTRY_POINTS)
|
| 185 |
+
with open(tmp_path / "entry-points.txt", "w", encoding="utf-8") as f:
|
| 186 |
+
entry_points.write(f)
|
| 187 |
+
|
| 188 |
+
def pyproject(self, dynamic=None):
|
| 189 |
+
project = {"dynamic": dynamic or ["scripts", "gui-scripts", "entry-points"]}
|
| 190 |
+
tool = {"dynamic": {"entry-points": {"file": "entry-points.txt"}}}
|
| 191 |
+
return {"project": project, "tool": {"setuptools": tool}}
|
| 192 |
+
|
| 193 |
+
def test_all_listed_in_dynamic(self, tmp_path):
|
| 194 |
+
self.write_entry_points(tmp_path)
|
| 195 |
+
expanded = expand_configuration(self.pyproject(), tmp_path)
|
| 196 |
+
expanded_project = expanded["project"]
|
| 197 |
+
assert len(expanded_project["scripts"]) == 1
|
| 198 |
+
assert expanded_project["scripts"]["a"] == "mod.a:func"
|
| 199 |
+
assert len(expanded_project["gui-scripts"]) == 1
|
| 200 |
+
assert expanded_project["gui-scripts"]["b"] == "mod.b:func"
|
| 201 |
+
assert len(expanded_project["entry-points"]) == 1
|
| 202 |
+
assert expanded_project["entry-points"]["other"]["c"] == "mod.c:func [extra]"
|
| 203 |
+
|
| 204 |
+
@pytest.mark.parametrize("missing_dynamic", ("scripts", "gui-scripts"))
|
| 205 |
+
def test_scripts_not_listed_in_dynamic(self, tmp_path, missing_dynamic):
|
| 206 |
+
self.write_entry_points(tmp_path)
|
| 207 |
+
dynamic = {"scripts", "gui-scripts", "entry-points"} - {missing_dynamic}
|
| 208 |
+
|
| 209 |
+
msg = f"defined outside of `pyproject.toml`:.*{missing_dynamic}"
|
| 210 |
+
with pytest.raises(OptionError, match=re.compile(msg, re.S)):
|
| 211 |
+
expand_configuration(self.pyproject(dynamic), tmp_path)
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
class TestClassifiers:
|
| 215 |
+
def test_dynamic(self, tmp_path):
|
| 216 |
+
# Let's create a project example that has dynamic classifiers
|
| 217 |
+
# coming from a txt file.
|
| 218 |
+
create_example(tmp_path, "src")
|
| 219 |
+
classifiers = cleandoc(
|
| 220 |
+
"""
|
| 221 |
+
Framework :: Flask
|
| 222 |
+
Programming Language :: Haskell
|
| 223 |
+
"""
|
| 224 |
+
)
|
| 225 |
+
(tmp_path / "classifiers.txt").write_text(classifiers, encoding="utf-8")
|
| 226 |
+
|
| 227 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 228 |
+
config = read_configuration(pyproject, expand=False)
|
| 229 |
+
dynamic = config["project"]["dynamic"]
|
| 230 |
+
config["project"]["dynamic"] = list({*dynamic, "classifiers"})
|
| 231 |
+
dynamic_config = config["tool"]["setuptools"]["dynamic"]
|
| 232 |
+
dynamic_config["classifiers"] = {"file": "classifiers.txt"}
|
| 233 |
+
|
| 234 |
+
# When the configuration is expanded,
|
| 235 |
+
# each line of the file should be an different classifier.
|
| 236 |
+
validate(config, pyproject)
|
| 237 |
+
expanded = expand_configuration(config, tmp_path)
|
| 238 |
+
|
| 239 |
+
assert set(expanded["project"]["classifiers"]) == {
|
| 240 |
+
"Framework :: Flask",
|
| 241 |
+
"Programming Language :: Haskell",
|
| 242 |
+
}
|
| 243 |
+
|
| 244 |
+
def test_dynamic_without_config(self, tmp_path):
|
| 245 |
+
config = """
|
| 246 |
+
[project]
|
| 247 |
+
name = "myproj"
|
| 248 |
+
version = '42'
|
| 249 |
+
dynamic = ["classifiers"]
|
| 250 |
+
"""
|
| 251 |
+
|
| 252 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 253 |
+
pyproject.write_text(cleandoc(config), encoding="utf-8")
|
| 254 |
+
with pytest.raises(OptionError, match="No configuration .* .classifiers."):
|
| 255 |
+
read_configuration(pyproject)
|
| 256 |
+
|
| 257 |
+
def test_dynamic_readme_from_setup_script_args(self, tmp_path):
|
| 258 |
+
config = """
|
| 259 |
+
[project]
|
| 260 |
+
name = "myproj"
|
| 261 |
+
version = '42'
|
| 262 |
+
dynamic = ["readme"]
|
| 263 |
+
"""
|
| 264 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 265 |
+
pyproject.write_text(cleandoc(config), encoding="utf-8")
|
| 266 |
+
dist = Distribution(attrs={"long_description": "42"})
|
| 267 |
+
# No error should occur because of missing `readme`
|
| 268 |
+
dist = apply_configuration(dist, pyproject)
|
| 269 |
+
assert dist.metadata.long_description == "42"
|
| 270 |
+
|
| 271 |
+
def test_dynamic_without_file(self, tmp_path):
|
| 272 |
+
config = """
|
| 273 |
+
[project]
|
| 274 |
+
name = "myproj"
|
| 275 |
+
version = '42'
|
| 276 |
+
dynamic = ["classifiers"]
|
| 277 |
+
|
| 278 |
+
[tool.setuptools.dynamic]
|
| 279 |
+
classifiers = {file = ["classifiers.txt"]}
|
| 280 |
+
"""
|
| 281 |
+
|
| 282 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 283 |
+
pyproject.write_text(cleandoc(config), encoding="utf-8")
|
| 284 |
+
with pytest.warns(UserWarning, match="File .*classifiers.txt. cannot be found"):
|
| 285 |
+
expanded = read_configuration(pyproject)
|
| 286 |
+
assert "classifiers" not in expanded["project"]
|
| 287 |
+
|
| 288 |
+
|
| 289 |
+
@pytest.mark.parametrize(
|
| 290 |
+
"example",
|
| 291 |
+
(
|
| 292 |
+
"""
|
| 293 |
+
[project]
|
| 294 |
+
name = "myproj"
|
| 295 |
+
version = "1.2"
|
| 296 |
+
|
| 297 |
+
[my-tool.that-disrespect.pep518]
|
| 298 |
+
value = 42
|
| 299 |
+
""",
|
| 300 |
+
),
|
| 301 |
+
)
|
| 302 |
+
def test_ignore_unrelated_config(tmp_path, example):
|
| 303 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 304 |
+
pyproject.write_text(cleandoc(example), encoding="utf-8")
|
| 305 |
+
|
| 306 |
+
# Make sure no error is raised due to 3rd party configs in pyproject.toml
|
| 307 |
+
assert read_configuration(pyproject) is not None
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
@pytest.mark.parametrize(
|
| 311 |
+
("example", "error_msg"),
|
| 312 |
+
[
|
| 313 |
+
(
|
| 314 |
+
"""
|
| 315 |
+
[project]
|
| 316 |
+
name = "myproj"
|
| 317 |
+
version = "1.2"
|
| 318 |
+
requires = ['pywin32; platform_system=="Windows"' ]
|
| 319 |
+
""",
|
| 320 |
+
"configuration error: .project. must not contain ..requires.. properties",
|
| 321 |
+
),
|
| 322 |
+
],
|
| 323 |
+
)
|
| 324 |
+
def test_invalid_example(tmp_path, example, error_msg):
|
| 325 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 326 |
+
pyproject.write_text(cleandoc(example), encoding="utf-8")
|
| 327 |
+
|
| 328 |
+
pattern = re.compile(f"invalid pyproject.toml.*{error_msg}.*", re.M | re.S)
|
| 329 |
+
with pytest.raises(ValueError, match=pattern):
|
| 330 |
+
read_configuration(pyproject)
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
@pytest.mark.parametrize("config", ("", "[tool.something]\nvalue = 42"))
|
| 334 |
+
def test_empty(tmp_path, config):
|
| 335 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 336 |
+
pyproject.write_text(config, encoding="utf-8")
|
| 337 |
+
|
| 338 |
+
# Make sure no error is raised
|
| 339 |
+
assert read_configuration(pyproject) == {}
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
@pytest.mark.parametrize("config", ("[project]\nname = 'myproj'\nversion='42'\n",))
|
| 343 |
+
def test_include_package_data_by_default(tmp_path, config):
|
| 344 |
+
"""Builds with ``pyproject.toml`` should consider ``include-package-data=True`` as
|
| 345 |
+
default.
|
| 346 |
+
"""
|
| 347 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 348 |
+
pyproject.write_text(config, encoding="utf-8")
|
| 349 |
+
|
| 350 |
+
config = read_configuration(pyproject)
|
| 351 |
+
assert config["tool"]["setuptools"]["include-package-data"] is True
|
| 352 |
+
|
| 353 |
+
|
| 354 |
+
def test_include_package_data_in_setuppy(tmp_path):
|
| 355 |
+
"""Builds with ``pyproject.toml`` should consider ``include_package_data`` set in
|
| 356 |
+
``setup.py``.
|
| 357 |
+
|
| 358 |
+
See https://github.com/pypa/setuptools/issues/3197#issuecomment-1079023889
|
| 359 |
+
"""
|
| 360 |
+
files = {
|
| 361 |
+
"pyproject.toml": "[project]\nname = 'myproj'\nversion='42'\n",
|
| 362 |
+
"setup.py": "__import__('setuptools').setup(include_package_data=False)",
|
| 363 |
+
}
|
| 364 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 365 |
+
|
| 366 |
+
with Path(tmp_path):
|
| 367 |
+
dist = distutils.core.run_setup("setup.py", {}, stop_after="config")
|
| 368 |
+
|
| 369 |
+
assert dist.get_name() == "myproj"
|
| 370 |
+
assert dist.get_version() == "42"
|
| 371 |
+
assert dist.include_package_data is False
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
def test_warn_tools_typo(tmp_path):
    """Test that the common ``tools.setuptools`` typo in ``pyproject.toml`` issues a warning

    See https://github.com/pypa/setuptools/issues/4150
    """
    # ``[tools.setuptools]`` (plural "tools") is a typo for ``[tool.setuptools]``.
    config = """
    [build-system]
    requires = ["setuptools"]
    build-backend = "setuptools.build_meta"

    [project]
    name = "myproj"
    version = '42'

    [tools.setuptools]
    packages = ["package"]
    """

    toml_file = tmp_path / "pyproject.toml"
    toml_file.write_text(cleandoc(config), encoding="utf-8")

    # The parser should flag the typo instead of silently ignoring the section.
    with pytest.warns(_ToolsTypoInMetadata):
        read_configuration(toml_file)
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from inspect import cleandoc
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
from jaraco import path
|
| 5 |
+
|
| 6 |
+
from setuptools.config.pyprojecttoml import apply_configuration
|
| 7 |
+
from setuptools.dist import Distribution
|
| 8 |
+
from setuptools.warnings import SetuptoolsWarning
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def test_dynamic_dependencies(tmp_path):
    """``project.dependencies`` marked dynamic is filled from a requirements file."""
    pyproject_toml = cleandoc(
        """
        [project]
        name = "myproj"
        version = "1.0"
        dynamic = ["dependencies"]

        [build-system]
        requires = ["setuptools", "wheel"]
        build-backend = "setuptools.build_meta"

        [tool.setuptools.dynamic.dependencies]
        file = ["requirements.txt"]
        """
    )
    path.build(
        {
            "requirements.txt": "six\n # comment\n",
            "pyproject.toml": pyproject_toml,
        },
        prefix=tmp_path,
    )
    dist = apply_configuration(Distribution(), tmp_path / "pyproject.toml")
    # Comment-only lines in the requirements file must be discarded.
    assert dist.install_requires == ["six"]
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def test_dynamic_optional_dependencies(tmp_path):
    """A dynamic optional-dependencies group is read from its requirements file."""
    pyproject_toml = cleandoc(
        """
        [project]
        name = "myproj"
        version = "1.0"
        dynamic = ["optional-dependencies"]

        [tool.setuptools.dynamic.optional-dependencies.docs]
        file = ["requirements-docs.txt"]

        [build-system]
        requires = ["setuptools", "wheel"]
        build-backend = "setuptools.build_meta"
        """
    )
    path.build(
        {
            "requirements-docs.txt": "sphinx\n # comment\n",
            "pyproject.toml": pyproject_toml,
        },
        prefix=tmp_path,
    )
    dist = apply_configuration(Distribution(), tmp_path / "pyproject.toml")
    # The "docs" extra should contain exactly the non-comment requirement.
    assert dist.extras_require == {"docs": ["sphinx"]}
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def test_mixed_dynamic_optional_dependencies(tmp_path):
    """
    Test that if PEP 621 was loosened to allow mixing of dynamic and static
    configurations in the case of fields containing sub-fields (groups),
    things would work out.
    """
    pyproject_toml = cleandoc(
        """
        [project]
        name = "myproj"
        version = "1.0"
        dynamic = ["optional-dependencies"]

        [project.optional-dependencies]
        docs = ["sphinx"]

        [tool.setuptools.dynamic.optional-dependencies.images]
        file = ["requirements-images.txt"]
        """
    )
    path.build(
        {
            "requirements-images.txt": "pillow~=42.0\n # comment\n",
            "pyproject.toml": pyproject_toml,
        },
        prefix=tmp_path,
    )

    toml_file = tmp_path / "pyproject.toml"
    # Currently the mixture is rejected outright, naming the offending table.
    with pytest.raises(ValueError, match="project.optional-dependencies"):
        apply_configuration(Distribution(), toml_file)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def test_mixed_extras_require_optional_dependencies(tmp_path):
    """pyproject.toml optional-dependencies replace a pre-set ``extras_require``."""
    pyproject_toml = cleandoc(
        """
        [project]
        name = "myproj"
        version = "1.0"
        optional-dependencies.docs = ["sphinx"]
        """
    )
    path.build({"pyproject.toml": pyproject_toml}, prefix=tmp_path)
    toml_file = tmp_path / "pyproject.toml"

    # Overwriting a value passed programmatically must emit a warning.
    with pytest.warns(SetuptoolsWarning, match=".extras_require. overwritten"):
        dist = Distribution({"extras_require": {"hello": ["world"]}})
        dist = apply_configuration(dist, toml_file)
    assert dist.extras_require == {"docs": ["sphinx"]}
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/config/test_setupcfg.py
ADDED
|
@@ -0,0 +1,965 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import configparser
|
| 2 |
+
import contextlib
|
| 3 |
+
import inspect
|
| 4 |
+
from pathlib import Path
|
| 5 |
+
from unittest.mock import Mock, patch
|
| 6 |
+
|
| 7 |
+
import pytest
|
| 8 |
+
from packaging.requirements import InvalidRequirement
|
| 9 |
+
|
| 10 |
+
from setuptools.config.setupcfg import ConfigHandler, Target, read_configuration
|
| 11 |
+
from setuptools.dist import Distribution, _Distribution
|
| 12 |
+
from setuptools.warnings import SetuptoolsDeprecationWarning
|
| 13 |
+
|
| 14 |
+
from ..textwrap import DALS
|
| 15 |
+
|
| 16 |
+
from distutils.errors import DistutilsFileError, DistutilsOptionError
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class ErrConfigHandler(ConfigHandler[Target]):
    """Erroneous handler. Fails to implement required methods."""

    # Only the section prefix is supplied; the abstract ``parsers`` attribute
    # is deliberately left unimplemented so tests can assert that accessing it
    # raises NotImplementedError (see test_parsers_implemented).
    section_prefix = "**err**"
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def make_package_dir(name, base_dir, ns=False):
    """Create a (possibly nested) package directory under *base_dir*.

    *name* may contain ``/`` separators for nesting.  Unless *ns* is true
    (namespace package), an empty ``__init__.py`` is written in the deepest
    directory.  Returns ``(package_dir, init_file)``; *init_file* is ``None``
    when *ns* is true.
    """
    current = base_dir
    for part in name.split('/'):
        current = current.mkdir(part)

    if ns:
        return current, None

    init_file = current.join('__init__.py')
    init_file.write('')
    return current, init_file
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def fake_env(
    tmpdir, setup_cfg, setup_py=None, encoding='ascii', package_path='fake_package'
):
    """Populate *tmpdir* with ``setup.py``, ``setup.cfg`` and a fake package.

    The fake package's ``__init__.py`` defines ``VERSION``, ``VERSION_MAJOR``
    and ``get_version()`` so tests can exercise ``attr:`` directives.
    Returns ``(package_dir, config)`` where *config* is the setup.cfg path.
    """
    if setup_py is None:
        setup_py = 'from setuptools import setup\nsetup()\n'

    tmpdir.join('setup.py').write(setup_py)

    # Write the config as bytes so the requested *encoding* is honoured.
    config = tmpdir.join('setup.cfg')
    config.write(setup_cfg.encode(encoding), mode='wb')

    package_dir, init_file = make_package_dir(package_path, tmpdir)

    init_contents = (
        'VERSION = (1, 2, 3)\n'
        '\n'
        'VERSION_MAJOR = 1'
        '\n'
        'def get_version():\n'
        '    return [3, 4, 5, "dev"]\n'
        '\n'
    )
    init_file.write(init_contents)

    return package_dir, config
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
@contextlib.contextmanager
def get_dist(tmpdir, kwargs_initial=None, parse=True):
    """Yield a ``Distribution`` with the working directory set to *tmpdir*.

    Parameters:
        tmpdir: py.path directory used as the cwd while the context is open.
        kwargs_initial: optional initial attributes for the ``Distribution``.
        parse: when true, ``parse_config_files()`` runs before yielding.
    """
    kwargs_initial = kwargs_initial or {}

    with tmpdir.as_cwd():
        dist = Distribution(kwargs_initial)
        dist.script_name = 'setup.py'
        # Explicit conditional instead of the former `parse and ...`
        # short-circuit expression used only for its side effect.
        if parse:
            dist.parse_config_files()

        yield dist
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def test_parsers_implemented():
    """Accessing ``parsers`` on a handler that never defined them must raise."""
    with pytest.raises(NotImplementedError):
        broken = ErrConfigHandler(None, {}, False, Mock())
        broken.parsers
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class TestConfigurationReader:
    """Tests for the top-level ``read_configuration`` entry point."""

    def test_basic(self, tmpdir):
        # metadata and options sections are both parsed into the result dict.
        _, config = fake_env(
            tmpdir,
            '[metadata]\n'
            'version = 10.1.1\n'
            'keywords = one, two\n'
            '\n'
            '[options]\n'
            'scripts = bin/a.py, bin/b.py\n',
        )
        config_dict = read_configuration(str(config))
        assert config_dict['metadata']['version'] == '10.1.1'
        # Comma-separated values become lists.
        assert config_dict['metadata']['keywords'] == ['one', 'two']
        assert config_dict['options']['scripts'] == ['bin/a.py', 'bin/b.py']

    def test_no_config(self, tmpdir):
        # A missing setup.cfg is reported as a DistutilsFileError.
        with pytest.raises(DistutilsFileError):
            read_configuration(str(tmpdir.join('setup.cfg')))

    def test_ignore_errors(self, tmpdir):
        # ``attr: none.VERSION`` refers to a non-importable module.
        _, config = fake_env(
            tmpdir,
            '[metadata]\nversion = attr: none.VERSION\nkeywords = one, two\n',
        )
        # By default the failing option propagates as ImportError...
        with pytest.raises(ImportError):
            read_configuration(str(config))

        # ...but with ignore_option_errors=True it is silently dropped.
        config_dict = read_configuration(str(config), ignore_option_errors=True)

        assert config_dict['metadata']['keywords'] == ['one', 'two']
        assert 'version' not in config_dict['metadata']

        config.remove()
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class TestMetadata:
    """Tests for the ``[metadata]`` section of setup.cfg."""

    def test_basic(self, tmpdir):
        fake_env(
            tmpdir,
            '[metadata]\n'
            'version = 10.1.1\n'
            'description = Some description\n'
            'long_description_content_type = text/something\n'
            'long_description = file: README\n'
            'name = fake_name\n'
            'keywords = one, two\n'
            'provides = package, package.sub\n'
            'license = otherlic\n'
            'download_url = http://test.test.com/test/\n'
            'maintainer_email = test@test.com\n',
        )

        tmpdir.join('README').write('readme contents\nline2')

        meta_initial = {
            # This will be used so `otherlic` won't replace it.
            'license': 'BSD 3-Clause License',
        }

        with get_dist(tmpdir, meta_initial) as dist:
            metadata = dist.metadata

            assert metadata.version == '10.1.1'
            assert metadata.description == 'Some description'
            assert metadata.long_description_content_type == 'text/something'
            # ``file:`` directive pulls the contents of README.
            assert metadata.long_description == 'readme contents\nline2'
            assert metadata.provides == ['package', 'package.sub']
            assert metadata.license == 'BSD 3-Clause License'
            assert metadata.name == 'fake_name'
            assert metadata.keywords == ['one', 'two']
            assert metadata.download_url == 'http://test.test.com/test/'
            assert metadata.maintainer_email == 'test@test.com'

    def test_license_cfg(self, tmpdir):
        fake_env(
            tmpdir,
            DALS(
                """
                [metadata]
                name=foo
                version=0.0.1
                license=Apache 2.0
                """
            ),
        )

        with get_dist(tmpdir) as dist:
            metadata = dist.metadata

            assert metadata.name == "foo"
            assert metadata.version == "0.0.1"
            assert metadata.license == "Apache 2.0"

    def test_file_mixed(self, tmpdir):
        # Multiple files listed in one ``file:`` directive are concatenated.
        fake_env(
            tmpdir,
            '[metadata]\nlong_description = file: README.rst, CHANGES.rst\n\n',
        )

        tmpdir.join('README.rst').write('readme contents\nline2')
        tmpdir.join('CHANGES.rst').write('changelog contents\nand stuff')

        with get_dist(tmpdir) as dist:
            assert dist.metadata.long_description == (
                'readme contents\nline2\nchangelog contents\nand stuff'
            )

    def test_file_sandboxed(self, tmpdir):
        # ``file:`` must not escape the project root.
        tmpdir.ensure("README")
        project = tmpdir.join('depth1', 'depth2')
        project.ensure(dir=True)
        fake_env(project, '[metadata]\nlong_description = file: ../../README\n')

        with get_dist(project, parse=False) as dist:
            with pytest.raises(DistutilsOptionError):
                dist.parse_config_files()  # file: out of sandbox

    def test_aliases(self, tmpdir):
        # Legacy aliases (home_page, summary, ...) map to canonical fields.
        fake_env(
            tmpdir,
            '[metadata]\n'
            'author_email = test@test.com\n'
            'home_page = http://test.test.com/test/\n'
            'summary = Short summary\n'
            'platform = a, b\n'
            'classifier =\n'
            '  Framework :: Django\n'
            '  Programming Language :: Python :: 3.5\n',
        )

        with get_dist(tmpdir) as dist:
            metadata = dist.metadata
            assert metadata.author_email == 'test@test.com'
            assert metadata.url == 'http://test.test.com/test/'
            assert metadata.description == 'Short summary'
            assert metadata.platforms == ['a', 'b']
            assert metadata.classifiers == [
                'Framework :: Django',
                'Programming Language :: Python :: 3.5',
            ]

    def test_multiline(self, tmpdir):
        fake_env(
            tmpdir,
            '[metadata]\n'
            'name = fake_name\n'
            'keywords =\n'
            '  one\n'
            '  two\n'
            'classifiers =\n'
            '  Framework :: Django\n'
            '  Programming Language :: Python :: 3.5\n',
        )
        with get_dist(tmpdir) as dist:
            metadata = dist.metadata
            assert metadata.keywords == ['one', 'two']
            assert metadata.classifiers == [
                'Framework :: Django',
                'Programming Language :: Python :: 3.5',
            ]

    def test_dict(self, tmpdir):
        # ``key = value`` lines under a multiline option become a dict.
        fake_env(
            tmpdir,
            '[metadata]\n'
            'project_urls =\n'
            '  Link One = https://example.com/one/\n'
            '  Link Two = https://example.com/two/\n',
        )
        with get_dist(tmpdir) as dist:
            metadata = dist.metadata
            assert metadata.project_urls == {
                'Link One': 'https://example.com/one/',
                'Link Two': 'https://example.com/two/',
            }

    def test_version(self, tmpdir):
        package_dir, config = fake_env(
            tmpdir, '[metadata]\nversion = attr: fake_package.VERSION\n'
        )

        sub_a = package_dir.mkdir('subpkg_a')
        sub_a.join('__init__.py').write('')
        sub_a.join('mod.py').write('VERSION = (2016, 11, 26)')

        # subpkg_b imports a module that does not exist, to prove that
        # ``attr:`` can read VERSION without executing the import.
        sub_b = package_dir.mkdir('subpkg_b')
        sub_b.join('__init__.py').write('')
        sub_b.join('mod.py').write(
            'import third_party_module\nVERSION = (2016, 11, 26)'
        )

        # Tuples are joined with dots.
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'

        # Callables are called and their result is used.
        config.write('[metadata]\nversion = attr: fake_package.get_version\n')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '3.4.5.dev'

        config.write('[metadata]\nversion = attr: fake_package.VERSION_MAJOR\n')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1'

        config.write('[metadata]\nversion = attr: fake_package.subpkg_a.mod.VERSION\n')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '2016.11.26'

        config.write('[metadata]\nversion = attr: fake_package.subpkg_b.mod.VERSION\n')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '2016.11.26'

    def test_version_file(self, tmpdir):
        fake_env(tmpdir, '[metadata]\nversion = file: fake_package/version.txt\n')
        tmpdir.join('fake_package', 'version.txt').write('1.2.3\n')

        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'

        # A version file with more than one line is rejected.
        tmpdir.join('fake_package', 'version.txt').write('1.2.3\n4.5.6\n')
        with pytest.raises(DistutilsOptionError):
            with get_dist(tmpdir) as dist:
                dist.metadata.version

    def test_version_with_package_dir_simple(self, tmpdir):
        # ``attr:`` resolves through a src-layout package_dir mapping.
        fake_env(
            tmpdir,
            '[metadata]\n'
            'version = attr: fake_package_simple.VERSION\n'
            '[options]\n'
            'package_dir =\n'
            '    = src\n',
            package_path='src/fake_package_simple',
        )

        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'

    def test_version_with_package_dir_rename(self, tmpdir):
        fake_env(
            tmpdir,
            '[metadata]\n'
            'version = attr: fake_package_rename.VERSION\n'
            '[options]\n'
            'package_dir =\n'
            '    fake_package_rename = fake_dir\n',
            package_path='fake_dir',
        )

        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'

    def test_version_with_package_dir_complex(self, tmpdir):
        fake_env(
            tmpdir,
            '[metadata]\n'
            'version = attr: fake_package_complex.VERSION\n'
            '[options]\n'
            'package_dir =\n'
            '    fake_package_complex = src/fake_dir\n',
            package_path='src/fake_dir',
        )

        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'

    def test_unknown_meta_item(self, tmpdir):
        fake_env(tmpdir, '[metadata]\nname = fake_name\nunknown = some\n')
        with get_dist(tmpdir, parse=False) as dist:
            dist.parse_config_files()  # Skip unknown.

    def test_usupported_section(self, tmpdir):
        # A dotted metadata subsection is not supported and must raise.
        fake_env(tmpdir, '[metadata.some]\nkey = val\n')
        with get_dist(tmpdir, parse=False) as dist:
            with pytest.raises(DistutilsOptionError):
                dist.parse_config_files()

    def test_classifiers(self, tmpdir):
        expected = set([
            'Framework :: Django',
            'Programming Language :: Python :: 3',
            'Programming Language :: Python :: 3.5',
        ])

        # From file.
        _, config = fake_env(tmpdir, '[metadata]\nclassifiers = file: classifiers\n')

        tmpdir.join('classifiers').write(
            'Framework :: Django\n'
            'Programming Language :: Python :: 3\n'
            'Programming Language :: Python :: 3.5\n'
        )

        with get_dist(tmpdir) as dist:
            assert set(dist.metadata.classifiers) == expected

        # From list notation
        config.write(
            '[metadata]\n'
            'classifiers =\n'
            '  Framework :: Django\n'
            '  Programming Language :: Python :: 3\n'
            '  Programming Language :: Python :: 3.5\n'
        )
        with get_dist(tmpdir) as dist:
            assert set(dist.metadata.classifiers) == expected

    def test_interpolation(self, tmpdir):
        # Unresolvable %(...)s interpolation propagates from configparser.
        fake_env(tmpdir, '[metadata]\ndescription = %(message)s\n')
        with pytest.raises(configparser.InterpolationMissingOptionError):
            with get_dist(tmpdir):
                pass

    def test_non_ascii_1(self, tmpdir):
        fake_env(tmpdir, '[metadata]\ndescription = éàïôñ\n', encoding='utf-8')
        with get_dist(tmpdir):
            pass

    def test_non_ascii_3(self, tmpdir):
        # A bogus coding cookie in setup.cfg is ignored (files are read as UTF-8).
        fake_env(tmpdir, '\n# -*- coding: invalid\n')
        with get_dist(tmpdir):
            pass

    def test_non_ascii_4(self, tmpdir):
        fake_env(
            tmpdir,
            '# -*- coding: utf-8\n[metadata]\ndescription = éàïôñ\n',
            encoding='utf-8',
        )
        with get_dist(tmpdir) as dist:
            assert dist.metadata.description == 'éàïôñ'

    def test_not_utf8(self, tmpdir):
        """
        Config files encoded not in UTF-8 will fail
        """
        fake_env(
            tmpdir,
            '# vim: set fileencoding=iso-8859-15 :\n[metadata]\ndescription = éàïôñ\n',
            encoding='iso-8859-15',
        )
        with pytest.raises(UnicodeDecodeError):
            with get_dist(tmpdir):
                pass

    def test_warn_dash_deprecation(self, tmpdir):
        # warn_dash_deprecation() is a method in setuptools.dist
        # remove this test and the method when no longer needed
        fake_env(
            tmpdir,
            '[metadata]\n'
            'author-email = test@test.com\n'
            'maintainer_email = foo@foo.com\n',
        )
        msg = "Usage of dash-separated 'author-email' will not be supported"
        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
            with get_dist(tmpdir) as dist:
                metadata = dist.metadata

        # The dash-separated key is still honoured, only deprecated.
        assert metadata.author_email == 'test@test.com'
        assert metadata.maintainer_email == 'foo@foo.com'

    def test_make_option_lowercase(self, tmpdir):
        # remove this test and the method make_option_lowercase() in setuptools.dist
        # when no longer needed
        fake_env(tmpdir, '[metadata]\nName = foo\ndescription = Some description\n')
        msg = "Usage of uppercase key 'Name' in 'metadata' will not be supported"
        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
            with get_dist(tmpdir) as dist:
                metadata = dist.metadata

        assert metadata.name == 'foo'
        assert metadata.description == 'Some description'
|
| 451 |
+
|
| 452 |
+
|
| 453 |
+
class TestOptions:
|
| 454 |
+
def test_basic(self, tmpdir):
|
| 455 |
+
fake_env(
|
| 456 |
+
tmpdir,
|
| 457 |
+
'[options]\n'
|
| 458 |
+
'zip_safe = True\n'
|
| 459 |
+
'include_package_data = yes\n'
|
| 460 |
+
'package_dir = b=c, =src\n'
|
| 461 |
+
'packages = pack_a, pack_b.subpack\n'
|
| 462 |
+
'namespace_packages = pack1, pack2\n'
|
| 463 |
+
'scripts = bin/one.py, bin/two.py\n'
|
| 464 |
+
'eager_resources = bin/one.py, bin/two.py\n'
|
| 465 |
+
'install_requires = docutils>=0.3; pack ==1.1, ==1.3; hey\n'
|
| 466 |
+
'setup_requires = docutils>=0.3; spack ==1.1, ==1.3; there\n'
|
| 467 |
+
'dependency_links = http://some.com/here/1, '
|
| 468 |
+
'http://some.com/there/2\n'
|
| 469 |
+
'python_requires = >=1.0, !=2.8\n'
|
| 470 |
+
'py_modules = module1, module2\n',
|
| 471 |
+
)
|
| 472 |
+
deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages")
|
| 473 |
+
with deprec, get_dist(tmpdir) as dist:
|
| 474 |
+
assert dist.zip_safe
|
| 475 |
+
assert dist.include_package_data
|
| 476 |
+
assert dist.package_dir == {'': 'src', 'b': 'c'}
|
| 477 |
+
assert dist.packages == ['pack_a', 'pack_b.subpack']
|
| 478 |
+
assert dist.namespace_packages == ['pack1', 'pack2']
|
| 479 |
+
assert dist.scripts == ['bin/one.py', 'bin/two.py']
|
| 480 |
+
assert dist.dependency_links == ([
|
| 481 |
+
'http://some.com/here/1',
|
| 482 |
+
'http://some.com/there/2',
|
| 483 |
+
])
|
| 484 |
+
assert dist.install_requires == ([
|
| 485 |
+
'docutils>=0.3',
|
| 486 |
+
'pack==1.1,==1.3',
|
| 487 |
+
'hey',
|
| 488 |
+
])
|
| 489 |
+
assert dist.setup_requires == ([
|
| 490 |
+
'docutils>=0.3',
|
| 491 |
+
'spack ==1.1, ==1.3',
|
| 492 |
+
'there',
|
| 493 |
+
])
|
| 494 |
+
assert dist.python_requires == '>=1.0, !=2.8'
|
| 495 |
+
assert dist.py_modules == ['module1', 'module2']
|
| 496 |
+
|
| 497 |
+
def test_multiline(self, tmpdir):
|
| 498 |
+
fake_env(
|
| 499 |
+
tmpdir,
|
| 500 |
+
'[options]\n'
|
| 501 |
+
'package_dir = \n'
|
| 502 |
+
' b=c\n'
|
| 503 |
+
' =src\n'
|
| 504 |
+
'packages = \n'
|
| 505 |
+
' pack_a\n'
|
| 506 |
+
' pack_b.subpack\n'
|
| 507 |
+
'namespace_packages = \n'
|
| 508 |
+
' pack1\n'
|
| 509 |
+
' pack2\n'
|
| 510 |
+
'scripts = \n'
|
| 511 |
+
' bin/one.py\n'
|
| 512 |
+
' bin/two.py\n'
|
| 513 |
+
'eager_resources = \n'
|
| 514 |
+
' bin/one.py\n'
|
| 515 |
+
' bin/two.py\n'
|
| 516 |
+
'install_requires = \n'
|
| 517 |
+
' docutils>=0.3\n'
|
| 518 |
+
' pack ==1.1, ==1.3\n'
|
| 519 |
+
' hey\n'
|
| 520 |
+
'setup_requires = \n'
|
| 521 |
+
' docutils>=0.3\n'
|
| 522 |
+
' spack ==1.1, ==1.3\n'
|
| 523 |
+
' there\n'
|
| 524 |
+
'dependency_links = \n'
|
| 525 |
+
' http://some.com/here/1\n'
|
| 526 |
+
' http://some.com/there/2\n',
|
| 527 |
+
)
|
| 528 |
+
deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages")
|
| 529 |
+
with deprec, get_dist(tmpdir) as dist:
|
| 530 |
+
assert dist.package_dir == {'': 'src', 'b': 'c'}
|
| 531 |
+
assert dist.packages == ['pack_a', 'pack_b.subpack']
|
| 532 |
+
assert dist.namespace_packages == ['pack1', 'pack2']
|
| 533 |
+
assert dist.scripts == ['bin/one.py', 'bin/two.py']
|
| 534 |
+
assert dist.dependency_links == ([
|
| 535 |
+
'http://some.com/here/1',
|
| 536 |
+
'http://some.com/there/2',
|
| 537 |
+
])
|
| 538 |
+
assert dist.install_requires == ([
|
| 539 |
+
'docutils>=0.3',
|
| 540 |
+
'pack==1.1,==1.3',
|
| 541 |
+
'hey',
|
| 542 |
+
])
|
| 543 |
+
assert dist.setup_requires == ([
|
| 544 |
+
'docutils>=0.3',
|
| 545 |
+
'spack ==1.1, ==1.3',
|
| 546 |
+
'there',
|
| 547 |
+
])
|
| 548 |
+
|
| 549 |
+
def test_package_dir_fail(self, tmpdir):
    """A malformed ``package_dir`` value (no ``=``) raises DistutilsOptionError."""
    fake_env(tmpdir, '[options]\npackage_dir = a b\n')
    with get_dist(tmpdir, parse=False) as dist:
        with pytest.raises(DistutilsOptionError):
            dist.parse_config_files()
|
| 554 |
+
|
| 555 |
+
def test_package_data(self, tmpdir):
    """[options.package_data] / [options.exclude_package_data] map names to pattern lists."""
    fake_env(
        tmpdir,
        '[options.package_data]\n'
        '* = *.txt, *.rst\n'
        'hello = *.msg\n'
        '\n'
        '[options.exclude_package_data]\n'
        '* = fake1.txt, fake2.txt\n'
        'hello = *.dat\n',
    )

    with get_dist(tmpdir) as dist:
        # '*' in the config becomes the '' (all packages) key.
        assert dist.package_data == {
            '': ['*.txt', '*.rst'],
            'hello': ['*.msg'],
        }
        assert dist.exclude_package_data == {
            '': ['fake1.txt', 'fake2.txt'],
            'hello': ['*.dat'],
        }
|
| 576 |
+
|
| 577 |
+
def test_packages(self, tmpdir):
    """``packages = find:`` triggers automatic package discovery."""
    fake_env(tmpdir, '[options]\npackages = find:\n')

    with get_dist(tmpdir) as dist:
        assert dist.packages == ['fake_package']
|
| 582 |
+
|
| 583 |
+
def test_find_directive(self, tmpdir):
|
| 584 |
+
dir_package, config = fake_env(tmpdir, '[options]\npackages = find:\n')
|
| 585 |
+
|
| 586 |
+
make_package_dir('sub_one', dir_package)
|
| 587 |
+
make_package_dir('sub_two', dir_package)
|
| 588 |
+
|
| 589 |
+
with get_dist(tmpdir) as dist:
|
| 590 |
+
assert set(dist.packages) == set([
|
| 591 |
+
'fake_package',
|
| 592 |
+
'fake_package.sub_two',
|
| 593 |
+
'fake_package.sub_one',
|
| 594 |
+
])
|
| 595 |
+
|
| 596 |
+
config.write(
|
| 597 |
+
'[options]\n'
|
| 598 |
+
'packages = find:\n'
|
| 599 |
+
'\n'
|
| 600 |
+
'[options.packages.find]\n'
|
| 601 |
+
'where = .\n'
|
| 602 |
+
'include =\n'
|
| 603 |
+
' fake_package.sub_one\n'
|
| 604 |
+
' two\n'
|
| 605 |
+
)
|
| 606 |
+
with get_dist(tmpdir) as dist:
|
| 607 |
+
assert dist.packages == ['fake_package.sub_one']
|
| 608 |
+
|
| 609 |
+
config.write(
|
| 610 |
+
'[options]\n'
|
| 611 |
+
'packages = find:\n'
|
| 612 |
+
'\n'
|
| 613 |
+
'[options.packages.find]\n'
|
| 614 |
+
'exclude =\n'
|
| 615 |
+
' fake_package.sub_one\n'
|
| 616 |
+
)
|
| 617 |
+
with get_dist(tmpdir) as dist:
|
| 618 |
+
assert set(dist.packages) == set(['fake_package', 'fake_package.sub_two'])
|
| 619 |
+
|
| 620 |
+
def test_find_namespace_directive(self, tmpdir):
    """``find_namespace:`` also discovers PEP 420 (no __init__.py) packages."""
    dir_package, config = fake_env(
        tmpdir, '[options]\npackages = find_namespace:\n'
    )

    make_package_dir('sub_one', dir_package)
    # ns=True: create sub_two without an __init__.py (namespace package).
    make_package_dir('sub_two', dir_package, ns=True)

    with get_dist(tmpdir) as dist:
        assert set(dist.packages) == {
            'fake_package',
            'fake_package.sub_two',
            'fake_package.sub_one',
        }

    # 'include' whitelists; unknown names ('two') are ignored.
    config.write(
        '[options]\n'
        'packages = find_namespace:\n'
        '\n'
        '[options.packages.find]\n'
        'where = .\n'
        'include =\n'
        ' fake_package.sub_one\n'
        ' two\n'
    )
    with get_dist(tmpdir) as dist:
        assert dist.packages == ['fake_package.sub_one']

    # 'exclude' removes entries from the discovered set.
    config.write(
        '[options]\n'
        'packages = find_namespace:\n'
        '\n'
        '[options.packages.find]\n'
        'exclude =\n'
        ' fake_package.sub_one\n'
    )
    with get_dist(tmpdir) as dist:
        assert set(dist.packages) == {'fake_package', 'fake_package.sub_two'}
|
| 658 |
+
|
| 659 |
+
def test_extras_require(self, tmpdir):
    """[options.extras_require] supports ';'-separated and multi-line requirement lists."""
    fake_env(
        tmpdir,
        '[options.extras_require]\n'
        'pdf = ReportLab>=1.2; RXP\n'
        'rest = \n'
        ' docutils>=0.3\n'
        ' pack ==1.1, ==1.3\n',
    )

    with get_dist(tmpdir) as dist:
        assert dist.extras_require == {
            'pdf': ['ReportLab>=1.2', 'RXP'],
            'rest': ['docutils>=0.3', 'pack==1.1,==1.3'],
        }
        # Each extras key is also recorded as a provided extra in metadata.
        assert set(dist.metadata.provides_extras) == {'pdf', 'rest'}
|
| 675 |
+
|
| 676 |
+
@pytest.mark.parametrize(
    "config",
    [
        "[options.extras_require]\nfoo = bar;python_version<'3'",
        "[options.extras_require]\nfoo = bar;os_name=='linux'",
        "[options.extras_require]\nfoo = bar;python_version<'3'\n",
        "[options.extras_require]\nfoo = bar;os_name=='linux'\n",
        "[options]\ninstall_requires = bar;python_version<'3'",
        "[options]\ninstall_requires = bar;os_name=='linux'",
        "[options]\ninstall_requires = bar;python_version<'3'\n",
        "[options]\ninstall_requires = bar;os_name=='linux'\n",
    ],
)
def test_raises_accidental_env_marker_misconfig(self, config, tmpdir):
    """A standalone fragment that parses as an environment marker is an error.

    The user almost certainly meant ``bar; python_version<'3'`` (a marker on
    ``bar``) rather than two requirements, so setuptools raises.
    """
    fake_env(tmpdir, config)
    match = (
        r"One of the parsed requirements in `(install_requires|extras_require.+)` "
        "looks like a valid environment marker.*"
    )
    with pytest.raises(InvalidRequirement, match=match):
        with get_dist(tmpdir) as _:
            pass
|
| 698 |
+
|
| 699 |
+
@pytest.mark.parametrize(
    "config",
    [
        "[options.extras_require]\nfoo = bar;python_version<3",
        "[options.extras_require]\nfoo = bar;python_version<3\n",
        "[options]\ninstall_requires = bar;python_version<3",
        "[options]\ninstall_requires = bar;python_version<3\n",
    ],
)
def test_warn_accidental_env_marker_misconfig(self, config, tmpdir):
    """A fragment that merely *resembles* a marker (unquoted version) only warns."""
    fake_env(tmpdir, config)
    match = (
        r"One of the parsed requirements in `(install_requires|extras_require.+)` "
        "looks like a valid environment marker.*"
    )
    with pytest.warns(SetuptoolsDeprecationWarning, match=match):
        with get_dist(tmpdir) as _:
            pass
|
| 717 |
+
|
| 718 |
+
@pytest.mark.parametrize(
    "config",
    [
        "[options.extras_require]\nfoo =\n bar;python_version<'3'",
        "[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy",
        "[options.extras_require]\nfoo =\n bar;python_version<'3'\n",
        "[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy\n",
        "[options.extras_require]\nfoo =\n bar\n python_version<3\n",
        "[options]\ninstall_requires =\n bar;python_version<'3'",
        "[options]\ninstall_requires = bar;baz\nboo = xxx;yyy",
        "[options]\ninstall_requires =\n bar;python_version<'3'\n",
        "[options]\ninstall_requires = bar;baz\nboo = xxx;yyy\n",
        "[options]\ninstall_requires =\n bar\n python_version<3\n",
    ],
)
@pytest.mark.filterwarnings("error::setuptools.SetuptoolsDeprecationWarning")
def test_nowarn_accidental_env_marker_misconfig(self, config, tmpdir, recwarn):
    """Legitimate requirement layouts (markers attached to a name, or multi-line
    lists) must neither warn nor raise — the filterwarnings mark turns any
    SetuptoolsDeprecationWarning into an error."""
    fake_env(tmpdir, config)
    num_warnings = len(recwarn)
    with get_dist(tmpdir) as _:
        pass
    # The examples are valid, no warnings shown
    assert len(recwarn) == num_warnings
|
| 741 |
+
|
| 742 |
+
def test_dash_preserved_extras_require(self, tmpdir):
    """Extras names keep dashes/underscores exactly as written (no normalization)."""
    fake_env(tmpdir, '[options.extras_require]\nfoo-a = foo\nfoo_b = test\n')

    with get_dist(tmpdir) as dist:
        assert dist.extras_require == {'foo-a': ['foo'], 'foo_b': ['test']}
|
| 747 |
+
|
| 748 |
+
def test_entry_points(self, tmpdir):
|
| 749 |
+
_, config = fake_env(
|
| 750 |
+
tmpdir,
|
| 751 |
+
'[options.entry_points]\n'
|
| 752 |
+
'group1 = point1 = pack.module:func, '
|
| 753 |
+
'.point2 = pack.module2:func_rest [rest]\n'
|
| 754 |
+
'group2 = point3 = pack.module:func2\n',
|
| 755 |
+
)
|
| 756 |
+
|
| 757 |
+
with get_dist(tmpdir) as dist:
|
| 758 |
+
assert dist.entry_points == {
|
| 759 |
+
'group1': [
|
| 760 |
+
'point1 = pack.module:func',
|
| 761 |
+
'.point2 = pack.module2:func_rest [rest]',
|
| 762 |
+
],
|
| 763 |
+
'group2': ['point3 = pack.module:func2'],
|
| 764 |
+
}
|
| 765 |
+
|
| 766 |
+
expected = (
|
| 767 |
+
'[blogtool.parsers]\n'
|
| 768 |
+
'.rst = some.nested.module:SomeClass.some_classmethod[reST]\n'
|
| 769 |
+
)
|
| 770 |
+
|
| 771 |
+
tmpdir.join('entry_points').write(expected)
|
| 772 |
+
|
| 773 |
+
# From file.
|
| 774 |
+
config.write('[options]\nentry_points = file: entry_points\n')
|
| 775 |
+
|
| 776 |
+
with get_dist(tmpdir) as dist:
|
| 777 |
+
assert dist.entry_points == expected
|
| 778 |
+
|
| 779 |
+
def test_case_sensitive_entry_points(self, tmpdir):
    """Entry-point group names are case sensitive ('GROUP1' stays uppercase)."""
    fake_env(
        tmpdir,
        '[options.entry_points]\n'
        'GROUP1 = point1 = pack.module:func, '
        '.point2 = pack.module2:func_rest [rest]\n'
        'group2 = point3 = pack.module:func2\n',
    )

    with get_dist(tmpdir) as dist:
        assert dist.entry_points == {
            'GROUP1': [
                'point1 = pack.module:func',
                '.point2 = pack.module2:func_rest [rest]',
            ],
            'group2': ['point3 = pack.module:func2'],
        }
|
| 796 |
+
|
| 797 |
+
def test_data_files(self, tmpdir):
    """[options.data_files] yields (target-dir, [files]) pairs; both multi-line
    and comma-separated value styles are accepted."""
    fake_env(
        tmpdir,
        '[options.data_files]\n'
        'cfg =\n'
        ' a/b.conf\n'
        ' c/d.conf\n'
        'data = e/f.dat, g/h.dat\n',
    )

    with get_dist(tmpdir) as dist:
        expected = [
            ('cfg', ['a/b.conf', 'c/d.conf']),
            ('data', ['e/f.dat', 'g/h.dat']),
        ]
        assert sorted(dist.data_files) == sorted(expected)
|
| 813 |
+
|
| 814 |
+
def test_data_files_globby(self, tmpdir):
    """Glob patterns in [options.data_files] expand against files on disk."""
    fake_env(
        tmpdir,
        '[options.data_files]\n'
        'cfg =\n'
        ' a/b.conf\n'
        ' c/d.conf\n'
        'data = *.dat\n'
        'icons = \n'
        ' *.ico\n'
        'audio = \n'
        ' *.wav\n'
        ' sounds.db\n',
    )

    # Create dummy files for glob()'s sake:
    tmpdir.join('a.dat').write('')
    tmpdir.join('b.dat').write('')
    tmpdir.join('c.dat').write('')
    tmpdir.join('a.ico').write('')
    tmpdir.join('b.ico').write('')
    tmpdir.join('c.ico').write('')
    tmpdir.join('beep.wav').write('')
    tmpdir.join('boop.wav').write('')
    tmpdir.join('sounds.db').write('')

    with get_dist(tmpdir) as dist:
        expected = [
            ('cfg', ['a/b.conf', 'c/d.conf']),
            ('data', ['a.dat', 'b.dat', 'c.dat']),
            ('icons', ['a.ico', 'b.ico', 'c.ico']),
            # Globs and literal file names can be mixed in one value.
            ('audio', ['beep.wav', 'boop.wav', 'sounds.db']),
        ]
        assert sorted(dist.data_files) == sorted(expected)
|
| 848 |
+
|
| 849 |
+
def test_python_requires_simple(self, tmpdir):
    """A simple ``python_requires`` specifier parses without error."""
    fake_env(
        tmpdir,
        DALS(
            """
            [options]
            python_requires=>=2.7
            """
        ),
    )
    with get_dist(tmpdir) as dist:
        dist.parse_config_files()
|
| 861 |
+
|
| 862 |
+
def test_python_requires_compound(self, tmpdir):
    """A compound (comma-separated) ``python_requires`` specifier parses without error."""
    fake_env(
        tmpdir,
        DALS(
            """
            [options]
            python_requires=>=2.7,!=3.0.*
            """
        ),
    )
    with get_dist(tmpdir) as dist:
        dist.parse_config_files()
|
| 874 |
+
|
| 875 |
+
def test_python_requires_invalid(self, tmpdir):
    """An unparsable ``python_requires`` value raises during config parsing."""
    fake_env(
        tmpdir,
        DALS(
            """
            [options]
            python_requires=invalid
            """
        ),
    )
    # The exact exception type depends on the packaging layer, hence Exception.
    with pytest.raises(Exception):
        with get_dist(tmpdir) as dist:
            dist.parse_config_files()
|
| 888 |
+
|
| 889 |
+
def test_cmdclass(self, tmpdir):
|
| 890 |
+
module_path = Path(tmpdir, "src/custom_build.py") # auto discovery for src
|
| 891 |
+
module_path.parent.mkdir(parents=True, exist_ok=True)
|
| 892 |
+
module_path.write_text(
|
| 893 |
+
"from distutils.core import Command\nclass CustomCmd(Command): pass\n",
|
| 894 |
+
encoding="utf-8",
|
| 895 |
+
)
|
| 896 |
+
|
| 897 |
+
setup_cfg = """
|
| 898 |
+
[options]
|
| 899 |
+
cmdclass =
|
| 900 |
+
customcmd = custom_build.CustomCmd
|
| 901 |
+
"""
|
| 902 |
+
fake_env(tmpdir, inspect.cleandoc(setup_cfg))
|
| 903 |
+
|
| 904 |
+
with get_dist(tmpdir) as dist:
|
| 905 |
+
cmdclass = dist.cmdclass['customcmd']
|
| 906 |
+
assert cmdclass.__name__ == "CustomCmd"
|
| 907 |
+
assert cmdclass.__module__ == "custom_build"
|
| 908 |
+
assert module_path.samefile(inspect.getfile(cmdclass))
|
| 909 |
+
|
| 910 |
+
def test_requirements_file(self, tmpdir):
|
| 911 |
+
fake_env(
|
| 912 |
+
tmpdir,
|
| 913 |
+
DALS(
|
| 914 |
+
"""
|
| 915 |
+
[options]
|
| 916 |
+
install_requires = file:requirements.txt
|
| 917 |
+
[options.extras_require]
|
| 918 |
+
colors = file:requirements-extra.txt
|
| 919 |
+
"""
|
| 920 |
+
),
|
| 921 |
+
)
|
| 922 |
+
|
| 923 |
+
tmpdir.join('requirements.txt').write('\ndocutils>=0.3\n\n')
|
| 924 |
+
tmpdir.join('requirements-extra.txt').write('colorama')
|
| 925 |
+
|
| 926 |
+
with get_dist(tmpdir) as dist:
|
| 927 |
+
assert dist.install_requires == ['docutils>=0.3']
|
| 928 |
+
assert dist.extras_require == {'colors': ['colorama']}
|
| 929 |
+
|
| 930 |
+
|
| 931 |
+
# Keep a reference to the pristine distutils Distribution.__init__ so the
# patched version in TestExternalSetters can still invoke the real one.
saved_dist_init = _Distribution.__init__
|
| 932 |
+
|
| 933 |
+
|
| 934 |
+
class TestExternalSetters:
    # During creation of the setuptools Distribution() object, we call
    # the init of the parent distutils Distribution object via
    # _Distribution.__init__ ().
    #
    # It's possible distutils calls out to various keyword
    # implementations (i.e. distutils.setup_keywords entry points)
    # that may set a range of variables.
    #
    # This wraps distutil's Distribution.__init__ and simulates
    # pbr or something else setting these values.
    def _fake_distribution_init(self, dist, attrs):
        """Run the real __init__, then mutate metadata as a plugin would."""
        saved_dist_init(dist, attrs)
        # see self._DISTUTILS_UNSUPPORTED_METADATA
        dist.metadata.long_description_content_type = 'text/something'
        # Test overwrite setup() args
        dist.metadata.project_urls = {
            'Link One': 'https://example.com/one/',
            'Link Two': 'https://example.com/two/',
        }

    @patch.object(_Distribution, '__init__', autospec=True)
    def test_external_setters(self, mock_parent_init, tmpdir):
        """Values set during parent __init__ (by external plugins) must survive."""
        mock_parent_init.side_effect = self._fake_distribution_init

        # attrs passed to setup() are overridden by the plugin's values.
        dist = Distribution(attrs={'project_urls': {'will_be': 'ignored'}})

        assert dist.metadata.long_description_content_type == 'text/something'
        assert dist.metadata.project_urls == {
            'Link One': 'https://example.com/one/',
            'Link Two': 'https://example.com/two/',
        }
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/contexts.py
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import io
|
| 3 |
+
import os
|
| 4 |
+
import shutil
|
| 5 |
+
import site
|
| 6 |
+
import sys
|
| 7 |
+
import tempfile
|
| 8 |
+
|
| 9 |
+
from filelock import FileLock
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
@contextlib.contextmanager
def tempdir(cd=lambda dir: None, **kwargs):
    """Yield a fresh temporary directory, removing it on exit.

    ``cd`` is invoked with the new directory on entry and with the original
    working directory on exit (pass ``os.chdir`` to actually change into it).
    Extra keyword arguments are forwarded to ``tempfile.mkdtemp``.
    """
    created = tempfile.mkdtemp(**kwargs)
    previous = os.getcwd()
    try:
        cd(created)
        yield created
    finally:
        # Restore the caller's notion of cwd before deleting the directory.
        cd(previous)
        shutil.rmtree(created)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
@contextlib.contextmanager
def environment(**replacements):
    """
    In a context, patch the environment with replacements. Pass None values
    to clear the values.
    """
    # Remember originals for every name we are about to touch.
    saved = {name: os.environ[name] for name in replacements if name in os.environ}

    # Names mapped to None mean "unset this variable".
    to_clear = [name for name, value in replacements.items() if value is None]
    for name in to_clear:
        os.environ.pop(name, None)
        del replacements[name]

    os.environ.update(replacements)

    try:
        yield saved
    finally:
        # Drop everything we set, then restore the saved originals.
        for name in replacements:
            os.environ.pop(name, None)
        os.environ.update(saved)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
@contextlib.contextmanager
def quiet():
    """
    Redirect stdout/stderr to StringIO objects to prevent console output from
    distutils commands.
    """
    saved_out, saved_err = sys.stdout, sys.stderr
    sys.stdout = captured_out = io.StringIO()
    sys.stderr = captured_err = io.StringIO()
    try:
        yield captured_out, captured_err
    finally:
        # Rewind so callers can read the captured text from the start.
        captured_out.seek(0)
        captured_err.seek(0)
        sys.stdout = saved_out
        sys.stderr = saved_err
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
@contextlib.contextmanager
def save_user_site_setting():
    """Yield the current ``site.ENABLE_USER_SITE`` and restore it on exit."""
    original = site.ENABLE_USER_SITE
    try:
        yield original
    finally:
        site.ENABLE_USER_SITE = original
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
@contextlib.contextmanager
def save_pkg_resources_state():
    """Snapshot pkg_resources' global state plus ``sys.path``; restore both on exit."""
    import pkg_resources

    state = pkg_resources.__getstate__()
    # sys.path is saved too, since pkg_resources' working set derives from it.
    path_snapshot = sys.path[:]
    try:
        yield state, path_snapshot
    finally:
        sys.path[:] = path_snapshot
        pkg_resources.__setstate__(state)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def suppress_exceptions(*excs):
    """Context manager swallowing the given exception types.

    Thin alias over :func:`contextlib.suppress`, kept for the existing
    test-suite call sites.
    """
    return contextlib.suppress(*excs)
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def multiproc(request):
    """
    Return True if running under xdist and multiple
    workers are used.
    """
    try:
        # Only xdist provides the 'worker_id' fixture; 'master' means no workers.
        return request.getfixturevalue('worker_id') != 'master'
    except Exception:
        return False
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
@contextlib.contextmanager
def session_locked_tmp_dir(request, tmp_path_factory, name):
    """Uses a file lock to guarantee only one worker can access a temp dir"""
    # get the temp directory shared by all workers
    # Under xdist, each worker's basetemp is a child of the shared session
    # temp dir, so the parent is the cross-worker location.
    base = tmp_path_factory.getbasetemp()
    shared_dir = base.parent if multiproc(request) else base

    locked_dir = shared_dir / name
    with FileLock(locked_dir.with_suffix(".lock")):
        # ^-- prevent multiple workers to access the directory at once
        locked_dir.mkdir(exist_ok=True, parents=True)
        yield locked_dir
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
@contextlib.contextmanager
def save_paths():
    """Make sure ``sys.path``, ``sys.meta_path`` and ``sys.path_hooks`` are preserved"""
    snapshot = (sys.path[:], sys.meta_path[:], sys.path_hooks[:])
    try:
        yield
    finally:
        # Rebind (rather than mutate) each list, exactly as snapshotted.
        sys.path, sys.meta_path, sys.path_hooks = snapshot
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
@contextlib.contextmanager
def save_sys_modules():
    """Make sure initial ``sys.modules`` is preserved"""
    original = sys.modules
    try:
        # Work on a shallow copy; imports inside the context stay local to it.
        sys.modules = original.copy()
        yield
    finally:
        sys.modules = original
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/environment.py
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import subprocess
|
| 3 |
+
import sys
|
| 4 |
+
import unicodedata
|
| 5 |
+
from subprocess import PIPE as _PIPE, Popen as _Popen
|
| 6 |
+
|
| 7 |
+
import jaraco.envs
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class VirtualEnv(jaraco.envs.VirtualEnv):
    """Test helper virtualenv that runs commands with a scrubbed environment."""

    name = '.env'
    # Some version of PyPy will import distutils on startup, implicitly
    # importing setuptools, and thus leading to BackendInvalid errors
    # when upgrading Setuptools. Bypass this behavior by avoiding the
    # early availability and need to upgrade.
    create_opts = ['--no-setuptools']

    def run(self, cmd, *args, **kwargs):
        """Run *cmd* (argv list) inside the venv and return its stdout as text."""
        # Resolve the executable against the venv's bin directory.
        cmd = [self.exe(cmd[0])] + cmd[1:]
        kwargs = {"cwd": self.root, "encoding": "utf-8", **kwargs}  # Allow overriding
        # In some environments (eg. downstream distro packaging), where:
        # - tox isn't used to run tests and
        # - PYTHONPATH is set to point to a specific setuptools codebase and
        # - no custom env is explicitly set by a test
        # PYTHONPATH will leak into the spawned processes.
        # In that case tests look for module in the wrong place (on PYTHONPATH).
        # Unless the test sets its own special env, pass a copy of the existing
        # environment with removed PYTHONPATH to the subprocesses.
        if "env" not in kwargs:
            env = dict(os.environ)
            if "PYTHONPATH" in env:
                del env["PYTHONPATH"]
            kwargs["env"] = env
        return subprocess.check_output(cmd, *args, **kwargs)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def _which_dirs(cmd):
|
| 38 |
+
result = set()
|
| 39 |
+
for path in os.environ.get('PATH', '').split(os.pathsep):
|
| 40 |
+
filename = os.path.join(path, cmd)
|
| 41 |
+
if os.access(filename, os.X_OK):
|
| 42 |
+
result.add(path)
|
| 43 |
+
return result
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def run_setup_py(cmd, pypath=None, path=None, data_stream=0, env=None):
    """
    Execution command for tests, separate from those used by the
    code directly to prevent accidental behavior issues
    """
    # Returns (returncode, data) where *data* is the selected output stream
    # (0 = stdout, 1 = stderr, or a (start, stop) tuple selecting both).
    if env is None:
        env = dict()
        for envname in os.environ:
            env[envname] = os.environ[envname]

    # override the python path if needed
    if pypath is not None:
        env["PYTHONPATH"] = pypath

    # override the execution path if needed
    if path is not None:
        env["PATH"] = path
    if not env.get("PATH", ""):
        # Minimal PATH: wherever tar and gzip live (needed by sdist-ish cmds).
        env["PATH"] = _which_dirs("tar").union(_which_dirs("gzip"))
        env["PATH"] = os.pathsep.join(env["PATH"])

    cmd = [sys.executable, "setup.py"] + list(cmd)

    # https://bugs.python.org/issue8557
    shell = sys.platform == 'win32'

    try:
        proc = _Popen(
            cmd,
            stdout=_PIPE,
            stderr=_PIPE,
            shell=shell,
            env=env,
            encoding="utf-8",
        )

        if isinstance(data_stream, tuple):
            data_stream = slice(*data_stream)
        data = proc.communicate()[data_stream]
    except OSError:
        # Process could not be spawned at all.
        return 1, ''

    # decode the console string if needed
    if hasattr(data, "decode"):
        # use the default encoding
        data = data.decode()
        data = unicodedata.normalize('NFC', data)

    # communicate calls wait()
    return proc.returncode, data
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/fixtures.py
ADDED
|
@@ -0,0 +1,157 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import os
|
| 3 |
+
import subprocess
|
| 4 |
+
import sys
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
|
| 7 |
+
import path
|
| 8 |
+
import pytest
|
| 9 |
+
|
| 10 |
+
from . import contexts, environment
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
@pytest.fixture
def user_override(monkeypatch):
    """
    Override site.USER_BASE and site.USER_SITE with temporary directories in
    a context.
    """
    with contexts.tempdir() as user_base:
        monkeypatch.setattr('site.USER_BASE', user_base)
        with contexts.tempdir() as user_site:
            monkeypatch.setattr('site.USER_SITE', user_site)
            # Also preserve ENABLE_USER_SITE, which tests may flip.
            with contexts.save_user_site_setting():
                yield
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
@pytest.fixture
def tmpdir_cwd(tmpdir):
    """Change cwd to ``tmpdir`` for the test; yield the original cwd."""
    with tmpdir.as_cwd() as orig:
        yield orig
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@pytest.fixture(autouse=True, scope="session")
def workaround_xdist_376(request):
    """
    Workaround pytest-dev/pytest-xdist#376

    ``pytest-xdist`` tends to inject '' into ``sys.path``,
    which may break certain isolation expectations.
    Remove the entry so the import
    machinery behaves the same irrespective of xdist.
    """
    if not request.config.pluginmanager.has_plugin('xdist'):
        return

    # '' may legitimately be absent; ignore that case.
    with contextlib.suppress(ValueError):
        sys.path.remove('')
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
@pytest.fixture
def sample_project(tmp_path):
    """
    Clone the 'sampleproject' and return a path to it.
    """
    cmd = ['git', 'clone', 'https://github.com/pypa/sampleproject']
    try:
        subprocess.check_call(cmd, cwd=str(tmp_path))
    except Exception:
        # Network/git unavailability should skip, not fail, dependent tests.
        pytest.skip("Unable to clone sampleproject")
    return tmp_path / 'sampleproject'
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
# sdist and wheel artifacts should be stable across a round of tests
|
| 64 |
+
# so we can build them once per session and use the files as "readonly"
|
| 65 |
+
|
| 66 |
+
# In the case of setuptools, building the wheel without sdist may cause
|
| 67 |
+
# it to contain the `build` directory, and therefore create situations with
|
| 68 |
+
# `setuptools/build/lib/build/lib/...`. To avoid that, build both artifacts at once.
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def _build_distributions(tmp_path_factory, request):
    """Build (or reuse) the setuptools sdist+wheel once per session; return their paths."""
    with contexts.session_locked_tmp_dir(
        request, tmp_path_factory, "dist_build"
    ) as tmp:  # pragma: no cover
        # Another worker may already have built the artifacts.
        sdist = next(tmp.glob("*.tar.gz"), None)
        wheel = next(tmp.glob("*.whl"), None)
        if sdist and wheel:
            return (sdist, wheel)

        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
        assert not Path(request.config.rootdir, "build/lib/build").exists()

        # Build both artifacts in one invocation so the wheel does not pick up
        # a stale `build` directory produced by a separate sdist build.
        subprocess.check_output([
            sys.executable,
            "-m",
            "build",
            "--outdir",
            str(tmp),
            str(request.config.rootdir),
        ])

        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
        assert not Path(request.config.rootdir, "build/lib/build").exists()

        return next(tmp.glob("*.tar.gz")), next(tmp.glob("*.whl"))
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
@pytest.fixture(scope="session")
def setuptools_sdist(tmp_path_factory, request):
    """Path to the setuptools sdist under test (prebuilt via env var, or built once)."""
    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")
    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
        return Path(prebuilt).resolve()

    sdist, _ = _build_distributions(tmp_path_factory, request)
    return sdist
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
@pytest.fixture(scope="session")
def setuptools_wheel(tmp_path_factory, request):
    """Path to the setuptools wheel under test (prebuilt via env var, or built once)."""
    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")
    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
        return Path(prebuilt).resolve()

    _, wheel = _build_distributions(tmp_path_factory, request)
    return wheel
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
@pytest.fixture
def venv(tmp_path, setuptools_wheel):
    """Virtual env with the version of setuptools under test installed"""
    env = environment.VirtualEnv()
    env.root = path.Path(tmp_path / 'venv')
    env.create_opts = ['--no-setuptools', '--wheel=bundle']
    # TODO: Use `--no-wheel` when setuptools implements its own bdist_wheel
    env.req = str(setuptools_wheel)
    # In some environments (eg. downstream distro packaging),
    # where tox isn't used to run tests and PYTHONPATH is set to point to
    # a specific setuptools codebase, PYTHONPATH will leak into the spawned
    # processes.
    # env.create() should install the just created setuptools
    # wheel, but it doesn't if it finds another existing matching setuptools
    # installation present on PYTHONPATH:
    # `setuptools is already installed with the same version as the provided
    # wheel. Use --force-reinstall to force an installation of the wheel.`
    # This prevents leaking PYTHONPATH to the created environment.
    with contexts.environment(PYTHONPATH=None):
        return env.create()
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
@pytest.fixture
def venv_without_setuptools(tmp_path):
    """Virtual env without any version of setuptools installed"""
    env = environment.VirtualEnv()
    env.root = path.Path(tmp_path / 'venv_without_setuptools')
    env.create_opts = ['--no-setuptools', '--no-wheel']
    env.ensure_env()
    return env
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
@pytest.fixture
def bare_venv(tmp_path):
    """Virtual env without any common packages installed"""
    virtual_env = environment.VirtualEnv()
    virtual_env.root = path.Path(tmp_path / 'bare_venv')
    virtual_env.create_opts = ['--no-setuptools', '--no-pip', '--no-wheel', '--no-seed']
    virtual_env.ensure_env()
    return virtual_env
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/indexes/test_links_priority/external.html
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<html><body>
|
| 2 |
+
<a href="/foobar-0.1.tar.gz#md5=1__bad_md5___">bad old link</a>
|
| 3 |
+
</body></html>
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<html><body>
|
| 2 |
+
<a href="/foobar-0.1.tar.gz#md5=0_correct_md5">foobar-0.1.tar.gz</a><br/>
|
| 3 |
+
<a href="../../external.html" rel="homepage">external homepage</a><br/>
|
| 4 |
+
</body></html>
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/integration/__init__.py
ADDED
|
File without changes
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/integration/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (162 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/integration/__pycache__/helpers.cpython-310.pyc
ADDED
|
Binary file (3.18 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/integration/__pycache__/test_pip_install_sdist.cpython-310.pyc
ADDED
|
Binary file (6.1 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/integration/helpers.py
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Reusable functions and classes for different types of integration tests.
|
| 2 |
+
|
| 3 |
+
For example ``Archive`` can be used to check the contents of distribution built
|
| 4 |
+
with setuptools, and ``run`` will always try to be as verbose as possible to
|
| 5 |
+
facilitate debugging.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
import subprocess
|
| 10 |
+
import tarfile
|
| 11 |
+
from pathlib import Path
|
| 12 |
+
from zipfile import ZipFile
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def run(cmd, env=None):
    """Run *cmd*, echo its output (to help debugging), and return that output.

    Raises ``subprocess.CalledProcessError`` on a non-zero exit status.
    """
    # Merge with (instead of replacing) the current environment so callers
    # can override individual variables without losing the rest.
    merged_env = dict(os.environ)
    merged_env.update(env or {})
    proc = subprocess.run(
        cmd,
        capture_output=True,
        text=True,
        encoding="utf-8",
        env=merged_env,
    )

    combined = proc.stdout + "\n" + proc.stderr
    # pytest omits stdout/err by default; if the test fails these help debugging.
    print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
    print(f"Command: {cmd}\nreturn code: {proc.returncode}\n\n{combined}")

    if proc.returncode != 0:
        raise subprocess.CalledProcessError(
            proc.returncode, cmd, proc.stdout, proc.stderr
        )
    return combined
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class Archive:
    """Compatibility layer for ZipFile/Info and TarFile/Info"""

    def __init__(self, filename):
        """Open *filename* as a tar.gz or zip archive, based on its extension."""
        self._filename = filename
        if filename.endswith("tar.gz"):
            self._obj = tarfile.open(filename, "r:gz")
        elif filename.endswith("zip"):
            self._obj = ZipFile(filename)
        else:
            # FIX: the original f-string had no placeholder, so the offending
            # filename never appeared in the error message.
            raise ValueError(f"{filename} doesn't seem to be a zip or tar.gz")

    def __iter__(self):
        """Iterate over the archive members (ZipInfo or TarInfo objects)."""
        if hasattr(self._obj, "infolist"):
            return iter(self._obj.infolist())
        return iter(self._obj)

    def get_name(self, zip_or_tar_info):
        """Return the member's path inside the archive."""
        if hasattr(zip_or_tar_info, "filename"):
            return zip_or_tar_info.filename
        return zip_or_tar_info.name

    def get_content(self, zip_or_tar_info):
        """Return the member's content decoded as UTF-8 text."""
        if hasattr(self._obj, "extractfile"):
            content = self._obj.extractfile(zip_or_tar_info)
            if content is None:
                # e.g. a directory or link entry has no extractable stream
                msg = f"Invalid {zip_or_tar_info.name} in {self._filename}"
                raise ValueError(msg)
            return str(content.read(), "utf-8")
        return str(self._obj.read(zip_or_tar_info), "utf-8")
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def get_sdist_members(sdist_path):
    """Return the set of member paths in an sdist, relative to its root folder."""
    with tarfile.open(sdist_path, "r:gz") as tar:
        names = tar.getnames()
    # Every sdist member lives under a single "<name>-<version>/" root folder;
    # strip it (the root entry itself becomes "" and is discarded).
    stripped = ("/".join(Path(name).parts[1:]) for name in names)
    return {member for member in stripped if member}
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def get_wheel_members(wheel_path):
    """Return the set of file names contained in a wheel."""
    with ZipFile(wheel_path) as archive:
        return set(archive.namelist())
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/integration/test_pip_install_sdist.py
ADDED
|
@@ -0,0 +1,223 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# https://github.com/python/mypy/issues/16936
|
| 2 |
+
# mypy: disable-error-code="has-type"
|
| 3 |
+
"""Integration tests for setuptools that focus on building packages via pip.
|
| 4 |
+
|
| 5 |
+
The idea behind these tests is not to exhaustively check all the possible
|
| 6 |
+
combinations of packages, operating systems, supporting libraries, etc, but
|
| 7 |
+
rather check a limited number of popular packages and how they interact with
|
| 8 |
+
the exposed public API. This way if any change in API is introduced, we hope to
|
| 9 |
+
identify backward compatibility problems before publishing a release.
|
| 10 |
+
|
| 11 |
+
The number of tested packages is purposefully kept small, to minimise duration
|
| 12 |
+
and the associated maintenance cost (changes in the way these packages define
|
| 13 |
+
their build process may require changes in the tests).
|
| 14 |
+
"""
|
| 15 |
+
|
| 16 |
+
import json
|
| 17 |
+
import os
|
| 18 |
+
import shutil
|
| 19 |
+
import sys
|
| 20 |
+
from enum import Enum
|
| 21 |
+
from glob import glob
|
| 22 |
+
from hashlib import md5
|
| 23 |
+
from urllib.request import urlopen
|
| 24 |
+
|
| 25 |
+
import pytest
|
| 26 |
+
from packaging.requirements import Requirement
|
| 27 |
+
|
| 28 |
+
from .helpers import Archive, run
|
| 29 |
+
|
| 30 |
+
pytestmark = pytest.mark.integration
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
(LATEST,) = Enum("v", "LATEST") # type: ignore[misc] # https://github.com/python/mypy/issues/16936
|
| 34 |
+
"""Default version to be checked"""
|
| 35 |
+
# There are positive and negative aspects of checking the latest version of the
|
| 36 |
+
# packages.
|
| 37 |
+
# The main positive aspect is that the latest version might have already
|
| 38 |
+
# removed the use of APIs deprecated in previous releases of setuptools.
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
# Packages to be tested:
|
| 42 |
+
# (Please notice the test environment cannot support EVERY library required for
|
| 43 |
+
# compiling binary extensions. In Ubuntu/Debian nomenclature, we only assume
|
| 44 |
+
# that `build-essential`, `gfortran` and `libopenblas-dev` are installed,
|
| 45 |
+
# due to their relevance to the numerical/scientific programming ecosystem)
|
| 46 |
+
EXAMPLES = [
|
| 47 |
+
("pip", LATEST), # just in case...
|
| 48 |
+
("pytest", LATEST), # uses setuptools_scm
|
| 49 |
+
("mypy", LATEST), # custom build_py + ext_modules
|
| 50 |
+
# --- Popular packages: https://hugovk.github.io/top-pypi-packages/ ---
|
| 51 |
+
("botocore", LATEST),
|
| 52 |
+
("kiwisolver", LATEST), # build_ext
|
| 53 |
+
("brotli", LATEST), # not in the list but used by urllib3
|
| 54 |
+
("pyyaml", LATEST), # cython + custom build_ext + custom distclass
|
| 55 |
+
("charset-normalizer", LATEST), # uses mypyc, used by aiohttp
|
| 56 |
+
("protobuf", LATEST),
|
| 57 |
+
("requests", LATEST),
|
| 58 |
+
("celery", LATEST),
|
| 59 |
+
# When adding packages to this list, make sure they expose a `__version__`
|
| 60 |
+
# attribute, or modify the tests below
|
| 61 |
+
]
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
# Some packages have "optional" dependencies that modify their build behaviour
|
| 65 |
+
# and are not listed in pyproject.toml, others still use `setup_requires`
|
| 66 |
+
EXTRA_BUILD_DEPS = {
|
| 67 |
+
"pyyaml": ("Cython<3.0",), # constraint to avoid errors
|
| 68 |
+
"charset-normalizer": ("mypy>=1.4.1",), # no pyproject.toml available
|
| 69 |
+
}
|
| 70 |
+
|
| 71 |
+
EXTRA_ENV_VARS = {
|
| 72 |
+
"pyyaml": {"PYYAML_FORCE_CYTHON": "1"},
|
| 73 |
+
"charset-normalizer": {"CHARSET_NORMALIZER_USE_MYPYC": "1"},
|
| 74 |
+
}
|
| 75 |
+
|
| 76 |
+
IMPORT_NAME = {
|
| 77 |
+
"pyyaml": "yaml",
|
| 78 |
+
"protobuf": "google.protobuf",
|
| 79 |
+
}
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
VIRTUALENV = (sys.executable, "-m", "virtualenv")
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
# By default, pip will try to build packages in isolation (PEP 517), which
|
| 86 |
+
# means it will download the previous stable version of setuptools.
|
| 87 |
+
# `pip` flags can avoid that (the version of setuptools under test
|
| 88 |
+
# should be the one to be used)
|
| 89 |
+
INSTALL_OPTIONS = (
|
| 90 |
+
"--ignore-installed",
|
| 91 |
+
"--no-build-isolation",
|
| 92 |
+
# Omit "--no-binary :all:" the sdist is supplied directly.
|
| 93 |
+
# Allows dependencies as wheels.
|
| 94 |
+
)
|
| 95 |
+
# The downside of `--no-build-isolation` is that pip will not download build
|
| 96 |
+
# dependencies. The test script will have to also handle that.
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
@pytest.fixture
def venv_python(tmp_path):
    """Create a throwaway virtualenv and return the path to its interpreter."""
    run([*VIRTUALENV, str(tmp_path / ".venv")])
    # The interpreter directory name differs per platform ("bin" vs
    # "Scripts"), so glob for it and let shutil.which locate the executable.
    candidate_dirs = (str(p.parent) for p in tmp_path.glob(".venv/*/python*"))
    return shutil.which("python", path=os.pathsep.join(candidate_dirs))
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
@pytest.fixture(autouse=True)
def _prepare(tmp_path, venv_python, monkeypatch):
    """Per-test setup/teardown: cache dir, build env vars, and debug output."""
    download_path = os.getenv("DOWNLOAD_PATH", str(tmp_path))
    os.makedirs(download_path, exist_ok=True)

    # Environment vars used for building some of the packages
    monkeypatch.setenv("USE_MYPYC", "1")

    yield

    # Let's provide the maximum amount of information possible in the case
    # it is necessary to debug the tests directly from the CI logs.
    print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
    print("Temporary directory:")
    # FIX: ``map(print, tmp_path.glob("*"))`` built a lazy iterator that was
    # never consumed, so nothing was ever printed.  Iterate explicitly.
    for entry in tmp_path.glob("*"):
        print(entry)
    print("Virtual environment:")
    run([venv_python, "-m", "pip", "freeze"])
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
@pytest.mark.parametrize(("package", "version"), EXAMPLES)
@pytest.mark.uses_network
def test_install_sdist(package, version, tmp_path, venv_python, setuptools_wheel):
    """End-to-end: pip-install *package*'s sdist using the setuptools under test."""
    venv_pip = (venv_python, "-m", "pip")
    sdist = retrieve_sdist(package, version, tmp_path)
    deps = build_deps(package, sdist)
    if deps:
        print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
        print("Dependencies:", deps)
        run([*venv_pip, "install", *deps])

    # Use a virtualenv to simulate PEP 517 isolation
    # but install fresh setuptools wheel to ensure the version under development
    env = EXTRA_ENV_VARS.get(package, {})
    run([*venv_pip, "install", "--force-reinstall", setuptools_wheel])
    run([*venv_pip, "install", *INSTALL_OPTIONS, sdist], env)

    # Execute a simple script to make sure the package was installed correctly
    pkg = IMPORT_NAME.get(package, package).replace("-", "_")
    script = f"import {pkg}; print(getattr({pkg}, '__version__', 0))"
    run([venv_python, "-c", script])
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
# ---- Helper Functions ----
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def retrieve_sdist(package, version, tmp_path):
    """Either use cached sdist file or download it from PyPI"""
    # `pip download` cannot be used due to
    # https://github.com/pypa/pip/issues/1884
    # https://discuss.python.org/t/pep-625-file-name-of-a-source-distribution/4686
    # We have to find the correct distribution file and download it
    download_path = os.getenv("DOWNLOAD_PATH", str(tmp_path))
    dist = retrieve_pypi_sdist_metadata(package, version)

    # Remove old files to prevent cache to grow indefinitely
    for file in glob(os.path.join(download_path, f"{package}*")):
        # FIX: ``file`` is a full path while ``dist["filename"]`` is a bare
        # file name, so the original comparison never matched and even the
        # current sdist was unlinked — defeating the cache.  Compare basenames.
        if os.path.basename(file) != dist["filename"]:
            os.unlink(file)

    dist_file = os.path.join(download_path, dist["filename"])
    if not os.path.exists(dist_file):
        download(dist["url"], dist_file, dist["md5_digest"])
    return dist_file
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
def retrieve_pypi_sdist_metadata(package, version):
    """Return the PyPI JSON metadata entry describing the package's sdist file."""
    # https://warehouse.pypa.io/api-reference/json.html
    id_ = package if version is LATEST else f"{package}/{version}"
    with urlopen(f"https://pypi.org/pypi/{id_}/json") as f:
        metadata = json.load(f)

    if metadata["info"]["yanked"]:
        raise ValueError(f"Release for {package} {version} was yanked")

    # NOTE(review): ``version`` is overwritten on the next line, so the
    # ``version is LATEST`` test below can never be true and ``metadata["urls"]``
    # is always used.  ``urls`` does describe the fetched release, so behavior
    # looks correct, but the conditional appears to be dead code — confirm intent.
    version = metadata["info"]["version"]
    release = metadata["releases"][version] if version is LATEST else metadata["urls"]
    # Exactly one sdist is expected per release; unpacking enforces that.
    (sdist,) = filter(lambda d: d["packagetype"] == "sdist", release)
    return sdist
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
def download(url, dest, md5_digest):
    """Fetch *url* into *dest*, verifying the payload against *md5_digest*."""
    with urlopen(url) as response:
        payload = response.read()

    # Guard against corrupted or truncated downloads before writing anything.
    assert md5(payload).hexdigest() == md5_digest

    with open(dest, "wb") as output:
        output.write(payload)

    assert os.path.exists(dest)
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def build_deps(package, sdist_file):
    """Find out what are the build dependencies for a package.

    "Manually" install them, since pip will not install build
    deps with `--no-build-isolation`.
    """
    # delay importing, since pytest discovery phase may hit this file from a
    # testenv without tomli
    from setuptools.compat.py310 import tomllib

    pyproject_text = _read_pyproject(Archive(sdist_file))
    declared = list(
        tomllib.loads(pyproject_text).get("build-system", {}).get("requires", [])
    )
    declared.extend(EXTRA_BUILD_DEPS.get(package, []))
    # Deduplicate by requirement name and drop setuptools itself
    # (the version under test is installed separately).
    by_name = {Requirement(d).name: d for d in declared}
    return [req for name, req in by_name.items() if name != "setuptools"]
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
def _read_pyproject(archive):
    """Return the text of the archive's pyproject.toml, or "" if absent."""
    for member in archive:
        if os.path.basename(archive.get_name(member)) == "pyproject.toml":
            return archive.get_content(member)
    return ""
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/mod_with_constant.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
value = 'three, sir!'
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/namespaces.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import ast
|
| 2 |
+
import json
|
| 3 |
+
import textwrap
|
| 4 |
+
from pathlib import Path
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def iter_namespace_pkgs(namespace):
    """Yield every ancestor package of a dotted *namespace*, outermost first.

    e.g. "a.b.c" yields "a", "a.b", "a.b.c".
    """
    parts = namespace.split(".")
    for depth in range(1, len(parts) + 1):
        yield ".".join(parts[:depth])
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def build_namespace_package(tmpdir, name, version="1.0", impl="pkg_resources"):
    """Create an old-style namespace-package project for *name* under *tmpdir*.

    Returns the project directory.  *impl* chooses the namespace mechanism
    ("pkg_resources" or "pkgutil").
    """
    project_dir = tmpdir / name
    project_dir.mkdir()
    setup_py = project_dir / 'setup.py'
    namespace, _, leaf = name.rpartition('.')
    ancestor_pkgs = list(iter_namespace_pkgs(namespace))

    setup_args = {
        "name": name,
        "version": version,
        "packages": ancestor_pkgs,
    }

    # Each namespace __init__.py gets the boilerplate for the chosen mechanism.
    if impl == "pkg_resources":
        init_template = '__import__("pkg_resources").declare_namespace(__name__)'
        setup_args["namespace_packages"] = ancestor_pkgs
    elif impl == "pkgutil":
        init_template = '__path__ = __import__("pkgutil").extend_path(__path__, __name__)'
    else:
        raise ValueError(f"Cannot recognise {impl=} when creating namespaces")

    args = json.dumps(setup_args, indent=4)
    assert ast.literal_eval(args)  # ensure it is valid Python

    script = textwrap.dedent(
        """\
        import setuptools
        args = {args}
        setuptools.setup(**args)
        """
    ).format(args=args)
    setup_py.write_text(script, encoding='utf-8')

    pkg_dir = Path(project_dir, namespace.replace(".", "/"))
    pkg_dir.mkdir(parents=True)

    for ancestor in ancestor_pkgs:
        init_file = project_dir / ancestor.replace(".", "/") / '__init__.py'
        init_file.write_text(init_template, encoding='utf-8')

    module_file = pkg_dir / (leaf + '.py')
    module_file.write_text(f"name = {leaf!r}", encoding='utf-8')
    return project_dir
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def build_pep420_namespace_package(tmpdir, name):
    """Create a PEP 420 (implicit) namespace-package project and return its dir."""
    project_dir = tmpdir / name
    project_dir.mkdir()
    pyproject = project_dir / "pyproject.toml"
    namespace, _, leaf = name.rpartition(".")

    config = textwrap.dedent(
        f"""\
        [build-system]
        requires = ["setuptools"]
        build-backend = "setuptools.build_meta"

        [project]
        name = "{name}"
        version = "3.14159"
        """
    )
    pyproject.write_text(config, encoding='utf-8')

    # No __init__.py files: the namespace is implicit (PEP 420).
    pkg_dir = Path(project_dir, namespace.replace(".", "/"))
    pkg_dir.mkdir(parents=True)
    module_file = pkg_dir / (leaf + ".py")
    module_file.write_text(f"name = {leaf!r}", encoding='utf-8')
    return project_dir
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def make_site_dir(target):
    """
    Add a sitecustomize.py module in target to cause
    target to be added to site dirs such that .pth files
    are processed there.
    """
    hook = target / 'sitecustomize.py'
    code = '__import__("site").addsitedir({!r})'.format(str(target))
    hook.write_text(code, encoding='utf-8')
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/script-with-bom.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
result = 'passed'
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/server.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Basic http server for tests to simulate PyPI or custom indexes"""
|
| 2 |
+
|
| 3 |
+
import http.server
|
| 4 |
+
import os
|
| 5 |
+
import threading
|
| 6 |
+
import time
|
| 7 |
+
import urllib.parse
|
| 8 |
+
import urllib.request
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class IndexServer(http.server.HTTPServer):
    """Basic single-threaded http server simulating a package index

    You can use this server in unittest like this::
        s = IndexServer()
        s.start()
        index_url = s.base_url() + 'mytestindex'
        # do some test requests to the index
        # The index files should be located in setuptools/tests/indexes
        s.stop()
    """

    def __init__(
        self,
        server_address=('', 0),
        RequestHandlerClass=http.server.SimpleHTTPRequestHandler,
    ):
        # Port 0 lets the OS pick a free ephemeral port.
        http.server.HTTPServer.__init__(self, server_address, RequestHandlerClass)
        self._run = True

    def start(self):
        # Serve requests on a background thread until stop() is called.
        self.thread = threading.Thread(target=self.serve_forever)
        self.thread.start()

    def stop(self):
        "Stop the server"

        # Let the server finish the last request and wait for a new one.
        time.sleep(0.1)

        self.shutdown()
        self.thread.join()
        self.socket.close()

    def base_url(self):
        # SimpleHTTPRequestHandler serves files relative to the process cwd,
        # hence the repository-relative path segment in the URL.
        port = self.server_port
        return f'http://127.0.0.1:{port}/setuptools/tests/indexes/'
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class RequestRecorder(http.server.BaseHTTPRequestHandler):
    # Handler that records every GET request on the owning server and replies 200.
    def do_GET(self):
        # Stash this handler instance on the server so tests can later inspect
        # the requests that were made (path, headers, ...).
        requests = vars(self.server).setdefault('requests', [])
        requests.append(self)
        self.send_response(200, 'OK')
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class MockServer(http.server.HTTPServer, threading.Thread):
    """
    A simple HTTP Server that records the requests made to it.
    """

    def __init__(self, server_address=('', 0), RequestHandlerClass=RequestRecorder):
        # Both bases need explicit initialisation (multiple inheritance).
        http.server.HTTPServer.__init__(self, server_address, RequestHandlerClass)
        threading.Thread.__init__(self)
        self.daemon = True  # don't block interpreter shutdown
        self.requests = []

    def run(self):
        # Thread entry point: serve until the (daemon) thread is torn down.
        self.serve_forever()

    @property
    def netloc(self):
        # host:port of the live server (port chosen by the OS at bind time).
        return f'localhost:{self.server_port}'

    @property
    def url(self):
        # Base URL clients should use to reach this server.
        return f'http://{self.netloc}/'
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def path_to_url(path, authority=None):
    """Convert a path to a file: URL."""
    absolute = os.path.normpath(os.path.abspath(path))
    prefix = 'file:' if authority is None else 'file://' + authority
    return urllib.parse.urljoin(prefix, urllib.request.pathname2url(absolute))
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_archive_util.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import tarfile
|
| 3 |
+
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
from setuptools import archive_util
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@pytest.fixture
def tarfile_with_unicode(tmpdir):
    """
    Create a tarfile containing only a file whose name is
    a zero byte file called testimäge.png.
    """
    buffer = io.BytesIO()
    with tarfile.open(fileobj=buffer, mode="w:gz") as tgz:
        member = tarfile.TarInfo("testimäge.png")
        member.size = 0
        tgz.addfile(member, io.BytesIO(b""))

    target = tmpdir / 'unicode-pkg-1.0.tar.gz'
    with open(str(target), mode='wb') as out:
        out.write(buffer.getvalue())
    return str(target)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@pytest.mark.xfail(reason="#710 and #712")
def test_unicode_files(tarfile_with_unicode, tmpdir):
    # Known failure: unpacking archives with non-ASCII member names (#710, #712).
    target = tmpdir / 'out'
    archive_util.unpack_archive(tarfile_with_unicode, str(target))
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_bdist_deprecations.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""develop tests"""
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
from unittest import mock
|
| 5 |
+
|
| 6 |
+
import pytest
|
| 7 |
+
|
| 8 |
+
from setuptools import SetuptoolsDeprecationWarning
|
| 9 |
+
from setuptools.dist import Distribution
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
@pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only')
@pytest.mark.xfail(reason="bdist_rpm is long deprecated, should we remove it? #1988")
@mock.patch('distutils.command.bdist_rpm.bdist_rpm')
def test_bdist_rpm_warning(distutils_cmd, tmpdir_cwd):
    """Running ``bdist_rpm`` should emit a SetuptoolsDeprecationWarning."""
    dist = Distribution(
        dict(
            script_name='setup.py',
            script_args=['bdist_rpm'],
            name='foo',
            py_modules=['hi'],
        )
    )
    dist.parse_command_line()
    with pytest.warns(SetuptoolsDeprecationWarning):
        dist.run_commands()

    # The underlying (mocked) distutils implementation must still be invoked.
    distutils_cmd.run.assert_called_once()
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_bdist_egg.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""develop tests"""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
import zipfile
|
| 6 |
+
|
| 7 |
+
import pytest
|
| 8 |
+
|
| 9 |
+
from setuptools.dist import Distribution
|
| 10 |
+
|
| 11 |
+
from . import contexts
|
| 12 |
+
|
| 13 |
+
SETUP_PY = """\
|
| 14 |
+
from setuptools import setup
|
| 15 |
+
|
| 16 |
+
setup(py_modules=['hi'])
|
| 17 |
+
"""
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@pytest.fixture
def setup_context(tmpdir):
    """Materialise a tiny project (setup.py + hi.py) and run the test inside it."""
    files = {'setup.py': SETUP_PY, 'hi.py': '1\n'}
    for filename, content in files.items():
        with (tmpdir / filename).open('w') as stream:
            stream.write(content)
    with tmpdir.as_cwd():
        yield tmpdir
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class Test:
    """Tests for the ``bdist_egg`` command."""

    @pytest.mark.usefixtures("user_override")
    @pytest.mark.usefixtures("setup_context")
    def test_bdist_egg(self):
        """Building an egg produces a correctly named file in dist/."""
        dist = Distribution(
            dict(
                script_name='setup.py',
                script_args=['bdist_egg'],
                name='foo',
                py_modules=['hi'],
            )
        )
        os.makedirs(os.path.join('build', 'src'))
        with contexts.quiet():
            dist.parse_command_line()
            dist.run_commands()

        # let's see if we got our egg link at the right place
        [content] = os.listdir('dist')
        assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content)

    @pytest.mark.xfail(
        os.environ.get('PYTHONDONTWRITEBYTECODE', False),
        reason="Byte code disabled",
    )
    @pytest.mark.usefixtures("user_override")
    @pytest.mark.usefixtures("setup_context")
    def test_exclude_source_files(self):
        """--exclude-source-files ships only byte code, not .py sources."""
        dist = Distribution(
            dict(
                script_name='setup.py',
                script_args=['bdist_egg', '--exclude-source-files'],
                py_modules=['hi'],
            )
        )
        with contexts.quiet():
            dist.parse_command_line()
            dist.run_commands()
        [dist_name] = os.listdir('dist')
        dist_filename = os.path.join('dist', dist_name)
        # FIX: close the archive via a context manager (the handle previously
        # leaked) and avoid shadowing the ``zip`` builtin.
        with zipfile.ZipFile(dist_filename) as egg:
            names = [zi.filename for zi in egg.filelist]
        assert 'hi.pyc' in names
        assert 'hi.py' not in names
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_bdist_wheel.py
ADDED
|
@@ -0,0 +1,623 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import builtins
|
| 4 |
+
import importlib
|
| 5 |
+
import os.path
|
| 6 |
+
import platform
|
| 7 |
+
import shutil
|
| 8 |
+
import stat
|
| 9 |
+
import struct
|
| 10 |
+
import sys
|
| 11 |
+
import sysconfig
|
| 12 |
+
from contextlib import suppress
|
| 13 |
+
from inspect import cleandoc
|
| 14 |
+
from zipfile import ZipFile
|
| 15 |
+
|
| 16 |
+
import jaraco.path
|
| 17 |
+
import pytest
|
| 18 |
+
from packaging import tags
|
| 19 |
+
|
| 20 |
+
import setuptools
|
| 21 |
+
from setuptools.command.bdist_wheel import bdist_wheel, get_abi_tag
|
| 22 |
+
from setuptools.dist import Distribution
|
| 23 |
+
from setuptools.warnings import SetuptoolsDeprecationWarning
|
| 24 |
+
|
| 25 |
+
from distutils.core import run_setup
|
| 26 |
+
|
| 27 |
+
# Metadata files every built wheel of the dummy project must contain.
DEFAULT_FILES = {
    "dummy_dist-1.0.dist-info/top_level.txt",
    "dummy_dist-1.0.dist-info/METADATA",
    "dummy_dist-1.0.dist-info/WHEEL",
    "dummy_dist-1.0.dist-info/RECORD",
}
# File names bdist_wheel picks up automatically as license files.
DEFAULT_LICENSE_FILES = {
    "LICENSE",
    "LICENSE.txt",
    "LICENCE",
    "LICENCE.txt",
    "COPYING",
    "COPYING.md",
    "NOTICE",
    "NOTICE.rst",
    "AUTHORS",
    "AUTHORS.txt",
}
# License-like names (editor backups) that must NOT be picked up.
OTHER_IGNORED_FILES = {
    "LICENSE~",
    "AUTHORS~",
}
# Minimal setup.py used by the "dummy-dist" example project.
SETUPPY_EXAMPLE = """\
from setuptools import setup

setup(
    name='dummy_dist',
    version='1.0',
)
"""
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
# Example project trees (name -> jaraco.path.build file spec).  Each entry
# is materialized into a temp dir by ``mkexample`` and built into a wheel
# by the session-scoped ``wheel_paths`` fixture.
EXAMPLES = {
    "dummy-dist": {
        "setup.py": SETUPPY_EXAMPLE,
        "licenses": {"DUMMYFILE": ""},
        # Create every default license file plus the ignored backups, empty.
        **dict.fromkeys(DEFAULT_LICENSE_FILES | OTHER_IGNORED_FILES, ""),
    },
    "simple-dist": {
        "setup.py": cleandoc(
            """
            from setuptools import setup

            setup(
                name="simple.dist",
                version="0.1",
                description="A testing distribution \N{SNOWMAN}",
                extras_require={"voting": ["beaglevote"]},
            )
            """
        ),
        "simpledist": "",
    },
    "complex-dist": {
        "setup.py": cleandoc(
            """
            from setuptools import setup

            setup(
                name="complex-dist",
                version="0.1",
                description="Another testing distribution \N{SNOWMAN}",
                long_description="Another testing distribution \N{SNOWMAN}",
                author="Illustrious Author",
                author_email="illustrious@example.org",
                url="http://example.org/exemplary",
                packages=["complexdist"],
                setup_requires=["setuptools"],
                install_requires=["quux", "splort"],
                extras_require={"simple": ["simple.dist"]},
                entry_points={
                    "console_scripts": [
                        "complex-dist=complexdist:main",
                        "complex-dist2=complexdist:main",
                    ],
                },
            )
            """
        ),
        "complexdist": {"__init__.py": "def main(): return"},
    },
    "headers-dist": {
        "setup.py": cleandoc(
            """
            from setuptools import setup

            setup(
                name="headers.dist",
                version="0.1",
                description="A distribution with headers",
                headers=["header.h"],
            )
            """
        ),
        "headersdist.py": "",
        "header.h": "",
    },
    "commasinfilenames-dist": {
        "setup.py": cleandoc(
            """
            from setuptools import setup

            setup(
                name="testrepo",
                version="0.1",
                packages=["mypackage"],
                description="A test package with commas in file names",
                include_package_data=True,
                package_data={"mypackage.data": ["*"]},
            )
            """
        ),
        "mypackage": {
            "__init__.py": "",
            "data": {"__init__.py": "", "1,2,3.txt": ""},
        },
        "testrepo-0.1.0": {
            "mypackage": {"__init__.py": ""},
        },
    },
    "unicode-dist": {
        "setup.py": cleandoc(
            """
            from setuptools import setup

            setup(
                name="unicode.dist",
                version="0.1",
                description="A testing distribution \N{SNOWMAN}",
                packages=["unicodedist"],
                zip_safe=True,
            )
            """
        ),
        "unicodedist": {"__init__.py": "", "åäö_日本語.py": ""},
    },
    "utf8-metadata-dist": {
        "setup.cfg": cleandoc(
            """
            [metadata]
            name = utf8-metadata-dist
            version = 42
            author_email = "John X. Ãørçeč" <john@utf8.org>, Γαμα קּ 東 <gama@utf8.org>
            long_description = file: README.rst
            """
        ),
        "README.rst": "UTF-8 描述 説明",
    },
}
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
if sys.platform != "win32":
    # ABI3 extensions don't really work on Windows
    # (so this example project is only registered on other platforms).
    EXAMPLES["abi3extension-dist"] = {
        "setup.py": cleandoc(
            """
            from setuptools import Extension, setup

            setup(
                name="extension.dist",
                version="0.1",
                description="A testing distribution \N{SNOWMAN}",
                ext_modules=[
                    Extension(
                        name="extension", sources=["extension.c"], py_limited_api=True
                    )
                ],
            )
            """
        ),
        "setup.cfg": "[bdist_wheel]\npy_limited_api=cp32",
        "extension.c": "#define Py_LIMITED_API 0x03020000\n#include <Python.h>",
    }
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
def bdist_wheel_cmd(**kwargs):
    """Run command in the same process so that it is easier to collect coverage"""
    if os.path.exists("setup.py"):
        dist_obj = run_setup("setup.py", stop_after="init")
    else:
        dist_obj = Distribution({"script_name": "%%build_meta%%"})
    dist_obj.parse_config_files()
    cmd = bdist_wheel(dist_obj)
    # Apply caller-supplied options before finalizing.
    for attr, value in kwargs.items():
        setattr(cmd, attr, value)
    cmd.finalize_options()
    return cmd
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
def mkexample(tmp_path_factory, name):
    """Materialize the ``EXAMPLES[name]`` file tree under a fresh temp dir."""
    root = tmp_path_factory.mktemp(name)
    jaraco.path.build(EXAMPLES[name], prefix=str(root))
    return root
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
@pytest.fixture(scope="session")
def wheel_paths(tmp_path_factory):
    """Build a wheel for every example project; return sorted .whl paths."""
    build_base = tmp_path_factory.mktemp("build")
    dist_dir = tmp_path_factory.mktemp("dist")
    for name in EXAMPLES:
        example_dir = mkexample(tmp_path_factory, name)
        with jaraco.path.DirectoryStack().context(example_dir):
            bdist_wheel_cmd(
                bdist_dir=str(build_base / name), dist_dir=str(dist_dir)
            ).run()

    return sorted(str(fname) for fname in dist_dir.glob("*.whl"))
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
@pytest.fixture
def dummy_dist(tmp_path_factory):
    """A minimal project tree containing many license-like files."""
    return mkexample(tmp_path_factory, "dummy-dist")
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
def test_no_scripts(wheel_paths):
    """Make sure entry point scripts are not generated."""
    wheel = next(p for p in wheel_paths if "complex_dist" in p)
    assert all(
        ".data/scripts/" not in entry.filename
        for entry in ZipFile(wheel).infolist()
    )
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
def test_unicode_record(wheel_paths):
    """RECORD must list non-ASCII filenames, encoded as UTF-8."""
    wheel = next(p for p in wheel_paths if "unicode.dist" in p)
    with ZipFile(wheel) as zf:
        record = zf.read("unicode.dist-0.1.dist-info/RECORD")

    assert "åäö_日本語.py".encode() in record
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
UTF8_PKG_INFO = """\
|
| 257 |
+
Metadata-Version: 2.1
|
| 258 |
+
Name: helloworld
|
| 259 |
+
Version: 42
|
| 260 |
+
Author-email: "John X. Ãørçeč" <john@utf8.org>, Γαμα קּ 東 <gama@utf8.org>
|
| 261 |
+
|
| 262 |
+
|
| 263 |
+
UTF-8 描述 説明
|
| 264 |
+
"""
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
def test_preserve_unicode_metadata(monkeypatch, tmp_path):
    """egg2dist must carry UTF-8 metadata through unchanged."""
    monkeypatch.chdir(tmp_path)
    egginfo = tmp_path / "dummy_dist.egg-info"
    distinfo = tmp_path / "dummy_dist.dist-info"

    egginfo.mkdir()
    (egginfo / "PKG-INFO").write_text(UTF8_PKG_INFO, encoding="utf-8")
    (egginfo / "dependency_links.txt").touch()

    class simpler_bdist_wheel(bdist_wheel):
        """Avoid messing with setuptools/distutils internals"""

        def __init__(self):
            pass

        @property
        def license_paths(self):
            return []

    simpler_bdist_wheel().egg2dist(egginfo, distinfo)

    metadata = (distinfo / "METADATA").read_text(encoding="utf-8")
    for expected_fragment in (
        'Author-email: "John X. Ãørçeč"',
        "Γαμα קּ 東 ",
        "UTF-8 描述 説明",
    ):
        assert expected_fragment in metadata
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
def test_licenses_default(dummy_dist, monkeypatch, tmp_path):
    """With no config, all default license file names are bundled."""
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        expected_licenses = {
            f"dummy_dist-1.0.dist-info/{fname}" for fname in DEFAULT_LICENSE_FILES
        }
        assert set(wf.namelist()) == DEFAULT_FILES | expected_licenses
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
    """The deprecated singular ``license_file`` option is still honored."""
    config = "[metadata]\nlicense_file=licenses/DUMMYFILE"
    dummy_dist.joinpath("setup.cfg").write_text(config, encoding="utf-8")
    monkeypatch.chdir(dummy_dist)

    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()

    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        assert set(wf.namelist()) == DEFAULT_FILES | {
            "dummy_dist-1.0.dist-info/DUMMYFILE"
        }
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
@pytest.mark.parametrize(
    ("config_file", "config"),
    [
        ("setup.cfg", "[metadata]\nlicense_files=licenses/*\n LICENSE"),
        ("setup.cfg", "[metadata]\nlicense_files=licenses/*, LICENSE"),
        (
            "setup.py",
            SETUPPY_EXAMPLE.replace(
                ")", " license_files=['licenses/DUMMYFILE', 'LICENSE'])"
            ),
        ),
    ],
)
def test_licenses_override(dummy_dist, monkeypatch, tmp_path, config_file, config):
    """Explicit ``license_files`` settings replace the default globs."""
    dummy_dist.joinpath(config_file).write_text(config, encoding="utf-8")
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        expected_licenses = {
            f"dummy_dist-1.0.dist-info/{fname}" for fname in ("DUMMYFILE", "LICENSE")
        }
        assert set(wf.namelist()) == DEFAULT_FILES | expected_licenses
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
def test_licenses_disabled(dummy_dist, monkeypatch, tmp_path):
    """An empty ``license_files`` setting suppresses license bundling."""
    config = "[metadata]\nlicense_files=\n"
    dummy_dist.joinpath("setup.cfg").write_text(config, encoding="utf-8")
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        assert set(wf.namelist()) == DEFAULT_FILES
|
| 350 |
+
|
| 351 |
+
|
| 352 |
+
def test_build_number(dummy_dist, monkeypatch, tmp_path):
    """A build number becomes part of the wheel file name."""
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2").run()
    with ZipFile("dist/dummy_dist-1.0-2-py3-none-any.whl") as wf:
        names = set(wf.namelist())
    assert "dummy_dist-1.0.dist-info/RECORD" in names
    assert "dummy_dist-1.0.dist-info/METADATA" in names
|
| 359 |
+
|
| 360 |
+
|
| 361 |
+
def test_universal_deprecated(dummy_dist, monkeypatch, tmp_path):
    """``universal=True`` warns but still produces a py2.py3 wheel."""
    monkeypatch.chdir(dummy_dist)
    with pytest.warns(SetuptoolsDeprecationWarning, match=".*universal is deprecated"):
        bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run()

    # For now we still respect the option
    assert os.path.exists("dist/dummy_dist-1.0-py2.py3-none-any.whl")
|
| 368 |
+
|
| 369 |
+
|
| 370 |
+
# Minimal C source for a do-nothing extension module named "extension",
# compiled by test_limited_abi.
EXTENSION_EXAMPLE = """\
#include <Python.h>

static PyMethodDef methods[] = {
  { NULL, NULL, 0, NULL }
};

static struct PyModuleDef module_def = {
  PyModuleDef_HEAD_INIT,
  "extension",
  "Dummy extension module",
  -1,
  methods
};

PyMODINIT_FUNC PyInit_extension(void) {
  return PyModule_Create(&module_def);
}
"""
# setup.py companion for EXTENSION_EXAMPLE (non-limited-API build).
EXTENSION_SETUPPY = """\
from __future__ import annotations

from setuptools import Extension, setup

setup(
    name="extension.dist",
    version="0.1",
    description="A testing distribution \N{SNOWMAN}",
    ext_modules=[Extension(name="extension", sources=["extension.c"])],
)
"""
|
| 401 |
+
|
| 402 |
+
|
| 403 |
+
@pytest.mark.filterwarnings(
    "once:Config variable '.*' is unset.*, Python ABI tag may be incorrect"
)
def test_limited_abi(monkeypatch, tmp_path, tmp_path_factory):
    """Test that building a binary wheel with the limited ABI works."""
    source_dir = tmp_path_factory.mktemp("extension_dist")
    for fname, text in (
        ("setup.py", EXTENSION_SETUPPY),
        ("extension.c", EXTENSION_EXAMPLE),
    ):
        (source_dir / fname).write_text(text, encoding="utf-8")
    monkeypatch.chdir(source_dir)
    bdist_wheel_cmd(
        bdist_dir=str(tmp_path.joinpath("build")),
        dist_dir=str(tmp_path.joinpath("dist")),
    ).run()
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
def test_build_from_readonly_tree(dummy_dist, monkeypatch, tmp_path):
    """Building must succeed even when every source file is read-only."""
    basedir = str(tmp_path.joinpath("dummy"))
    shutil.copytree(str(dummy_dist), basedir)
    monkeypatch.chdir(basedir)

    # Make the tree read-only
    for root, _dirs, files in os.walk(basedir):
        for fname in files:
            os.chmod(os.path.join(root, fname), stat.S_IREAD)

    bdist_wheel_cmd().run()
|
| 428 |
+
|
| 429 |
+
|
| 430 |
+
@pytest.mark.parametrize(
    ("option", "compress_type"),
    list(bdist_wheel.supported_compressions.items()),
    ids=list(bdist_wheel.supported_compressions),
)
def test_compression(dummy_dist, monkeypatch, tmp_path, option, compress_type):
    """Each supported compression option applies to all archive members."""
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path), compression=option).run()
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        names = set(wf.namelist())
        assert "dummy_dist-1.0.dist-info/RECORD" in names
        assert "dummy_dist-1.0.dist-info/METADATA" in names
        assert all(zi.compress_type == compress_type for zi in wf.filelist)
|
| 444 |
+
|
| 445 |
+
|
| 446 |
+
def test_wheelfile_line_endings(wheel_paths):
    """The WHEEL metadata file must use LF-only line endings."""
    for path in wheel_paths:
        with ZipFile(path) as wf:
            wheel_entry = next(
                zi for zi in wf.filelist if zi.filename.endswith("WHEEL")
            )
            assert b"\r" not in wf.read(wheel_entry)
|
| 452 |
+
|
| 453 |
+
|
| 454 |
+
def test_unix_epoch_timestamps(dummy_dist, monkeypatch, tmp_path):
    """SOURCE_DATE_EPOCH=0 predates the zip format; timestamps clamp to 1980."""
    monkeypatch.setenv("SOURCE_DATE_EPOCH", "0")
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2a").run()
    with ZipFile("dist/dummy_dist-1.0-2a-py3-none-any.whl") as wf:
        # min epoch is used
        assert all(zi.date_time >= (1980, 1, 1, 0, 0, 0) for zi in wf.filelist)
|
| 461 |
+
|
| 462 |
+
|
| 463 |
+
def test_get_abi_tag_windows(monkeypatch):
    """CPython/Windows ABI tag tracks debug and free-threaded variants."""
    def fake_soabi(value):
        monkeypatch.setattr(sysconfig, "get_config_var", lambda _key: value)

    monkeypatch.setattr(tags, "interpreter_name", lambda: "cp")
    fake_soabi("cp313-win_amd64")
    assert get_abi_tag() == "cp313"
    # A debug build is detected via the presence of sys.gettotalrefcount.
    monkeypatch.setattr(sys, "gettotalrefcount", lambda: 1, False)
    assert get_abi_tag() == "cp313d"
    fake_soabi("cp313t-win_amd64")
    assert get_abi_tag() == "cp313td"
    monkeypatch.delattr(sys, "gettotalrefcount")
    assert get_abi_tag() == "cp313t"
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
def test_get_abi_tag_pypy_old(monkeypatch):
    """Old-style PyPy SOABI (no platform suffix) maps straight to the tag."""
    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "pypy36-pp73")
    monkeypatch.setattr(tags, "interpreter_name", lambda: "pp")
    assert get_abi_tag() == "pypy36_pp73"
|
| 479 |
+
|
| 480 |
+
|
| 481 |
+
def test_get_abi_tag_pypy_new(monkeypatch):
    """New-style PyPy SOABI drops the trailing platform component."""
    monkeypatch.setattr(tags, "interpreter_name", lambda: "pp")
    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "pypy37-pp73-darwin")
    assert get_abi_tag() == "pypy37_pp73"
|
| 485 |
+
|
| 486 |
+
|
| 487 |
+
def test_get_abi_tag_graalpy(monkeypatch):
    """GraalPy keeps three SOABI components and drops the platform suffix."""
    monkeypatch.setattr(tags, "interpreter_name", lambda: "graalpy")
    monkeypatch.setattr(
        sysconfig, "get_config_var", lambda x: "graalpy231-310-native-x86_64-linux"
    )
    assert get_abi_tag() == "graalpy231_310_native"
|
| 493 |
+
|
| 494 |
+
|
| 495 |
+
def test_get_abi_tag_fallback(monkeypatch):
    """Unknown interpreters fall back to the sanitized SOABI string."""
    monkeypatch.setattr(tags, "interpreter_name", lambda: "unknown-python")
    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "unknown-python-310")
    assert get_abi_tag() == "unknown_python_310"
|
| 499 |
+
|
| 500 |
+
|
| 501 |
+
def test_platform_with_space(dummy_dist, monkeypatch):
    """Ensure building on platforms with a space in the name succeed."""
    monkeypatch.chdir(dummy_dist)
    # "isilon onefs" is a real platform string containing a space.
    bdist_wheel_cmd(plat_name="isilon onefs").run()
|
| 505 |
+
|
| 506 |
+
|
| 507 |
+
def test_data_dir_with_tag_build(monkeypatch, tmp_path):
    """
    Setuptools allow authors to set PEP 440's local version segments
    using ``egg_info.tag_build``. This should be reflected not only in the
    ``.whl`` file name, but also in the ``.dist-info`` and ``.data`` dirs.
    See pypa/setuptools#3997.
    """
    monkeypatch.chdir(tmp_path)
    files = {
        "setup.py": """
            from setuptools import setup
            setup(headers=["hello.h"])
            """,
        "setup.cfg": """
            [metadata]
            name = test
            version = 1.0

            [options.data_files]
            hello/world = file.txt

            [egg_info]
            tag_build = +what
            tag_date = 0
            """,
        "file.txt": "",
        "hello.h": "",
    }
    for file, content in files.items():
        with open(file, "w", encoding="utf-8") as fh:
            fh.write(cleandoc(content))

    bdist_wheel_cmd().run()

    # Ensure .whl, .dist-info and .data contain the local segment
    wheel_path = "dist/test-1.0+what-py3-none-any.whl"
    assert os.path.exists(wheel_path)
    entries = set(ZipFile(wheel_path).namelist())
    expected = {
        "test-1.0+what.data/headers/hello.h",
        "test-1.0+what.data/data/hello/world/file.txt",
        "test-1.0+what.dist-info/METADATA",
        "test-1.0+what.dist-info/WHEEL",
    }
    unexpected = {
        "test.data/headers/hello.h",
        "test-1.0.data/data/hello/world/file.txt",
        "test.dist-info/METADATA",
        "test-1.0.dist-info/WHEEL",
    }
    assert expected <= entries
    assert not (unexpected & entries)
|
| 560 |
+
|
| 561 |
+
|
| 562 |
+
@pytest.mark.parametrize(
    ("reported", "expected"),
    [("linux-x86_64", "linux_i686"), ("linux-aarch64", "linux_armv7l")],
)
@pytest.mark.skipif(
    platform.system() != "Linux", reason="Only makes sense to test on Linux"
)
def test_platform_linux32(reported, expected, monkeypatch):
    """A 32-bit interpreter on a 64-bit kernel reports the 32-bit platform."""
    # Pretend pointers are 4 bytes wide (i.e. a 32-bit interpreter).
    monkeypatch.setattr(struct, "calcsize", lambda x: 4)
    cmd = bdist_wheel(setuptools.Distribution())
    cmd.plat_name = reported
    cmd.root_is_pure = False
    assert cmd.get_tag()[2] == expected
|
| 577 |
+
|
| 578 |
+
|
| 579 |
+
def test_no_ctypes(monkeypatch) -> None:
    """bdist_wheel must remain importable when ctypes is unavailable."""
    def _fake_import(name: str, *args, **kwargs):
        if name == "ctypes":
            raise ModuleNotFoundError(f"No module named {name}")
        return importlib.__import__(name, *args, **kwargs)

    with suppress(KeyError):
        monkeypatch.delitem(sys.modules, "wheel.macosx_libfile")

    # Install an importer shim that refuses to load ctypes
    monkeypatch.setattr(builtins, "__import__", _fake_import)
    with pytest.raises(ModuleNotFoundError, match="No module named ctypes"):
        import wheel.macosx_libfile  # noqa: F401

    # Unload and reimport the bdist_wheel command module to make sure it won't
    # try to import ctypes
    monkeypatch.delitem(sys.modules, "setuptools.command.bdist_wheel")

    import setuptools.command.bdist_wheel  # noqa: F401
|
| 599 |
+
|
| 600 |
+
|
| 601 |
+
def test_dist_info_provided(dummy_dist, monkeypatch, tmp_path):
    """A caller-supplied .dist-info dir is preserved verbatim in the wheel."""
    monkeypatch.chdir(dummy_dist)
    distinfo = tmp_path / "dummy_dist.dist-info"

    distinfo.mkdir()
    (distinfo / "METADATA").write_text("name: helloworld", encoding="utf-8")

    # We don't control the metadata. According to PEP-517, "The hook MAY also
    # create other files inside this directory, and a build frontend MUST
    # preserve".
    (distinfo / "FOO").write_text("bar", encoding="utf-8")

    bdist_wheel_cmd(bdist_dir=str(tmp_path), dist_info_dir=str(distinfo)).run()
    expected = {
        "dummy_dist-1.0.dist-info/FOO",
        "dummy_dist-1.0.dist-info/RECORD",
    }
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        files_found = set(wf.namelist())
    # Check that all expected files are there.
    assert expected <= files_found
    # Make sure there is no accidental egg-info bleeding into the wheel.
    assert not any('egg-info' in str(name) for name in files_found)
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_build.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from setuptools import Command
|
| 2 |
+
from setuptools.command.build import build
|
| 3 |
+
from setuptools.dist import Distribution
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
    """
    Check that the setuptools Distribution uses the
    setuptools specific build object.
    """
    attrs = {
        'script_name': 'setup.py',
        'script_args': ['build'],
        'packages': [],
        'package_data': {'': ['path/*']},
    }
    dist = Distribution(attrs)
    assert isinstance(dist.get_command_obj("build"), build)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class Subcommand(Command):
    """Dummy command to be used in tests"""

    def initialize_options(self):
        """No options to initialize."""

    def finalize_options(self):
        """No options to finalize."""

    def run(self):
        raise NotImplementedError("just to check if the command runs")
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_build_clib.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import random
|
| 2 |
+
from unittest import mock
|
| 3 |
+
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
from setuptools.command.build_clib import build_clib
|
| 7 |
+
from setuptools.dist import Distribution
|
| 8 |
+
|
| 9 |
+
from distutils.errors import DistutilsSetupError
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class TestBuildCLib:
    """Tests for the setuptools ``build_clib`` command."""

    @mock.patch('setuptools.command.build_clib.newer_pairwise_group')
    def test_build_libraries(self, mock_newer):
        """Validate input checking and the dependency-driven rebuild logic."""
        dist = Distribution()
        cmd = build_clib(dist)

        # this will be a long section, just making sure all
        # exceptions are properly raised
        libs = [('example', {'sources': 'broken.c'})]  # sources must be a list
        with pytest.raises(DistutilsSetupError):
            cmd.build_libraries(libs)

        obj_deps = 'some_string'  # obj_deps must be a dictionary
        libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})]
        with pytest.raises(DistutilsSetupError):
            cmd.build_libraries(libs)

        obj_deps = {'': ''}  # global dependencies must be a list/tuple
        libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})]
        with pytest.raises(DistutilsSetupError):
            cmd.build_libraries(libs)

        obj_deps = {'source.c': ''}  # per-source dependencies too
        libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})]
        with pytest.raises(DistutilsSetupError):
            cmd.build_libraries(libs)

        # with that out of the way, let's see if the crude dependency
        # system works
        cmd.compiler = mock.MagicMock(spec=cmd.compiler)
        mock_newer.return_value = ([], [])  # pretend nothing is out of date

        obj_deps = {'': ('global.h',), 'example.c': ('example.h',)}
        libs = [('example', {'sources': ['example.c'], 'obj_deps': obj_deps})]

        cmd.build_libraries(libs)
        assert [['example.c', 'global.h', 'example.h']] in mock_newer.call_args[0]
        assert not cmd.compiler.compile.called
        assert cmd.compiler.create_static_lib.call_count == 1

        # reset the call numbers so we can test again
        cmd.compiler.reset_mock()

        mock_newer.return_value = ''  # anything as long as it's not ([],[])
        cmd.build_libraries(libs)
        assert cmd.compiler.compile.call_count == 1
        assert cmd.compiler.create_static_lib.call_count == 1

    @mock.patch('setuptools.command.build_clib.newer_pairwise_group')
    def test_build_libraries_reproducible(self, mock_newer):
        """The dependency query must not depend on source listing order."""
        dist = Distribution()
        cmd = build_clib(dist)

        # with that out of the way, let's see if the crude dependency
        # system works
        cmd.compiler = mock.MagicMock(spec=cmd.compiler)
        mock_newer.return_value = ([], [])

        original_sources = ['a-example.c', 'example.c']
        sources = original_sources

        obj_deps = {'': ('global.h',), 'example.c': ('example.h',)}
        libs = [('example', {'sources': sources, 'obj_deps': obj_deps})]

        cmd.build_libraries(libs)
        computed_call_args = mock_newer.call_args[0]

        # Shuffle the sources until the order actually differs, then verify
        # the command still asks newer_pairwise_group the same question.
        while sources == original_sources:
            sources = random.sample(original_sources, len(original_sources))
        libs = [('example', {'sources': sources, 'obj_deps': obj_deps})]

        cmd.build_libraries(libs)
        assert computed_call_args == mock_newer.call_args[0]
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_build_ext.py
ADDED
|
@@ -0,0 +1,293 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
from importlib.util import cache_from_source as _compiled_file_name
|
| 6 |
+
|
| 7 |
+
import pytest
|
| 8 |
+
from jaraco import path
|
| 9 |
+
|
| 10 |
+
from setuptools.command.build_ext import build_ext, get_abi3_suffix
|
| 11 |
+
from setuptools.dist import Distribution
|
| 12 |
+
from setuptools.errors import CompileError
|
| 13 |
+
from setuptools.extension import Extension
|
| 14 |
+
|
| 15 |
+
from . import environment
|
| 16 |
+
from .textwrap import DALS
|
| 17 |
+
|
| 18 |
+
import distutils.command.build_ext as orig
|
| 19 |
+
from distutils.sysconfig import get_config_var
|
| 20 |
+
|
| 21 |
+
IS_PYPY = '__pypy__' in sys.builtin_module_names
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class TestBuildExt:
|
| 25 |
+
def test_get_ext_filename(self):
|
| 26 |
+
"""
|
| 27 |
+
Setuptools needs to give back the same
|
| 28 |
+
result as distutils, even if the fullname
|
| 29 |
+
is not in ext_map.
|
| 30 |
+
"""
|
| 31 |
+
dist = Distribution()
|
| 32 |
+
cmd = build_ext(dist)
|
| 33 |
+
cmd.ext_map['foo/bar'] = ''
|
| 34 |
+
res = cmd.get_ext_filename('foo')
|
| 35 |
+
wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
|
| 36 |
+
assert res == wanted
|
| 37 |
+
|
| 38 |
+
def test_abi3_filename(self):
|
| 39 |
+
"""
|
| 40 |
+
Filename needs to be loadable by several versions
|
| 41 |
+
of Python 3 if 'is_abi3' is truthy on Extension()
|
| 42 |
+
"""
|
| 43 |
+
print(get_abi3_suffix())
|
| 44 |
+
|
| 45 |
+
extension = Extension('spam.eggs', ['eggs.c'], py_limited_api=True)
|
| 46 |
+
dist = Distribution(dict(ext_modules=[extension]))
|
| 47 |
+
cmd = build_ext(dist)
|
| 48 |
+
cmd.finalize_options()
|
| 49 |
+
assert 'spam.eggs' in cmd.ext_map
|
| 50 |
+
res = cmd.get_ext_filename('spam.eggs')
|
| 51 |
+
|
| 52 |
+
if not get_abi3_suffix():
|
| 53 |
+
assert res.endswith(get_config_var('EXT_SUFFIX'))
|
| 54 |
+
elif sys.platform == 'win32':
|
| 55 |
+
assert res.endswith('eggs.pyd')
|
| 56 |
+
else:
|
| 57 |
+
assert 'abi3' in res
|
| 58 |
+
|
| 59 |
+
def test_ext_suffix_override(self):
|
| 60 |
+
"""
|
| 61 |
+
SETUPTOOLS_EXT_SUFFIX variable always overrides
|
| 62 |
+
default extension options.
|
| 63 |
+
"""
|
| 64 |
+
dist = Distribution()
|
| 65 |
+
cmd = build_ext(dist)
|
| 66 |
+
cmd.ext_map['for_abi3'] = ext = Extension(
|
| 67 |
+
'for_abi3',
|
| 68 |
+
['s.c'],
|
| 69 |
+
# Override shouldn't affect abi3 modules
|
| 70 |
+
py_limited_api=True,
|
| 71 |
+
)
|
| 72 |
+
# Mock value needed to pass tests
|
| 73 |
+
ext._links_to_dynamic = False
|
| 74 |
+
|
| 75 |
+
if not IS_PYPY:
|
| 76 |
+
expect = cmd.get_ext_filename('for_abi3')
|
| 77 |
+
else:
|
| 78 |
+
# PyPy builds do not use ABI3 tag, so they will
|
| 79 |
+
# also get the overridden suffix.
|
| 80 |
+
expect = 'for_abi3.test-suffix'
|
| 81 |
+
|
| 82 |
+
try:
|
| 83 |
+
os.environ['SETUPTOOLS_EXT_SUFFIX'] = '.test-suffix'
|
| 84 |
+
res = cmd.get_ext_filename('normal')
|
| 85 |
+
assert 'normal.test-suffix' == res
|
| 86 |
+
res = cmd.get_ext_filename('for_abi3')
|
| 87 |
+
assert expect == res
|
| 88 |
+
finally:
|
| 89 |
+
del os.environ['SETUPTOOLS_EXT_SUFFIX']
|
| 90 |
+
|
| 91 |
+
def dist_with_example(self):
|
| 92 |
+
files = {
|
| 93 |
+
"src": {"mypkg": {"subpkg": {"ext2.c": ""}}},
|
| 94 |
+
"c-extensions": {"ext1": {"main.c": ""}},
|
| 95 |
+
}
|
| 96 |
+
|
| 97 |
+
ext1 = Extension("mypkg.ext1", ["c-extensions/ext1/main.c"])
|
| 98 |
+
ext2 = Extension("mypkg.subpkg.ext2", ["src/mypkg/subpkg/ext2.c"])
|
| 99 |
+
ext3 = Extension("ext3", ["c-extension/ext3.c"])
|
| 100 |
+
|
| 101 |
+
path.build(files)
|
| 102 |
+
return Distribution({
|
| 103 |
+
"script_name": "%test%",
|
| 104 |
+
"ext_modules": [ext1, ext2, ext3],
|
| 105 |
+
"package_dir": {"": "src"},
|
| 106 |
+
})
|
| 107 |
+
|
| 108 |
+
def test_get_outputs(self, tmpdir_cwd, monkeypatch):
|
| 109 |
+
monkeypatch.setenv('SETUPTOOLS_EXT_SUFFIX', '.mp3') # make test OS-independent
|
| 110 |
+
monkeypatch.setattr('setuptools.command.build_ext.use_stubs', False)
|
| 111 |
+
dist = self.dist_with_example()
|
| 112 |
+
|
| 113 |
+
# Regular build: get_outputs not empty, but get_output_mappings is empty
|
| 114 |
+
build_ext = dist.get_command_obj("build_ext")
|
| 115 |
+
build_ext.editable_mode = False
|
| 116 |
+
build_ext.ensure_finalized()
|
| 117 |
+
build_lib = build_ext.build_lib.replace(os.sep, "/")
|
| 118 |
+
outputs = [x.replace(os.sep, "/") for x in build_ext.get_outputs()]
|
| 119 |
+
assert outputs == [
|
| 120 |
+
f"{build_lib}/ext3.mp3",
|
| 121 |
+
f"{build_lib}/mypkg/ext1.mp3",
|
| 122 |
+
f"{build_lib}/mypkg/subpkg/ext2.mp3",
|
| 123 |
+
]
|
| 124 |
+
assert build_ext.get_output_mapping() == {}
|
| 125 |
+
|
| 126 |
+
# Editable build: get_output_mappings should contain everything in get_outputs
|
| 127 |
+
dist.reinitialize_command("build_ext")
|
| 128 |
+
build_ext.editable_mode = True
|
| 129 |
+
build_ext.ensure_finalized()
|
| 130 |
+
mapping = {
|
| 131 |
+
k.replace(os.sep, "/"): v.replace(os.sep, "/")
|
| 132 |
+
for k, v in build_ext.get_output_mapping().items()
|
| 133 |
+
}
|
| 134 |
+
assert mapping == {
|
| 135 |
+
f"{build_lib}/ext3.mp3": "src/ext3.mp3",
|
| 136 |
+
f"{build_lib}/mypkg/ext1.mp3": "src/mypkg/ext1.mp3",
|
| 137 |
+
f"{build_lib}/mypkg/subpkg/ext2.mp3": "src/mypkg/subpkg/ext2.mp3",
|
| 138 |
+
}
|
| 139 |
+
|
| 140 |
+
def test_get_output_mapping_with_stub(self, tmpdir_cwd, monkeypatch):
|
| 141 |
+
monkeypatch.setenv('SETUPTOOLS_EXT_SUFFIX', '.mp3') # make test OS-independent
|
| 142 |
+
monkeypatch.setattr('setuptools.command.build_ext.use_stubs', True)
|
| 143 |
+
dist = self.dist_with_example()
|
| 144 |
+
|
| 145 |
+
# Editable build should create compiled stubs (.pyc files only, no .py)
|
| 146 |
+
build_ext = dist.get_command_obj("build_ext")
|
| 147 |
+
build_ext.editable_mode = True
|
| 148 |
+
build_ext.ensure_finalized()
|
| 149 |
+
for ext in build_ext.extensions:
|
| 150 |
+
monkeypatch.setattr(ext, "_needs_stub", True)
|
| 151 |
+
|
| 152 |
+
build_lib = build_ext.build_lib.replace(os.sep, "/")
|
| 153 |
+
mapping = {
|
| 154 |
+
k.replace(os.sep, "/"): v.replace(os.sep, "/")
|
| 155 |
+
for k, v in build_ext.get_output_mapping().items()
|
| 156 |
+
}
|
| 157 |
+
|
| 158 |
+
def C(file):
|
| 159 |
+
"""Make it possible to do comparisons and tests in a OS-independent way"""
|
| 160 |
+
return _compiled_file_name(file).replace(os.sep, "/")
|
| 161 |
+
|
| 162 |
+
assert mapping == {
|
| 163 |
+
C(f"{build_lib}/ext3.py"): C("src/ext3.py"),
|
| 164 |
+
f"{build_lib}/ext3.mp3": "src/ext3.mp3",
|
| 165 |
+
C(f"{build_lib}/mypkg/ext1.py"): C("src/mypkg/ext1.py"),
|
| 166 |
+
f"{build_lib}/mypkg/ext1.mp3": "src/mypkg/ext1.mp3",
|
| 167 |
+
C(f"{build_lib}/mypkg/subpkg/ext2.py"): C("src/mypkg/subpkg/ext2.py"),
|
| 168 |
+
f"{build_lib}/mypkg/subpkg/ext2.mp3": "src/mypkg/subpkg/ext2.mp3",
|
| 169 |
+
}
|
| 170 |
+
|
| 171 |
+
# Ensure only the compiled stubs are present not the raw .py stub
|
| 172 |
+
assert f"{build_lib}/mypkg/ext1.py" not in mapping
|
| 173 |
+
assert f"{build_lib}/mypkg/subpkg/ext2.py" not in mapping
|
| 174 |
+
|
| 175 |
+
# Visualize what the cached stub files look like
|
| 176 |
+
example_stub = C(f"{build_lib}/mypkg/ext1.py")
|
| 177 |
+
assert example_stub in mapping
|
| 178 |
+
assert example_stub.startswith(f"{build_lib}/mypkg/__pycache__/ext1")
|
| 179 |
+
assert example_stub.endswith(".pyc")
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
class TestBuildExtInplace:
|
| 183 |
+
def get_build_ext_cmd(self, optional: bool, **opts) -> build_ext:
|
| 184 |
+
files: dict[str, str | dict[str, dict[str, str]]] = {
|
| 185 |
+
"eggs.c": "#include missingheader.h\n",
|
| 186 |
+
".build": {"lib": {}, "tmp": {}},
|
| 187 |
+
}
|
| 188 |
+
path.build(files)
|
| 189 |
+
extension = Extension('spam.eggs', ['eggs.c'], optional=optional)
|
| 190 |
+
dist = Distribution(dict(ext_modules=[extension]))
|
| 191 |
+
dist.script_name = 'setup.py'
|
| 192 |
+
cmd = build_ext(dist)
|
| 193 |
+
vars(cmd).update(build_lib=".build/lib", build_temp=".build/tmp", **opts)
|
| 194 |
+
cmd.ensure_finalized()
|
| 195 |
+
return cmd
|
| 196 |
+
|
| 197 |
+
def get_log_messages(self, caplog, capsys):
|
| 198 |
+
"""
|
| 199 |
+
Historically, distutils "logged" by printing to sys.std*.
|
| 200 |
+
Later versions adopted the logging framework. Grab
|
| 201 |
+
messages regardless of how they were captured.
|
| 202 |
+
"""
|
| 203 |
+
std = capsys.readouterr()
|
| 204 |
+
return std.out.splitlines() + std.err.splitlines() + caplog.messages
|
| 205 |
+
|
| 206 |
+
def test_optional(self, tmpdir_cwd, caplog, capsys):
|
| 207 |
+
"""
|
| 208 |
+
If optional extensions fail to build, setuptools should show the error
|
| 209 |
+
in the logs but not fail to build
|
| 210 |
+
"""
|
| 211 |
+
cmd = self.get_build_ext_cmd(optional=True, inplace=True)
|
| 212 |
+
cmd.run()
|
| 213 |
+
assert any(
|
| 214 |
+
'build_ext: building extension "spam.eggs" failed'
|
| 215 |
+
for msg in self.get_log_messages(caplog, capsys)
|
| 216 |
+
)
|
| 217 |
+
# No compile error exception should be raised
|
| 218 |
+
|
| 219 |
+
def test_non_optional(self, tmpdir_cwd):
|
| 220 |
+
# Non-optional extensions should raise an exception
|
| 221 |
+
cmd = self.get_build_ext_cmd(optional=False, inplace=True)
|
| 222 |
+
with pytest.raises(CompileError):
|
| 223 |
+
cmd.run()
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
def test_build_ext_config_handling(tmpdir_cwd):
|
| 227 |
+
files = {
|
| 228 |
+
'setup.py': DALS(
|
| 229 |
+
"""
|
| 230 |
+
from setuptools import Extension, setup
|
| 231 |
+
setup(
|
| 232 |
+
name='foo',
|
| 233 |
+
version='0.0.0',
|
| 234 |
+
ext_modules=[Extension('foo', ['foo.c'])],
|
| 235 |
+
)
|
| 236 |
+
"""
|
| 237 |
+
),
|
| 238 |
+
'foo.c': DALS(
|
| 239 |
+
"""
|
| 240 |
+
#include "Python.h"
|
| 241 |
+
|
| 242 |
+
#if PY_MAJOR_VERSION >= 3
|
| 243 |
+
|
| 244 |
+
static struct PyModuleDef moduledef = {
|
| 245 |
+
PyModuleDef_HEAD_INIT,
|
| 246 |
+
"foo",
|
| 247 |
+
NULL,
|
| 248 |
+
0,
|
| 249 |
+
NULL,
|
| 250 |
+
NULL,
|
| 251 |
+
NULL,
|
| 252 |
+
NULL,
|
| 253 |
+
NULL
|
| 254 |
+
};
|
| 255 |
+
|
| 256 |
+
#define INITERROR return NULL
|
| 257 |
+
|
| 258 |
+
PyMODINIT_FUNC PyInit_foo(void)
|
| 259 |
+
|
| 260 |
+
#else
|
| 261 |
+
|
| 262 |
+
#define INITERROR return
|
| 263 |
+
|
| 264 |
+
void initfoo(void)
|
| 265 |
+
|
| 266 |
+
#endif
|
| 267 |
+
{
|
| 268 |
+
#if PY_MAJOR_VERSION >= 3
|
| 269 |
+
PyObject *module = PyModule_Create(&moduledef);
|
| 270 |
+
#else
|
| 271 |
+
PyObject *module = Py_InitModule("extension", NULL);
|
| 272 |
+
#endif
|
| 273 |
+
if (module == NULL)
|
| 274 |
+
INITERROR;
|
| 275 |
+
#if PY_MAJOR_VERSION >= 3
|
| 276 |
+
return module;
|
| 277 |
+
#endif
|
| 278 |
+
}
|
| 279 |
+
"""
|
| 280 |
+
),
|
| 281 |
+
'setup.cfg': DALS(
|
| 282 |
+
"""
|
| 283 |
+
[build]
|
| 284 |
+
build_base = foo_build
|
| 285 |
+
"""
|
| 286 |
+
),
|
| 287 |
+
}
|
| 288 |
+
path.build(files)
|
| 289 |
+
code, (stdout, stderr) = environment.run_setup_py(
|
| 290 |
+
cmd=['build'],
|
| 291 |
+
data_stream=(0, 2),
|
| 292 |
+
)
|
| 293 |
+
assert code == 0, f'\nSTDOUT:\n{stdout}\nSTDERR:\n{stderr}'
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_build_meta.py
ADDED
|
@@ -0,0 +1,970 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import importlib
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
import shutil
|
| 6 |
+
import signal
|
| 7 |
+
import sys
|
| 8 |
+
import tarfile
|
| 9 |
+
from concurrent import futures
|
| 10 |
+
from pathlib import Path
|
| 11 |
+
from typing import Any, Callable
|
| 12 |
+
from zipfile import ZipFile
|
| 13 |
+
|
| 14 |
+
import pytest
|
| 15 |
+
from jaraco import path
|
| 16 |
+
from packaging.requirements import Requirement
|
| 17 |
+
|
| 18 |
+
from .textwrap import DALS
|
| 19 |
+
|
| 20 |
+
SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
TIMEOUT = int(os.getenv("TIMEOUT_BACKEND_TEST", "180")) # in seconds
|
| 24 |
+
IS_PYPY = '__pypy__' in sys.builtin_module_names
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
pytestmark = pytest.mark.skipif(
|
| 28 |
+
sys.platform == "win32" and IS_PYPY,
|
| 29 |
+
reason="The combination of PyPy + Windows + pytest-xdist + ProcessPoolExecutor "
|
| 30 |
+
"is flaky and problematic",
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class BuildBackendBase:
|
| 35 |
+
def __init__(self, cwd='.', env=None, backend_name='setuptools.build_meta'):
|
| 36 |
+
self.cwd = cwd
|
| 37 |
+
self.env = env or {}
|
| 38 |
+
self.backend_name = backend_name
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class BuildBackend(BuildBackendBase):
|
| 42 |
+
"""PEP 517 Build Backend"""
|
| 43 |
+
|
| 44 |
+
def __init__(self, *args, **kwargs):
|
| 45 |
+
super().__init__(*args, **kwargs)
|
| 46 |
+
self.pool = futures.ProcessPoolExecutor(max_workers=1)
|
| 47 |
+
|
| 48 |
+
def __getattr__(self, name: str) -> Callable[..., Any]:
|
| 49 |
+
"""Handles arbitrary function invocations on the build backend."""
|
| 50 |
+
|
| 51 |
+
def method(*args, **kw):
|
| 52 |
+
root = os.path.abspath(self.cwd)
|
| 53 |
+
caller = BuildBackendCaller(root, self.env, self.backend_name)
|
| 54 |
+
pid = None
|
| 55 |
+
try:
|
| 56 |
+
pid = self.pool.submit(os.getpid).result(TIMEOUT)
|
| 57 |
+
return self.pool.submit(caller, name, *args, **kw).result(TIMEOUT)
|
| 58 |
+
except futures.TimeoutError:
|
| 59 |
+
self.pool.shutdown(wait=False) # doesn't stop already running processes
|
| 60 |
+
self._kill(pid)
|
| 61 |
+
pytest.xfail(f"Backend did not respond before timeout ({TIMEOUT} s)")
|
| 62 |
+
except (futures.process.BrokenProcessPool, MemoryError, OSError):
|
| 63 |
+
if IS_PYPY:
|
| 64 |
+
pytest.xfail("PyPy frequently fails tests with ProcessPoolExector")
|
| 65 |
+
raise
|
| 66 |
+
|
| 67 |
+
return method
|
| 68 |
+
|
| 69 |
+
def _kill(self, pid):
|
| 70 |
+
if pid is None:
|
| 71 |
+
return
|
| 72 |
+
with contextlib.suppress(ProcessLookupError, OSError):
|
| 73 |
+
os.kill(pid, signal.SIGTERM if os.name == "nt" else signal.SIGKILL)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class BuildBackendCaller(BuildBackendBase):
|
| 77 |
+
def __init__(self, *args, **kwargs):
|
| 78 |
+
super().__init__(*args, **kwargs)
|
| 79 |
+
|
| 80 |
+
(self.backend_name, _, self.backend_obj) = self.backend_name.partition(':')
|
| 81 |
+
|
| 82 |
+
def __call__(self, name, *args, **kw):
|
| 83 |
+
"""Handles arbitrary function invocations on the build backend."""
|
| 84 |
+
os.chdir(self.cwd)
|
| 85 |
+
os.environ.update(self.env)
|
| 86 |
+
mod = importlib.import_module(self.backend_name)
|
| 87 |
+
|
| 88 |
+
if self.backend_obj:
|
| 89 |
+
backend = getattr(mod, self.backend_obj)
|
| 90 |
+
else:
|
| 91 |
+
backend = mod
|
| 92 |
+
|
| 93 |
+
return getattr(backend, name)(*args, **kw)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
defns = [
|
| 97 |
+
{ # simple setup.py script
|
| 98 |
+
'setup.py': DALS(
|
| 99 |
+
"""
|
| 100 |
+
__import__('setuptools').setup(
|
| 101 |
+
name='foo',
|
| 102 |
+
version='0.0.0',
|
| 103 |
+
py_modules=['hello'],
|
| 104 |
+
setup_requires=['six'],
|
| 105 |
+
)
|
| 106 |
+
"""
|
| 107 |
+
),
|
| 108 |
+
'hello.py': DALS(
|
| 109 |
+
"""
|
| 110 |
+
def run():
|
| 111 |
+
print('hello')
|
| 112 |
+
"""
|
| 113 |
+
),
|
| 114 |
+
},
|
| 115 |
+
{ # setup.py that relies on __name__
|
| 116 |
+
'setup.py': DALS(
|
| 117 |
+
"""
|
| 118 |
+
assert __name__ == '__main__'
|
| 119 |
+
__import__('setuptools').setup(
|
| 120 |
+
name='foo',
|
| 121 |
+
version='0.0.0',
|
| 122 |
+
py_modules=['hello'],
|
| 123 |
+
setup_requires=['six'],
|
| 124 |
+
)
|
| 125 |
+
"""
|
| 126 |
+
),
|
| 127 |
+
'hello.py': DALS(
|
| 128 |
+
"""
|
| 129 |
+
def run():
|
| 130 |
+
print('hello')
|
| 131 |
+
"""
|
| 132 |
+
),
|
| 133 |
+
},
|
| 134 |
+
{ # setup.py script that runs arbitrary code
|
| 135 |
+
'setup.py': DALS(
|
| 136 |
+
"""
|
| 137 |
+
variable = True
|
| 138 |
+
def function():
|
| 139 |
+
return variable
|
| 140 |
+
assert variable
|
| 141 |
+
__import__('setuptools').setup(
|
| 142 |
+
name='foo',
|
| 143 |
+
version='0.0.0',
|
| 144 |
+
py_modules=['hello'],
|
| 145 |
+
setup_requires=['six'],
|
| 146 |
+
)
|
| 147 |
+
"""
|
| 148 |
+
),
|
| 149 |
+
'hello.py': DALS(
|
| 150 |
+
"""
|
| 151 |
+
def run():
|
| 152 |
+
print('hello')
|
| 153 |
+
"""
|
| 154 |
+
),
|
| 155 |
+
},
|
| 156 |
+
{ # setup.py script that constructs temp files to be included in the distribution
|
| 157 |
+
'setup.py': DALS(
|
| 158 |
+
"""
|
| 159 |
+
# Some packages construct files on the fly, include them in the package,
|
| 160 |
+
# and immediately remove them after `setup()` (e.g. pybind11==2.9.1).
|
| 161 |
+
# Therefore, we cannot use `distutils.core.run_setup(..., stop_after=...)`
|
| 162 |
+
# to obtain a distribution object first, and then run the distutils
|
| 163 |
+
# commands later, because these files will be removed in the meantime.
|
| 164 |
+
|
| 165 |
+
with open('world.py', 'w', encoding="utf-8") as f:
|
| 166 |
+
f.write('x = 42')
|
| 167 |
+
|
| 168 |
+
try:
|
| 169 |
+
__import__('setuptools').setup(
|
| 170 |
+
name='foo',
|
| 171 |
+
version='0.0.0',
|
| 172 |
+
py_modules=['world'],
|
| 173 |
+
setup_requires=['six'],
|
| 174 |
+
)
|
| 175 |
+
finally:
|
| 176 |
+
# Some packages will clean temporary files
|
| 177 |
+
__import__('os').unlink('world.py')
|
| 178 |
+
"""
|
| 179 |
+
),
|
| 180 |
+
},
|
| 181 |
+
{ # setup.cfg only
|
| 182 |
+
'setup.cfg': DALS(
|
| 183 |
+
"""
|
| 184 |
+
[metadata]
|
| 185 |
+
name = foo
|
| 186 |
+
version = 0.0.0
|
| 187 |
+
|
| 188 |
+
[options]
|
| 189 |
+
py_modules=hello
|
| 190 |
+
setup_requires=six
|
| 191 |
+
"""
|
| 192 |
+
),
|
| 193 |
+
'hello.py': DALS(
|
| 194 |
+
"""
|
| 195 |
+
def run():
|
| 196 |
+
print('hello')
|
| 197 |
+
"""
|
| 198 |
+
),
|
| 199 |
+
},
|
| 200 |
+
{ # setup.cfg and setup.py
|
| 201 |
+
'setup.cfg': DALS(
|
| 202 |
+
"""
|
| 203 |
+
[metadata]
|
| 204 |
+
name = foo
|
| 205 |
+
version = 0.0.0
|
| 206 |
+
|
| 207 |
+
[options]
|
| 208 |
+
py_modules=hello
|
| 209 |
+
setup_requires=six
|
| 210 |
+
"""
|
| 211 |
+
),
|
| 212 |
+
'setup.py': "__import__('setuptools').setup()",
|
| 213 |
+
'hello.py': DALS(
|
| 214 |
+
"""
|
| 215 |
+
def run():
|
| 216 |
+
print('hello')
|
| 217 |
+
"""
|
| 218 |
+
),
|
| 219 |
+
},
|
| 220 |
+
]
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
class TestBuildMetaBackend:
|
| 224 |
+
backend_name = 'setuptools.build_meta'
|
| 225 |
+
|
| 226 |
+
def get_build_backend(self):
|
| 227 |
+
return BuildBackend(backend_name=self.backend_name)
|
| 228 |
+
|
| 229 |
+
@pytest.fixture(params=defns)
|
| 230 |
+
def build_backend(self, tmpdir, request):
|
| 231 |
+
path.build(request.param, prefix=str(tmpdir))
|
| 232 |
+
with tmpdir.as_cwd():
|
| 233 |
+
yield self.get_build_backend()
|
| 234 |
+
|
| 235 |
+
def test_get_requires_for_build_wheel(self, build_backend):
|
| 236 |
+
actual = build_backend.get_requires_for_build_wheel()
|
| 237 |
+
expected = ['six']
|
| 238 |
+
assert sorted(actual) == sorted(expected)
|
| 239 |
+
|
| 240 |
+
def test_get_requires_for_build_sdist(self, build_backend):
|
| 241 |
+
actual = build_backend.get_requires_for_build_sdist()
|
| 242 |
+
expected = ['six']
|
| 243 |
+
assert sorted(actual) == sorted(expected)
|
| 244 |
+
|
| 245 |
+
def test_build_wheel(self, build_backend):
|
| 246 |
+
dist_dir = os.path.abspath('pip-wheel')
|
| 247 |
+
os.makedirs(dist_dir)
|
| 248 |
+
wheel_name = build_backend.build_wheel(dist_dir)
|
| 249 |
+
|
| 250 |
+
wheel_file = os.path.join(dist_dir, wheel_name)
|
| 251 |
+
assert os.path.isfile(wheel_file)
|
| 252 |
+
|
| 253 |
+
# Temporary files should be removed
|
| 254 |
+
assert not os.path.isfile('world.py')
|
| 255 |
+
|
| 256 |
+
with ZipFile(wheel_file) as zipfile:
|
| 257 |
+
wheel_contents = set(zipfile.namelist())
|
| 258 |
+
|
| 259 |
+
# Each one of the examples have a single module
|
| 260 |
+
# that should be included in the distribution
|
| 261 |
+
python_scripts = (f for f in wheel_contents if f.endswith('.py'))
|
| 262 |
+
modules = [f for f in python_scripts if not f.endswith('setup.py')]
|
| 263 |
+
assert len(modules) == 1
|
| 264 |
+
|
| 265 |
+
@pytest.mark.parametrize('build_type', ('wheel', 'sdist'))
|
| 266 |
+
def test_build_with_existing_file_present(self, build_type, tmpdir_cwd):
|
| 267 |
+
# Building a sdist/wheel should still succeed if there's
|
| 268 |
+
# already a sdist/wheel in the destination directory.
|
| 269 |
+
files = {
|
| 270 |
+
'setup.py': "from setuptools import setup\nsetup()",
|
| 271 |
+
'VERSION': "0.0.1",
|
| 272 |
+
'setup.cfg': DALS(
|
| 273 |
+
"""
|
| 274 |
+
[metadata]
|
| 275 |
+
name = foo
|
| 276 |
+
version = file: VERSION
|
| 277 |
+
"""
|
| 278 |
+
),
|
| 279 |
+
'pyproject.toml': DALS(
|
| 280 |
+
"""
|
| 281 |
+
[build-system]
|
| 282 |
+
requires = ["setuptools", "wheel"]
|
| 283 |
+
build-backend = "setuptools.build_meta"
|
| 284 |
+
"""
|
| 285 |
+
),
|
| 286 |
+
}
|
| 287 |
+
|
| 288 |
+
path.build(files)
|
| 289 |
+
|
| 290 |
+
dist_dir = os.path.abspath('preexisting-' + build_type)
|
| 291 |
+
|
| 292 |
+
build_backend = self.get_build_backend()
|
| 293 |
+
build_method = getattr(build_backend, 'build_' + build_type)
|
| 294 |
+
|
| 295 |
+
# Build a first sdist/wheel.
|
| 296 |
+
# Note: this also check the destination directory is
|
| 297 |
+
# successfully created if it does not exist already.
|
| 298 |
+
first_result = build_method(dist_dir)
|
| 299 |
+
|
| 300 |
+
# Change version.
|
| 301 |
+
with open("VERSION", "wt", encoding="utf-8") as version_file:
|
| 302 |
+
version_file.write("0.0.2")
|
| 303 |
+
|
| 304 |
+
# Build a *second* sdist/wheel.
|
| 305 |
+
second_result = build_method(dist_dir)
|
| 306 |
+
|
| 307 |
+
assert os.path.isfile(os.path.join(dist_dir, first_result))
|
| 308 |
+
assert first_result != second_result
|
| 309 |
+
|
| 310 |
+
# And if rebuilding the exact same sdist/wheel?
|
| 311 |
+
open(os.path.join(dist_dir, second_result), 'wb').close()
|
| 312 |
+
third_result = build_method(dist_dir)
|
| 313 |
+
assert third_result == second_result
|
| 314 |
+
assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0
|
| 315 |
+
|
| 316 |
+
@pytest.mark.parametrize("setup_script", [None, SETUP_SCRIPT_STUB])
|
| 317 |
+
def test_build_with_pyproject_config(self, tmpdir, setup_script):
|
| 318 |
+
files = {
|
| 319 |
+
'pyproject.toml': DALS(
|
| 320 |
+
"""
|
| 321 |
+
[build-system]
|
| 322 |
+
requires = ["setuptools", "wheel"]
|
| 323 |
+
build-backend = "setuptools.build_meta"
|
| 324 |
+
|
| 325 |
+
[project]
|
| 326 |
+
name = "foo"
|
| 327 |
+
license = {text = "MIT"}
|
| 328 |
+
description = "This is a Python package"
|
| 329 |
+
dynamic = ["version", "readme"]
|
| 330 |
+
classifiers = [
|
| 331 |
+
"Development Status :: 5 - Production/Stable",
|
| 332 |
+
"Intended Audience :: Developers"
|
| 333 |
+
]
|
| 334 |
+
urls = {Homepage = "http://github.com"}
|
| 335 |
+
dependencies = [
|
| 336 |
+
"appdirs",
|
| 337 |
+
]
|
| 338 |
+
|
| 339 |
+
[project.optional-dependencies]
|
| 340 |
+
all = [
|
| 341 |
+
"tomli>=1",
|
| 342 |
+
"pyscaffold>=4,<5",
|
| 343 |
+
'importlib; python_version == "2.6"',
|
| 344 |
+
]
|
| 345 |
+
|
| 346 |
+
[project.scripts]
|
| 347 |
+
foo = "foo.cli:main"
|
| 348 |
+
|
| 349 |
+
[tool.setuptools]
|
| 350 |
+
zip-safe = false
|
| 351 |
+
package-dir = {"" = "src"}
|
| 352 |
+
packages = {find = {where = ["src"]}}
|
| 353 |
+
license-files = ["LICENSE*"]
|
| 354 |
+
|
| 355 |
+
[tool.setuptools.dynamic]
|
| 356 |
+
version = {attr = "foo.__version__"}
|
| 357 |
+
readme = {file = "README.rst"}
|
| 358 |
+
|
| 359 |
+
[tool.distutils.sdist]
|
| 360 |
+
formats = "gztar"
|
| 361 |
+
"""
|
| 362 |
+
),
|
| 363 |
+
"MANIFEST.in": DALS(
|
| 364 |
+
"""
|
| 365 |
+
global-include *.py *.txt
|
| 366 |
+
global-exclude *.py[cod]
|
| 367 |
+
"""
|
| 368 |
+
),
|
| 369 |
+
"README.rst": "This is a ``README``",
|
| 370 |
+
"LICENSE.txt": "---- placeholder MIT license ----",
|
| 371 |
+
"src": {
|
| 372 |
+
"foo": {
|
| 373 |
+
"__init__.py": "__version__ = '0.1'",
|
| 374 |
+
"__init__.pyi": "__version__: str",
|
| 375 |
+
"cli.py": "def main(): print('hello world')",
|
| 376 |
+
"data.txt": "def main(): print('hello world')",
|
| 377 |
+
"py.typed": "",
|
| 378 |
+
}
|
| 379 |
+
},
|
| 380 |
+
}
|
| 381 |
+
if setup_script:
|
| 382 |
+
files["setup.py"] = setup_script
|
| 383 |
+
|
| 384 |
+
build_backend = self.get_build_backend()
|
| 385 |
+
with tmpdir.as_cwd():
|
| 386 |
+
path.build(files)
|
| 387 |
+
sdist_path = build_backend.build_sdist("temp")
|
| 388 |
+
wheel_file = build_backend.build_wheel("temp")
|
| 389 |
+
|
| 390 |
+
with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
|
| 391 |
+
sdist_contents = set(tar.getnames())
|
| 392 |
+
|
| 393 |
+
with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
|
| 394 |
+
wheel_contents = set(zipfile.namelist())
|
| 395 |
+
metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8")
|
| 396 |
+
license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8")
|
| 397 |
+
epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8")
|
| 398 |
+
|
| 399 |
+
assert sdist_contents - {"foo-0.1/setup.py"} == {
|
| 400 |
+
'foo-0.1',
|
| 401 |
+
'foo-0.1/LICENSE.txt',
|
| 402 |
+
'foo-0.1/MANIFEST.in',
|
| 403 |
+
'foo-0.1/PKG-INFO',
|
| 404 |
+
'foo-0.1/README.rst',
|
| 405 |
+
'foo-0.1/pyproject.toml',
|
| 406 |
+
'foo-0.1/setup.cfg',
|
| 407 |
+
'foo-0.1/src',
|
| 408 |
+
'foo-0.1/src/foo',
|
| 409 |
+
'foo-0.1/src/foo/__init__.py',
|
| 410 |
+
'foo-0.1/src/foo/__init__.pyi',
|
| 411 |
+
'foo-0.1/src/foo/cli.py',
|
| 412 |
+
'foo-0.1/src/foo/data.txt',
|
| 413 |
+
'foo-0.1/src/foo/py.typed',
|
| 414 |
+
'foo-0.1/src/foo.egg-info',
|
| 415 |
+
'foo-0.1/src/foo.egg-info/PKG-INFO',
|
| 416 |
+
'foo-0.1/src/foo.egg-info/SOURCES.txt',
|
| 417 |
+
'foo-0.1/src/foo.egg-info/dependency_links.txt',
|
| 418 |
+
'foo-0.1/src/foo.egg-info/entry_points.txt',
|
| 419 |
+
'foo-0.1/src/foo.egg-info/requires.txt',
|
| 420 |
+
'foo-0.1/src/foo.egg-info/top_level.txt',
|
| 421 |
+
'foo-0.1/src/foo.egg-info/not-zip-safe',
|
| 422 |
+
}
|
| 423 |
+
assert wheel_contents == {
|
| 424 |
+
"foo/__init__.py",
|
| 425 |
+
"foo/__init__.pyi", # include type information by default
|
| 426 |
+
"foo/cli.py",
|
| 427 |
+
"foo/data.txt", # include_package_data defaults to True
|
| 428 |
+
"foo/py.typed", # include type information by default
|
| 429 |
+
"foo-0.1.dist-info/LICENSE.txt",
|
| 430 |
+
"foo-0.1.dist-info/METADATA",
|
| 431 |
+
"foo-0.1.dist-info/WHEEL",
|
| 432 |
+
"foo-0.1.dist-info/entry_points.txt",
|
| 433 |
+
"foo-0.1.dist-info/top_level.txt",
|
| 434 |
+
"foo-0.1.dist-info/RECORD",
|
| 435 |
+
}
|
| 436 |
+
assert license == "---- placeholder MIT license ----"
|
| 437 |
+
|
| 438 |
+
for line in (
|
| 439 |
+
"Summary: This is a Python package",
|
| 440 |
+
"License: MIT",
|
| 441 |
+
"Classifier: Intended Audience :: Developers",
|
| 442 |
+
"Requires-Dist: appdirs",
|
| 443 |
+
"Requires-Dist: " + str(Requirement('tomli>=1 ; extra == "all"')),
|
| 444 |
+
"Requires-Dist: "
|
| 445 |
+
+ str(Requirement('importlib; python_version=="2.6" and extra =="all"')),
|
| 446 |
+
):
|
| 447 |
+
assert line in metadata, (line, metadata)
|
| 448 |
+
|
| 449 |
+
assert metadata.strip().endswith("This is a ``README``")
|
| 450 |
+
assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main"
|
| 451 |
+
|
| 452 |
+
def test_static_metadata_in_pyproject_config(self, tmpdir):
|
| 453 |
+
# Make sure static metadata in pyproject.toml is not overwritten by setup.py
|
| 454 |
+
# as required by PEP 621
|
| 455 |
+
files = {
|
| 456 |
+
'pyproject.toml': DALS(
|
| 457 |
+
"""
|
| 458 |
+
[build-system]
|
| 459 |
+
requires = ["setuptools", "wheel"]
|
| 460 |
+
build-backend = "setuptools.build_meta"
|
| 461 |
+
|
| 462 |
+
[project]
|
| 463 |
+
name = "foo"
|
| 464 |
+
description = "This is a Python package"
|
| 465 |
+
version = "42"
|
| 466 |
+
dependencies = ["six"]
|
| 467 |
+
"""
|
| 468 |
+
),
|
| 469 |
+
'hello.py': DALS(
|
| 470 |
+
"""
|
| 471 |
+
def run():
|
| 472 |
+
print('hello')
|
| 473 |
+
"""
|
| 474 |
+
),
|
| 475 |
+
'setup.py': DALS(
|
| 476 |
+
"""
|
| 477 |
+
__import__('setuptools').setup(
|
| 478 |
+
name='bar',
|
| 479 |
+
version='13',
|
| 480 |
+
)
|
| 481 |
+
"""
|
| 482 |
+
),
|
| 483 |
+
}
|
| 484 |
+
build_backend = self.get_build_backend()
|
| 485 |
+
with tmpdir.as_cwd():
|
| 486 |
+
path.build(files)
|
| 487 |
+
sdist_path = build_backend.build_sdist("temp")
|
| 488 |
+
wheel_file = build_backend.build_wheel("temp")
|
| 489 |
+
|
| 490 |
+
assert (tmpdir / "temp/foo-42.tar.gz").exists()
|
| 491 |
+
assert (tmpdir / "temp/foo-42-py3-none-any.whl").exists()
|
| 492 |
+
assert not (tmpdir / "temp/bar-13.tar.gz").exists()
|
| 493 |
+
assert not (tmpdir / "temp/bar-42.tar.gz").exists()
|
| 494 |
+
assert not (tmpdir / "temp/foo-13.tar.gz").exists()
|
| 495 |
+
assert not (tmpdir / "temp/bar-13-py3-none-any.whl").exists()
|
| 496 |
+
assert not (tmpdir / "temp/bar-42-py3-none-any.whl").exists()
|
| 497 |
+
assert not (tmpdir / "temp/foo-13-py3-none-any.whl").exists()
|
| 498 |
+
|
| 499 |
+
with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
|
| 500 |
+
pkg_info = str(tar.extractfile('foo-42/PKG-INFO').read(), "utf-8")
|
| 501 |
+
members = tar.getnames()
|
| 502 |
+
assert "bar-13/PKG-INFO" not in members
|
| 503 |
+
|
| 504 |
+
with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
|
| 505 |
+
metadata = str(zipfile.read("foo-42.dist-info/METADATA"), "utf-8")
|
| 506 |
+
members = zipfile.namelist()
|
| 507 |
+
assert "bar-13.dist-info/METADATA" not in members
|
| 508 |
+
|
| 509 |
+
for file in pkg_info, metadata:
|
| 510 |
+
for line in ("Name: foo", "Version: 42"):
|
| 511 |
+
assert line in file
|
| 512 |
+
for line in ("Name: bar", "Version: 13"):
|
| 513 |
+
assert line not in file
|
| 514 |
+
|
| 515 |
+
def test_build_sdist(self, build_backend):
|
| 516 |
+
dist_dir = os.path.abspath('pip-sdist')
|
| 517 |
+
os.makedirs(dist_dir)
|
| 518 |
+
sdist_name = build_backend.build_sdist(dist_dir)
|
| 519 |
+
|
| 520 |
+
assert os.path.isfile(os.path.join(dist_dir, sdist_name))
|
| 521 |
+
|
| 522 |
+
def test_prepare_metadata_for_build_wheel(self, build_backend):
|
| 523 |
+
dist_dir = os.path.abspath('pip-dist-info')
|
| 524 |
+
os.makedirs(dist_dir)
|
| 525 |
+
|
| 526 |
+
dist_info = build_backend.prepare_metadata_for_build_wheel(dist_dir)
|
| 527 |
+
|
| 528 |
+
assert os.path.isfile(os.path.join(dist_dir, dist_info, 'METADATA'))
|
| 529 |
+
|
| 530 |
+
def test_prepare_metadata_inplace(self, build_backend):
|
| 531 |
+
"""
|
| 532 |
+
Some users might pass metadata_directory pre-populated with `.tox` or `.venv`.
|
| 533 |
+
See issue #3523.
|
| 534 |
+
"""
|
| 535 |
+
for pre_existing in [
|
| 536 |
+
".tox/python/lib/python3.10/site-packages/attrs-22.1.0.dist-info",
|
| 537 |
+
".tox/python/lib/python3.10/site-packages/autocommand-2.2.1.dist-info",
|
| 538 |
+
".nox/python/lib/python3.10/site-packages/build-0.8.0.dist-info",
|
| 539 |
+
".venv/python3.10/site-packages/click-8.1.3.dist-info",
|
| 540 |
+
"venv/python3.10/site-packages/distlib-0.3.5.dist-info",
|
| 541 |
+
"env/python3.10/site-packages/docutils-0.19.dist-info",
|
| 542 |
+
]:
|
| 543 |
+
os.makedirs(pre_existing, exist_ok=True)
|
| 544 |
+
dist_info = build_backend.prepare_metadata_for_build_wheel(".")
|
| 545 |
+
assert os.path.isfile(os.path.join(dist_info, 'METADATA'))
|
| 546 |
+
|
| 547 |
+
def test_build_sdist_explicit_dist(self, build_backend):
|
| 548 |
+
# explicitly specifying the dist folder should work
|
| 549 |
+
# the folder sdist_directory and the ``--dist-dir`` can be the same
|
| 550 |
+
dist_dir = os.path.abspath('dist')
|
| 551 |
+
sdist_name = build_backend.build_sdist(dist_dir)
|
| 552 |
+
assert os.path.isfile(os.path.join(dist_dir, sdist_name))
|
| 553 |
+
|
| 554 |
+
def test_build_sdist_version_change(self, build_backend):
|
| 555 |
+
sdist_into_directory = os.path.abspath("out_sdist")
|
| 556 |
+
os.makedirs(sdist_into_directory)
|
| 557 |
+
|
| 558 |
+
sdist_name = build_backend.build_sdist(sdist_into_directory)
|
| 559 |
+
assert os.path.isfile(os.path.join(sdist_into_directory, sdist_name))
|
| 560 |
+
|
| 561 |
+
# if the setup.py changes subsequent call of the build meta
|
| 562 |
+
# should still succeed, given the
|
| 563 |
+
# sdist_directory the frontend specifies is empty
|
| 564 |
+
setup_loc = os.path.abspath("setup.py")
|
| 565 |
+
if not os.path.exists(setup_loc):
|
| 566 |
+
setup_loc = os.path.abspath("setup.cfg")
|
| 567 |
+
|
| 568 |
+
with open(setup_loc, 'rt', encoding="utf-8") as file_handler:
|
| 569 |
+
content = file_handler.read()
|
| 570 |
+
with open(setup_loc, 'wt', encoding="utf-8") as file_handler:
|
| 571 |
+
file_handler.write(content.replace("version='0.0.0'", "version='0.0.1'"))
|
| 572 |
+
|
| 573 |
+
shutil.rmtree(sdist_into_directory)
|
| 574 |
+
os.makedirs(sdist_into_directory)
|
| 575 |
+
|
| 576 |
+
sdist_name = build_backend.build_sdist("out_sdist")
|
| 577 |
+
assert os.path.isfile(os.path.join(os.path.abspath("out_sdist"), sdist_name))
|
| 578 |
+
|
| 579 |
+
def test_build_sdist_pyproject_toml_exists(self, tmpdir_cwd):
|
| 580 |
+
files = {
|
| 581 |
+
'setup.py': DALS(
|
| 582 |
+
"""
|
| 583 |
+
__import__('setuptools').setup(
|
| 584 |
+
name='foo',
|
| 585 |
+
version='0.0.0',
|
| 586 |
+
py_modules=['hello']
|
| 587 |
+
)"""
|
| 588 |
+
),
|
| 589 |
+
'hello.py': '',
|
| 590 |
+
'pyproject.toml': DALS(
|
| 591 |
+
"""
|
| 592 |
+
[build-system]
|
| 593 |
+
requires = ["setuptools", "wheel"]
|
| 594 |
+
build-backend = "setuptools.build_meta"
|
| 595 |
+
"""
|
| 596 |
+
),
|
| 597 |
+
}
|
| 598 |
+
path.build(files)
|
| 599 |
+
build_backend = self.get_build_backend()
|
| 600 |
+
targz_path = build_backend.build_sdist("temp")
|
| 601 |
+
with tarfile.open(os.path.join("temp", targz_path)) as tar:
|
| 602 |
+
assert any('pyproject.toml' in name for name in tar.getnames())
|
| 603 |
+
|
| 604 |
+
def test_build_sdist_setup_py_exists(self, tmpdir_cwd):
|
| 605 |
+
# If build_sdist is called from a script other than setup.py,
|
| 606 |
+
# ensure setup.py is included
|
| 607 |
+
path.build(defns[0])
|
| 608 |
+
|
| 609 |
+
build_backend = self.get_build_backend()
|
| 610 |
+
targz_path = build_backend.build_sdist("temp")
|
| 611 |
+
with tarfile.open(os.path.join("temp", targz_path)) as tar:
|
| 612 |
+
assert any('setup.py' in name for name in tar.getnames())
|
| 613 |
+
|
| 614 |
+
def test_build_sdist_setup_py_manifest_excluded(self, tmpdir_cwd):
|
| 615 |
+
# Ensure that MANIFEST.in can exclude setup.py
|
| 616 |
+
files = {
|
| 617 |
+
'setup.py': DALS(
|
| 618 |
+
"""
|
| 619 |
+
__import__('setuptools').setup(
|
| 620 |
+
name='foo',
|
| 621 |
+
version='0.0.0',
|
| 622 |
+
py_modules=['hello']
|
| 623 |
+
)"""
|
| 624 |
+
),
|
| 625 |
+
'hello.py': '',
|
| 626 |
+
'MANIFEST.in': DALS(
|
| 627 |
+
"""
|
| 628 |
+
exclude setup.py
|
| 629 |
+
"""
|
| 630 |
+
),
|
| 631 |
+
}
|
| 632 |
+
|
| 633 |
+
path.build(files)
|
| 634 |
+
|
| 635 |
+
build_backend = self.get_build_backend()
|
| 636 |
+
targz_path = build_backend.build_sdist("temp")
|
| 637 |
+
with tarfile.open(os.path.join("temp", targz_path)) as tar:
|
| 638 |
+
assert not any('setup.py' in name for name in tar.getnames())
|
| 639 |
+
|
| 640 |
+
def test_build_sdist_builds_targz_even_if_zip_indicated(self, tmpdir_cwd):
|
| 641 |
+
files = {
|
| 642 |
+
'setup.py': DALS(
|
| 643 |
+
"""
|
| 644 |
+
__import__('setuptools').setup(
|
| 645 |
+
name='foo',
|
| 646 |
+
version='0.0.0',
|
| 647 |
+
py_modules=['hello']
|
| 648 |
+
)"""
|
| 649 |
+
),
|
| 650 |
+
'hello.py': '',
|
| 651 |
+
'setup.cfg': DALS(
|
| 652 |
+
"""
|
| 653 |
+
[sdist]
|
| 654 |
+
formats=zip
|
| 655 |
+
"""
|
| 656 |
+
),
|
| 657 |
+
}
|
| 658 |
+
|
| 659 |
+
path.build(files)
|
| 660 |
+
|
| 661 |
+
build_backend = self.get_build_backend()
|
| 662 |
+
build_backend.build_sdist("temp")
|
| 663 |
+
|
| 664 |
+
_relative_path_import_files = {
|
| 665 |
+
'setup.py': DALS(
|
| 666 |
+
"""
|
| 667 |
+
__import__('setuptools').setup(
|
| 668 |
+
name='foo',
|
| 669 |
+
version=__import__('hello').__version__,
|
| 670 |
+
py_modules=['hello']
|
| 671 |
+
)"""
|
| 672 |
+
),
|
| 673 |
+
'hello.py': '__version__ = "0.0.0"',
|
| 674 |
+
'setup.cfg': DALS(
|
| 675 |
+
"""
|
| 676 |
+
[sdist]
|
| 677 |
+
formats=zip
|
| 678 |
+
"""
|
| 679 |
+
),
|
| 680 |
+
}
|
| 681 |
+
|
| 682 |
+
def test_build_sdist_relative_path_import(self, tmpdir_cwd):
|
| 683 |
+
path.build(self._relative_path_import_files)
|
| 684 |
+
build_backend = self.get_build_backend()
|
| 685 |
+
with pytest.raises(ImportError, match="^No module named 'hello'$"):
|
| 686 |
+
build_backend.build_sdist("temp")
|
| 687 |
+
|
| 688 |
+
_simple_pyproject_example = {
|
| 689 |
+
"pyproject.toml": DALS(
|
| 690 |
+
"""
|
| 691 |
+
[project]
|
| 692 |
+
name = "proj"
|
| 693 |
+
version = "42"
|
| 694 |
+
"""
|
| 695 |
+
),
|
| 696 |
+
"src": {"proj": {"__init__.py": ""}},
|
| 697 |
+
}
|
| 698 |
+
|
| 699 |
+
def _assert_link_tree(self, parent_dir):
|
| 700 |
+
"""All files in the directory should be either links or hard links"""
|
| 701 |
+
files = list(Path(parent_dir).glob("**/*"))
|
| 702 |
+
assert files # Should not be empty
|
| 703 |
+
for file in files:
|
| 704 |
+
assert file.is_symlink() or os.stat(file).st_nlink > 0
|
| 705 |
+
|
| 706 |
+
def test_editable_without_config_settings(self, tmpdir_cwd):
|
| 707 |
+
"""
|
| 708 |
+
Sanity check to ensure tests with --mode=strict are different from the ones
|
| 709 |
+
without --mode.
|
| 710 |
+
|
| 711 |
+
--mode=strict should create a local directory with a package tree.
|
| 712 |
+
The directory should not get created otherwise.
|
| 713 |
+
"""
|
| 714 |
+
path.build(self._simple_pyproject_example)
|
| 715 |
+
build_backend = self.get_build_backend()
|
| 716 |
+
assert not Path("build").exists()
|
| 717 |
+
build_backend.build_editable("temp")
|
| 718 |
+
assert not Path("build").exists()
|
| 719 |
+
|
| 720 |
+
def test_build_wheel_inplace(self, tmpdir_cwd):
|
| 721 |
+
config_settings = {"--build-option": ["build_ext", "--inplace"]}
|
| 722 |
+
path.build(self._simple_pyproject_example)
|
| 723 |
+
build_backend = self.get_build_backend()
|
| 724 |
+
assert not Path("build").exists()
|
| 725 |
+
Path("build").mkdir()
|
| 726 |
+
build_backend.prepare_metadata_for_build_wheel("build", config_settings)
|
| 727 |
+
build_backend.build_wheel("build", config_settings)
|
| 728 |
+
assert Path("build/proj-42-py3-none-any.whl").exists()
|
| 729 |
+
|
| 730 |
+
@pytest.mark.parametrize("config_settings", [{"editable-mode": "strict"}])
|
| 731 |
+
def test_editable_with_config_settings(self, tmpdir_cwd, config_settings):
|
| 732 |
+
path.build({**self._simple_pyproject_example, '_meta': {}})
|
| 733 |
+
assert not Path("build").exists()
|
| 734 |
+
build_backend = self.get_build_backend()
|
| 735 |
+
build_backend.prepare_metadata_for_build_editable("_meta", config_settings)
|
| 736 |
+
build_backend.build_editable("temp", config_settings, "_meta")
|
| 737 |
+
self._assert_link_tree(next(Path("build").glob("__editable__.*")))
|
| 738 |
+
|
| 739 |
+
@pytest.mark.parametrize(
|
| 740 |
+
("setup_literal", "requirements"),
|
| 741 |
+
[
|
| 742 |
+
("'foo'", ['foo']),
|
| 743 |
+
("['foo']", ['foo']),
|
| 744 |
+
(r"'foo\n'", ['foo']),
|
| 745 |
+
(r"'foo\n\n'", ['foo']),
|
| 746 |
+
("['foo', 'bar']", ['foo', 'bar']),
|
| 747 |
+
(r"'# Has a comment line\nfoo'", ['foo']),
|
| 748 |
+
(r"'foo # Has an inline comment'", ['foo']),
|
| 749 |
+
(r"'foo \\\n >=3.0'", ['foo>=3.0']),
|
| 750 |
+
(r"'foo\nbar'", ['foo', 'bar']),
|
| 751 |
+
(r"'foo\nbar\n'", ['foo', 'bar']),
|
| 752 |
+
(r"['foo\n', 'bar\n']", ['foo', 'bar']),
|
| 753 |
+
],
|
| 754 |
+
)
|
| 755 |
+
@pytest.mark.parametrize('use_wheel', [True, False])
|
| 756 |
+
def test_setup_requires(self, setup_literal, requirements, use_wheel, tmpdir_cwd):
|
| 757 |
+
files = {
|
| 758 |
+
'setup.py': DALS(
|
| 759 |
+
"""
|
| 760 |
+
from setuptools import setup
|
| 761 |
+
|
| 762 |
+
setup(
|
| 763 |
+
name="qux",
|
| 764 |
+
version="0.0.0",
|
| 765 |
+
py_modules=["hello"],
|
| 766 |
+
setup_requires={setup_literal},
|
| 767 |
+
)
|
| 768 |
+
"""
|
| 769 |
+
).format(setup_literal=setup_literal),
|
| 770 |
+
'hello.py': DALS(
|
| 771 |
+
"""
|
| 772 |
+
def run():
|
| 773 |
+
print('hello')
|
| 774 |
+
"""
|
| 775 |
+
),
|
| 776 |
+
}
|
| 777 |
+
|
| 778 |
+
path.build(files)
|
| 779 |
+
|
| 780 |
+
build_backend = self.get_build_backend()
|
| 781 |
+
|
| 782 |
+
if use_wheel:
|
| 783 |
+
get_requires = build_backend.get_requires_for_build_wheel
|
| 784 |
+
else:
|
| 785 |
+
get_requires = build_backend.get_requires_for_build_sdist
|
| 786 |
+
|
| 787 |
+
# Ensure that the build requirements are properly parsed
|
| 788 |
+
expected = sorted(requirements)
|
| 789 |
+
actual = get_requires()
|
| 790 |
+
|
| 791 |
+
assert expected == sorted(actual)
|
| 792 |
+
|
| 793 |
+
def test_setup_requires_with_auto_discovery(self, tmpdir_cwd):
|
| 794 |
+
# Make sure patches introduced to retrieve setup_requires don't accidentally
|
| 795 |
+
# activate auto-discovery and cause problems due to the incomplete set of
|
| 796 |
+
# attributes passed to MinimalDistribution
|
| 797 |
+
files = {
|
| 798 |
+
'pyproject.toml': DALS(
|
| 799 |
+
"""
|
| 800 |
+
[project]
|
| 801 |
+
name = "proj"
|
| 802 |
+
version = "42"
|
| 803 |
+
"""
|
| 804 |
+
),
|
| 805 |
+
"setup.py": DALS(
|
| 806 |
+
"""
|
| 807 |
+
__import__('setuptools').setup(
|
| 808 |
+
setup_requires=["foo"],
|
| 809 |
+
py_modules = ["hello", "world"]
|
| 810 |
+
)
|
| 811 |
+
"""
|
| 812 |
+
),
|
| 813 |
+
'hello.py': "'hello'",
|
| 814 |
+
'world.py': "'world'",
|
| 815 |
+
}
|
| 816 |
+
path.build(files)
|
| 817 |
+
build_backend = self.get_build_backend()
|
| 818 |
+
setup_requires = build_backend.get_requires_for_build_wheel()
|
| 819 |
+
assert setup_requires == ["foo"]
|
| 820 |
+
|
| 821 |
+
def test_dont_install_setup_requires(self, tmpdir_cwd):
|
| 822 |
+
files = {
|
| 823 |
+
'setup.py': DALS(
|
| 824 |
+
"""
|
| 825 |
+
from setuptools import setup
|
| 826 |
+
|
| 827 |
+
setup(
|
| 828 |
+
name="qux",
|
| 829 |
+
version="0.0.0",
|
| 830 |
+
py_modules=["hello"],
|
| 831 |
+
setup_requires=["does-not-exist >99"],
|
| 832 |
+
)
|
| 833 |
+
"""
|
| 834 |
+
),
|
| 835 |
+
'hello.py': DALS(
|
| 836 |
+
"""
|
| 837 |
+
def run():
|
| 838 |
+
print('hello')
|
| 839 |
+
"""
|
| 840 |
+
),
|
| 841 |
+
}
|
| 842 |
+
|
| 843 |
+
path.build(files)
|
| 844 |
+
|
| 845 |
+
build_backend = self.get_build_backend()
|
| 846 |
+
|
| 847 |
+
dist_dir = os.path.abspath('pip-dist-info')
|
| 848 |
+
os.makedirs(dist_dir)
|
| 849 |
+
|
| 850 |
+
# does-not-exist can't be satisfied, so if it attempts to install
|
| 851 |
+
# setup_requires, it will fail.
|
| 852 |
+
build_backend.prepare_metadata_for_build_wheel(dist_dir)
|
| 853 |
+
|
| 854 |
+
_sys_argv_0_passthrough = {
|
| 855 |
+
'setup.py': DALS(
|
| 856 |
+
"""
|
| 857 |
+
import os
|
| 858 |
+
import sys
|
| 859 |
+
|
| 860 |
+
__import__('setuptools').setup(
|
| 861 |
+
name='foo',
|
| 862 |
+
version='0.0.0',
|
| 863 |
+
)
|
| 864 |
+
|
| 865 |
+
sys_argv = os.path.abspath(sys.argv[0])
|
| 866 |
+
file_path = os.path.abspath('setup.py')
|
| 867 |
+
assert sys_argv == file_path
|
| 868 |
+
"""
|
| 869 |
+
)
|
| 870 |
+
}
|
| 871 |
+
|
| 872 |
+
def test_sys_argv_passthrough(self, tmpdir_cwd):
|
| 873 |
+
path.build(self._sys_argv_0_passthrough)
|
| 874 |
+
build_backend = self.get_build_backend()
|
| 875 |
+
with pytest.raises(AssertionError):
|
| 876 |
+
build_backend.build_sdist("temp")
|
| 877 |
+
|
| 878 |
+
_setup_py_file_abspath = {
|
| 879 |
+
'setup.py': DALS(
|
| 880 |
+
"""
|
| 881 |
+
import os
|
| 882 |
+
assert os.path.isabs(__file__)
|
| 883 |
+
__import__('setuptools').setup(
|
| 884 |
+
name='foo',
|
| 885 |
+
version='0.0.0',
|
| 886 |
+
py_modules=['hello'],
|
| 887 |
+
setup_requires=['six'],
|
| 888 |
+
)
|
| 889 |
+
"""
|
| 890 |
+
)
|
| 891 |
+
}
|
| 892 |
+
|
| 893 |
+
def test_setup_py_file_abspath(self, tmpdir_cwd):
|
| 894 |
+
path.build(self._setup_py_file_abspath)
|
| 895 |
+
build_backend = self.get_build_backend()
|
| 896 |
+
build_backend.build_sdist("temp")
|
| 897 |
+
|
| 898 |
+
@pytest.mark.parametrize('build_hook', ('build_sdist', 'build_wheel'))
|
| 899 |
+
def test_build_with_empty_setuppy(self, build_backend, build_hook):
|
| 900 |
+
files = {'setup.py': ''}
|
| 901 |
+
path.build(files)
|
| 902 |
+
|
| 903 |
+
msg = re.escape('No distribution was found.')
|
| 904 |
+
with pytest.raises(ValueError, match=msg):
|
| 905 |
+
getattr(build_backend, build_hook)("temp")
|
| 906 |
+
|
| 907 |
+
|
| 908 |
+
class TestBuildMetaLegacyBackend(TestBuildMetaBackend):
|
| 909 |
+
backend_name = 'setuptools.build_meta:__legacy__'
|
| 910 |
+
|
| 911 |
+
# build_meta_legacy-specific tests
|
| 912 |
+
def test_build_sdist_relative_path_import(self, tmpdir_cwd):
|
| 913 |
+
# This must fail in build_meta, but must pass in build_meta_legacy
|
| 914 |
+
path.build(self._relative_path_import_files)
|
| 915 |
+
|
| 916 |
+
build_backend = self.get_build_backend()
|
| 917 |
+
build_backend.build_sdist("temp")
|
| 918 |
+
|
| 919 |
+
def test_sys_argv_passthrough(self, tmpdir_cwd):
|
| 920 |
+
path.build(self._sys_argv_0_passthrough)
|
| 921 |
+
|
| 922 |
+
build_backend = self.get_build_backend()
|
| 923 |
+
build_backend.build_sdist("temp")
|
| 924 |
+
|
| 925 |
+
|
| 926 |
+
def test_legacy_editable_install(venv, tmpdir, tmpdir_cwd):
|
| 927 |
+
pyproject = """
|
| 928 |
+
[build-system]
|
| 929 |
+
requires = ["setuptools"]
|
| 930 |
+
build-backend = "setuptools.build_meta"
|
| 931 |
+
[project]
|
| 932 |
+
name = "myproj"
|
| 933 |
+
version = "42"
|
| 934 |
+
"""
|
| 935 |
+
path.build({"pyproject.toml": DALS(pyproject), "mymod.py": ""})
|
| 936 |
+
|
| 937 |
+
# First: sanity check
|
| 938 |
+
cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
|
| 939 |
+
output = venv.run(cmd, cwd=tmpdir).lower()
|
| 940 |
+
assert "running setup.py develop for myproj" not in output
|
| 941 |
+
assert "created wheel for myproj" in output
|
| 942 |
+
|
| 943 |
+
# Then: real test
|
| 944 |
+
env = {**os.environ, "SETUPTOOLS_ENABLE_FEATURES": "legacy-editable"}
|
| 945 |
+
cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
|
| 946 |
+
output = venv.run(cmd, cwd=tmpdir, env=env).lower()
|
| 947 |
+
assert "running setup.py develop for myproj" in output
|
| 948 |
+
|
| 949 |
+
|
| 950 |
+
@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
|
| 951 |
+
def test_sys_exit_0_in_setuppy(monkeypatch, tmp_path):
|
| 952 |
+
"""Setuptools should be resilient to setup.py with ``sys.exit(0)`` (#3973)."""
|
| 953 |
+
monkeypatch.chdir(tmp_path)
|
| 954 |
+
setuppy = """
|
| 955 |
+
import sys, setuptools
|
| 956 |
+
setuptools.setup(name='foo', version='0.0.0')
|
| 957 |
+
sys.exit(0)
|
| 958 |
+
"""
|
| 959 |
+
(tmp_path / "setup.py").write_text(DALS(setuppy), encoding="utf-8")
|
| 960 |
+
backend = BuildBackend(backend_name="setuptools.build_meta")
|
| 961 |
+
assert backend.get_requires_for_build_wheel() == []
|
| 962 |
+
|
| 963 |
+
|
| 964 |
+
def test_system_exit_in_setuppy(monkeypatch, tmp_path):
|
| 965 |
+
monkeypatch.chdir(tmp_path)
|
| 966 |
+
setuppy = "import sys; sys.exit('some error')"
|
| 967 |
+
(tmp_path / "setup.py").write_text(setuppy, encoding="utf-8")
|
| 968 |
+
with pytest.raises(SystemExit, match="some error"):
|
| 969 |
+
backend = BuildBackend(backend_name="setuptools.build_meta")
|
| 970 |
+
backend.get_requires_for_build_wheel()
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_build_py.py
ADDED
|
@@ -0,0 +1,480 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import shutil
|
| 3 |
+
import stat
|
| 4 |
+
import warnings
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from unittest.mock import Mock
|
| 7 |
+
|
| 8 |
+
import jaraco.path
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
from setuptools import SetuptoolsDeprecationWarning
|
| 12 |
+
from setuptools.dist import Distribution
|
| 13 |
+
|
| 14 |
+
from .textwrap import DALS
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def test_directories_in_package_data_glob(tmpdir_cwd):
|
| 18 |
+
"""
|
| 19 |
+
Directories matching the glob in package_data should
|
| 20 |
+
not be included in the package data.
|
| 21 |
+
|
| 22 |
+
Regression test for #261.
|
| 23 |
+
"""
|
| 24 |
+
dist = Distribution(
|
| 25 |
+
dict(
|
| 26 |
+
script_name='setup.py',
|
| 27 |
+
script_args=['build_py'],
|
| 28 |
+
packages=[''],
|
| 29 |
+
package_data={'': ['path/*']},
|
| 30 |
+
)
|
| 31 |
+
)
|
| 32 |
+
os.makedirs('path/subpath')
|
| 33 |
+
dist.parse_command_line()
|
| 34 |
+
dist.run_commands()
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def test_recursive_in_package_data_glob(tmpdir_cwd):
|
| 38 |
+
"""
|
| 39 |
+
Files matching recursive globs (**) in package_data should
|
| 40 |
+
be included in the package data.
|
| 41 |
+
|
| 42 |
+
#1806
|
| 43 |
+
"""
|
| 44 |
+
dist = Distribution(
|
| 45 |
+
dict(
|
| 46 |
+
script_name='setup.py',
|
| 47 |
+
script_args=['build_py'],
|
| 48 |
+
packages=[''],
|
| 49 |
+
package_data={'': ['path/**/data']},
|
| 50 |
+
)
|
| 51 |
+
)
|
| 52 |
+
os.makedirs('path/subpath/subsubpath')
|
| 53 |
+
open('path/subpath/subsubpath/data', 'wb').close()
|
| 54 |
+
|
| 55 |
+
dist.parse_command_line()
|
| 56 |
+
dist.run_commands()
|
| 57 |
+
|
| 58 |
+
assert stat.S_ISREG(os.stat('build/lib/path/subpath/subsubpath/data').st_mode), (
|
| 59 |
+
"File is not included"
|
| 60 |
+
)
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def test_read_only(tmpdir_cwd):
|
| 64 |
+
"""
|
| 65 |
+
Ensure read-only flag is not preserved in copy
|
| 66 |
+
for package modules and package data, as that
|
| 67 |
+
causes problems with deleting read-only files on
|
| 68 |
+
Windows.
|
| 69 |
+
|
| 70 |
+
#1451
|
| 71 |
+
"""
|
| 72 |
+
dist = Distribution(
|
| 73 |
+
dict(
|
| 74 |
+
script_name='setup.py',
|
| 75 |
+
script_args=['build_py'],
|
| 76 |
+
packages=['pkg'],
|
| 77 |
+
package_data={'pkg': ['data.dat']},
|
| 78 |
+
)
|
| 79 |
+
)
|
| 80 |
+
os.makedirs('pkg')
|
| 81 |
+
open('pkg/__init__.py', 'wb').close()
|
| 82 |
+
open('pkg/data.dat', 'wb').close()
|
| 83 |
+
os.chmod('pkg/__init__.py', stat.S_IREAD)
|
| 84 |
+
os.chmod('pkg/data.dat', stat.S_IREAD)
|
| 85 |
+
dist.parse_command_line()
|
| 86 |
+
dist.run_commands()
|
| 87 |
+
shutil.rmtree('build')
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
@pytest.mark.xfail(
|
| 91 |
+
'platform.system() == "Windows"',
|
| 92 |
+
reason="On Windows, files do not have executable bits",
|
| 93 |
+
raises=AssertionError,
|
| 94 |
+
strict=True,
|
| 95 |
+
)
|
| 96 |
+
def test_executable_data(tmpdir_cwd):
|
| 97 |
+
"""
|
| 98 |
+
Ensure executable bit is preserved in copy for
|
| 99 |
+
package data, as users rely on it for scripts.
|
| 100 |
+
|
| 101 |
+
#2041
|
| 102 |
+
"""
|
| 103 |
+
dist = Distribution(
|
| 104 |
+
dict(
|
| 105 |
+
script_name='setup.py',
|
| 106 |
+
script_args=['build_py'],
|
| 107 |
+
packages=['pkg'],
|
| 108 |
+
package_data={'pkg': ['run-me']},
|
| 109 |
+
)
|
| 110 |
+
)
|
| 111 |
+
os.makedirs('pkg')
|
| 112 |
+
open('pkg/__init__.py', 'wb').close()
|
| 113 |
+
open('pkg/run-me', 'wb').close()
|
| 114 |
+
os.chmod('pkg/run-me', 0o700)
|
| 115 |
+
|
| 116 |
+
dist.parse_command_line()
|
| 117 |
+
dist.run_commands()
|
| 118 |
+
|
| 119 |
+
assert os.stat('build/lib/pkg/run-me').st_mode & stat.S_IEXEC, (
|
| 120 |
+
"Script is not executable"
|
| 121 |
+
)
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
EXAMPLE_WITH_MANIFEST = {
|
| 125 |
+
"setup.cfg": DALS(
|
| 126 |
+
"""
|
| 127 |
+
[metadata]
|
| 128 |
+
name = mypkg
|
| 129 |
+
version = 42
|
| 130 |
+
|
| 131 |
+
[options]
|
| 132 |
+
include_package_data = True
|
| 133 |
+
packages = find:
|
| 134 |
+
|
| 135 |
+
[options.packages.find]
|
| 136 |
+
exclude = *.tests*
|
| 137 |
+
"""
|
| 138 |
+
),
|
| 139 |
+
"mypkg": {
|
| 140 |
+
"__init__.py": "",
|
| 141 |
+
"resource_file.txt": "",
|
| 142 |
+
"tests": {
|
| 143 |
+
"__init__.py": "",
|
| 144 |
+
"test_mypkg.py": "",
|
| 145 |
+
"test_file.txt": "",
|
| 146 |
+
},
|
| 147 |
+
},
|
| 148 |
+
"MANIFEST.in": DALS(
|
| 149 |
+
"""
|
| 150 |
+
global-include *.py *.txt
|
| 151 |
+
global-exclude *.py[cod]
|
| 152 |
+
prune dist
|
| 153 |
+
prune build
|
| 154 |
+
prune *.egg-info
|
| 155 |
+
"""
|
| 156 |
+
),
|
| 157 |
+
}
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def test_excluded_subpackages(tmpdir_cwd):
|
| 161 |
+
jaraco.path.build(EXAMPLE_WITH_MANIFEST)
|
| 162 |
+
dist = Distribution({"script_name": "%PEP 517%"})
|
| 163 |
+
dist.parse_config_files()
|
| 164 |
+
|
| 165 |
+
build_py = dist.get_command_obj("build_py")
|
| 166 |
+
|
| 167 |
+
msg = r"Python recognizes 'mypkg\.tests' as an importable package"
|
| 168 |
+
with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
|
| 169 |
+
# TODO: To fix #3260 we need some transition period to deprecate the
|
| 170 |
+
# existing behavior of `include_package_data`. After the transition, we
|
| 171 |
+
# should remove the warning and fix the behaviour.
|
| 172 |
+
|
| 173 |
+
if os.getenv("SETUPTOOLS_USE_DISTUTILS") == "stdlib":
|
| 174 |
+
# pytest.warns reset the warning filter temporarily
|
| 175 |
+
# https://github.com/pytest-dev/pytest/issues/4011#issuecomment-423494810
|
| 176 |
+
warnings.filterwarnings(
|
| 177 |
+
"ignore",
|
| 178 |
+
"'encoding' argument not specified",
|
| 179 |
+
module="distutils.text_file",
|
| 180 |
+
# This warning is already fixed in pypa/distutils but not in stdlib
|
| 181 |
+
)
|
| 182 |
+
|
| 183 |
+
build_py.finalize_options()
|
| 184 |
+
build_py.run()
|
| 185 |
+
|
| 186 |
+
build_dir = Path(dist.get_command_obj("build_py").build_lib)
|
| 187 |
+
assert (build_dir / "mypkg/__init__.py").exists()
|
| 188 |
+
assert (build_dir / "mypkg/resource_file.txt").exists()
|
| 189 |
+
|
| 190 |
+
# Setuptools is configured to ignore `mypkg.tests`, therefore the following
|
| 191 |
+
# files/dirs should not be included in the distribution.
|
| 192 |
+
for f in [
|
| 193 |
+
"mypkg/tests/__init__.py",
|
| 194 |
+
"mypkg/tests/test_mypkg.py",
|
| 195 |
+
"mypkg/tests/test_file.txt",
|
| 196 |
+
"mypkg/tests",
|
| 197 |
+
]:
|
| 198 |
+
with pytest.raises(AssertionError):
|
| 199 |
+
# TODO: Enforce the following assertion once #3260 is fixed
|
| 200 |
+
# (remove context manager and the following xfail).
|
| 201 |
+
assert not (build_dir / f).exists()
|
| 202 |
+
|
| 203 |
+
pytest.xfail("#3260")
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
|
| 207 |
+
def test_existing_egg_info(tmpdir_cwd, monkeypatch):
|
| 208 |
+
"""When provided with the ``existing_egg_info_dir`` attribute, build_py should not
|
| 209 |
+
attempt to run egg_info again.
|
| 210 |
+
"""
|
| 211 |
+
# == Pre-condition ==
|
| 212 |
+
# Generate an egg-info dir
|
| 213 |
+
jaraco.path.build(EXAMPLE_WITH_MANIFEST)
|
| 214 |
+
dist = Distribution({"script_name": "%PEP 517%"})
|
| 215 |
+
dist.parse_config_files()
|
| 216 |
+
assert dist.include_package_data
|
| 217 |
+
|
| 218 |
+
egg_info = dist.get_command_obj("egg_info")
|
| 219 |
+
dist.run_command("egg_info")
|
| 220 |
+
egg_info_dir = next(Path(egg_info.egg_base).glob("*.egg-info"))
|
| 221 |
+
assert egg_info_dir.is_dir()
|
| 222 |
+
|
| 223 |
+
# == Setup ==
|
| 224 |
+
build_py = dist.get_command_obj("build_py")
|
| 225 |
+
build_py.finalize_options()
|
| 226 |
+
egg_info = dist.get_command_obj("egg_info")
|
| 227 |
+
egg_info_run = Mock(side_effect=egg_info.run)
|
| 228 |
+
monkeypatch.setattr(egg_info, "run", egg_info_run)
|
| 229 |
+
|
| 230 |
+
# == Remove caches ==
|
| 231 |
+
# egg_info is called when build_py looks for data_files, which gets cached.
|
| 232 |
+
# We need to ensure it is not cached yet, otherwise it may impact on the tests
|
| 233 |
+
build_py.__dict__.pop('data_files', None)
|
| 234 |
+
dist.reinitialize_command(egg_info)
|
| 235 |
+
|
| 236 |
+
# == Sanity check ==
|
| 237 |
+
# Ensure that if existing_egg_info is not given, build_py attempts to run egg_info
|
| 238 |
+
build_py.existing_egg_info_dir = None
|
| 239 |
+
build_py.run()
|
| 240 |
+
egg_info_run.assert_called()
|
| 241 |
+
|
| 242 |
+
# == Remove caches ==
|
| 243 |
+
egg_info_run.reset_mock()
|
| 244 |
+
build_py.__dict__.pop('data_files', None)
|
| 245 |
+
dist.reinitialize_command(egg_info)
|
| 246 |
+
|
| 247 |
+
# == Actual test ==
|
| 248 |
+
# Ensure that if existing_egg_info_dir is given, egg_info doesn't run
|
| 249 |
+
build_py.existing_egg_info_dir = egg_info_dir
|
| 250 |
+
build_py.run()
|
| 251 |
+
egg_info_run.assert_not_called()
|
| 252 |
+
assert build_py.data_files
|
| 253 |
+
|
| 254 |
+
# Make sure the list of outputs is actually OK
|
| 255 |
+
outputs = map(lambda x: x.replace(os.sep, "/"), build_py.get_outputs())
|
| 256 |
+
assert outputs
|
| 257 |
+
example = str(Path(build_py.build_lib, "mypkg/__init__.py")).replace(os.sep, "/")
|
| 258 |
+
assert example in outputs
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
EXAMPLE_ARBITRARY_MAPPING = {
|
| 262 |
+
"pyproject.toml": DALS(
|
| 263 |
+
"""
|
| 264 |
+
[project]
|
| 265 |
+
name = "mypkg"
|
| 266 |
+
version = "42"
|
| 267 |
+
|
| 268 |
+
[tool.setuptools]
|
| 269 |
+
packages = ["mypkg", "mypkg.sub1", "mypkg.sub2", "mypkg.sub2.nested"]
|
| 270 |
+
|
| 271 |
+
[tool.setuptools.package-dir]
|
| 272 |
+
"" = "src"
|
| 273 |
+
"mypkg.sub2" = "src/mypkg/_sub2"
|
| 274 |
+
"mypkg.sub2.nested" = "other"
|
| 275 |
+
"""
|
| 276 |
+
),
|
| 277 |
+
"src": {
|
| 278 |
+
"mypkg": {
|
| 279 |
+
"__init__.py": "",
|
| 280 |
+
"resource_file.txt": "",
|
| 281 |
+
"sub1": {
|
| 282 |
+
"__init__.py": "",
|
| 283 |
+
"mod1.py": "",
|
| 284 |
+
},
|
| 285 |
+
"_sub2": {
|
| 286 |
+
"mod2.py": "",
|
| 287 |
+
},
|
| 288 |
+
},
|
| 289 |
+
},
|
| 290 |
+
"other": {
|
| 291 |
+
"__init__.py": "",
|
| 292 |
+
"mod3.py": "",
|
| 293 |
+
},
|
| 294 |
+
"MANIFEST.in": DALS(
|
| 295 |
+
"""
|
| 296 |
+
global-include *.py *.txt
|
| 297 |
+
global-exclude *.py[cod]
|
| 298 |
+
"""
|
| 299 |
+
),
|
| 300 |
+
}
|
| 301 |
+
|
| 302 |
+
|
| 303 |
+
def test_get_outputs(tmpdir_cwd):
|
| 304 |
+
jaraco.path.build(EXAMPLE_ARBITRARY_MAPPING)
|
| 305 |
+
dist = Distribution({"script_name": "%test%"})
|
| 306 |
+
dist.parse_config_files()
|
| 307 |
+
|
| 308 |
+
build_py = dist.get_command_obj("build_py")
|
| 309 |
+
build_py.editable_mode = True
|
| 310 |
+
build_py.ensure_finalized()
|
| 311 |
+
build_lib = build_py.build_lib.replace(os.sep, "/")
|
| 312 |
+
outputs = {x.replace(os.sep, "/") for x in build_py.get_outputs()}
|
| 313 |
+
assert outputs == {
|
| 314 |
+
f"{build_lib}/mypkg/__init__.py",
|
| 315 |
+
f"{build_lib}/mypkg/resource_file.txt",
|
| 316 |
+
f"{build_lib}/mypkg/sub1/__init__.py",
|
| 317 |
+
f"{build_lib}/mypkg/sub1/mod1.py",
|
| 318 |
+
f"{build_lib}/mypkg/sub2/mod2.py",
|
| 319 |
+
f"{build_lib}/mypkg/sub2/nested/__init__.py",
|
| 320 |
+
f"{build_lib}/mypkg/sub2/nested/mod3.py",
|
| 321 |
+
}
|
| 322 |
+
mapping = {
|
| 323 |
+
k.replace(os.sep, "/"): v.replace(os.sep, "/")
|
| 324 |
+
for k, v in build_py.get_output_mapping().items()
|
| 325 |
+
}
|
| 326 |
+
assert mapping == {
|
| 327 |
+
f"{build_lib}/mypkg/__init__.py": "src/mypkg/__init__.py",
|
| 328 |
+
f"{build_lib}/mypkg/resource_file.txt": "src/mypkg/resource_file.txt",
|
| 329 |
+
f"{build_lib}/mypkg/sub1/__init__.py": "src/mypkg/sub1/__init__.py",
|
| 330 |
+
f"{build_lib}/mypkg/sub1/mod1.py": "src/mypkg/sub1/mod1.py",
|
| 331 |
+
f"{build_lib}/mypkg/sub2/mod2.py": "src/mypkg/_sub2/mod2.py",
|
| 332 |
+
f"{build_lib}/mypkg/sub2/nested/__init__.py": "other/__init__.py",
|
| 333 |
+
f"{build_lib}/mypkg/sub2/nested/mod3.py": "other/mod3.py",
|
| 334 |
+
}
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
class TestTypeInfoFiles:
|
| 338 |
+
PYPROJECTS = {
|
| 339 |
+
"default_pyproject": DALS(
|
| 340 |
+
"""
|
| 341 |
+
[project]
|
| 342 |
+
name = "foo"
|
| 343 |
+
version = "1"
|
| 344 |
+
"""
|
| 345 |
+
),
|
| 346 |
+
"dont_include_package_data": DALS(
|
| 347 |
+
"""
|
| 348 |
+
[project]
|
| 349 |
+
name = "foo"
|
| 350 |
+
version = "1"
|
| 351 |
+
|
| 352 |
+
[tool.setuptools]
|
| 353 |
+
include-package-data = false
|
| 354 |
+
"""
|
| 355 |
+
),
|
| 356 |
+
"exclude_type_info": DALS(
|
| 357 |
+
"""
|
| 358 |
+
[project]
|
| 359 |
+
name = "foo"
|
| 360 |
+
version = "1"
|
| 361 |
+
|
| 362 |
+
[tool.setuptools]
|
| 363 |
+
include-package-data = false
|
| 364 |
+
|
| 365 |
+
[tool.setuptools.exclude-package-data]
|
| 366 |
+
"*" = ["py.typed", "*.pyi"]
|
| 367 |
+
"""
|
| 368 |
+
),
|
| 369 |
+
}
|
| 370 |
+
|
| 371 |
+
EXAMPLES = {
|
| 372 |
+
"simple_namespace": {
|
| 373 |
+
"directory_structure": {
|
| 374 |
+
"foo": {
|
| 375 |
+
"bar.pyi": "",
|
| 376 |
+
"py.typed": "",
|
| 377 |
+
"__init__.py": "",
|
| 378 |
+
}
|
| 379 |
+
},
|
| 380 |
+
"expected_type_files": {"foo/bar.pyi", "foo/py.typed"},
|
| 381 |
+
},
|
| 382 |
+
"nested_inside_namespace": {
|
| 383 |
+
"directory_structure": {
|
| 384 |
+
"foo": {
|
| 385 |
+
"bar": {
|
| 386 |
+
"py.typed": "",
|
| 387 |
+
"mod.pyi": "",
|
| 388 |
+
}
|
| 389 |
+
}
|
| 390 |
+
},
|
| 391 |
+
"expected_type_files": {"foo/bar/mod.pyi", "foo/bar/py.typed"},
|
| 392 |
+
},
|
| 393 |
+
"namespace_nested_inside_regular": {
|
| 394 |
+
"directory_structure": {
|
| 395 |
+
"foo": {
|
| 396 |
+
"namespace": {
|
| 397 |
+
"foo.pyi": "",
|
| 398 |
+
},
|
| 399 |
+
"__init__.pyi": "",
|
| 400 |
+
"py.typed": "",
|
| 401 |
+
}
|
| 402 |
+
},
|
| 403 |
+
"expected_type_files": {
|
| 404 |
+
"foo/namespace/foo.pyi",
|
| 405 |
+
"foo/__init__.pyi",
|
| 406 |
+
"foo/py.typed",
|
| 407 |
+
},
|
| 408 |
+
},
|
| 409 |
+
}
|
| 410 |
+
|
| 411 |
+
@pytest.mark.parametrize(
|
| 412 |
+
"pyproject",
|
| 413 |
+
[
|
| 414 |
+
"default_pyproject",
|
| 415 |
+
pytest.param(
|
| 416 |
+
"dont_include_package_data",
|
| 417 |
+
marks=pytest.mark.xfail(reason="pypa/setuptools#4350"),
|
| 418 |
+
),
|
| 419 |
+
],
|
| 420 |
+
)
|
| 421 |
+
@pytest.mark.parametrize("example", EXAMPLES.keys())
|
| 422 |
+
def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example):
|
| 423 |
+
structure = {
|
| 424 |
+
**self.EXAMPLES[example]["directory_structure"],
|
| 425 |
+
"pyproject.toml": self.PYPROJECTS[pyproject],
|
| 426 |
+
}
|
| 427 |
+
expected_type_files = self.EXAMPLES[example]["expected_type_files"]
|
| 428 |
+
jaraco.path.build(structure)
|
| 429 |
+
|
| 430 |
+
build_py = get_finalized_build_py()
|
| 431 |
+
outputs = get_outputs(build_py)
|
| 432 |
+
assert expected_type_files <= outputs
|
| 433 |
+
|
| 434 |
+
@pytest.mark.parametrize("pyproject", ["exclude_type_info"])
|
| 435 |
+
@pytest.mark.parametrize("example", EXAMPLES.keys())
|
| 436 |
+
def test_type_files_can_be_excluded(self, tmpdir_cwd, pyproject, example):
|
| 437 |
+
structure = {
|
| 438 |
+
**self.EXAMPLES[example]["directory_structure"],
|
| 439 |
+
"pyproject.toml": self.PYPROJECTS[pyproject],
|
| 440 |
+
}
|
| 441 |
+
expected_type_files = self.EXAMPLES[example]["expected_type_files"]
|
| 442 |
+
jaraco.path.build(structure)
|
| 443 |
+
|
| 444 |
+
build_py = get_finalized_build_py()
|
| 445 |
+
outputs = get_outputs(build_py)
|
| 446 |
+
assert expected_type_files.isdisjoint(outputs)
|
| 447 |
+
|
| 448 |
+
def test_stub_only_package(self, tmpdir_cwd):
|
| 449 |
+
structure = {
|
| 450 |
+
"pyproject.toml": DALS(
|
| 451 |
+
"""
|
| 452 |
+
[project]
|
| 453 |
+
name = "foo-stubs"
|
| 454 |
+
version = "1"
|
| 455 |
+
"""
|
| 456 |
+
),
|
| 457 |
+
"foo-stubs": {"__init__.pyi": "", "bar.pyi": ""},
|
| 458 |
+
}
|
| 459 |
+
expected_type_files = {"foo-stubs/__init__.pyi", "foo-stubs/bar.pyi"}
|
| 460 |
+
jaraco.path.build(structure)
|
| 461 |
+
|
| 462 |
+
build_py = get_finalized_build_py()
|
| 463 |
+
outputs = get_outputs(build_py)
|
| 464 |
+
assert expected_type_files <= outputs
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
def get_finalized_build_py(script_name="%build_py-test%"):
|
| 468 |
+
dist = Distribution({"script_name": script_name})
|
| 469 |
+
dist.parse_config_files()
|
| 470 |
+
build_py = dist.get_command_obj("build_py")
|
| 471 |
+
build_py.finalize_options()
|
| 472 |
+
return build_py
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
def get_outputs(build_py):
|
| 476 |
+
build_dir = Path(build_py.build_lib)
|
| 477 |
+
return {
|
| 478 |
+
os.path.relpath(x, build_dir).replace(os.sep, "/")
|
| 479 |
+
for x in build_py.get_outputs()
|
| 480 |
+
}
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_config_discovery.py
ADDED
|
@@ -0,0 +1,647 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
from configparser import ConfigParser
|
| 4 |
+
from itertools import product
|
| 5 |
+
from typing import cast
|
| 6 |
+
|
| 7 |
+
import jaraco.path
|
| 8 |
+
import pytest
|
| 9 |
+
from path import Path
|
| 10 |
+
|
| 11 |
+
import setuptools # noqa: F401 # force distutils.core to be patched
|
| 12 |
+
from setuptools.command.sdist import sdist
|
| 13 |
+
from setuptools.discovery import find_package_path, find_parent_package
|
| 14 |
+
from setuptools.dist import Distribution
|
| 15 |
+
from setuptools.errors import PackageDiscoveryError
|
| 16 |
+
|
| 17 |
+
from .contexts import quiet
|
| 18 |
+
from .integration.helpers import get_sdist_members, get_wheel_members, run
|
| 19 |
+
from .textwrap import DALS
|
| 20 |
+
|
| 21 |
+
import distutils.core
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class TestFindParentPackage:
|
| 25 |
+
def test_single_package(self, tmp_path):
|
| 26 |
+
# find_parent_package should find a non-namespace parent package
|
| 27 |
+
(tmp_path / "src/namespace/pkg/nested").mkdir(exist_ok=True, parents=True)
|
| 28 |
+
(tmp_path / "src/namespace/pkg/nested/__init__.py").touch()
|
| 29 |
+
(tmp_path / "src/namespace/pkg/__init__.py").touch()
|
| 30 |
+
packages = ["namespace", "namespace.pkg", "namespace.pkg.nested"]
|
| 31 |
+
assert find_parent_package(packages, {"": "src"}, tmp_path) == "namespace.pkg"
|
| 32 |
+
|
| 33 |
+
def test_multiple_toplevel(self, tmp_path):
|
| 34 |
+
# find_parent_package should return null if the given list of packages does not
|
| 35 |
+
# have a single parent package
|
| 36 |
+
multiple = ["pkg", "pkg1", "pkg2"]
|
| 37 |
+
for name in multiple:
|
| 38 |
+
(tmp_path / f"src/{name}").mkdir(exist_ok=True, parents=True)
|
| 39 |
+
(tmp_path / f"src/{name}/__init__.py").touch()
|
| 40 |
+
assert find_parent_package(multiple, {"": "src"}, tmp_path) is None
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class TestDiscoverPackagesAndPyModules:
|
| 44 |
+
"""Make sure discovered values for ``packages`` and ``py_modules`` work
|
| 45 |
+
similarly to explicit configuration for the simple scenarios.
|
| 46 |
+
"""
|
| 47 |
+
|
| 48 |
+
OPTIONS = {
|
| 49 |
+
# Different options according to the circumstance being tested
|
| 50 |
+
"explicit-src": {"package_dir": {"": "src"}, "packages": ["pkg"]},
|
| 51 |
+
"variation-lib": {
|
| 52 |
+
"package_dir": {"": "lib"}, # variation of the source-layout
|
| 53 |
+
},
|
| 54 |
+
"explicit-flat": {"packages": ["pkg"]},
|
| 55 |
+
"explicit-single_module": {"py_modules": ["pkg"]},
|
| 56 |
+
"explicit-namespace": {"packages": ["ns", "ns.pkg"]},
|
| 57 |
+
"automatic-src": {},
|
| 58 |
+
"automatic-flat": {},
|
| 59 |
+
"automatic-single_module": {},
|
| 60 |
+
"automatic-namespace": {},
|
| 61 |
+
}
|
| 62 |
+
FILES = {
|
| 63 |
+
"src": ["src/pkg/__init__.py", "src/pkg/main.py"],
|
| 64 |
+
"lib": ["lib/pkg/__init__.py", "lib/pkg/main.py"],
|
| 65 |
+
"flat": ["pkg/__init__.py", "pkg/main.py"],
|
| 66 |
+
"single_module": ["pkg.py"],
|
| 67 |
+
"namespace": ["ns/pkg/__init__.py"],
|
| 68 |
+
}
|
| 69 |
+
|
| 70 |
+
def _get_info(self, circumstance):
|
| 71 |
+
_, _, layout = circumstance.partition("-")
|
| 72 |
+
files = self.FILES[layout]
|
| 73 |
+
options = self.OPTIONS[circumstance]
|
| 74 |
+
return files, options
|
| 75 |
+
|
| 76 |
+
@pytest.mark.parametrize("circumstance", OPTIONS.keys())
|
| 77 |
+
def test_sdist_filelist(self, tmp_path, circumstance):
|
| 78 |
+
files, options = self._get_info(circumstance)
|
| 79 |
+
_populate_project_dir(tmp_path, files, options)
|
| 80 |
+
|
| 81 |
+
_, cmd = _run_sdist_programatically(tmp_path, options)
|
| 82 |
+
|
| 83 |
+
manifest = [f.replace(os.sep, "/") for f in cmd.filelist.files]
|
| 84 |
+
for file in files:
|
| 85 |
+
assert any(f.endswith(file) for f in manifest)
|
| 86 |
+
|
| 87 |
+
@pytest.mark.parametrize("circumstance", OPTIONS.keys())
|
| 88 |
+
def test_project(self, tmp_path, circumstance):
|
| 89 |
+
files, options = self._get_info(circumstance)
|
| 90 |
+
_populate_project_dir(tmp_path, files, options)
|
| 91 |
+
|
| 92 |
+
# Simulate a pre-existing `build` directory
|
| 93 |
+
(tmp_path / "build").mkdir()
|
| 94 |
+
(tmp_path / "build/lib").mkdir()
|
| 95 |
+
(tmp_path / "build/bdist.linux-x86_64").mkdir()
|
| 96 |
+
(tmp_path / "build/bdist.linux-x86_64/file.py").touch()
|
| 97 |
+
(tmp_path / "build/lib/__init__.py").touch()
|
| 98 |
+
(tmp_path / "build/lib/file.py").touch()
|
| 99 |
+
(tmp_path / "dist").mkdir()
|
| 100 |
+
(tmp_path / "dist/file.py").touch()
|
| 101 |
+
|
| 102 |
+
_run_build(tmp_path)
|
| 103 |
+
|
| 104 |
+
sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
|
| 105 |
+
print("~~~~~ sdist_members ~~~~~")
|
| 106 |
+
print('\n'.join(sdist_files))
|
| 107 |
+
assert sdist_files >= set(files)
|
| 108 |
+
|
| 109 |
+
wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
|
| 110 |
+
print("~~~~~ wheel_members ~~~~~")
|
| 111 |
+
print('\n'.join(wheel_files))
|
| 112 |
+
orig_files = {f.replace("src/", "").replace("lib/", "") for f in files}
|
| 113 |
+
assert wheel_files >= orig_files
|
| 114 |
+
|
| 115 |
+
# Make sure build files are not included by mistake
|
| 116 |
+
for file in wheel_files:
|
| 117 |
+
assert "build" not in files
|
| 118 |
+
assert "dist" not in files
|
| 119 |
+
|
| 120 |
+
PURPOSEFULLY_EMPY = {
|
| 121 |
+
"setup.cfg": DALS(
|
| 122 |
+
"""
|
| 123 |
+
[metadata]
|
| 124 |
+
name = myproj
|
| 125 |
+
version = 0.0.0
|
| 126 |
+
|
| 127 |
+
[options]
|
| 128 |
+
{param} =
|
| 129 |
+
"""
|
| 130 |
+
),
|
| 131 |
+
"setup.py": DALS(
|
| 132 |
+
"""
|
| 133 |
+
__import__('setuptools').setup(
|
| 134 |
+
name="myproj",
|
| 135 |
+
version="0.0.0",
|
| 136 |
+
{param}=[]
|
| 137 |
+
)
|
| 138 |
+
"""
|
| 139 |
+
),
|
| 140 |
+
"pyproject.toml": DALS(
|
| 141 |
+
"""
|
| 142 |
+
[build-system]
|
| 143 |
+
requires = []
|
| 144 |
+
build-backend = 'setuptools.build_meta'
|
| 145 |
+
|
| 146 |
+
[project]
|
| 147 |
+
name = "myproj"
|
| 148 |
+
version = "0.0.0"
|
| 149 |
+
|
| 150 |
+
[tool.setuptools]
|
| 151 |
+
{param} = []
|
| 152 |
+
"""
|
| 153 |
+
),
|
| 154 |
+
"template-pyproject.toml": DALS(
|
| 155 |
+
"""
|
| 156 |
+
[build-system]
|
| 157 |
+
requires = []
|
| 158 |
+
build-backend = 'setuptools.build_meta'
|
| 159 |
+
"""
|
| 160 |
+
),
|
| 161 |
+
}
|
| 162 |
+
|
| 163 |
+
@pytest.mark.parametrize(
|
| 164 |
+
("config_file", "param", "circumstance"),
|
| 165 |
+
product(
|
| 166 |
+
["setup.cfg", "setup.py", "pyproject.toml"],
|
| 167 |
+
["packages", "py_modules"],
|
| 168 |
+
FILES.keys(),
|
| 169 |
+
),
|
| 170 |
+
)
|
| 171 |
+
def test_purposefully_empty(self, tmp_path, config_file, param, circumstance):
|
| 172 |
+
files = self.FILES[circumstance] + ["mod.py", "other.py", "src/pkg/__init__.py"]
|
| 173 |
+
_populate_project_dir(tmp_path, files, {})
|
| 174 |
+
|
| 175 |
+
if config_file == "pyproject.toml":
|
| 176 |
+
template_param = param.replace("_", "-")
|
| 177 |
+
else:
|
| 178 |
+
# Make sure build works with or without setup.cfg
|
| 179 |
+
pyproject = self.PURPOSEFULLY_EMPY["template-pyproject.toml"]
|
| 180 |
+
(tmp_path / "pyproject.toml").write_text(pyproject, encoding="utf-8")
|
| 181 |
+
template_param = param
|
| 182 |
+
|
| 183 |
+
config = self.PURPOSEFULLY_EMPY[config_file].format(param=template_param)
|
| 184 |
+
(tmp_path / config_file).write_text(config, encoding="utf-8")
|
| 185 |
+
|
| 186 |
+
dist = _get_dist(tmp_path, {})
|
| 187 |
+
# When either parameter package or py_modules is an empty list,
|
| 188 |
+
# then there should be no discovery
|
| 189 |
+
assert getattr(dist, param) == []
|
| 190 |
+
other = {"py_modules": "packages", "packages": "py_modules"}[param]
|
| 191 |
+
assert getattr(dist, other) is None
|
| 192 |
+
|
| 193 |
+
@pytest.mark.parametrize(
|
| 194 |
+
("extra_files", "pkgs"),
|
| 195 |
+
[
|
| 196 |
+
(["venv/bin/simulate_venv"], {"pkg"}),
|
| 197 |
+
(["pkg-stubs/__init__.pyi"], {"pkg", "pkg-stubs"}),
|
| 198 |
+
(["other-stubs/__init__.pyi"], {"pkg", "other-stubs"}),
|
| 199 |
+
(
|
| 200 |
+
# Type stubs can also be namespaced
|
| 201 |
+
["namespace-stubs/pkg/__init__.pyi"],
|
| 202 |
+
{"pkg", "namespace-stubs", "namespace-stubs.pkg"},
|
| 203 |
+
),
|
| 204 |
+
(
|
| 205 |
+
# Just the top-level package can have `-stubs`, ignore nested ones
|
| 206 |
+
["namespace-stubs/pkg-stubs/__init__.pyi"],
|
| 207 |
+
{"pkg", "namespace-stubs"},
|
| 208 |
+
),
|
| 209 |
+
(["_hidden/file.py"], {"pkg"}),
|
| 210 |
+
(["news/finalize.py"], {"pkg"}),
|
| 211 |
+
],
|
| 212 |
+
)
|
| 213 |
+
def test_flat_layout_with_extra_files(self, tmp_path, extra_files, pkgs):
|
| 214 |
+
files = self.FILES["flat"] + extra_files
|
| 215 |
+
_populate_project_dir(tmp_path, files, {})
|
| 216 |
+
dist = _get_dist(tmp_path, {})
|
| 217 |
+
assert set(dist.packages) == pkgs
|
| 218 |
+
|
| 219 |
+
@pytest.mark.parametrize(
|
| 220 |
+
"extra_files",
|
| 221 |
+
[
|
| 222 |
+
["other/__init__.py"],
|
| 223 |
+
["other/finalize.py"],
|
| 224 |
+
],
|
| 225 |
+
)
|
| 226 |
+
def test_flat_layout_with_dangerous_extra_files(self, tmp_path, extra_files):
|
| 227 |
+
files = self.FILES["flat"] + extra_files
|
| 228 |
+
_populate_project_dir(tmp_path, files, {})
|
| 229 |
+
with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
|
| 230 |
+
_get_dist(tmp_path, {})
|
| 231 |
+
|
| 232 |
+
def test_flat_layout_with_single_module(self, tmp_path):
|
| 233 |
+
files = self.FILES["single_module"] + ["invalid-module-name.py"]
|
| 234 |
+
_populate_project_dir(tmp_path, files, {})
|
| 235 |
+
dist = _get_dist(tmp_path, {})
|
| 236 |
+
assert set(dist.py_modules) == {"pkg"}
|
| 237 |
+
|
| 238 |
+
def test_flat_layout_with_multiple_modules(self, tmp_path):
|
| 239 |
+
files = self.FILES["single_module"] + ["valid_module_name.py"]
|
| 240 |
+
_populate_project_dir(tmp_path, files, {})
|
| 241 |
+
with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
|
| 242 |
+
_get_dist(tmp_path, {})
|
| 243 |
+
|
| 244 |
+
def test_py_modules_when_wheel_dir_is_cwd(self, tmp_path):
|
| 245 |
+
"""Regression for issue 3692"""
|
| 246 |
+
from setuptools import build_meta
|
| 247 |
+
|
| 248 |
+
pyproject = '[project]\nname = "test"\nversion = "1"'
|
| 249 |
+
(tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8")
|
| 250 |
+
(tmp_path / "foo.py").touch()
|
| 251 |
+
with jaraco.path.DirectoryStack().context(tmp_path):
|
| 252 |
+
build_meta.build_wheel(".")
|
| 253 |
+
# Ensure py_modules are found
|
| 254 |
+
wheel_files = get_wheel_members(next(tmp_path.glob("*.whl")))
|
| 255 |
+
assert "foo.py" in wheel_files
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
class TestNoConfig:
|
| 259 |
+
DEFAULT_VERSION = "0.0.0" # Default version given by setuptools
|
| 260 |
+
|
| 261 |
+
EXAMPLES = {
|
| 262 |
+
"pkg1": ["src/pkg1.py"],
|
| 263 |
+
"pkg2": ["src/pkg2/__init__.py"],
|
| 264 |
+
"pkg3": ["src/pkg3/__init__.py", "src/pkg3-stubs/__init__.py"],
|
| 265 |
+
"pkg4": ["pkg4/__init__.py", "pkg4-stubs/__init__.py"],
|
| 266 |
+
"ns.nested.pkg1": ["src/ns/nested/pkg1/__init__.py"],
|
| 267 |
+
"ns.nested.pkg2": ["ns/nested/pkg2/__init__.py"],
|
| 268 |
+
}
|
| 269 |
+
|
| 270 |
+
@pytest.mark.parametrize("example", EXAMPLES.keys())
|
| 271 |
+
def test_discover_name(self, tmp_path, example):
|
| 272 |
+
_populate_project_dir(tmp_path, self.EXAMPLES[example], {})
|
| 273 |
+
dist = _get_dist(tmp_path, {})
|
| 274 |
+
assert dist.get_name() == example
|
| 275 |
+
|
| 276 |
+
def test_build_with_discovered_name(self, tmp_path):
|
| 277 |
+
files = ["src/ns/nested/pkg/__init__.py"]
|
| 278 |
+
_populate_project_dir(tmp_path, files, {})
|
| 279 |
+
_run_build(tmp_path, "--sdist")
|
| 280 |
+
# Expected distribution file
|
| 281 |
+
dist_file = tmp_path / f"dist/ns_nested_pkg-{self.DEFAULT_VERSION}.tar.gz"
|
| 282 |
+
assert dist_file.is_file()
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
class TestWithAttrDirective:
|
| 286 |
+
@pytest.mark.parametrize(
|
| 287 |
+
("folder", "opts"),
|
| 288 |
+
[
|
| 289 |
+
("src", {}),
|
| 290 |
+
("lib", {"packages": "find:", "packages.find": {"where": "lib"}}),
|
| 291 |
+
],
|
| 292 |
+
)
|
| 293 |
+
def test_setupcfg_metadata(self, tmp_path, folder, opts):
|
| 294 |
+
files = [f"{folder}/pkg/__init__.py", "setup.cfg"]
|
| 295 |
+
_populate_project_dir(tmp_path, files, opts)
|
| 296 |
+
|
| 297 |
+
config = (tmp_path / "setup.cfg").read_text(encoding="utf-8")
|
| 298 |
+
overwrite = {
|
| 299 |
+
folder: {"pkg": {"__init__.py": "version = 42"}},
|
| 300 |
+
"setup.cfg": "[metadata]\nversion = attr: pkg.version\n" + config,
|
| 301 |
+
}
|
| 302 |
+
jaraco.path.build(overwrite, prefix=tmp_path)
|
| 303 |
+
|
| 304 |
+
dist = _get_dist(tmp_path, {})
|
| 305 |
+
assert dist.get_name() == "pkg"
|
| 306 |
+
assert dist.get_version() == "42"
|
| 307 |
+
assert dist.package_dir
|
| 308 |
+
package_path = find_package_path("pkg", dist.package_dir, tmp_path)
|
| 309 |
+
assert os.path.exists(package_path)
|
| 310 |
+
assert folder in Path(package_path).parts()
|
| 311 |
+
|
| 312 |
+
_run_build(tmp_path, "--sdist")
|
| 313 |
+
dist_file = tmp_path / "dist/pkg-42.tar.gz"
|
| 314 |
+
assert dist_file.is_file()
|
| 315 |
+
|
| 316 |
+
def test_pyproject_metadata(self, tmp_path):
|
| 317 |
+
_populate_project_dir(tmp_path, ["src/pkg/__init__.py"], {})
|
| 318 |
+
|
| 319 |
+
overwrite = {
|
| 320 |
+
"src": {"pkg": {"__init__.py": "version = 42"}},
|
| 321 |
+
"pyproject.toml": (
|
| 322 |
+
"[project]\nname = 'pkg'\ndynamic = ['version']\n"
|
| 323 |
+
"[tool.setuptools.dynamic]\nversion = {attr = 'pkg.version'}\n"
|
| 324 |
+
),
|
| 325 |
+
}
|
| 326 |
+
jaraco.path.build(overwrite, prefix=tmp_path)
|
| 327 |
+
|
| 328 |
+
dist = _get_dist(tmp_path, {})
|
| 329 |
+
assert dist.get_version() == "42"
|
| 330 |
+
assert dist.package_dir == {"": "src"}
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
class TestWithCExtension:
|
| 334 |
+
def _simulate_package_with_extension(self, tmp_path):
|
| 335 |
+
# This example is based on: https://github.com/nucleic/kiwi/tree/1.4.0
|
| 336 |
+
files = [
|
| 337 |
+
"benchmarks/file.py",
|
| 338 |
+
"docs/Makefile",
|
| 339 |
+
"docs/requirements.txt",
|
| 340 |
+
"docs/source/conf.py",
|
| 341 |
+
"proj/header.h",
|
| 342 |
+
"proj/file.py",
|
| 343 |
+
"py/proj.cpp",
|
| 344 |
+
"py/other.cpp",
|
| 345 |
+
"py/file.py",
|
| 346 |
+
"py/py.typed",
|
| 347 |
+
"py/tests/test_proj.py",
|
| 348 |
+
"README.rst",
|
| 349 |
+
]
|
| 350 |
+
_populate_project_dir(tmp_path, files, {})
|
| 351 |
+
|
| 352 |
+
setup_script = """
|
| 353 |
+
from setuptools import Extension, setup
|
| 354 |
+
|
| 355 |
+
ext_modules = [
|
| 356 |
+
Extension(
|
| 357 |
+
"proj",
|
| 358 |
+
["py/proj.cpp", "py/other.cpp"],
|
| 359 |
+
include_dirs=["."],
|
| 360 |
+
language="c++",
|
| 361 |
+
),
|
| 362 |
+
]
|
| 363 |
+
setup(ext_modules=ext_modules)
|
| 364 |
+
"""
|
| 365 |
+
(tmp_path / "setup.py").write_text(DALS(setup_script), encoding="utf-8")
|
| 366 |
+
|
| 367 |
+
def test_skip_discovery_with_setupcfg_metadata(self, tmp_path):
|
| 368 |
+
"""Ensure that auto-discovery is not triggered when the project is based on
|
| 369 |
+
C-extensions only, for backward compatibility.
|
| 370 |
+
"""
|
| 371 |
+
self._simulate_package_with_extension(tmp_path)
|
| 372 |
+
|
| 373 |
+
pyproject = """
|
| 374 |
+
[build-system]
|
| 375 |
+
requires = []
|
| 376 |
+
build-backend = 'setuptools.build_meta'
|
| 377 |
+
"""
|
| 378 |
+
(tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8")
|
| 379 |
+
|
| 380 |
+
setupcfg = """
|
| 381 |
+
[metadata]
|
| 382 |
+
name = proj
|
| 383 |
+
version = 42
|
| 384 |
+
"""
|
| 385 |
+
(tmp_path / "setup.cfg").write_text(DALS(setupcfg), encoding="utf-8")
|
| 386 |
+
|
| 387 |
+
dist = _get_dist(tmp_path, {})
|
| 388 |
+
assert dist.get_name() == "proj"
|
| 389 |
+
assert dist.get_version() == "42"
|
| 390 |
+
assert dist.py_modules is None
|
| 391 |
+
assert dist.packages is None
|
| 392 |
+
assert len(dist.ext_modules) == 1
|
| 393 |
+
assert dist.ext_modules[0].name == "proj"
|
| 394 |
+
|
| 395 |
+
def test_dont_skip_discovery_with_pyproject_metadata(self, tmp_path):
|
| 396 |
+
"""When opting-in to pyproject.toml metadata, auto-discovery will be active if
|
| 397 |
+
the package lists C-extensions, but does not configure py-modules or packages.
|
| 398 |
+
|
| 399 |
+
This way we ensure users with complex package layouts that would lead to the
|
| 400 |
+
discovery of multiple top-level modules/packages see errors and are forced to
|
| 401 |
+
explicitly set ``packages`` or ``py-modules``.
|
| 402 |
+
"""
|
| 403 |
+
self._simulate_package_with_extension(tmp_path)
|
| 404 |
+
|
| 405 |
+
pyproject = """
|
| 406 |
+
[project]
|
| 407 |
+
name = 'proj'
|
| 408 |
+
version = '42'
|
| 409 |
+
"""
|
| 410 |
+
(tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8")
|
| 411 |
+
with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
|
| 412 |
+
_get_dist(tmp_path, {})
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
class TestWithPackageData:
|
| 416 |
+
def _simulate_package_with_data_files(self, tmp_path, src_root):
|
| 417 |
+
files = [
|
| 418 |
+
f"{src_root}/proj/__init__.py",
|
| 419 |
+
f"{src_root}/proj/file1.txt",
|
| 420 |
+
f"{src_root}/proj/nested/file2.txt",
|
| 421 |
+
]
|
| 422 |
+
_populate_project_dir(tmp_path, files, {})
|
| 423 |
+
|
| 424 |
+
manifest = """
|
| 425 |
+
global-include *.py *.txt
|
| 426 |
+
"""
|
| 427 |
+
(tmp_path / "MANIFEST.in").write_text(DALS(manifest), encoding="utf-8")
|
| 428 |
+
|
| 429 |
+
EXAMPLE_SETUPCFG = """
|
| 430 |
+
[metadata]
|
| 431 |
+
name = proj
|
| 432 |
+
version = 42
|
| 433 |
+
|
| 434 |
+
[options]
|
| 435 |
+
include_package_data = True
|
| 436 |
+
"""
|
| 437 |
+
EXAMPLE_PYPROJECT = """
|
| 438 |
+
[project]
|
| 439 |
+
name = "proj"
|
| 440 |
+
version = "42"
|
| 441 |
+
"""
|
| 442 |
+
|
| 443 |
+
PYPROJECT_PACKAGE_DIR = """
|
| 444 |
+
[tool.setuptools]
|
| 445 |
+
package-dir = {"" = "src"}
|
| 446 |
+
"""
|
| 447 |
+
|
| 448 |
+
@pytest.mark.parametrize(
|
| 449 |
+
("src_root", "files"),
|
| 450 |
+
[
|
| 451 |
+
(".", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
|
| 452 |
+
(".", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
|
| 453 |
+
("src", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
|
| 454 |
+
("src", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
|
| 455 |
+
(
|
| 456 |
+
"src",
|
| 457 |
+
{
|
| 458 |
+
"setup.cfg": DALS(EXAMPLE_SETUPCFG)
|
| 459 |
+
+ DALS(
|
| 460 |
+
"""
|
| 461 |
+
packages = find:
|
| 462 |
+
package_dir =
|
| 463 |
+
=src
|
| 464 |
+
|
| 465 |
+
[options.packages.find]
|
| 466 |
+
where = src
|
| 467 |
+
"""
|
| 468 |
+
)
|
| 469 |
+
},
|
| 470 |
+
),
|
| 471 |
+
(
|
| 472 |
+
"src",
|
| 473 |
+
{
|
| 474 |
+
"pyproject.toml": DALS(EXAMPLE_PYPROJECT)
|
| 475 |
+
+ DALS(
|
| 476 |
+
"""
|
| 477 |
+
[tool.setuptools]
|
| 478 |
+
package-dir = {"" = "src"}
|
| 479 |
+
"""
|
| 480 |
+
)
|
| 481 |
+
},
|
| 482 |
+
),
|
| 483 |
+
],
|
| 484 |
+
)
|
| 485 |
+
def test_include_package_data(self, tmp_path, src_root, files):
|
| 486 |
+
"""
|
| 487 |
+
Make sure auto-discovery does not affect package include_package_data.
|
| 488 |
+
See issue #3196.
|
| 489 |
+
"""
|
| 490 |
+
jaraco.path.build(files, prefix=str(tmp_path))
|
| 491 |
+
self._simulate_package_with_data_files(tmp_path, src_root)
|
| 492 |
+
|
| 493 |
+
expected = {
|
| 494 |
+
os.path.normpath(f"{src_root}/proj/file1.txt").replace(os.sep, "/"),
|
| 495 |
+
os.path.normpath(f"{src_root}/proj/nested/file2.txt").replace(os.sep, "/"),
|
| 496 |
+
}
|
| 497 |
+
|
| 498 |
+
_run_build(tmp_path)
|
| 499 |
+
|
| 500 |
+
sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
|
| 501 |
+
print("~~~~~ sdist_members ~~~~~")
|
| 502 |
+
print('\n'.join(sdist_files))
|
| 503 |
+
assert sdist_files >= expected
|
| 504 |
+
|
| 505 |
+
wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
|
| 506 |
+
print("~~~~~ wheel_members ~~~~~")
|
| 507 |
+
print('\n'.join(wheel_files))
|
| 508 |
+
orig_files = {f.replace("src/", "").replace("lib/", "") for f in expected}
|
| 509 |
+
assert wheel_files >= orig_files
|
| 510 |
+
|
| 511 |
+
|
| 512 |
+
def test_compatible_with_numpy_configuration(tmp_path):
|
| 513 |
+
files = [
|
| 514 |
+
"dir1/__init__.py",
|
| 515 |
+
"dir2/__init__.py",
|
| 516 |
+
"file.py",
|
| 517 |
+
]
|
| 518 |
+
_populate_project_dir(tmp_path, files, {})
|
| 519 |
+
dist = Distribution({})
|
| 520 |
+
dist.configuration = object()
|
| 521 |
+
dist.set_defaults()
|
| 522 |
+
assert dist.py_modules is None
|
| 523 |
+
assert dist.packages is None
|
| 524 |
+
|
| 525 |
+
|
| 526 |
+
def test_name_discovery_doesnt_break_cli(tmpdir_cwd):
|
| 527 |
+
jaraco.path.build({"pkg.py": ""})
|
| 528 |
+
dist = Distribution({})
|
| 529 |
+
dist.script_args = ["--name"]
|
| 530 |
+
dist.set_defaults()
|
| 531 |
+
dist.parse_command_line() # <-- no exception should be raised here.
|
| 532 |
+
assert dist.get_name() == "pkg"
|
| 533 |
+
|
| 534 |
+
|
| 535 |
+
def test_preserve_explicit_name_with_dynamic_version(tmpdir_cwd, monkeypatch):
|
| 536 |
+
"""According to #3545 it seems that ``name`` discovery is running,
|
| 537 |
+
even when the project already explicitly sets it.
|
| 538 |
+
This seems to be related to parsing of dynamic versions (via ``attr`` directive),
|
| 539 |
+
which requires the auto-discovery of ``package_dir``.
|
| 540 |
+
"""
|
| 541 |
+
files = {
|
| 542 |
+
"src": {
|
| 543 |
+
"pkg": {"__init__.py": "__version__ = 42\n"},
|
| 544 |
+
},
|
| 545 |
+
"pyproject.toml": DALS(
|
| 546 |
+
"""
|
| 547 |
+
[project]
|
| 548 |
+
name = "myproj" # purposefully different from package name
|
| 549 |
+
dynamic = ["version"]
|
| 550 |
+
[tool.setuptools.dynamic]
|
| 551 |
+
version = {"attr" = "pkg.__version__"}
|
| 552 |
+
"""
|
| 553 |
+
),
|
| 554 |
+
}
|
| 555 |
+
jaraco.path.build(files)
|
| 556 |
+
dist = Distribution({})
|
| 557 |
+
orig_analyse_name = dist.set_defaults.analyse_name
|
| 558 |
+
|
| 559 |
+
def spy_analyse_name():
|
| 560 |
+
# We can check if name discovery was triggered by ensuring the original
|
| 561 |
+
# name remains instead of the package name.
|
| 562 |
+
orig_analyse_name()
|
| 563 |
+
assert dist.get_name() == "myproj"
|
| 564 |
+
|
| 565 |
+
monkeypatch.setattr(dist.set_defaults, "analyse_name", spy_analyse_name)
|
| 566 |
+
dist.parse_config_files()
|
| 567 |
+
assert dist.get_version() == "42"
|
| 568 |
+
assert set(dist.packages) == {"pkg"}
|
| 569 |
+
|
| 570 |
+
|
| 571 |
+
def _populate_project_dir(root, files, options):
|
| 572 |
+
# NOTE: Currently pypa/build will refuse to build the project if no
|
| 573 |
+
# `pyproject.toml` or `setup.py` is found. So it is impossible to do
|
| 574 |
+
# completely "config-less" projects.
|
| 575 |
+
basic = {
|
| 576 |
+
"setup.py": "import setuptools\nsetuptools.setup()",
|
| 577 |
+
"README.md": "# Example Package",
|
| 578 |
+
"LICENSE": "Copyright (c) 2018",
|
| 579 |
+
}
|
| 580 |
+
jaraco.path.build(basic, prefix=root)
|
| 581 |
+
_write_setupcfg(root, options)
|
| 582 |
+
paths = (root / f for f in files)
|
| 583 |
+
for path in paths:
|
| 584 |
+
path.parent.mkdir(exist_ok=True, parents=True)
|
| 585 |
+
path.touch()
|
| 586 |
+
|
| 587 |
+
|
| 588 |
+
def _write_setupcfg(root, options):
|
| 589 |
+
if not options:
|
| 590 |
+
print("~~~~~ **NO** setup.cfg ~~~~~")
|
| 591 |
+
return
|
| 592 |
+
setupcfg = ConfigParser()
|
| 593 |
+
setupcfg.add_section("options")
|
| 594 |
+
for key, value in options.items():
|
| 595 |
+
if key == "packages.find":
|
| 596 |
+
setupcfg.add_section(f"options.{key}")
|
| 597 |
+
setupcfg[f"options.{key}"].update(value)
|
| 598 |
+
elif isinstance(value, list):
|
| 599 |
+
setupcfg["options"][key] = ", ".join(value)
|
| 600 |
+
elif isinstance(value, dict):
|
| 601 |
+
str_value = "\n".join(f"\t{k} = {v}" for k, v in value.items())
|
| 602 |
+
setupcfg["options"][key] = "\n" + str_value
|
| 603 |
+
else:
|
| 604 |
+
setupcfg["options"][key] = str(value)
|
| 605 |
+
with open(root / "setup.cfg", "w", encoding="utf-8") as f:
|
| 606 |
+
setupcfg.write(f)
|
| 607 |
+
print("~~~~~ setup.cfg ~~~~~")
|
| 608 |
+
print((root / "setup.cfg").read_text(encoding="utf-8"))
|
| 609 |
+
|
| 610 |
+
|
| 611 |
+
def _run_build(path, *flags):
|
| 612 |
+
cmd = [sys.executable, "-m", "build", "--no-isolation", *flags, str(path)]
|
| 613 |
+
return run(cmd, env={'DISTUTILS_DEBUG': ''})
|
| 614 |
+
|
| 615 |
+
|
| 616 |
+
def _get_dist(dist_path, attrs):
|
| 617 |
+
root = "/".join(os.path.split(dist_path)) # POSIX-style
|
| 618 |
+
|
| 619 |
+
script = dist_path / 'setup.py'
|
| 620 |
+
if script.exists():
|
| 621 |
+
with Path(dist_path):
|
| 622 |
+
dist = cast(
|
| 623 |
+
Distribution,
|
| 624 |
+
distutils.core.run_setup("setup.py", {}, stop_after="init"),
|
| 625 |
+
)
|
| 626 |
+
else:
|
| 627 |
+
dist = Distribution(attrs)
|
| 628 |
+
|
| 629 |
+
dist.src_root = root
|
| 630 |
+
dist.script_name = "setup.py"
|
| 631 |
+
with Path(dist_path):
|
| 632 |
+
dist.parse_config_files()
|
| 633 |
+
|
| 634 |
+
dist.set_defaults()
|
| 635 |
+
return dist
|
| 636 |
+
|
| 637 |
+
|
| 638 |
+
def _run_sdist_programatically(dist_path, attrs):
|
| 639 |
+
dist = _get_dist(dist_path, attrs)
|
| 640 |
+
cmd = sdist(dist)
|
| 641 |
+
cmd.ensure_finalized()
|
| 642 |
+
assert cmd.distribution.packages or cmd.distribution.py_modules
|
| 643 |
+
|
| 644 |
+
with quiet(), Path(dist_path):
|
| 645 |
+
cmd.run()
|
| 646 |
+
|
| 647 |
+
return dist, cmd
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_core_metadata.py
ADDED
|
@@ -0,0 +1,577 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import functools
|
| 4 |
+
import importlib
|
| 5 |
+
import io
|
| 6 |
+
from email import message_from_string
|
| 7 |
+
from email.generator import Generator
|
| 8 |
+
from email.message import EmailMessage, Message
|
| 9 |
+
from email.parser import Parser
|
| 10 |
+
from email.policy import EmailPolicy
|
| 11 |
+
from inspect import cleandoc
|
| 12 |
+
from pathlib import Path
|
| 13 |
+
from unittest.mock import Mock
|
| 14 |
+
|
| 15 |
+
import pytest
|
| 16 |
+
from packaging.metadata import Metadata
|
| 17 |
+
from packaging.requirements import Requirement
|
| 18 |
+
|
| 19 |
+
from setuptools import _reqs, sic
|
| 20 |
+
from setuptools._core_metadata import rfc822_escape, rfc822_unescape
|
| 21 |
+
from setuptools.command.egg_info import egg_info, write_requirements
|
| 22 |
+
from setuptools.config import expand, setupcfg
|
| 23 |
+
from setuptools.dist import Distribution
|
| 24 |
+
|
| 25 |
+
from .config.downloads import retrieve_file, urls_from_file
|
| 26 |
+
|
| 27 |
+
EXAMPLE_BASE_INFO = dict(
|
| 28 |
+
name="package",
|
| 29 |
+
version="0.0.1",
|
| 30 |
+
author="Foo Bar",
|
| 31 |
+
author_email="foo@bar.net",
|
| 32 |
+
long_description="Long\ndescription",
|
| 33 |
+
description="Short description",
|
| 34 |
+
keywords=["one", "two"],
|
| 35 |
+
)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@pytest.mark.parametrize(
|
| 39 |
+
("content", "result"),
|
| 40 |
+
(
|
| 41 |
+
pytest.param(
|
| 42 |
+
"Just a single line",
|
| 43 |
+
None,
|
| 44 |
+
id="single_line",
|
| 45 |
+
),
|
| 46 |
+
pytest.param(
|
| 47 |
+
"Multiline\nText\nwithout\nextra indents\n",
|
| 48 |
+
None,
|
| 49 |
+
id="multiline",
|
| 50 |
+
),
|
| 51 |
+
pytest.param(
|
| 52 |
+
"Multiline\n With\n\nadditional\n indentation",
|
| 53 |
+
None,
|
| 54 |
+
id="multiline_with_indentation",
|
| 55 |
+
),
|
| 56 |
+
pytest.param(
|
| 57 |
+
" Leading whitespace",
|
| 58 |
+
"Leading whitespace",
|
| 59 |
+
id="remove_leading_whitespace",
|
| 60 |
+
),
|
| 61 |
+
pytest.param(
|
| 62 |
+
" Leading whitespace\nIn\n Multiline comment",
|
| 63 |
+
"Leading whitespace\nIn\n Multiline comment",
|
| 64 |
+
id="remove_leading_whitespace_multiline",
|
| 65 |
+
),
|
| 66 |
+
),
|
| 67 |
+
)
|
| 68 |
+
def test_rfc822_unescape(content, result):
|
| 69 |
+
assert (result or content) == rfc822_unescape(rfc822_escape(content))
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def __read_test_cases():
|
| 73 |
+
base = EXAMPLE_BASE_INFO
|
| 74 |
+
|
| 75 |
+
params = functools.partial(dict, base)
|
| 76 |
+
|
| 77 |
+
return [
|
| 78 |
+
('Metadata version 1.0', params()),
|
| 79 |
+
(
|
| 80 |
+
'Metadata Version 1.0: Short long description',
|
| 81 |
+
params(
|
| 82 |
+
long_description='Short long description',
|
| 83 |
+
),
|
| 84 |
+
),
|
| 85 |
+
(
|
| 86 |
+
'Metadata version 1.1: Classifiers',
|
| 87 |
+
params(
|
| 88 |
+
classifiers=[
|
| 89 |
+
'Programming Language :: Python :: 3',
|
| 90 |
+
'Programming Language :: Python :: 3.7',
|
| 91 |
+
'License :: OSI Approved :: MIT License',
|
| 92 |
+
],
|
| 93 |
+
),
|
| 94 |
+
),
|
| 95 |
+
(
|
| 96 |
+
'Metadata version 1.1: Download URL',
|
| 97 |
+
params(
|
| 98 |
+
download_url='https://example.com',
|
| 99 |
+
),
|
| 100 |
+
),
|
| 101 |
+
(
|
| 102 |
+
'Metadata Version 1.2: Requires-Python',
|
| 103 |
+
params(
|
| 104 |
+
python_requires='>=3.7',
|
| 105 |
+
),
|
| 106 |
+
),
|
| 107 |
+
pytest.param(
|
| 108 |
+
'Metadata Version 1.2: Project-Url',
|
| 109 |
+
params(project_urls=dict(Foo='https://example.bar')),
|
| 110 |
+
marks=pytest.mark.xfail(
|
| 111 |
+
reason="Issue #1578: project_urls not read",
|
| 112 |
+
),
|
| 113 |
+
),
|
| 114 |
+
(
|
| 115 |
+
'Metadata Version 2.1: Long Description Content Type',
|
| 116 |
+
params(
|
| 117 |
+
long_description_content_type='text/x-rst; charset=UTF-8',
|
| 118 |
+
),
|
| 119 |
+
),
|
| 120 |
+
(
|
| 121 |
+
'License',
|
| 122 |
+
params(
|
| 123 |
+
license='MIT',
|
| 124 |
+
),
|
| 125 |
+
),
|
| 126 |
+
(
|
| 127 |
+
'License multiline',
|
| 128 |
+
params(
|
| 129 |
+
license='This is a long license \nover multiple lines',
|
| 130 |
+
),
|
| 131 |
+
),
|
| 132 |
+
pytest.param(
|
| 133 |
+
'Metadata Version 2.1: Provides Extra',
|
| 134 |
+
params(provides_extras=['foo', 'bar']),
|
| 135 |
+
marks=pytest.mark.xfail(reason="provides_extras not read"),
|
| 136 |
+
),
|
| 137 |
+
(
|
| 138 |
+
'Missing author',
|
| 139 |
+
dict(
|
| 140 |
+
name='foo',
|
| 141 |
+
version='1.0.0',
|
| 142 |
+
author_email='snorri@sturluson.name',
|
| 143 |
+
),
|
| 144 |
+
),
|
| 145 |
+
(
|
| 146 |
+
'Missing author e-mail',
|
| 147 |
+
dict(
|
| 148 |
+
name='foo',
|
| 149 |
+
version='1.0.0',
|
| 150 |
+
author='Snorri Sturluson',
|
| 151 |
+
),
|
| 152 |
+
),
|
| 153 |
+
(
|
| 154 |
+
'Missing author and e-mail',
|
| 155 |
+
dict(
|
| 156 |
+
name='foo',
|
| 157 |
+
version='1.0.0',
|
| 158 |
+
),
|
| 159 |
+
),
|
| 160 |
+
(
|
| 161 |
+
'Bypass normalized version',
|
| 162 |
+
dict(
|
| 163 |
+
name='foo',
|
| 164 |
+
version=sic('1.0.0a'),
|
| 165 |
+
),
|
| 166 |
+
),
|
| 167 |
+
]
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
@pytest.mark.parametrize(("name", "attrs"), __read_test_cases())
|
| 171 |
+
def test_read_metadata(name, attrs):
|
| 172 |
+
dist = Distribution(attrs)
|
| 173 |
+
metadata_out = dist.metadata
|
| 174 |
+
dist_class = metadata_out.__class__
|
| 175 |
+
|
| 176 |
+
# Write to PKG_INFO and then load into a new metadata object
|
| 177 |
+
PKG_INFO = io.StringIO()
|
| 178 |
+
|
| 179 |
+
metadata_out.write_pkg_file(PKG_INFO)
|
| 180 |
+
PKG_INFO.seek(0)
|
| 181 |
+
pkg_info = PKG_INFO.read()
|
| 182 |
+
assert _valid_metadata(pkg_info)
|
| 183 |
+
|
| 184 |
+
PKG_INFO.seek(0)
|
| 185 |
+
metadata_in = dist_class()
|
| 186 |
+
metadata_in.read_pkg_file(PKG_INFO)
|
| 187 |
+
|
| 188 |
+
tested_attrs = [
|
| 189 |
+
('name', dist_class.get_name),
|
| 190 |
+
('version', dist_class.get_version),
|
| 191 |
+
('author', dist_class.get_contact),
|
| 192 |
+
('author_email', dist_class.get_contact_email),
|
| 193 |
+
('metadata_version', dist_class.get_metadata_version),
|
| 194 |
+
('provides', dist_class.get_provides),
|
| 195 |
+
('description', dist_class.get_description),
|
| 196 |
+
('long_description', dist_class.get_long_description),
|
| 197 |
+
('download_url', dist_class.get_download_url),
|
| 198 |
+
('keywords', dist_class.get_keywords),
|
| 199 |
+
('platforms', dist_class.get_platforms),
|
| 200 |
+
('obsoletes', dist_class.get_obsoletes),
|
| 201 |
+
('requires', dist_class.get_requires),
|
| 202 |
+
('classifiers', dist_class.get_classifiers),
|
| 203 |
+
('project_urls', lambda s: getattr(s, 'project_urls', {})),
|
| 204 |
+
('provides_extras', lambda s: getattr(s, 'provides_extras', {})),
|
| 205 |
+
]
|
| 206 |
+
|
| 207 |
+
for attr, getter in tested_attrs:
|
| 208 |
+
assert getter(metadata_in) == getter(metadata_out)
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def __maintainer_test_cases():
|
| 212 |
+
attrs = {"name": "package", "version": "1.0", "description": "xxx"}
|
| 213 |
+
|
| 214 |
+
def merge_dicts(d1, d2):
|
| 215 |
+
d1 = d1.copy()
|
| 216 |
+
d1.update(d2)
|
| 217 |
+
|
| 218 |
+
return d1
|
| 219 |
+
|
| 220 |
+
return [
|
| 221 |
+
('No author, no maintainer', attrs.copy()),
|
| 222 |
+
(
|
| 223 |
+
'Author (no e-mail), no maintainer',
|
| 224 |
+
merge_dicts(attrs, {'author': 'Author Name'}),
|
| 225 |
+
),
|
| 226 |
+
(
|
| 227 |
+
'Author (e-mail), no maintainer',
|
| 228 |
+
merge_dicts(
|
| 229 |
+
attrs, {'author': 'Author Name', 'author_email': 'author@name.com'}
|
| 230 |
+
),
|
| 231 |
+
),
|
| 232 |
+
(
|
| 233 |
+
'No author, maintainer (no e-mail)',
|
| 234 |
+
merge_dicts(attrs, {'maintainer': 'Maintainer Name'}),
|
| 235 |
+
),
|
| 236 |
+
(
|
| 237 |
+
'No author, maintainer (e-mail)',
|
| 238 |
+
merge_dicts(
|
| 239 |
+
attrs,
|
| 240 |
+
{
|
| 241 |
+
'maintainer': 'Maintainer Name',
|
| 242 |
+
'maintainer_email': 'maintainer@name.com',
|
| 243 |
+
},
|
| 244 |
+
),
|
| 245 |
+
),
|
| 246 |
+
(
|
| 247 |
+
'Author (no e-mail), Maintainer (no-email)',
|
| 248 |
+
merge_dicts(
|
| 249 |
+
attrs, {'author': 'Author Name', 'maintainer': 'Maintainer Name'}
|
| 250 |
+
),
|
| 251 |
+
),
|
| 252 |
+
(
|
| 253 |
+
'Author (e-mail), Maintainer (e-mail)',
|
| 254 |
+
merge_dicts(
|
| 255 |
+
attrs,
|
| 256 |
+
{
|
| 257 |
+
'author': 'Author Name',
|
| 258 |
+
'author_email': 'author@name.com',
|
| 259 |
+
'maintainer': 'Maintainer Name',
|
| 260 |
+
'maintainer_email': 'maintainer@name.com',
|
| 261 |
+
},
|
| 262 |
+
),
|
| 263 |
+
),
|
| 264 |
+
(
|
| 265 |
+
'No author (e-mail), no maintainer (e-mail)',
|
| 266 |
+
merge_dicts(
|
| 267 |
+
attrs,
|
| 268 |
+
{
|
| 269 |
+
'author_email': 'author@name.com',
|
| 270 |
+
'maintainer_email': 'maintainer@name.com',
|
| 271 |
+
},
|
| 272 |
+
),
|
| 273 |
+
),
|
| 274 |
+
('Author unicode', merge_dicts(attrs, {'author': '鉄沢寛'})),
|
| 275 |
+
('Maintainer unicode', merge_dicts(attrs, {'maintainer': 'Jan Łukasiewicz'})),
|
| 276 |
+
]
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
@pytest.mark.parametrize(("name", "attrs"), __maintainer_test_cases())
|
| 280 |
+
def test_maintainer_author(name, attrs, tmpdir):
|
| 281 |
+
tested_keys = {
|
| 282 |
+
'author': 'Author',
|
| 283 |
+
'author_email': 'Author-email',
|
| 284 |
+
'maintainer': 'Maintainer',
|
| 285 |
+
'maintainer_email': 'Maintainer-email',
|
| 286 |
+
}
|
| 287 |
+
|
| 288 |
+
# Generate a PKG-INFO file
|
| 289 |
+
dist = Distribution(attrs)
|
| 290 |
+
fn = tmpdir.mkdir('pkg_info')
|
| 291 |
+
fn_s = str(fn)
|
| 292 |
+
|
| 293 |
+
dist.metadata.write_pkg_info(fn_s)
|
| 294 |
+
|
| 295 |
+
with open(str(fn.join('PKG-INFO')), 'r', encoding='utf-8') as f:
|
| 296 |
+
pkg_info = f.read()
|
| 297 |
+
|
| 298 |
+
assert _valid_metadata(pkg_info)
|
| 299 |
+
|
| 300 |
+
# Drop blank lines and strip lines from default description
|
| 301 |
+
raw_pkg_lines = pkg_info.splitlines()
|
| 302 |
+
pkg_lines = list(filter(None, raw_pkg_lines[:-2]))
|
| 303 |
+
|
| 304 |
+
pkg_lines_set = set(pkg_lines)
|
| 305 |
+
|
| 306 |
+
# Duplicate lines should not be generated
|
| 307 |
+
assert len(pkg_lines) == len(pkg_lines_set)
|
| 308 |
+
|
| 309 |
+
for fkey, dkey in tested_keys.items():
|
| 310 |
+
val = attrs.get(dkey, None)
|
| 311 |
+
if val is None:
|
| 312 |
+
for line in pkg_lines:
|
| 313 |
+
assert not line.startswith(fkey + ':')
|
| 314 |
+
else:
|
| 315 |
+
line = f'{fkey}: {val}'
|
| 316 |
+
assert line in pkg_lines_set
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
class TestParityWithMetadataFromPyPaWheel:
|
| 320 |
+
def base_example(self):
|
| 321 |
+
attrs = dict(
|
| 322 |
+
**EXAMPLE_BASE_INFO,
|
| 323 |
+
# Example with complex requirement definition
|
| 324 |
+
python_requires=">=3.8",
|
| 325 |
+
install_requires="""
|
| 326 |
+
packaging==23.2
|
| 327 |
+
more-itertools==8.8.0; extra == "other"
|
| 328 |
+
jaraco.text==3.7.0
|
| 329 |
+
importlib-resources==5.10.2; python_version<"3.8"
|
| 330 |
+
importlib-metadata==6.0.0 ; python_version<"3.8"
|
| 331 |
+
colorama>=0.4.4; sys_platform == "win32"
|
| 332 |
+
""",
|
| 333 |
+
extras_require={
|
| 334 |
+
"testing": """
|
| 335 |
+
pytest >= 6
|
| 336 |
+
pytest-checkdocs >= 2.4
|
| 337 |
+
tomli ; \\
|
| 338 |
+
# Using stdlib when possible
|
| 339 |
+
python_version < "3.11"
|
| 340 |
+
ini2toml[lite]>=0.9
|
| 341 |
+
""",
|
| 342 |
+
"other": [],
|
| 343 |
+
},
|
| 344 |
+
)
|
| 345 |
+
# Generate a PKG-INFO file using setuptools
|
| 346 |
+
return Distribution(attrs)
|
| 347 |
+
|
| 348 |
+
def test_requires_dist(self, tmp_path):
|
| 349 |
+
dist = self.base_example()
|
| 350 |
+
pkg_info = _get_pkginfo(dist)
|
| 351 |
+
assert _valid_metadata(pkg_info)
|
| 352 |
+
|
| 353 |
+
# Ensure Requires-Dist is present
|
| 354 |
+
expected = [
|
| 355 |
+
'Metadata-Version:',
|
| 356 |
+
'Requires-Python: >=3.8',
|
| 357 |
+
'Provides-Extra: other',
|
| 358 |
+
'Provides-Extra: testing',
|
| 359 |
+
'Requires-Dist: tomli; python_version < "3.11" and extra == "testing"',
|
| 360 |
+
'Requires-Dist: more-itertools==8.8.0; extra == "other"',
|
| 361 |
+
'Requires-Dist: ini2toml[lite]>=0.9; extra == "testing"',
|
| 362 |
+
]
|
| 363 |
+
for line in expected:
|
| 364 |
+
assert line in pkg_info
|
| 365 |
+
|
| 366 |
+
HERE = Path(__file__).parent
|
| 367 |
+
EXAMPLES_FILE = HERE / "config/setupcfg_examples.txt"
|
| 368 |
+
|
| 369 |
+
@pytest.fixture(params=[None, *urls_from_file(EXAMPLES_FILE)])
|
| 370 |
+
def dist(self, request, monkeypatch, tmp_path):
|
| 371 |
+
"""Example of distribution with arbitrary configuration"""
|
| 372 |
+
monkeypatch.chdir(tmp_path)
|
| 373 |
+
monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.42"))
|
| 374 |
+
monkeypatch.setattr(expand, "read_files", Mock(return_value="hello world"))
|
| 375 |
+
if request.param is None:
|
| 376 |
+
yield self.base_example()
|
| 377 |
+
else:
|
| 378 |
+
# Real-world usage
|
| 379 |
+
config = retrieve_file(request.param)
|
| 380 |
+
yield setupcfg.apply_configuration(Distribution({}), config)
|
| 381 |
+
|
| 382 |
+
@pytest.mark.uses_network
|
| 383 |
+
def test_equivalent_output(self, tmp_path, dist):
|
| 384 |
+
"""Ensure output from setuptools is equivalent to the one from `pypa/wheel`"""
|
| 385 |
+
# Generate a METADATA file using pypa/wheel for comparison
|
| 386 |
+
wheel_metadata = importlib.import_module("wheel.metadata")
|
| 387 |
+
pkginfo_to_metadata = getattr(wheel_metadata, "pkginfo_to_metadata", None)
|
| 388 |
+
|
| 389 |
+
if pkginfo_to_metadata is None: # pragma: nocover
|
| 390 |
+
pytest.xfail(
|
| 391 |
+
"wheel.metadata.pkginfo_to_metadata is undefined, "
|
| 392 |
+
"(this is likely to be caused by API changes in pypa/wheel"
|
| 393 |
+
)
|
| 394 |
+
|
| 395 |
+
# Generate an simplified "egg-info" dir for pypa/wheel to convert
|
| 396 |
+
pkg_info = _get_pkginfo(dist)
|
| 397 |
+
egg_info_dir = tmp_path / "pkg.egg-info"
|
| 398 |
+
egg_info_dir.mkdir(parents=True)
|
| 399 |
+
(egg_info_dir / "PKG-INFO").write_text(pkg_info, encoding="utf-8")
|
| 400 |
+
write_requirements(egg_info(dist), egg_info_dir, egg_info_dir / "requires.txt")
|
| 401 |
+
|
| 402 |
+
# Get pypa/wheel generated METADATA but normalize requirements formatting
|
| 403 |
+
metadata_msg = pkginfo_to_metadata(egg_info_dir, egg_info_dir / "PKG-INFO")
|
| 404 |
+
metadata_str = _normalize_metadata(metadata_msg)
|
| 405 |
+
pkg_info_msg = message_from_string(pkg_info)
|
| 406 |
+
pkg_info_str = _normalize_metadata(pkg_info_msg)
|
| 407 |
+
|
| 408 |
+
# Compare setuptools PKG-INFO x pypa/wheel METADATA
|
| 409 |
+
assert metadata_str == pkg_info_str
|
| 410 |
+
|
| 411 |
+
# Make sure it parses/serializes well in pypa/wheel
|
| 412 |
+
_assert_roundtrip_message(pkg_info)
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
class TestPEP643:
|
| 416 |
+
STATIC_CONFIG = {
|
| 417 |
+
"setup.cfg": cleandoc(
|
| 418 |
+
"""
|
| 419 |
+
[metadata]
|
| 420 |
+
name = package
|
| 421 |
+
version = 0.0.1
|
| 422 |
+
author = Foo Bar
|
| 423 |
+
author_email = foo@bar.net
|
| 424 |
+
long_description = Long
|
| 425 |
+
description
|
| 426 |
+
description = Short description
|
| 427 |
+
keywords = one, two
|
| 428 |
+
platforms = abcd
|
| 429 |
+
[options]
|
| 430 |
+
install_requires = requests
|
| 431 |
+
"""
|
| 432 |
+
),
|
| 433 |
+
"pyproject.toml": cleandoc(
|
| 434 |
+
"""
|
| 435 |
+
[project]
|
| 436 |
+
name = "package"
|
| 437 |
+
version = "0.0.1"
|
| 438 |
+
authors = [
|
| 439 |
+
{name = "Foo Bar", email = "foo@bar.net"}
|
| 440 |
+
]
|
| 441 |
+
description = "Short description"
|
| 442 |
+
readme = {text = "Long\\ndescription", content-type = "text/plain"}
|
| 443 |
+
keywords = ["one", "two"]
|
| 444 |
+
dependencies = ["requests"]
|
| 445 |
+
[tool.setuptools]
|
| 446 |
+
provides = ["abcd"]
|
| 447 |
+
obsoletes = ["abcd"]
|
| 448 |
+
"""
|
| 449 |
+
),
|
| 450 |
+
}
|
| 451 |
+
|
| 452 |
+
@pytest.mark.parametrize("file", STATIC_CONFIG.keys())
|
| 453 |
+
def test_static_config_has_no_dynamic(self, file, tmpdir_cwd):
|
| 454 |
+
Path(file).write_text(self.STATIC_CONFIG[file], encoding="utf-8")
|
| 455 |
+
metadata = _get_metadata()
|
| 456 |
+
assert metadata.get_all("Dynamic") is None
|
| 457 |
+
assert metadata.get_all("dynamic") is None
|
| 458 |
+
|
| 459 |
+
@pytest.mark.parametrize("file", STATIC_CONFIG.keys())
|
| 460 |
+
@pytest.mark.parametrize(
|
| 461 |
+
"fields",
|
| 462 |
+
[
|
| 463 |
+
# Single dynamic field
|
| 464 |
+
{"requires-python": ("python_requires", ">=3.12")},
|
| 465 |
+
{"author-email": ("author_email", "snoopy@peanuts.com")},
|
| 466 |
+
{"keywords": ("keywords", ["hello", "world"])},
|
| 467 |
+
{"platform": ("platforms", ["abcd"])},
|
| 468 |
+
# Multiple dynamic fields
|
| 469 |
+
{
|
| 470 |
+
"summary": ("description", "hello world"),
|
| 471 |
+
"description": ("long_description", "bla bla bla bla"),
|
| 472 |
+
"requires-dist": ("install_requires", ["hello-world"]),
|
| 473 |
+
},
|
| 474 |
+
],
|
| 475 |
+
)
|
| 476 |
+
def test_modified_fields_marked_as_dynamic(self, file, fields, tmpdir_cwd):
|
| 477 |
+
# We start with a static config
|
| 478 |
+
Path(file).write_text(self.STATIC_CONFIG[file], encoding="utf-8")
|
| 479 |
+
dist = _makedist()
|
| 480 |
+
|
| 481 |
+
# ... but then we simulate the effects of a plugin modifying the distribution
|
| 482 |
+
for attr, value in fields.values():
|
| 483 |
+
# `dist` and `dist.metadata` are complicated...
|
| 484 |
+
# Some attributes work when set on `dist`, others on `dist.metadata`...
|
| 485 |
+
# Here we set in both just in case (this also avoids calling `_finalize_*`)
|
| 486 |
+
setattr(dist, attr, value)
|
| 487 |
+
setattr(dist.metadata, attr, value)
|
| 488 |
+
|
| 489 |
+
# Then we should be able to list the modified fields as Dynamic
|
| 490 |
+
metadata = _get_metadata(dist)
|
| 491 |
+
assert set(metadata.get_all("Dynamic")) == set(fields)
|
| 492 |
+
|
| 493 |
+
|
| 494 |
+
def _makedist(**attrs):
    """Build a ``Distribution`` from *attrs* and load any config files on disk."""
    distribution = Distribution(attrs)
    # Pick up setup.cfg / pyproject.toml from the current working directory.
    distribution.parse_config_files()
    return distribution
|
| 498 |
+
|
| 499 |
+
|
| 500 |
+
def _assert_roundtrip_message(metadata: str) -> None:
|
| 501 |
+
"""Emulate the way wheel.bdist_wheel parses and regenerates the message,
|
| 502 |
+
then ensures the metadata generated by setuptools is compatible.
|
| 503 |
+
"""
|
| 504 |
+
with io.StringIO(metadata) as buffer:
|
| 505 |
+
msg = Parser(EmailMessage).parse(buffer)
|
| 506 |
+
|
| 507 |
+
serialization_policy = EmailPolicy(
|
| 508 |
+
utf8=True,
|
| 509 |
+
mangle_from_=False,
|
| 510 |
+
max_line_length=0,
|
| 511 |
+
)
|
| 512 |
+
with io.BytesIO() as buffer:
|
| 513 |
+
out = io.TextIOWrapper(buffer, encoding="utf-8")
|
| 514 |
+
Generator(out, policy=serialization_policy).flatten(msg)
|
| 515 |
+
out.flush()
|
| 516 |
+
regenerated = buffer.getvalue()
|
| 517 |
+
|
| 518 |
+
raw_metadata = bytes(metadata, "utf-8")
|
| 519 |
+
# Normalise newlines to avoid test errors on Windows:
|
| 520 |
+
raw_metadata = b"\n".join(raw_metadata.splitlines())
|
| 521 |
+
regenerated = b"\n".join(regenerated.splitlines())
|
| 522 |
+
assert regenerated == raw_metadata
|
| 523 |
+
|
| 524 |
+
|
| 525 |
+
def _normalize_metadata(msg: Message) -> str:
    """Allow equivalent metadata to be compared directly"""
    # The tricky part concerns requirements and extras: both setuptools and
    # wheel apply some normalization, but they may pick different separator
    # characters.  Per the spec, the canonical choice is "-":
    # https://packaging.python.org/en/latest/specifications/name-normalization/
    #
    # Related issues:
    # https://github.com/pypa/packaging/issues/845
    # https://github.com/pypa/packaging/issues/644#issuecomment-2429813968

    extras = {name.replace("_", "-"): name for name in msg.get_all("Provides-Extra", [])}
    normalized_reqs = [
        _normalize_req(req, extras)
        for req in _reqs.parse(msg.get_all("Requires-Dist", []))
    ]
    del msg["Requires-Dist"]
    del msg["Provides-Extra"]

    # Re-add the headers in sorted order so equivalent messages compare equal.
    for entry in sorted(normalized_reqs):
        msg["Requires-Dist"] = entry
    for extra in sorted(extras):
        msg["Provides-Extra"] = extra

    # TODO: Handle lack of PEP 643 implementation in pypa/wheel?
    del msg["Metadata-Version"]

    return msg.as_string()
|
| 555 |
+
|
| 556 |
+
|
| 557 |
+
def _normalize_req(req: Requirement, extras: dict[str, str]) -> str:
|
| 558 |
+
"""Allow equivalent requirement objects to be compared directly"""
|
| 559 |
+
as_str = str(req).replace(req.name, req.name.replace("_", "-"))
|
| 560 |
+
for norm, orig in extras.items():
|
| 561 |
+
as_str = as_str.replace(orig, norm)
|
| 562 |
+
return as_str
|
| 563 |
+
|
| 564 |
+
|
| 565 |
+
def _get_pkginfo(dist: Distribution):
|
| 566 |
+
with io.StringIO() as fp:
|
| 567 |
+
dist.metadata.write_pkg_file(fp)
|
| 568 |
+
return fp.getvalue()
|
| 569 |
+
|
| 570 |
+
|
| 571 |
+
def _get_metadata(dist: Distribution | None = None):
    """Parse the PKG-INFO of *dist* (or a freshly built distribution) into a Message."""
    return message_from_string(_get_pkginfo(dist or _makedist()))
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
def _valid_metadata(text: str) -> bool:
    """Return ``True`` when *text* parses as valid core metadata.

    ``validate=True`` makes ``Metadata.from_email`` raise on spec violations,
    so this function may propagate exceptions instead of returning ``False``.
    """
    return Metadata.from_email(text, validate=True) is not None
|
evalkit_llava/lib/python3.10/site-packages/setuptools/tests/test_depends.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
|
| 3 |
+
from setuptools import depends
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestGetModuleConstant:
    def test_basic(self):
        """Invoke get_module_constant on a module in the test package."""
        module = 'setuptools.tests.mod_with_constant'
        constant = depends.get_module_constant(module, 'value')
        assert constant == 'three, sir!'
        # Extracting the constant must not leave the module imported.
        assert 'setuptools.tests.mod_with_constant' not in sys.modules
|