Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- videollama2/lib/python3.10/site-packages/GitPython-3.1.43.dist-info/AUTHORS +58 -0
- videollama2/lib/python3.10/site-packages/GitPython-3.1.43.dist-info/INSTALLER +1 -0
- videollama2/lib/python3.10/site-packages/GitPython-3.1.43.dist-info/LICENSE +29 -0
- videollama2/lib/python3.10/site-packages/GitPython-3.1.43.dist-info/top_level.txt +1 -0
- videollama2/lib/python3.10/site-packages/cycler-0.12.1.dist-info/INSTALLER +1 -0
- videollama2/lib/python3.10/site-packages/cycler-0.12.1.dist-info/LICENSE +27 -0
- videollama2/lib/python3.10/site-packages/cycler-0.12.1.dist-info/METADATA +78 -0
- videollama2/lib/python3.10/site-packages/cycler-0.12.1.dist-info/RECORD +10 -0
- videollama2/lib/python3.10/site-packages/cycler-0.12.1.dist-info/REQUESTED +0 -0
- videollama2/lib/python3.10/site-packages/setuptools/config/__pycache__/_apply_pyprojecttoml.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/setuptools/config/__pycache__/pyprojecttoml.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/extra_validations.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/formats.py +375 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/__init__.py +13 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/config/__pycache__/test_setupcfg.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/config/downloads/preload.py +18 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/config/setupcfg_examples.txt +22 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/config/test_apply_pyprojecttoml.py +539 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/config/test_expand.py +247 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/config/test_pyprojecttoml.py +396 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py +109 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/config/test_setupcfg.py +965 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/fixtures.py +157 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/script-with-bom.py +1 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/server.py +86 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/test_bdist_egg.py +73 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/test_build.py +33 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/test_build_meta.py +970 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/test_build_py.py +480 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/test_core_metadata.py +577 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/test_depends.py +15 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/test_develop.py +175 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/test_dist_info.py +210 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/test_editable_install.py +1289 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/test_glob.py +45 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/test_namespaces.py +138 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/test_sandbox.py +134 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/test_shutil_wrapper.py +23 -0
- videollama2/lib/python3.10/site-packages/setuptools/tests/textwrap.py +6 -0
- videollama2/lib/python3.10/site-packages/sklearn/datasets/tests/data/openml/id_42074/data-v1-dl-21552912.arff.gz +3 -0
- videollama2/lib/python3.10/site-packages/websockets/server.py +575 -0
- vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/autograd/context/container.h +167 -0
- vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/autograd/context/context.h +174 -0
- vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/autograd/functions/recvrpc_backward.h +49 -0
- vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/autograd/functions/sendrpc_backward.h +37 -0
- vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/autograd/rpc_messages/rref_backward_req.h +39 -0
- vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/c10d/Backend.hpp +416 -0
- vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/c10d/CUDASymmetricMemory.hpp +115 -0
- vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/c10d/GroupRegistry.hpp +22 -0
- vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/c10d/NanCheck.hpp +16 -0
videollama2/lib/python3.10/site-packages/GitPython-3.1.43.dist-info/AUTHORS
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
GitPython was originally written by Michael Trier.
|
| 2 |
+
GitPython 0.2 was partially (re)written by Sebastian Thiel, based on 0.1.6 and git-dulwich.
|
| 3 |
+
|
| 4 |
+
Contributors are:
|
| 5 |
+
|
| 6 |
+
-Michael Trier <mtrier _at_ gmail.com>
|
| 7 |
+
-Alan Briolat
|
| 8 |
+
-Florian Apolloner <florian _at_ apolloner.eu>
|
| 9 |
+
-David Aguilar <davvid _at_ gmail.com>
|
| 10 |
+
-Jelmer Vernooij <jelmer _at_ samba.org>
|
| 11 |
+
-Steve Frécinaux <code _at_ istique.net>
|
| 12 |
+
-Kai Lautaportti <kai _at_ lautaportti.fi>
|
| 13 |
+
-Paul Sowden <paul _at_ idontsmoke.co.uk>
|
| 14 |
+
-Sebastian Thiel <byronimo _at_ gmail.com>
|
| 15 |
+
-Jonathan Chu <jonathan.chu _at_ me.com>
|
| 16 |
+
-Vincent Driessen <me _at_ nvie.com>
|
| 17 |
+
-Phil Elson <pelson _dot_ pub _at_ gmail.com>
|
| 18 |
+
-Bernard `Guyzmo` Pratz <guyzmo+gitpython+pub@m0g.net>
|
| 19 |
+
-Timothy B. Hartman <tbhartman _at_ gmail.com>
|
| 20 |
+
-Konstantin Popov <konstantin.popov.89 _at_ yandex.ru>
|
| 21 |
+
-Peter Jones <pjones _at_ redhat.com>
|
| 22 |
+
-Anson Mansfield <anson.mansfield _at_ gmail.com>
|
| 23 |
+
-Ken Odegard <ken.odegard _at_ gmail.com>
|
| 24 |
+
-Alexis Horgix Chotard
|
| 25 |
+
-Piotr Babij <piotr.babij _at_ gmail.com>
|
| 26 |
+
-Mikuláš Poul <mikulaspoul _at_ gmail.com>
|
| 27 |
+
-Charles Bouchard-Légaré <cblegare.atl _at_ ntis.ca>
|
| 28 |
+
-Yaroslav Halchenko <debian _at_ onerussian.com>
|
| 29 |
+
-Tim Swast <swast _at_ google.com>
|
| 30 |
+
-William Luc Ritchie
|
| 31 |
+
-David Host <hostdm _at_ outlook.com>
|
| 32 |
+
-A. Jesse Jiryu Davis <jesse _at_ emptysquare.net>
|
| 33 |
+
-Steven Whitman <ninloot _at_ gmail.com>
|
| 34 |
+
-Stefan Stancu <stefan.stancu _at_ gmail.com>
|
| 35 |
+
-César Izurieta <cesar _at_ caih.org>
|
| 36 |
+
-Arthur Milchior <arthur _at_ milchior.fr>
|
| 37 |
+
-Anil Khatri <anil.soccer.khatri _at_ gmail.com>
|
| 38 |
+
-JJ Graham <thetwoj _at_ gmail.com>
|
| 39 |
+
-Ben Thayer <ben _at_ benthayer.com>
|
| 40 |
+
-Dries Kennes <admin _at_ dries007.net>
|
| 41 |
+
-Pratik Anurag <panurag247365 _at_ gmail.com>
|
| 42 |
+
-Harmon <harmon.public _at_ gmail.com>
|
| 43 |
+
-Liam Beguin <liambeguin _at_ gmail.com>
|
| 44 |
+
-Ram Rachum <ram _at_ rachum.com>
|
| 45 |
+
-Alba Mendez <me _at_ alba.sh>
|
| 46 |
+
-Robert Westman <robert _at_ byteflux.io>
|
| 47 |
+
-Hugo van Kemenade
|
| 48 |
+
-Hiroki Tokunaga <tokusan441 _at_ gmail.com>
|
| 49 |
+
-Julien Mauroy <pro.julien.mauroy _at_ gmail.com>
|
| 50 |
+
-Patrick Gerard
|
| 51 |
+
-Luke Twist <itsluketwist@gmail.com>
|
| 52 |
+
-Joseph Hale <me _at_ jhale.dev>
|
| 53 |
+
-Santos Gallegos <stsewd _at_ proton.me>
|
| 54 |
+
-Wenhan Zhu <wzhu.cosmos _at_ gmail.com>
|
| 55 |
+
-Eliah Kagan <eliah.kagan _at_ gmail.com>
|
| 56 |
+
-Ethan Lin <et.repositories _at_ gmail.com>
|
| 57 |
+
|
| 58 |
+
Portions derived from other open source works and are clearly marked.
|
videollama2/lib/python3.10/site-packages/GitPython-3.1.43.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
videollama2/lib/python3.10/site-packages/GitPython-3.1.43.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (C) 2008, 2009 Michael Trier and contributors
|
| 2 |
+
All rights reserved.
|
| 3 |
+
|
| 4 |
+
Redistribution and use in source and binary forms, with or without
|
| 5 |
+
modification, are permitted provided that the following conditions
|
| 6 |
+
are met:
|
| 7 |
+
|
| 8 |
+
* Redistributions of source code must retain the above copyright
|
| 9 |
+
notice, this list of conditions and the following disclaimer.
|
| 10 |
+
|
| 11 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 12 |
+
notice, this list of conditions and the following disclaimer in the
|
| 13 |
+
documentation and/or other materials provided with the distribution.
|
| 14 |
+
|
| 15 |
+
* Neither the name of the GitPython project nor the names of
|
| 16 |
+
its contributors may be used to endorse or promote products derived
|
| 17 |
+
from this software without specific prior written permission.
|
| 18 |
+
|
| 19 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
| 25 |
+
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
| 26 |
+
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
| 27 |
+
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
| 28 |
+
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
| 29 |
+
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
videollama2/lib/python3.10/site-packages/GitPython-3.1.43.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
git
|
videollama2/lib/python3.10/site-packages/cycler-0.12.1.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
videollama2/lib/python3.10/site-packages/cycler-0.12.1.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) 2015, matplotlib project
|
| 2 |
+
All rights reserved.
|
| 3 |
+
|
| 4 |
+
Redistribution and use in source and binary forms, with or without
|
| 5 |
+
modification, are permitted provided that the following conditions are met:
|
| 6 |
+
|
| 7 |
+
* Redistributions of source code must retain the above copyright notice, this
|
| 8 |
+
list of conditions and the following disclaimer.
|
| 9 |
+
|
| 10 |
+
* Redistributions in binary form must reproduce the above copyright notice,
|
| 11 |
+
this list of conditions and the following disclaimer in the documentation
|
| 12 |
+
and/or other materials provided with the distribution.
|
| 13 |
+
|
| 14 |
+
* Neither the name of the matplotlib project nor the names of its
|
| 15 |
+
contributors may be used to endorse or promote products derived from
|
| 16 |
+
this software without specific prior written permission.
|
| 17 |
+
|
| 18 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
| 19 |
+
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
| 20 |
+
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 21 |
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
| 22 |
+
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
| 23 |
+
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 24 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
| 25 |
+
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
| 26 |
+
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 27 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
videollama2/lib/python3.10/site-packages/cycler-0.12.1.dist-info/METADATA
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: cycler
|
| 3 |
+
Version: 0.12.1
|
| 4 |
+
Summary: Composable style cycles
|
| 5 |
+
Author-email: Thomas A Caswell <matplotlib-users@python.org>
|
| 6 |
+
License: Copyright (c) 2015, matplotlib project
|
| 7 |
+
All rights reserved.
|
| 8 |
+
|
| 9 |
+
Redistribution and use in source and binary forms, with or without
|
| 10 |
+
modification, are permitted provided that the following conditions are met:
|
| 11 |
+
|
| 12 |
+
* Redistributions of source code must retain the above copyright notice, this
|
| 13 |
+
list of conditions and the following disclaimer.
|
| 14 |
+
|
| 15 |
+
* Redistributions in binary form must reproduce the above copyright notice,
|
| 16 |
+
this list of conditions and the following disclaimer in the documentation
|
| 17 |
+
and/or other materials provided with the distribution.
|
| 18 |
+
|
| 19 |
+
* Neither the name of the matplotlib project nor the names of its
|
| 20 |
+
contributors may be used to endorse or promote products derived from
|
| 21 |
+
this software without specific prior written permission.
|
| 22 |
+
|
| 23 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
| 24 |
+
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
| 25 |
+
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 26 |
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
| 27 |
+
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
| 28 |
+
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 29 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
| 30 |
+
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
| 31 |
+
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 32 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 33 |
+
Project-URL: homepage, https://matplotlib.org/cycler/
|
| 34 |
+
Project-URL: repository, https://github.com/matplotlib/cycler
|
| 35 |
+
Keywords: cycle kwargs
|
| 36 |
+
Classifier: License :: OSI Approved :: BSD License
|
| 37 |
+
Classifier: Development Status :: 4 - Beta
|
| 38 |
+
Classifier: Programming Language :: Python :: 3
|
| 39 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 40 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 41 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 42 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 43 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 44 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 45 |
+
Requires-Python: >=3.8
|
| 46 |
+
Description-Content-Type: text/x-rst
|
| 47 |
+
License-File: LICENSE
|
| 48 |
+
Provides-Extra: docs
|
| 49 |
+
Requires-Dist: ipython ; extra == 'docs'
|
| 50 |
+
Requires-Dist: matplotlib ; extra == 'docs'
|
| 51 |
+
Requires-Dist: numpydoc ; extra == 'docs'
|
| 52 |
+
Requires-Dist: sphinx ; extra == 'docs'
|
| 53 |
+
Provides-Extra: tests
|
| 54 |
+
Requires-Dist: pytest ; extra == 'tests'
|
| 55 |
+
Requires-Dist: pytest-cov ; extra == 'tests'
|
| 56 |
+
Requires-Dist: pytest-xdist ; extra == 'tests'
|
| 57 |
+
|
| 58 |
+
|PyPi|_ |Conda|_ |Supported Python versions|_ |GitHub Actions|_ |Codecov|_
|
| 59 |
+
|
| 60 |
+
.. |PyPi| image:: https://img.shields.io/pypi/v/cycler.svg?style=flat
|
| 61 |
+
.. _PyPi: https://pypi.python.org/pypi/cycler
|
| 62 |
+
|
| 63 |
+
.. |Conda| image:: https://img.shields.io/conda/v/conda-forge/cycler
|
| 64 |
+
.. _Conda: https://anaconda.org/conda-forge/cycler
|
| 65 |
+
|
| 66 |
+
.. |Supported Python versions| image:: https://img.shields.io/pypi/pyversions/cycler.svg
|
| 67 |
+
.. _Supported Python versions: https://pypi.python.org/pypi/cycler
|
| 68 |
+
|
| 69 |
+
.. |GitHub Actions| image:: https://github.com/matplotlib/cycler/actions/workflows/tests.yml/badge.svg
|
| 70 |
+
.. _GitHub Actions: https://github.com/matplotlib/cycler/actions
|
| 71 |
+
|
| 72 |
+
.. |Codecov| image:: https://codecov.io/github/matplotlib/cycler/badge.svg?branch=main&service=github
|
| 73 |
+
.. _Codecov: https://codecov.io/github/matplotlib/cycler?branch=main
|
| 74 |
+
|
| 75 |
+
cycler: composable cycles
|
| 76 |
+
=========================
|
| 77 |
+
|
| 78 |
+
Docs: https://matplotlib.org/cycler/
|
videollama2/lib/python3.10/site-packages/cycler-0.12.1.dist-info/RECORD
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cycler-0.12.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
cycler-0.12.1.dist-info/LICENSE,sha256=8SGBQ9dm2j_qZvEzlrfxXfRqgzA_Kb-Wum6Y601C9Ag,1497
|
| 3 |
+
cycler-0.12.1.dist-info/METADATA,sha256=IyieGbdvHgE5Qidpbmryts0c556JcxIJv5GVFIsY7TY,3779
|
| 4 |
+
cycler-0.12.1.dist-info/RECORD,,
|
| 5 |
+
cycler-0.12.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 6 |
+
cycler-0.12.1.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
|
| 7 |
+
cycler-0.12.1.dist-info/top_level.txt,sha256=D8BVVDdAAelLb2FOEz7lDpc6-AL21ylKPrMhtG6yzyE,7
|
| 8 |
+
cycler/__init__.py,sha256=1JdRgv5Zzxo-W1ev7B_LWquysWP6LZH6CHk_COtIaXE,16709
|
| 9 |
+
cycler/__pycache__/__init__.cpython-310.pyc,,
|
| 10 |
+
cycler/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
videollama2/lib/python3.10/site-packages/cycler-0.12.1.dist-info/REQUESTED
ADDED
|
File without changes
|
videollama2/lib/python3.10/site-packages/setuptools/config/__pycache__/_apply_pyprojecttoml.cpython-310.pyc
ADDED
|
Binary file (16.9 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/setuptools/config/__pycache__/pyprojecttoml.cpython-310.pyc
ADDED
|
Binary file (16 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__pycache__/extra_validations.cpython-310.pyc
ADDED
|
Binary file (1.58 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/formats.py
ADDED
|
@@ -0,0 +1,375 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
The functions in this module are used to validate schemas with the
|
| 3 |
+
`format JSON Schema keyword
|
| 4 |
+
<https://json-schema.org/understanding-json-schema/reference/string#format>`_.
|
| 5 |
+
|
| 6 |
+
The correspondence is given by replacing the ``_`` character in the name of the
|
| 7 |
+
function with a ``-`` to obtain the format name and vice versa.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import builtins
|
| 11 |
+
import logging
|
| 12 |
+
import os
|
| 13 |
+
import re
|
| 14 |
+
import string
|
| 15 |
+
import typing
|
| 16 |
+
from itertools import chain as _chain
|
| 17 |
+
|
| 18 |
+
if typing.TYPE_CHECKING:
|
| 19 |
+
from typing_extensions import Literal
|
| 20 |
+
|
| 21 |
+
_logger = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
# -------------------------------------------------------------------------------------
|
| 24 |
+
# PEP 440
|
| 25 |
+
|
| 26 |
+
VERSION_PATTERN = r"""
|
| 27 |
+
v?
|
| 28 |
+
(?:
|
| 29 |
+
(?:(?P<epoch>[0-9]+)!)? # epoch
|
| 30 |
+
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
|
| 31 |
+
(?P<pre> # pre-release
|
| 32 |
+
[-_\.]?
|
| 33 |
+
(?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
|
| 34 |
+
[-_\.]?
|
| 35 |
+
(?P<pre_n>[0-9]+)?
|
| 36 |
+
)?
|
| 37 |
+
(?P<post> # post release
|
| 38 |
+
(?:-(?P<post_n1>[0-9]+))
|
| 39 |
+
|
|
| 40 |
+
(?:
|
| 41 |
+
[-_\.]?
|
| 42 |
+
(?P<post_l>post|rev|r)
|
| 43 |
+
[-_\.]?
|
| 44 |
+
(?P<post_n2>[0-9]+)?
|
| 45 |
+
)
|
| 46 |
+
)?
|
| 47 |
+
(?P<dev> # dev release
|
| 48 |
+
[-_\.]?
|
| 49 |
+
(?P<dev_l>dev)
|
| 50 |
+
[-_\.]?
|
| 51 |
+
(?P<dev_n>[0-9]+)?
|
| 52 |
+
)?
|
| 53 |
+
)
|
| 54 |
+
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
|
| 55 |
+
"""
|
| 56 |
+
|
| 57 |
+
VERSION_REGEX = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.X | re.I)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def pep440(version: str) -> bool:
|
| 61 |
+
"""See :ref:`PyPA's version specification <pypa:version-specifiers>`
|
| 62 |
+
(initially introduced in :pep:`440`).
|
| 63 |
+
"""
|
| 64 |
+
return VERSION_REGEX.match(version) is not None
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
# -------------------------------------------------------------------------------------
|
| 68 |
+
# PEP 508
|
| 69 |
+
|
| 70 |
+
PEP508_IDENTIFIER_PATTERN = r"([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])"
|
| 71 |
+
PEP508_IDENTIFIER_REGEX = re.compile(f"^{PEP508_IDENTIFIER_PATTERN}$", re.I)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def pep508_identifier(name: str) -> bool:
|
| 75 |
+
"""See :ref:`PyPA's name specification <pypa:name-format>`
|
| 76 |
+
(initially introduced in :pep:`508#names`).
|
| 77 |
+
"""
|
| 78 |
+
return PEP508_IDENTIFIER_REGEX.match(name) is not None
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
try:
|
| 82 |
+
try:
|
| 83 |
+
from packaging import requirements as _req
|
| 84 |
+
except ImportError: # pragma: no cover
|
| 85 |
+
# let's try setuptools vendored version
|
| 86 |
+
from setuptools._vendor.packaging import ( # type: ignore[no-redef]
|
| 87 |
+
requirements as _req,
|
| 88 |
+
)
|
| 89 |
+
|
| 90 |
+
def pep508(value: str) -> bool:
|
| 91 |
+
"""See :ref:`PyPA's dependency specifiers <pypa:dependency-specifiers>`
|
| 92 |
+
(initially introduced in :pep:`508`).
|
| 93 |
+
"""
|
| 94 |
+
try:
|
| 95 |
+
_req.Requirement(value)
|
| 96 |
+
return True
|
| 97 |
+
except _req.InvalidRequirement:
|
| 98 |
+
return False
|
| 99 |
+
|
| 100 |
+
except ImportError: # pragma: no cover
|
| 101 |
+
_logger.warning(
|
| 102 |
+
"Could not find an installation of `packaging`. Requirements, dependencies and "
|
| 103 |
+
"versions might not be validated. "
|
| 104 |
+
"To enforce validation, please install `packaging`."
|
| 105 |
+
)
|
| 106 |
+
|
| 107 |
+
def pep508(value: str) -> bool:
|
| 108 |
+
return True
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def pep508_versionspec(value: str) -> bool:
|
| 112 |
+
"""Expression that can be used to specify/lock versions (including ranges)
|
| 113 |
+
See ``versionspec`` in :ref:`PyPA's dependency specifiers
|
| 114 |
+
<pypa:dependency-specifiers>` (initially introduced in :pep:`508`).
|
| 115 |
+
"""
|
| 116 |
+
if any(c in value for c in (";", "]", "@")):
|
| 117 |
+
# In PEP 508:
|
| 118 |
+
# conditional markers, extras and URL specs are not included in the
|
| 119 |
+
# versionspec
|
| 120 |
+
return False
|
| 121 |
+
# Let's pretend we have a dependency called `requirement` with the given
|
| 122 |
+
# version spec, then we can reuse the pep508 function for validation:
|
| 123 |
+
return pep508(f"requirement{value}")
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
# -------------------------------------------------------------------------------------
|
| 127 |
+
# PEP 517
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def pep517_backend_reference(value: str) -> bool:
|
| 131 |
+
"""See PyPA's specification for defining build-backend references
|
| 132 |
+
introduced in :pep:`517#source-trees`.
|
| 133 |
+
|
| 134 |
+
This is similar to an entry-point reference (e.g., ``package.module:object``).
|
| 135 |
+
"""
|
| 136 |
+
module, _, obj = value.partition(":")
|
| 137 |
+
identifiers = (i.strip() for i in _chain(module.split("."), obj.split(".")))
|
| 138 |
+
return all(python_identifier(i) for i in identifiers if i)
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
# -------------------------------------------------------------------------------------
|
| 142 |
+
# Classifiers - PEP 301
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def _download_classifiers() -> str:
|
| 146 |
+
import ssl
|
| 147 |
+
from email.message import Message
|
| 148 |
+
from urllib.request import urlopen
|
| 149 |
+
|
| 150 |
+
url = "https://pypi.org/pypi?:action=list_classifiers"
|
| 151 |
+
context = ssl.create_default_context()
|
| 152 |
+
with urlopen(url, context=context) as response: # noqa: S310 (audit URLs)
|
| 153 |
+
headers = Message()
|
| 154 |
+
headers["content_type"] = response.getheader("content-type", "text/plain")
|
| 155 |
+
return response.read().decode(headers.get_param("charset", "utf-8")) # type: ignore[no-any-return]
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
class _TroveClassifier:
|
| 159 |
+
"""The ``trove_classifiers`` package is the official way of validating classifiers,
|
| 160 |
+
however this package might not be always available.
|
| 161 |
+
As a workaround we can still download a list from PyPI.
|
| 162 |
+
We also don't want to be over strict about it, so simply skipping silently is an
|
| 163 |
+
option (classifiers will be validated anyway during the upload to PyPI).
|
| 164 |
+
"""
|
| 165 |
+
|
| 166 |
+
downloaded: typing.Union[None, "Literal[False]", typing.Set[str]]
|
| 167 |
+
|
| 168 |
+
def __init__(self) -> None:
|
| 169 |
+
self.downloaded = None
|
| 170 |
+
self._skip_download = False
|
| 171 |
+
# None => not cached yet
|
| 172 |
+
# False => cache not available
|
| 173 |
+
self.__name__ = "trove_classifier" # Emulate a public function
|
| 174 |
+
|
| 175 |
+
def _disable_download(self) -> None:
|
| 176 |
+
# This is a private API. Only setuptools has the consent of using it.
|
| 177 |
+
self._skip_download = True
|
| 178 |
+
|
| 179 |
+
def __call__(self, value: str) -> bool:
|
| 180 |
+
if self.downloaded is False or self._skip_download is True:
|
| 181 |
+
return True
|
| 182 |
+
|
| 183 |
+
if os.getenv("NO_NETWORK") or os.getenv("VALIDATE_PYPROJECT_NO_NETWORK"):
|
| 184 |
+
self.downloaded = False
|
| 185 |
+
msg = (
|
| 186 |
+
"Install ``trove-classifiers`` to ensure proper validation. "
|
| 187 |
+
"Skipping download of classifiers list from PyPI (NO_NETWORK)."
|
| 188 |
+
)
|
| 189 |
+
_logger.debug(msg)
|
| 190 |
+
return True
|
| 191 |
+
|
| 192 |
+
if self.downloaded is None:
|
| 193 |
+
msg = (
|
| 194 |
+
"Install ``trove-classifiers`` to ensure proper validation. "
|
| 195 |
+
"Meanwhile a list of classifiers will be downloaded from PyPI."
|
| 196 |
+
)
|
| 197 |
+
_logger.debug(msg)
|
| 198 |
+
try:
|
| 199 |
+
self.downloaded = set(_download_classifiers().splitlines())
|
| 200 |
+
except Exception:
|
| 201 |
+
self.downloaded = False
|
| 202 |
+
_logger.debug("Problem with download, skipping validation")
|
| 203 |
+
return True
|
| 204 |
+
|
| 205 |
+
return value in self.downloaded or value.lower().startswith("private ::")
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
try:
|
| 209 |
+
from trove_classifiers import classifiers as _trove_classifiers
|
| 210 |
+
|
| 211 |
+
def trove_classifier(value: str) -> bool:
|
| 212 |
+
"""See https://pypi.org/classifiers/"""
|
| 213 |
+
return value in _trove_classifiers or value.lower().startswith("private ::")
|
| 214 |
+
|
| 215 |
+
except ImportError: # pragma: no cover
|
| 216 |
+
trove_classifier = _TroveClassifier()
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
# -------------------------------------------------------------------------------------
|
| 220 |
+
# Stub packages - PEP 561
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
def pep561_stub_name(value: str) -> bool:
|
| 224 |
+
"""Name of a directory containing type stubs.
|
| 225 |
+
It must follow the name scheme ``<package>-stubs`` as defined in
|
| 226 |
+
:pep:`561#stub-only-packages`.
|
| 227 |
+
"""
|
| 228 |
+
top, *children = value.split(".")
|
| 229 |
+
if not top.endswith("-stubs"):
|
| 230 |
+
return False
|
| 231 |
+
return python_module_name(".".join([top[: -len("-stubs")], *children]))
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
# -------------------------------------------------------------------------------------
|
| 235 |
+
# Non-PEP related
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
def url(value: str) -> bool:
|
| 239 |
+
"""Valid URL (validation uses :obj:`urllib.parse`).
|
| 240 |
+
For maximum compatibility please make sure to include a ``scheme`` prefix
|
| 241 |
+
in your URL (e.g. ``http://``).
|
| 242 |
+
"""
|
| 243 |
+
from urllib.parse import urlparse
|
| 244 |
+
|
| 245 |
+
try:
|
| 246 |
+
parts = urlparse(value)
|
| 247 |
+
if not parts.scheme:
|
| 248 |
+
_logger.warning(
|
| 249 |
+
"For maximum compatibility please make sure to include a "
|
| 250 |
+
"`scheme` prefix in your URL (e.g. 'http://'). "
|
| 251 |
+
f"Given value: {value}"
|
| 252 |
+
)
|
| 253 |
+
if not (value.startswith("/") or value.startswith("\\") or "@" in value):
|
| 254 |
+
parts = urlparse(f"http://{value}")
|
| 255 |
+
|
| 256 |
+
return bool(parts.scheme and parts.netloc)
|
| 257 |
+
except Exception:
|
| 258 |
+
return False
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
# https://packaging.python.org/specifications/entry-points/
|
| 262 |
+
ENTRYPOINT_PATTERN = r"[^\[\s=]([^=]*[^\s=])?"
|
| 263 |
+
ENTRYPOINT_REGEX = re.compile(f"^{ENTRYPOINT_PATTERN}$", re.I)
|
| 264 |
+
RECOMMEDED_ENTRYPOINT_PATTERN = r"[\w.-]+"
|
| 265 |
+
RECOMMEDED_ENTRYPOINT_REGEX = re.compile(f"^{RECOMMEDED_ENTRYPOINT_PATTERN}$", re.I)
|
| 266 |
+
ENTRYPOINT_GROUP_PATTERN = r"\w+(\.\w+)*"
|
| 267 |
+
ENTRYPOINT_GROUP_REGEX = re.compile(f"^{ENTRYPOINT_GROUP_PATTERN}$", re.I)
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
def python_identifier(value: str) -> bool:
    """Can be used as identifier in Python.
    (Validation uses :obj:`str.isidentifier`).
    """
    # Delegate entirely to the built-in check.
    return str.isidentifier(value)
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
def python_qualified_identifier(value: str) -> bool:
    """
    Python "dotted identifier", i.e. a sequence of :obj:`python_identifier`
    concatenated with ``"."`` (e.g.: ``package.module.submodule``).
    """
    # Leading/trailing dots would produce empty segments; reject them early.
    if value.startswith(".") or value.endswith("."):
        return False
    segments = value.split(".")
    return all(map(python_identifier, segments))
|
| 285 |
+
|
| 286 |
+
|
| 287 |
+
def python_module_name(value: str) -> bool:
    """Module name that can be used in an ``import``-statement in Python.
    See :obj:`python_qualified_identifier`.
    """
    # An importable module name is exactly a dotted identifier.
    return python_qualified_identifier(value)
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
def python_module_name_relaxed(value: str) -> bool:
    """Similar to :obj:`python_module_name`, but relaxed to also accept
    dash characters (``-``) and cover special cases like ``pip-run``.

    It is recommended, however, that beginners avoid dash characters,
    as they require advanced knowledge about Python internals.

    The following are disallowed:

    * names starting/ending in dashes,
    * names ending in ``-stubs`` (potentially collide with :obj:`pep561_stub_name`).
    """
    starts_or_ends_with_dash = value.startswith("-") or value.endswith("-")
    if starts_or_ends_with_dash:
        return False
    if value.endswith("-stubs"):
        return False  # Avoid collision with PEP 561
    # Dashes elsewhere are tolerated: check the name as if they were underscores.
    normalized = value.replace("-", "_")
    return python_module_name(normalized)
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
def python_entrypoint_group(value: str) -> bool:
    """See ``Data model > group`` in the :ref:`PyPA's entry-points specification
    <pypa:entry-points>`.
    """
    # A match object is truthy; None (no match) is falsy.
    return bool(ENTRYPOINT_GROUP_REGEX.match(value))
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def python_entrypoint_name(value: str) -> bool:
    """See ``Data model > name`` in the :ref:`PyPA's entry-points specification
    <pypa:entry-points>`.
    """
    if ENTRYPOINT_REGEX.match(value) is None:
        return False
    # Technically valid, but outside the recommended character set:
    # accept it, yet emit a warning so the author can reconsider.
    if RECOMMEDED_ENTRYPOINT_REGEX.match(value) is None:
        _logger.warning(
            f"Entry point `{value}` does not follow recommended pattern: "
            + RECOMMEDED_ENTRYPOINT_PATTERN
        )
    return True
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
def python_entrypoint_reference(value: str) -> bool:
    """Reference to a Python object using in the format::

        importable.module:object.attr

    See ``Data model >object reference`` in the :ref:`PyPA's entry-points specification
    <pypa:entry-points>`.
    """
    module, _, rest = value.partition(":")
    if "[" in rest:
        obj, _, extras_ = rest.partition("[")
        # Fix: a dangling "[" with nothing after it (e.g. "mod:obj[") used to
        # raise IndexError via ``extras_.strip()[-1]``; treat it as invalid.
        stripped_extras = extras_.strip()
        if not stripped_extras or stripped_extras[-1] != "]":
            return False
        extras = (x.strip() for x in extras_.strip(string.whitespace + "[]").split(","))
        if not all(pep508_identifier(e) for e in extras):
            return False
        _logger.warning(f"`{value}` - using extras for entry points is not recommended")
    else:
        obj = rest

    # Validate every dotted segment of the module path, and of the object
    # path too when an object part was given after ":".
    module_parts = module.split(".")
    identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts
    return all(python_identifier(i.strip()) for i in identifiers)
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
def uint8(value: builtins.int) -> bool:
    r"""Unsigned 8-bit integer (:math:`0 \leq x < 2^8`)"""
    return not (value < 0 or value >= 2**8)
|
| 361 |
+
|
| 362 |
+
|
| 363 |
+
def uint16(value: builtins.int) -> bool:
    r"""Unsigned 16-bit integer (:math:`0 \leq x < 2^{16}`)"""
    return not (value < 0 or value >= 2**16)
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
def uint(value: builtins.int) -> bool:
    r"""Unsigned 64-bit integer (:math:`0 \leq x < 2^{64}`)"""
    return not (value < 0 or value >= 2**64)
|
| 371 |
+
|
| 372 |
+
|
| 373 |
+
def int(value: builtins.int) -> bool:
    r"""Signed 64-bit integer (:math:`-2^{63} \leq x < 2^{63}`)"""
    # Intentionally shadows the builtin; the original type is available as
    # ``builtins.int`` in the annotation.
    lower_bound = -(2**63)
    return lower_bound <= value < -lower_bound
|
videollama2/lib/python3.10/site-packages/setuptools/tests/__init__.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import locale
import sys

import pytest

__all__ = ['fail_on_ascii']

# ``locale.getencoding`` was only added in Python 3.11; older interpreters
# use the long-standing ``getpreferredencoding(False)`` API instead.
if sys.version_info >= (3, 11):
    locale_encoding = locale.getencoding()
else:
    locale_encoding = locale.getpreferredencoding(False)
# 'ANSI_X3.4-1968' is how glibc reports a plain-ASCII locale (e.g. LANG=C).
is_ascii = locale_encoding == 'ANSI_X3.4-1968'
# Marker for tests that are known to fail when running under an ASCII-only locale.
fail_on_ascii = pytest.mark.xfail(is_ascii, reason="Test fails in this locale")
|
videollama2/lib/python3.10/site-packages/setuptools/tests/config/__pycache__/test_setupcfg.cpython-310.pyc
ADDED
|
Binary file (27.9 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/setuptools/tests/config/downloads/preload.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""This file can be used to preload files needed for testing.

For example you can use::

    cd setuptools/tests/config
    python -m downloads.preload setupcfg_examples.txt

to make sure the `setup.cfg` examples are downloaded before starting the tests.
"""

import sys
from pathlib import Path

# ``retrieve_file``/``urls_from_file`` come from the sibling ``downloads``
# package __init__ (not visible here) — presumably they download each URL
# into a local cache; confirm against that module.
from . import retrieve_file, urls_from_file

if __name__ == "__main__":
    # argv[1]: path to a text file listing one URL per line.
    urls = urls_from_file(Path(sys.argv[1]))
    # ``list`` forces the lazy ``map`` so every URL is actually fetched.
    list(map(retrieve_file, urls))
|
videollama2/lib/python3.10/site-packages/setuptools/tests/config/setupcfg_examples.txt
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ====================================================================
|
| 2 |
+
# Some popular packages that use setup.cfg (and others not so popular)
|
| 3 |
+
# Reference: https://hugovk.github.io/top-pypi-packages/
|
| 4 |
+
# ====================================================================
|
| 5 |
+
https://github.com/pypa/setuptools/raw/52c990172fec37766b3566679724aa8bf70ae06d/setup.cfg
|
| 6 |
+
https://github.com/pypa/wheel/raw/0acd203cd896afec7f715aa2ff5980a403459a3b/setup.cfg
|
| 7 |
+
https://github.com/python/importlib_metadata/raw/2f05392ca980952a6960d82b2f2d2ea10aa53239/setup.cfg
|
| 8 |
+
https://github.com/jaraco/skeleton/raw/d9008b5c510cd6969127a6a2ab6f832edddef296/setup.cfg
|
| 9 |
+
https://github.com/jaraco/zipp/raw/700d3a96390e970b6b962823bfea78b4f7e1c537/setup.cfg
|
| 10 |
+
https://github.com/pallets/jinja/raw/7d72eb7fefb7dce065193967f31f805180508448/setup.cfg
|
| 11 |
+
https://github.com/tkem/cachetools/raw/2fd87a94b8d3861d80e9e4236cd480bfdd21c90d/setup.cfg
|
| 12 |
+
https://github.com/aio-libs/aiohttp/raw/5e0e6b7080f2408d5f1dd544c0e1cf88378b7b10/setup.cfg
|
| 13 |
+
https://github.com/pallets/flask/raw/9486b6cf57bd6a8a261f67091aca8ca78eeec1e3/setup.cfg
|
| 14 |
+
https://github.com/pallets/click/raw/6411f425fae545f42795665af4162006b36c5e4a/setup.cfg
|
| 15 |
+
https://github.com/sqlalchemy/sqlalchemy/raw/533f5718904b620be8d63f2474229945d6f8ba5d/setup.cfg
|
| 16 |
+
https://github.com/pytest-dev/pluggy/raw/461ef63291d13589c4e21aa182cd1529257e9a0a/setup.cfg
|
| 17 |
+
https://github.com/pytest-dev/pytest/raw/c7be96dae487edbd2f55b561b31b68afac1dabe6/setup.cfg
|
| 18 |
+
https://github.com/platformdirs/platformdirs/raw/7b7852128dd6f07511b618d6edea35046bd0c6ff/setup.cfg
|
| 19 |
+
https://github.com/pandas-dev/pandas/raw/bc17343f934a33dc231c8c74be95d8365537c376/setup.cfg
|
| 20 |
+
https://github.com/django/django/raw/4e249d11a6e56ca8feb4b055b681cec457ef3a3d/setup.cfg
|
| 21 |
+
https://github.com/pyscaffold/pyscaffold/raw/de7aa5dc059fbd04307419c667cc4961bc9df4b8/setup.cfg
|
| 22 |
+
https://github.com/pypa/virtualenv/raw/f92eda6e3da26a4d28c2663ffb85c4960bdb990c/setup.cfg
|
videollama2/lib/python3.10/site-packages/setuptools/tests/config/test_apply_pyprojecttoml.py
ADDED
|
@@ -0,0 +1,539 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Make sure that applying the configuration from pyproject.toml is equivalent to
|
| 2 |
+
applying a similar configuration from setup.cfg
|
| 3 |
+
|
| 4 |
+
To run these tests offline, please have a look on ``./downloads/preload.py``
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from __future__ import annotations
|
| 8 |
+
|
| 9 |
+
import io
|
| 10 |
+
import re
|
| 11 |
+
import tarfile
|
| 12 |
+
from inspect import cleandoc
|
| 13 |
+
from pathlib import Path
|
| 14 |
+
from unittest.mock import Mock
|
| 15 |
+
|
| 16 |
+
import pytest
|
| 17 |
+
from ini2toml.api import LiteTranslator
|
| 18 |
+
from packaging.metadata import Metadata
|
| 19 |
+
|
| 20 |
+
import setuptools # noqa: F401 # ensure monkey patch to metadata
|
| 21 |
+
from setuptools._static import is_static
|
| 22 |
+
from setuptools.command.egg_info import write_requirements
|
| 23 |
+
from setuptools.config import expand, pyprojecttoml, setupcfg
|
| 24 |
+
from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter
|
| 25 |
+
from setuptools.dist import Distribution
|
| 26 |
+
from setuptools.errors import RemovedConfigError
|
| 27 |
+
|
| 28 |
+
from .downloads import retrieve_file, urls_from_file
|
| 29 |
+
|
| 30 |
+
HERE = Path(__file__).parent
|
| 31 |
+
EXAMPLES_FILE = "setupcfg_examples.txt"
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def makedist(path, **attrs):
    # Convenience factory: a Distribution rooted at ``path``; any extra
    # keyword arguments are forwarded as distribution attributes.
    return Distribution({"src_root": path, **attrs})
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE))
@pytest.mark.filterwarnings("ignore")
@pytest.mark.uses_network
def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path):
    # For each real-world setup.cfg example: translate it to pyproject.toml
    # with ini2toml, apply both configurations, and check the resulting
    # distributions agree on metadata, entry points, package/data files and
    # requirements.
    # ``read_attr`` is stubbed out so version lookups don't require the
    # example's actual package sources.
    monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.0.1"))
    setupcfg_example = retrieve_file(url)
    pyproject_example = Path(tmp_path, "pyproject.toml")
    setupcfg_text = setupcfg_example.read_text(encoding="utf-8")
    toml_config = LiteTranslator().translate(setupcfg_text, "setup.cfg")
    pyproject_example.write_text(toml_config, encoding="utf-8")

    dist_toml = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject_example)
    dist_cfg = setupcfg.apply_configuration(makedist(tmp_path), setupcfg_example)

    # Core metadata (PKG-INFO style) must be identical.
    pkg_info_toml = core_metadata(dist_toml)
    pkg_info_cfg = core_metadata(dist_cfg)
    assert pkg_info_toml == pkg_info_cfg

    if any(getattr(d, "license_files", None) for d in (dist_toml, dist_cfg)):
        assert set(dist_toml.license_files) == set(dist_cfg.license_files)

    if any(getattr(d, "entry_points", None) for d in (dist_toml, dist_cfg)):
        print(dist_cfg.entry_points)
        # Compare entry points ignoring whitespace and ordering differences.
        ep_toml = {
            (k, *sorted(i.replace(" ", "") for i in v))
            for k, v in dist_toml.entry_points.items()
        }
        ep_cfg = {
            (k, *sorted(i.replace(" ", "") for i in v))
            for k, v in dist_cfg.entry_points.items()
        }
        assert ep_toml == ep_cfg

    if any(getattr(d, "package_data", None) for d in (dist_toml, dist_cfg)):
        pkg_data_toml = {(k, *sorted(v)) for k, v in dist_toml.package_data.items()}
        pkg_data_cfg = {(k, *sorted(v)) for k, v in dist_cfg.package_data.items()}
        assert pkg_data_toml == pkg_data_cfg

    if any(getattr(d, "data_files", None) for d in (dist_toml, dist_cfg)):
        data_files_toml = {(k, *sorted(v)) for k, v in dist_toml.data_files}
        data_files_cfg = {(k, *sorted(v)) for k, v in dist_cfg.data_files}
        assert data_files_toml == data_files_cfg

    # Requirements are compared as sets: ordering is not significant.
    assert set(dist_toml.install_requires) == set(dist_cfg.install_requires)
    if any(getattr(d, "extras_require", None) for d in (dist_toml, dist_cfg)):
        extra_req_toml = {(k, *sorted(v)) for k, v in dist_toml.extras_require.items()}
        extra_req_cfg = {(k, *sorted(v)) for k, v in dist_cfg.extras_require.items()}
        assert extra_req_toml == extra_req_cfg
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
# Full ``[project]`` table example (adapted from the PEP 621 specification)
# used as the baseline fixture for the tests below.
PEP621_EXAMPLE = """\
[project]
name = "spam"
version = "2020.0.0"
description = "Lovely Spam! Wonderful Spam!"
readme = "README.rst"
requires-python = ">=3.8"
license = {file = "LICENSE.txt"}
keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"]
authors = [
  {email = "hi@pradyunsg.me"},
  {name = "Tzu-Ping Chung"}
]
maintainers = [
  {name = "Brett Cannon", email = "brett@python.org"},
  {name = "John X. Ãørçeč", email = "john@utf8.org"},
  {name = "Γαμα קּ 東", email = "gama@utf8.org"},
]
classifiers = [
  "Development Status :: 4 - Beta",
  "Programming Language :: Python"
]

dependencies = [
  "httpx",
  "gidgethub[httpx]>4.0.0",
  "django>2.1; os_name != 'nt'",
  "django>2.0; os_name == 'nt'"
]

[project.optional-dependencies]
test = [
  "pytest < 5.0.0",
  "pytest-cov[all]"
]

[project.urls]
homepage = "http://example.com"
documentation = "http://readthedocs.org"
repository = "http://github.com"
changelog = "http://github.com/me/spam/blob/master/CHANGELOG.md"

[project.scripts]
spam-cli = "spam:main_cli"

[project.gui-scripts]
spam-gui = "spam:main_gui"

[project.entry-points."spam.magical"]
tomatoes = "spam:main_tomatoes"
"""

# Variant with a maintainer e-mail using non-ASCII local/domain parts,
# exercising internationalized-address handling.
PEP621_INTERNATIONAL_EMAIL_EXAMPLE = """\
[project]
name = "spam"
version = "2020.0.0"
authors = [
  {email = "hi@pradyunsg.me"},
  {name = "Tzu-Ping Chung"}
]
maintainers = [
  {name = "Степан Бандера", email = "криївка@оун-упа.укр"},
]
"""

# Minimal module providing the entry-point callables referenced above.
PEP621_EXAMPLE_SCRIPT = """
def main_cli(): pass
def main_gui(): pass
def main_tomatoes(): pass
"""
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def _pep621_example_project(
    tmp_path,
    readme="README.rst",
    pyproject_text=PEP621_EXAMPLE,
):
    # Materialize the PEP 621 example as a real project directory under
    # ``tmp_path`` (pyproject.toml + readme + LICENSE.txt + spam.py) and
    # return the path to the written pyproject.toml.
    pyproject = tmp_path / "pyproject.toml"
    text = pyproject_text
    # Swap in the requested readme filename where the template references it.
    replacements = {'readme = "README.rst"': f'readme = "{readme}"'}
    for orig, subst in replacements.items():
        text = text.replace(orig, subst)
    pyproject.write_text(text, encoding="utf-8")

    (tmp_path / readme).write_text("hello world", encoding="utf-8")
    (tmp_path / "LICENSE.txt").write_text("--- LICENSE stub ---", encoding="utf-8")
    (tmp_path / "spam.py").write_text(PEP621_EXAMPLE_SCRIPT, encoding="utf-8")
    return pyproject
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
def test_pep621_example(tmp_path):
    """Make sure the example in PEP 621 works"""
    pyproject = _pep621_example_project(tmp_path)
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    # ``license = {file = ...}`` loads the file's *contents* into the license
    # field, and the file itself is recorded in license_files.
    assert dist.metadata.license == "--- LICENSE stub ---"
    assert set(dist.metadata.license_files) == {"LICENSE.txt"}
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
@pytest.mark.parametrize(
    ("readme", "ctype"),
    [
        ("Readme.txt", "text/plain"),
        ("readme.md", "text/markdown"),
        ("text.rst", "text/x-rst"),
    ],
)
def test_readme_content_type(tmp_path, readme, ctype):
    # The readme's file extension determines the long_description content type.
    pyproject = _pep621_example_project(tmp_path, readme)
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    assert dist.metadata.long_description_content_type == ctype
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
def test_undefined_content_type(tmp_path):
    # An extension with no known content-type mapping (.tex) must error out.
    pyproject = _pep621_example_project(tmp_path, "README.tex")
    with pytest.raises(ValueError, match="Undefined content type for README.tex"):
        pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
def test_no_explicit_content_type_for_missing_extension(tmp_path):
    # A readme with no extension is accepted and simply gets no content type.
    pyproject = _pep621_example_project(tmp_path, "README")
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    assert dist.metadata.long_description_content_type is None
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
@pytest.mark.parametrize(
    ("pyproject_text", "expected_maintainers_meta_value"),
    (
        pytest.param(
            PEP621_EXAMPLE,
            (
                'Brett Cannon <brett@python.org>, "John X. Ãørçeč" <john@utf8.org>, '
                'Γαμα קּ 東 <gama@utf8.org>'
            ),
            id='non-international-emails',
        ),
        pytest.param(
            PEP621_INTERNATIONAL_EMAIL_EXAMPLE,
            'Степан Бандера <криївка@оун-упа.укр>',
            # Expected failure: the stdlib address parser does not support
            # internationalized e-mail addresses (RFC 6531), only RFC 5322.
            marks=pytest.mark.xfail(
                reason="CPython's `email.headerregistry.Address` only supports "
                'RFC 5322, as of Nov 10, 2022 and latest Python 3.11.0',
                strict=True,
            ),
            id='international-email',
        ),
    ),
)
def test_utf8_maintainer_in_metadata(  # issue-3663
    expected_maintainers_meta_value,
    pyproject_text,
    tmp_path,
):
    # Non-ASCII maintainer names must survive both the in-memory metadata
    # and the serialized PKG-INFO file.
    pyproject = _pep621_example_project(
        tmp_path,
        "README",
        pyproject_text=pyproject_text,
    )
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    assert dist.metadata.maintainer_email == expected_maintainers_meta_value
    pkg_file = tmp_path / "PKG-FILE"
    with open(pkg_file, "w", encoding="utf-8") as fh:
        dist.metadata.write_pkg_file(fh)
    content = pkg_file.read_text(encoding="utf-8")
    assert f"Maintainer-email: {expected_maintainers_meta_value}" in content
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
class TestLicenseFiles:
    # TODO: After PEP 639 is accepted, we have to move the license-files
    # to the `project` table instead of `tool.setuptools`

    def base_pyproject(self, tmp_path, additional_text):
        # Start from the PEP 621 example project and append extra TOML
        # (e.g. a [tool.setuptools] table) to its pyproject.toml.
        pyproject = _pep621_example_project(tmp_path, "README")
        text = pyproject.read_text(encoding="utf-8")

        # Sanity-check
        assert 'license = {file = "LICENSE.txt"}' in text
        assert "[tool.setuptools]" not in text

        text = f"{text}\n{additional_text}\n"
        pyproject.write_text(text, encoding="utf-8")
        return pyproject

    def test_both_license_and_license_files_defined(self, tmp_path):
        # Explicit license-files patterns take precedence over the defaults.
        setuptools_config = '[tool.setuptools]\nlicense-files = ["_FILE*"]'
        pyproject = self.base_pyproject(tmp_path, setuptools_config)

        (tmp_path / "_FILE.txt").touch()
        (tmp_path / "_FILE.rst").touch()

        # Would normally match the `license_files` patterns, but we want to exclude it
        # by being explicit. On the other hand, contents should be added to `license`
        license = tmp_path / "LICENSE.txt"
        license.write_text("LicenseRef-Proprietary\n", encoding="utf-8")

        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
        assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"}
        assert dist.metadata.license == "LicenseRef-Proprietary\n"

    def test_default_patterns(self, tmp_path):
        # Without explicit license-files, conventional names (LICEN[CS]E*,
        # COPYING*, AUTHORS*, NOTICE*) are picked up automatically.
        setuptools_config = '[tool.setuptools]\nzip-safe = false'
        # ^ used just to trigger section validation
        pyproject = self.base_pyproject(tmp_path, setuptools_config)

        license_files = "LICENCE-a.html COPYING-abc.txt AUTHORS-xyz NOTICE,def".split()

        for fname in license_files:
            (tmp_path / fname).write_text(f"{fname}\n", encoding="utf-8")

        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
        assert (tmp_path / "LICENSE.txt").exists()  # from base example
        assert set(dist.metadata.license_files) == {*license_files, "LICENSE.txt"}
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
class TestPyModules:
    # https://github.com/pypa/setuptools/issues/4316

    def dist(self, name):
        # Build a Distribution whose pyproject declares a single py-module
        # with the given (possibly unusual) name.
        toml_config = f"""
        [project]
        name = "test"
        version = "42.0"
        [tool.setuptools]
        py-modules = [{name!r}]
        """
        pyproject = Path("pyproject.toml")
        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
        return pyprojecttoml.apply_configuration(Distribution({}), pyproject)

    # Dashes and non-ASCII identifier characters are allowed mid-name.
    @pytest.mark.parametrize("module", ["pip-run", "abc-d.λ-xyz-e"])
    def test_valid_module_name(self, tmp_path, monkeypatch, module):
        monkeypatch.chdir(tmp_path)
        assert module in self.dist(module).py_modules

    # Spaces, leading dashes and "-stubs" suffixes are rejected.
    @pytest.mark.parametrize("module", ["pip run", "-pip-run", "pip-run-stubs"])
    def test_invalid_module_name(self, tmp_path, monkeypatch, module):
        monkeypatch.chdir(tmp_path)
        with pytest.raises(ValueError, match="py-modules"):
            self.dist(module).py_modules
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
class TestExtModules:
    def test_pyproject_sets_attribute(self, tmp_path, monkeypatch):
        # ext-modules in [tool.setuptools] is an experimental feature: it
        # must populate dist.ext_modules, but only under a warning.
        monkeypatch.chdir(tmp_path)
        pyproject = Path("pyproject.toml")
        toml_config = """
        [project]
        name = "test"
        version = "42.0"
        [tool.setuptools]
        ext-modules = [
          {name = "my.ext", sources = ["hello.c", "world.c"]}
        ]
        """
        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
        with pytest.warns(pyprojecttoml._ExperimentalConfiguration):
            dist = pyprojecttoml.apply_configuration(Distribution({}), pyproject)
        assert len(dist.ext_modules) == 1
        assert dist.ext_modules[0].name == "my.ext"
        assert set(dist.ext_modules[0].sources) == {"hello.c", "world.c"}
|
| 347 |
+
|
| 348 |
+
|
| 349 |
+
class TestDeprecatedFields:
    def test_namespace_packages(self, tmp_path):
        # namespace-packages was removed from [tool.setuptools]; using it
        # must raise RemovedConfigError rather than being silently ignored.
        pyproject = tmp_path / "pyproject.toml"
        config = """
        [project]
        name = "myproj"
        version = "42"
        [tool.setuptools]
        namespace-packages = ["myproj.pkg"]
        """
        pyproject.write_text(cleandoc(config), encoding="utf-8")
        with pytest.raises(RemovedConfigError, match="namespace-packages"):
            pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
class TestPresetField:
    # Tests for the interaction between values preset on the Distribution
    # (e.g. via setup.py) and the ``dynamic`` list in pyproject.toml.

    def pyproject(self, tmp_path, dynamic, extra_content=""):
        # Write a minimal pyproject with the given ``dynamic`` list; a static
        # version is added unless "version" itself is declared dynamic.
        content = f"[project]\nname = 'proj'\ndynamic = {dynamic!r}\n"
        if "version" not in dynamic:
            content += "version = '42'\n"
        file = tmp_path / "pyproject.toml"
        file.write_text(content + extra_content, encoding="utf-8")
        return file

    @pytest.mark.parametrize(
        ("attr", "field", "value"),
        [
            ("classifiers", "classifiers", ["Private :: Classifier"]),
            ("entry_points", "scripts", {"console_scripts": ["foobar=foobar:main"]}),
            ("entry_points", "gui-scripts", {"gui_scripts": ["bazquux=bazquux:main"]}),
            pytest.param(
                *("install_requires", "dependencies", ["six"]),
                marks=[
                    pytest.mark.filterwarnings("ignore:.*install_requires. overwritten")
                ],
            ),
        ],
    )
    def test_not_listed_in_dynamic(self, tmp_path, attr, field, value):
        """Setuptools cannot set a field if not listed in ``dynamic``"""
        pyproject = self.pyproject(tmp_path, [])
        dist = makedist(tmp_path, **{attr: value})
        msg = re.compile(f"defined outside of `pyproject.toml`:.*{field}", re.S)
        with pytest.warns(_MissingDynamic, match=msg):
            dist = pyprojecttoml.apply_configuration(dist, pyproject)

        # The preset value is discarded, not merged.
        dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist)
        assert not dist_value

    @pytest.mark.parametrize(
        ("attr", "field", "value"),
        [
            ("install_requires", "dependencies", []),
            ("extras_require", "optional-dependencies", {}),
            ("install_requires", "dependencies", ["six"]),
            ("classifiers", "classifiers", ["Private :: Classifier"]),
        ],
    )
    def test_listed_in_dynamic(self, tmp_path, attr, field, value):
        # When the field IS declared dynamic, the preset value is kept as-is.
        pyproject = self.pyproject(tmp_path, [field])
        dist = makedist(tmp_path, **{attr: value})
        dist = pyprojecttoml.apply_configuration(dist, pyproject)
        dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist)
        assert dist_value == value

    def test_warning_overwritten_dependencies(self, tmp_path):
        # Static dependencies in pyproject.toml replace install_requires
        # preset elsewhere, but the user must be warned about it.
        src = "[project]\nname='pkg'\nversion='0.1'\ndependencies=['click']\n"
        pyproject = tmp_path / "pyproject.toml"
        pyproject.write_text(src, encoding="utf-8")
        dist = makedist(tmp_path, install_requires=["wheel"])
        with pytest.warns(match="`install_requires` overwritten"):
            dist = pyprojecttoml.apply_configuration(dist, pyproject)
        assert "wheel" not in dist.install_requires

    def test_optional_dependencies_dont_remove_env_markers(self, tmp_path):
        """
        Internally setuptools converts dependencies with markers to "extras".
        If ``install_requires`` is given by ``setup.py``, we have to ensure that
        applying ``optional-dependencies`` does not overwrite the mandatory
        dependencies with markers (see #3204).
        """
        # If setuptools replace its internal mechanism that uses `requires.txt`
        # this test has to be rewritten to adapt accordingly
        extra = "\n[project.optional-dependencies]\nfoo = ['bar>1']\n"
        pyproject = self.pyproject(tmp_path, ["dependencies"], extra)
        install_req = ['importlib-resources (>=3.0.0) ; python_version < "3.7"']
        dist = makedist(tmp_path, install_requires=install_req)
        dist = pyprojecttoml.apply_configuration(dist, pyproject)
        assert "foo" in dist.extras_require
        egg_info = dist.get_command_obj("egg_info")
        write_requirements(egg_info, tmp_path, tmp_path / "requires.txt")
        reqs = (tmp_path / "requires.txt").read_text(encoding="utf-8")
        assert "importlib-resources" in reqs
        assert "bar" in reqs
        assert ':python_version < "3.7"' in reqs

    @pytest.mark.parametrize(
        ("field", "group"),
        [("scripts", "console_scripts"), ("gui-scripts", "gui_scripts")],
    )
    @pytest.mark.filterwarnings("error")
    def test_scripts_dont_require_dynamic_entry_points(self, tmp_path, field, group):
        # Issue 3862
        pyproject = self.pyproject(tmp_path, [field])
        dist = makedist(tmp_path, entry_points={group: ["foobar=foobar:main"]})
        dist = pyprojecttoml.apply_configuration(dist, pyproject)
        assert group in dist.entry_points
|
| 456 |
+
|
| 457 |
+
|
| 458 |
+
class TestMeta:
|
| 459 |
+
def test_example_file_in_sdist(self, setuptools_sdist):
|
| 460 |
+
"""Meta test to ensure tests can run from sdist"""
|
| 461 |
+
with tarfile.open(setuptools_sdist) as tar:
|
| 462 |
+
assert any(name.endswith(EXAMPLES_FILE) for name in tar.getnames())
|
| 463 |
+
|
| 464 |
+
|
| 465 |
+
class TestInteropCommandLineParsing:
|
| 466 |
+
def test_version(self, tmp_path, monkeypatch, capsys):
|
| 467 |
+
# See pypa/setuptools#4047
|
| 468 |
+
# This test can be removed once the CLI interface of setup.py is removed
|
| 469 |
+
monkeypatch.chdir(tmp_path)
|
| 470 |
+
toml_config = """
|
| 471 |
+
[project]
|
| 472 |
+
name = "test"
|
| 473 |
+
version = "42.0"
|
| 474 |
+
"""
|
| 475 |
+
pyproject = Path(tmp_path, "pyproject.toml")
|
| 476 |
+
pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
|
| 477 |
+
opts = {"script_args": ["--version"]}
|
| 478 |
+
dist = pyprojecttoml.apply_configuration(Distribution(opts), pyproject)
|
| 479 |
+
dist.parse_command_line() # <-- there should be no exception here.
|
| 480 |
+
captured = capsys.readouterr()
|
| 481 |
+
assert "42.0" in captured.out
|
| 482 |
+
|
| 483 |
+
|
| 484 |
+
class TestStaticConfig:
|
| 485 |
+
def test_mark_static_fields(self, tmp_path, monkeypatch):
|
| 486 |
+
monkeypatch.chdir(tmp_path)
|
| 487 |
+
toml_config = """
|
| 488 |
+
[project]
|
| 489 |
+
name = "test"
|
| 490 |
+
version = "42.0"
|
| 491 |
+
dependencies = ["hello"]
|
| 492 |
+
keywords = ["world"]
|
| 493 |
+
classifiers = ["private :: hello world"]
|
| 494 |
+
[tool.setuptools]
|
| 495 |
+
obsoletes = ["abcd"]
|
| 496 |
+
provides = ["abcd"]
|
| 497 |
+
platforms = ["abcd"]
|
| 498 |
+
"""
|
| 499 |
+
pyproject = Path(tmp_path, "pyproject.toml")
|
| 500 |
+
pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
|
| 501 |
+
dist = pyprojecttoml.apply_configuration(Distribution({}), pyproject)
|
| 502 |
+
assert is_static(dist.install_requires)
|
| 503 |
+
assert is_static(dist.metadata.keywords)
|
| 504 |
+
assert is_static(dist.metadata.classifiers)
|
| 505 |
+
assert is_static(dist.metadata.obsoletes)
|
| 506 |
+
assert is_static(dist.metadata.provides)
|
| 507 |
+
assert is_static(dist.metadata.platforms)
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
# --- Auxiliary Functions ---
|
| 511 |
+
|
| 512 |
+
|
| 513 |
+
def core_metadata(dist) -> str:
|
| 514 |
+
with io.StringIO() as buffer:
|
| 515 |
+
dist.metadata.write_pkg_file(buffer)
|
| 516 |
+
pkg_file_txt = buffer.getvalue()
|
| 517 |
+
|
| 518 |
+
# Make sure core metadata is valid
|
| 519 |
+
Metadata.from_email(pkg_file_txt, validate=True) # can raise exceptions
|
| 520 |
+
|
| 521 |
+
skip_prefixes: tuple[str, ...] = ()
|
| 522 |
+
skip_lines = set()
|
| 523 |
+
# ---- DIFF NORMALISATION ----
|
| 524 |
+
# PEP 621 is very particular about author/maintainer metadata conversion, so skip
|
| 525 |
+
skip_prefixes += ("Author:", "Author-email:", "Maintainer:", "Maintainer-email:")
|
| 526 |
+
# May be redundant with Home-page
|
| 527 |
+
skip_prefixes += ("Project-URL: Homepage,", "Home-page:")
|
| 528 |
+
# May be missing in original (relying on default) but backfilled in the TOML
|
| 529 |
+
skip_prefixes += ("Description-Content-Type:",)
|
| 530 |
+
# Remove empty lines
|
| 531 |
+
skip_lines.add("")
|
| 532 |
+
|
| 533 |
+
result = []
|
| 534 |
+
for line in pkg_file_txt.splitlines():
|
| 535 |
+
if line.startswith(skip_prefixes) or line in skip_lines:
|
| 536 |
+
continue
|
| 537 |
+
result.append(line + "\n")
|
| 538 |
+
|
| 539 |
+
return "".join(result)
|
videollama2/lib/python3.10/site-packages/setuptools/tests/config/test_expand.py
ADDED
|
@@ -0,0 +1,247 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
from pathlib import Path
|
| 4 |
+
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
from setuptools._static import is_static
|
| 8 |
+
from setuptools.config import expand
|
| 9 |
+
from setuptools.discovery import find_package_path
|
| 10 |
+
|
| 11 |
+
from distutils.errors import DistutilsOptionError
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def write_files(files, root_dir):
|
| 15 |
+
for file, content in files.items():
|
| 16 |
+
path = root_dir / file
|
| 17 |
+
path.parent.mkdir(exist_ok=True, parents=True)
|
| 18 |
+
path.write_text(content, encoding="utf-8")
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def test_glob_relative(tmp_path, monkeypatch):
|
| 22 |
+
files = {
|
| 23 |
+
"dir1/dir2/dir3/file1.txt",
|
| 24 |
+
"dir1/dir2/file2.txt",
|
| 25 |
+
"dir1/file3.txt",
|
| 26 |
+
"a.ini",
|
| 27 |
+
"b.ini",
|
| 28 |
+
"dir1/c.ini",
|
| 29 |
+
"dir1/dir2/a.ini",
|
| 30 |
+
}
|
| 31 |
+
|
| 32 |
+
write_files({k: "" for k in files}, tmp_path)
|
| 33 |
+
patterns = ["**/*.txt", "[ab].*", "**/[ac].ini"]
|
| 34 |
+
monkeypatch.chdir(tmp_path)
|
| 35 |
+
assert set(expand.glob_relative(patterns)) == files
|
| 36 |
+
# Make sure the same APIs work outside cwd
|
| 37 |
+
assert set(expand.glob_relative(patterns, tmp_path)) == files
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def test_read_files(tmp_path, monkeypatch):
|
| 41 |
+
dir_ = tmp_path / "dir_"
|
| 42 |
+
(tmp_path / "_dir").mkdir(exist_ok=True)
|
| 43 |
+
(tmp_path / "a.txt").touch()
|
| 44 |
+
files = {"a.txt": "a", "dir1/b.txt": "b", "dir1/dir2/c.txt": "c"}
|
| 45 |
+
write_files(files, dir_)
|
| 46 |
+
|
| 47 |
+
secrets = Path(str(dir_) + "secrets")
|
| 48 |
+
secrets.mkdir(exist_ok=True)
|
| 49 |
+
write_files({"secrets.txt": "secret keys"}, secrets)
|
| 50 |
+
|
| 51 |
+
with monkeypatch.context() as m:
|
| 52 |
+
m.chdir(dir_)
|
| 53 |
+
assert expand.read_files(list(files)) == "a\nb\nc"
|
| 54 |
+
|
| 55 |
+
cannot_access_msg = r"Cannot access '.*\.\..a\.txt'"
|
| 56 |
+
with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
|
| 57 |
+
expand.read_files(["../a.txt"])
|
| 58 |
+
|
| 59 |
+
cannot_access_secrets_msg = r"Cannot access '.*secrets\.txt'"
|
| 60 |
+
with pytest.raises(DistutilsOptionError, match=cannot_access_secrets_msg):
|
| 61 |
+
expand.read_files(["../dir_secrets/secrets.txt"])
|
| 62 |
+
|
| 63 |
+
# Make sure the same APIs work outside cwd
|
| 64 |
+
assert expand.read_files(list(files), dir_) == "a\nb\nc"
|
| 65 |
+
with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
|
| 66 |
+
expand.read_files(["../a.txt"], dir_)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class TestReadAttr:
|
| 70 |
+
@pytest.mark.parametrize(
|
| 71 |
+
"example",
|
| 72 |
+
[
|
| 73 |
+
# No cookie means UTF-8:
|
| 74 |
+
b"__version__ = '\xc3\xa9'\nraise SystemExit(1)\n",
|
| 75 |
+
# If a cookie is present, honor it:
|
| 76 |
+
b"# -*- coding: utf-8 -*-\n__version__ = '\xc3\xa9'\nraise SystemExit(1)\n",
|
| 77 |
+
b"# -*- coding: latin1 -*-\n__version__ = '\xe9'\nraise SystemExit(1)\n",
|
| 78 |
+
],
|
| 79 |
+
)
|
| 80 |
+
def test_read_attr_encoding_cookie(self, example, tmp_path):
|
| 81 |
+
(tmp_path / "mod.py").write_bytes(example)
|
| 82 |
+
assert expand.read_attr('mod.__version__', root_dir=tmp_path) == 'é'
|
| 83 |
+
|
| 84 |
+
def test_read_attr(self, tmp_path, monkeypatch):
|
| 85 |
+
files = {
|
| 86 |
+
"pkg/__init__.py": "",
|
| 87 |
+
"pkg/sub/__init__.py": "VERSION = '0.1.1'",
|
| 88 |
+
"pkg/sub/mod.py": (
|
| 89 |
+
"VALUES = {'a': 0, 'b': {42}, 'c': (0, 1, 1)}\nraise SystemExit(1)"
|
| 90 |
+
),
|
| 91 |
+
}
|
| 92 |
+
write_files(files, tmp_path)
|
| 93 |
+
|
| 94 |
+
with monkeypatch.context() as m:
|
| 95 |
+
m.chdir(tmp_path)
|
| 96 |
+
# Make sure it can read the attr statically without evaluating the module
|
| 97 |
+
version = expand.read_attr('pkg.sub.VERSION')
|
| 98 |
+
values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'})
|
| 99 |
+
|
| 100 |
+
assert version == '0.1.1'
|
| 101 |
+
assert is_static(values)
|
| 102 |
+
|
| 103 |
+
assert values['a'] == 0
|
| 104 |
+
assert values['b'] == {42}
|
| 105 |
+
assert is_static(values)
|
| 106 |
+
|
| 107 |
+
# Make sure the same APIs work outside cwd
|
| 108 |
+
assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1'
|
| 109 |
+
values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}, tmp_path)
|
| 110 |
+
assert values['c'] == (0, 1, 1)
|
| 111 |
+
|
| 112 |
+
@pytest.mark.parametrize(
|
| 113 |
+
"example",
|
| 114 |
+
[
|
| 115 |
+
"VERSION: str\nVERSION = '0.1.1'\nraise SystemExit(1)\n",
|
| 116 |
+
"VERSION: str = '0.1.1'\nraise SystemExit(1)\n",
|
| 117 |
+
],
|
| 118 |
+
)
|
| 119 |
+
def test_read_annotated_attr(self, tmp_path, example):
|
| 120 |
+
files = {
|
| 121 |
+
"pkg/__init__.py": "",
|
| 122 |
+
"pkg/sub/__init__.py": example,
|
| 123 |
+
}
|
| 124 |
+
write_files(files, tmp_path)
|
| 125 |
+
# Make sure this attribute can be read statically
|
| 126 |
+
version = expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path)
|
| 127 |
+
assert version == '0.1.1'
|
| 128 |
+
assert is_static(version)
|
| 129 |
+
|
| 130 |
+
@pytest.mark.parametrize(
|
| 131 |
+
"example",
|
| 132 |
+
[
|
| 133 |
+
"VERSION = (lambda: '0.1.1')()\n",
|
| 134 |
+
"def fn(): return '0.1.1'\nVERSION = fn()\n",
|
| 135 |
+
"VERSION: str = (lambda: '0.1.1')()\n",
|
| 136 |
+
],
|
| 137 |
+
)
|
| 138 |
+
def test_read_dynamic_attr(self, tmp_path, monkeypatch, example):
|
| 139 |
+
files = {
|
| 140 |
+
"pkg/__init__.py": "",
|
| 141 |
+
"pkg/sub/__init__.py": example,
|
| 142 |
+
}
|
| 143 |
+
write_files(files, tmp_path)
|
| 144 |
+
monkeypatch.chdir(tmp_path)
|
| 145 |
+
version = expand.read_attr('pkg.sub.VERSION')
|
| 146 |
+
assert version == '0.1.1'
|
| 147 |
+
assert not is_static(version)
|
| 148 |
+
|
| 149 |
+
def test_import_order(self, tmp_path):
|
| 150 |
+
"""
|
| 151 |
+
Sometimes the import machinery will import the parent package of a nested
|
| 152 |
+
module, which triggers side-effects and might create problems (see issue #3176)
|
| 153 |
+
|
| 154 |
+
``read_attr`` should bypass these limitations by resolving modules statically
|
| 155 |
+
(via ast.literal_eval).
|
| 156 |
+
"""
|
| 157 |
+
files = {
|
| 158 |
+
"src/pkg/__init__.py": "from .main import func\nfrom .about import version",
|
| 159 |
+
"src/pkg/main.py": "import super_complicated_dep\ndef func(): return 42",
|
| 160 |
+
"src/pkg/about.py": "version = '42'",
|
| 161 |
+
}
|
| 162 |
+
write_files(files, tmp_path)
|
| 163 |
+
attr_desc = "pkg.about.version"
|
| 164 |
+
package_dir = {"": "src"}
|
| 165 |
+
# `import super_complicated_dep` should not run, otherwise the build fails
|
| 166 |
+
assert expand.read_attr(attr_desc, package_dir, tmp_path) == "42"
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
@pytest.mark.parametrize(
|
| 170 |
+
("package_dir", "file", "module", "return_value"),
|
| 171 |
+
[
|
| 172 |
+
({"": "src"}, "src/pkg/main.py", "pkg.main", 42),
|
| 173 |
+
({"pkg": "lib"}, "lib/main.py", "pkg.main", 13),
|
| 174 |
+
({}, "single_module.py", "single_module", 70),
|
| 175 |
+
({}, "flat_layout/pkg.py", "flat_layout.pkg", 836),
|
| 176 |
+
],
|
| 177 |
+
)
|
| 178 |
+
def test_resolve_class(monkeypatch, tmp_path, package_dir, file, module, return_value):
|
| 179 |
+
monkeypatch.setattr(sys, "modules", {}) # reproducibility
|
| 180 |
+
files = {file: f"class Custom:\n def testing(self): return {return_value}"}
|
| 181 |
+
write_files(files, tmp_path)
|
| 182 |
+
cls = expand.resolve_class(f"{module}.Custom", package_dir, tmp_path)
|
| 183 |
+
assert cls().testing() == return_value
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
@pytest.mark.parametrize(
|
| 187 |
+
("args", "pkgs"),
|
| 188 |
+
[
|
| 189 |
+
({"where": ["."], "namespaces": False}, {"pkg", "other"}),
|
| 190 |
+
({"where": [".", "dir1"], "namespaces": False}, {"pkg", "other", "dir2"}),
|
| 191 |
+
({"namespaces": True}, {"pkg", "other", "dir1", "dir1.dir2"}),
|
| 192 |
+
({}, {"pkg", "other", "dir1", "dir1.dir2"}), # default value for `namespaces`
|
| 193 |
+
],
|
| 194 |
+
)
|
| 195 |
+
def test_find_packages(tmp_path, args, pkgs):
|
| 196 |
+
files = {
|
| 197 |
+
"pkg/__init__.py",
|
| 198 |
+
"other/__init__.py",
|
| 199 |
+
"dir1/dir2/__init__.py",
|
| 200 |
+
}
|
| 201 |
+
write_files({k: "" for k in files}, tmp_path)
|
| 202 |
+
|
| 203 |
+
package_dir = {}
|
| 204 |
+
kwargs = {"root_dir": tmp_path, "fill_package_dir": package_dir, **args}
|
| 205 |
+
where = kwargs.get("where", ["."])
|
| 206 |
+
assert set(expand.find_packages(**kwargs)) == pkgs
|
| 207 |
+
for pkg in pkgs:
|
| 208 |
+
pkg_path = find_package_path(pkg, package_dir, tmp_path)
|
| 209 |
+
assert os.path.exists(pkg_path)
|
| 210 |
+
|
| 211 |
+
# Make sure the same APIs work outside cwd
|
| 212 |
+
where = [
|
| 213 |
+
str((tmp_path / p).resolve()).replace(os.sep, "/") # ensure posix-style paths
|
| 214 |
+
for p in args.pop("where", ["."])
|
| 215 |
+
]
|
| 216 |
+
|
| 217 |
+
assert set(expand.find_packages(where=where, **args)) == pkgs
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
@pytest.mark.parametrize(
|
| 221 |
+
("files", "where", "expected_package_dir"),
|
| 222 |
+
[
|
| 223 |
+
(["pkg1/__init__.py", "pkg1/other.py"], ["."], {}),
|
| 224 |
+
(["pkg1/__init__.py", "pkg2/__init__.py"], ["."], {}),
|
| 225 |
+
(["src/pkg1/__init__.py", "src/pkg1/other.py"], ["src"], {"": "src"}),
|
| 226 |
+
(["src/pkg1/__init__.py", "src/pkg2/__init__.py"], ["src"], {"": "src"}),
|
| 227 |
+
(
|
| 228 |
+
["src1/pkg1/__init__.py", "src2/pkg2/__init__.py"],
|
| 229 |
+
["src1", "src2"],
|
| 230 |
+
{"pkg1": "src1/pkg1", "pkg2": "src2/pkg2"},
|
| 231 |
+
),
|
| 232 |
+
(
|
| 233 |
+
["src/pkg1/__init__.py", "pkg2/__init__.py"],
|
| 234 |
+
["src", "."],
|
| 235 |
+
{"pkg1": "src/pkg1"},
|
| 236 |
+
),
|
| 237 |
+
],
|
| 238 |
+
)
|
| 239 |
+
def test_fill_package_dir(tmp_path, files, where, expected_package_dir):
|
| 240 |
+
write_files({k: "" for k in files}, tmp_path)
|
| 241 |
+
pkg_dir = {}
|
| 242 |
+
kwargs = {"root_dir": tmp_path, "fill_package_dir": pkg_dir, "namespaces": False}
|
| 243 |
+
pkgs = expand.find_packages(where=where, **kwargs)
|
| 244 |
+
assert set(pkg_dir.items()) == set(expected_package_dir.items())
|
| 245 |
+
for pkg in pkgs:
|
| 246 |
+
pkg_path = find_package_path(pkg, pkg_dir, tmp_path)
|
| 247 |
+
assert os.path.exists(pkg_path)
|
videollama2/lib/python3.10/site-packages/setuptools/tests/config/test_pyprojecttoml.py
ADDED
|
@@ -0,0 +1,396 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
from configparser import ConfigParser
|
| 3 |
+
from inspect import cleandoc
|
| 4 |
+
|
| 5 |
+
import jaraco.path
|
| 6 |
+
import pytest
|
| 7 |
+
import tomli_w
|
| 8 |
+
from path import Path
|
| 9 |
+
|
| 10 |
+
import setuptools # noqa: F401 # force distutils.core to be patched
|
| 11 |
+
from setuptools.config.pyprojecttoml import (
|
| 12 |
+
_ToolsTypoInMetadata,
|
| 13 |
+
apply_configuration,
|
| 14 |
+
expand_configuration,
|
| 15 |
+
read_configuration,
|
| 16 |
+
validate,
|
| 17 |
+
)
|
| 18 |
+
from setuptools.dist import Distribution
|
| 19 |
+
from setuptools.errors import OptionError
|
| 20 |
+
|
| 21 |
+
import distutils.core
|
| 22 |
+
|
| 23 |
+
EXAMPLE = """
|
| 24 |
+
[project]
|
| 25 |
+
name = "myproj"
|
| 26 |
+
keywords = ["some", "key", "words"]
|
| 27 |
+
dynamic = ["version", "readme"]
|
| 28 |
+
requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
| 29 |
+
dependencies = [
|
| 30 |
+
'importlib-metadata>=0.12;python_version<"3.8"',
|
| 31 |
+
'importlib-resources>=1.0;python_version<"3.7"',
|
| 32 |
+
'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"',
|
| 33 |
+
]
|
| 34 |
+
|
| 35 |
+
[project.optional-dependencies]
|
| 36 |
+
docs = [
|
| 37 |
+
"sphinx>=3",
|
| 38 |
+
"sphinx-argparse>=0.2.5",
|
| 39 |
+
"sphinx-rtd-theme>=0.4.3",
|
| 40 |
+
]
|
| 41 |
+
testing = [
|
| 42 |
+
"pytest>=1",
|
| 43 |
+
"coverage>=3,<5",
|
| 44 |
+
]
|
| 45 |
+
|
| 46 |
+
[project.scripts]
|
| 47 |
+
exec = "pkg.__main__:exec"
|
| 48 |
+
|
| 49 |
+
[build-system]
|
| 50 |
+
requires = ["setuptools", "wheel"]
|
| 51 |
+
build-backend = "setuptools.build_meta"
|
| 52 |
+
|
| 53 |
+
[tool.setuptools]
|
| 54 |
+
package-dir = {"" = "src"}
|
| 55 |
+
zip-safe = true
|
| 56 |
+
platforms = ["any"]
|
| 57 |
+
|
| 58 |
+
[tool.setuptools.packages.find]
|
| 59 |
+
where = ["src"]
|
| 60 |
+
|
| 61 |
+
[tool.setuptools.cmdclass]
|
| 62 |
+
sdist = "pkg.mod.CustomSdist"
|
| 63 |
+
|
| 64 |
+
[tool.setuptools.dynamic.version]
|
| 65 |
+
attr = "pkg.__version__.VERSION"
|
| 66 |
+
|
| 67 |
+
[tool.setuptools.dynamic.readme]
|
| 68 |
+
file = ["README.md"]
|
| 69 |
+
content-type = "text/markdown"
|
| 70 |
+
|
| 71 |
+
[tool.setuptools.package-data]
|
| 72 |
+
"*" = ["*.txt"]
|
| 73 |
+
|
| 74 |
+
[tool.setuptools.data-files]
|
| 75 |
+
"data" = ["_files/*.txt"]
|
| 76 |
+
|
| 77 |
+
[tool.distutils.sdist]
|
| 78 |
+
formats = "gztar"
|
| 79 |
+
|
| 80 |
+
[tool.distutils.bdist_wheel]
|
| 81 |
+
universal = true
|
| 82 |
+
"""
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def create_example(path, pkg_root):
|
| 86 |
+
files = {
|
| 87 |
+
"pyproject.toml": EXAMPLE,
|
| 88 |
+
"README.md": "hello world",
|
| 89 |
+
"_files": {
|
| 90 |
+
"file.txt": "",
|
| 91 |
+
},
|
| 92 |
+
}
|
| 93 |
+
packages = {
|
| 94 |
+
"pkg": {
|
| 95 |
+
"__init__.py": "",
|
| 96 |
+
"mod.py": "class CustomSdist: pass",
|
| 97 |
+
"__version__.py": "VERSION = (3, 10)",
|
| 98 |
+
"__main__.py": "def exec(): print('hello')",
|
| 99 |
+
},
|
| 100 |
+
}
|
| 101 |
+
|
| 102 |
+
assert pkg_root # Meta-test: cannot be empty string.
|
| 103 |
+
|
| 104 |
+
if pkg_root == ".":
|
| 105 |
+
files = {**files, **packages}
|
| 106 |
+
# skip other files: flat-layout will raise error for multi-package dist
|
| 107 |
+
else:
|
| 108 |
+
# Use this opportunity to ensure namespaces are discovered
|
| 109 |
+
files[pkg_root] = {**packages, "other": {"nested": {"__init__.py": ""}}}
|
| 110 |
+
|
| 111 |
+
jaraco.path.build(files, prefix=path)
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def verify_example(config, path, pkg_root):
|
| 115 |
+
pyproject = path / "pyproject.toml"
|
| 116 |
+
pyproject.write_text(tomli_w.dumps(config), encoding="utf-8")
|
| 117 |
+
expanded = expand_configuration(config, path)
|
| 118 |
+
expanded_project = expanded["project"]
|
| 119 |
+
assert read_configuration(pyproject, expand=True) == expanded
|
| 120 |
+
assert expanded_project["version"] == "3.10"
|
| 121 |
+
assert expanded_project["readme"]["text"] == "hello world"
|
| 122 |
+
assert "packages" in expanded["tool"]["setuptools"]
|
| 123 |
+
if pkg_root == ".":
|
| 124 |
+
# Auto-discovery will raise error for multi-package dist
|
| 125 |
+
assert set(expanded["tool"]["setuptools"]["packages"]) == {"pkg"}
|
| 126 |
+
else:
|
| 127 |
+
assert set(expanded["tool"]["setuptools"]["packages"]) == {
|
| 128 |
+
"pkg",
|
| 129 |
+
"other",
|
| 130 |
+
"other.nested",
|
| 131 |
+
}
|
| 132 |
+
assert expanded["tool"]["setuptools"]["include-package-data"] is True
|
| 133 |
+
assert "" in expanded["tool"]["setuptools"]["package-data"]
|
| 134 |
+
assert "*" not in expanded["tool"]["setuptools"]["package-data"]
|
| 135 |
+
assert expanded["tool"]["setuptools"]["data-files"] == [
|
| 136 |
+
("data", ["_files/file.txt"])
|
| 137 |
+
]
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def test_read_configuration(tmp_path):
|
| 141 |
+
create_example(tmp_path, "src")
|
| 142 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 143 |
+
|
| 144 |
+
config = read_configuration(pyproject, expand=False)
|
| 145 |
+
assert config["project"].get("version") is None
|
| 146 |
+
assert config["project"].get("readme") is None
|
| 147 |
+
|
| 148 |
+
verify_example(config, tmp_path, "src")
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
@pytest.mark.parametrize(
|
| 152 |
+
("pkg_root", "opts"),
|
| 153 |
+
[
|
| 154 |
+
(".", {}),
|
| 155 |
+
("src", {}),
|
| 156 |
+
("lib", {"packages": {"find": {"where": ["lib"]}}}),
|
| 157 |
+
],
|
| 158 |
+
)
|
| 159 |
+
def test_discovered_package_dir_with_attr_directive_in_config(tmp_path, pkg_root, opts):
|
| 160 |
+
create_example(tmp_path, pkg_root)
|
| 161 |
+
|
| 162 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 163 |
+
|
| 164 |
+
config = read_configuration(pyproject, expand=False)
|
| 165 |
+
assert config["project"].get("version") is None
|
| 166 |
+
assert config["project"].get("readme") is None
|
| 167 |
+
config["tool"]["setuptools"].pop("packages", None)
|
| 168 |
+
config["tool"]["setuptools"].pop("package-dir", None)
|
| 169 |
+
|
| 170 |
+
config["tool"]["setuptools"].update(opts)
|
| 171 |
+
verify_example(config, tmp_path, pkg_root)
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
ENTRY_POINTS = {
|
| 175 |
+
"console_scripts": {"a": "mod.a:func"},
|
| 176 |
+
"gui_scripts": {"b": "mod.b:func"},
|
| 177 |
+
"other": {"c": "mod.c:func [extra]"},
|
| 178 |
+
}
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
class TestEntryPoints:
|
| 182 |
+
def write_entry_points(self, tmp_path):
|
| 183 |
+
entry_points = ConfigParser()
|
| 184 |
+
entry_points.read_dict(ENTRY_POINTS)
|
| 185 |
+
with open(tmp_path / "entry-points.txt", "w", encoding="utf-8") as f:
|
| 186 |
+
entry_points.write(f)
|
| 187 |
+
|
| 188 |
+
def pyproject(self, dynamic=None):
|
| 189 |
+
project = {"dynamic": dynamic or ["scripts", "gui-scripts", "entry-points"]}
|
| 190 |
+
tool = {"dynamic": {"entry-points": {"file": "entry-points.txt"}}}
|
| 191 |
+
return {"project": project, "tool": {"setuptools": tool}}
|
| 192 |
+
|
| 193 |
+
def test_all_listed_in_dynamic(self, tmp_path):
|
| 194 |
+
self.write_entry_points(tmp_path)
|
| 195 |
+
expanded = expand_configuration(self.pyproject(), tmp_path)
|
| 196 |
+
expanded_project = expanded["project"]
|
| 197 |
+
assert len(expanded_project["scripts"]) == 1
|
| 198 |
+
assert expanded_project["scripts"]["a"] == "mod.a:func"
|
| 199 |
+
assert len(expanded_project["gui-scripts"]) == 1
|
| 200 |
+
assert expanded_project["gui-scripts"]["b"] == "mod.b:func"
|
| 201 |
+
assert len(expanded_project["entry-points"]) == 1
|
| 202 |
+
assert expanded_project["entry-points"]["other"]["c"] == "mod.c:func [extra]"
|
| 203 |
+
|
| 204 |
+
@pytest.mark.parametrize("missing_dynamic", ("scripts", "gui-scripts"))
|
| 205 |
+
def test_scripts_not_listed_in_dynamic(self, tmp_path, missing_dynamic):
|
| 206 |
+
self.write_entry_points(tmp_path)
|
| 207 |
+
dynamic = {"scripts", "gui-scripts", "entry-points"} - {missing_dynamic}
|
| 208 |
+
|
| 209 |
+
msg = f"defined outside of `pyproject.toml`:.*{missing_dynamic}"
|
| 210 |
+
with pytest.raises(OptionError, match=re.compile(msg, re.S)):
|
| 211 |
+
expand_configuration(self.pyproject(dynamic), tmp_path)
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
class TestClassifiers:
|
| 215 |
+
def test_dynamic(self, tmp_path):
|
| 216 |
+
# Let's create a project example that has dynamic classifiers
|
| 217 |
+
# coming from a txt file.
|
| 218 |
+
create_example(tmp_path, "src")
|
| 219 |
+
classifiers = cleandoc(
|
| 220 |
+
"""
|
| 221 |
+
Framework :: Flask
|
| 222 |
+
Programming Language :: Haskell
|
| 223 |
+
"""
|
| 224 |
+
)
|
| 225 |
+
(tmp_path / "classifiers.txt").write_text(classifiers, encoding="utf-8")
|
| 226 |
+
|
| 227 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 228 |
+
config = read_configuration(pyproject, expand=False)
|
| 229 |
+
dynamic = config["project"]["dynamic"]
|
| 230 |
+
config["project"]["dynamic"] = list({*dynamic, "classifiers"})
|
| 231 |
+
dynamic_config = config["tool"]["setuptools"]["dynamic"]
|
| 232 |
+
dynamic_config["classifiers"] = {"file": "classifiers.txt"}
|
| 233 |
+
|
| 234 |
+
# When the configuration is expanded,
|
| 235 |
+
# each line of the file should be an different classifier.
|
| 236 |
+
validate(config, pyproject)
|
| 237 |
+
expanded = expand_configuration(config, tmp_path)
|
| 238 |
+
|
| 239 |
+
assert set(expanded["project"]["classifiers"]) == {
|
| 240 |
+
"Framework :: Flask",
|
| 241 |
+
"Programming Language :: Haskell",
|
| 242 |
+
}
|
| 243 |
+
|
| 244 |
+
def test_dynamic_without_config(self, tmp_path):
|
| 245 |
+
config = """
|
| 246 |
+
[project]
|
| 247 |
+
name = "myproj"
|
| 248 |
+
version = '42'
|
| 249 |
+
dynamic = ["classifiers"]
|
| 250 |
+
"""
|
| 251 |
+
|
| 252 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 253 |
+
pyproject.write_text(cleandoc(config), encoding="utf-8")
|
| 254 |
+
with pytest.raises(OptionError, match="No configuration .* .classifiers."):
|
| 255 |
+
read_configuration(pyproject)
|
| 256 |
+
|
| 257 |
+
def test_dynamic_readme_from_setup_script_args(self, tmp_path):
|
| 258 |
+
config = """
|
| 259 |
+
[project]
|
| 260 |
+
name = "myproj"
|
| 261 |
+
version = '42'
|
| 262 |
+
dynamic = ["readme"]
|
| 263 |
+
"""
|
| 264 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 265 |
+
pyproject.write_text(cleandoc(config), encoding="utf-8")
|
| 266 |
+
dist = Distribution(attrs={"long_description": "42"})
|
| 267 |
+
# No error should occur because of missing `readme`
|
| 268 |
+
dist = apply_configuration(dist, pyproject)
|
| 269 |
+
assert dist.metadata.long_description == "42"
|
| 270 |
+
|
| 271 |
+
def test_dynamic_without_file(self, tmp_path):
|
| 272 |
+
config = """
|
| 273 |
+
[project]
|
| 274 |
+
name = "myproj"
|
| 275 |
+
version = '42'
|
| 276 |
+
dynamic = ["classifiers"]
|
| 277 |
+
|
| 278 |
+
[tool.setuptools.dynamic]
|
| 279 |
+
classifiers = {file = ["classifiers.txt"]}
|
| 280 |
+
"""
|
| 281 |
+
|
| 282 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 283 |
+
pyproject.write_text(cleandoc(config), encoding="utf-8")
|
| 284 |
+
with pytest.warns(UserWarning, match="File .*classifiers.txt. cannot be found"):
|
| 285 |
+
expanded = read_configuration(pyproject)
|
| 286 |
+
assert "classifiers" not in expanded["project"]
|
| 287 |
+
|
| 288 |
+
|
| 289 |
+
@pytest.mark.parametrize(
|
| 290 |
+
"example",
|
| 291 |
+
(
|
| 292 |
+
"""
|
| 293 |
+
[project]
|
| 294 |
+
name = "myproj"
|
| 295 |
+
version = "1.2"
|
| 296 |
+
|
| 297 |
+
[my-tool.that-disrespect.pep518]
|
| 298 |
+
value = 42
|
| 299 |
+
""",
|
| 300 |
+
),
|
| 301 |
+
)
|
| 302 |
+
def test_ignore_unrelated_config(tmp_path, example):
|
| 303 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 304 |
+
pyproject.write_text(cleandoc(example), encoding="utf-8")
|
| 305 |
+
|
| 306 |
+
# Make sure no error is raised due to 3rd party configs in pyproject.toml
|
| 307 |
+
assert read_configuration(pyproject) is not None
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
@pytest.mark.parametrize(
|
| 311 |
+
("example", "error_msg"),
|
| 312 |
+
[
|
| 313 |
+
(
|
| 314 |
+
"""
|
| 315 |
+
[project]
|
| 316 |
+
name = "myproj"
|
| 317 |
+
version = "1.2"
|
| 318 |
+
requires = ['pywin32; platform_system=="Windows"' ]
|
| 319 |
+
""",
|
| 320 |
+
"configuration error: .project. must not contain ..requires.. properties",
|
| 321 |
+
),
|
| 322 |
+
],
|
| 323 |
+
)
|
| 324 |
+
def test_invalid_example(tmp_path, example, error_msg):
|
| 325 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 326 |
+
pyproject.write_text(cleandoc(example), encoding="utf-8")
|
| 327 |
+
|
| 328 |
+
pattern = re.compile(f"invalid pyproject.toml.*{error_msg}.*", re.M | re.S)
|
| 329 |
+
with pytest.raises(ValueError, match=pattern):
|
| 330 |
+
read_configuration(pyproject)
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
@pytest.mark.parametrize("config", ("", "[tool.something]\nvalue = 42"))
|
| 334 |
+
def test_empty(tmp_path, config):
|
| 335 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 336 |
+
pyproject.write_text(config, encoding="utf-8")
|
| 337 |
+
|
| 338 |
+
# Make sure no error is raised
|
| 339 |
+
assert read_configuration(pyproject) == {}
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
@pytest.mark.parametrize("config", ("[project]\nname = 'myproj'\nversion='42'\n",))
|
| 343 |
+
def test_include_package_data_by_default(tmp_path, config):
|
| 344 |
+
"""Builds with ``pyproject.toml`` should consider ``include-package-data=True`` as
|
| 345 |
+
default.
|
| 346 |
+
"""
|
| 347 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 348 |
+
pyproject.write_text(config, encoding="utf-8")
|
| 349 |
+
|
| 350 |
+
config = read_configuration(pyproject)
|
| 351 |
+
assert config["tool"]["setuptools"]["include-package-data"] is True
|
| 352 |
+
|
| 353 |
+
|
| 354 |
+
def test_include_package_data_in_setuppy(tmp_path):
|
| 355 |
+
"""Builds with ``pyproject.toml`` should consider ``include_package_data`` set in
|
| 356 |
+
``setup.py``.
|
| 357 |
+
|
| 358 |
+
See https://github.com/pypa/setuptools/issues/3197#issuecomment-1079023889
|
| 359 |
+
"""
|
| 360 |
+
files = {
|
| 361 |
+
"pyproject.toml": "[project]\nname = 'myproj'\nversion='42'\n",
|
| 362 |
+
"setup.py": "__import__('setuptools').setup(include_package_data=False)",
|
| 363 |
+
}
|
| 364 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 365 |
+
|
| 366 |
+
with Path(tmp_path):
|
| 367 |
+
dist = distutils.core.run_setup("setup.py", {}, stop_after="config")
|
| 368 |
+
|
| 369 |
+
assert dist.get_name() == "myproj"
|
| 370 |
+
assert dist.get_version() == "42"
|
| 371 |
+
assert dist.include_package_data is False
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
def test_warn_tools_typo(tmp_path):
|
| 375 |
+
"""Test that the common ``tools.setuptools`` typo in ``pyproject.toml`` issues a warning
|
| 376 |
+
|
| 377 |
+
See https://github.com/pypa/setuptools/issues/4150
|
| 378 |
+
"""
|
| 379 |
+
config = """
|
| 380 |
+
[build-system]
|
| 381 |
+
requires = ["setuptools"]
|
| 382 |
+
build-backend = "setuptools.build_meta"
|
| 383 |
+
|
| 384 |
+
[project]
|
| 385 |
+
name = "myproj"
|
| 386 |
+
version = '42'
|
| 387 |
+
|
| 388 |
+
[tools.setuptools]
|
| 389 |
+
packages = ["package"]
|
| 390 |
+
"""
|
| 391 |
+
|
| 392 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 393 |
+
pyproject.write_text(cleandoc(config), encoding="utf-8")
|
| 394 |
+
|
| 395 |
+
with pytest.warns(_ToolsTypoInMetadata):
|
| 396 |
+
read_configuration(pyproject)
|
videollama2/lib/python3.10/site-packages/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from inspect import cleandoc
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
from jaraco import path
|
| 5 |
+
|
| 6 |
+
from setuptools.config.pyprojecttoml import apply_configuration
|
| 7 |
+
from setuptools.dist import Distribution
|
| 8 |
+
from setuptools.warnings import SetuptoolsWarning
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def test_dynamic_dependencies(tmp_path):
|
| 12 |
+
files = {
|
| 13 |
+
"requirements.txt": "six\n # comment\n",
|
| 14 |
+
"pyproject.toml": cleandoc(
|
| 15 |
+
"""
|
| 16 |
+
[project]
|
| 17 |
+
name = "myproj"
|
| 18 |
+
version = "1.0"
|
| 19 |
+
dynamic = ["dependencies"]
|
| 20 |
+
|
| 21 |
+
[build-system]
|
| 22 |
+
requires = ["setuptools", "wheel"]
|
| 23 |
+
build-backend = "setuptools.build_meta"
|
| 24 |
+
|
| 25 |
+
[tool.setuptools.dynamic.dependencies]
|
| 26 |
+
file = ["requirements.txt"]
|
| 27 |
+
"""
|
| 28 |
+
),
|
| 29 |
+
}
|
| 30 |
+
path.build(files, prefix=tmp_path)
|
| 31 |
+
dist = Distribution()
|
| 32 |
+
dist = apply_configuration(dist, tmp_path / "pyproject.toml")
|
| 33 |
+
assert dist.install_requires == ["six"]
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def test_dynamic_optional_dependencies(tmp_path):
|
| 37 |
+
files = {
|
| 38 |
+
"requirements-docs.txt": "sphinx\n # comment\n",
|
| 39 |
+
"pyproject.toml": cleandoc(
|
| 40 |
+
"""
|
| 41 |
+
[project]
|
| 42 |
+
name = "myproj"
|
| 43 |
+
version = "1.0"
|
| 44 |
+
dynamic = ["optional-dependencies"]
|
| 45 |
+
|
| 46 |
+
[tool.setuptools.dynamic.optional-dependencies.docs]
|
| 47 |
+
file = ["requirements-docs.txt"]
|
| 48 |
+
|
| 49 |
+
[build-system]
|
| 50 |
+
requires = ["setuptools", "wheel"]
|
| 51 |
+
build-backend = "setuptools.build_meta"
|
| 52 |
+
"""
|
| 53 |
+
),
|
| 54 |
+
}
|
| 55 |
+
path.build(files, prefix=tmp_path)
|
| 56 |
+
dist = Distribution()
|
| 57 |
+
dist = apply_configuration(dist, tmp_path / "pyproject.toml")
|
| 58 |
+
assert dist.extras_require == {"docs": ["sphinx"]}
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def test_mixed_dynamic_optional_dependencies(tmp_path):
|
| 62 |
+
"""
|
| 63 |
+
Test that if PEP 621 was loosened to allow mixing of dynamic and static
|
| 64 |
+
configurations in the case of fields containing sub-fields (groups),
|
| 65 |
+
things would work out.
|
| 66 |
+
"""
|
| 67 |
+
files = {
|
| 68 |
+
"requirements-images.txt": "pillow~=42.0\n # comment\n",
|
| 69 |
+
"pyproject.toml": cleandoc(
|
| 70 |
+
"""
|
| 71 |
+
[project]
|
| 72 |
+
name = "myproj"
|
| 73 |
+
version = "1.0"
|
| 74 |
+
dynamic = ["optional-dependencies"]
|
| 75 |
+
|
| 76 |
+
[project.optional-dependencies]
|
| 77 |
+
docs = ["sphinx"]
|
| 78 |
+
|
| 79 |
+
[tool.setuptools.dynamic.optional-dependencies.images]
|
| 80 |
+
file = ["requirements-images.txt"]
|
| 81 |
+
"""
|
| 82 |
+
),
|
| 83 |
+
}
|
| 84 |
+
|
| 85 |
+
path.build(files, prefix=tmp_path)
|
| 86 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 87 |
+
with pytest.raises(ValueError, match="project.optional-dependencies"):
|
| 88 |
+
apply_configuration(Distribution(), pyproject)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def test_mixed_extras_require_optional_dependencies(tmp_path):
|
| 92 |
+
files = {
|
| 93 |
+
"pyproject.toml": cleandoc(
|
| 94 |
+
"""
|
| 95 |
+
[project]
|
| 96 |
+
name = "myproj"
|
| 97 |
+
version = "1.0"
|
| 98 |
+
optional-dependencies.docs = ["sphinx"]
|
| 99 |
+
"""
|
| 100 |
+
),
|
| 101 |
+
}
|
| 102 |
+
|
| 103 |
+
path.build(files, prefix=tmp_path)
|
| 104 |
+
pyproject = tmp_path / "pyproject.toml"
|
| 105 |
+
|
| 106 |
+
with pytest.warns(SetuptoolsWarning, match=".extras_require. overwritten"):
|
| 107 |
+
dist = Distribution({"extras_require": {"hello": ["world"]}})
|
| 108 |
+
dist = apply_configuration(dist, pyproject)
|
| 109 |
+
assert dist.extras_require == {"docs": ["sphinx"]}
|
videollama2/lib/python3.10/site-packages/setuptools/tests/config/test_setupcfg.py
ADDED
|
@@ -0,0 +1,965 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import configparser
|
| 2 |
+
import contextlib
|
| 3 |
+
import inspect
|
| 4 |
+
from pathlib import Path
|
| 5 |
+
from unittest.mock import Mock, patch
|
| 6 |
+
|
| 7 |
+
import pytest
|
| 8 |
+
from packaging.requirements import InvalidRequirement
|
| 9 |
+
|
| 10 |
+
from setuptools.config.setupcfg import ConfigHandler, Target, read_configuration
|
| 11 |
+
from setuptools.dist import Distribution, _Distribution
|
| 12 |
+
from setuptools.warnings import SetuptoolsDeprecationWarning
|
| 13 |
+
|
| 14 |
+
from ..textwrap import DALS
|
| 15 |
+
|
| 16 |
+
from distutils.errors import DistutilsFileError, DistutilsOptionError
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class ErrConfigHandler(ConfigHandler[Target]):
|
| 20 |
+
"""Erroneous handler. Fails to implement required methods."""
|
| 21 |
+
|
| 22 |
+
section_prefix = "**err**"
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def make_package_dir(name, base_dir, ns=False):
|
| 26 |
+
dir_package = base_dir
|
| 27 |
+
for dir_name in name.split('/'):
|
| 28 |
+
dir_package = dir_package.mkdir(dir_name)
|
| 29 |
+
init_file = None
|
| 30 |
+
if not ns:
|
| 31 |
+
init_file = dir_package.join('__init__.py')
|
| 32 |
+
init_file.write('')
|
| 33 |
+
return dir_package, init_file
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def fake_env(
|
| 37 |
+
tmpdir, setup_cfg, setup_py=None, encoding='ascii', package_path='fake_package'
|
| 38 |
+
):
|
| 39 |
+
if setup_py is None:
|
| 40 |
+
setup_py = 'from setuptools import setup\nsetup()\n'
|
| 41 |
+
|
| 42 |
+
tmpdir.join('setup.py').write(setup_py)
|
| 43 |
+
config = tmpdir.join('setup.cfg')
|
| 44 |
+
config.write(setup_cfg.encode(encoding), mode='wb')
|
| 45 |
+
|
| 46 |
+
package_dir, init_file = make_package_dir(package_path, tmpdir)
|
| 47 |
+
|
| 48 |
+
init_file.write(
|
| 49 |
+
'VERSION = (1, 2, 3)\n'
|
| 50 |
+
'\n'
|
| 51 |
+
'VERSION_MAJOR = 1'
|
| 52 |
+
'\n'
|
| 53 |
+
'def get_version():\n'
|
| 54 |
+
' return [3, 4, 5, "dev"]\n'
|
| 55 |
+
'\n'
|
| 56 |
+
)
|
| 57 |
+
|
| 58 |
+
return package_dir, config
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
@contextlib.contextmanager
|
| 62 |
+
def get_dist(tmpdir, kwargs_initial=None, parse=True):
|
| 63 |
+
kwargs_initial = kwargs_initial or {}
|
| 64 |
+
|
| 65 |
+
with tmpdir.as_cwd():
|
| 66 |
+
dist = Distribution(kwargs_initial)
|
| 67 |
+
dist.script_name = 'setup.py'
|
| 68 |
+
parse and dist.parse_config_files()
|
| 69 |
+
|
| 70 |
+
yield dist
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def test_parsers_implemented():
|
| 74 |
+
with pytest.raises(NotImplementedError):
|
| 75 |
+
handler = ErrConfigHandler(None, {}, False, Mock())
|
| 76 |
+
handler.parsers
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class TestConfigurationReader:
|
| 80 |
+
def test_basic(self, tmpdir):
|
| 81 |
+
_, config = fake_env(
|
| 82 |
+
tmpdir,
|
| 83 |
+
'[metadata]\n'
|
| 84 |
+
'version = 10.1.1\n'
|
| 85 |
+
'keywords = one, two\n'
|
| 86 |
+
'\n'
|
| 87 |
+
'[options]\n'
|
| 88 |
+
'scripts = bin/a.py, bin/b.py\n',
|
| 89 |
+
)
|
| 90 |
+
config_dict = read_configuration(str(config))
|
| 91 |
+
assert config_dict['metadata']['version'] == '10.1.1'
|
| 92 |
+
assert config_dict['metadata']['keywords'] == ['one', 'two']
|
| 93 |
+
assert config_dict['options']['scripts'] == ['bin/a.py', 'bin/b.py']
|
| 94 |
+
|
| 95 |
+
def test_no_config(self, tmpdir):
|
| 96 |
+
with pytest.raises(DistutilsFileError):
|
| 97 |
+
read_configuration(str(tmpdir.join('setup.cfg')))
|
| 98 |
+
|
| 99 |
+
def test_ignore_errors(self, tmpdir):
|
| 100 |
+
_, config = fake_env(
|
| 101 |
+
tmpdir,
|
| 102 |
+
'[metadata]\nversion = attr: none.VERSION\nkeywords = one, two\n',
|
| 103 |
+
)
|
| 104 |
+
with pytest.raises(ImportError):
|
| 105 |
+
read_configuration(str(config))
|
| 106 |
+
|
| 107 |
+
config_dict = read_configuration(str(config), ignore_option_errors=True)
|
| 108 |
+
|
| 109 |
+
assert config_dict['metadata']['keywords'] == ['one', 'two']
|
| 110 |
+
assert 'version' not in config_dict['metadata']
|
| 111 |
+
|
| 112 |
+
config.remove()
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class TestMetadata:
|
| 116 |
+
def test_basic(self, tmpdir):
|
| 117 |
+
fake_env(
|
| 118 |
+
tmpdir,
|
| 119 |
+
'[metadata]\n'
|
| 120 |
+
'version = 10.1.1\n'
|
| 121 |
+
'description = Some description\n'
|
| 122 |
+
'long_description_content_type = text/something\n'
|
| 123 |
+
'long_description = file: README\n'
|
| 124 |
+
'name = fake_name\n'
|
| 125 |
+
'keywords = one, two\n'
|
| 126 |
+
'provides = package, package.sub\n'
|
| 127 |
+
'license = otherlic\n'
|
| 128 |
+
'download_url = http://test.test.com/test/\n'
|
| 129 |
+
'maintainer_email = test@test.com\n',
|
| 130 |
+
)
|
| 131 |
+
|
| 132 |
+
tmpdir.join('README').write('readme contents\nline2')
|
| 133 |
+
|
| 134 |
+
meta_initial = {
|
| 135 |
+
# This will be used so `otherlic` won't replace it.
|
| 136 |
+
'license': 'BSD 3-Clause License',
|
| 137 |
+
}
|
| 138 |
+
|
| 139 |
+
with get_dist(tmpdir, meta_initial) as dist:
|
| 140 |
+
metadata = dist.metadata
|
| 141 |
+
|
| 142 |
+
assert metadata.version == '10.1.1'
|
| 143 |
+
assert metadata.description == 'Some description'
|
| 144 |
+
assert metadata.long_description_content_type == 'text/something'
|
| 145 |
+
assert metadata.long_description == 'readme contents\nline2'
|
| 146 |
+
assert metadata.provides == ['package', 'package.sub']
|
| 147 |
+
assert metadata.license == 'BSD 3-Clause License'
|
| 148 |
+
assert metadata.name == 'fake_name'
|
| 149 |
+
assert metadata.keywords == ['one', 'two']
|
| 150 |
+
assert metadata.download_url == 'http://test.test.com/test/'
|
| 151 |
+
assert metadata.maintainer_email == 'test@test.com'
|
| 152 |
+
|
| 153 |
+
def test_license_cfg(self, tmpdir):
|
| 154 |
+
fake_env(
|
| 155 |
+
tmpdir,
|
| 156 |
+
DALS(
|
| 157 |
+
"""
|
| 158 |
+
[metadata]
|
| 159 |
+
name=foo
|
| 160 |
+
version=0.0.1
|
| 161 |
+
license=Apache 2.0
|
| 162 |
+
"""
|
| 163 |
+
),
|
| 164 |
+
)
|
| 165 |
+
|
| 166 |
+
with get_dist(tmpdir) as dist:
|
| 167 |
+
metadata = dist.metadata
|
| 168 |
+
|
| 169 |
+
assert metadata.name == "foo"
|
| 170 |
+
assert metadata.version == "0.0.1"
|
| 171 |
+
assert metadata.license == "Apache 2.0"
|
| 172 |
+
|
| 173 |
+
def test_file_mixed(self, tmpdir):
|
| 174 |
+
fake_env(
|
| 175 |
+
tmpdir,
|
| 176 |
+
'[metadata]\nlong_description = file: README.rst, CHANGES.rst\n\n',
|
| 177 |
+
)
|
| 178 |
+
|
| 179 |
+
tmpdir.join('README.rst').write('readme contents\nline2')
|
| 180 |
+
tmpdir.join('CHANGES.rst').write('changelog contents\nand stuff')
|
| 181 |
+
|
| 182 |
+
with get_dist(tmpdir) as dist:
|
| 183 |
+
assert dist.metadata.long_description == (
|
| 184 |
+
'readme contents\nline2\nchangelog contents\nand stuff'
|
| 185 |
+
)
|
| 186 |
+
|
| 187 |
+
def test_file_sandboxed(self, tmpdir):
|
| 188 |
+
tmpdir.ensure("README")
|
| 189 |
+
project = tmpdir.join('depth1', 'depth2')
|
| 190 |
+
project.ensure(dir=True)
|
| 191 |
+
fake_env(project, '[metadata]\nlong_description = file: ../../README\n')
|
| 192 |
+
|
| 193 |
+
with get_dist(project, parse=False) as dist:
|
| 194 |
+
with pytest.raises(DistutilsOptionError):
|
| 195 |
+
dist.parse_config_files() # file: out of sandbox
|
| 196 |
+
|
| 197 |
+
def test_aliases(self, tmpdir):
|
| 198 |
+
fake_env(
|
| 199 |
+
tmpdir,
|
| 200 |
+
'[metadata]\n'
|
| 201 |
+
'author_email = test@test.com\n'
|
| 202 |
+
'home_page = http://test.test.com/test/\n'
|
| 203 |
+
'summary = Short summary\n'
|
| 204 |
+
'platform = a, b\n'
|
| 205 |
+
'classifier =\n'
|
| 206 |
+
' Framework :: Django\n'
|
| 207 |
+
' Programming Language :: Python :: 3.5\n',
|
| 208 |
+
)
|
| 209 |
+
|
| 210 |
+
with get_dist(tmpdir) as dist:
|
| 211 |
+
metadata = dist.metadata
|
| 212 |
+
assert metadata.author_email == 'test@test.com'
|
| 213 |
+
assert metadata.url == 'http://test.test.com/test/'
|
| 214 |
+
assert metadata.description == 'Short summary'
|
| 215 |
+
assert metadata.platforms == ['a', 'b']
|
| 216 |
+
assert metadata.classifiers == [
|
| 217 |
+
'Framework :: Django',
|
| 218 |
+
'Programming Language :: Python :: 3.5',
|
| 219 |
+
]
|
| 220 |
+
|
| 221 |
+
def test_multiline(self, tmpdir):
|
| 222 |
+
fake_env(
|
| 223 |
+
tmpdir,
|
| 224 |
+
'[metadata]\n'
|
| 225 |
+
'name = fake_name\n'
|
| 226 |
+
'keywords =\n'
|
| 227 |
+
' one\n'
|
| 228 |
+
' two\n'
|
| 229 |
+
'classifiers =\n'
|
| 230 |
+
' Framework :: Django\n'
|
| 231 |
+
' Programming Language :: Python :: 3.5\n',
|
| 232 |
+
)
|
| 233 |
+
with get_dist(tmpdir) as dist:
|
| 234 |
+
metadata = dist.metadata
|
| 235 |
+
assert metadata.keywords == ['one', 'two']
|
| 236 |
+
assert metadata.classifiers == [
|
| 237 |
+
'Framework :: Django',
|
| 238 |
+
'Programming Language :: Python :: 3.5',
|
| 239 |
+
]
|
| 240 |
+
|
| 241 |
+
def test_dict(self, tmpdir):
|
| 242 |
+
fake_env(
|
| 243 |
+
tmpdir,
|
| 244 |
+
'[metadata]\n'
|
| 245 |
+
'project_urls =\n'
|
| 246 |
+
' Link One = https://example.com/one/\n'
|
| 247 |
+
' Link Two = https://example.com/two/\n',
|
| 248 |
+
)
|
| 249 |
+
with get_dist(tmpdir) as dist:
|
| 250 |
+
metadata = dist.metadata
|
| 251 |
+
assert metadata.project_urls == {
|
| 252 |
+
'Link One': 'https://example.com/one/',
|
| 253 |
+
'Link Two': 'https://example.com/two/',
|
| 254 |
+
}
|
| 255 |
+
|
| 256 |
+
def test_version(self, tmpdir):
|
| 257 |
+
package_dir, config = fake_env(
|
| 258 |
+
tmpdir, '[metadata]\nversion = attr: fake_package.VERSION\n'
|
| 259 |
+
)
|
| 260 |
+
|
| 261 |
+
sub_a = package_dir.mkdir('subpkg_a')
|
| 262 |
+
sub_a.join('__init__.py').write('')
|
| 263 |
+
sub_a.join('mod.py').write('VERSION = (2016, 11, 26)')
|
| 264 |
+
|
| 265 |
+
sub_b = package_dir.mkdir('subpkg_b')
|
| 266 |
+
sub_b.join('__init__.py').write('')
|
| 267 |
+
sub_b.join('mod.py').write(
|
| 268 |
+
'import third_party_module\nVERSION = (2016, 11, 26)'
|
| 269 |
+
)
|
| 270 |
+
|
| 271 |
+
with get_dist(tmpdir) as dist:
|
| 272 |
+
assert dist.metadata.version == '1.2.3'
|
| 273 |
+
|
| 274 |
+
config.write('[metadata]\nversion = attr: fake_package.get_version\n')
|
| 275 |
+
with get_dist(tmpdir) as dist:
|
| 276 |
+
assert dist.metadata.version == '3.4.5.dev'
|
| 277 |
+
|
| 278 |
+
config.write('[metadata]\nversion = attr: fake_package.VERSION_MAJOR\n')
|
| 279 |
+
with get_dist(tmpdir) as dist:
|
| 280 |
+
assert dist.metadata.version == '1'
|
| 281 |
+
|
| 282 |
+
config.write('[metadata]\nversion = attr: fake_package.subpkg_a.mod.VERSION\n')
|
| 283 |
+
with get_dist(tmpdir) as dist:
|
| 284 |
+
assert dist.metadata.version == '2016.11.26'
|
| 285 |
+
|
| 286 |
+
config.write('[metadata]\nversion = attr: fake_package.subpkg_b.mod.VERSION\n')
|
| 287 |
+
with get_dist(tmpdir) as dist:
|
| 288 |
+
assert dist.metadata.version == '2016.11.26'
|
| 289 |
+
|
| 290 |
+
def test_version_file(self, tmpdir):
|
| 291 |
+
fake_env(tmpdir, '[metadata]\nversion = file: fake_package/version.txt\n')
|
| 292 |
+
tmpdir.join('fake_package', 'version.txt').write('1.2.3\n')
|
| 293 |
+
|
| 294 |
+
with get_dist(tmpdir) as dist:
|
| 295 |
+
assert dist.metadata.version == '1.2.3'
|
| 296 |
+
|
| 297 |
+
tmpdir.join('fake_package', 'version.txt').write('1.2.3\n4.5.6\n')
|
| 298 |
+
with pytest.raises(DistutilsOptionError):
|
| 299 |
+
with get_dist(tmpdir) as dist:
|
| 300 |
+
dist.metadata.version
|
| 301 |
+
|
| 302 |
+
def test_version_with_package_dir_simple(self, tmpdir):
|
| 303 |
+
fake_env(
|
| 304 |
+
tmpdir,
|
| 305 |
+
'[metadata]\n'
|
| 306 |
+
'version = attr: fake_package_simple.VERSION\n'
|
| 307 |
+
'[options]\n'
|
| 308 |
+
'package_dir =\n'
|
| 309 |
+
' = src\n',
|
| 310 |
+
package_path='src/fake_package_simple',
|
| 311 |
+
)
|
| 312 |
+
|
| 313 |
+
with get_dist(tmpdir) as dist:
|
| 314 |
+
assert dist.metadata.version == '1.2.3'
|
| 315 |
+
|
| 316 |
+
def test_version_with_package_dir_rename(self, tmpdir):
|
| 317 |
+
fake_env(
|
| 318 |
+
tmpdir,
|
| 319 |
+
'[metadata]\n'
|
| 320 |
+
'version = attr: fake_package_rename.VERSION\n'
|
| 321 |
+
'[options]\n'
|
| 322 |
+
'package_dir =\n'
|
| 323 |
+
' fake_package_rename = fake_dir\n',
|
| 324 |
+
package_path='fake_dir',
|
| 325 |
+
)
|
| 326 |
+
|
| 327 |
+
with get_dist(tmpdir) as dist:
|
| 328 |
+
assert dist.metadata.version == '1.2.3'
|
| 329 |
+
|
| 330 |
+
def test_version_with_package_dir_complex(self, tmpdir):
|
| 331 |
+
fake_env(
|
| 332 |
+
tmpdir,
|
| 333 |
+
'[metadata]\n'
|
| 334 |
+
'version = attr: fake_package_complex.VERSION\n'
|
| 335 |
+
'[options]\n'
|
| 336 |
+
'package_dir =\n'
|
| 337 |
+
' fake_package_complex = src/fake_dir\n',
|
| 338 |
+
package_path='src/fake_dir',
|
| 339 |
+
)
|
| 340 |
+
|
| 341 |
+
with get_dist(tmpdir) as dist:
|
| 342 |
+
assert dist.metadata.version == '1.2.3'
|
| 343 |
+
|
| 344 |
+
def test_unknown_meta_item(self, tmpdir):
|
| 345 |
+
fake_env(tmpdir, '[metadata]\nname = fake_name\nunknown = some\n')
|
| 346 |
+
with get_dist(tmpdir, parse=False) as dist:
|
| 347 |
+
dist.parse_config_files() # Skip unknown.
|
| 348 |
+
|
| 349 |
+
def test_usupported_section(self, tmpdir):
|
| 350 |
+
fake_env(tmpdir, '[metadata.some]\nkey = val\n')
|
| 351 |
+
with get_dist(tmpdir, parse=False) as dist:
|
| 352 |
+
with pytest.raises(DistutilsOptionError):
|
| 353 |
+
dist.parse_config_files()
|
| 354 |
+
|
| 355 |
+
def test_classifiers(self, tmpdir):
|
| 356 |
+
expected = set([
|
| 357 |
+
'Framework :: Django',
|
| 358 |
+
'Programming Language :: Python :: 3',
|
| 359 |
+
'Programming Language :: Python :: 3.5',
|
| 360 |
+
])
|
| 361 |
+
|
| 362 |
+
# From file.
|
| 363 |
+
_, config = fake_env(tmpdir, '[metadata]\nclassifiers = file: classifiers\n')
|
| 364 |
+
|
| 365 |
+
tmpdir.join('classifiers').write(
|
| 366 |
+
'Framework :: Django\n'
|
| 367 |
+
'Programming Language :: Python :: 3\n'
|
| 368 |
+
'Programming Language :: Python :: 3.5\n'
|
| 369 |
+
)
|
| 370 |
+
|
| 371 |
+
with get_dist(tmpdir) as dist:
|
| 372 |
+
assert set(dist.metadata.classifiers) == expected
|
| 373 |
+
|
| 374 |
+
# From list notation
|
| 375 |
+
config.write(
|
| 376 |
+
'[metadata]\n'
|
| 377 |
+
'classifiers =\n'
|
| 378 |
+
' Framework :: Django\n'
|
| 379 |
+
' Programming Language :: Python :: 3\n'
|
| 380 |
+
' Programming Language :: Python :: 3.5\n'
|
| 381 |
+
)
|
| 382 |
+
with get_dist(tmpdir) as dist:
|
| 383 |
+
assert set(dist.metadata.classifiers) == expected
|
| 384 |
+
|
| 385 |
+
def test_interpolation(self, tmpdir):
|
| 386 |
+
fake_env(tmpdir, '[metadata]\ndescription = %(message)s\n')
|
| 387 |
+
with pytest.raises(configparser.InterpolationMissingOptionError):
|
| 388 |
+
with get_dist(tmpdir):
|
| 389 |
+
pass
|
| 390 |
+
|
| 391 |
+
def test_non_ascii_1(self, tmpdir):
|
| 392 |
+
fake_env(tmpdir, '[metadata]\ndescription = éàïôñ\n', encoding='utf-8')
|
| 393 |
+
with get_dist(tmpdir):
|
| 394 |
+
pass
|
| 395 |
+
|
| 396 |
+
def test_non_ascii_3(self, tmpdir):
|
| 397 |
+
fake_env(tmpdir, '\n# -*- coding: invalid\n')
|
| 398 |
+
with get_dist(tmpdir):
|
| 399 |
+
pass
|
| 400 |
+
|
| 401 |
+
def test_non_ascii_4(self, tmpdir):
|
| 402 |
+
fake_env(
|
| 403 |
+
tmpdir,
|
| 404 |
+
'# -*- coding: utf-8\n[metadata]\ndescription = éàïôñ\n',
|
| 405 |
+
encoding='utf-8',
|
| 406 |
+
)
|
| 407 |
+
with get_dist(tmpdir) as dist:
|
| 408 |
+
assert dist.metadata.description == 'éàïôñ'
|
| 409 |
+
|
| 410 |
+
def test_not_utf8(self, tmpdir):
|
| 411 |
+
"""
|
| 412 |
+
Config files encoded not in UTF-8 will fail
|
| 413 |
+
"""
|
| 414 |
+
fake_env(
|
| 415 |
+
tmpdir,
|
| 416 |
+
'# vim: set fileencoding=iso-8859-15 :\n[metadata]\ndescription = éàïôñ\n',
|
| 417 |
+
encoding='iso-8859-15',
|
| 418 |
+
)
|
| 419 |
+
with pytest.raises(UnicodeDecodeError):
|
| 420 |
+
with get_dist(tmpdir):
|
| 421 |
+
pass
|
| 422 |
+
|
| 423 |
+
def test_warn_dash_deprecation(self, tmpdir):
|
| 424 |
+
# warn_dash_deprecation() is a method in setuptools.dist
|
| 425 |
+
# remove this test and the method when no longer needed
|
| 426 |
+
fake_env(
|
| 427 |
+
tmpdir,
|
| 428 |
+
'[metadata]\n'
|
| 429 |
+
'author-email = test@test.com\n'
|
| 430 |
+
'maintainer_email = foo@foo.com\n',
|
| 431 |
+
)
|
| 432 |
+
msg = "Usage of dash-separated 'author-email' will not be supported"
|
| 433 |
+
with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
|
| 434 |
+
with get_dist(tmpdir) as dist:
|
| 435 |
+
metadata = dist.metadata
|
| 436 |
+
|
| 437 |
+
assert metadata.author_email == 'test@test.com'
|
| 438 |
+
assert metadata.maintainer_email == 'foo@foo.com'
|
| 439 |
+
|
| 440 |
+
def test_make_option_lowercase(self, tmpdir):
|
| 441 |
+
# remove this test and the method make_option_lowercase() in setuptools.dist
|
| 442 |
+
# when no longer needed
|
| 443 |
+
fake_env(tmpdir, '[metadata]\nName = foo\ndescription = Some description\n')
|
| 444 |
+
msg = "Usage of uppercase key 'Name' in 'metadata' will not be supported"
|
| 445 |
+
with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
|
| 446 |
+
with get_dist(tmpdir) as dist:
|
| 447 |
+
metadata = dist.metadata
|
| 448 |
+
|
| 449 |
+
assert metadata.name == 'foo'
|
| 450 |
+
assert metadata.description == 'Some description'
|
| 451 |
+
|
| 452 |
+
|
| 453 |
+
class TestOptions:
|
| 454 |
+
def test_basic(self, tmpdir):
|
| 455 |
+
fake_env(
|
| 456 |
+
tmpdir,
|
| 457 |
+
'[options]\n'
|
| 458 |
+
'zip_safe = True\n'
|
| 459 |
+
'include_package_data = yes\n'
|
| 460 |
+
'package_dir = b=c, =src\n'
|
| 461 |
+
'packages = pack_a, pack_b.subpack\n'
|
| 462 |
+
'namespace_packages = pack1, pack2\n'
|
| 463 |
+
'scripts = bin/one.py, bin/two.py\n'
|
| 464 |
+
'eager_resources = bin/one.py, bin/two.py\n'
|
| 465 |
+
'install_requires = docutils>=0.3; pack ==1.1, ==1.3; hey\n'
|
| 466 |
+
'setup_requires = docutils>=0.3; spack ==1.1, ==1.3; there\n'
|
| 467 |
+
'dependency_links = http://some.com/here/1, '
|
| 468 |
+
'http://some.com/there/2\n'
|
| 469 |
+
'python_requires = >=1.0, !=2.8\n'
|
| 470 |
+
'py_modules = module1, module2\n',
|
| 471 |
+
)
|
| 472 |
+
deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages")
|
| 473 |
+
with deprec, get_dist(tmpdir) as dist:
|
| 474 |
+
assert dist.zip_safe
|
| 475 |
+
assert dist.include_package_data
|
| 476 |
+
assert dist.package_dir == {'': 'src', 'b': 'c'}
|
| 477 |
+
assert dist.packages == ['pack_a', 'pack_b.subpack']
|
| 478 |
+
assert dist.namespace_packages == ['pack1', 'pack2']
|
| 479 |
+
assert dist.scripts == ['bin/one.py', 'bin/two.py']
|
| 480 |
+
assert dist.dependency_links == ([
|
| 481 |
+
'http://some.com/here/1',
|
| 482 |
+
'http://some.com/there/2',
|
| 483 |
+
])
|
| 484 |
+
assert dist.install_requires == ([
|
| 485 |
+
'docutils>=0.3',
|
| 486 |
+
'pack==1.1,==1.3',
|
| 487 |
+
'hey',
|
| 488 |
+
])
|
| 489 |
+
assert dist.setup_requires == ([
|
| 490 |
+
'docutils>=0.3',
|
| 491 |
+
'spack ==1.1, ==1.3',
|
| 492 |
+
'there',
|
| 493 |
+
])
|
| 494 |
+
assert dist.python_requires == '>=1.0, !=2.8'
|
| 495 |
+
assert dist.py_modules == ['module1', 'module2']
|
| 496 |
+
|
| 497 |
+
def test_multiline(self, tmpdir):
|
| 498 |
+
fake_env(
|
| 499 |
+
tmpdir,
|
| 500 |
+
'[options]\n'
|
| 501 |
+
'package_dir = \n'
|
| 502 |
+
' b=c\n'
|
| 503 |
+
' =src\n'
|
| 504 |
+
'packages = \n'
|
| 505 |
+
' pack_a\n'
|
| 506 |
+
' pack_b.subpack\n'
|
| 507 |
+
'namespace_packages = \n'
|
| 508 |
+
' pack1\n'
|
| 509 |
+
' pack2\n'
|
| 510 |
+
'scripts = \n'
|
| 511 |
+
' bin/one.py\n'
|
| 512 |
+
' bin/two.py\n'
|
| 513 |
+
'eager_resources = \n'
|
| 514 |
+
' bin/one.py\n'
|
| 515 |
+
' bin/two.py\n'
|
| 516 |
+
'install_requires = \n'
|
| 517 |
+
' docutils>=0.3\n'
|
| 518 |
+
' pack ==1.1, ==1.3\n'
|
| 519 |
+
' hey\n'
|
| 520 |
+
'setup_requires = \n'
|
| 521 |
+
' docutils>=0.3\n'
|
| 522 |
+
' spack ==1.1, ==1.3\n'
|
| 523 |
+
' there\n'
|
| 524 |
+
'dependency_links = \n'
|
| 525 |
+
' http://some.com/here/1\n'
|
| 526 |
+
' http://some.com/there/2\n',
|
| 527 |
+
)
|
| 528 |
+
deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages")
|
| 529 |
+
with deprec, get_dist(tmpdir) as dist:
|
| 530 |
+
assert dist.package_dir == {'': 'src', 'b': 'c'}
|
| 531 |
+
assert dist.packages == ['pack_a', 'pack_b.subpack']
|
| 532 |
+
assert dist.namespace_packages == ['pack1', 'pack2']
|
| 533 |
+
assert dist.scripts == ['bin/one.py', 'bin/two.py']
|
| 534 |
+
assert dist.dependency_links == ([
|
| 535 |
+
'http://some.com/here/1',
|
| 536 |
+
'http://some.com/there/2',
|
| 537 |
+
])
|
| 538 |
+
assert dist.install_requires == ([
|
| 539 |
+
'docutils>=0.3',
|
| 540 |
+
'pack==1.1,==1.3',
|
| 541 |
+
'hey',
|
| 542 |
+
])
|
| 543 |
+
assert dist.setup_requires == ([
|
| 544 |
+
'docutils>=0.3',
|
| 545 |
+
'spack ==1.1, ==1.3',
|
| 546 |
+
'there',
|
| 547 |
+
])
|
| 548 |
+
|
| 549 |
+
def test_package_dir_fail(self, tmpdir):
|
| 550 |
+
fake_env(tmpdir, '[options]\npackage_dir = a b\n')
|
| 551 |
+
with get_dist(tmpdir, parse=False) as dist:
|
| 552 |
+
with pytest.raises(DistutilsOptionError):
|
| 553 |
+
dist.parse_config_files()
|
| 554 |
+
|
| 555 |
+
def test_package_data(self, tmpdir):
|
| 556 |
+
fake_env(
|
| 557 |
+
tmpdir,
|
| 558 |
+
'[options.package_data]\n'
|
| 559 |
+
'* = *.txt, *.rst\n'
|
| 560 |
+
'hello = *.msg\n'
|
| 561 |
+
'\n'
|
| 562 |
+
'[options.exclude_package_data]\n'
|
| 563 |
+
'* = fake1.txt, fake2.txt\n'
|
| 564 |
+
'hello = *.dat\n',
|
| 565 |
+
)
|
| 566 |
+
|
| 567 |
+
with get_dist(tmpdir) as dist:
|
| 568 |
+
assert dist.package_data == {
|
| 569 |
+
'': ['*.txt', '*.rst'],
|
| 570 |
+
'hello': ['*.msg'],
|
| 571 |
+
}
|
| 572 |
+
assert dist.exclude_package_data == {
|
| 573 |
+
'': ['fake1.txt', 'fake2.txt'],
|
| 574 |
+
'hello': ['*.dat'],
|
| 575 |
+
}
|
| 576 |
+
|
| 577 |
+
def test_packages(self, tmpdir):
|
| 578 |
+
fake_env(tmpdir, '[options]\npackages = find:\n')
|
| 579 |
+
|
| 580 |
+
with get_dist(tmpdir) as dist:
|
| 581 |
+
assert dist.packages == ['fake_package']
|
| 582 |
+
|
| 583 |
+
def test_find_directive(self, tmpdir):
|
| 584 |
+
dir_package, config = fake_env(tmpdir, '[options]\npackages = find:\n')
|
| 585 |
+
|
| 586 |
+
make_package_dir('sub_one', dir_package)
|
| 587 |
+
make_package_dir('sub_two', dir_package)
|
| 588 |
+
|
| 589 |
+
with get_dist(tmpdir) as dist:
|
| 590 |
+
assert set(dist.packages) == set([
|
| 591 |
+
'fake_package',
|
| 592 |
+
'fake_package.sub_two',
|
| 593 |
+
'fake_package.sub_one',
|
| 594 |
+
])
|
| 595 |
+
|
| 596 |
+
config.write(
|
| 597 |
+
'[options]\n'
|
| 598 |
+
'packages = find:\n'
|
| 599 |
+
'\n'
|
| 600 |
+
'[options.packages.find]\n'
|
| 601 |
+
'where = .\n'
|
| 602 |
+
'include =\n'
|
| 603 |
+
' fake_package.sub_one\n'
|
| 604 |
+
' two\n'
|
| 605 |
+
)
|
| 606 |
+
with get_dist(tmpdir) as dist:
|
| 607 |
+
assert dist.packages == ['fake_package.sub_one']
|
| 608 |
+
|
| 609 |
+
config.write(
|
| 610 |
+
'[options]\n'
|
| 611 |
+
'packages = find:\n'
|
| 612 |
+
'\n'
|
| 613 |
+
'[options.packages.find]\n'
|
| 614 |
+
'exclude =\n'
|
| 615 |
+
' fake_package.sub_one\n'
|
| 616 |
+
)
|
| 617 |
+
with get_dist(tmpdir) as dist:
|
| 618 |
+
assert set(dist.packages) == set(['fake_package', 'fake_package.sub_two'])
|
| 619 |
+
|
| 620 |
+
def test_find_namespace_directive(self, tmpdir):
|
| 621 |
+
dir_package, config = fake_env(
|
| 622 |
+
tmpdir, '[options]\npackages = find_namespace:\n'
|
| 623 |
+
)
|
| 624 |
+
|
| 625 |
+
make_package_dir('sub_one', dir_package)
|
| 626 |
+
make_package_dir('sub_two', dir_package, ns=True)
|
| 627 |
+
|
| 628 |
+
with get_dist(tmpdir) as dist:
|
| 629 |
+
assert set(dist.packages) == {
|
| 630 |
+
'fake_package',
|
| 631 |
+
'fake_package.sub_two',
|
| 632 |
+
'fake_package.sub_one',
|
| 633 |
+
}
|
| 634 |
+
|
| 635 |
+
config.write(
|
| 636 |
+
'[options]\n'
|
| 637 |
+
'packages = find_namespace:\n'
|
| 638 |
+
'\n'
|
| 639 |
+
'[options.packages.find]\n'
|
| 640 |
+
'where = .\n'
|
| 641 |
+
'include =\n'
|
| 642 |
+
' fake_package.sub_one\n'
|
| 643 |
+
' two\n'
|
| 644 |
+
)
|
| 645 |
+
with get_dist(tmpdir) as dist:
|
| 646 |
+
assert dist.packages == ['fake_package.sub_one']
|
| 647 |
+
|
| 648 |
+
config.write(
|
| 649 |
+
'[options]\n'
|
| 650 |
+
'packages = find_namespace:\n'
|
| 651 |
+
'\n'
|
| 652 |
+
'[options.packages.find]\n'
|
| 653 |
+
'exclude =\n'
|
| 654 |
+
' fake_package.sub_one\n'
|
| 655 |
+
)
|
| 656 |
+
with get_dist(tmpdir) as dist:
|
| 657 |
+
assert set(dist.packages) == {'fake_package', 'fake_package.sub_two'}
|
| 658 |
+
|
| 659 |
+
def test_extras_require(self, tmpdir):
|
| 660 |
+
fake_env(
|
| 661 |
+
tmpdir,
|
| 662 |
+
'[options.extras_require]\n'
|
| 663 |
+
'pdf = ReportLab>=1.2; RXP\n'
|
| 664 |
+
'rest = \n'
|
| 665 |
+
' docutils>=0.3\n'
|
| 666 |
+
' pack ==1.1, ==1.3\n',
|
| 667 |
+
)
|
| 668 |
+
|
| 669 |
+
with get_dist(tmpdir) as dist:
|
| 670 |
+
assert dist.extras_require == {
|
| 671 |
+
'pdf': ['ReportLab>=1.2', 'RXP'],
|
| 672 |
+
'rest': ['docutils>=0.3', 'pack==1.1,==1.3'],
|
| 673 |
+
}
|
| 674 |
+
assert set(dist.metadata.provides_extras) == {'pdf', 'rest'}
|
| 675 |
+
|
| 676 |
+
@pytest.mark.parametrize(
|
| 677 |
+
"config",
|
| 678 |
+
[
|
| 679 |
+
"[options.extras_require]\nfoo = bar;python_version<'3'",
|
| 680 |
+
"[options.extras_require]\nfoo = bar;os_name=='linux'",
|
| 681 |
+
"[options.extras_require]\nfoo = bar;python_version<'3'\n",
|
| 682 |
+
"[options.extras_require]\nfoo = bar;os_name=='linux'\n",
|
| 683 |
+
"[options]\ninstall_requires = bar;python_version<'3'",
|
| 684 |
+
"[options]\ninstall_requires = bar;os_name=='linux'",
|
| 685 |
+
"[options]\ninstall_requires = bar;python_version<'3'\n",
|
| 686 |
+
"[options]\ninstall_requires = bar;os_name=='linux'\n",
|
| 687 |
+
],
|
| 688 |
+
)
|
| 689 |
+
def test_raises_accidental_env_marker_misconfig(self, config, tmpdir):
|
| 690 |
+
fake_env(tmpdir, config)
|
| 691 |
+
match = (
|
| 692 |
+
r"One of the parsed requirements in `(install_requires|extras_require.+)` "
|
| 693 |
+
"looks like a valid environment marker.*"
|
| 694 |
+
)
|
| 695 |
+
with pytest.raises(InvalidRequirement, match=match):
|
| 696 |
+
with get_dist(tmpdir) as _:
|
| 697 |
+
pass
|
| 698 |
+
|
| 699 |
+
@pytest.mark.parametrize(
|
| 700 |
+
"config",
|
| 701 |
+
[
|
| 702 |
+
"[options.extras_require]\nfoo = bar;python_version<3",
|
| 703 |
+
"[options.extras_require]\nfoo = bar;python_version<3\n",
|
| 704 |
+
"[options]\ninstall_requires = bar;python_version<3",
|
| 705 |
+
"[options]\ninstall_requires = bar;python_version<3\n",
|
| 706 |
+
],
|
| 707 |
+
)
|
| 708 |
+
def test_warn_accidental_env_marker_misconfig(self, config, tmpdir):
|
| 709 |
+
fake_env(tmpdir, config)
|
| 710 |
+
match = (
|
| 711 |
+
r"One of the parsed requirements in `(install_requires|extras_require.+)` "
|
| 712 |
+
"looks like a valid environment marker.*"
|
| 713 |
+
)
|
| 714 |
+
with pytest.warns(SetuptoolsDeprecationWarning, match=match):
|
| 715 |
+
with get_dist(tmpdir) as _:
|
| 716 |
+
pass
|
| 717 |
+
|
| 718 |
+
@pytest.mark.parametrize(
|
| 719 |
+
"config",
|
| 720 |
+
[
|
| 721 |
+
"[options.extras_require]\nfoo =\n bar;python_version<'3'",
|
| 722 |
+
"[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy",
|
| 723 |
+
"[options.extras_require]\nfoo =\n bar;python_version<'3'\n",
|
| 724 |
+
"[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy\n",
|
| 725 |
+
"[options.extras_require]\nfoo =\n bar\n python_version<3\n",
|
| 726 |
+
"[options]\ninstall_requires =\n bar;python_version<'3'",
|
| 727 |
+
"[options]\ninstall_requires = bar;baz\nboo = xxx;yyy",
|
| 728 |
+
"[options]\ninstall_requires =\n bar;python_version<'3'\n",
|
| 729 |
+
"[options]\ninstall_requires = bar;baz\nboo = xxx;yyy\n",
|
| 730 |
+
"[options]\ninstall_requires =\n bar\n python_version<3\n",
|
| 731 |
+
],
|
| 732 |
+
)
|
| 733 |
+
@pytest.mark.filterwarnings("error::setuptools.SetuptoolsDeprecationWarning")
|
| 734 |
+
def test_nowarn_accidental_env_marker_misconfig(self, config, tmpdir, recwarn):
|
| 735 |
+
fake_env(tmpdir, config)
|
| 736 |
+
num_warnings = len(recwarn)
|
| 737 |
+
with get_dist(tmpdir) as _:
|
| 738 |
+
pass
|
| 739 |
+
# The examples are valid, no warnings shown
|
| 740 |
+
assert len(recwarn) == num_warnings
|
| 741 |
+
|
| 742 |
+
def test_dash_preserved_extras_require(self, tmpdir):
|
| 743 |
+
fake_env(tmpdir, '[options.extras_require]\nfoo-a = foo\nfoo_b = test\n')
|
| 744 |
+
|
| 745 |
+
with get_dist(tmpdir) as dist:
|
| 746 |
+
assert dist.extras_require == {'foo-a': ['foo'], 'foo_b': ['test']}
|
| 747 |
+
|
| 748 |
+
def test_entry_points(self, tmpdir):
|
| 749 |
+
_, config = fake_env(
|
| 750 |
+
tmpdir,
|
| 751 |
+
'[options.entry_points]\n'
|
| 752 |
+
'group1 = point1 = pack.module:func, '
|
| 753 |
+
'.point2 = pack.module2:func_rest [rest]\n'
|
| 754 |
+
'group2 = point3 = pack.module:func2\n',
|
| 755 |
+
)
|
| 756 |
+
|
| 757 |
+
with get_dist(tmpdir) as dist:
|
| 758 |
+
assert dist.entry_points == {
|
| 759 |
+
'group1': [
|
| 760 |
+
'point1 = pack.module:func',
|
| 761 |
+
'.point2 = pack.module2:func_rest [rest]',
|
| 762 |
+
],
|
| 763 |
+
'group2': ['point3 = pack.module:func2'],
|
| 764 |
+
}
|
| 765 |
+
|
| 766 |
+
expected = (
|
| 767 |
+
'[blogtool.parsers]\n'
|
| 768 |
+
'.rst = some.nested.module:SomeClass.some_classmethod[reST]\n'
|
| 769 |
+
)
|
| 770 |
+
|
| 771 |
+
tmpdir.join('entry_points').write(expected)
|
| 772 |
+
|
| 773 |
+
# From file.
|
| 774 |
+
config.write('[options]\nentry_points = file: entry_points\n')
|
| 775 |
+
|
| 776 |
+
with get_dist(tmpdir) as dist:
|
| 777 |
+
assert dist.entry_points == expected
|
| 778 |
+
|
| 779 |
+
def test_case_sensitive_entry_points(self, tmpdir):
|
| 780 |
+
fake_env(
|
| 781 |
+
tmpdir,
|
| 782 |
+
'[options.entry_points]\n'
|
| 783 |
+
'GROUP1 = point1 = pack.module:func, '
|
| 784 |
+
'.point2 = pack.module2:func_rest [rest]\n'
|
| 785 |
+
'group2 = point3 = pack.module:func2\n',
|
| 786 |
+
)
|
| 787 |
+
|
| 788 |
+
with get_dist(tmpdir) as dist:
|
| 789 |
+
assert dist.entry_points == {
|
| 790 |
+
'GROUP1': [
|
| 791 |
+
'point1 = pack.module:func',
|
| 792 |
+
'.point2 = pack.module2:func_rest [rest]',
|
| 793 |
+
],
|
| 794 |
+
'group2': ['point3 = pack.module:func2'],
|
| 795 |
+
}
|
| 796 |
+
|
| 797 |
+
def test_data_files(self, tmpdir):
|
| 798 |
+
fake_env(
|
| 799 |
+
tmpdir,
|
| 800 |
+
'[options.data_files]\n'
|
| 801 |
+
'cfg =\n'
|
| 802 |
+
' a/b.conf\n'
|
| 803 |
+
' c/d.conf\n'
|
| 804 |
+
'data = e/f.dat, g/h.dat\n',
|
| 805 |
+
)
|
| 806 |
+
|
| 807 |
+
with get_dist(tmpdir) as dist:
|
| 808 |
+
expected = [
|
| 809 |
+
('cfg', ['a/b.conf', 'c/d.conf']),
|
| 810 |
+
('data', ['e/f.dat', 'g/h.dat']),
|
| 811 |
+
]
|
| 812 |
+
assert sorted(dist.data_files) == sorted(expected)
|
| 813 |
+
|
| 814 |
+
def test_data_files_globby(self, tmpdir):
|
| 815 |
+
fake_env(
|
| 816 |
+
tmpdir,
|
| 817 |
+
'[options.data_files]\n'
|
| 818 |
+
'cfg =\n'
|
| 819 |
+
' a/b.conf\n'
|
| 820 |
+
' c/d.conf\n'
|
| 821 |
+
'data = *.dat\n'
|
| 822 |
+
'icons = \n'
|
| 823 |
+
' *.ico\n'
|
| 824 |
+
'audio = \n'
|
| 825 |
+
' *.wav\n'
|
| 826 |
+
' sounds.db\n',
|
| 827 |
+
)
|
| 828 |
+
|
| 829 |
+
# Create dummy files for glob()'s sake:
|
| 830 |
+
tmpdir.join('a.dat').write('')
|
| 831 |
+
tmpdir.join('b.dat').write('')
|
| 832 |
+
tmpdir.join('c.dat').write('')
|
| 833 |
+
tmpdir.join('a.ico').write('')
|
| 834 |
+
tmpdir.join('b.ico').write('')
|
| 835 |
+
tmpdir.join('c.ico').write('')
|
| 836 |
+
tmpdir.join('beep.wav').write('')
|
| 837 |
+
tmpdir.join('boop.wav').write('')
|
| 838 |
+
tmpdir.join('sounds.db').write('')
|
| 839 |
+
|
| 840 |
+
with get_dist(tmpdir) as dist:
|
| 841 |
+
expected = [
|
| 842 |
+
('cfg', ['a/b.conf', 'c/d.conf']),
|
| 843 |
+
('data', ['a.dat', 'b.dat', 'c.dat']),
|
| 844 |
+
('icons', ['a.ico', 'b.ico', 'c.ico']),
|
| 845 |
+
('audio', ['beep.wav', 'boop.wav', 'sounds.db']),
|
| 846 |
+
]
|
| 847 |
+
assert sorted(dist.data_files) == sorted(expected)
|
| 848 |
+
|
| 849 |
+
def test_python_requires_simple(self, tmpdir):
|
| 850 |
+
fake_env(
|
| 851 |
+
tmpdir,
|
| 852 |
+
DALS(
|
| 853 |
+
"""
|
| 854 |
+
[options]
|
| 855 |
+
python_requires=>=2.7
|
| 856 |
+
"""
|
| 857 |
+
),
|
| 858 |
+
)
|
| 859 |
+
with get_dist(tmpdir) as dist:
|
| 860 |
+
dist.parse_config_files()
|
| 861 |
+
|
| 862 |
+
def test_python_requires_compound(self, tmpdir):
|
| 863 |
+
fake_env(
|
| 864 |
+
tmpdir,
|
| 865 |
+
DALS(
|
| 866 |
+
"""
|
| 867 |
+
[options]
|
| 868 |
+
python_requires=>=2.7,!=3.0.*
|
| 869 |
+
"""
|
| 870 |
+
),
|
| 871 |
+
)
|
| 872 |
+
with get_dist(tmpdir) as dist:
|
| 873 |
+
dist.parse_config_files()
|
| 874 |
+
|
| 875 |
+
def test_python_requires_invalid(self, tmpdir):
|
| 876 |
+
fake_env(
|
| 877 |
+
tmpdir,
|
| 878 |
+
DALS(
|
| 879 |
+
"""
|
| 880 |
+
[options]
|
| 881 |
+
python_requires=invalid
|
| 882 |
+
"""
|
| 883 |
+
),
|
| 884 |
+
)
|
| 885 |
+
with pytest.raises(Exception):
|
| 886 |
+
with get_dist(tmpdir) as dist:
|
| 887 |
+
dist.parse_config_files()
|
| 888 |
+
|
| 889 |
+
def test_cmdclass(self, tmpdir):
|
| 890 |
+
module_path = Path(tmpdir, "src/custom_build.py") # auto discovery for src
|
| 891 |
+
module_path.parent.mkdir(parents=True, exist_ok=True)
|
| 892 |
+
module_path.write_text(
|
| 893 |
+
"from distutils.core import Command\nclass CustomCmd(Command): pass\n",
|
| 894 |
+
encoding="utf-8",
|
| 895 |
+
)
|
| 896 |
+
|
| 897 |
+
setup_cfg = """
|
| 898 |
+
[options]
|
| 899 |
+
cmdclass =
|
| 900 |
+
customcmd = custom_build.CustomCmd
|
| 901 |
+
"""
|
| 902 |
+
fake_env(tmpdir, inspect.cleandoc(setup_cfg))
|
| 903 |
+
|
| 904 |
+
with get_dist(tmpdir) as dist:
|
| 905 |
+
cmdclass = dist.cmdclass['customcmd']
|
| 906 |
+
assert cmdclass.__name__ == "CustomCmd"
|
| 907 |
+
assert cmdclass.__module__ == "custom_build"
|
| 908 |
+
assert module_path.samefile(inspect.getfile(cmdclass))
|
| 909 |
+
|
| 910 |
+
def test_requirements_file(self, tmpdir):
|
| 911 |
+
fake_env(
|
| 912 |
+
tmpdir,
|
| 913 |
+
DALS(
|
| 914 |
+
"""
|
| 915 |
+
[options]
|
| 916 |
+
install_requires = file:requirements.txt
|
| 917 |
+
[options.extras_require]
|
| 918 |
+
colors = file:requirements-extra.txt
|
| 919 |
+
"""
|
| 920 |
+
),
|
| 921 |
+
)
|
| 922 |
+
|
| 923 |
+
tmpdir.join('requirements.txt').write('\ndocutils>=0.3\n\n')
|
| 924 |
+
tmpdir.join('requirements-extra.txt').write('colorama')
|
| 925 |
+
|
| 926 |
+
with get_dist(tmpdir) as dist:
|
| 927 |
+
assert dist.install_requires == ['docutils>=0.3']
|
| 928 |
+
assert dist.extras_require == {'colors': ['colorama']}
|
| 929 |
+
|
| 930 |
+
|
| 931 |
+
# Pristine distutils ``Distribution.__init__``, captured before the test below
# patches it, so the fake init can still delegate to the real one.
saved_dist_init = _Distribution.__init__


class TestExternalSetters:
    # During creation of the setuptools Distribution() object, we call the init
    # of the parent distutils Distribution object via _Distribution.__init__().
    #
    # It's possible distutils calls out to various keyword implementations
    # (i.e. distutils.setup_keywords entry points) that may set a range of
    # variables.
    #
    # This wraps distutil's Distribution.__init__ and simulates pbr or
    # something else setting these values.
    def _fake_distribution_init(self, dist, attrs):
        saved_dist_init(dist, attrs)
        # see self._DISTUTILS_UNSUPPORTED_METADATA
        dist.metadata.long_description_content_type = 'text/something'
        # Test overwrite setup() args
        dist.metadata.project_urls = {
            'Link One': 'https://example.com/one/',
            'Link Two': 'https://example.com/two/',
        }

    @patch.object(_Distribution, '__init__', autospec=True)
    def test_external_setters(self, mock_parent_init, tmpdir):
        mock_parent_init.side_effect = self._fake_distribution_init

        dist = Distribution(attrs={'project_urls': {'will_be': 'ignored'}})

        # Values set by the (simulated) external tool win over setup() args.
        assert dist.metadata.long_description_content_type == 'text/something'
        assert dist.metadata.project_urls == {
            'Link One': 'https://example.com/one/',
            'Link Two': 'https://example.com/two/',
        }
|
videollama2/lib/python3.10/site-packages/setuptools/tests/fixtures.py
ADDED
|
@@ -0,0 +1,157 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import os
|
| 3 |
+
import subprocess
|
| 4 |
+
import sys
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
|
| 7 |
+
import path
|
| 8 |
+
import pytest
|
| 9 |
+
|
| 10 |
+
from . import contexts, environment
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
@pytest.fixture
def user_override(monkeypatch):
    """
    Override site.USER_BASE and site.USER_SITE with temporary directories in
    a context.
    """
    with contextlib.ExitStack() as stack:
        monkeypatch.setattr(
            'site.USER_BASE', stack.enter_context(contexts.tempdir())
        )
        monkeypatch.setattr(
            'site.USER_SITE', stack.enter_context(contexts.tempdir())
        )
        stack.enter_context(contexts.save_user_site_setting())
        yield


@pytest.fixture
def tmpdir_cwd(tmpdir):
    """Run the test with ``tmpdir`` as the current working directory."""
    with tmpdir.as_cwd() as original_cwd:
        yield original_cwd


@pytest.fixture(autouse=True, scope="session")
def workaround_xdist_376(request):
    """
    Workaround pytest-dev/pytest-xdist#376

    ``pytest-xdist`` tends to inject '' into ``sys.path``,
    which may break certain isolation expectations.
    Remove the entry so the import
    machinery behaves the same irrespective of xdist.
    """
    if request.config.pluginmanager.has_plugin('xdist'):
        with contextlib.suppress(ValueError):
            sys.path.remove('')


@pytest.fixture
def sample_project(tmp_path):
    """
    Clone the 'sampleproject' and return a path to it.
    """
    clone_cmd = ['git', 'clone', 'https://github.com/pypa/sampleproject']
    try:
        subprocess.check_call(clone_cmd, cwd=str(tmp_path))
    except Exception:
        # No network / no git: the dependent tests cannot run meaningfully.
        pytest.skip("Unable to clone sampleproject")
    return tmp_path / 'sampleproject'
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
# sdist and wheel artifacts should be stable across a round of tests
|
| 64 |
+
# so we can build them once per session and use the files as "readonly"
|
| 65 |
+
|
| 66 |
+
# In the case of setuptools, building the wheel without sdist may cause
|
| 67 |
+
# it to contain the `build` directory, and therefore create situations with
|
| 68 |
+
# `setuptools/build/lib/build/lib/...`. To avoid that, build both artifacts at once.
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def _build_distributions(tmp_path_factory, request):
    """Build (or reuse cached) sdist + wheel for the setuptools under test."""
    locked_dir = contexts.session_locked_tmp_dir(
        request, tmp_path_factory, "dist_build"
    )
    with locked_dir as tmp:  # pragma: no cover
        sdist = next(tmp.glob("*.tar.gz"), None)
        wheel = next(tmp.glob("*.whl"), None)
        if sdist and wheel:
            # Both artifacts already built by an earlier worker/session user.
            return (sdist, wheel)

        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
        assert not Path(request.config.rootdir, "build/lib/build").exists()

        build_cmd = [
            sys.executable,
            "-m",
            "build",
            "--outdir",
            str(tmp),
            str(request.config.rootdir),
        ]
        subprocess.check_output(build_cmd)

        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
        assert not Path(request.config.rootdir, "build/lib/build").exists()

        return next(tmp.glob("*.tar.gz")), next(tmp.glob("*.whl"))


@pytest.fixture(scope="session")
def setuptools_sdist(tmp_path_factory, request):
    """Session-scoped path to the setuptools sdist (prebuilt if provided)."""
    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")
    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
        return Path(prebuilt).resolve()

    return _build_distributions(tmp_path_factory, request)[0]


@pytest.fixture(scope="session")
def setuptools_wheel(tmp_path_factory, request):
    """Session-scoped path to the setuptools wheel (prebuilt if provided)."""
    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")
    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
        return Path(prebuilt).resolve()

    return _build_distributions(tmp_path_factory, request)[1]
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def _configured_virtualenv(root, create_opts):
    """Prepare (but do not create) a VirtualEnv rooted at ``root``."""
    env = environment.VirtualEnv()
    env.root = path.Path(root)
    env.create_opts = create_opts
    return env


@pytest.fixture
def venv(tmp_path, setuptools_wheel):
    """Virtual env with the version of setuptools under test installed"""
    env = _configured_virtualenv(
        tmp_path / 'venv', ['--no-setuptools', '--wheel=bundle']
    )
    # TODO: Use `--no-wheel` when setuptools implements its own bdist_wheel
    env.req = str(setuptools_wheel)
    # In some environments (eg. downstream distro packaging),
    # where tox isn't used to run tests and PYTHONPATH is set to point to
    # a specific setuptools codebase, PYTHONPATH will leak into the spawned
    # processes.
    # env.create() should install the just created setuptools
    # wheel, but it doesn't if it finds another existing matching setuptools
    # installation present on PYTHONPATH:
    # `setuptools is already installed with the same version as the provided
    # wheel. Use --force-reinstall to force an installation of the wheel.`
    # This prevents leaking PYTHONPATH to the created environment.
    with contexts.environment(PYTHONPATH=None):
        return env.create()


@pytest.fixture
def venv_without_setuptools(tmp_path):
    """Virtual env without any version of setuptools installed"""
    env = _configured_virtualenv(
        tmp_path / 'venv_without_setuptools',
        ['--no-setuptools', '--no-wheel'],
    )
    env.ensure_env()
    return env


@pytest.fixture
def bare_venv(tmp_path):
    """Virtual env without any common packages installed"""
    env = _configured_virtualenv(
        tmp_path / 'bare_venv',
        ['--no-setuptools', '--no-pip', '--no-wheel', '--no-seed'],
    )
    env.ensure_env()
    return env
|
videollama2/lib/python3.10/site-packages/setuptools/tests/script-with-bom.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
# Sentinel inspected by the test suite after executing this script; it proves
# that a script carrying a UTF-8 byte-order mark still runs to completion.
result = 'passed'
|
videollama2/lib/python3.10/site-packages/setuptools/tests/server.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Basic http server for tests to simulate PyPI or custom indexes"""
|
| 2 |
+
|
| 3 |
+
import http.server
|
| 4 |
+
import os
|
| 5 |
+
import threading
|
| 6 |
+
import time
|
| 7 |
+
import urllib.parse
|
| 8 |
+
import urllib.request
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class IndexServer(http.server.HTTPServer):
    """Basic single-threaded http server simulating a package index

    You can use this server in unittest like this::
        s = IndexServer()
        s.start()
        index_url = s.base_url() + 'mytestindex'
        # do some test requests to the index
        # The index files should be located in setuptools/tests/indexes
        s.stop()
    """

    def __init__(
        self,
        server_address=('', 0),
        RequestHandlerClass=http.server.SimpleHTTPRequestHandler,
    ):
        super().__init__(server_address, RequestHandlerClass)
        self._run = True

    def start(self):
        """Serve requests on a background thread until stop() is called."""
        self.thread = threading.Thread(target=self.serve_forever)
        self.thread.start()

    def stop(self):
        "Stop the server"

        # Let the server finish the last request and wait for a new one.
        time.sleep(0.1)

        self.shutdown()
        self.thread.join()
        self.socket.close()

    def base_url(self):
        """URL prefix under which the test index files are served."""
        return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % self.server_port
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class RequestRecorder(http.server.BaseHTTPRequestHandler):
    """Handler that records each GET on the owning server and replies 200."""

    def do_GET(self):
        seen = vars(self.server).setdefault('requests', [])
        seen.append(self)
        self.send_response(200, 'OK')


class MockServer(http.server.HTTPServer, threading.Thread):
    """
    A simple HTTP Server that records the requests made to it.
    """

    def __init__(self, server_address=('', 0), RequestHandlerClass=RequestRecorder):
        http.server.HTTPServer.__init__(self, server_address, RequestHandlerClass)
        threading.Thread.__init__(self)
        self.daemon = True
        self.requests = []

    def run(self):
        self.serve_forever()

    @property
    def netloc(self):
        """host:port the server is bound to."""
        return 'localhost:%d' % self.server_port

    @property
    def url(self):
        """Base URL for issuing requests against this server."""
        return 'http://%s/' % self.netloc
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def path_to_url(path, authority=None):
    """Convert a path to a file: URL."""
    absolute = os.path.normpath(os.path.abspath(path))
    # With no authority the URL is 'file:<path>'; otherwise 'file://<host><path>'.
    prefix = 'file:' if authority is None else 'file://' + authority
    return urllib.parse.urljoin(prefix, urllib.request.pathname2url(absolute))
|
videollama2/lib/python3.10/site-packages/setuptools/tests/test_bdist_egg.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""develop tests"""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
import zipfile
|
| 6 |
+
|
| 7 |
+
import pytest
|
| 8 |
+
|
| 9 |
+
from setuptools.dist import Distribution
|
| 10 |
+
|
| 11 |
+
from . import contexts
|
| 12 |
+
|
| 13 |
+
SETUP_PY = """\
from setuptools import setup

setup(py_modules=['hi'])
"""


@pytest.fixture
def setup_context(tmpdir):
    """Write a minimal project (setup.py + hi.py) and chdir into it."""
    with (tmpdir / 'setup.py').open('w') as f:
        f.write(SETUP_PY)
    with (tmpdir / 'hi.py').open('w') as f:
        f.write('1\n')
    with tmpdir.as_cwd():
        yield tmpdir


class Test:
    @pytest.mark.usefixtures("user_override")
    @pytest.mark.usefixtures("setup_context")
    def test_bdist_egg(self):
        """Running ``bdist_egg`` produces a correctly named egg in dist/."""
        dist = Distribution(
            dict(
                script_name='setup.py',
                script_args=['bdist_egg'],
                name='foo',
                py_modules=['hi'],
            )
        )
        os.makedirs(os.path.join('build', 'src'))
        with contexts.quiet():
            dist.parse_command_line()
            dist.run_commands()

        # let's see if we got our egg link at the right place
        [content] = os.listdir('dist')
        assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content)

    @pytest.mark.xfail(
        os.environ.get('PYTHONDONTWRITEBYTECODE', False),
        reason="Byte code disabled",
    )
    @pytest.mark.usefixtures("user_override")
    @pytest.mark.usefixtures("setup_context")
    def test_exclude_source_files(self):
        """``--exclude-source-files`` ships only byte-code, not sources."""
        dist = Distribution(
            dict(
                script_name='setup.py',
                script_args=['bdist_egg', '--exclude-source-files'],
                py_modules=['hi'],
            )
        )
        with contexts.quiet():
            dist.parse_command_line()
            dist.run_commands()
        [dist_name] = os.listdir('dist')
        dist_filename = os.path.join('dist', dist_name)
        # Use a context manager so the archive handle is closed (the original
        # leaked it, which keeps the file locked on Windows); also avoids
        # shadowing the builtin ``zip``.
        with zipfile.ZipFile(dist_filename) as archive:
            names = [zi.filename for zi in archive.filelist]
        assert 'hi.pyc' in names
        assert 'hi.py' not in names
|
videollama2/lib/python3.10/site-packages/setuptools/tests/test_build.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from setuptools import Command
|
| 2 |
+
from setuptools.command.build import build
|
| 3 |
+
from setuptools.dist import Distribution
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
    """
    Check that the setuptools Distribution uses the
    setuptools specific build object.
    """

    attrs = {
        'script_name': 'setup.py',
        'script_args': ['build'],
        'packages': [],
        'package_data': {'': ['path/*']},
    }
    dist = Distribution(attrs)
    assert isinstance(dist.get_command_obj("build"), build)


class Subcommand(Command):
    """Dummy command to be used in tests"""

    def initialize_options(self):
        # No options to set up for this dummy command.
        pass

    def finalize_options(self):
        # Nothing to validate either.
        pass

    def run(self):
        raise NotImplementedError("just to check if the command runs")
|
videollama2/lib/python3.10/site-packages/setuptools/tests/test_build_meta.py
ADDED
|
@@ -0,0 +1,970 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import importlib
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
import shutil
|
| 6 |
+
import signal
|
| 7 |
+
import sys
|
| 8 |
+
import tarfile
|
| 9 |
+
from concurrent import futures
|
| 10 |
+
from pathlib import Path
|
| 11 |
+
from typing import Any, Callable
|
| 12 |
+
from zipfile import ZipFile
|
| 13 |
+
|
| 14 |
+
import pytest
|
| 15 |
+
from jaraco import path
|
| 16 |
+
from packaging.requirements import Requirement
|
| 17 |
+
|
| 18 |
+
from .textwrap import DALS
|
| 19 |
+
|
| 20 |
+
SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
TIMEOUT = int(os.getenv("TIMEOUT_BACKEND_TEST", "180")) # in seconds
|
| 24 |
+
IS_PYPY = '__pypy__' in sys.builtin_module_names
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
pytestmark = pytest.mark.skipif(
|
| 28 |
+
sys.platform == "win32" and IS_PYPY,
|
| 29 |
+
reason="The combination of PyPy + Windows + pytest-xdist + ProcessPoolExecutor "
|
| 30 |
+
"is flaky and problematic",
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class BuildBackendBase:
|
| 35 |
+
def __init__(self, cwd='.', env=None, backend_name='setuptools.build_meta'):
|
| 36 |
+
self.cwd = cwd
|
| 37 |
+
self.env = env or {}
|
| 38 |
+
self.backend_name = backend_name
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class BuildBackend(BuildBackendBase):
|
| 42 |
+
"""PEP 517 Build Backend"""
|
| 43 |
+
|
| 44 |
+
def __init__(self, *args, **kwargs):
|
| 45 |
+
super().__init__(*args, **kwargs)
|
| 46 |
+
self.pool = futures.ProcessPoolExecutor(max_workers=1)
|
| 47 |
+
|
| 48 |
+
def __getattr__(self, name: str) -> Callable[..., Any]:
|
| 49 |
+
"""Handles arbitrary function invocations on the build backend."""
|
| 50 |
+
|
| 51 |
+
def method(*args, **kw):
|
| 52 |
+
root = os.path.abspath(self.cwd)
|
| 53 |
+
caller = BuildBackendCaller(root, self.env, self.backend_name)
|
| 54 |
+
pid = None
|
| 55 |
+
try:
|
| 56 |
+
pid = self.pool.submit(os.getpid).result(TIMEOUT)
|
| 57 |
+
return self.pool.submit(caller, name, *args, **kw).result(TIMEOUT)
|
| 58 |
+
except futures.TimeoutError:
|
| 59 |
+
self.pool.shutdown(wait=False) # doesn't stop already running processes
|
| 60 |
+
self._kill(pid)
|
| 61 |
+
pytest.xfail(f"Backend did not respond before timeout ({TIMEOUT} s)")
|
| 62 |
+
except (futures.process.BrokenProcessPool, MemoryError, OSError):
|
| 63 |
+
if IS_PYPY:
|
| 64 |
+
pytest.xfail("PyPy frequently fails tests with ProcessPoolExector")
|
| 65 |
+
raise
|
| 66 |
+
|
| 67 |
+
return method
|
| 68 |
+
|
| 69 |
+
def _kill(self, pid):
|
| 70 |
+
if pid is None:
|
| 71 |
+
return
|
| 72 |
+
with contextlib.suppress(ProcessLookupError, OSError):
|
| 73 |
+
os.kill(pid, signal.SIGTERM if os.name == "nt" else signal.SIGKILL)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class BuildBackendCaller(BuildBackendBase):
|
| 77 |
+
def __init__(self, *args, **kwargs):
|
| 78 |
+
super().__init__(*args, **kwargs)
|
| 79 |
+
|
| 80 |
+
(self.backend_name, _, self.backend_obj) = self.backend_name.partition(':')
|
| 81 |
+
|
| 82 |
+
def __call__(self, name, *args, **kw):
|
| 83 |
+
"""Handles arbitrary function invocations on the build backend."""
|
| 84 |
+
os.chdir(self.cwd)
|
| 85 |
+
os.environ.update(self.env)
|
| 86 |
+
mod = importlib.import_module(self.backend_name)
|
| 87 |
+
|
| 88 |
+
if self.backend_obj:
|
| 89 |
+
backend = getattr(mod, self.backend_obj)
|
| 90 |
+
else:
|
| 91 |
+
backend = mod
|
| 92 |
+
|
| 93 |
+
return getattr(backend, name)(*args, **kw)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
defns = [
|
| 97 |
+
{ # simple setup.py script
|
| 98 |
+
'setup.py': DALS(
|
| 99 |
+
"""
|
| 100 |
+
__import__('setuptools').setup(
|
| 101 |
+
name='foo',
|
| 102 |
+
version='0.0.0',
|
| 103 |
+
py_modules=['hello'],
|
| 104 |
+
setup_requires=['six'],
|
| 105 |
+
)
|
| 106 |
+
"""
|
| 107 |
+
),
|
| 108 |
+
'hello.py': DALS(
|
| 109 |
+
"""
|
| 110 |
+
def run():
|
| 111 |
+
print('hello')
|
| 112 |
+
"""
|
| 113 |
+
),
|
| 114 |
+
},
|
| 115 |
+
{ # setup.py that relies on __name__
|
| 116 |
+
'setup.py': DALS(
|
| 117 |
+
"""
|
| 118 |
+
assert __name__ == '__main__'
|
| 119 |
+
__import__('setuptools').setup(
|
| 120 |
+
name='foo',
|
| 121 |
+
version='0.0.0',
|
| 122 |
+
py_modules=['hello'],
|
| 123 |
+
setup_requires=['six'],
|
| 124 |
+
)
|
| 125 |
+
"""
|
| 126 |
+
),
|
| 127 |
+
'hello.py': DALS(
|
| 128 |
+
"""
|
| 129 |
+
def run():
|
| 130 |
+
print('hello')
|
| 131 |
+
"""
|
| 132 |
+
),
|
| 133 |
+
},
|
| 134 |
+
{ # setup.py script that runs arbitrary code
|
| 135 |
+
'setup.py': DALS(
|
| 136 |
+
"""
|
| 137 |
+
variable = True
|
| 138 |
+
def function():
|
| 139 |
+
return variable
|
| 140 |
+
assert variable
|
| 141 |
+
__import__('setuptools').setup(
|
| 142 |
+
name='foo',
|
| 143 |
+
version='0.0.0',
|
| 144 |
+
py_modules=['hello'],
|
| 145 |
+
setup_requires=['six'],
|
| 146 |
+
)
|
| 147 |
+
"""
|
| 148 |
+
),
|
| 149 |
+
'hello.py': DALS(
|
| 150 |
+
"""
|
| 151 |
+
def run():
|
| 152 |
+
print('hello')
|
| 153 |
+
"""
|
| 154 |
+
),
|
| 155 |
+
},
|
| 156 |
+
{ # setup.py script that constructs temp files to be included in the distribution
|
| 157 |
+
'setup.py': DALS(
|
| 158 |
+
"""
|
| 159 |
+
# Some packages construct files on the fly, include them in the package,
|
| 160 |
+
# and immediately remove them after `setup()` (e.g. pybind11==2.9.1).
|
| 161 |
+
# Therefore, we cannot use `distutils.core.run_setup(..., stop_after=...)`
|
| 162 |
+
# to obtain a distribution object first, and then run the distutils
|
| 163 |
+
# commands later, because these files will be removed in the meantime.
|
| 164 |
+
|
| 165 |
+
with open('world.py', 'w', encoding="utf-8") as f:
|
| 166 |
+
f.write('x = 42')
|
| 167 |
+
|
| 168 |
+
try:
|
| 169 |
+
__import__('setuptools').setup(
|
| 170 |
+
name='foo',
|
| 171 |
+
version='0.0.0',
|
| 172 |
+
py_modules=['world'],
|
| 173 |
+
setup_requires=['six'],
|
| 174 |
+
)
|
| 175 |
+
finally:
|
| 176 |
+
# Some packages will clean temporary files
|
| 177 |
+
__import__('os').unlink('world.py')
|
| 178 |
+
"""
|
| 179 |
+
),
|
| 180 |
+
},
|
| 181 |
+
{ # setup.cfg only
|
| 182 |
+
'setup.cfg': DALS(
|
| 183 |
+
"""
|
| 184 |
+
[metadata]
|
| 185 |
+
name = foo
|
| 186 |
+
version = 0.0.0
|
| 187 |
+
|
| 188 |
+
[options]
|
| 189 |
+
py_modules=hello
|
| 190 |
+
setup_requires=six
|
| 191 |
+
"""
|
| 192 |
+
),
|
| 193 |
+
'hello.py': DALS(
|
| 194 |
+
"""
|
| 195 |
+
def run():
|
| 196 |
+
print('hello')
|
| 197 |
+
"""
|
| 198 |
+
),
|
| 199 |
+
},
|
| 200 |
+
{ # setup.cfg and setup.py
|
| 201 |
+
'setup.cfg': DALS(
|
| 202 |
+
"""
|
| 203 |
+
[metadata]
|
| 204 |
+
name = foo
|
| 205 |
+
version = 0.0.0
|
| 206 |
+
|
| 207 |
+
[options]
|
| 208 |
+
py_modules=hello
|
| 209 |
+
setup_requires=six
|
| 210 |
+
"""
|
| 211 |
+
),
|
| 212 |
+
'setup.py': "__import__('setuptools').setup()",
|
| 213 |
+
'hello.py': DALS(
|
| 214 |
+
"""
|
| 215 |
+
def run():
|
| 216 |
+
print('hello')
|
| 217 |
+
"""
|
| 218 |
+
),
|
| 219 |
+
},
|
| 220 |
+
]
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
class TestBuildMetaBackend:
|
| 224 |
+
backend_name = 'setuptools.build_meta'
|
| 225 |
+
|
| 226 |
+
def get_build_backend(self):
|
| 227 |
+
return BuildBackend(backend_name=self.backend_name)
|
| 228 |
+
|
| 229 |
+
@pytest.fixture(params=defns)
|
| 230 |
+
def build_backend(self, tmpdir, request):
|
| 231 |
+
path.build(request.param, prefix=str(tmpdir))
|
| 232 |
+
with tmpdir.as_cwd():
|
| 233 |
+
yield self.get_build_backend()
|
| 234 |
+
|
| 235 |
+
def test_get_requires_for_build_wheel(self, build_backend):
|
| 236 |
+
actual = build_backend.get_requires_for_build_wheel()
|
| 237 |
+
expected = ['six']
|
| 238 |
+
assert sorted(actual) == sorted(expected)
|
| 239 |
+
|
| 240 |
+
def test_get_requires_for_build_sdist(self, build_backend):
|
| 241 |
+
actual = build_backend.get_requires_for_build_sdist()
|
| 242 |
+
expected = ['six']
|
| 243 |
+
assert sorted(actual) == sorted(expected)
|
| 244 |
+
|
| 245 |
+
def test_build_wheel(self, build_backend):
|
| 246 |
+
dist_dir = os.path.abspath('pip-wheel')
|
| 247 |
+
os.makedirs(dist_dir)
|
| 248 |
+
wheel_name = build_backend.build_wheel(dist_dir)
|
| 249 |
+
|
| 250 |
+
wheel_file = os.path.join(dist_dir, wheel_name)
|
| 251 |
+
assert os.path.isfile(wheel_file)
|
| 252 |
+
|
| 253 |
+
# Temporary files should be removed
|
| 254 |
+
assert not os.path.isfile('world.py')
|
| 255 |
+
|
| 256 |
+
with ZipFile(wheel_file) as zipfile:
|
| 257 |
+
wheel_contents = set(zipfile.namelist())
|
| 258 |
+
|
| 259 |
+
# Each one of the examples have a single module
|
| 260 |
+
# that should be included in the distribution
|
| 261 |
+
python_scripts = (f for f in wheel_contents if f.endswith('.py'))
|
| 262 |
+
modules = [f for f in python_scripts if not f.endswith('setup.py')]
|
| 263 |
+
assert len(modules) == 1
|
| 264 |
+
|
| 265 |
+
@pytest.mark.parametrize('build_type', ('wheel', 'sdist'))
|
| 266 |
+
def test_build_with_existing_file_present(self, build_type, tmpdir_cwd):
|
| 267 |
+
# Building a sdist/wheel should still succeed if there's
|
| 268 |
+
# already a sdist/wheel in the destination directory.
|
| 269 |
+
files = {
|
| 270 |
+
'setup.py': "from setuptools import setup\nsetup()",
|
| 271 |
+
'VERSION': "0.0.1",
|
| 272 |
+
'setup.cfg': DALS(
|
| 273 |
+
"""
|
| 274 |
+
[metadata]
|
| 275 |
+
name = foo
|
| 276 |
+
version = file: VERSION
|
| 277 |
+
"""
|
| 278 |
+
),
|
| 279 |
+
'pyproject.toml': DALS(
|
| 280 |
+
"""
|
| 281 |
+
[build-system]
|
| 282 |
+
requires = ["setuptools", "wheel"]
|
| 283 |
+
build-backend = "setuptools.build_meta"
|
| 284 |
+
"""
|
| 285 |
+
),
|
| 286 |
+
}
|
| 287 |
+
|
| 288 |
+
path.build(files)
|
| 289 |
+
|
| 290 |
+
dist_dir = os.path.abspath('preexisting-' + build_type)
|
| 291 |
+
|
| 292 |
+
build_backend = self.get_build_backend()
|
| 293 |
+
build_method = getattr(build_backend, 'build_' + build_type)
|
| 294 |
+
|
| 295 |
+
# Build a first sdist/wheel.
|
| 296 |
+
# Note: this also check the destination directory is
|
| 297 |
+
# successfully created if it does not exist already.
|
| 298 |
+
first_result = build_method(dist_dir)
|
| 299 |
+
|
| 300 |
+
# Change version.
|
| 301 |
+
with open("VERSION", "wt", encoding="utf-8") as version_file:
|
| 302 |
+
version_file.write("0.0.2")
|
| 303 |
+
|
| 304 |
+
# Build a *second* sdist/wheel.
|
| 305 |
+
second_result = build_method(dist_dir)
|
| 306 |
+
|
| 307 |
+
assert os.path.isfile(os.path.join(dist_dir, first_result))
|
| 308 |
+
assert first_result != second_result
|
| 309 |
+
|
| 310 |
+
# And if rebuilding the exact same sdist/wheel?
|
| 311 |
+
open(os.path.join(dist_dir, second_result), 'wb').close()
|
| 312 |
+
third_result = build_method(dist_dir)
|
| 313 |
+
assert third_result == second_result
|
| 314 |
+
assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0
|
| 315 |
+
|
| 316 |
+
@pytest.mark.parametrize("setup_script", [None, SETUP_SCRIPT_STUB])
|
| 317 |
+
def test_build_with_pyproject_config(self, tmpdir, setup_script):
|
| 318 |
+
files = {
|
| 319 |
+
'pyproject.toml': DALS(
|
| 320 |
+
"""
|
| 321 |
+
[build-system]
|
| 322 |
+
requires = ["setuptools", "wheel"]
|
| 323 |
+
build-backend = "setuptools.build_meta"
|
| 324 |
+
|
| 325 |
+
[project]
|
| 326 |
+
name = "foo"
|
| 327 |
+
license = {text = "MIT"}
|
| 328 |
+
description = "This is a Python package"
|
| 329 |
+
dynamic = ["version", "readme"]
|
| 330 |
+
classifiers = [
|
| 331 |
+
"Development Status :: 5 - Production/Stable",
|
| 332 |
+
"Intended Audience :: Developers"
|
| 333 |
+
]
|
| 334 |
+
urls = {Homepage = "http://github.com"}
|
| 335 |
+
dependencies = [
|
| 336 |
+
"appdirs",
|
| 337 |
+
]
|
| 338 |
+
|
| 339 |
+
[project.optional-dependencies]
|
| 340 |
+
all = [
|
| 341 |
+
"tomli>=1",
|
| 342 |
+
"pyscaffold>=4,<5",
|
| 343 |
+
'importlib; python_version == "2.6"',
|
| 344 |
+
]
|
| 345 |
+
|
| 346 |
+
[project.scripts]
|
| 347 |
+
foo = "foo.cli:main"
|
| 348 |
+
|
| 349 |
+
[tool.setuptools]
|
| 350 |
+
zip-safe = false
|
| 351 |
+
package-dir = {"" = "src"}
|
| 352 |
+
packages = {find = {where = ["src"]}}
|
| 353 |
+
license-files = ["LICENSE*"]
|
| 354 |
+
|
| 355 |
+
[tool.setuptools.dynamic]
|
| 356 |
+
version = {attr = "foo.__version__"}
|
| 357 |
+
readme = {file = "README.rst"}
|
| 358 |
+
|
| 359 |
+
[tool.distutils.sdist]
|
| 360 |
+
formats = "gztar"
|
| 361 |
+
"""
|
| 362 |
+
),
|
| 363 |
+
"MANIFEST.in": DALS(
|
| 364 |
+
"""
|
| 365 |
+
global-include *.py *.txt
|
| 366 |
+
global-exclude *.py[cod]
|
| 367 |
+
"""
|
| 368 |
+
),
|
| 369 |
+
"README.rst": "This is a ``README``",
|
| 370 |
+
"LICENSE.txt": "---- placeholder MIT license ----",
|
| 371 |
+
"src": {
|
| 372 |
+
"foo": {
|
| 373 |
+
"__init__.py": "__version__ = '0.1'",
|
| 374 |
+
"__init__.pyi": "__version__: str",
|
| 375 |
+
"cli.py": "def main(): print('hello world')",
|
| 376 |
+
"data.txt": "def main(): print('hello world')",
|
| 377 |
+
"py.typed": "",
|
| 378 |
+
}
|
| 379 |
+
},
|
| 380 |
+
}
|
| 381 |
+
if setup_script:
|
| 382 |
+
files["setup.py"] = setup_script
|
| 383 |
+
|
| 384 |
+
build_backend = self.get_build_backend()
|
| 385 |
+
with tmpdir.as_cwd():
|
| 386 |
+
path.build(files)
|
| 387 |
+
sdist_path = build_backend.build_sdist("temp")
|
| 388 |
+
wheel_file = build_backend.build_wheel("temp")
|
| 389 |
+
|
| 390 |
+
with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
|
| 391 |
+
sdist_contents = set(tar.getnames())
|
| 392 |
+
|
| 393 |
+
with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
|
| 394 |
+
wheel_contents = set(zipfile.namelist())
|
| 395 |
+
metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8")
|
| 396 |
+
license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8")
|
| 397 |
+
epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8")
|
| 398 |
+
|
| 399 |
+
assert sdist_contents - {"foo-0.1/setup.py"} == {
|
| 400 |
+
'foo-0.1',
|
| 401 |
+
'foo-0.1/LICENSE.txt',
|
| 402 |
+
'foo-0.1/MANIFEST.in',
|
| 403 |
+
'foo-0.1/PKG-INFO',
|
| 404 |
+
'foo-0.1/README.rst',
|
| 405 |
+
'foo-0.1/pyproject.toml',
|
| 406 |
+
'foo-0.1/setup.cfg',
|
| 407 |
+
'foo-0.1/src',
|
| 408 |
+
'foo-0.1/src/foo',
|
| 409 |
+
'foo-0.1/src/foo/__init__.py',
|
| 410 |
+
'foo-0.1/src/foo/__init__.pyi',
|
| 411 |
+
'foo-0.1/src/foo/cli.py',
|
| 412 |
+
'foo-0.1/src/foo/data.txt',
|
| 413 |
+
'foo-0.1/src/foo/py.typed',
|
| 414 |
+
'foo-0.1/src/foo.egg-info',
|
| 415 |
+
'foo-0.1/src/foo.egg-info/PKG-INFO',
|
| 416 |
+
'foo-0.1/src/foo.egg-info/SOURCES.txt',
|
| 417 |
+
'foo-0.1/src/foo.egg-info/dependency_links.txt',
|
| 418 |
+
'foo-0.1/src/foo.egg-info/entry_points.txt',
|
| 419 |
+
'foo-0.1/src/foo.egg-info/requires.txt',
|
| 420 |
+
'foo-0.1/src/foo.egg-info/top_level.txt',
|
| 421 |
+
'foo-0.1/src/foo.egg-info/not-zip-safe',
|
| 422 |
+
}
|
| 423 |
+
assert wheel_contents == {
|
| 424 |
+
"foo/__init__.py",
|
| 425 |
+
"foo/__init__.pyi", # include type information by default
|
| 426 |
+
"foo/cli.py",
|
| 427 |
+
"foo/data.txt", # include_package_data defaults to True
|
| 428 |
+
"foo/py.typed", # include type information by default
|
| 429 |
+
"foo-0.1.dist-info/LICENSE.txt",
|
| 430 |
+
"foo-0.1.dist-info/METADATA",
|
| 431 |
+
"foo-0.1.dist-info/WHEEL",
|
| 432 |
+
"foo-0.1.dist-info/entry_points.txt",
|
| 433 |
+
"foo-0.1.dist-info/top_level.txt",
|
| 434 |
+
"foo-0.1.dist-info/RECORD",
|
| 435 |
+
}
|
| 436 |
+
assert license == "---- placeholder MIT license ----"
|
| 437 |
+
|
| 438 |
+
for line in (
|
| 439 |
+
"Summary: This is a Python package",
|
| 440 |
+
"License: MIT",
|
| 441 |
+
"Classifier: Intended Audience :: Developers",
|
| 442 |
+
"Requires-Dist: appdirs",
|
| 443 |
+
"Requires-Dist: " + str(Requirement('tomli>=1 ; extra == "all"')),
|
| 444 |
+
"Requires-Dist: "
|
| 445 |
+
+ str(Requirement('importlib; python_version=="2.6" and extra =="all"')),
|
| 446 |
+
):
|
| 447 |
+
assert line in metadata, (line, metadata)
|
| 448 |
+
|
| 449 |
+
assert metadata.strip().endswith("This is a ``README``")
|
| 450 |
+
assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main"
|
| 451 |
+
|
| 452 |
+
def test_static_metadata_in_pyproject_config(self, tmpdir):
|
| 453 |
+
# Make sure static metadata in pyproject.toml is not overwritten by setup.py
|
| 454 |
+
# as required by PEP 621
|
| 455 |
+
files = {
|
| 456 |
+
'pyproject.toml': DALS(
|
| 457 |
+
"""
|
| 458 |
+
[build-system]
|
| 459 |
+
requires = ["setuptools", "wheel"]
|
| 460 |
+
build-backend = "setuptools.build_meta"
|
| 461 |
+
|
| 462 |
+
[project]
|
| 463 |
+
name = "foo"
|
| 464 |
+
description = "This is a Python package"
|
| 465 |
+
version = "42"
|
| 466 |
+
dependencies = ["six"]
|
| 467 |
+
"""
|
| 468 |
+
),
|
| 469 |
+
'hello.py': DALS(
|
| 470 |
+
"""
|
| 471 |
+
def run():
|
| 472 |
+
print('hello')
|
| 473 |
+
"""
|
| 474 |
+
),
|
| 475 |
+
'setup.py': DALS(
|
| 476 |
+
"""
|
| 477 |
+
__import__('setuptools').setup(
|
| 478 |
+
name='bar',
|
| 479 |
+
version='13',
|
| 480 |
+
)
|
| 481 |
+
"""
|
| 482 |
+
),
|
| 483 |
+
}
|
| 484 |
+
build_backend = self.get_build_backend()
|
| 485 |
+
with tmpdir.as_cwd():
|
| 486 |
+
path.build(files)
|
| 487 |
+
sdist_path = build_backend.build_sdist("temp")
|
| 488 |
+
wheel_file = build_backend.build_wheel("temp")
|
| 489 |
+
|
| 490 |
+
assert (tmpdir / "temp/foo-42.tar.gz").exists()
|
| 491 |
+
assert (tmpdir / "temp/foo-42-py3-none-any.whl").exists()
|
| 492 |
+
assert not (tmpdir / "temp/bar-13.tar.gz").exists()
|
| 493 |
+
assert not (tmpdir / "temp/bar-42.tar.gz").exists()
|
| 494 |
+
assert not (tmpdir / "temp/foo-13.tar.gz").exists()
|
| 495 |
+
assert not (tmpdir / "temp/bar-13-py3-none-any.whl").exists()
|
| 496 |
+
assert not (tmpdir / "temp/bar-42-py3-none-any.whl").exists()
|
| 497 |
+
assert not (tmpdir / "temp/foo-13-py3-none-any.whl").exists()
|
| 498 |
+
|
| 499 |
+
with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
|
| 500 |
+
pkg_info = str(tar.extractfile('foo-42/PKG-INFO').read(), "utf-8")
|
| 501 |
+
members = tar.getnames()
|
| 502 |
+
assert "bar-13/PKG-INFO" not in members
|
| 503 |
+
|
| 504 |
+
with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
|
| 505 |
+
metadata = str(zipfile.read("foo-42.dist-info/METADATA"), "utf-8")
|
| 506 |
+
members = zipfile.namelist()
|
| 507 |
+
assert "bar-13.dist-info/METADATA" not in members
|
| 508 |
+
|
| 509 |
+
for file in pkg_info, metadata:
|
| 510 |
+
for line in ("Name: foo", "Version: 42"):
|
| 511 |
+
assert line in file
|
| 512 |
+
for line in ("Name: bar", "Version: 13"):
|
| 513 |
+
assert line not in file
|
| 514 |
+
|
| 515 |
+
def test_build_sdist(self, build_backend):
|
| 516 |
+
dist_dir = os.path.abspath('pip-sdist')
|
| 517 |
+
os.makedirs(dist_dir)
|
| 518 |
+
sdist_name = build_backend.build_sdist(dist_dir)
|
| 519 |
+
|
| 520 |
+
assert os.path.isfile(os.path.join(dist_dir, sdist_name))
|
| 521 |
+
|
| 522 |
+
def test_prepare_metadata_for_build_wheel(self, build_backend):
|
| 523 |
+
dist_dir = os.path.abspath('pip-dist-info')
|
| 524 |
+
os.makedirs(dist_dir)
|
| 525 |
+
|
| 526 |
+
dist_info = build_backend.prepare_metadata_for_build_wheel(dist_dir)
|
| 527 |
+
|
| 528 |
+
assert os.path.isfile(os.path.join(dist_dir, dist_info, 'METADATA'))
|
| 529 |
+
|
| 530 |
+
def test_prepare_metadata_inplace(self, build_backend):
|
| 531 |
+
"""
|
| 532 |
+
Some users might pass metadata_directory pre-populated with `.tox` or `.venv`.
|
| 533 |
+
See issue #3523.
|
| 534 |
+
"""
|
| 535 |
+
for pre_existing in [
|
| 536 |
+
".tox/python/lib/python3.10/site-packages/attrs-22.1.0.dist-info",
|
| 537 |
+
".tox/python/lib/python3.10/site-packages/autocommand-2.2.1.dist-info",
|
| 538 |
+
".nox/python/lib/python3.10/site-packages/build-0.8.0.dist-info",
|
| 539 |
+
".venv/python3.10/site-packages/click-8.1.3.dist-info",
|
| 540 |
+
"venv/python3.10/site-packages/distlib-0.3.5.dist-info",
|
| 541 |
+
"env/python3.10/site-packages/docutils-0.19.dist-info",
|
| 542 |
+
]:
|
| 543 |
+
os.makedirs(pre_existing, exist_ok=True)
|
| 544 |
+
dist_info = build_backend.prepare_metadata_for_build_wheel(".")
|
| 545 |
+
assert os.path.isfile(os.path.join(dist_info, 'METADATA'))
|
| 546 |
+
|
| 547 |
+
def test_build_sdist_explicit_dist(self, build_backend):
|
| 548 |
+
# explicitly specifying the dist folder should work
|
| 549 |
+
# the folder sdist_directory and the ``--dist-dir`` can be the same
|
| 550 |
+
dist_dir = os.path.abspath('dist')
|
| 551 |
+
sdist_name = build_backend.build_sdist(dist_dir)
|
| 552 |
+
assert os.path.isfile(os.path.join(dist_dir, sdist_name))
|
| 553 |
+
|
| 554 |
+
def test_build_sdist_version_change(self, build_backend):
|
| 555 |
+
sdist_into_directory = os.path.abspath("out_sdist")
|
| 556 |
+
os.makedirs(sdist_into_directory)
|
| 557 |
+
|
| 558 |
+
sdist_name = build_backend.build_sdist(sdist_into_directory)
|
| 559 |
+
assert os.path.isfile(os.path.join(sdist_into_directory, sdist_name))
|
| 560 |
+
|
| 561 |
+
# if the setup.py changes subsequent call of the build meta
|
| 562 |
+
# should still succeed, given the
|
| 563 |
+
# sdist_directory the frontend specifies is empty
|
| 564 |
+
setup_loc = os.path.abspath("setup.py")
|
| 565 |
+
if not os.path.exists(setup_loc):
|
| 566 |
+
setup_loc = os.path.abspath("setup.cfg")
|
| 567 |
+
|
| 568 |
+
with open(setup_loc, 'rt', encoding="utf-8") as file_handler:
|
| 569 |
+
content = file_handler.read()
|
| 570 |
+
with open(setup_loc, 'wt', encoding="utf-8") as file_handler:
|
| 571 |
+
file_handler.write(content.replace("version='0.0.0'", "version='0.0.1'"))
|
| 572 |
+
|
| 573 |
+
shutil.rmtree(sdist_into_directory)
|
| 574 |
+
os.makedirs(sdist_into_directory)
|
| 575 |
+
|
| 576 |
+
sdist_name = build_backend.build_sdist("out_sdist")
|
| 577 |
+
assert os.path.isfile(os.path.join(os.path.abspath("out_sdist"), sdist_name))
|
| 578 |
+
|
| 579 |
+
def test_build_sdist_pyproject_toml_exists(self, tmpdir_cwd):
|
| 580 |
+
files = {
|
| 581 |
+
'setup.py': DALS(
|
| 582 |
+
"""
|
| 583 |
+
__import__('setuptools').setup(
|
| 584 |
+
name='foo',
|
| 585 |
+
version='0.0.0',
|
| 586 |
+
py_modules=['hello']
|
| 587 |
+
)"""
|
| 588 |
+
),
|
| 589 |
+
'hello.py': '',
|
| 590 |
+
'pyproject.toml': DALS(
|
| 591 |
+
"""
|
| 592 |
+
[build-system]
|
| 593 |
+
requires = ["setuptools", "wheel"]
|
| 594 |
+
build-backend = "setuptools.build_meta"
|
| 595 |
+
"""
|
| 596 |
+
),
|
| 597 |
+
}
|
| 598 |
+
path.build(files)
|
| 599 |
+
build_backend = self.get_build_backend()
|
| 600 |
+
targz_path = build_backend.build_sdist("temp")
|
| 601 |
+
with tarfile.open(os.path.join("temp", targz_path)) as tar:
|
| 602 |
+
assert any('pyproject.toml' in name for name in tar.getnames())
|
| 603 |
+
|
| 604 |
+
def test_build_sdist_setup_py_exists(self, tmpdir_cwd):
|
| 605 |
+
# If build_sdist is called from a script other than setup.py,
|
| 606 |
+
# ensure setup.py is included
|
| 607 |
+
path.build(defns[0])
|
| 608 |
+
|
| 609 |
+
build_backend = self.get_build_backend()
|
| 610 |
+
targz_path = build_backend.build_sdist("temp")
|
| 611 |
+
with tarfile.open(os.path.join("temp", targz_path)) as tar:
|
| 612 |
+
assert any('setup.py' in name for name in tar.getnames())
|
| 613 |
+
|
| 614 |
+
def test_build_sdist_setup_py_manifest_excluded(self, tmpdir_cwd):
|
| 615 |
+
# Ensure that MANIFEST.in can exclude setup.py
|
| 616 |
+
files = {
|
| 617 |
+
'setup.py': DALS(
|
| 618 |
+
"""
|
| 619 |
+
__import__('setuptools').setup(
|
| 620 |
+
name='foo',
|
| 621 |
+
version='0.0.0',
|
| 622 |
+
py_modules=['hello']
|
| 623 |
+
)"""
|
| 624 |
+
),
|
| 625 |
+
'hello.py': '',
|
| 626 |
+
'MANIFEST.in': DALS(
|
| 627 |
+
"""
|
| 628 |
+
exclude setup.py
|
| 629 |
+
"""
|
| 630 |
+
),
|
| 631 |
+
}
|
| 632 |
+
|
| 633 |
+
path.build(files)
|
| 634 |
+
|
| 635 |
+
build_backend = self.get_build_backend()
|
| 636 |
+
targz_path = build_backend.build_sdist("temp")
|
| 637 |
+
with tarfile.open(os.path.join("temp", targz_path)) as tar:
|
| 638 |
+
assert not any('setup.py' in name for name in tar.getnames())
|
| 639 |
+
|
| 640 |
+
def test_build_sdist_builds_targz_even_if_zip_indicated(self, tmpdir_cwd):
|
| 641 |
+
files = {
|
| 642 |
+
'setup.py': DALS(
|
| 643 |
+
"""
|
| 644 |
+
__import__('setuptools').setup(
|
| 645 |
+
name='foo',
|
| 646 |
+
version='0.0.0',
|
| 647 |
+
py_modules=['hello']
|
| 648 |
+
)"""
|
| 649 |
+
),
|
| 650 |
+
'hello.py': '',
|
| 651 |
+
'setup.cfg': DALS(
|
| 652 |
+
"""
|
| 653 |
+
[sdist]
|
| 654 |
+
formats=zip
|
| 655 |
+
"""
|
| 656 |
+
),
|
| 657 |
+
}
|
| 658 |
+
|
| 659 |
+
path.build(files)
|
| 660 |
+
|
| 661 |
+
build_backend = self.get_build_backend()
|
| 662 |
+
build_backend.build_sdist("temp")
|
| 663 |
+
|
| 664 |
+
_relative_path_import_files = {
|
| 665 |
+
'setup.py': DALS(
|
| 666 |
+
"""
|
| 667 |
+
__import__('setuptools').setup(
|
| 668 |
+
name='foo',
|
| 669 |
+
version=__import__('hello').__version__,
|
| 670 |
+
py_modules=['hello']
|
| 671 |
+
)"""
|
| 672 |
+
),
|
| 673 |
+
'hello.py': '__version__ = "0.0.0"',
|
| 674 |
+
'setup.cfg': DALS(
|
| 675 |
+
"""
|
| 676 |
+
[sdist]
|
| 677 |
+
formats=zip
|
| 678 |
+
"""
|
| 679 |
+
),
|
| 680 |
+
}
|
| 681 |
+
|
| 682 |
+
def test_build_sdist_relative_path_import(self, tmpdir_cwd):
|
| 683 |
+
path.build(self._relative_path_import_files)
|
| 684 |
+
build_backend = self.get_build_backend()
|
| 685 |
+
with pytest.raises(ImportError, match="^No module named 'hello'$"):
|
| 686 |
+
build_backend.build_sdist("temp")
|
| 687 |
+
|
| 688 |
+
_simple_pyproject_example = {
|
| 689 |
+
"pyproject.toml": DALS(
|
| 690 |
+
"""
|
| 691 |
+
[project]
|
| 692 |
+
name = "proj"
|
| 693 |
+
version = "42"
|
| 694 |
+
"""
|
| 695 |
+
),
|
| 696 |
+
"src": {"proj": {"__init__.py": ""}},
|
| 697 |
+
}
|
| 698 |
+
|
| 699 |
+
def _assert_link_tree(self, parent_dir):
|
| 700 |
+
"""All files in the directory should be either links or hard links"""
|
| 701 |
+
files = list(Path(parent_dir).glob("**/*"))
|
| 702 |
+
assert files # Should not be empty
|
| 703 |
+
for file in files:
|
| 704 |
+
assert file.is_symlink() or os.stat(file).st_nlink > 0
|
| 705 |
+
|
| 706 |
+
def test_editable_without_config_settings(self, tmpdir_cwd):
|
| 707 |
+
"""
|
| 708 |
+
Sanity check to ensure tests with --mode=strict are different from the ones
|
| 709 |
+
without --mode.
|
| 710 |
+
|
| 711 |
+
--mode=strict should create a local directory with a package tree.
|
| 712 |
+
The directory should not get created otherwise.
|
| 713 |
+
"""
|
| 714 |
+
path.build(self._simple_pyproject_example)
|
| 715 |
+
build_backend = self.get_build_backend()
|
| 716 |
+
assert not Path("build").exists()
|
| 717 |
+
build_backend.build_editable("temp")
|
| 718 |
+
assert not Path("build").exists()
|
| 719 |
+
|
| 720 |
+
def test_build_wheel_inplace(self, tmpdir_cwd):
|
| 721 |
+
config_settings = {"--build-option": ["build_ext", "--inplace"]}
|
| 722 |
+
path.build(self._simple_pyproject_example)
|
| 723 |
+
build_backend = self.get_build_backend()
|
| 724 |
+
assert not Path("build").exists()
|
| 725 |
+
Path("build").mkdir()
|
| 726 |
+
build_backend.prepare_metadata_for_build_wheel("build", config_settings)
|
| 727 |
+
build_backend.build_wheel("build", config_settings)
|
| 728 |
+
assert Path("build/proj-42-py3-none-any.whl").exists()
|
| 729 |
+
|
| 730 |
+
@pytest.mark.parametrize("config_settings", [{"editable-mode": "strict"}])
|
| 731 |
+
def test_editable_with_config_settings(self, tmpdir_cwd, config_settings):
|
| 732 |
+
path.build({**self._simple_pyproject_example, '_meta': {}})
|
| 733 |
+
assert not Path("build").exists()
|
| 734 |
+
build_backend = self.get_build_backend()
|
| 735 |
+
build_backend.prepare_metadata_for_build_editable("_meta", config_settings)
|
| 736 |
+
build_backend.build_editable("temp", config_settings, "_meta")
|
| 737 |
+
self._assert_link_tree(next(Path("build").glob("__editable__.*")))
|
| 738 |
+
|
| 739 |
+
@pytest.mark.parametrize(
|
| 740 |
+
("setup_literal", "requirements"),
|
| 741 |
+
[
|
| 742 |
+
("'foo'", ['foo']),
|
| 743 |
+
("['foo']", ['foo']),
|
| 744 |
+
(r"'foo\n'", ['foo']),
|
| 745 |
+
(r"'foo\n\n'", ['foo']),
|
| 746 |
+
("['foo', 'bar']", ['foo', 'bar']),
|
| 747 |
+
(r"'# Has a comment line\nfoo'", ['foo']),
|
| 748 |
+
(r"'foo # Has an inline comment'", ['foo']),
|
| 749 |
+
(r"'foo \\\n >=3.0'", ['foo>=3.0']),
|
| 750 |
+
(r"'foo\nbar'", ['foo', 'bar']),
|
| 751 |
+
(r"'foo\nbar\n'", ['foo', 'bar']),
|
| 752 |
+
(r"['foo\n', 'bar\n']", ['foo', 'bar']),
|
| 753 |
+
],
|
| 754 |
+
)
|
| 755 |
+
@pytest.mark.parametrize('use_wheel', [True, False])
|
| 756 |
+
def test_setup_requires(self, setup_literal, requirements, use_wheel, tmpdir_cwd):
|
| 757 |
+
files = {
|
| 758 |
+
'setup.py': DALS(
|
| 759 |
+
"""
|
| 760 |
+
from setuptools import setup
|
| 761 |
+
|
| 762 |
+
setup(
|
| 763 |
+
name="qux",
|
| 764 |
+
version="0.0.0",
|
| 765 |
+
py_modules=["hello"],
|
| 766 |
+
setup_requires={setup_literal},
|
| 767 |
+
)
|
| 768 |
+
"""
|
| 769 |
+
).format(setup_literal=setup_literal),
|
| 770 |
+
'hello.py': DALS(
|
| 771 |
+
"""
|
| 772 |
+
def run():
|
| 773 |
+
print('hello')
|
| 774 |
+
"""
|
| 775 |
+
),
|
| 776 |
+
}
|
| 777 |
+
|
| 778 |
+
path.build(files)
|
| 779 |
+
|
| 780 |
+
build_backend = self.get_build_backend()
|
| 781 |
+
|
| 782 |
+
if use_wheel:
|
| 783 |
+
get_requires = build_backend.get_requires_for_build_wheel
|
| 784 |
+
else:
|
| 785 |
+
get_requires = build_backend.get_requires_for_build_sdist
|
| 786 |
+
|
| 787 |
+
# Ensure that the build requirements are properly parsed
|
| 788 |
+
expected = sorted(requirements)
|
| 789 |
+
actual = get_requires()
|
| 790 |
+
|
| 791 |
+
assert expected == sorted(actual)
|
| 792 |
+
|
| 793 |
+
def test_setup_requires_with_auto_discovery(self, tmpdir_cwd):
|
| 794 |
+
# Make sure patches introduced to retrieve setup_requires don't accidentally
|
| 795 |
+
# activate auto-discovery and cause problems due to the incomplete set of
|
| 796 |
+
# attributes passed to MinimalDistribution
|
| 797 |
+
files = {
|
| 798 |
+
'pyproject.toml': DALS(
|
| 799 |
+
"""
|
| 800 |
+
[project]
|
| 801 |
+
name = "proj"
|
| 802 |
+
version = "42"
|
| 803 |
+
"""
|
| 804 |
+
),
|
| 805 |
+
"setup.py": DALS(
|
| 806 |
+
"""
|
| 807 |
+
__import__('setuptools').setup(
|
| 808 |
+
setup_requires=["foo"],
|
| 809 |
+
py_modules = ["hello", "world"]
|
| 810 |
+
)
|
| 811 |
+
"""
|
| 812 |
+
),
|
| 813 |
+
'hello.py': "'hello'",
|
| 814 |
+
'world.py': "'world'",
|
| 815 |
+
}
|
| 816 |
+
path.build(files)
|
| 817 |
+
build_backend = self.get_build_backend()
|
| 818 |
+
setup_requires = build_backend.get_requires_for_build_wheel()
|
| 819 |
+
assert setup_requires == ["foo"]
|
| 820 |
+
|
| 821 |
+
def test_dont_install_setup_requires(self, tmpdir_cwd):
|
| 822 |
+
files = {
|
| 823 |
+
'setup.py': DALS(
|
| 824 |
+
"""
|
| 825 |
+
from setuptools import setup
|
| 826 |
+
|
| 827 |
+
setup(
|
| 828 |
+
name="qux",
|
| 829 |
+
version="0.0.0",
|
| 830 |
+
py_modules=["hello"],
|
| 831 |
+
setup_requires=["does-not-exist >99"],
|
| 832 |
+
)
|
| 833 |
+
"""
|
| 834 |
+
),
|
| 835 |
+
'hello.py': DALS(
|
| 836 |
+
"""
|
| 837 |
+
def run():
|
| 838 |
+
print('hello')
|
| 839 |
+
"""
|
| 840 |
+
),
|
| 841 |
+
}
|
| 842 |
+
|
| 843 |
+
path.build(files)
|
| 844 |
+
|
| 845 |
+
build_backend = self.get_build_backend()
|
| 846 |
+
|
| 847 |
+
dist_dir = os.path.abspath('pip-dist-info')
|
| 848 |
+
os.makedirs(dist_dir)
|
| 849 |
+
|
| 850 |
+
# does-not-exist can't be satisfied, so if it attempts to install
|
| 851 |
+
# setup_requires, it will fail.
|
| 852 |
+
build_backend.prepare_metadata_for_build_wheel(dist_dir)
|
| 853 |
+
|
| 854 |
+
_sys_argv_0_passthrough = {
|
| 855 |
+
'setup.py': DALS(
|
| 856 |
+
"""
|
| 857 |
+
import os
|
| 858 |
+
import sys
|
| 859 |
+
|
| 860 |
+
__import__('setuptools').setup(
|
| 861 |
+
name='foo',
|
| 862 |
+
version='0.0.0',
|
| 863 |
+
)
|
| 864 |
+
|
| 865 |
+
sys_argv = os.path.abspath(sys.argv[0])
|
| 866 |
+
file_path = os.path.abspath('setup.py')
|
| 867 |
+
assert sys_argv == file_path
|
| 868 |
+
"""
|
| 869 |
+
)
|
| 870 |
+
}
|
| 871 |
+
|
| 872 |
+
def test_sys_argv_passthrough(self, tmpdir_cwd):
|
| 873 |
+
path.build(self._sys_argv_0_passthrough)
|
| 874 |
+
build_backend = self.get_build_backend()
|
| 875 |
+
with pytest.raises(AssertionError):
|
| 876 |
+
build_backend.build_sdist("temp")
|
| 877 |
+
|
| 878 |
+
_setup_py_file_abspath = {
|
| 879 |
+
'setup.py': DALS(
|
| 880 |
+
"""
|
| 881 |
+
import os
|
| 882 |
+
assert os.path.isabs(__file__)
|
| 883 |
+
__import__('setuptools').setup(
|
| 884 |
+
name='foo',
|
| 885 |
+
version='0.0.0',
|
| 886 |
+
py_modules=['hello'],
|
| 887 |
+
setup_requires=['six'],
|
| 888 |
+
)
|
| 889 |
+
"""
|
| 890 |
+
)
|
| 891 |
+
}
|
| 892 |
+
|
| 893 |
+
def test_setup_py_file_abspath(self, tmpdir_cwd):
|
| 894 |
+
path.build(self._setup_py_file_abspath)
|
| 895 |
+
build_backend = self.get_build_backend()
|
| 896 |
+
build_backend.build_sdist("temp")
|
| 897 |
+
|
| 898 |
+
@pytest.mark.parametrize('build_hook', ('build_sdist', 'build_wheel'))
|
| 899 |
+
def test_build_with_empty_setuppy(self, build_backend, build_hook):
|
| 900 |
+
files = {'setup.py': ''}
|
| 901 |
+
path.build(files)
|
| 902 |
+
|
| 903 |
+
msg = re.escape('No distribution was found.')
|
| 904 |
+
with pytest.raises(ValueError, match=msg):
|
| 905 |
+
getattr(build_backend, build_hook)("temp")
|
| 906 |
+
|
| 907 |
+
|
| 908 |
+
class TestBuildMetaLegacyBackend(TestBuildMetaBackend):
|
| 909 |
+
backend_name = 'setuptools.build_meta:__legacy__'
|
| 910 |
+
|
| 911 |
+
# build_meta_legacy-specific tests
|
| 912 |
+
def test_build_sdist_relative_path_import(self, tmpdir_cwd):
|
| 913 |
+
# This must fail in build_meta, but must pass in build_meta_legacy
|
| 914 |
+
path.build(self._relative_path_import_files)
|
| 915 |
+
|
| 916 |
+
build_backend = self.get_build_backend()
|
| 917 |
+
build_backend.build_sdist("temp")
|
| 918 |
+
|
| 919 |
+
def test_sys_argv_passthrough(self, tmpdir_cwd):
|
| 920 |
+
path.build(self._sys_argv_0_passthrough)
|
| 921 |
+
|
| 922 |
+
build_backend = self.get_build_backend()
|
| 923 |
+
build_backend.build_sdist("temp")
|
| 924 |
+
|
| 925 |
+
|
| 926 |
+
def test_legacy_editable_install(venv, tmpdir, tmpdir_cwd):
|
| 927 |
+
pyproject = """
|
| 928 |
+
[build-system]
|
| 929 |
+
requires = ["setuptools"]
|
| 930 |
+
build-backend = "setuptools.build_meta"
|
| 931 |
+
[project]
|
| 932 |
+
name = "myproj"
|
| 933 |
+
version = "42"
|
| 934 |
+
"""
|
| 935 |
+
path.build({"pyproject.toml": DALS(pyproject), "mymod.py": ""})
|
| 936 |
+
|
| 937 |
+
# First: sanity check
|
| 938 |
+
cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
|
| 939 |
+
output = venv.run(cmd, cwd=tmpdir).lower()
|
| 940 |
+
assert "running setup.py develop for myproj" not in output
|
| 941 |
+
assert "created wheel for myproj" in output
|
| 942 |
+
|
| 943 |
+
# Then: real test
|
| 944 |
+
env = {**os.environ, "SETUPTOOLS_ENABLE_FEATURES": "legacy-editable"}
|
| 945 |
+
cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
|
| 946 |
+
output = venv.run(cmd, cwd=tmpdir, env=env).lower()
|
| 947 |
+
assert "running setup.py develop for myproj" in output
|
| 948 |
+
|
| 949 |
+
|
| 950 |
+
@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
|
| 951 |
+
def test_sys_exit_0_in_setuppy(monkeypatch, tmp_path):
|
| 952 |
+
"""Setuptools should be resilient to setup.py with ``sys.exit(0)`` (#3973)."""
|
| 953 |
+
monkeypatch.chdir(tmp_path)
|
| 954 |
+
setuppy = """
|
| 955 |
+
import sys, setuptools
|
| 956 |
+
setuptools.setup(name='foo', version='0.0.0')
|
| 957 |
+
sys.exit(0)
|
| 958 |
+
"""
|
| 959 |
+
(tmp_path / "setup.py").write_text(DALS(setuppy), encoding="utf-8")
|
| 960 |
+
backend = BuildBackend(backend_name="setuptools.build_meta")
|
| 961 |
+
assert backend.get_requires_for_build_wheel() == []
|
| 962 |
+
|
| 963 |
+
|
| 964 |
+
def test_system_exit_in_setuppy(monkeypatch, tmp_path):
|
| 965 |
+
monkeypatch.chdir(tmp_path)
|
| 966 |
+
setuppy = "import sys; sys.exit('some error')"
|
| 967 |
+
(tmp_path / "setup.py").write_text(setuppy, encoding="utf-8")
|
| 968 |
+
with pytest.raises(SystemExit, match="some error"):
|
| 969 |
+
backend = BuildBackend(backend_name="setuptools.build_meta")
|
| 970 |
+
backend.get_requires_for_build_wheel()
|
videollama2/lib/python3.10/site-packages/setuptools/tests/test_build_py.py
ADDED
|
@@ -0,0 +1,480 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import shutil
|
| 3 |
+
import stat
|
| 4 |
+
import warnings
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from unittest.mock import Mock
|
| 7 |
+
|
| 8 |
+
import jaraco.path
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
from setuptools import SetuptoolsDeprecationWarning
|
| 12 |
+
from setuptools.dist import Distribution
|
| 13 |
+
|
| 14 |
+
from .textwrap import DALS
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def test_directories_in_package_data_glob(tmpdir_cwd):
|
| 18 |
+
"""
|
| 19 |
+
Directories matching the glob in package_data should
|
| 20 |
+
not be included in the package data.
|
| 21 |
+
|
| 22 |
+
Regression test for #261.
|
| 23 |
+
"""
|
| 24 |
+
dist = Distribution(
|
| 25 |
+
dict(
|
| 26 |
+
script_name='setup.py',
|
| 27 |
+
script_args=['build_py'],
|
| 28 |
+
packages=[''],
|
| 29 |
+
package_data={'': ['path/*']},
|
| 30 |
+
)
|
| 31 |
+
)
|
| 32 |
+
os.makedirs('path/subpath')
|
| 33 |
+
dist.parse_command_line()
|
| 34 |
+
dist.run_commands()
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def test_recursive_in_package_data_glob(tmpdir_cwd):
|
| 38 |
+
"""
|
| 39 |
+
Files matching recursive globs (**) in package_data should
|
| 40 |
+
be included in the package data.
|
| 41 |
+
|
| 42 |
+
#1806
|
| 43 |
+
"""
|
| 44 |
+
dist = Distribution(
|
| 45 |
+
dict(
|
| 46 |
+
script_name='setup.py',
|
| 47 |
+
script_args=['build_py'],
|
| 48 |
+
packages=[''],
|
| 49 |
+
package_data={'': ['path/**/data']},
|
| 50 |
+
)
|
| 51 |
+
)
|
| 52 |
+
os.makedirs('path/subpath/subsubpath')
|
| 53 |
+
open('path/subpath/subsubpath/data', 'wb').close()
|
| 54 |
+
|
| 55 |
+
dist.parse_command_line()
|
| 56 |
+
dist.run_commands()
|
| 57 |
+
|
| 58 |
+
assert stat.S_ISREG(os.stat('build/lib/path/subpath/subsubpath/data').st_mode), (
|
| 59 |
+
"File is not included"
|
| 60 |
+
)
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def test_read_only(tmpdir_cwd):
|
| 64 |
+
"""
|
| 65 |
+
Ensure read-only flag is not preserved in copy
|
| 66 |
+
for package modules and package data, as that
|
| 67 |
+
causes problems with deleting read-only files on
|
| 68 |
+
Windows.
|
| 69 |
+
|
| 70 |
+
#1451
|
| 71 |
+
"""
|
| 72 |
+
dist = Distribution(
|
| 73 |
+
dict(
|
| 74 |
+
script_name='setup.py',
|
| 75 |
+
script_args=['build_py'],
|
| 76 |
+
packages=['pkg'],
|
| 77 |
+
package_data={'pkg': ['data.dat']},
|
| 78 |
+
)
|
| 79 |
+
)
|
| 80 |
+
os.makedirs('pkg')
|
| 81 |
+
open('pkg/__init__.py', 'wb').close()
|
| 82 |
+
open('pkg/data.dat', 'wb').close()
|
| 83 |
+
os.chmod('pkg/__init__.py', stat.S_IREAD)
|
| 84 |
+
os.chmod('pkg/data.dat', stat.S_IREAD)
|
| 85 |
+
dist.parse_command_line()
|
| 86 |
+
dist.run_commands()
|
| 87 |
+
shutil.rmtree('build')
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
@pytest.mark.xfail(
|
| 91 |
+
'platform.system() == "Windows"',
|
| 92 |
+
reason="On Windows, files do not have executable bits",
|
| 93 |
+
raises=AssertionError,
|
| 94 |
+
strict=True,
|
| 95 |
+
)
|
| 96 |
+
def test_executable_data(tmpdir_cwd):
|
| 97 |
+
"""
|
| 98 |
+
Ensure executable bit is preserved in copy for
|
| 99 |
+
package data, as users rely on it for scripts.
|
| 100 |
+
|
| 101 |
+
#2041
|
| 102 |
+
"""
|
| 103 |
+
dist = Distribution(
|
| 104 |
+
dict(
|
| 105 |
+
script_name='setup.py',
|
| 106 |
+
script_args=['build_py'],
|
| 107 |
+
packages=['pkg'],
|
| 108 |
+
package_data={'pkg': ['run-me']},
|
| 109 |
+
)
|
| 110 |
+
)
|
| 111 |
+
os.makedirs('pkg')
|
| 112 |
+
open('pkg/__init__.py', 'wb').close()
|
| 113 |
+
open('pkg/run-me', 'wb').close()
|
| 114 |
+
os.chmod('pkg/run-me', 0o700)
|
| 115 |
+
|
| 116 |
+
dist.parse_command_line()
|
| 117 |
+
dist.run_commands()
|
| 118 |
+
|
| 119 |
+
assert os.stat('build/lib/pkg/run-me').st_mode & stat.S_IEXEC, (
|
| 120 |
+
"Script is not executable"
|
| 121 |
+
)
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
EXAMPLE_WITH_MANIFEST = {
|
| 125 |
+
"setup.cfg": DALS(
|
| 126 |
+
"""
|
| 127 |
+
[metadata]
|
| 128 |
+
name = mypkg
|
| 129 |
+
version = 42
|
| 130 |
+
|
| 131 |
+
[options]
|
| 132 |
+
include_package_data = True
|
| 133 |
+
packages = find:
|
| 134 |
+
|
| 135 |
+
[options.packages.find]
|
| 136 |
+
exclude = *.tests*
|
| 137 |
+
"""
|
| 138 |
+
),
|
| 139 |
+
"mypkg": {
|
| 140 |
+
"__init__.py": "",
|
| 141 |
+
"resource_file.txt": "",
|
| 142 |
+
"tests": {
|
| 143 |
+
"__init__.py": "",
|
| 144 |
+
"test_mypkg.py": "",
|
| 145 |
+
"test_file.txt": "",
|
| 146 |
+
},
|
| 147 |
+
},
|
| 148 |
+
"MANIFEST.in": DALS(
|
| 149 |
+
"""
|
| 150 |
+
global-include *.py *.txt
|
| 151 |
+
global-exclude *.py[cod]
|
| 152 |
+
prune dist
|
| 153 |
+
prune build
|
| 154 |
+
prune *.egg-info
|
| 155 |
+
"""
|
| 156 |
+
),
|
| 157 |
+
}
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def test_excluded_subpackages(tmpdir_cwd):
|
| 161 |
+
jaraco.path.build(EXAMPLE_WITH_MANIFEST)
|
| 162 |
+
dist = Distribution({"script_name": "%PEP 517%"})
|
| 163 |
+
dist.parse_config_files()
|
| 164 |
+
|
| 165 |
+
build_py = dist.get_command_obj("build_py")
|
| 166 |
+
|
| 167 |
+
msg = r"Python recognizes 'mypkg\.tests' as an importable package"
|
| 168 |
+
with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
|
| 169 |
+
# TODO: To fix #3260 we need some transition period to deprecate the
|
| 170 |
+
# existing behavior of `include_package_data`. After the transition, we
|
| 171 |
+
# should remove the warning and fix the behaviour.
|
| 172 |
+
|
| 173 |
+
if os.getenv("SETUPTOOLS_USE_DISTUTILS") == "stdlib":
|
| 174 |
+
# pytest.warns reset the warning filter temporarily
|
| 175 |
+
# https://github.com/pytest-dev/pytest/issues/4011#issuecomment-423494810
|
| 176 |
+
warnings.filterwarnings(
|
| 177 |
+
"ignore",
|
| 178 |
+
"'encoding' argument not specified",
|
| 179 |
+
module="distutils.text_file",
|
| 180 |
+
# This warning is already fixed in pypa/distutils but not in stdlib
|
| 181 |
+
)
|
| 182 |
+
|
| 183 |
+
build_py.finalize_options()
|
| 184 |
+
build_py.run()
|
| 185 |
+
|
| 186 |
+
build_dir = Path(dist.get_command_obj("build_py").build_lib)
|
| 187 |
+
assert (build_dir / "mypkg/__init__.py").exists()
|
| 188 |
+
assert (build_dir / "mypkg/resource_file.txt").exists()
|
| 189 |
+
|
| 190 |
+
# Setuptools is configured to ignore `mypkg.tests`, therefore the following
|
| 191 |
+
# files/dirs should not be included in the distribution.
|
| 192 |
+
for f in [
|
| 193 |
+
"mypkg/tests/__init__.py",
|
| 194 |
+
"mypkg/tests/test_mypkg.py",
|
| 195 |
+
"mypkg/tests/test_file.txt",
|
| 196 |
+
"mypkg/tests",
|
| 197 |
+
]:
|
| 198 |
+
with pytest.raises(AssertionError):
|
| 199 |
+
# TODO: Enforce the following assertion once #3260 is fixed
|
| 200 |
+
# (remove context manager and the following xfail).
|
| 201 |
+
assert not (build_dir / f).exists()
|
| 202 |
+
|
| 203 |
+
pytest.xfail("#3260")
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
|
| 207 |
+
def test_existing_egg_info(tmpdir_cwd, monkeypatch):
|
| 208 |
+
"""When provided with the ``existing_egg_info_dir`` attribute, build_py should not
|
| 209 |
+
attempt to run egg_info again.
|
| 210 |
+
"""
|
| 211 |
+
# == Pre-condition ==
|
| 212 |
+
# Generate an egg-info dir
|
| 213 |
+
jaraco.path.build(EXAMPLE_WITH_MANIFEST)
|
| 214 |
+
dist = Distribution({"script_name": "%PEP 517%"})
|
| 215 |
+
dist.parse_config_files()
|
| 216 |
+
assert dist.include_package_data
|
| 217 |
+
|
| 218 |
+
egg_info = dist.get_command_obj("egg_info")
|
| 219 |
+
dist.run_command("egg_info")
|
| 220 |
+
egg_info_dir = next(Path(egg_info.egg_base).glob("*.egg-info"))
|
| 221 |
+
assert egg_info_dir.is_dir()
|
| 222 |
+
|
| 223 |
+
# == Setup ==
|
| 224 |
+
build_py = dist.get_command_obj("build_py")
|
| 225 |
+
build_py.finalize_options()
|
| 226 |
+
egg_info = dist.get_command_obj("egg_info")
|
| 227 |
+
egg_info_run = Mock(side_effect=egg_info.run)
|
| 228 |
+
monkeypatch.setattr(egg_info, "run", egg_info_run)
|
| 229 |
+
|
| 230 |
+
# == Remove caches ==
|
| 231 |
+
# egg_info is called when build_py looks for data_files, which gets cached.
|
| 232 |
+
# We need to ensure it is not cached yet, otherwise it may impact on the tests
|
| 233 |
+
build_py.__dict__.pop('data_files', None)
|
| 234 |
+
dist.reinitialize_command(egg_info)
|
| 235 |
+
|
| 236 |
+
# == Sanity check ==
|
| 237 |
+
# Ensure that if existing_egg_info is not given, build_py attempts to run egg_info
|
| 238 |
+
build_py.existing_egg_info_dir = None
|
| 239 |
+
build_py.run()
|
| 240 |
+
egg_info_run.assert_called()
|
| 241 |
+
|
| 242 |
+
# == Remove caches ==
|
| 243 |
+
egg_info_run.reset_mock()
|
| 244 |
+
build_py.__dict__.pop('data_files', None)
|
| 245 |
+
dist.reinitialize_command(egg_info)
|
| 246 |
+
|
| 247 |
+
# == Actual test ==
|
| 248 |
+
# Ensure that if existing_egg_info_dir is given, egg_info doesn't run
|
| 249 |
+
build_py.existing_egg_info_dir = egg_info_dir
|
| 250 |
+
build_py.run()
|
| 251 |
+
egg_info_run.assert_not_called()
|
| 252 |
+
assert build_py.data_files
|
| 253 |
+
|
| 254 |
+
# Make sure the list of outputs is actually OK
|
| 255 |
+
outputs = map(lambda x: x.replace(os.sep, "/"), build_py.get_outputs())
|
| 256 |
+
assert outputs
|
| 257 |
+
example = str(Path(build_py.build_lib, "mypkg/__init__.py")).replace(os.sep, "/")
|
| 258 |
+
assert example in outputs
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
EXAMPLE_ARBITRARY_MAPPING = {
|
| 262 |
+
"pyproject.toml": DALS(
|
| 263 |
+
"""
|
| 264 |
+
[project]
|
| 265 |
+
name = "mypkg"
|
| 266 |
+
version = "42"
|
| 267 |
+
|
| 268 |
+
[tool.setuptools]
|
| 269 |
+
packages = ["mypkg", "mypkg.sub1", "mypkg.sub2", "mypkg.sub2.nested"]
|
| 270 |
+
|
| 271 |
+
[tool.setuptools.package-dir]
|
| 272 |
+
"" = "src"
|
| 273 |
+
"mypkg.sub2" = "src/mypkg/_sub2"
|
| 274 |
+
"mypkg.sub2.nested" = "other"
|
| 275 |
+
"""
|
| 276 |
+
),
|
| 277 |
+
"src": {
|
| 278 |
+
"mypkg": {
|
| 279 |
+
"__init__.py": "",
|
| 280 |
+
"resource_file.txt": "",
|
| 281 |
+
"sub1": {
|
| 282 |
+
"__init__.py": "",
|
| 283 |
+
"mod1.py": "",
|
| 284 |
+
},
|
| 285 |
+
"_sub2": {
|
| 286 |
+
"mod2.py": "",
|
| 287 |
+
},
|
| 288 |
+
},
|
| 289 |
+
},
|
| 290 |
+
"other": {
|
| 291 |
+
"__init__.py": "",
|
| 292 |
+
"mod3.py": "",
|
| 293 |
+
},
|
| 294 |
+
"MANIFEST.in": DALS(
|
| 295 |
+
"""
|
| 296 |
+
global-include *.py *.txt
|
| 297 |
+
global-exclude *.py[cod]
|
| 298 |
+
"""
|
| 299 |
+
),
|
| 300 |
+
}
|
| 301 |
+
|
| 302 |
+
|
| 303 |
+
def test_get_outputs(tmpdir_cwd):
|
| 304 |
+
jaraco.path.build(EXAMPLE_ARBITRARY_MAPPING)
|
| 305 |
+
dist = Distribution({"script_name": "%test%"})
|
| 306 |
+
dist.parse_config_files()
|
| 307 |
+
|
| 308 |
+
build_py = dist.get_command_obj("build_py")
|
| 309 |
+
build_py.editable_mode = True
|
| 310 |
+
build_py.ensure_finalized()
|
| 311 |
+
build_lib = build_py.build_lib.replace(os.sep, "/")
|
| 312 |
+
outputs = {x.replace(os.sep, "/") for x in build_py.get_outputs()}
|
| 313 |
+
assert outputs == {
|
| 314 |
+
f"{build_lib}/mypkg/__init__.py",
|
| 315 |
+
f"{build_lib}/mypkg/resource_file.txt",
|
| 316 |
+
f"{build_lib}/mypkg/sub1/__init__.py",
|
| 317 |
+
f"{build_lib}/mypkg/sub1/mod1.py",
|
| 318 |
+
f"{build_lib}/mypkg/sub2/mod2.py",
|
| 319 |
+
f"{build_lib}/mypkg/sub2/nested/__init__.py",
|
| 320 |
+
f"{build_lib}/mypkg/sub2/nested/mod3.py",
|
| 321 |
+
}
|
| 322 |
+
mapping = {
|
| 323 |
+
k.replace(os.sep, "/"): v.replace(os.sep, "/")
|
| 324 |
+
for k, v in build_py.get_output_mapping().items()
|
| 325 |
+
}
|
| 326 |
+
assert mapping == {
|
| 327 |
+
f"{build_lib}/mypkg/__init__.py": "src/mypkg/__init__.py",
|
| 328 |
+
f"{build_lib}/mypkg/resource_file.txt": "src/mypkg/resource_file.txt",
|
| 329 |
+
f"{build_lib}/mypkg/sub1/__init__.py": "src/mypkg/sub1/__init__.py",
|
| 330 |
+
f"{build_lib}/mypkg/sub1/mod1.py": "src/mypkg/sub1/mod1.py",
|
| 331 |
+
f"{build_lib}/mypkg/sub2/mod2.py": "src/mypkg/_sub2/mod2.py",
|
| 332 |
+
f"{build_lib}/mypkg/sub2/nested/__init__.py": "other/__init__.py",
|
| 333 |
+
f"{build_lib}/mypkg/sub2/nested/mod3.py": "other/mod3.py",
|
| 334 |
+
}
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
class TestTypeInfoFiles:
|
| 338 |
+
PYPROJECTS = {
|
| 339 |
+
"default_pyproject": DALS(
|
| 340 |
+
"""
|
| 341 |
+
[project]
|
| 342 |
+
name = "foo"
|
| 343 |
+
version = "1"
|
| 344 |
+
"""
|
| 345 |
+
),
|
| 346 |
+
"dont_include_package_data": DALS(
|
| 347 |
+
"""
|
| 348 |
+
[project]
|
| 349 |
+
name = "foo"
|
| 350 |
+
version = "1"
|
| 351 |
+
|
| 352 |
+
[tool.setuptools]
|
| 353 |
+
include-package-data = false
|
| 354 |
+
"""
|
| 355 |
+
),
|
| 356 |
+
"exclude_type_info": DALS(
|
| 357 |
+
"""
|
| 358 |
+
[project]
|
| 359 |
+
name = "foo"
|
| 360 |
+
version = "1"
|
| 361 |
+
|
| 362 |
+
[tool.setuptools]
|
| 363 |
+
include-package-data = false
|
| 364 |
+
|
| 365 |
+
[tool.setuptools.exclude-package-data]
|
| 366 |
+
"*" = ["py.typed", "*.pyi"]
|
| 367 |
+
"""
|
| 368 |
+
),
|
| 369 |
+
}
|
| 370 |
+
|
| 371 |
+
EXAMPLES = {
|
| 372 |
+
"simple_namespace": {
|
| 373 |
+
"directory_structure": {
|
| 374 |
+
"foo": {
|
| 375 |
+
"bar.pyi": "",
|
| 376 |
+
"py.typed": "",
|
| 377 |
+
"__init__.py": "",
|
| 378 |
+
}
|
| 379 |
+
},
|
| 380 |
+
"expected_type_files": {"foo/bar.pyi", "foo/py.typed"},
|
| 381 |
+
},
|
| 382 |
+
"nested_inside_namespace": {
|
| 383 |
+
"directory_structure": {
|
| 384 |
+
"foo": {
|
| 385 |
+
"bar": {
|
| 386 |
+
"py.typed": "",
|
| 387 |
+
"mod.pyi": "",
|
| 388 |
+
}
|
| 389 |
+
}
|
| 390 |
+
},
|
| 391 |
+
"expected_type_files": {"foo/bar/mod.pyi", "foo/bar/py.typed"},
|
| 392 |
+
},
|
| 393 |
+
"namespace_nested_inside_regular": {
|
| 394 |
+
"directory_structure": {
|
| 395 |
+
"foo": {
|
| 396 |
+
"namespace": {
|
| 397 |
+
"foo.pyi": "",
|
| 398 |
+
},
|
| 399 |
+
"__init__.pyi": "",
|
| 400 |
+
"py.typed": "",
|
| 401 |
+
}
|
| 402 |
+
},
|
| 403 |
+
"expected_type_files": {
|
| 404 |
+
"foo/namespace/foo.pyi",
|
| 405 |
+
"foo/__init__.pyi",
|
| 406 |
+
"foo/py.typed",
|
| 407 |
+
},
|
| 408 |
+
},
|
| 409 |
+
}
|
| 410 |
+
|
| 411 |
+
@pytest.mark.parametrize(
|
| 412 |
+
"pyproject",
|
| 413 |
+
[
|
| 414 |
+
"default_pyproject",
|
| 415 |
+
pytest.param(
|
| 416 |
+
"dont_include_package_data",
|
| 417 |
+
marks=pytest.mark.xfail(reason="pypa/setuptools#4350"),
|
| 418 |
+
),
|
| 419 |
+
],
|
| 420 |
+
)
|
| 421 |
+
@pytest.mark.parametrize("example", EXAMPLES.keys())
|
| 422 |
+
def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example):
|
| 423 |
+
structure = {
|
| 424 |
+
**self.EXAMPLES[example]["directory_structure"],
|
| 425 |
+
"pyproject.toml": self.PYPROJECTS[pyproject],
|
| 426 |
+
}
|
| 427 |
+
expected_type_files = self.EXAMPLES[example]["expected_type_files"]
|
| 428 |
+
jaraco.path.build(structure)
|
| 429 |
+
|
| 430 |
+
build_py = get_finalized_build_py()
|
| 431 |
+
outputs = get_outputs(build_py)
|
| 432 |
+
assert expected_type_files <= outputs
|
| 433 |
+
|
| 434 |
+
@pytest.mark.parametrize("pyproject", ["exclude_type_info"])
|
| 435 |
+
@pytest.mark.parametrize("example", EXAMPLES.keys())
|
| 436 |
+
def test_type_files_can_be_excluded(self, tmpdir_cwd, pyproject, example):
|
| 437 |
+
structure = {
|
| 438 |
+
**self.EXAMPLES[example]["directory_structure"],
|
| 439 |
+
"pyproject.toml": self.PYPROJECTS[pyproject],
|
| 440 |
+
}
|
| 441 |
+
expected_type_files = self.EXAMPLES[example]["expected_type_files"]
|
| 442 |
+
jaraco.path.build(structure)
|
| 443 |
+
|
| 444 |
+
build_py = get_finalized_build_py()
|
| 445 |
+
outputs = get_outputs(build_py)
|
| 446 |
+
assert expected_type_files.isdisjoint(outputs)
|
| 447 |
+
|
| 448 |
+
def test_stub_only_package(self, tmpdir_cwd):
|
| 449 |
+
structure = {
|
| 450 |
+
"pyproject.toml": DALS(
|
| 451 |
+
"""
|
| 452 |
+
[project]
|
| 453 |
+
name = "foo-stubs"
|
| 454 |
+
version = "1"
|
| 455 |
+
"""
|
| 456 |
+
),
|
| 457 |
+
"foo-stubs": {"__init__.pyi": "", "bar.pyi": ""},
|
| 458 |
+
}
|
| 459 |
+
expected_type_files = {"foo-stubs/__init__.pyi", "foo-stubs/bar.pyi"}
|
| 460 |
+
jaraco.path.build(structure)
|
| 461 |
+
|
| 462 |
+
build_py = get_finalized_build_py()
|
| 463 |
+
outputs = get_outputs(build_py)
|
| 464 |
+
assert expected_type_files <= outputs
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
def get_finalized_build_py(script_name="%build_py-test%"):
|
| 468 |
+
dist = Distribution({"script_name": script_name})
|
| 469 |
+
dist.parse_config_files()
|
| 470 |
+
build_py = dist.get_command_obj("build_py")
|
| 471 |
+
build_py.finalize_options()
|
| 472 |
+
return build_py
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
def get_outputs(build_py):
|
| 476 |
+
build_dir = Path(build_py.build_lib)
|
| 477 |
+
return {
|
| 478 |
+
os.path.relpath(x, build_dir).replace(os.sep, "/")
|
| 479 |
+
for x in build_py.get_outputs()
|
| 480 |
+
}
|
videollama2/lib/python3.10/site-packages/setuptools/tests/test_core_metadata.py
ADDED
|
@@ -0,0 +1,577 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import functools
|
| 4 |
+
import importlib
|
| 5 |
+
import io
|
| 6 |
+
from email import message_from_string
|
| 7 |
+
from email.generator import Generator
|
| 8 |
+
from email.message import EmailMessage, Message
|
| 9 |
+
from email.parser import Parser
|
| 10 |
+
from email.policy import EmailPolicy
|
| 11 |
+
from inspect import cleandoc
|
| 12 |
+
from pathlib import Path
|
| 13 |
+
from unittest.mock import Mock
|
| 14 |
+
|
| 15 |
+
import pytest
|
| 16 |
+
from packaging.metadata import Metadata
|
| 17 |
+
from packaging.requirements import Requirement
|
| 18 |
+
|
| 19 |
+
from setuptools import _reqs, sic
|
| 20 |
+
from setuptools._core_metadata import rfc822_escape, rfc822_unescape
|
| 21 |
+
from setuptools.command.egg_info import egg_info, write_requirements
|
| 22 |
+
from setuptools.config import expand, setupcfg
|
| 23 |
+
from setuptools.dist import Distribution
|
| 24 |
+
|
| 25 |
+
from .config.downloads import retrieve_file, urls_from_file
|
| 26 |
+
|
| 27 |
+
# Baseline distribution attributes shared by several test cases below.
EXAMPLE_BASE_INFO = {
    "name": "package",
    "version": "0.0.1",
    "author": "Foo Bar",
    "author_email": "foo@bar.net",
    "long_description": "Long\ndescription",
    "description": "Short description",
    "keywords": ["one", "two"],
}
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@pytest.mark.parametrize(
    ("content", "result"),
    (
        pytest.param("Just a single line", None, id="single_line"),
        pytest.param("Multiline\nText\nwithout\nextra indents\n", None, id="multiline"),
        pytest.param(
            "Multiline\n With\n\nadditional\n indentation",
            None,
            id="multiline_with_indentation",
        ),
        pytest.param(
            " Leading whitespace",
            "Leading whitespace",
            id="remove_leading_whitespace",
        ),
        pytest.param(
            " Leading whitespace\nIn\n Multiline comment",
            "Leading whitespace\nIn\n Multiline comment",
            id="remove_leading_whitespace_multiline",
        ),
    ),
)
def test_rfc822_unescape(content, result):
    """Escaping then unescaping should round-trip.

    ``result`` is the expected value when the round-trip is lossy
    (leading whitespace is dropped); ``None`` means a perfect round-trip.
    """
    expected = result if result is not None else content
    assert expected == rfc822_unescape(rfc822_escape(content))
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def __read_test_cases():
    """Produce ``(test_id, distribution_attrs)`` pairs for metadata round-trips."""
    # Each case extends the shared baseline with case-specific attributes.
    case = functools.partial(dict, EXAMPLE_BASE_INFO)

    return [
        ('Metadata version 1.0', case()),
        (
            'Metadata Version 1.0: Short long description',
            case(long_description='Short long description'),
        ),
        (
            'Metadata version 1.1: Classifiers',
            case(
                classifiers=[
                    'Programming Language :: Python :: 3',
                    'Programming Language :: Python :: 3.7',
                    'License :: OSI Approved :: MIT License',
                ],
            ),
        ),
        (
            'Metadata version 1.1: Download URL',
            case(download_url='https://example.com'),
        ),
        (
            'Metadata Version 1.2: Requires-Python',
            case(python_requires='>=3.7'),
        ),
        pytest.param(
            'Metadata Version 1.2: Project-Url',
            case(project_urls=dict(Foo='https://example.bar')),
            marks=pytest.mark.xfail(reason="Issue #1578: project_urls not read"),
        ),
        (
            'Metadata Version 2.1: Long Description Content Type',
            case(long_description_content_type='text/x-rst; charset=UTF-8'),
        ),
        ('License', case(license='MIT')),
        (
            'License multiline',
            case(license='This is a long license \nover multiple lines'),
        ),
        pytest.param(
            'Metadata Version 2.1: Provides Extra',
            case(provides_extras=['foo', 'bar']),
            marks=pytest.mark.xfail(reason="provides_extras not read"),
        ),
        # Cases below intentionally omit the baseline attributes:
        (
            'Missing author',
            dict(name='foo', version='1.0.0', author_email='snorri@sturluson.name'),
        ),
        (
            'Missing author e-mail',
            dict(name='foo', version='1.0.0', author='Snorri Sturluson'),
        ),
        ('Missing author and e-mail', dict(name='foo', version='1.0.0')),
        # `sic` prevents setuptools from normalizing the version string.
        ('Bypass normalized version', dict(name='foo', version=sic('1.0.0a'))),
    ]
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
@pytest.mark.parametrize(("name", "attrs"), __read_test_cases())
def test_read_metadata(name, attrs):
    """Metadata written to PKG-INFO should survive a write/read round-trip."""
    dist = Distribution(attrs)
    metadata_out = dist.metadata
    dist_class = metadata_out.__class__

    # Serialize to an in-memory PKG-INFO and validate the result.
    buffer = io.StringIO()
    metadata_out.write_pkg_file(buffer)
    buffer.seek(0)
    assert _valid_metadata(buffer.read())

    # Parse the same text back into a fresh metadata object.
    buffer.seek(0)
    metadata_in = dist_class()
    metadata_in.read_pkg_file(buffer)

    # Every accessor must agree between the original and the round-tripped copy.
    accessors = [
        dist_class.get_name,
        dist_class.get_version,
        dist_class.get_contact,
        dist_class.get_contact_email,
        dist_class.get_metadata_version,
        dist_class.get_provides,
        dist_class.get_description,
        dist_class.get_long_description,
        dist_class.get_download_url,
        dist_class.get_keywords,
        dist_class.get_platforms,
        dist_class.get_obsoletes,
        dist_class.get_requires,
        dist_class.get_classifiers,
        lambda s: getattr(s, 'project_urls', {}),
        lambda s: getattr(s, 'provides_extras', {}),
    ]
    for getter in accessors:
        assert getter(metadata_in) == getter(metadata_out)
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def __maintainer_test_cases():
    """Enumerate author/maintainer combinations as ``(test_id, attrs)`` pairs."""
    attrs = {"name": "package", "version": "1.0", "description": "xxx"}

    def with_extra(extra):
        # Non-destructive merge of the base attrs with case-specific fields.
        return {**attrs, **extra}

    return [
        ('No author, no maintainer', attrs.copy()),
        (
            'Author (no e-mail), no maintainer',
            with_extra({'author': 'Author Name'}),
        ),
        (
            'Author (e-mail), no maintainer',
            with_extra({'author': 'Author Name', 'author_email': 'author@name.com'}),
        ),
        (
            'No author, maintainer (no e-mail)',
            with_extra({'maintainer': 'Maintainer Name'}),
        ),
        (
            'No author, maintainer (e-mail)',
            with_extra({
                'maintainer': 'Maintainer Name',
                'maintainer_email': 'maintainer@name.com',
            }),
        ),
        (
            'Author (no e-mail), Maintainer (no-email)',
            with_extra({'author': 'Author Name', 'maintainer': 'Maintainer Name'}),
        ),
        (
            'Author (e-mail), Maintainer (e-mail)',
            with_extra({
                'author': 'Author Name',
                'author_email': 'author@name.com',
                'maintainer': 'Maintainer Name',
                'maintainer_email': 'maintainer@name.com',
            }),
        ),
        (
            'No author (e-mail), no maintainer (e-mail)',
            with_extra({
                'author_email': 'author@name.com',
                'maintainer_email': 'maintainer@name.com',
            }),
        ),
        # Non-ASCII values must survive the PKG-INFO encoding:
        ('Author unicode', with_extra({'author': '鉄沢寛'})),
        ('Maintainer unicode', with_extra({'maintainer': 'Jan Łukasiewicz'})),
    ]
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
@pytest.mark.parametrize(("name", "attrs"), __maintainer_test_cases())
def test_maintainer_author(name, attrs, tmpdir):
    """PKG-INFO must contain exactly the author/maintainer fields in ``attrs``.

    ``tested_keys`` maps each ``Distribution`` attribute name to its
    corresponding RFC 822 field name in PKG-INFO.
    """
    tested_keys = {
        'author': 'Author',
        'author_email': 'Author-email',
        'maintainer': 'Maintainer',
        'maintainer_email': 'Maintainer-email',
    }

    # Generate a PKG-INFO file
    dist = Distribution(attrs)
    fn = tmpdir.mkdir('pkg_info')
    fn_s = str(fn)

    dist.metadata.write_pkg_info(fn_s)

    with open(str(fn.join('PKG-INFO')), 'r', encoding='utf-8') as f:
        pkg_info = f.read()

    assert _valid_metadata(pkg_info)

    # Drop blank lines and strip lines from default description
    raw_pkg_lines = pkg_info.splitlines()
    pkg_lines = list(filter(None, raw_pkg_lines[:-2]))

    pkg_lines_set = set(pkg_lines)

    # Duplicate lines should not be generated
    assert len(pkg_lines) == len(pkg_lines_set)

    # BUGFIX: the original loop unpacked `for fkey, dkey` and then did
    # `attrs.get(dkey)` / `startswith(fkey + ':')`.  Because the dict keys are
    # *attribute* names ('author') and the values are *field* names ('Author'),
    # attrs.get('Author') was always None and no line ever started with
    # 'author:', so every assertion below was vacuously true.
    for dkey, fkey in tested_keys.items():
        val = attrs.get(dkey, None)
        if val is None:
            # Field must be absent when the attribute was not provided.
            for line in pkg_lines:
                assert not line.startswith(fkey + ':')
        else:
            # Field must appear exactly as "Field: value".
            line = f'{fkey}: {val}'
            assert line in pkg_lines_set
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
class TestParityWithMetadataFromPyPaWheel:
    """PKG-INFO generated by setuptools should match METADATA from pypa/wheel."""

    def base_example(self):
        """Build a distribution exercising a complex requirement definition."""
        attrs = dict(
            **EXAMPLE_BASE_INFO,
            # Example with complex requirement definition
            python_requires=">=3.8",
            install_requires="""
            packaging==23.2
            more-itertools==8.8.0; extra == "other"
            jaraco.text==3.7.0
            importlib-resources==5.10.2; python_version<"3.8"
            importlib-metadata==6.0.0 ; python_version<"3.8"
            colorama>=0.4.4; sys_platform == "win32"
            """,
            extras_require={
                "testing": """
                pytest >= 6
                pytest-checkdocs >= 2.4
                tomli ; \\
                        # Using stdlib when possible
                        python_version < "3.11"
                ini2toml[lite]>=0.9
                """,
                "other": [],
            },
        )
        # Generate a PKG-INFO file using setuptools
        return Distribution(attrs)

    def test_requires_dist(self, tmp_path):
        dist = self.base_example()
        pkg_info = _get_pkginfo(dist)
        assert _valid_metadata(pkg_info)

        # Core-metadata lines derived from the requirements above must be present.
        expected_lines = (
            'Metadata-Version:',
            'Requires-Python: >=3.8',
            'Provides-Extra: other',
            'Provides-Extra: testing',
            'Requires-Dist: tomli; python_version < "3.11" and extra == "testing"',
            'Requires-Dist: more-itertools==8.8.0; extra == "other"',
            'Requires-Dist: ini2toml[lite]>=0.9; extra == "testing"',
        )
        for line in expected_lines:
            assert line in pkg_info

    HERE = Path(__file__).parent
    EXAMPLES_FILE = HERE / "config/setupcfg_examples.txt"

    @pytest.fixture(params=[None, *urls_from_file(EXAMPLES_FILE)])
    def dist(self, request, monkeypatch, tmp_path):
        """Example of distribution with arbitrary configuration"""
        monkeypatch.chdir(tmp_path)
        # Stub out filesystem-backed dynamic attributes:
        monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.42"))
        monkeypatch.setattr(expand, "read_files", Mock(return_value="hello world"))
        if request.param is None:
            yield self.base_example()
        else:
            # Real-world usage: configuration downloaded from a public project.
            config = retrieve_file(request.param)
            yield setupcfg.apply_configuration(Distribution({}), config)

    @pytest.mark.uses_network
    def test_equivalent_output(self, tmp_path, dist):
        """Ensure output from setuptools is equivalent to the one from `pypa/wheel`"""
        # Generate a METADATA file using pypa/wheel for comparison
        wheel_metadata = importlib.import_module("wheel.metadata")
        pkginfo_to_metadata = getattr(wheel_metadata, "pkginfo_to_metadata", None)

        if pkginfo_to_metadata is None:  # pragma: nocover
            pytest.xfail(
                "wheel.metadata.pkginfo_to_metadata is undefined, "
                "(this is likely to be caused by API changes in pypa/wheel"
            )

        # Build a simplified "egg-info" dir for pypa/wheel to convert.
        pkg_info = _get_pkginfo(dist)
        egg_info_dir = tmp_path / "pkg.egg-info"
        egg_info_dir.mkdir(parents=True)
        (egg_info_dir / "PKG-INFO").write_text(pkg_info, encoding="utf-8")
        write_requirements(egg_info(dist), egg_info_dir, egg_info_dir / "requires.txt")

        # Compare both outputs after normalizing requirements formatting.
        metadata_msg = pkginfo_to_metadata(egg_info_dir, egg_info_dir / "PKG-INFO")
        metadata_str = _normalize_metadata(metadata_msg)
        pkg_info_str = _normalize_metadata(message_from_string(pkg_info))
        assert metadata_str == pkg_info_str

        # Make sure it parses/serializes well in pypa/wheel
        _assert_roundtrip_message(pkg_info)
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
class TestPEP643:
    """Tests for the PEP 643 ``Dynamic`` core-metadata field."""

    # Equivalent fully-static configurations in both supported formats.
    STATIC_CONFIG = {
        "setup.cfg": cleandoc(
            """
            [metadata]
            name = package
            version = 0.0.1
            author = Foo Bar
            author_email = foo@bar.net
            long_description = Long
                description
            description = Short description
            keywords = one, two
            platforms = abcd
            [options]
            install_requires = requests
            """
        ),
        "pyproject.toml": cleandoc(
            """
            [project]
            name = "package"
            version = "0.0.1"
            authors = [
              {name = "Foo Bar", email = "foo@bar.net"}
            ]
            description = "Short description"
            readme = {text = "Long\\ndescription", content-type = "text/plain"}
            keywords = ["one", "two"]
            dependencies = ["requests"]
            [tool.setuptools]
            provides = ["abcd"]
            obsoletes = ["abcd"]
            """
        ),
    }

    @pytest.mark.parametrize("file", STATIC_CONFIG.keys())
    def test_static_config_has_no_dynamic(self, file, tmpdir_cwd):
        # A fully static config must not advertise any Dynamic field.
        Path(file).write_text(self.STATIC_CONFIG[file], encoding="utf-8")
        metadata = _get_metadata()
        # Check both spellings explicitly:
        assert metadata.get_all("Dynamic") is None
        assert metadata.get_all("dynamic") is None

    @pytest.mark.parametrize("file", STATIC_CONFIG.keys())
    @pytest.mark.parametrize(
        "fields",
        [
            # Single dynamic field
            {"requires-python": ("python_requires", ">=3.12")},
            {"author-email": ("author_email", "snoopy@peanuts.com")},
            {"keywords": ("keywords", ["hello", "world"])},
            {"platform": ("platforms", ["abcd"])},
            # Multiple dynamic fields
            {
                "summary": ("description", "hello world"),
                "description": ("long_description", "bla bla bla bla"),
                "requires-dist": ("install_requires", ["hello-world"]),
            },
        ],
    )
    def test_modified_fields_marked_as_dynamic(self, file, fields, tmpdir_cwd):
        # Start from a static config...
        Path(file).write_text(self.STATIC_CONFIG[file], encoding="utf-8")
        dist = _makedist()

        # ...then simulate the effects of a plugin modifying the distribution.
        for attr, value in fields.values():
            # `dist` and `dist.metadata` are complicated: some attributes work
            # when set on one, others on the other.  Set both just in case
            # (this also avoids calling `_finalize_*`).
            setattr(dist, attr, value)
            setattr(dist.metadata, attr, value)

        # The modified fields should now be listed as Dynamic.
        metadata = _get_metadata(dist)
        assert set(metadata.get_all("Dynamic")) == set(fields)
|
| 492 |
+
|
| 493 |
+
|
| 494 |
+
def _makedist(**attrs):
    """Create a ``Distribution`` and process any config files in the CWD."""
    distribution = Distribution(attrs)
    distribution.parse_config_files()
    return distribution
|
| 498 |
+
|
| 499 |
+
|
| 500 |
+
def _assert_roundtrip_message(metadata: str) -> None:
    """Emulate the way wheel.bdist_wheel parses and regenerates the message,
    then ensures the metadata generated by setuptools is compatible.
    """
    with io.StringIO(metadata) as stream:
        parsed = Parser(EmailMessage).parse(stream)

    # Serialization settings mirroring wheel's behaviour: UTF-8 headers,
    # no "From " mangling, no line wrapping.
    serialization_policy = EmailPolicy(
        utf8=True,
        mangle_from_=False,
        max_line_length=0,
    )
    with io.BytesIO() as sink:
        wrapper = io.TextIOWrapper(sink, encoding="utf-8")
        Generator(wrapper, policy=serialization_policy).flatten(parsed)
        wrapper.flush()
        regenerated = sink.getvalue()

    # Normalise newlines to avoid test errors on Windows:
    original = b"\n".join(metadata.encode("utf-8").splitlines())
    regenerated = b"\n".join(regenerated.splitlines())
    assert regenerated == original
|
| 523 |
+
|
| 524 |
+
|
| 525 |
+
def _normalize_metadata(msg: Message) -> str:
    """Allow equivalent metadata to be compared directly."""
    # The main challenge regards the requirements and extras.
    # Both setuptools and wheel already apply some level of normalization,
    # but they differ regarding which character is chosen; according to the
    # following spec it should be "-":
    # https://packaging.python.org/en/latest/specifications/name-normalization/
    #
    # Related issues:
    # https://github.com/pypa/packaging/issues/845
    # https://github.com/pypa/packaging/issues/644#issuecomment-2429813968
    extras = {x.replace("_", "-"): x for x in msg.get_all("Provides-Extra", [])}
    requirements = [
        _normalize_req(req, extras)
        for req in _reqs.parse(msg.get_all("Requires-Dist", []))
    ]

    del msg["Requires-Dist"]
    del msg["Provides-Extra"]

    # Re-add in sorted order so both producers yield identical text.
    for requirement in sorted(requirements):
        msg["Requires-Dist"] = requirement
    for extra in sorted(extras):
        msg["Provides-Extra"] = extra

    # TODO: Handle lack of PEP 643 implementation in pypa/wheel?
    del msg["Metadata-Version"]

    return msg.as_string()
|
| 555 |
+
|
| 556 |
+
|
| 557 |
+
def _normalize_req(req: Requirement, extras: dict[str, str]) -> str:
    """Allow equivalent requirement objects to be compared directly."""
    # Normalize the project name ("_" -> "-") ...
    normalized = str(req).replace(req.name, req.name.replace("_", "-"))
    # ... and rewrite any original extra spellings to their normalized form.
    for norm, orig in extras.items():
        normalized = normalized.replace(orig, norm)
    return normalized
|
| 563 |
+
|
| 564 |
+
|
| 565 |
+
def _get_pkginfo(dist: Distribution):
    """Render the metadata of ``dist`` as a PKG-INFO string."""
    with io.StringIO() as stream:
        dist.metadata.write_pkg_file(stream)
        return stream.getvalue()
|
| 569 |
+
|
| 570 |
+
|
| 571 |
+
def _get_metadata(dist: Distribution | None = None):
    """Parse the PKG-INFO of ``dist`` (or a freshly built one) as an email message."""
    target = dist or _makedist()
    return message_from_string(_get_pkginfo(target))
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
def _valid_metadata(text: str) -> bool:
    """Validate ``text`` via ``packaging``; raises on invalid metadata."""
    parsed = Metadata.from_email(text, validate=True)  # can raise exceptions
    return parsed is not None
|
videollama2/lib/python3.10/site-packages/setuptools/tests/test_depends.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
|
| 3 |
+
from setuptools import depends
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestGetModuleConstant:
    def test_basic(self):
        """
        Invoke get_module_constant on a module in
        the test package.
        """
        module_name = 'setuptools.tests.mod_with_constant'
        value = depends.get_module_constant(module_name, 'value')
        assert value == 'three, sir!'
        # Extraction must not leave the module imported as a side effect.
        assert 'setuptools.tests.mod_with_constant' not in sys.modules
|
videollama2/lib/python3.10/site-packages/setuptools/tests/test_develop.py
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""develop tests"""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import pathlib
|
| 5 |
+
import platform
|
| 6 |
+
import subprocess
|
| 7 |
+
import sys
|
| 8 |
+
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
from setuptools._path import paths_on_pythonpath
|
| 12 |
+
from setuptools.command.develop import develop
|
| 13 |
+
from setuptools.dist import Distribution
|
| 14 |
+
|
| 15 |
+
from . import contexts, namespaces
|
| 16 |
+
|
| 17 |
+
# Minimal project used by the fixtures below: one package, no extras.
SETUP_PY = """\
from setuptools import setup

setup(name='foo',
    packages=['foo'],
)
"""

# NOTE: intentionally Python 2 syntax — the file only needs to exist on disk,
# it is never executed by these tests.
INIT_PY = """print "foo"
"""
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
@pytest.fixture
def temp_user(monkeypatch):
    """Point ``site.USER_BASE``/``site.USER_SITE`` at throw-away directories."""
    with contexts.tempdir() as user_base, contexts.tempdir() as user_site:
        monkeypatch.setattr('site.USER_BASE', user_base)
        monkeypatch.setattr('site.USER_SITE', user_site)
        yield
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@pytest.fixture
def test_env(tmpdir, temp_user):
    """Create a minimal project layout inside ``tmpdir`` and chdir into it."""
    target = tmpdir
    foo = target.mkdir('foo')
    setup = target / 'setup.py'
    if setup.isfile():
        # The fixture expects a pristine tmpdir; fail loudly otherwise.
        raise ValueError(dir(target))
    with setup.open('w') as stream:
        stream.write(SETUP_PY)
    init = foo / '__init__.py'
    with init.open('w') as stream:
        stream.write(INIT_PY)
    with target.as_cwd():
        yield target
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class TestDevelop:
    # Detect legacy virtualenv vs PEP 405 venv environments.
    in_virtualenv = hasattr(sys, 'real_prefix')
    in_venv = hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix

    def test_console_scripts(self, tmpdir):
        """
        Test that console scripts are installed and that they reference
        only the project by name and not the current version.
        """
        pytest.skip(
            "TODO: needs a fixture to cause 'develop' "
            "to be invoked without mutating environment."
        )
        # Unreachable until the skip above is removed; kept as the intended body.
        settings = dict(
            name='foo',
            packages=['foo'],
            version='0.0',
            entry_points={
                'console_scripts': [
                    'foocmd = foo:foo',
                ],
            },
        )
        dist = Distribution(settings)
        dist.script_name = 'setup.py'
        cmd = develop(dist)
        cmd.ensure_finalized()
        cmd.install_dir = tmpdir
        cmd.run()
        # assert '0.0' not in foocmd_text

    @pytest.mark.xfail(reason="legacy behavior retained for compatibility #4167")
    def test_egg_link_filename(self):
        settings = dict(
            name='Foo $$$ Bar_baz-bing',
        )
        dist = Distribution(settings)
        cmd = develop(dist)
        cmd.ensure_finalized()
        link = pathlib.Path(cmd.egg_link)
        assert link.suffix == '.egg-link'
        assert link.stem == 'Foo_Bar_baz_bing'
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class TestResolver:
    """
    TODO: These tests were written with a minimal understanding
    of what _resolve_setup_path is intending to do. Come up with
    more meaningful cases that look like real-world scenarios.
    """

    def test_resolve_setup_path_cwd(self):
        # Everything in the current directory resolves to itself.
        assert develop._resolve_setup_path('.', '.', '.') == '.'

    def test_resolve_setup_path_one_dir(self):
        assert develop._resolve_setup_path('pkgs', '.', 'pkgs') == '../'

    def test_resolve_setup_path_one_dir_trailing_slash(self):
        # A trailing slash should not change the resolution.
        assert develop._resolve_setup_path('pkgs/', '.', 'pkgs') == '../'
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class TestNamespaces:
    @staticmethod
    def install_develop(src_dir, target):
        # Run `setup.py develop` from src_dir, installing into ``target``.
        develop_cmd = [
            sys.executable,
            'setup.py',
            'develop',
            '--install-dir',
            str(target),
        ]
        with src_dir.as_cwd(), paths_on_pythonpath([str(target)]):
            subprocess.check_call(develop_cmd)

    @pytest.mark.skipif(
        bool(os.environ.get("APPVEYOR")),
        reason="https://github.com/pypa/setuptools/issues/851",
    )
    @pytest.mark.skipif(
        platform.python_implementation() == 'PyPy',
        reason="https://github.com/pypa/setuptools/issues/1202",
    )
    def test_namespace_package_importable(self, tmpdir):
        """
        Installing two packages sharing the same namespace, one installed
        naturally using pip or `--single-version-externally-managed`
        and the other installed using `develop` should leave the namespace
        in tact and both packages reachable by import.
        """
        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
        target = tmpdir / 'packages'
        # use pip to install to the target directory
        install_cmd = [
            sys.executable,
            '-m',
            'pip',
            'install',
            str(pkg_A),
            '-t',
            str(target),
        ]
        subprocess.check_call(install_cmd)
        self.install_develop(pkg_B, target)
        namespaces.make_site_dir(target)

        # Both halves of the namespace must be importable together.
        try_import = [
            sys.executable,
            '-c',
            'import myns.pkgA; import myns.pkgB',
        ]
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(try_import)

        # additionally ensure that pkg_resources import works
        pkg_resources_imp = [
            sys.executable,
            '-c',
            'import pkg_resources',
        ]
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(pkg_resources_imp)
|
videollama2/lib/python3.10/site-packages/setuptools/tests/test_dist_info.py
ADDED
|
@@ -0,0 +1,210 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Test .dist-info style distributions."""
|
| 2 |
+
|
| 3 |
+
import pathlib
|
| 4 |
+
import re
|
| 5 |
+
import shutil
|
| 6 |
+
import subprocess
|
| 7 |
+
import sys
|
| 8 |
+
from functools import partial
|
| 9 |
+
|
| 10 |
+
import pytest
|
| 11 |
+
|
| 12 |
+
import pkg_resources
|
| 13 |
+
from setuptools.archive_util import unpack_archive
|
| 14 |
+
|
| 15 |
+
from .textwrap import DALS
|
| 16 |
+
|
| 17 |
+
# Shortcut used by the tests below: read a ``pathlib.Path`` as UTF-8 text.
read = partial(pathlib.Path.read_text, encoding="utf-8")
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class TestDistInfo:
|
| 21 |
+
metadata_base = DALS(
|
| 22 |
+
"""
|
| 23 |
+
Metadata-Version: 1.2
|
| 24 |
+
Requires-Dist: splort (==4)
|
| 25 |
+
Provides-Extra: baz
|
| 26 |
+
Requires-Dist: quux (>=1.1); extra == 'baz'
|
| 27 |
+
"""
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
@classmethod
|
| 31 |
+
def build_metadata(cls, **kwargs):
|
| 32 |
+
lines = ('{key}: {value}\n'.format(**locals()) for key, value in kwargs.items())
|
| 33 |
+
return cls.metadata_base + ''.join(lines)
|
| 34 |
+
|
| 35 |
+
@pytest.fixture
|
| 36 |
+
def metadata(self, tmpdir):
|
| 37 |
+
dist_info_name = 'VersionedDistribution-2.718.dist-info'
|
| 38 |
+
versioned = tmpdir / dist_info_name
|
| 39 |
+
versioned.mkdir()
|
| 40 |
+
filename = versioned / 'METADATA'
|
| 41 |
+
content = self.build_metadata(
|
| 42 |
+
Name='VersionedDistribution',
|
| 43 |
+
)
|
| 44 |
+
filename.write_text(content, encoding='utf-8')
|
| 45 |
+
|
| 46 |
+
dist_info_name = 'UnversionedDistribution.dist-info'
|
| 47 |
+
unversioned = tmpdir / dist_info_name
|
| 48 |
+
unversioned.mkdir()
|
| 49 |
+
filename = unversioned / 'METADATA'
|
| 50 |
+
content = self.build_metadata(
|
| 51 |
+
Name='UnversionedDistribution',
|
| 52 |
+
Version='0.3',
|
| 53 |
+
)
|
| 54 |
+
filename.write_text(content, encoding='utf-8')
|
| 55 |
+
|
| 56 |
+
return str(tmpdir)
|
| 57 |
+
|
| 58 |
+
def test_distinfo(self, metadata):
|
| 59 |
+
dists = dict(
|
| 60 |
+
(d.project_name, d) for d in pkg_resources.find_distributions(metadata)
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
assert len(dists) == 2, dists
|
| 64 |
+
|
| 65 |
+
unversioned = dists['UnversionedDistribution']
|
| 66 |
+
versioned = dists['VersionedDistribution']
|
| 67 |
+
|
| 68 |
+
assert versioned.version == '2.718' # from filename
|
| 69 |
+
assert unversioned.version == '0.3' # from METADATA
|
| 70 |
+
|
| 71 |
+
def test_conditional_dependencies(self, metadata):
|
| 72 |
+
specs = 'splort==4', 'quux>=1.1'
|
| 73 |
+
requires = list(map(pkg_resources.Requirement.parse, specs))
|
| 74 |
+
|
| 75 |
+
for d in pkg_resources.find_distributions(metadata):
|
| 76 |
+
assert d.requires() == requires[:1]
|
| 77 |
+
assert d.requires(extras=('baz',)) == [
|
| 78 |
+
requires[0],
|
| 79 |
+
pkg_resources.Requirement.parse('quux>=1.1;extra=="baz"'),
|
| 80 |
+
]
|
| 81 |
+
assert d.extras == ['baz']
|
| 82 |
+
|
| 83 |
+
def test_invalid_version(self, tmp_path):
|
| 84 |
+
"""
|
| 85 |
+
Supplying an invalid version crashes dist_info.
|
| 86 |
+
"""
|
| 87 |
+
config = "[metadata]\nname=proj\nversion=42\n[egg_info]\ntag_build=invalid!!!\n"
|
| 88 |
+
(tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
|
| 89 |
+
msg = re.compile("invalid version", re.M | re.I)
|
| 90 |
+
proc = run_command_inner("dist_info", cwd=tmp_path, check=False)
|
| 91 |
+
assert proc.returncode
|
| 92 |
+
assert msg.search(proc.stdout)
|
| 93 |
+
assert not list(tmp_path.glob("*.dist-info"))
|
| 94 |
+
|
| 95 |
+
def test_tag_arguments(self, tmp_path):
|
| 96 |
+
config = """
|
| 97 |
+
[metadata]
|
| 98 |
+
name=proj
|
| 99 |
+
version=42
|
| 100 |
+
[egg_info]
|
| 101 |
+
tag_date=1
|
| 102 |
+
tag_build=.post
|
| 103 |
+
"""
|
| 104 |
+
(tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
|
| 105 |
+
|
| 106 |
+
print(run_command("dist_info", "--no-date", cwd=tmp_path))
|
| 107 |
+
dist_info = next(tmp_path.glob("*.dist-info"))
|
| 108 |
+
assert dist_info.name.startswith("proj-42")
|
| 109 |
+
shutil.rmtree(dist_info)
|
| 110 |
+
|
| 111 |
+
print(run_command("dist_info", "--tag-build", ".a", cwd=tmp_path))
|
| 112 |
+
dist_info = next(tmp_path.glob("*.dist-info"))
|
| 113 |
+
assert dist_info.name.startswith("proj-42a")
|
| 114 |
+
|
| 115 |
+
@pytest.mark.parametrize("keep_egg_info", (False, True))
|
| 116 |
+
def test_output_dir(self, tmp_path, keep_egg_info):
|
| 117 |
+
config = "[metadata]\nname=proj\nversion=42\n"
|
| 118 |
+
(tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
|
| 119 |
+
out = tmp_path / "__out"
|
| 120 |
+
out.mkdir()
|
| 121 |
+
opts = ["--keep-egg-info"] if keep_egg_info else []
|
| 122 |
+
run_command("dist_info", "--output-dir", out, *opts, cwd=tmp_path)
|
| 123 |
+
assert len(list(out.glob("*.dist-info"))) == 1
|
| 124 |
+
assert len(list(tmp_path.glob("*.dist-info"))) == 0
|
| 125 |
+
expected_egg_info = int(keep_egg_info)
|
| 126 |
+
assert len(list(out.glob("*.egg-info"))) == expected_egg_info
|
| 127 |
+
assert len(list(tmp_path.glob("*.egg-info"))) == 0
|
| 128 |
+
assert len(list(out.glob("*.__bkp__"))) == 0
|
| 129 |
+
assert len(list(tmp_path.glob("*.__bkp__"))) == 0
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
class TestWheelCompatibility:
|
| 133 |
+
"""Make sure the .dist-info directory produced with the ``dist_info`` command
|
| 134 |
+
is the same as the one produced by ``bdist_wheel``.
|
| 135 |
+
"""
|
| 136 |
+
|
| 137 |
+
SETUPCFG = DALS(
|
| 138 |
+
"""
|
| 139 |
+
[metadata]
|
| 140 |
+
name = {name}
|
| 141 |
+
version = {version}
|
| 142 |
+
|
| 143 |
+
[options]
|
| 144 |
+
install_requires =
|
| 145 |
+
foo>=12; sys_platform != "linux"
|
| 146 |
+
|
| 147 |
+
[options.extras_require]
|
| 148 |
+
test = pytest
|
| 149 |
+
|
| 150 |
+
[options.entry_points]
|
| 151 |
+
console_scripts =
|
| 152 |
+
executable-name = my_package.module:function
|
| 153 |
+
discover =
|
| 154 |
+
myproj = my_package.other_module:function
|
| 155 |
+
"""
|
| 156 |
+
)
|
| 157 |
+
|
| 158 |
+
EGG_INFO_OPTS = [
|
| 159 |
+
# Related: #3088 #2872
|
| 160 |
+
("", ""),
|
| 161 |
+
(".post", "[egg_info]\ntag_build = post\n"),
|
| 162 |
+
(".post", "[egg_info]\ntag_build = .post\n"),
|
| 163 |
+
(".post", "[egg_info]\ntag_build = post\ntag_date = 1\n"),
|
| 164 |
+
(".dev", "[egg_info]\ntag_build = .dev\n"),
|
| 165 |
+
(".dev", "[egg_info]\ntag_build = .dev\ntag_date = 1\n"),
|
| 166 |
+
("a1", "[egg_info]\ntag_build = .a1\n"),
|
| 167 |
+
("+local", "[egg_info]\ntag_build = +local\n"),
|
| 168 |
+
]
|
| 169 |
+
|
| 170 |
+
@pytest.mark.parametrize("name", "my-proj my_proj my.proj My.Proj".split())
|
| 171 |
+
@pytest.mark.parametrize("version", ["0.42.13"])
|
| 172 |
+
@pytest.mark.parametrize(("suffix", "cfg"), EGG_INFO_OPTS)
|
| 173 |
+
def test_dist_info_is_the_same_as_in_wheel(
|
| 174 |
+
self, name, version, tmp_path, suffix, cfg
|
| 175 |
+
):
|
| 176 |
+
config = self.SETUPCFG.format(name=name, version=version) + cfg
|
| 177 |
+
|
| 178 |
+
for i in "dir_wheel", "dir_dist":
|
| 179 |
+
(tmp_path / i).mkdir()
|
| 180 |
+
(tmp_path / i / "setup.cfg").write_text(config, encoding="utf-8")
|
| 181 |
+
|
| 182 |
+
run_command("bdist_wheel", cwd=tmp_path / "dir_wheel")
|
| 183 |
+
wheel = next(tmp_path.glob("dir_wheel/dist/*.whl"))
|
| 184 |
+
unpack_archive(wheel, tmp_path / "unpack")
|
| 185 |
+
wheel_dist_info = next(tmp_path.glob("unpack/*.dist-info"))
|
| 186 |
+
|
| 187 |
+
run_command("dist_info", cwd=tmp_path / "dir_dist")
|
| 188 |
+
dist_info = next(tmp_path.glob("dir_dist/*.dist-info"))
|
| 189 |
+
|
| 190 |
+
assert dist_info.name == wheel_dist_info.name
|
| 191 |
+
assert dist_info.name.startswith(f"{name.replace('-', '_')}-{version}{suffix}")
|
| 192 |
+
for file in "METADATA", "entry_points.txt":
|
| 193 |
+
assert read(dist_info / file) == read(wheel_dist_info / file)
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def run_command_inner(*cmd, **kwargs):
|
| 197 |
+
opts = {
|
| 198 |
+
"stderr": subprocess.STDOUT,
|
| 199 |
+
"stdout": subprocess.PIPE,
|
| 200 |
+
"text": True,
|
| 201 |
+
"encoding": "utf-8",
|
| 202 |
+
"check": True,
|
| 203 |
+
**kwargs,
|
| 204 |
+
}
|
| 205 |
+
cmd = [sys.executable, "-c", "__import__('setuptools').setup()", *map(str, cmd)]
|
| 206 |
+
return subprocess.run(cmd, **opts)
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
def run_command(*args, **kwargs):
|
| 210 |
+
return run_command_inner(*args, **kwargs).stdout
|
videollama2/lib/python3.10/site-packages/setuptools/tests/test_editable_install.py
ADDED
|
@@ -0,0 +1,1289 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import platform
|
| 5 |
+
import stat
|
| 6 |
+
import subprocess
|
| 7 |
+
import sys
|
| 8 |
+
from copy import deepcopy
|
| 9 |
+
from importlib import import_module
|
| 10 |
+
from importlib.machinery import EXTENSION_SUFFIXES
|
| 11 |
+
from pathlib import Path
|
| 12 |
+
from textwrap import dedent
|
| 13 |
+
from typing import Any
|
| 14 |
+
from unittest.mock import Mock
|
| 15 |
+
from uuid import uuid4
|
| 16 |
+
|
| 17 |
+
import jaraco.envs
|
| 18 |
+
import jaraco.path
|
| 19 |
+
import pytest
|
| 20 |
+
from path import Path as _Path
|
| 21 |
+
|
| 22 |
+
from setuptools._importlib import resources as importlib_resources
|
| 23 |
+
from setuptools.command.editable_wheel import (
|
| 24 |
+
_DebuggingTips,
|
| 25 |
+
_encode_pth,
|
| 26 |
+
_find_namespaces,
|
| 27 |
+
_find_package_roots,
|
| 28 |
+
_find_virtual_namespaces,
|
| 29 |
+
_finder_template,
|
| 30 |
+
_LinkTree,
|
| 31 |
+
_TopLevelFinder,
|
| 32 |
+
editable_wheel,
|
| 33 |
+
)
|
| 34 |
+
from setuptools.dist import Distribution
|
| 35 |
+
from setuptools.extension import Extension
|
| 36 |
+
from setuptools.warnings import SetuptoolsDeprecationWarning
|
| 37 |
+
|
| 38 |
+
from . import contexts, namespaces
|
| 39 |
+
|
| 40 |
+
from distutils.core import run_setup
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
@pytest.fixture(params=["strict", "lenient"])
|
| 44 |
+
def editable_opts(request):
|
| 45 |
+
if request.param == "strict":
|
| 46 |
+
return ["--config-settings", "editable-mode=strict"]
|
| 47 |
+
return []
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
EXAMPLE = {
|
| 51 |
+
'pyproject.toml': dedent(
|
| 52 |
+
"""\
|
| 53 |
+
[build-system]
|
| 54 |
+
requires = ["setuptools"]
|
| 55 |
+
build-backend = "setuptools.build_meta"
|
| 56 |
+
|
| 57 |
+
[project]
|
| 58 |
+
name = "mypkg"
|
| 59 |
+
version = "3.14159"
|
| 60 |
+
license = {text = "MIT"}
|
| 61 |
+
description = "This is a Python package"
|
| 62 |
+
dynamic = ["readme"]
|
| 63 |
+
classifiers = [
|
| 64 |
+
"Development Status :: 5 - Production/Stable",
|
| 65 |
+
"Intended Audience :: Developers"
|
| 66 |
+
]
|
| 67 |
+
urls = {Homepage = "https://github.com"}
|
| 68 |
+
|
| 69 |
+
[tool.setuptools]
|
| 70 |
+
package-dir = {"" = "src"}
|
| 71 |
+
packages = {find = {where = ["src"]}}
|
| 72 |
+
license-files = ["LICENSE*"]
|
| 73 |
+
|
| 74 |
+
[tool.setuptools.dynamic]
|
| 75 |
+
readme = {file = "README.rst"}
|
| 76 |
+
|
| 77 |
+
[tool.distutils.egg_info]
|
| 78 |
+
tag-build = ".post0"
|
| 79 |
+
"""
|
| 80 |
+
),
|
| 81 |
+
"MANIFEST.in": dedent(
|
| 82 |
+
"""\
|
| 83 |
+
global-include *.py *.txt
|
| 84 |
+
global-exclude *.py[cod]
|
| 85 |
+
prune dist
|
| 86 |
+
prune build
|
| 87 |
+
"""
|
| 88 |
+
).strip(),
|
| 89 |
+
"README.rst": "This is a ``README``",
|
| 90 |
+
"LICENSE.txt": "---- placeholder MIT license ----",
|
| 91 |
+
"src": {
|
| 92 |
+
"mypkg": {
|
| 93 |
+
"__init__.py": dedent(
|
| 94 |
+
"""\
|
| 95 |
+
import sys
|
| 96 |
+
from importlib.metadata import PackageNotFoundError, version
|
| 97 |
+
|
| 98 |
+
try:
|
| 99 |
+
__version__ = version(__name__)
|
| 100 |
+
except PackageNotFoundError:
|
| 101 |
+
__version__ = "unknown"
|
| 102 |
+
"""
|
| 103 |
+
),
|
| 104 |
+
"__main__.py": dedent(
|
| 105 |
+
"""\
|
| 106 |
+
from importlib.resources import read_text
|
| 107 |
+
from . import __version__, __name__ as parent
|
| 108 |
+
from .mod import x
|
| 109 |
+
|
| 110 |
+
data = read_text(parent, "data.txt")
|
| 111 |
+
print(__version__, data, x)
|
| 112 |
+
"""
|
| 113 |
+
),
|
| 114 |
+
"mod.py": "x = ''",
|
| 115 |
+
"data.txt": "Hello World",
|
| 116 |
+
}
|
| 117 |
+
},
|
| 118 |
+
}
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
@pytest.mark.xfail(sys.platform == "darwin", reason="pypa/setuptools#4328")
|
| 125 |
+
@pytest.mark.parametrize(
|
| 126 |
+
"files",
|
| 127 |
+
[
|
| 128 |
+
{**EXAMPLE, "setup.py": SETUP_SCRIPT_STUB},
|
| 129 |
+
EXAMPLE, # No setup.py script
|
| 130 |
+
],
|
| 131 |
+
)
|
| 132 |
+
def test_editable_with_pyproject(tmp_path, venv, files, editable_opts):
|
| 133 |
+
project = tmp_path / "mypkg"
|
| 134 |
+
project.mkdir()
|
| 135 |
+
jaraco.path.build(files, prefix=project)
|
| 136 |
+
|
| 137 |
+
cmd = [
|
| 138 |
+
"python",
|
| 139 |
+
"-m",
|
| 140 |
+
"pip",
|
| 141 |
+
"install",
|
| 142 |
+
"--no-build-isolation", # required to force current version of setuptools
|
| 143 |
+
"-e",
|
| 144 |
+
str(project),
|
| 145 |
+
*editable_opts,
|
| 146 |
+
]
|
| 147 |
+
print(venv.run(cmd))
|
| 148 |
+
|
| 149 |
+
cmd = ["python", "-m", "mypkg"]
|
| 150 |
+
assert venv.run(cmd).strip() == "3.14159.post0 Hello World"
|
| 151 |
+
|
| 152 |
+
(project / "src/mypkg/data.txt").write_text("foobar", encoding="utf-8")
|
| 153 |
+
(project / "src/mypkg/mod.py").write_text("x = 42", encoding="utf-8")
|
| 154 |
+
assert venv.run(cmd).strip() == "3.14159.post0 foobar 42"
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def test_editable_with_flat_layout(tmp_path, venv, editable_opts):
|
| 158 |
+
files = {
|
| 159 |
+
"mypkg": {
|
| 160 |
+
"pyproject.toml": dedent(
|
| 161 |
+
"""\
|
| 162 |
+
[build-system]
|
| 163 |
+
requires = ["setuptools", "wheel"]
|
| 164 |
+
build-backend = "setuptools.build_meta"
|
| 165 |
+
|
| 166 |
+
[project]
|
| 167 |
+
name = "mypkg"
|
| 168 |
+
version = "3.14159"
|
| 169 |
+
|
| 170 |
+
[tool.setuptools]
|
| 171 |
+
packages = ["pkg"]
|
| 172 |
+
py-modules = ["mod"]
|
| 173 |
+
"""
|
| 174 |
+
),
|
| 175 |
+
"pkg": {"__init__.py": "a = 4"},
|
| 176 |
+
"mod.py": "b = 2",
|
| 177 |
+
},
|
| 178 |
+
}
|
| 179 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 180 |
+
project = tmp_path / "mypkg"
|
| 181 |
+
|
| 182 |
+
cmd = [
|
| 183 |
+
"python",
|
| 184 |
+
"-m",
|
| 185 |
+
"pip",
|
| 186 |
+
"install",
|
| 187 |
+
"--no-build-isolation", # required to force current version of setuptools
|
| 188 |
+
"-e",
|
| 189 |
+
str(project),
|
| 190 |
+
*editable_opts,
|
| 191 |
+
]
|
| 192 |
+
print(venv.run(cmd))
|
| 193 |
+
cmd = ["python", "-c", "import pkg, mod; print(pkg.a, mod.b)"]
|
| 194 |
+
assert venv.run(cmd).strip() == "4 2"
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def test_editable_with_single_module(tmp_path, venv, editable_opts):
|
| 198 |
+
files = {
|
| 199 |
+
"mypkg": {
|
| 200 |
+
"pyproject.toml": dedent(
|
| 201 |
+
"""\
|
| 202 |
+
[build-system]
|
| 203 |
+
requires = ["setuptools", "wheel"]
|
| 204 |
+
build-backend = "setuptools.build_meta"
|
| 205 |
+
|
| 206 |
+
[project]
|
| 207 |
+
name = "mod"
|
| 208 |
+
version = "3.14159"
|
| 209 |
+
|
| 210 |
+
[tool.setuptools]
|
| 211 |
+
py-modules = ["mod"]
|
| 212 |
+
"""
|
| 213 |
+
),
|
| 214 |
+
"mod.py": "b = 2",
|
| 215 |
+
},
|
| 216 |
+
}
|
| 217 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 218 |
+
project = tmp_path / "mypkg"
|
| 219 |
+
|
| 220 |
+
cmd = [
|
| 221 |
+
"python",
|
| 222 |
+
"-m",
|
| 223 |
+
"pip",
|
| 224 |
+
"install",
|
| 225 |
+
"--no-build-isolation", # required to force current version of setuptools
|
| 226 |
+
"-e",
|
| 227 |
+
str(project),
|
| 228 |
+
*editable_opts,
|
| 229 |
+
]
|
| 230 |
+
print(venv.run(cmd))
|
| 231 |
+
cmd = ["python", "-c", "import mod; print(mod.b)"]
|
| 232 |
+
assert venv.run(cmd).strip() == "2"
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
class TestLegacyNamespaces:
|
| 236 |
+
# legacy => pkg_resources.declare_namespace(...) + setup(namespace_packages=...)
|
| 237 |
+
|
| 238 |
+
def test_nspkg_file_is_unique(self, tmp_path, monkeypatch):
|
| 239 |
+
deprecation = pytest.warns(
|
| 240 |
+
SetuptoolsDeprecationWarning, match=".*namespace_packages parameter.*"
|
| 241 |
+
)
|
| 242 |
+
installation_dir = tmp_path / ".installation_dir"
|
| 243 |
+
installation_dir.mkdir()
|
| 244 |
+
examples = (
|
| 245 |
+
"myns.pkgA",
|
| 246 |
+
"myns.pkgB",
|
| 247 |
+
"myns.n.pkgA",
|
| 248 |
+
"myns.n.pkgB",
|
| 249 |
+
)
|
| 250 |
+
|
| 251 |
+
for name in examples:
|
| 252 |
+
pkg = namespaces.build_namespace_package(tmp_path, name, version="42")
|
| 253 |
+
with deprecation, monkeypatch.context() as ctx:
|
| 254 |
+
ctx.chdir(pkg)
|
| 255 |
+
dist = run_setup("setup.py", stop_after="config")
|
| 256 |
+
cmd = editable_wheel(dist)
|
| 257 |
+
cmd.finalize_options()
|
| 258 |
+
editable_name = cmd.get_finalized_command("dist_info").name
|
| 259 |
+
cmd._install_namespaces(installation_dir, editable_name)
|
| 260 |
+
|
| 261 |
+
files = list(installation_dir.glob("*-nspkg.pth"))
|
| 262 |
+
assert len(files) == len(examples)
|
| 263 |
+
|
| 264 |
+
@pytest.mark.parametrize(
|
| 265 |
+
"impl",
|
| 266 |
+
(
|
| 267 |
+
"pkg_resources",
|
| 268 |
+
# "pkgutil", => does not work
|
| 269 |
+
),
|
| 270 |
+
)
|
| 271 |
+
@pytest.mark.parametrize("ns", ("myns.n",))
|
| 272 |
+
def test_namespace_package_importable(
|
| 273 |
+
self, venv, tmp_path, ns, impl, editable_opts
|
| 274 |
+
):
|
| 275 |
+
"""
|
| 276 |
+
Installing two packages sharing the same namespace, one installed
|
| 277 |
+
naturally using pip or `--single-version-externally-managed`
|
| 278 |
+
and the other installed in editable mode should leave the namespace
|
| 279 |
+
intact and both packages reachable by import.
|
| 280 |
+
(Ported from test_develop).
|
| 281 |
+
"""
|
| 282 |
+
build_system = """\
|
| 283 |
+
[build-system]
|
| 284 |
+
requires = ["setuptools"]
|
| 285 |
+
build-backend = "setuptools.build_meta"
|
| 286 |
+
"""
|
| 287 |
+
pkg_A = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgA", impl=impl)
|
| 288 |
+
pkg_B = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgB", impl=impl)
|
| 289 |
+
(pkg_A / "pyproject.toml").write_text(build_system, encoding="utf-8")
|
| 290 |
+
(pkg_B / "pyproject.toml").write_text(build_system, encoding="utf-8")
|
| 291 |
+
# use pip to install to the target directory
|
| 292 |
+
opts = editable_opts[:]
|
| 293 |
+
opts.append("--no-build-isolation") # force current version of setuptools
|
| 294 |
+
venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
|
| 295 |
+
venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
|
| 296 |
+
venv.run(["python", "-c", f"import {ns}.pkgA; import {ns}.pkgB"])
|
| 297 |
+
# additionally ensure that pkg_resources import works
|
| 298 |
+
venv.run(["python", "-c", "import pkg_resources"])
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
class TestPep420Namespaces:
|
| 302 |
+
def test_namespace_package_importable(self, venv, tmp_path, editable_opts):
|
| 303 |
+
"""
|
| 304 |
+
Installing two packages sharing the same namespace, one installed
|
| 305 |
+
normally using pip and the other installed in editable mode
|
| 306 |
+
should allow importing both packages.
|
| 307 |
+
"""
|
| 308 |
+
pkg_A = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgA')
|
| 309 |
+
pkg_B = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgB')
|
| 310 |
+
# use pip to install to the target directory
|
| 311 |
+
opts = editable_opts[:]
|
| 312 |
+
opts.append("--no-build-isolation") # force current version of setuptools
|
| 313 |
+
venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
|
| 314 |
+
venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
|
| 315 |
+
venv.run(["python", "-c", "import myns.n.pkgA; import myns.n.pkgB"])
|
| 316 |
+
|
| 317 |
+
def test_namespace_created_via_package_dir(self, venv, tmp_path, editable_opts):
|
| 318 |
+
"""Currently users can create a namespace by tweaking `package_dir`"""
|
| 319 |
+
files = {
|
| 320 |
+
"pkgA": {
|
| 321 |
+
"pyproject.toml": dedent(
|
| 322 |
+
"""\
|
| 323 |
+
[build-system]
|
| 324 |
+
requires = ["setuptools", "wheel"]
|
| 325 |
+
build-backend = "setuptools.build_meta"
|
| 326 |
+
|
| 327 |
+
[project]
|
| 328 |
+
name = "pkgA"
|
| 329 |
+
version = "3.14159"
|
| 330 |
+
|
| 331 |
+
[tool.setuptools]
|
| 332 |
+
package-dir = {"myns.n.pkgA" = "src"}
|
| 333 |
+
"""
|
| 334 |
+
),
|
| 335 |
+
"src": {"__init__.py": "a = 1"},
|
| 336 |
+
},
|
| 337 |
+
}
|
| 338 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 339 |
+
pkg_A = tmp_path / "pkgA"
|
| 340 |
+
pkg_B = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgB')
|
| 341 |
+
pkg_C = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgC')
|
| 342 |
+
|
| 343 |
+
# use pip to install to the target directory
|
| 344 |
+
opts = editable_opts[:]
|
| 345 |
+
opts.append("--no-build-isolation") # force current version of setuptools
|
| 346 |
+
venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
|
| 347 |
+
venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
|
| 348 |
+
venv.run(["python", "-m", "pip", "install", "-e", str(pkg_C), *opts])
|
| 349 |
+
venv.run(["python", "-c", "from myns.n import pkgA, pkgB, pkgC"])
|
| 350 |
+
|
| 351 |
+
def test_namespace_accidental_config_in_lenient_mode(self, venv, tmp_path):
|
| 352 |
+
"""Sometimes users might specify an ``include`` pattern that ignores parent
|
| 353 |
+
packages. In a normal installation this would ignore all modules inside the
|
| 354 |
+
parent packages, and make them namespaces (reported in issue #3504),
|
| 355 |
+
so the editable mode should preserve this behaviour.
|
| 356 |
+
"""
|
| 357 |
+
files = {
|
| 358 |
+
"pkgA": {
|
| 359 |
+
"pyproject.toml": dedent(
|
| 360 |
+
"""\
|
| 361 |
+
[build-system]
|
| 362 |
+
requires = ["setuptools", "wheel"]
|
| 363 |
+
build-backend = "setuptools.build_meta"
|
| 364 |
+
|
| 365 |
+
[project]
|
| 366 |
+
name = "pkgA"
|
| 367 |
+
version = "3.14159"
|
| 368 |
+
|
| 369 |
+
[tool.setuptools]
|
| 370 |
+
packages.find.include = ["mypkg.*"]
|
| 371 |
+
"""
|
| 372 |
+
),
|
| 373 |
+
"mypkg": {
|
| 374 |
+
"__init__.py": "",
|
| 375 |
+
"other.py": "b = 1",
|
| 376 |
+
"n": {
|
| 377 |
+
"__init__.py": "",
|
| 378 |
+
"pkgA.py": "a = 1",
|
| 379 |
+
},
|
| 380 |
+
},
|
| 381 |
+
"MANIFEST.in": EXAMPLE["MANIFEST.in"],
|
| 382 |
+
},
|
| 383 |
+
}
|
| 384 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 385 |
+
pkg_A = tmp_path / "pkgA"
|
| 386 |
+
|
| 387 |
+
# use pip to install to the target directory
|
| 388 |
+
opts = ["--no-build-isolation"] # force current version of setuptools
|
| 389 |
+
venv.run(["python", "-m", "pip", "-v", "install", "-e", str(pkg_A), *opts])
|
| 390 |
+
out = venv.run(["python", "-c", "from mypkg.n import pkgA; print(pkgA.a)"])
|
| 391 |
+
assert out.strip() == "1"
|
| 392 |
+
cmd = """\
|
| 393 |
+
try:
|
| 394 |
+
import mypkg.other
|
| 395 |
+
except ImportError:
|
| 396 |
+
print("mypkg.other not defined")
|
| 397 |
+
"""
|
| 398 |
+
out = venv.run(["python", "-c", dedent(cmd)])
|
| 399 |
+
assert "mypkg.other not defined" in out
|
| 400 |
+
|
| 401 |
+
|
| 402 |
+
def test_editable_with_prefix(tmp_path, sample_project, editable_opts):
|
| 403 |
+
"""
|
| 404 |
+
Editable install to a prefix should be discoverable.
|
| 405 |
+
"""
|
| 406 |
+
prefix = tmp_path / 'prefix'
|
| 407 |
+
|
| 408 |
+
# figure out where pip will likely install the package
|
| 409 |
+
site_packages_all = [
|
| 410 |
+
prefix / Path(path).relative_to(sys.prefix)
|
| 411 |
+
for path in sys.path
|
| 412 |
+
if 'site-packages' in path and path.startswith(sys.prefix)
|
| 413 |
+
]
|
| 414 |
+
|
| 415 |
+
for sp in site_packages_all:
|
| 416 |
+
sp.mkdir(parents=True)
|
| 417 |
+
|
| 418 |
+
# install workaround
|
| 419 |
+
_addsitedirs(site_packages_all)
|
| 420 |
+
|
| 421 |
+
env = dict(os.environ, PYTHONPATH=os.pathsep.join(map(str, site_packages_all)))
|
| 422 |
+
cmd = [
|
| 423 |
+
sys.executable,
|
| 424 |
+
'-m',
|
| 425 |
+
'pip',
|
| 426 |
+
'install',
|
| 427 |
+
'--editable',
|
| 428 |
+
str(sample_project),
|
| 429 |
+
'--prefix',
|
| 430 |
+
str(prefix),
|
| 431 |
+
'--no-build-isolation',
|
| 432 |
+
*editable_opts,
|
| 433 |
+
]
|
| 434 |
+
subprocess.check_call(cmd, env=env)
|
| 435 |
+
|
| 436 |
+
# now run 'sample' with the prefix on the PYTHONPATH
|
| 437 |
+
bin = 'Scripts' if platform.system() == 'Windows' else 'bin'
|
| 438 |
+
exe = prefix / bin / 'sample'
|
| 439 |
+
subprocess.check_call([exe], env=env)
|
| 440 |
+
|
| 441 |
+
|
| 442 |
+
class TestFinderTemplate:
    """This test focus in getting a particular implementation detail right.
    If at some point in time the implementation is changed for something different,
    this test can be modified or even excluded.
    """

    def install_finder(self, finder):
        # Execute the generated finder source and call its ``install()`` hook,
        # which registers the editable-install finder on ``sys.meta_path``.
        loc = {}
        exec(finder, loc, loc)
        loc["install"]()

    def test_packages(self, tmp_path):
        # Simple mapping: top-level names redirected to arbitrary directories.
        files = {
            "src1": {
                "pkg1": {
                    "__init__.py": "",
                    "subpkg": {"mod1.py": "a = 42"},
                },
            },
            "src2": {"mod2.py": "a = 43"},
        }
        jaraco.path.build(files, prefix=tmp_path)

        mapping = {
            "pkg1": str(tmp_path / "src1/pkg1"),
            "mod2": str(tmp_path / "src2/mod2"),
        }
        template = _finder_template(str(uuid4()), mapping, {})

        with contexts.save_paths(), contexts.save_sys_modules():
            # Drop any previously imported copies so the finder is exercised.
            for mod in ("pkg1", "pkg1.subpkg", "pkg1.subpkg.mod1", "mod2"):
                sys.modules.pop(mod, None)

            self.install_finder(template)
            mod1 = import_module("pkg1.subpkg.mod1")
            mod2 = import_module("mod2")
            subpkg = import_module("pkg1.subpkg")

            assert mod1.a == 42
            assert mod2.a == 43
            expected = str((tmp_path / "src1/pkg1/subpkg").resolve())
            assert_path(subpkg, expected)

    def test_namespace(self, tmp_path):
        # A mapped name nested inside an implicit namespace package ("ns").
        files = {"pkg": {"__init__.py": "a = 13", "text.txt": "abc"}}
        jaraco.path.build(files, prefix=tmp_path)

        mapping = {"ns.othername": str(tmp_path / "pkg")}
        namespaces = {"ns": []}

        template = _finder_template(str(uuid4()), mapping, namespaces)
        with contexts.save_paths(), contexts.save_sys_modules():
            for mod in ("ns", "ns.othername"):
                sys.modules.pop(mod, None)

            self.install_finder(template)
            pkg = import_module("ns.othername")
            text = importlib_resources.files(pkg) / "text.txt"

            expected = str((tmp_path / "pkg").resolve())
            assert_path(pkg, expected)
            assert pkg.a == 13

            # Make sure resources can also be found
            assert text.read_text(encoding="utf-8") == "abc"

    def test_combine_namespaces(self, tmp_path):
        # Two source trees contributing to the same namespace package "ns".
        files = {
            "src1": {"ns": {"pkg1": {"__init__.py": "a = 13"}}},
            "src2": {"ns": {"mod2.py": "b = 37"}},
        }
        jaraco.path.build(files, prefix=tmp_path)

        mapping = {
            "ns.pkgA": str(tmp_path / "src1/ns/pkg1"),
            "ns": str(tmp_path / "src2/ns"),
        }
        namespaces_ = {"ns": [str(tmp_path / "src1"), str(tmp_path / "src2")]}
        template = _finder_template(str(uuid4()), mapping, namespaces_)

        with contexts.save_paths(), contexts.save_sys_modules():
            for mod in ("ns", "ns.pkgA", "ns.mod2"):
                sys.modules.pop(mod, None)

            self.install_finder(template)
            pkgA = import_module("ns.pkgA")
            mod2 = import_module("ns.mod2")

            expected = str((tmp_path / "src1/ns/pkg1").resolve())
            assert_path(pkgA, expected)
            assert pkgA.a == 13
            assert mod2.b == 37

    def test_combine_namespaces_nested(self, tmp_path):
        """
        Users may attempt to combine namespace packages in a nested way via
        ``package_dir`` as shown in pypa/setuptools#4248.
        """

        files = {
            "src": {"my_package": {"my_module.py": "a = 13"}},
            "src2": {"my_package2": {"my_module2.py": "b = 37"}},
        }

        stack = jaraco.path.DirectoryStack()
        with stack.context(tmp_path):
            jaraco.path.build(files)
            attrs = {
                "script_name": "%PEP 517%",
                "package_dir": {
                    "different_name": "src/my_package",
                    "different_name.subpkg": "src2/my_package2",
                },
                "packages": ["different_name", "different_name.subpkg"],
            }
            dist = Distribution(attrs)
            finder = _TopLevelFinder(dist, str(uuid4()))
            # get_implementation() yields (filename, content) pairs; pick the .py one.
            code = next(v for k, v in finder.get_implementation() if k.endswith(".py"))

        with contexts.save_paths(), contexts.save_sys_modules():
            for mod in attrs["packages"]:
                sys.modules.pop(mod, None)

            self.install_finder(code)
            mod1 = import_module("different_name.my_module")
            mod2 = import_module("different_name.subpkg.my_module2")

            expected = str((tmp_path / "src/my_package/my_module.py").resolve())
            assert str(Path(mod1.__file__).resolve()) == expected

            expected = str((tmp_path / "src2/my_package2/my_module2.py").resolve())
            assert str(Path(mod2.__file__).resolve()) == expected

            assert mod1.a == 13
            assert mod2.b == 37

    def test_dynamic_path_computation(self, tmp_path):
        # Follows the example in PEP 420
        files = {
            "project1": {"parent": {"child": {"one.py": "x = 1"}}},
            "project2": {"parent": {"child": {"two.py": "x = 2"}}},
            "project3": {"parent": {"child": {"three.py": "x = 3"}}},
        }
        jaraco.path.build(files, prefix=tmp_path)
        mapping = {}
        namespaces_ = {"parent": [str(tmp_path / "project1/parent")]}
        template = _finder_template(str(uuid4()), mapping, namespaces_)

        mods = (f"parent.child.{name}" for name in ("one", "two", "three"))
        with contexts.save_paths(), contexts.save_sys_modules():
            for mod in ("parent", "parent.child", "parent.child", *mods):
                sys.modules.pop(mod, None)

            self.install_finder(template)

            one = import_module("parent.child.one")
            assert one.x == 1

            # Siblings are only importable after their project root is added
            # to sys.path (namespace __path__ is recomputed dynamically).
            with pytest.raises(ImportError):
                import_module("parent.child.two")

            sys.path.append(str(tmp_path / "project2"))
            two = import_module("parent.child.two")
            assert two.x == 2

            with pytest.raises(ImportError):
                import_module("parent.child.three")

            sys.path.append(str(tmp_path / "project3"))
            three = import_module("parent.child.three")
            assert three.x == 3

    def test_no_recursion(self, tmp_path):
        # See issue #3550
        files = {
            "pkg": {
                "__init__.py": "from . import pkg",
            },
        }
        jaraco.path.build(files, prefix=tmp_path)

        mapping = {
            "pkg": str(tmp_path / "pkg"),
        }
        template = _finder_template(str(uuid4()), mapping, {})

        with contexts.save_paths(), contexts.save_sys_modules():
            sys.modules.pop("pkg", None)

            self.install_finder(template)
            # The self-import inside __init__ must fail instead of recursing.
            with pytest.raises(ImportError, match="pkg"):
                import_module("pkg")

    def test_similar_name(self, tmp_path):
        files = {
            "foo": {
                "__init__.py": "",
                "bar": {
                    "__init__.py": "",
                },
            },
        }
        jaraco.path.build(files, prefix=tmp_path)

        mapping = {
            "foo": str(tmp_path / "foo"),
        }
        template = _finder_template(str(uuid4()), mapping, {})

        with contexts.save_paths(), contexts.save_sys_modules():
            sys.modules.pop("foo", None)
            sys.modules.pop("foo.bar", None)

            self.install_finder(template)
            # "foobar" shares a prefix with "foo" but must not be matched.
            with pytest.raises(ImportError, match="foobar"):
                import_module("foobar")

    def test_case_sensitivity(self, tmp_path):
        files = {
            "foo": {
                "__init__.py": "",
                "lowercase.py": "x = 1",
                "bar": {
                    "__init__.py": "",
                    "lowercase.py": "x = 2",
                },
            },
        }
        jaraco.path.build(files, prefix=tmp_path)
        mapping = {
            "foo": str(tmp_path / "foo"),
        }
        template = _finder_template(str(uuid4()), mapping, {})
        with contexts.save_paths(), contexts.save_sys_modules():
            sys.modules.pop("foo", None)

            self.install_finder(template)
            # Wrong-case names must fail even on case-insensitive filesystems.
            with pytest.raises(ImportError, match="'FOO'"):
                import_module("FOO")

            with pytest.raises(ImportError, match="'foo\\.LOWERCASE'"):
                import_module("foo.LOWERCASE")

            with pytest.raises(ImportError, match="'foo\\.bar\\.Lowercase'"):
                import_module("foo.bar.Lowercase")

            with pytest.raises(ImportError, match="'foo\\.BAR'"):
                import_module("foo.BAR.lowercase")

            with pytest.raises(ImportError, match="'FOO'"):
                import_module("FOO.bar.lowercase")

            mod = import_module("foo.lowercase")
            assert mod.x == 1

            mod = import_module("foo.bar.lowercase")
            assert mod.x == 2

    def test_namespace_case_sensitivity(self, tmp_path):
        files = {
            "pkg": {
                "__init__.py": "a = 13",
                "foo": {
                    "__init__.py": "b = 37",
                    "bar.py": "c = 42",
                },
            },
        }
        jaraco.path.build(files, prefix=tmp_path)

        mapping = {"ns.othername": str(tmp_path / "pkg")}
        namespaces = {"ns": []}

        template = _finder_template(str(uuid4()), mapping, namespaces)
        with contexts.save_paths(), contexts.save_sys_modules():
            for mod in ("ns", "ns.othername"):
                sys.modules.pop(mod, None)

            self.install_finder(template)
            pkg = import_module("ns.othername")
            expected = str((tmp_path / "pkg").resolve())
            assert_path(pkg, expected)
            assert pkg.a == 13

            foo = import_module("ns.othername.foo")
            assert foo.b == 37

            bar = import_module("ns.othername.foo.bar")
            assert bar.c == 42

            # Case-mismatched segments anywhere in the dotted name must fail.
            with pytest.raises(ImportError, match="'NS'"):
                import_module("NS.othername.foo")

            with pytest.raises(ImportError, match="'ns\\.othername\\.FOO\\'"):
                import_module("ns.othername.FOO")

            with pytest.raises(ImportError, match="'ns\\.othername\\.foo\\.BAR\\'"):
                import_module("ns.othername.foo.BAR")

    def test_intermediate_packages(self, tmp_path):
        """
        The finder should not import ``fullname`` if the intermediate segments
        don't exist (see pypa/setuptools#4019).
        """
        files = {
            "src": {
                "mypkg": {
                    "__init__.py": "",
                    "config.py": "a = 13",
                    "helloworld.py": "b = 13",
                    "components": {
                        "config.py": "a = 37",
                    },
                },
            }
        }
        jaraco.path.build(files, prefix=tmp_path)

        mapping = {"mypkg": str(tmp_path / "src/mypkg")}
        template = _finder_template(str(uuid4()), mapping, {})

        with contexts.save_paths(), contexts.save_sys_modules():
            for mod in (
                "mypkg",
                "mypkg.config",
                "mypkg.helloworld",
                "mypkg.components",
                "mypkg.components.config",
                "mypkg.components.helloworld",
            ):
                sys.modules.pop(mod, None)

            self.install_finder(template)

            config = import_module("mypkg.components.config")
            assert config.a == 37

            helloworld = import_module("mypkg.helloworld")
            assert helloworld.b == 13

            with pytest.raises(ImportError):
                import_module("mypkg.components.helloworld")
|
| 784 |
+
|
| 785 |
+
|
| 786 |
+
def test_pkg_roots(tmp_path):
    """This test focus in getting a particular implementation detail right.
    If at some point in time the implementation is changed for something different,
    this test can be modified or even excluded.
    """
    files = {
        "a": {"b": {"__init__.py": "ab = 1"}, "__init__.py": "a = 1"},
        "d": {"__init__.py": "d = 1", "e": {"__init__.py": "de = 1"}},
        "f": {"g": {"h": {"__init__.py": "fgh = 1"}}},
        "other": {"__init__.py": "abc = 1"},
        "another": {"__init__.py": "abcxyz = 1"},
        "yet_another": {"__init__.py": "mnopq = 1"},
    }
    jaraco.path.build(files, prefix=tmp_path)
    # Custom mappings that redirect (sub)packages to unrelated directories.
    package_dir = {
        "a.b.c": "other",
        "a.b.c.x.y.z": "another",
        "m.n.o.p.q": "yet_another",
    }
    packages = [
        "a",
        "a.b",
        "a.b.c",
        "a.b.c.x.y",
        "a.b.c.x.y.z",
        "d",
        "d.e",
        "f",
        "f.g",
        "f.g.h",
        "m.n.o.p.q",
    ]
    # Only "root" packages (not reachable from a parent's directory) get entries.
    roots = _find_package_roots(packages, package_dir, tmp_path)
    assert roots == {
        "a": str(tmp_path / "a"),
        "a.b.c": str(tmp_path / "other"),
        "a.b.c.x.y.z": str(tmp_path / "another"),
        "d": str(tmp_path / "d"),
        "f": str(tmp_path / "f"),
        "m.n.o.p.q": str(tmp_path / "yet_another"),
    }

    # Namespaces backed by real directories (no __init__.py on disk).
    ns = set(dict(_find_namespaces(packages, roots)))
    assert ns == {"f", "f.g"}

    # Virtual namespaces: intermediate segments with no directory at all.
    ns = set(_find_virtual_namespaces(roots))
    assert ns == {"a.b", "a.b.c.x", "a.b.c.x.y", "m", "m.n", "m.n.o", "m.n.o.p"}
|
| 833 |
+
|
| 834 |
+
|
| 835 |
+
class TestOverallBehaviour:
    # End-to-end editable-install behaviour across several project layouts.
    PYPROJECT = """\
        [build-system]
        requires = ["setuptools"]
        build-backend = "setuptools.build_meta"

        [project]
        name = "mypkg"
        version = "3.14159"
        """

    # Any: Would need a TypedDict. Keep it simple for tests
    FLAT_LAYOUT: dict[str, Any] = {
        "pyproject.toml": dedent(PYPROJECT),
        "MANIFEST.in": EXAMPLE["MANIFEST.in"],
        "otherfile.py": "",
        "mypkg": {
            "__init__.py": "",
            "mod1.py": "var = 42",
            "subpackage": {
                "__init__.py": "",
                "mod2.py": "var = 13",
                "resource_file.txt": "resource 39",
            },
        },
    }

    EXAMPLES = {
        "flat-layout": FLAT_LAYOUT,
        "src-layout": {
            "pyproject.toml": dedent(PYPROJECT),
            "MANIFEST.in": EXAMPLE["MANIFEST.in"],
            "otherfile.py": "",
            "src": {"mypkg": FLAT_LAYOUT["mypkg"]},
        },
        "custom-layout": {
            "pyproject.toml": dedent(PYPROJECT)
            + dedent(
                """\
                [tool.setuptools]
                packages = ["mypkg", "mypkg.subpackage"]

                [tool.setuptools.package-dir]
                "mypkg.subpackage" = "other"
                """
            ),
            "MANIFEST.in": EXAMPLE["MANIFEST.in"],
            "otherfile.py": "",
            "mypkg": {
                "__init__.py": "",
                "mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"],
            },
            "other": FLAT_LAYOUT["mypkg"]["subpackage"],
        },
        "namespace": {
            "pyproject.toml": dedent(PYPROJECT),
            "MANIFEST.in": EXAMPLE["MANIFEST.in"],
            "otherfile.py": "",
            "src": {
                "mypkg": {
                    "mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"],
                    "subpackage": FLAT_LAYOUT["mypkg"]["subpackage"],
                },
            },
        },
    }

    @pytest.mark.xfail(sys.platform == "darwin", reason="pypa/setuptools#4328")
    @pytest.mark.parametrize("layout", EXAMPLES.keys())
    def test_editable_install(self, tmp_path, venv, layout, editable_opts):
        project, _ = install_project(
            "mypkg", venv, tmp_path, self.EXAMPLES[layout], *editable_opts
        )

        # Ensure stray files are not importable
        cmd_import_error = """\
        try:
            import otherfile
        except ImportError as ex:
            print(ex)
        """
        out = venv.run(["python", "-c", dedent(cmd_import_error)])
        assert "No module named 'otherfile'" in out

        # Ensure the modules are importable
        cmd_get_vars = """\
        import mypkg, mypkg.mod1, mypkg.subpackage.mod2
        print(mypkg.mod1.var, mypkg.subpackage.mod2.var)
        """
        out = venv.run(["python", "-c", dedent(cmd_get_vars)])
        assert "42 13" in out

        # Ensure resources are reachable
        cmd_get_resource = """\
        import mypkg.subpackage
        from setuptools._importlib import resources as importlib_resources
        text = importlib_resources.files(mypkg.subpackage) / "resource_file.txt"
        print(text.read_text(encoding="utf-8"))
        """
        out = venv.run(["python", "-c", dedent(cmd_get_resource)])
        assert "resource 39" in out

        # Ensure files are editable
        mod1 = next(project.glob("**/mod1.py"))
        mod2 = next(project.glob("**/mod2.py"))
        resource_file = next(project.glob("**/resource_file.txt"))

        mod1.write_text("var = 17", encoding="utf-8")
        mod2.write_text("var = 781", encoding="utf-8")
        resource_file.write_text("resource 374", encoding="utf-8")

        out = venv.run(["python", "-c", dedent(cmd_get_vars)])
        assert "42 13" not in out
        assert "17 781" in out

        out = venv.run(["python", "-c", dedent(cmd_get_resource)])
        assert "resource 39" not in out
        assert "resource 374" in out
|
| 953 |
+
|
| 954 |
+
|
| 955 |
+
class TestLinkTree:
    # Strict editable mode: an auxiliary tree of links mirroring the dist files.
    FILES = deepcopy(TestOverallBehaviour.EXAMPLES["src-layout"])
    FILES["pyproject.toml"] += dedent(
        """\
        [tool.setuptools]
        # Temporary workaround: both `include-package-data` and `package-data` configs
        # can be removed after #3260 is fixed.
        include-package-data = false
        package-data = {"*" = ["*.txt"]}

        [tool.setuptools.packages.find]
        where = ["src"]
        exclude = ["*.subpackage*"]
        """
    )
    FILES["src"]["mypkg"]["resource.not_in_manifest"] = "abc"

    def test_generated_tree(self, tmp_path):
        jaraco.path.build(self.FILES, prefix=tmp_path)

        with _Path(tmp_path):
            name = "mypkg-3.14159"
            dist = Distribution({"script_name": "%PEP 517%"})
            dist.parse_config_files()

            wheel = Mock()
            aux = tmp_path / ".aux"
            build = tmp_path / ".build"
            aux.mkdir()
            build.mkdir()

            build_py = dist.get_command_obj("build_py")
            build_py.editable_mode = True
            build_py.build_lib = str(build)
            build_py.ensure_finalized()
            outputs = build_py.get_outputs()
            output_mapping = build_py.get_output_mapping()

            make_tree = _LinkTree(dist, name, aux, build)
            make_tree(wheel, outputs, output_mapping)

            # Included module is present as a link back to the source tree.
            mod1 = next(aux.glob("**/mod1.py"))
            expected = tmp_path / "src/mypkg/mod1.py"
            assert_link_to(mod1, expected)

            # Excluded subpackage and its contents must not be linked.
            assert next(aux.glob("**/subpackage"), None) is None
            assert next(aux.glob("**/mod2.py"), None) is None
            assert next(aux.glob("**/resource_file.txt"), None) is None

            assert next(aux.glob("**/resource.not_in_manifest"), None) is None

    def test_strict_install(self, tmp_path, venv):
        opts = ["--config-settings", "editable-mode=strict"]
        install_project("mypkg", venv, tmp_path, self.FILES, *opts)

        out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
        assert "42" in out

        # Ensure packages excluded from distribution are not importable
        cmd_import_error = """\
        try:
            from mypkg import subpackage
        except ImportError as ex:
            print(ex)
        """
        out = venv.run(["python", "-c", dedent(cmd_import_error)])
        assert "cannot import name 'subpackage'" in out

        # Ensure resource files excluded from distribution are not reachable
        cmd_get_resource = """\
        import mypkg
        from setuptools._importlib import resources as importlib_resources
        try:
            text = importlib_resources.files(mypkg) / "resource.not_in_manifest"
            print(text.read_text(encoding="utf-8"))
        except FileNotFoundError as ex:
            print(ex)
        """
        out = venv.run(["python", "-c", dedent(cmd_get_resource)])
        assert "No such file or directory" in out
        assert "resource.not_in_manifest" in out
|
| 1036 |
+
|
| 1037 |
+
|
| 1038 |
+
@pytest.mark.filterwarnings("ignore:.*compat.*:setuptools.SetuptoolsDeprecationWarning")
def test_compat_install(tmp_path, venv):
    """Legacy ``editable-mode=compat`` exposes the raw project dir on sys.path."""
    # TODO: Remove `compat` after Dec/2022.
    opts = ["--config-settings", "editable-mode=compat"]
    files = TestOverallBehaviour.EXAMPLES["custom-layout"]
    install_project("mypkg", venv, tmp_path, files, *opts)

    out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
    assert "42" in out

    expected_path = comparable_path(str(tmp_path))

    # Compatible behaviour will make spurious modules and excluded
    # files importable directly from the original path
    for cmd in (
        "import otherfile; print(otherfile)",
        "import other; print(other)",
        "import mypkg; print(mypkg)",
    ):
        out = comparable_path(venv.run(["python", "-c", cmd]))
        assert expected_path in out

    # Compatible behaviour will not consider custom mappings
    cmd = """\
    try:
        from mypkg import subpackage;
    except ImportError as ex:
        print(ex)
    """
    out = venv.run(["python", "-c", dedent(cmd)])
    assert "cannot import name 'subpackage'" in out
|
| 1069 |
+
|
| 1070 |
+
|
| 1071 |
+
def test_pbr_integration(tmp_path, venv, editable_opts):
    """Ensure editable installs work with pbr, issue #3500"""
    files = {
        "pyproject.toml": dedent(
            """\
            [build-system]
            requires = ["setuptools"]
            build-backend = "setuptools.build_meta"
            """
        ),
        "setup.py": dedent(
            """\
            __import__('setuptools').setup(
                pbr=True,
                setup_requires=["pbr"],
            )
            """
        ),
        "setup.cfg": dedent(
            """\
            [metadata]
            name = mypkg

            [files]
            packages =
                mypkg
            """
        ),
        "mypkg": {
            "__init__.py": "",
            "hello.py": "print('Hello world!')",
        },
        "other": {"test.txt": "Another file in here."},
    }
    venv.run(["python", "-m", "pip", "install", "pbr"])

    # PBR_VERSION short-circuits pbr's git-based version detection.
    with contexts.environment(PBR_VERSION="0.42"):
        install_project("mypkg", venv, tmp_path, files, *editable_opts)

    out = venv.run(["python", "-c", "import mypkg.hello"])
    assert "Hello world!" in out
|
| 1112 |
+
|
| 1113 |
+
|
| 1114 |
+
class TestCustomBuildPy:
    """
    Issue #3501 indicates that some plugins/customizations might rely on:

    1. ``build_py`` not running
    2. ``build_py`` always copying files to ``build_lib``

    During the transition period setuptools should prevent potential errors from
    happening due to those assumptions.
    """

    # TODO: Remove tests after _run_build_steps is removed.

    FILES = {
        **TestOverallBehaviour.EXAMPLES["flat-layout"],
        # A build_py subclass that always raises, simulating a broken plugin.
        "setup.py": dedent(
            """\
            import pathlib
            from setuptools import setup
            from setuptools.command.build_py import build_py as orig

            class my_build_py(orig):
                def run(self):
                    super().run()
                    raise ValueError("TEST_RAISE")

            setup(cmdclass={"build_py": my_build_py})
            """
        ),
    }

    def test_safeguarded_from_errors(self, tmp_path, venv):
        """Ensure that errors in custom build_py are reported as warnings"""
        # Warnings should show up
        _, out = install_project("mypkg", venv, tmp_path, self.FILES)
        assert "SetuptoolsDeprecationWarning" in out
        assert "ValueError: TEST_RAISE" in out
        # but installation should be successful
        out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
        assert "42" in out
|
| 1154 |
+
|
| 1155 |
+
|
| 1156 |
+
class TestCustomBuildWheel:
    def install_custom_build_wheel(self, dist):
        # Registers a bdist_wheel subclass that touches ``plat_name``, mimicking
        # extensions that read it during get_tag() (see issue #3513).
        bdist_wheel_cls = dist.get_command_class("bdist_wheel")

        class MyBdistWheel(bdist_wheel_cls):
            def get_tag(self):
                # In issue #3513, we can see that some extensions may try to access
                # the `plat_name` property in bdist_wheel
                if self.plat_name.startswith("macosx-"):
                    _ = "macOS platform"
                return super().get_tag()

        dist.cmdclass["bdist_wheel"] = MyBdistWheel

    def test_access_plat_name(self, tmpdir_cwd):
        # Even when a custom bdist_wheel tries to access plat_name the build should
        # be successful
        jaraco.path.build({"module.py": "x = 42"})
        dist = Distribution()
        dist.script_name = "setup.py"
        dist.set_defaults()
        self.install_custom_build_wheel(dist)
        cmd = editable_wheel(dist)
        cmd.ensure_finalized()
        cmd.run()
        wheel_file = str(next(Path().glob('dist/*.whl')))
        assert "editable" in wheel_file
|
| 1183 |
+
|
| 1184 |
+
|
| 1185 |
+
class TestCustomBuildExt:
    def install_custom_build_ext_distutils(self, dist):
        # Registers a build_ext based on the *distutils* (not setuptools) class,
        # to verify editable_wheel copes with legacy command classes.
        from distutils.command.build_ext import build_ext as build_ext_cls

        class MyBuildExt(build_ext_cls):
            pass

        dist.cmdclass["build_ext"] = MyBuildExt

    @pytest.mark.skipif(
        sys.platform != "linux", reason="compilers may fail without correct setup"
    )
    def test_distutils_leave_inplace_files(self, tmpdir_cwd):
        jaraco.path.build({"module.c": ""})
        attrs = {
            "ext_modules": [Extension("module", ["module.c"])],
        }
        dist = Distribution(attrs)
        dist.script_name = "setup.py"
        dist.set_defaults()
        self.install_custom_build_ext_distutils(dist)
        cmd = editable_wheel(dist)
        cmd.ensure_finalized()
        cmd.run()
        wheel_file = str(next(Path().glob('dist/*.whl')))
        assert "editable" in wheel_file
        # The compiled extension must stay in-place next to its source.
        files = [p for p in Path().glob("module.*") if p.suffix != ".c"]
        assert len(files) == 1
        name = files[0].name
        assert any(name.endswith(ext) for ext in EXTENSION_SUFFIXES)
|
| 1215 |
+
|
| 1216 |
+
|
| 1217 |
+
def test_debugging_tips(tmpdir_cwd, monkeypatch):
    """Make sure to display useful debugging tips to the user."""
    jaraco.path.build({"module.py": "x = 42"})
    dist = Distribution()
    dist.script_name = "setup.py"
    dist.set_defaults()
    cmd = editable_wheel(dist)
    cmd.ensure_finalized()

    # Force an arbitrary failure inside cmd.run() via a patched dependency.
    SimulatedErr = type("SimulatedErr", (Exception,), {})
    simulated_failure = Mock(side_effect=SimulatedErr())
    monkeypatch.setattr(cmd, "get_finalized_command", simulated_failure)

    # The original error must propagate, accompanied by the debugging warning.
    expected_msg = "following steps are recommended to help debug"
    with pytest.raises(SimulatedErr), pytest.warns(_DebuggingTips, match=expected_msg):
        cmd.run()
|
| 1233 |
+
|
| 1234 |
+
|
| 1235 |
+
@pytest.mark.filterwarnings("error")
def test_encode_pth():
    """Ensure _encode_pth function does not produce encoding warnings"""
    content = _encode_pth("tkmilan_ç_utf8")  # no warnings (would be turned into errors)
    assert isinstance(content, bytes)
|
| 1240 |
+
|
| 1241 |
+
|
| 1242 |
+
def install_project(name, venv, tmp_path, files, *opts):
    """Create the project described by ``files`` under ``tmp_path/name`` and
    pip-install it in editable mode inside ``venv``.

    Returns a ``(project_dir, pip_output)`` tuple.
    """
    project = tmp_path / name
    project.mkdir()
    jaraco.path.build(files, prefix=project)
    # force current version of setuptools
    install_opts = [*opts, "--no-build-isolation"]
    pip_cmd = ["python", "-m", "pip", "-v", "install", "-e", str(project), *install_opts]
    out = venv.run(pip_cmd, stderr=subprocess.STDOUT)
    return project, out
|
| 1252 |
+
|
| 1253 |
+
|
| 1254 |
+
def _addsitedirs(new_dirs):
|
| 1255 |
+
"""To use this function, it is necessary to insert new_dir in front of sys.path.
|
| 1256 |
+
The Python process will try to import a ``sitecustomize`` module on startup.
|
| 1257 |
+
If we manipulate sys.path/PYTHONPATH, we can force it to run our code,
|
| 1258 |
+
which invokes ``addsitedir`` and ensure ``.pth`` files are loaded.
|
| 1259 |
+
"""
|
| 1260 |
+
content = '\n'.join(
|
| 1261 |
+
("import site",)
|
| 1262 |
+
+ tuple(f"site.addsitedir({os.fspath(new_dir)!r})" for new_dir in new_dirs)
|
| 1263 |
+
)
|
| 1264 |
+
(new_dirs[0] / "sitecustomize.py").write_text(content, encoding="utf-8")
|
| 1265 |
+
|
| 1266 |
+
|
| 1267 |
+
# ---- Assertion Helpers ----
|
| 1268 |
+
|
| 1269 |
+
|
| 1270 |
+
def assert_path(pkg, expected):
    """Assert that the first ``__path__`` entry of ``pkg`` resolves to ``expected``.

    __path__ is not guaranteed to exist / may be empty, so nothing is asserted
    in that case.
    """
    if not pkg.__path__:
        return
    first = next(iter(pkg.__path__), None)
    if first:
        assert str(Path(first).resolve()) == expected
|
| 1276 |
+
|
| 1277 |
+
|
| 1278 |
+
def assert_link_to(file: Path, other: Path) -> None:
    """Assert ``file`` is a symlink or hard link pointing at ``other``."""
    if file.is_symlink():
        # Symlink: both paths must resolve to the same target.
        assert str(file.resolve()) == str(other.resolve())
        return
    # Hard link: same inode on the same device.
    mine = file.stat()
    theirs = other.stat()
    assert mine[stat.ST_INO] == theirs[stat.ST_INO]
    assert mine[stat.ST_DEV] == theirs[stat.ST_DEV]
|
| 1286 |
+
|
| 1287 |
+
|
| 1288 |
+
def comparable_path(str_with_path: str) -> str:
    """Normalize a path-bearing string for case- and separator-insensitive comparison."""
    lowered = str_with_path.lower()
    forward_slashes = lowered.replace(os.sep, "/")
    return forward_slashes.replace("//", "/")
|
videollama2/lib/python3.10/site-packages/setuptools/tests/test_glob.py
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
from jaraco import path
|
| 3 |
+
|
| 4 |
+
from setuptools.glob import glob
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
@pytest.mark.parametrize(
    ('tree', 'pattern', 'matches'),
    (
        # Degenerate cases: empty tree with an empty pattern (bytes and str)
        # must match nothing rather than error.
        ('', b'', []),
        ('', '', []),
        # str pattern against a flat, typical project tree.
        (
            """
            appveyor.yml
            CHANGES.rst
            LICENSE
            MANIFEST.in
            pyproject.toml
            README.rst
            setup.cfg
            setup.py
            """,
            '*.rst',
            ('CHANGES.rst', 'README.rst'),
        ),
        # bytes pattern: glob() is expected to yield bytes results in kind.
        (
            """
            appveyor.yml
            CHANGES.rst
            LICENSE
            MANIFEST.in
            pyproject.toml
            README.rst
            setup.cfg
            setup.py
            """,
            b'*.rst',
            (b'CHANGES.rst', b'README.rst'),
        ),
    ),
)
def test_glob(monkeypatch, tmpdir, tree, pattern, matches):
    # Materialize the listed names as empty files in a temporary cwd,
    # then glob relative to it; order is not specified, so compare sorted.
    monkeypatch.chdir(tmpdir)
    path.build({name: '' for name in tree.split()})
    assert list(sorted(glob(pattern))) == list(sorted(matches))
|
videollama2/lib/python3.10/site-packages/setuptools/tests/test_namespaces.py
ADDED
|
@@ -0,0 +1,138 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import subprocess
|
| 2 |
+
import sys
|
| 3 |
+
|
| 4 |
+
from setuptools._path import paths_on_pythonpath
|
| 5 |
+
|
| 6 |
+
from . import namespaces
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class TestNamespaces:
    # Integration tests: each test builds pkg_resources-style namespace
    # packages on disk, installs them with ``pip install -t`` into throwaway
    # directories, and verifies importability from a fresh subprocess so the
    # current interpreter's state cannot interfere.

    def test_mixed_site_and_non_site(self, tmpdir):
        """
        Installing two packages sharing the same namespace, one installed
        to a site dir and the other installed just to a path on PYTHONPATH
        should leave the namespace in tact and both packages reachable by
        import.
        """
        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
        site_packages = tmpdir / 'site-packages'
        path_packages = tmpdir / 'path-packages'
        targets = site_packages, path_packages
        # use pip to install to the target directory
        install_cmd = [
            sys.executable,
            '-m',
            'pip.__main__',
            'install',
            str(pkg_A),
            '-t',
            str(site_packages),
        ]
        subprocess.check_call(install_cmd)
        # Turn the first target into a site dir so its .pth files are honored.
        namespaces.make_site_dir(site_packages)
        install_cmd = [
            sys.executable,
            '-m',
            'pip.__main__',
            'install',
            str(pkg_B),
            '-t',
            str(path_packages),
        ]
        subprocess.check_call(install_cmd)
        try_import = [
            sys.executable,
            '-c',
            'import myns.pkgA; import myns.pkgB',
        ]
        # Both halves of the namespace must import with both dirs on the path.
        with paths_on_pythonpath(map(str, targets)):
            subprocess.check_call(try_import)

    def test_pkg_resources_import(self, tmpdir):
        """
        Ensure that a namespace package doesn't break on import
        of pkg_resources.
        """
        pkg = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        target = tmpdir / 'packages'
        target.mkdir()
        install_cmd = [
            sys.executable,
            '-m',
            'pip',
            'install',
            '-t',
            str(target),
            str(pkg),
        ]
        # NOTE(review): the install itself runs with target on PYTHONPATH here,
        # unlike the other tests — presumably intentional; confirm if changing.
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(install_cmd)
        namespaces.make_site_dir(target)
        try_import = [
            sys.executable,
            '-c',
            'import pkg_resources',
        ]
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(try_import)

    def test_namespace_package_installed_and_cwd(self, tmpdir):
        """
        Installing a namespace packages but also having it in the current
        working directory, only one version should take precedence.
        """
        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        target = tmpdir / 'packages'
        # use pip to install to the target directory
        install_cmd = [
            sys.executable,
            '-m',
            'pip.__main__',
            'install',
            str(pkg_A),
            '-t',
            str(target),
        ]
        subprocess.check_call(install_cmd)
        namespaces.make_site_dir(target)

        # ensure that package imports and pkg_resources imports
        pkg_resources_imp = [
            sys.executable,
            '-c',
            'import pkg_resources; import myns.pkgA',
        ]
        # cwd=pkg_A puts the source checkout of the package on the path too.
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(pkg_resources_imp, cwd=str(pkg_A))

    def test_packages_in_the_same_namespace_installed_and_cwd(self, tmpdir):
        """
        Installing one namespace package and also have another in the same
        namespace in the current working directory, both of them must be
        importable.
        """
        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
        target = tmpdir / 'packages'
        # use pip to install to the target directory
        install_cmd = [
            sys.executable,
            '-m',
            'pip.__main__',
            'install',
            str(pkg_A),
            '-t',
            str(target),
        ]
        subprocess.check_call(install_cmd)
        namespaces.make_site_dir(target)

        # ensure that all packages import and pkg_resources imports
        pkg_resources_imp = [
            sys.executable,
            '-c',
            'import pkg_resources; import myns.pkgA; import myns.pkgB',
        ]
        # pkgA comes from the installed target; pkgB from the cwd checkout.
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(pkg_resources_imp, cwd=str(pkg_B))
|
videollama2/lib/python3.10/site-packages/setuptools/tests/test_sandbox.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""develop tests"""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import types
|
| 5 |
+
|
| 6 |
+
import pytest
|
| 7 |
+
|
| 8 |
+
import pkg_resources
|
| 9 |
+
import setuptools.sandbox
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class TestSandbox:
    # Tests for setuptools.sandbox: file-write confinement and _execfile quirks.

    def test_devnull(self, tmpdir):
        # Writing to os.devnull must be allowed inside the sandbox.
        # NOTE(review): _file_writer returns a closure that is never invoked
        # here — the test as written only constructs it; confirm intent.
        with setuptools.sandbox.DirectorySandbox(str(tmpdir)):
            self._file_writer(os.devnull)

    @staticmethod
    def _file_writer(path):
        # Factory returning a callable that writes 'xxx' to ``path``.
        def do_write():
            with open(path, 'w', encoding="utf-8") as f:
                f.write('xxx')

        return do_write

    def test_setup_py_with_BOM(self):
        """
        It should be possible to execute a setup.py with a Byte Order Mark
        """
        target = pkg_resources.resource_filename(__name__, 'script-with-bom.py')
        namespace = types.ModuleType('namespace')
        setuptools.sandbox._execfile(target, vars(namespace))
        # The fixture script sets ``result`` when it ran correctly.
        assert namespace.result == 'passed'

    def test_setup_py_with_CRLF(self, tmpdir):
        # _execfile must tolerate Windows-style line endings.
        setup_py = tmpdir / 'setup.py'
        with setup_py.open('wb') as stream:
            stream.write(b'"degenerate script"\r\n')
        setuptools.sandbox._execfile(str(setup_py), globals())
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class TestExceptionSaver:
    # Tests for ExceptionSaver: it traps an exception inside the context
    # manager and re-raises an equivalent exception on ``.resume()``.

    def test_exception_trapped(self):
        # The raise inside the context must not propagate out of the block.
        with setuptools.sandbox.ExceptionSaver():
            raise ValueError("details")

    def test_exception_resumed(self):
        with setuptools.sandbox.ExceptionSaver() as saved_exc:
            raise ValueError("details")

        with pytest.raises(ValueError) as caught:
            saved_exc.resume()

        assert isinstance(caught.value, ValueError)
        assert str(caught.value) == 'details'

    def test_exception_reconstructed(self):
        orig_exc = ValueError("details")

        with setuptools.sandbox.ExceptionSaver() as saved_exc:
            raise orig_exc

        with pytest.raises(ValueError) as caught:
            saved_exc.resume()

        assert isinstance(caught.value, ValueError)
        # resume() raises a pickled round-trip copy, not the original object.
        assert caught.value is not orig_exc

    def test_no_exception_passes_quietly(self):
        with setuptools.sandbox.ExceptionSaver() as saved_exc:
            pass

        # With nothing trapped, resume() is a no-op.
        saved_exc.resume()

    def test_unpickleable_exception(self):
        class CantPickleThis(Exception):
            "This Exception is unpickleable because it's not in globals"

            def __repr__(self) -> str:
                return f'CantPickleThis{self.args!r}'

        with setuptools.sandbox.ExceptionSaver() as saved_exc:
            raise CantPickleThis('detail')

        # Unpickleable exceptions are wrapped instead of being lost.
        with pytest.raises(setuptools.sandbox.UnpickleableException) as caught:
            saved_exc.resume()

        assert str(caught.value) == "CantPickleThis('detail',)"

    def test_unpickleable_exception_when_hiding_setuptools(self):
        """
        As revealed in #440, an infinite recursion can occur if an unpickleable
        exception while setuptools is hidden. Ensure this doesn't happen.
        """

        class ExceptionUnderTest(Exception):
            """
            An unpickleable exception (not in globals).
            """

        with pytest.raises(setuptools.sandbox.UnpickleableException) as caught:
            with setuptools.sandbox.save_modules():
                setuptools.sandbox.hide_setuptools()
                raise ExceptionUnderTest

        (msg,) = caught.value.args
        assert msg == 'ExceptionUnderTest()'

    def test_sandbox_violation_raised_hiding_setuptools(self, tmpdir):
        """
        When in a sandbox with setuptools hidden, a SandboxViolation
        should reflect a proper exception and not be wrapped in
        an UnpickleableException.
        """

        def write_file():
            "Trigger a SandboxViolation by writing outside the sandbox"
            with open('/etc/foo', 'w', encoding="utf-8"):
                pass

        with pytest.raises(setuptools.sandbox.SandboxViolation) as caught:
            with setuptools.sandbox.save_modules():
                setuptools.sandbox.hide_setuptools()
                with setuptools.sandbox.DirectorySandbox(str(tmpdir)):
                    write_file()

        # SandboxViolation carries the intercepted call: (op, args, kwargs).
        cmd, args, kwargs = caught.value.args
        assert cmd == 'open'
        assert args == ('/etc/foo', 'w')
        assert kwargs == {"encoding": "utf-8"}

        msg = str(caught.value)
        assert 'open' in msg
        assert "('/etc/foo', 'w')" in msg
        assert "{'encoding': 'utf-8'}" in msg
|
videollama2/lib/python3.10/site-packages/setuptools/tests/test_shutil_wrapper.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import stat
|
| 2 |
+
import sys
|
| 3 |
+
from unittest.mock import Mock
|
| 4 |
+
|
| 5 |
+
from setuptools import _shutil
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def test_rmtree_readonly(monkeypatch, tmp_path):
    """Verify onerr works as expected"""

    target_dir = tmp_path / "with_readonly"
    target_dir.mkdir()
    readonly_file = target_dir.joinpath("file.txt")
    readonly_file.touch()
    readonly_file.chmod(stat.S_IREAD)

    # On Windows a read-only file must be chmod-ed before removal, so the
    # onerr-style helper fires exactly once; on POSIX it is never needed.
    spy = Mock(wraps=_shutil.attempt_chmod_verbose)
    monkeypatch.setattr(_shutil, "attempt_chmod_verbose", spy)
    expected_count = 1 if sys.platform.startswith("win") else 0

    _shutil.rmtree(target_dir)
    assert spy.call_count == expected_count
    assert not target_dir.is_dir()
|
videollama2/lib/python3.10/site-packages/setuptools/tests/textwrap.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import textwrap
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def DALS(s):
    """Dedent And Left-Strip: normalize an indented triple-quoted sample."""
    dedented = textwrap.dedent(s)
    return dedented.lstrip()
|
videollama2/lib/python3.10/site-packages/sklearn/datasets/tests/data/openml/id_42074/data-v1-dl-21552912.arff.gz
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:f623e777c0a36ae6c82fae10a7c2088cb383298ea244595bf8dc95449c9be4c4
|
| 3 |
+
size 2326
|
videollama2/lib/python3.10/site-packages/websockets/server.py
ADDED
|
@@ -0,0 +1,575 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import base64
|
| 4 |
+
import binascii
|
| 5 |
+
import email.utils
|
| 6 |
+
import http
|
| 7 |
+
import warnings
|
| 8 |
+
from typing import Any, Callable, Generator, List, Optional, Sequence, Tuple, cast
|
| 9 |
+
|
| 10 |
+
from .datastructures import Headers, MultipleValuesError
|
| 11 |
+
from .exceptions import (
|
| 12 |
+
InvalidHandshake,
|
| 13 |
+
InvalidHeader,
|
| 14 |
+
InvalidHeaderValue,
|
| 15 |
+
InvalidOrigin,
|
| 16 |
+
InvalidStatus,
|
| 17 |
+
InvalidUpgrade,
|
| 18 |
+
NegotiationError,
|
| 19 |
+
)
|
| 20 |
+
from .extensions import Extension, ServerExtensionFactory
|
| 21 |
+
from .headers import (
|
| 22 |
+
build_extension,
|
| 23 |
+
parse_connection,
|
| 24 |
+
parse_extension,
|
| 25 |
+
parse_subprotocol,
|
| 26 |
+
parse_upgrade,
|
| 27 |
+
)
|
| 28 |
+
from .http11 import Request, Response
|
| 29 |
+
from .protocol import CONNECTING, OPEN, SERVER, Protocol, State
|
| 30 |
+
from .typing import (
|
| 31 |
+
ConnectionOption,
|
| 32 |
+
ExtensionHeader,
|
| 33 |
+
LoggerLike,
|
| 34 |
+
Origin,
|
| 35 |
+
Subprotocol,
|
| 36 |
+
UpgradeProtocol,
|
| 37 |
+
)
|
| 38 |
+
from .utils import accept_key
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
# See #940 for why lazy_import isn't used here for backwards compatibility.
|
| 42 |
+
from .legacy.server import * # isort:skip # noqa: I001
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
__all__ = ["ServerProtocol"]
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class ServerProtocol(Protocol):
|
| 49 |
+
"""
|
| 50 |
+
Sans-I/O implementation of a WebSocket server connection.
|
| 51 |
+
|
| 52 |
+
Args:
|
| 53 |
+
origins: acceptable values of the ``Origin`` header; include
|
| 54 |
+
:obj:`None` in the list if the lack of an origin is acceptable.
|
| 55 |
+
This is useful for defending against Cross-Site WebSocket
|
| 56 |
+
Hijacking attacks.
|
| 57 |
+
extensions: list of supported extensions, in order in which they
|
| 58 |
+
should be tried.
|
| 59 |
+
subprotocols: list of supported subprotocols, in order of decreasing
|
| 60 |
+
preference.
|
| 61 |
+
select_subprotocol: Callback for selecting a subprotocol among
|
| 62 |
+
those supported by the client and the server. It has the same
|
| 63 |
+
signature as the :meth:`select_subprotocol` method, including a
|
| 64 |
+
:class:`ServerProtocol` instance as first argument.
|
| 65 |
+
state: initial state of the WebSocket connection.
|
| 66 |
+
max_size: maximum size of incoming messages in bytes;
|
| 67 |
+
:obj:`None` disables the limit.
|
| 68 |
+
logger: logger for this connection;
|
| 69 |
+
defaults to ``logging.getLogger("websockets.client")``;
|
| 70 |
+
see the :doc:`logging guide <../../topics/logging>` for details.
|
| 71 |
+
|
| 72 |
+
"""
|
| 73 |
+
|
| 74 |
+
    def __init__(
        self,
        *,
        origins: Optional[Sequence[Optional[Origin]]] = None,
        extensions: Optional[Sequence[ServerExtensionFactory]] = None,
        subprotocols: Optional[Sequence[Subprotocol]] = None,
        select_subprotocol: Optional[
            Callable[
                [ServerProtocol, Sequence[Subprotocol]],
                Optional[Subprotocol],
            ]
        ] = None,
        state: State = CONNECTING,
        max_size: Optional[int] = 2**20,  # 1 MiB default limit on messages
        logger: Optional[LoggerLike] = None,
    ):
        # See the class docstring for the meaning of each keyword argument.
        super().__init__(
            side=SERVER,
            state=state,
            max_size=max_size,
            logger=logger,
        )
        self.origins = origins
        self.available_extensions = extensions
        self.available_subprotocols = subprotocols
        if select_subprotocol is not None:
            # Bind select_subprotocol then shadow self.select_subprotocol.
            # Use setattr to work around https://github.com/python/mypy/issues/2427.
            setattr(
                self,
                "select_subprotocol",
                select_subprotocol.__get__(self, self.__class__),
            )
|
| 107 |
+
|
| 108 |
+
    def accept(self, request: Request) -> Response:
        """
        Create a handshake response to accept the connection.

        If the connection cannot be established, the handshake response
        actually rejects the handshake.

        You must send the handshake response with :meth:`send_response`.

        You may modify it before sending it, for example to add HTTP headers.

        Args:
            request: WebSocket handshake request event received from the client.

        Returns:
            WebSocket handshake response event to send to the client.

        """
        # Each failure mode maps to a specific HTTP status; exceptions are
        # recorded on both the request and the protocol before rejecting.
        try:
            (
                accept_header,
                extensions_header,
                protocol_header,
            ) = self.process_request(request)
        except InvalidOrigin as exc:
            request._exception = exc
            self.handshake_exc = exc
            if self.debug:
                self.logger.debug("! invalid origin", exc_info=True)
            return self.reject(
                http.HTTPStatus.FORBIDDEN,
                f"Failed to open a WebSocket connection: {exc}.\n",
            )
        except InvalidUpgrade as exc:
            request._exception = exc
            self.handshake_exc = exc
            if self.debug:
                self.logger.debug("! invalid upgrade", exc_info=True)
            response = self.reject(
                http.HTTPStatus.UPGRADE_REQUIRED,
                (
                    f"Failed to open a WebSocket connection: {exc}.\n"
                    f"\n"
                    f"You cannot access a WebSocket server directly "
                    f"with a browser. You need a WebSocket client.\n"
                ),
            )
            # 426 Upgrade Required responses advertise the expected protocol.
            response.headers["Upgrade"] = "websocket"
            return response
        except InvalidHandshake as exc:
            request._exception = exc
            self.handshake_exc = exc
            if self.debug:
                self.logger.debug("! invalid handshake", exc_info=True)
            return self.reject(
                http.HTTPStatus.BAD_REQUEST,
                f"Failed to open a WebSocket connection: {exc}.\n",
            )
        except Exception as exc:
            # Handle exceptions raised by user-provided select_subprotocol and
            # unexpected errors.
            request._exception = exc
            self.handshake_exc = exc
            self.logger.error("opening handshake failed", exc_info=True)
            return self.reject(
                http.HTTPStatus.INTERNAL_SERVER_ERROR,
                (
                    "Failed to open a WebSocket connection.\n"
                    "See server log for more information.\n"
                ),
            )

        # Success: build the 101 Switching Protocols response.
        headers = Headers()

        headers["Date"] = email.utils.formatdate(usegmt=True)

        headers["Upgrade"] = "websocket"
        headers["Connection"] = "Upgrade"
        headers["Sec-WebSocket-Accept"] = accept_header

        if extensions_header is not None:
            headers["Sec-WebSocket-Extensions"] = extensions_header

        if protocol_header is not None:
            headers["Sec-WebSocket-Protocol"] = protocol_header

        self.logger.info("connection open")
        return Response(101, "Switching Protocols", headers)
|
| 196 |
+
|
| 197 |
+
    def process_request(
        self,
        request: Request,
    ) -> Tuple[str, Optional[str], Optional[str]]:
        """
        Check a handshake request and negotiate extensions and subprotocol.

        This function doesn't verify that the request is an HTTP/1.1 or higher
        GET request and doesn't check the ``Host`` header. These controls are
        usually performed earlier in the HTTP request handling code. They're
        the responsibility of the caller.

        Args:
            request: WebSocket handshake request received from the client.

        Returns:
            Tuple[str, Optional[str], Optional[str]]:
                ``Sec-WebSocket-Accept``, ``Sec-WebSocket-Extensions``, and
                ``Sec-WebSocket-Protocol`` headers for the handshake response.

        Raises:
            InvalidHandshake: if the handshake request is invalid;
                then the server must return 400 Bad Request error.

        """
        headers = request.headers

        # Connection may appear several times and each value is a comma list;
        # flatten all of them into a single list of options.
        connection: List[ConnectionOption] = sum(
            [parse_connection(value) for value in headers.get_all("Connection")], []
        )

        if not any(value.lower() == "upgrade" for value in connection):
            raise InvalidUpgrade(
                "Connection", ", ".join(connection) if connection else None
            )

        upgrade: List[UpgradeProtocol] = sum(
            [parse_upgrade(value) for value in headers.get_all("Upgrade")], []
        )

        # For compatibility with non-strict implementations, ignore case when
        # checking the Upgrade header. The RFC always uses "websocket", except
        # in section 11.2. (IANA registration) where it uses "WebSocket".
        if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
            raise InvalidUpgrade("Upgrade", ", ".join(upgrade) if upgrade else None)

        try:
            key = headers["Sec-WebSocket-Key"]
        except KeyError as exc:
            raise InvalidHeader("Sec-WebSocket-Key") from exc
        except MultipleValuesError as exc:
            raise InvalidHeader(
                "Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found"
            ) from exc

        # The key must be exactly 16 random bytes, base64-encoded.
        try:
            raw_key = base64.b64decode(key.encode(), validate=True)
        except binascii.Error as exc:
            raise InvalidHeaderValue("Sec-WebSocket-Key", key) from exc
        if len(raw_key) != 16:
            raise InvalidHeaderValue("Sec-WebSocket-Key", key)

        try:
            version = headers["Sec-WebSocket-Version"]
        except KeyError as exc:
            raise InvalidHeader("Sec-WebSocket-Version") from exc
        except MultipleValuesError as exc:
            raise InvalidHeader(
                "Sec-WebSocket-Version",
                "more than one Sec-WebSocket-Version header found",
            ) from exc

        # Only version 13 (the RFC 6455 final version) is supported.
        if version != "13":
            raise InvalidHeaderValue("Sec-WebSocket-Version", version)

        accept_header = accept_key(key)

        self.origin = self.process_origin(headers)

        extensions_header, self.extensions = self.process_extensions(headers)

        protocol_header = self.subprotocol = self.process_subprotocol(headers)

        return (
            accept_header,
            extensions_header,
            protocol_header,
        )
|
| 285 |
+
|
| 286 |
+
    def process_origin(self, headers: Headers) -> Optional[Origin]:
        """
        Handle the Origin HTTP request header.

        Args:
            headers: WebSocket handshake request headers.

        Returns:
            Optional[Origin]: origin, if it is acceptable.

        Raises:
            InvalidHandshake: if the Origin header is invalid.
            InvalidOrigin: if the origin isn't acceptable.

        """
        # "The user agent MUST NOT include more than one Origin header field"
        # per https://www.rfc-editor.org/rfc/rfc6454.html#section-7.3.
        try:
            origin = cast(Optional[Origin], headers.get("Origin"))
        except MultipleValuesError as exc:
            raise InvalidHeader("Origin", "more than one Origin header found") from exc
        if self.origins is not None:
            # A missing Origin (None) is acceptable only if None was listed
            # explicitly in self.origins.
            if origin not in self.origins:
                raise InvalidOrigin(origin)
        return origin
|
| 311 |
+
|
| 312 |
+
def process_extensions(
    self,
    headers: Headers,
) -> Tuple[Optional[str], List[Extension]]:
    """
    Handle the Sec-WebSocket-Extensions HTTP request header.

    Accept or reject each extension proposed in the client request and
    negotiate parameters for the accepted ones.

    Per :rfc:`6455`, negotiation rules are defined by the specification of
    each extension. Each extension proposed by the client is therefore
    matched against each extension available in the server configuration;
    proposals with no match are ignored. If the client proposes several
    variants of the same extension, each variant may be accepted — the
    extensions themselves must enforce their own requirements, which is
    why the list of previously accepted extensions is passed to them.

    This process cannot reorder extensions: the server only selects a
    subset of what the client proposed, in the client's order. Stricter
    policies (mandatory extensions, ordering constraints) may be
    implemented by overriding this method.

    Args:
        headers: WebSocket handshake request headers.

    Returns:
        Tuple[Optional[str], List[Extension]]: ``Sec-WebSocket-Extensions``
        HTTP response header and list of accepted extensions.

    Raises:
        InvalidHandshake: if the Sec-WebSocket-Extensions header is invalid.

    """
    accepted: List[Extension] = []
    negotiated_headers: List[ExtensionHeader] = []

    raw_values = headers.get_all("Sec-WebSocket-Extensions")

    if raw_values and self.available_extensions:
        # Flatten all header values into a single list of proposals.
        offers: List[ExtensionHeader] = []
        for raw_value in raw_values:
            offers.extend(parse_extension(raw_value))

        for offered_name, offered_params in offers:
            for factory in self.available_extensions:
                # Names must match before parameters are even considered.
                if factory.name != offered_name:
                    continue

                # A NegotiationError means this factory declines these
                # parameters; keep looking for another match.
                try:
                    params, extension = factory.process_request_params(
                        offered_params, accepted
                    )
                except NegotiationError:
                    continue

                # Record the accepted extension and stop at the first match.
                negotiated_headers.append((offered_name, params))
                accepted.append(extension)
                break

            # Falling out of the inner loop without a break means no
            # server-side extension matched; the proposal is declined.

    # Serialize the response header only if something was accepted.
    header_value: Optional[str] = None
    if negotiated_headers:
        header_value = build_extension(negotiated_headers)

    return header_value, accepted
|
| 392 |
+
|
| 393 |
+
def process_subprotocol(self, headers: Headers) -> Optional[Subprotocol]:
    """
    Handle the Sec-WebSocket-Protocol HTTP request header.

    Args:
        headers: WebSocket handshake request headers.

    Returns:
        Optional[Subprotocol]: Subprotocol, if one was selected; this is
        also the value of the ``Sec-WebSocket-Protocol`` response header.

    Raises:
        InvalidHandshake: if the Sec-WebSocket-Subprotocol header is invalid.

    """
    # Collect every subprotocol offered across all header values.
    offered: List[Subprotocol] = []
    for header_value in headers.get_all("Sec-WebSocket-Protocol"):
        offered.extend(parse_subprotocol(header_value))

    return self.select_subprotocol(offered)
|
| 417 |
+
|
| 418 |
+
def select_subprotocol(
    self,
    subprotocols: Sequence[Subprotocol],
) -> Optional[Subprotocol]:
    """
    Pick a subprotocol among those offered by the client.

    If both sides support several subprotocols, the first one in the
    server's list that the client also offered is selected. If the server
    supports no subprotocols at all, the connection continues without one,
    whatever the client offered. If the server supports at least one
    subprotocol and the client offered none, the handshake is aborted with
    an HTTP 400 error.

    This logic can be overridden via the ``select_subprotocol`` argument
    of :class:`ServerProtocol` — for example to accept the connection even
    when the client offers no subprotocol. To negotiate ``chat`` when the
    client supports it and continue without a subprotocol otherwise::

        def select_subprotocol(protocol, subprotocols):
            if "chat" in subprotocols:
                return "chat"

    Args:
        subprotocols: list of subprotocols offered by the client.

    Returns:
        Optional[Subprotocol]: Selected subprotocol, if a common
        subprotocol was found.

        :obj:`None` to continue without a subprotocol.

    Raises:
        NegotiationError: custom implementations may raise this exception
            to abort the handshake with an HTTP 400 error.

    """
    # No subprotocols on the server side (None or empty list): skip
    # negotiation entirely.
    if not self.available_subprotocols:
        return None

    # Server expects a subprotocol but the client offered none.
    if not subprotocols:
        raise NegotiationError("missing subprotocol")

    # Both sides offer subprotocols; honor the server's preference order.
    offered = set(subprotocols)
    for candidate in self.available_subprotocols:
        if candidate in offered:
            return candidate

    # Nothing in common.
    supported = ", ".join(self.available_subprotocols)
    raise NegotiationError("invalid subprotocol; expected one of " + supported)
|
| 478 |
+
|
| 479 |
+
def reject(
    self,
    status: http.HTTPStatus,
    text: str,
) -> Response:
    """
    Create a handshake response to reject the connection.

    A short plain text response is the best fallback when failing to
    establish a WebSocket connection. The response must still be sent
    with :meth:`send_response`; it may be modified first, for example
    to alter HTTP headers.

    Args:
        status: HTTP status code.
        text: HTTP response body; will be encoded to UTF-8.

    Returns:
        Response: WebSocket handshake response event to send to the client.

    """
    payload = text.encode()
    response_headers = Headers(
        [
            ("Date", email.utils.formatdate(usegmt=True)),
            ("Connection", "close"),
            ("Content-Length", str(len(payload))),
            ("Content-Type", "text/plain; charset=utf-8"),
        ]
    )
    response = Response(status.value, status.phrase, response_headers, payload)
    # When reject() is invoked from accept(), handshake_exc was already set.
    # When a user calls reject() directly, set it here to keep the invariant:
    # "handshake_exc is None if and only if opening handshake succeeded."
    if self.handshake_exc is None:
        self.handshake_exc = InvalidStatus(response)
    self.logger.info("connection failed (%d %s)", status.value, status.phrase)
    return response
|
| 519 |
+
|
| 520 |
+
def send_response(self, response: Response) -> None:
    """
    Send a handshake response to the client.

    Args:
        response: WebSocket handshake response event to send.

    """
    if self.debug:
        code, phrase = response.status_code, response.reason_phrase
        self.logger.debug("> HTTP/1.1 %d %s", code, phrase)
        for key, value in response.headers.raw_items():
            self.logger.debug("> %s: %s", key, value)
        if response.body is not None:
            self.logger.debug("> [body] (%d bytes)", len(response.body))

    # Queue the serialized response for the transport layer to write.
    self.writes.append(response.serialize())

    if response.status_code == 101:
        # 101 Switching Protocols: the handshake succeeded, move to OPEN.
        assert self.state is CONNECTING
        self.state = OPEN
    else:
        # Any other status rejects the handshake: close the write side and
        # switch the parser to discarding any further incoming data.
        self.send_eof()
        self.parser = self.discard()
        next(self.parser)  # start coroutine
|
| 545 |
+
|
| 546 |
+
def parse(self) -> Generator[None, None, None]:
    # Parse the opening handshake request when still CONNECTING, then
    # delegate to the base class parser for WebSocket frames.
    if self.state is CONNECTING:
        try:
            request = yield from Request.parse(
                self.reader.read_line,
            )
        except Exception as exc:
            # Handshake parsing failed: record the failure, close the
            # write side, and discard any further incoming data.
            self.handshake_exc = exc
            self.send_eof()
            self.parser = self.discard()
            next(self.parser)  # start coroutine
            # NOTE(review): self.parser now points at discard(), so this
            # generator is presumably never resumed past this yield —
            # confirm; otherwise `request` would be unbound below.
            yield

        if self.debug:
            self.logger.debug("< GET %s HTTP/1.1", request.path)
            for key, value in request.headers.raw_items():
                self.logger.debug("< %s: %s", key, value)

        # Surface the parsed handshake request to the caller.
        self.events.append(request)

    yield from super().parse()
|
| 567 |
+
|
| 568 |
+
|
| 569 |
+
class ServerConnection(ServerProtocol):
    """
    Deprecated alias for :class:`ServerProtocol`.

    Instantiating this class emits a :class:`DeprecationWarning` and
    otherwise behaves exactly like :class:`ServerProtocol`.
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        warnings.warn(
            "ServerConnection was renamed to ServerProtocol",
            DeprecationWarning,
            # Attribute the warning to the caller instantiating this
            # class, not to this wrapper frame.
            stacklevel=2,
        )
        super().__init__(*args, **kwargs)
|
vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/autograd/context/container.h
ADDED
|
@@ -0,0 +1,167 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
#include <atomic>
#include <cstdint>
#include <mutex>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include <torch/csrc/distributed/autograd/context/context.h>
|
| 7 |
+
|
| 8 |
+
namespace torch {
|
| 9 |
+
namespace distributed {
|
| 10 |
+
namespace autograd {
|
| 11 |
+
|
| 12 |
+
// Singleton class per worker which is responsible for storing the distributed
|
| 13 |
+
// autograd context for each autograd pass and also cleans up data for an
|
| 14 |
+
// autograd pass once its done.
|
| 15 |
+
//
|
| 16 |
+
// Each autograd pass is assigned a unique autograd_context_id and all data for
|
| 17 |
+
// that pass (DistAutogradContext) is stored in this container indexed by the
|
| 18 |
+
// autograd_context_id. The autograd_context_id itself is a 64 bit globally
|
| 19 |
+
// unique id. The first 16 bits is the worker_id and the next 48 bits is an
|
| 20 |
+
// auto-incrementing id for each worker.
|
| 21 |
+
//
|
| 22 |
+
// This container is also responsible for maintaining a globally unique message
|
| 23 |
+
// id, which is used to associate send/recv autograd function pairs. The format
|
| 24 |
+
// is similar to the autograd_context_id where we have a 64 bit integer with
|
| 25 |
+
// first 16 bits being the worker id and next 48 bits are auto-incrementing.
|
| 26 |
+
class TORCH_API DistAutogradContainer {
 public:
  // 'num_shards' controls how many shards the context map is split into;
  // see computeNumShards() for how the default is chosen.
  explicit DistAutogradContainer(uint32_t num_shards);

  // One time initialization of the container.
  static DistAutogradContainer& init(int64_t worker_id);

  // Retrieve the singleton instance of the container, ensures we have
  // initialized the container.
  static DistAutogradContainer& getInstance();

  // Create a new context for a distributed autograd pass.
  const ContextPtr newContext();

  // Clean up resources for a given context_id once the autograd pass is done.
  // Sends RPC to other workers this worker knows about, telling them to clean
  // up their context as well. Throws an exception if the context_id does not
  // exist.
  void releaseContext(int64_t context_id);

  // Releases an autograd context if it is present on this node. Also sends RPC
  // to other workers this worker knows about, telling them to clean up their
  // context. Does nothing if it is not present.
  void releaseContextIfPresent(int64_t context_id);

  // Checks if the passed in context_id is valid.
  void isValidContext(int64_t context_id);

  // Retrieve the autograd context for a given context_id.
  ContextPtr retrieveContext(int64_t context_id);

  // Retrieves the currently active autograd context for the current thread.
  ContextPtr currentContext();

  // Checks whether or not the current thread has a valid autograd context.
  bool hasValidContext() const;

  // Generate a new autograd_message_id for send/recv autograd functions.
  int64_t newAutogradMessageId();

  // Creates a new autograd context with the provided context_id. If a context
  // already exists with the provided context_id, we just return it.
  // This does not set the current context for the current thread.
  ContextPtr getOrCreateContext(int64_t context_id);

  // Retrieves the maximum possible autograd_context_id/autograd_message_id that
  // can be generated by this worker.
  int64_t getMaxId();

  // Retrieves the worker ID for this node
  rpc::worker_id_t getWorkerId() const;

  // Can set current context id if there is no valid context yet
  static void setCurrentContextId(int64_t contextId);

  // Forcibly sets the thread local current context id. Should only be used in
  // cases where you know what you're doing and need to override the thread
  // local. Otherwise, use setCurrentContextId instead.
  static void forceCurrentContextId(int64_t contextId);

  // Clear current context id
  void clearCurrentContext();

  // Returns the number of autograd contexts in the container.
  size_t numAutogradContexts() const;

  // Returns the current thread local context id for this thread.
  static int64_t currentContextId();

  // Neither copyable nor movable: this is a per-process singleton.
  DistAutogradContainer(const DistAutogradContainer&) = delete;
  DistAutogradContainer& operator=(const DistAutogradContainer&) = delete;
  DistAutogradContainer(DistAutogradContainer&&) = delete;
  DistAutogradContainer& operator=(DistAutogradContainer&&) = delete;

 private:
  // Number of shards for the map storing autograd contexts. We'd like this
  // to be a power of 2 and we don't expect a value much higher than the
  // number of cores would provide much benefit.
  static constexpr uint32_t kNumDefaultShards = 128;

  // Use cache line size for alignment.
  static constexpr int kCacheLineSize = 64;

  // Structure holding one shard of the sharded autograd context map with its
  // associated lock. Align to cache line size to avoid contention between
  // adjacent entries.
  struct alignas(kCacheLineSize) ContextsShard {
    // Lock for this shard.
    mutable std::mutex lock;

    // Map storing autograd contexts for this shard.
    std::unordered_map<int64_t, ContextPtr> contexts;
  };

  DistAutogradContainer() = delete;
  ~DistAutogradContainer() = default;

  static DistAutogradContainer& getInstanceInternal();

  // Retrieve the shard for given context_id.
  ContextsShard& getShard(int64_t context_id);

  // Sends an RPC to the workers that have a context corresponding to passed in
  // context_id. This function should be called with the lock.
  void sendReleaseContextRpc(
      const std::unordered_set<rpc::worker_id_t>& workerIds,
      int64_t context_id);

  // Erase context_id from the autograd context map, and reset the thread local
  // current context id if it corresponds to the passed in context id. This
  // function should be called with the lock.
  void eraseContextIdAndReset(ContextsShard& shard, int64_t context_id);

  // Compute the number of shards for the autograd_contexts_ map.
  static uint32_t computeNumShards();

  // Auto incrementing context id used to identify unique autograd passes.
  // Initialized with the first 16 bits being the worker_id.
  std::atomic<int64_t> next_context_id_;

  // Unique id to identify a worker in the distributed setting.
  int16_t worker_id_;

  // Whether or not the container has been initialized appropriately.
  bool initialized_;

  // Sharded autograd context map.
  std::vector<ContextsShard> autograd_contexts_;

  // Number of shards for the sharded autograd_contexts_ map.
  uint32_t num_shards_;

  // Autograd message id to identify unique send/recv autograd function pairs.
  std::atomic<int64_t> next_autograd_message_id_;

  // Maximum allowed value for autograd_context_id or autograd_message_id.
  int64_t max_id_;
};
|
| 164 |
+
|
| 165 |
+
} // namespace autograd
|
| 166 |
+
} // namespace distributed
|
| 167 |
+
} // namespace torch
|
vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/autograd/context/context.h
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
#include <cstdint>
|
| 4 |
+
#include <functional>
|
| 5 |
+
|
| 6 |
+
#include <ATen/core/Dict.h>
|
| 7 |
+
#include <torch/csrc/autograd/engine.h>
|
| 8 |
+
#include <torch/csrc/distributed/autograd/functions/recvrpc_backward.h>
|
| 9 |
+
#include <torch/csrc/distributed/autograd/functions/sendrpc_backward.h>
|
| 10 |
+
#include <torch/csrc/distributed/rpc/rpc_agent.h>
|
| 11 |
+
|
| 12 |
+
namespace torch {
|
| 13 |
+
namespace distributed {
|
| 14 |
+
namespace autograd {
|
| 15 |
+
|
| 16 |
+
class RecvRpcBackward;
|
| 17 |
+
|
| 18 |
+
// DistAutogradContext which stores information for a single distributed
|
| 19 |
+
// autograd pass on a worker.
|
| 20 |
+
class TORCH_API DistAutogradContext {
 public:
  // Callback receiving a mutable gradient; it returns true when the grad
  // stored in the context needs to be updated.
  using GradCallback = std::function<bool(torch::Tensor&)>;

  explicit DistAutogradContext(int64_t contextId);

  // Retrieves the autograd context id for this context.
  int64_t contextId() const;

  // Records a 'send' autograd function for this context with the provided
  // message id.
  void addSendFunction(
      const std::shared_ptr<SendRpcBackward>& func,
      int64_t autograd_message_id);

  // Records a 'recv' autograd function for this context with the provided
  // message id.
  void addRecvFunction(
      std::shared_ptr<RecvRpcBackward>& func,
      int64_t autograd_message_id);

  // Given an autograd_message_id, retrieve the appropriate send function.
  std::shared_ptr<SendRpcBackward> retrieveSendFunction(
      int64_t autograd_message_id);

  // Return all send functions for this context.
  std::unordered_map<int64_t, std::shared_ptr<SendRpcBackward>> sendFunctions()
      const;

  // Return all recv functions for this context.
  std::unordered_map<int64_t, std::shared_ptr<RecvRpcBackward>> recvFunctions()
      const;

  // Adds a future message recording an outstanding RPC.
  void addOutstandingRpc(const c10::intrusive_ptr<rpc::JitFuture>& jitFuture);

  // Returns all gradients.
  const c10::Dict<torch::Tensor, torch::Tensor> getGradients() const;

  // This function gives a mutable grad reference to the callback.
  // If the callback returns true, it means the grad in the context
  // needs to be updated.
  void runGradCallbackForVariable(
      const torch::autograd::Variable& variable,
      GradCallback&& cb);

  // Neither copyable nor movable.
  DistAutogradContext(const DistAutogradContext&) = delete;
  DistAutogradContext& operator=(const DistAutogradContext&) = delete;
  DistAutogradContext(DistAutogradContext&&) = delete;
  DistAutogradContext& operator=(DistAutogradContext&&) = delete;

  // records the workerID of a node that we sent an RPC to.
  // workerIDs are added here when we attach a send function to this autograd
  // context
  void addKnownWorkerId(const rpc::worker_id_t workerId);

  // Retrieves a set containing the known workerIds for this context
  // These are the different workers that this context has sent RPCs to.
  std::unordered_set<rpc::worker_id_t> getKnownWorkerIds() const;

 private:
  friend class BackwardPassCleanupGuard;
  friend class DistEngine;
  friend class RecvRpcBackward;
  friend class DistAccumulateGradCaptureHook;

  // Record that we would like to accumulate the provided gradient on the given
  // variable.
  void accumulateGrad(
      const torch::autograd::Variable& variable,
      const torch::Tensor& grad,
      size_t num_expected_refs);

  // Retrieve the GraphTask.
  std::shared_ptr<torch::autograd::GraphTask> retrieveGraphTask();

  // Set the appropriate graph task for the backward pass. Can be called only
  // once.
  void setGraphTask(std::shared_ptr<torch::autograd::GraphTask> graphTask);

  // Resets the graph task to ensure we can run another distributed backward
  // pass for the same autograd context.
  void resetGraphTask();

  // Waits for all outstanding RPCs for this context to finish and clears all
  // outstanding rpcs held in this context. This should be called only once.
  c10::intrusive_ptr<c10::ivalue::Future> clearAndWaitForOutstandingRpcsAsync();

  void clearOutstandingRpcs();

  // Record an event to mark the completion of gradient computation. These
  // events will later help to properly synchronize gradients consumptions
  // in getGradients(). We need these events because backward and
  // optimizer.step are separate RPC calls, and will occur on different CUDA
  // streams. Without synchronization, it is possible that gradients are
  // consumed before they are ready.
  void recordGradEvent(c10::Device device);

  const int64_t contextId_;

  // Set containing known worker IDs, used in cleaning up autograd context.
  // Whenever a sendRpcBackward is attached to the autograd graph for this
  // context, the destination is added here.
  std::unordered_set<rpc::worker_id_t> knownWorkerIds_;

  // Map from autograd_message_id to appropriate 'send' autograd function.
  std::unordered_map<int64_t, std::shared_ptr<SendRpcBackward>>
      sendAutogradFunctions_;

  // Map from autograd_message_id to appropriate 'recv' autograd function.
  std::unordered_map<int64_t, std::shared_ptr<RecvRpcBackward>>
      recvAutogradFunctions_;

  // Gradients accumulated in this context so far. The key is the variable on
  // which the gradient needs to be accumulated and the value is the gradient
  // that needs to be accumulated on that variable..
  c10::Dict<torch::Tensor, torch::Tensor> accumulatedGrads_;

  // See comments for recordGradEvent(c10::Device device);
  std::unordered_map<c10::Device, c10::Event> gradReadyEvents_;
  const c10::impl::VirtualGuardImpl impl_;

  // The autograd GraphTask for the backward pass on this node for this context.
  std::shared_ptr<torch::autograd::GraphTask> graphTask_;

  // List of futures for RPCs initiated by this node to propagate gradients to
  // other nodes. The distributed autograd engine on this node can return
  // successfully only if all these futures are done and are successful.
  std::vector<c10::intrusive_ptr<rpc::JitFuture>> outStandingRpcs_;

  // Lock to protect concurrent modification of the context.
  mutable std::mutex lock_;
};
|
| 153 |
+
|
| 154 |
+
// Shared-ownership handle to a DistAutogradContext.
using ContextPtr = std::shared_ptr<DistAutogradContext>;
|
| 155 |
+
|
| 156 |
+
// This class stores a shared_ptr to a DistAutogradContext instance in a
|
| 157 |
+
// thread local variable. The instance is given by the call site. The class
|
| 158 |
+
// doesn't know the current context. It's just a util class.
|
| 159 |
+
class TORCH_API ThreadLocalDistAutogradContext {
 public:
  // Store 'new_context' to the thread local variable maintained by this class.
  explicit ThreadLocalDistAutogradContext(ContextPtr&& new_context);
  // NOTE(review): given prev_context_ptr_ below, the destructor presumably
  // restores the previously stored context — confirm in the implementation.
  ~ThreadLocalDistAutogradContext();

  // Retrieve the stored DistAutogradContext instance.
  static ContextPtr getContextPtr();

 private:
  // Context that was stored before this object was constructed.
  ContextPtr prev_context_ptr_;
};
|
| 171 |
+
|
| 172 |
+
} // namespace autograd
|
| 173 |
+
} // namespace distributed
|
| 174 |
+
} // namespace torch
|
vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/autograd/functions/recvrpc_backward.h
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
#include <torch/csrc/autograd/function.h>
|
| 4 |
+
#include <torch/csrc/distributed/autograd/context/context.h>
|
| 5 |
+
#include <torch/csrc/distributed/autograd/rpc_messages/autograd_metadata.h>
|
| 6 |
+
#include <torch/csrc/distributed/rpc/rpc_agent.h>
|
| 7 |
+
|
| 8 |
+
namespace torch {
|
| 9 |
+
namespace distributed {
|
| 10 |
+
namespace autograd {
|
| 11 |
+
|
| 12 |
+
// Forward declarations.
|
| 13 |
+
class DistAutogradContext;
|
| 14 |
+
|
| 15 |
+
// As part of our distributed autograd implementation, whenever we receive an
|
| 16 |
+
// RPC from a node, we add a 'RecvRpcBackward' autograd function to the
|
| 17 |
+
// autograd graph. This is more or less a placeholder function that is used to
|
| 18 |
+
// pass gradients to the remote host during the backward pass. The inputs to the
|
| 19 |
+
// RPC function are the inputs to this autograd function.
|
| 20 |
+
class TORCH_API RecvRpcBackward : public torch::autograd::Node {
 public:
  explicit RecvRpcBackward(
      const AutogradMetadata& autogradMetadata,
      std::shared_ptr<DistAutogradContext> autogradContext,
      rpc::worker_id_t fromWorkerId,
      rpc::DeviceMap deviceMap);

  // Backward entry point invoked by the autograd engine with the incoming
  // gradients for this node.
  torch::autograd::variable_list apply(
      torch::autograd::variable_list&& grads) override;

 private:
  const AutogradMetadata autogradMetadata_;

  // Hold a weak reference to the autograd context to avoid circular
  // dependencies with the context (since it holds a reference to
  // RecvRpcBackward).
  std::weak_ptr<DistAutogradContext> autogradContext_;

  // The worker id from which the RPC was received. During the backward pass,
  // we need to propagate the gradients to this workerId.
  rpc::worker_id_t fromWorkerId_;

  // Device mapping for tensors sent over RPC.
  const rpc::DeviceMap deviceMap_;
};
|
| 46 |
+
|
| 47 |
+
} // namespace autograd
|
| 48 |
+
} // namespace distributed
|
| 49 |
+
} // namespace torch
|
vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/autograd/functions/sendrpc_backward.h
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
#include <torch/csrc/autograd/function.h>
|
| 4 |
+
|
| 5 |
+
namespace torch {
|
| 6 |
+
namespace distributed {
|
| 7 |
+
namespace autograd {
|
| 8 |
+
|
| 9 |
+
// As part of our distributed autograd implementation, whenever we send an RPC
|
| 10 |
+
// from one node to another, we add a 'SendRpcBackward' autograd function to the
|
| 11 |
+
// autograd graph. This is more or less a placeholder function that is used to
|
| 12 |
+
// kickoff the autograd engine on the current worker on the backward pass. The
|
| 13 |
+
// edges for this autograd function are the inputs to the RPC method.
|
| 14 |
+
//
|
| 15 |
+
// During the backward pass, this function is queued for execution in the
|
| 16 |
+
// autograd engine which eventually runs the rest of the autograd graph.
|
| 17 |
+
struct TORCH_API SendRpcBackward : public torch::autograd::Node {
|
| 18 |
+
public:
|
| 19 |
+
torch::autograd::variable_list apply(
|
| 20 |
+
torch::autograd::variable_list&& inputs) override;
|
| 21 |
+
|
| 22 |
+
// SendRpcBackward is actually the root of an autograd graph on the local
|
| 23 |
+
// node. As a result, it doesn't receive any 'inputs', but rather the RPC
|
| 24 |
+
// framework passes gradients over to this function to kickoff local autograd
|
| 25 |
+
// computation.
|
| 26 |
+
void setGrads(const torch::autograd::variable_list& grads);
|
| 27 |
+
|
| 28 |
+
// Retrieve the grads for the function.
|
| 29 |
+
const torch::autograd::variable_list& getGrads() const;
|
| 30 |
+
|
| 31 |
+
private:
|
| 32 |
+
torch::autograd::variable_list grads_;
|
| 33 |
+
};
|
| 34 |
+
|
| 35 |
+
} // namespace autograd
|
| 36 |
+
} // namespace distributed
|
| 37 |
+
} // namespace torch
|
vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/autograd/rpc_messages/rref_backward_req.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
#include <torch/csrc/distributed/rpc/message.h>
|
| 4 |
+
#include <torch/csrc/distributed/rpc/rpc_command_base.h>
|
| 5 |
+
#include <torch/csrc/distributed/rpc/types.h>
|
| 6 |
+
|
| 7 |
+
namespace torch {
|
| 8 |
+
namespace distributed {
|
| 9 |
+
namespace autograd {
|
| 10 |
+
|
| 11 |
+
// Internal system RPC to invoke distributed backward pass on remote nodes when
|
| 12 |
+
// 'rref.backward()' is invoked.
|
| 13 |
+
class TORCH_API RRefBackwardReq : public rpc::RpcCommandBase {
|
| 14 |
+
public:
|
| 15 |
+
RRefBackwardReq(
|
| 16 |
+
const rpc::RRefId& rrefId,
|
| 17 |
+
int64_t autogradContextId,
|
| 18 |
+
bool retainGraph = false);
|
| 19 |
+
|
| 20 |
+
const rpc::RRefId& getRRefId() const;
|
| 21 |
+
|
| 22 |
+
int64_t getAutogradContextId() const;
|
| 23 |
+
|
| 24 |
+
bool retainGraph() const;
|
| 25 |
+
|
| 26 |
+
// Serialization and deserialization methods.
|
| 27 |
+
c10::intrusive_ptr<rpc::Message> toMessageImpl() && override;
|
| 28 |
+
static std::unique_ptr<RRefBackwardReq> fromMessage(
|
| 29 |
+
const rpc::Message& message);
|
| 30 |
+
|
| 31 |
+
private:
|
| 32 |
+
const rpc::RRefId rrefId_;
|
| 33 |
+
const int64_t autogradContextId_;
|
| 34 |
+
const bool retainGraph_;
|
| 35 |
+
};
|
| 36 |
+
|
| 37 |
+
} // namespace autograd
|
| 38 |
+
} // namespace distributed
|
| 39 |
+
} // namespace torch
|
vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/c10d/Backend.hpp
ADDED
|
@@ -0,0 +1,416 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
#include <memory>
|
| 4 |
+
#include <utility>
|
| 5 |
+
#include <vector>
|
| 6 |
+
|
| 7 |
+
#include <ATen/ATen.h>
|
| 8 |
+
#include <c10/macros/Macros.h>
|
| 9 |
+
|
| 10 |
+
#include <torch/csrc/distributed/c10d/Types.hpp>
|
| 11 |
+
#include <torch/csrc/distributed/c10d/Utils.hpp>
|
| 12 |
+
#include <torch/csrc/distributed/c10d/Work.hpp>
|
| 13 |
+
#include <torch/csrc/distributed/c10d/debug.h>
|
| 14 |
+
|
| 15 |
+
constexpr auto kBackendDefaultTimeout =
|
| 16 |
+
std::chrono::milliseconds(30 * 60 * 1000);
|
| 17 |
+
|
| 18 |
+
namespace c10d {
|
| 19 |
+
|
| 20 |
+
class TORCH_API Backend : public torch::CustomClassHolder {
|
| 21 |
+
public:
|
| 22 |
+
// Backend Options is a base struct that defines the basic options
|
| 23 |
+
// when constructing a Backend. Each Backend subclass should
|
| 24 |
+
// extend this struct and define its options if it wants to provide more
|
| 25 |
+
// config options (beyond basic ones defined here) to end user.
|
| 26 |
+
struct TORCH_API Options : torch::CustomClassHolder {
|
| 27 |
+
explicit Options(
|
| 28 |
+
std::string backend,
|
| 29 |
+
std::chrono::milliseconds timeout = kBackendDefaultTimeout)
|
| 30 |
+
: timeout(timeout), backend(std::move(backend)) {}
|
| 31 |
+
~Options() override = default;
|
| 32 |
+
|
| 33 |
+
std::chrono::milliseconds timeout;
|
| 34 |
+
|
| 35 |
+
// backend name
|
| 36 |
+
// NOLINTNEXTLINE(cppcoreguidelines-avoid-const-or-ref-data-members)
|
| 37 |
+
const std::string backend;
|
| 38 |
+
};
|
| 39 |
+
|
| 40 |
+
explicit Backend(int rank, int size);
|
| 41 |
+
~Backend() override = 0;
|
| 42 |
+
|
| 43 |
+
int getRank() const {
|
| 44 |
+
return rank_;
|
| 45 |
+
}
|
| 46 |
+
|
| 47 |
+
int getSize() const {
|
| 48 |
+
return size_;
|
| 49 |
+
}
|
| 50 |
+
|
| 51 |
+
// Returns an unique opaque ID of this backend that can be used to correlate
|
| 52 |
+
// with its collectives.
|
| 53 |
+
int64_t getID() const {
|
| 54 |
+
return reinterpret_cast<std::intptr_t>(this);
|
| 55 |
+
}
|
| 56 |
+
|
| 57 |
+
virtual bool supportsSplitting() const {
|
| 58 |
+
return false;
|
| 59 |
+
}
|
| 60 |
+
|
| 61 |
+
virtual void startCoalescing() {
|
| 62 |
+
TORCH_CHECK(
|
| 63 |
+
false,
|
| 64 |
+
c10::str(
|
| 65 |
+
"Backend ",
|
| 66 |
+
getBackendName(),
|
| 67 |
+
" does not implement startCoalescing"));
|
| 68 |
+
}
|
| 69 |
+
|
| 70 |
+
virtual c10::intrusive_ptr<Work> endCoalescing() {
|
| 71 |
+
TORCH_CHECK(
|
| 72 |
+
false,
|
| 73 |
+
c10::str(
|
| 74 |
+
"Backend ", getBackendName(), " does not implement endCoalescing"));
|
| 75 |
+
}
|
| 76 |
+
|
| 77 |
+
// Subclasses must override this method to return the backend name
|
| 78 |
+
virtual const std::string getBackendName() const {
|
| 79 |
+
TORCH_INTERNAL_ASSERT(false, "getBackendName is not implemented.");
|
| 80 |
+
};
|
| 81 |
+
|
| 82 |
+
virtual c10::intrusive_ptr<Work> broadcast(
|
| 83 |
+
std::vector<at::Tensor>& /* tensors */,
|
| 84 |
+
const BroadcastOptions& /* opts */ = BroadcastOptions()) {
|
| 85 |
+
TORCH_CHECK(
|
| 86 |
+
false,
|
| 87 |
+
c10::str("Backend ", getBackendName(), " does not support broadcast"));
|
| 88 |
+
}
|
| 89 |
+
|
| 90 |
+
virtual c10::intrusive_ptr<Work> allreduce(
|
| 91 |
+
std::vector<at::Tensor>& /* tensors */,
|
| 92 |
+
const AllreduceOptions& /* opts */ = AllreduceOptions()) {
|
| 93 |
+
TORCH_CHECK(
|
| 94 |
+
false,
|
| 95 |
+
c10::str("Backend ", getBackendName(), " does not support allreduce"));
|
| 96 |
+
}
|
| 97 |
+
|
| 98 |
+
virtual c10::intrusive_ptr<Work> allreduce_sparse(
|
| 99 |
+
std::vector<at::Tensor>& /* tensors */,
|
| 100 |
+
const AllreduceOptions& /* opts */ = AllreduceOptions()) {
|
| 101 |
+
TORCH_CHECK(
|
| 102 |
+
false,
|
| 103 |
+
c10::str(
|
| 104 |
+
"Backend ",
|
| 105 |
+
getBackendName(),
|
| 106 |
+
" does not support allreduce sparse"));
|
| 107 |
+
}
|
| 108 |
+
|
| 109 |
+
virtual c10::intrusive_ptr<Work> allreduce_coalesced(
|
| 110 |
+
std::vector<at::Tensor>& /* tensors */,
|
| 111 |
+
const AllreduceCoalescedOptions& /* opts */ =
|
| 112 |
+
AllreduceCoalescedOptions()) {
|
| 113 |
+
TORCH_CHECK(
|
| 114 |
+
false,
|
| 115 |
+
c10::str(
|
| 116 |
+
"Backend ",
|
| 117 |
+
getBackendName(),
|
| 118 |
+
" does not support allreduce_coalesced"));
|
| 119 |
+
}
|
| 120 |
+
|
| 121 |
+
virtual c10::intrusive_ptr<Work> reduce(
|
| 122 |
+
std::vector<at::Tensor>& /* tensors */,
|
| 123 |
+
const ReduceOptions& /* opts */ = ReduceOptions()) {
|
| 124 |
+
TORCH_CHECK(
|
| 125 |
+
false,
|
| 126 |
+
c10::str("Backend ", getBackendName(), " does not support reduce"));
|
| 127 |
+
}
|
| 128 |
+
|
| 129 |
+
virtual c10::intrusive_ptr<Work> allgather(
|
| 130 |
+
std::vector<std::vector<at::Tensor>>& /* outputTensors */,
|
| 131 |
+
std::vector<at::Tensor>& /* inputTensors */,
|
| 132 |
+
const AllgatherOptions& /* opts */ = AllgatherOptions()) {
|
| 133 |
+
TORCH_CHECK(
|
| 134 |
+
false,
|
| 135 |
+
c10::str("Backend ", getBackendName(), " does not support allgather"));
|
| 136 |
+
}
|
| 137 |
+
|
| 138 |
+
// Gathers a single tensor inputBuffer into a single buffer outputBuffer that
|
| 139 |
+
// is interpreted as a contiguous collection of size inputBuffer * WORLD_SIZE.
|
| 140 |
+
// For implementers of ProcessGroup API and advanced users only.
|
| 141 |
+
// Note: this function will be deprecated in near future.
|
| 142 |
+
virtual c10::intrusive_ptr<Work> _allgather_base(
|
| 143 |
+
at::Tensor& /* outputBuffer */,
|
| 144 |
+
at::Tensor& /* inputBuffer */,
|
| 145 |
+
const AllgatherOptions& /* opts */ = AllgatherOptions()) {
|
| 146 |
+
TORCH_CHECK(
|
| 147 |
+
false,
|
| 148 |
+
c10::str(
|
| 149 |
+
"Backend ", getBackendName(), " does not support _allgather_base"));
|
| 150 |
+
}
|
| 151 |
+
|
| 152 |
+
// This function is deprecated and will be moved out of Backend to comms:
|
| 153 |
+
// * do not add dependencies on this function,
|
| 154 |
+
// * do not implement it in your Backend, implement _allgather_base
|
| 155 |
+
// instead.
|
| 156 |
+
virtual c10::intrusive_ptr<Work> allgather_coalesced(
|
| 157 |
+
std::vector<std::vector<at::Tensor>>& /* outputTensorLists */,
|
| 158 |
+
std::vector<at::Tensor>& /* inputTensors */,
|
| 159 |
+
const AllgatherOptions& /* opts */ = AllgatherOptions()) {
|
| 160 |
+
TORCH_CHECK(
|
| 161 |
+
false,
|
| 162 |
+
c10::str(
|
| 163 |
+
"Backend ",
|
| 164 |
+
getBackendName(),
|
| 165 |
+
" does not support allgather_coalesced"));
|
| 166 |
+
}
|
| 167 |
+
|
| 168 |
+
// This function is a coalesced version of `allgather_into_tensor` (currently
|
| 169 |
+
// still named as `_allgather_base`). Each tensor in the vector corresponds to
|
| 170 |
+
// an input/output of one `allgather_into_tensor` operation.
|
| 171 |
+
virtual c10::intrusive_ptr<Work> allgather_into_tensor_coalesced(
|
| 172 |
+
std::vector<at::Tensor>& /* outputs */,
|
| 173 |
+
std::vector<at::Tensor>& /* inputs */,
|
| 174 |
+
const AllgatherOptions& /* opts */ = AllgatherOptions()) {
|
| 175 |
+
TORCH_CHECK(
|
| 176 |
+
false,
|
| 177 |
+
c10::str(
|
| 178 |
+
"Backend ",
|
| 179 |
+
getBackendName(),
|
| 180 |
+
" does not support allgather_into_tensor_coalesced"));
|
| 181 |
+
}
|
| 182 |
+
|
| 183 |
+
virtual c10::intrusive_ptr<Work> gather(
|
| 184 |
+
std::vector<std::vector<at::Tensor>>& /* outputTensors */,
|
| 185 |
+
std::vector<at::Tensor>& /* inputTensors */,
|
| 186 |
+
const GatherOptions& /* opts */ = GatherOptions()) {
|
| 187 |
+
TORCH_CHECK(
|
| 188 |
+
false,
|
| 189 |
+
c10::str("Backend ", getBackendName(), " does not support gather"));
|
| 190 |
+
}
|
| 191 |
+
|
| 192 |
+
virtual c10::intrusive_ptr<Work> scatter(
|
| 193 |
+
std::vector<at::Tensor>& /* outputTensors */,
|
| 194 |
+
std::vector<std::vector<at::Tensor>>& /* inputTensors */,
|
| 195 |
+
const ScatterOptions& /* opts */ = ScatterOptions()) {
|
| 196 |
+
TORCH_CHECK(
|
| 197 |
+
false,
|
| 198 |
+
c10::str("Backend ", getBackendName(), " does not support scatter"));
|
| 199 |
+
}
|
| 200 |
+
|
| 201 |
+
virtual c10::intrusive_ptr<Work> reduce_scatter(
|
| 202 |
+
std::vector<at::Tensor>& /* outputTensors */,
|
| 203 |
+
std::vector<std::vector<at::Tensor>>& /* inputTensors */,
|
| 204 |
+
const ReduceScatterOptions& /* opts */ = ReduceScatterOptions()) {
|
| 205 |
+
TORCH_CHECK(
|
| 206 |
+
false,
|
| 207 |
+
c10::str(
|
| 208 |
+
"Backend ", getBackendName(), " does not support reduce_scatter"));
|
| 209 |
+
}
|
| 210 |
+
|
| 211 |
+
virtual c10::intrusive_ptr<Work> _reduce_scatter_base(
|
| 212 |
+
at::Tensor& /* outputBuffer */,
|
| 213 |
+
at::Tensor& /* inputBuffer */,
|
| 214 |
+
const ReduceScatterOptions& /* opts */ = ReduceScatterOptions()) {
|
| 215 |
+
TORCH_CHECK(
|
| 216 |
+
false,
|
| 217 |
+
c10::str(
|
| 218 |
+
"Backend ",
|
| 219 |
+
getBackendName(),
|
| 220 |
+
" does not support _reduce_scatter_base"));
|
| 221 |
+
}
|
| 222 |
+
|
| 223 |
+
// This function is a coalesced version of `reduce_scatter_tensor` (currently
|
| 224 |
+
// still named as `_reduce_scatter_base`). Each tensor in the vector
|
| 225 |
+
// corresponds to an input/output of one `reduce_scatter_tensor` operation.
|
| 226 |
+
virtual c10::intrusive_ptr<Work> reduce_scatter_tensor_coalesced(
|
| 227 |
+
std::vector<at::Tensor>& /* outputs */,
|
| 228 |
+
std::vector<at::Tensor>& /* inputs */,
|
| 229 |
+
const ReduceScatterOptions& /* opts */ = ReduceScatterOptions()) {
|
| 230 |
+
TORCH_CHECK(
|
| 231 |
+
false,
|
| 232 |
+
c10::str(
|
| 233 |
+
"Backend ",
|
| 234 |
+
getBackendName(),
|
| 235 |
+
" does not support reduce_scatter_tensor_coalesced"));
|
| 236 |
+
}
|
| 237 |
+
|
| 238 |
+
virtual c10::intrusive_ptr<Work> alltoall_base(
|
| 239 |
+
at::Tensor& /* outputBuffer */,
|
| 240 |
+
at::Tensor& /* inputBuffer */,
|
| 241 |
+
std::vector<int64_t>& /* outputSplitSizes */,
|
| 242 |
+
std::vector<int64_t>& /* inputSplitSizes */,
|
| 243 |
+
const AllToAllOptions& /* opts */ = AllToAllOptions()) {
|
| 244 |
+
TORCH_CHECK(
|
| 245 |
+
false,
|
| 246 |
+
c10::str(
|
| 247 |
+
"Backend ", getBackendName(), " does not support alltoall_base"));
|
| 248 |
+
}
|
| 249 |
+
|
| 250 |
+
virtual c10::intrusive_ptr<Work> alltoall(
|
| 251 |
+
std::vector<at::Tensor>& /* outputTensors */,
|
| 252 |
+
std::vector<at::Tensor>& /* inputTensors */,
|
| 253 |
+
const AllToAllOptions& opts = AllToAllOptions()) {
|
| 254 |
+
TORCH_CHECK(
|
| 255 |
+
false,
|
| 256 |
+
c10::str("Backend ", getBackendName(), " does not support alltoall"));
|
| 257 |
+
}
|
| 258 |
+
|
| 259 |
+
virtual void monitoredBarrier(
|
| 260 |
+
const BarrierOptions& /* unused */,
|
| 261 |
+
bool /* unused */ = false) {
|
| 262 |
+
auto backendName = getBackendName();
|
| 263 |
+
TORCH_CHECK(
|
| 264 |
+
false,
|
| 265 |
+
c10::str(
|
| 266 |
+
"Backend ",
|
| 267 |
+
backendName,
|
| 268 |
+
" does not support monitoredBarrier, only GLOO supports monitored barrier."));
|
| 269 |
+
}
|
| 270 |
+
|
| 271 |
+
// Agrees on an initial sequence number for the whole group by having rank 0
|
| 272 |
+
// create it and broadcast it to other ranks using the store. Only implemented
|
| 273 |
+
// for GLOO and NCCL backends currently.
|
| 274 |
+
virtual void setSequenceNumberForGroup() {
|
| 275 |
+
auto backendName = getBackendName();
|
| 276 |
+
TORCH_CHECK(
|
| 277 |
+
false,
|
| 278 |
+
c10::str(
|
| 279 |
+
"Backend ",
|
| 280 |
+
backendName,
|
| 281 |
+
" does not yet support sequence numbers."));
|
| 282 |
+
}
|
| 283 |
+
|
| 284 |
+
// Retrieves the current sequence number for the whole group, which should be
|
| 285 |
+
// in sync. If the returned number is not consistent across the group, it
|
| 286 |
+
// may indicate that there is some sort of collective desynchronization.
|
| 287 |
+
virtual uint64_t getSequenceNumberForGroup() {
|
| 288 |
+
auto backendName = getBackendName();
|
| 289 |
+
TORCH_CHECK(
|
| 290 |
+
false,
|
| 291 |
+
c10::str(
|
| 292 |
+
"Backend ",
|
| 293 |
+
backendName,
|
| 294 |
+
" does not yet support sequence numbers."));
|
| 295 |
+
}
|
| 296 |
+
|
| 297 |
+
virtual c10::intrusive_ptr<Work> send(
|
| 298 |
+
std::vector<at::Tensor>& /* tensors */,
|
| 299 |
+
int /* dstRank */,
|
| 300 |
+
int /* tag */) {
|
| 301 |
+
TORCH_CHECK(
|
| 302 |
+
false,
|
| 303 |
+
c10::str("Backend ", getBackendName(), " does not support send"));
|
| 304 |
+
}
|
| 305 |
+
|
| 306 |
+
virtual c10::intrusive_ptr<Work> recv(
|
| 307 |
+
std::vector<at::Tensor>& /* tensors */,
|
| 308 |
+
int /* srcRank */,
|
| 309 |
+
int /* tag */) {
|
| 310 |
+
TORCH_CHECK(
|
| 311 |
+
false,
|
| 312 |
+
c10::str("Backend ", getBackendName(), " does not support recv"));
|
| 313 |
+
}
|
| 314 |
+
|
| 315 |
+
virtual c10::intrusive_ptr<Work> recvAnysource(
|
| 316 |
+
std::vector<at::Tensor>& /* tensors */,
|
| 317 |
+
int /* tag */) {
|
| 318 |
+
TORCH_CHECK(
|
| 319 |
+
false,
|
| 320 |
+
c10::str(
|
| 321 |
+
"Backend ", getBackendName(), " does not support recvAnysource"));
|
| 322 |
+
}
|
| 323 |
+
|
| 324 |
+
virtual c10::intrusive_ptr<Work> barrier(
|
| 325 |
+
const BarrierOptions& /* opts */ = BarrierOptions()) {
|
| 326 |
+
TORCH_CHECK(
|
| 327 |
+
false,
|
| 328 |
+
c10::str("Backend ", getBackendName(), " does not support barrier"));
|
| 329 |
+
}
|
| 330 |
+
|
| 331 |
+
virtual void registerOnCompletionHook(
|
| 332 |
+
std::function<void(std::shared_ptr<WorkInfo>)>&& hook) {
|
| 333 |
+
TORCH_CHECK(
|
| 334 |
+
false,
|
| 335 |
+
"Only ProcessGrouppNCCL supports onCompletion hook, but got ",
|
| 336 |
+
getBackendName(),
|
| 337 |
+
" backend.");
|
| 338 |
+
}
|
| 339 |
+
|
| 340 |
+
virtual void waitForPendingWorks() {
|
| 341 |
+
TORCH_CHECK(
|
| 342 |
+
false,
|
| 343 |
+
"Only ProcessGrouppNCCL supports waitForPendingWorks, but got ",
|
| 344 |
+
getBackendName(),
|
| 345 |
+
" backend.");
|
| 346 |
+
}
|
| 347 |
+
|
| 348 |
+
virtual void enableCollectivesTiming() {
|
| 349 |
+
TORCH_CHECK(
|
| 350 |
+
false,
|
| 351 |
+
"Backend ",
|
| 352 |
+
getBackendName(),
|
| 353 |
+
" is missing implementation of enableCollectivesTiming.");
|
| 354 |
+
}
|
| 355 |
+
|
| 356 |
+
bool hasHooks() const {
|
| 357 |
+
return onCompletionHook_ != nullptr;
|
| 358 |
+
}
|
| 359 |
+
|
| 360 |
+
// Do not call this directly, use ProcessGroup::setGroupName instead.
|
| 361 |
+
void setGroupUid(const std::string& pg_uid) {
|
| 362 |
+
pg_uid_ = pg_uid;
|
| 363 |
+
}
|
| 364 |
+
|
| 365 |
+
const std::string& getGroupUid() const {
|
| 366 |
+
return pg_uid_;
|
| 367 |
+
}
|
| 368 |
+
|
| 369 |
+
void setGroupDesc(const std::string& desc) {
|
| 370 |
+
pg_desc_ = desc;
|
| 371 |
+
}
|
| 372 |
+
|
| 373 |
+
const std::string& getGroupDesc() const {
|
| 374 |
+
return pg_desc_;
|
| 375 |
+
}
|
| 376 |
+
|
| 377 |
+
// See similar functions in ProcessGroup.hpp for context.
|
| 378 |
+
std::optional<at::Device> getBoundDeviceId() const {
|
| 379 |
+
return bound_device_id_;
|
| 380 |
+
}
|
| 381 |
+
|
| 382 |
+
// Perform an eager connect to the specified device if the backend supports
|
| 383 |
+
// it.
|
| 384 |
+
virtual void eagerConnectSingleDevice(at::Device device) {
|
| 385 |
+
// no-op in the default case; this is an optimization some
|
| 386 |
+
// backends may perform
|
| 387 |
+
}
|
| 388 |
+
|
| 389 |
+
void setBoundDeviceId(std::optional<at::Device> device) {
|
| 390 |
+
if (device) {
|
| 391 |
+
TORCH_CHECK(device->has_index(), "setBoundDeviceId must have an index");
|
| 392 |
+
}
|
| 393 |
+
bound_device_id_ = device;
|
| 394 |
+
}
|
| 395 |
+
|
| 396 |
+
protected:
|
| 397 |
+
// Implementations of this interface need to call this to setup
|
| 398 |
+
// appropriate logging etc.
|
| 399 |
+
void init();
|
| 400 |
+
|
| 401 |
+
// NOLINTNEXTLINE(cppcoreguidelines-avoid-const-or-ref-data-members)
|
| 402 |
+
const int rank_;
|
| 403 |
+
// NOLINTNEXTLINE(cppcoreguidelines-avoid-const-or-ref-data-members)
|
| 404 |
+
const int size_;
|
| 405 |
+
// Debug level setting. It is parsed once when ProcessGroup is constructed and
|
| 406 |
+
// remains the same across use of this process group.
|
| 407 |
+
DebugLevel dist_debug_level_;
|
| 408 |
+
std::string pg_uid_;
|
| 409 |
+
std::string pg_desc_;
|
| 410 |
+
|
| 411 |
+
std::function<void(std::shared_ptr<WorkInfo>)> onCompletionHook_;
|
| 412 |
+
|
| 413 |
+
std::optional<at::Device> bound_device_id_;
|
| 414 |
+
};
|
| 415 |
+
|
| 416 |
+
} // namespace c10d
|
vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/c10d/CUDASymmetricMemory.hpp
ADDED
|
@@ -0,0 +1,115 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
#include <ATen/ATen.h>
|
| 4 |
+
#include <torch/csrc/distributed/c10d/Store.hpp>
|
| 5 |
+
#include <torch/csrc/distributed/c10d/SymmetricMemory.hpp>
|
| 6 |
+
|
| 7 |
+
namespace c10d {
|
| 8 |
+
namespace symmetric_memory {
|
| 9 |
+
|
| 10 |
+
#if !defined(USE_ROCM) && defined(PYTORCH_C10_DRIVER_API_SUPPORTED)
|
| 11 |
+
using HandleType = CUmemGenericAllocationHandle;
|
| 12 |
+
#else
|
| 13 |
+
using HandleType = void*;
|
| 14 |
+
#endif
|
| 15 |
+
|
| 16 |
+
class CUDASymmetricMemory : public SymmetricMemory {
|
| 17 |
+
public:
|
| 18 |
+
CUDASymmetricMemory(
|
| 19 |
+
std::vector<HandleType> handles,
|
| 20 |
+
size_t block_size,
|
| 21 |
+
std::vector<void*> buffers,
|
| 22 |
+
std::vector<void*> signal_pads,
|
| 23 |
+
HandleType mc_handle,
|
| 24 |
+
void* mc_addr,
|
| 25 |
+
size_t buffer_size,
|
| 26 |
+
int local_device_idx,
|
| 27 |
+
int rank,
|
| 28 |
+
int world_size);
|
| 29 |
+
|
| 30 |
+
~CUDASymmetricMemory() override;
|
| 31 |
+
|
| 32 |
+
std::vector<void*> get_buffer_ptrs() override;
|
| 33 |
+
std::vector<void*> get_signal_pad_ptrs() override;
|
| 34 |
+
void** get_buffer_ptrs_dev() override;
|
| 35 |
+
void** get_signal_pad_ptrs_dev() override;
|
| 36 |
+
size_t get_buffer_size() override;
|
| 37 |
+
size_t get_signal_pad_size() override;
|
| 38 |
+
|
| 39 |
+
bool has_multicast_support() override;
|
| 40 |
+
void* get_multicast_ptr() override;
|
| 41 |
+
|
| 42 |
+
at::Tensor get_buffer(
|
| 43 |
+
int rank,
|
| 44 |
+
c10::IntArrayRef sizes,
|
| 45 |
+
c10::ScalarType dtype,
|
| 46 |
+
int64_t storage_offset) override;
|
| 47 |
+
|
| 48 |
+
void barrier(int channel) override;
|
| 49 |
+
void put_signal(int dst_rank, int channel) override;
|
| 50 |
+
void wait_signal(int src_rank, int channel) override;
|
| 51 |
+
|
| 52 |
+
int get_rank() override;
|
| 53 |
+
int get_world_size() override;
|
| 54 |
+
|
| 55 |
+
private:
|
| 56 |
+
std::vector<HandleType> handles_;
|
| 57 |
+
size_t block_size_;
|
| 58 |
+
std::vector<void*> buffers_;
|
| 59 |
+
std::vector<void*> signal_pads_;
|
| 60 |
+
HandleType mc_handle_;
|
| 61 |
+
void* mc_addr_;
|
| 62 |
+
size_t buffer_size_;
|
| 63 |
+
int local_device_idx_;
|
| 64 |
+
int rank_;
|
| 65 |
+
int world_size_;
|
| 66 |
+
void** buffers_dev_;
|
| 67 |
+
void** signal_pads_dev_;
|
| 68 |
+
std::optional<std::function<void(void)>> finalizer_;
|
| 69 |
+
};
|
| 70 |
+
|
| 71 |
+
struct Block : public c10::intrusive_ptr_target {
|
| 72 |
+
HandleType handle;
|
| 73 |
+
int device_idx;
|
| 74 |
+
size_t block_size;
|
| 75 |
+
size_t buffer_size;
|
| 76 |
+
size_t signal_pad_offset;
|
| 77 |
+
std::string group_name;
|
| 78 |
+
c10::intrusive_ptr<CUDASymmetricMemory> symm_mem = nullptr;
|
| 79 |
+
|
| 80 |
+
Block(
|
| 81 |
+
HandleType handle,
|
| 82 |
+
int device_idx,
|
| 83 |
+
size_t block_size,
|
| 84 |
+
size_t buffer_size,
|
| 85 |
+
size_t signal_pad_offset,
|
| 86 |
+
const std::string& group_name)
|
| 87 |
+
: handle(handle),
|
| 88 |
+
device_idx(device_idx),
|
| 89 |
+
block_size(block_size),
|
| 90 |
+
buffer_size(buffer_size),
|
| 91 |
+
signal_pad_offset(signal_pad_offset),
|
| 92 |
+
group_name(group_name),
|
| 93 |
+
symm_mem(nullptr) {}
|
| 94 |
+
};
|
| 95 |
+
|
| 96 |
+
class CUDASymmetricMemoryAllocator : public SymmetricMemoryAllocator {
|
| 97 |
+
public:
|
| 98 |
+
void* alloc(size_t size, int device_idx, const std::string& group_name)
|
| 99 |
+
override;
|
| 100 |
+
|
| 101 |
+
void free(void* ptr) override;
|
| 102 |
+
size_t get_alloc_size(void* ptr) override;
|
| 103 |
+
c10::intrusive_ptr<SymmetricMemory> rendezvous(void* ptr) override;
|
| 104 |
+
bool is_rendezvous_completed(void* ptr) override;
|
| 105 |
+
bool has_multicast_support(int device_idx) override;
|
| 106 |
+
|
| 107 |
+
private:
|
| 108 |
+
c10::intrusive_ptr<Block> find_block(void* ptr);
|
| 109 |
+
|
| 110 |
+
std::shared_mutex mutex_;
|
| 111 |
+
std::unordered_map<void*, c10::intrusive_ptr<Block>> ptr_to_block_;
|
| 112 |
+
};
|
| 113 |
+
|
| 114 |
+
} // namespace symmetric_memory
|
| 115 |
+
} // namespace c10d
|
vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/c10d/GroupRegistry.hpp
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
#include <torch/csrc/distributed/c10d/ProcessGroup.hpp>
|
| 4 |
+
|
| 5 |
+
namespace c10d {
|
| 6 |
+
|
| 7 |
+
C10_EXPORT void set_thread_isolation_mode(bool enable);
|
| 8 |
+
|
| 9 |
+
bool get_thread_isolation_mode();
|
| 10 |
+
|
| 11 |
+
C10_EXPORT void register_process_group(
|
| 12 |
+
const std::string& group_name,
|
| 13 |
+
c10::intrusive_ptr<c10d::ProcessGroup> group);
|
| 14 |
+
|
| 15 |
+
C10_EXPORT c10::intrusive_ptr<c10d::ProcessGroup> resolve_process_group(
|
| 16 |
+
const std::string& group_name);
|
| 17 |
+
|
| 18 |
+
C10_EXPORT void unregister_process_group(const std::string& group_name);
|
| 19 |
+
|
| 20 |
+
C10_EXPORT void unregister_all_process_groups();
|
| 21 |
+
|
| 22 |
+
} // namespace c10d
|
vllm/lib/python3.10/site-packages/torch/include/torch/csrc/distributed/c10d/NanCheck.hpp
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
#ifdef USE_C10D_NCCL
|
| 4 |
+
|
| 5 |
+
#include <ATen/ATen.h>
|
| 6 |
+
#include <c10/cuda/CUDAStream.h>
|
| 7 |
+
|
| 8 |
+
namespace c10d {
|
| 9 |
+
|
| 10 |
+
// Check for NaNs in a tensor on a given stream. If any are found, throw a
|
| 11 |
+
// device-side error.
|
| 12 |
+
void checkForNan(const at::Tensor& tensor, at::cuda::CUDAStream& stream);
|
| 13 |
+
|
| 14 |
+
} // namespace c10d
|
| 15 |
+
|
| 16 |
+
#endif // USE_C10D_NCCL
|