Datasets:
Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .venv/Lib/site-packages/setuptools-68.2.0.dist-info/INSTALLER +1 -0
- .venv/Lib/site-packages/setuptools-68.2.0.dist-info/LICENSE +17 -0
- .venv/Lib/site-packages/setuptools-68.2.0.dist-info/METADATA +133 -0
- .venv/Lib/site-packages/setuptools-68.2.0.dist-info/RECORD +484 -0
- .venv/Lib/site-packages/setuptools-68.2.0.dist-info/WHEEL +5 -0
- .venv/Lib/site-packages/setuptools-68.2.0.dist-info/entry_points.txt +56 -0
- .venv/Lib/site-packages/setuptools-68.2.0.dist-info/top_level.txt +3 -0
- .venv/Lib/site-packages/setuptools/_distutils/__init__.py +14 -0
- .venv/Lib/site-packages/setuptools/_distutils/_collections.py +194 -0
- .venv/Lib/site-packages/setuptools/_distutils/_functools.py +20 -0
- .venv/Lib/site-packages/setuptools/_distutils/_log.py +4 -0
- .venv/Lib/site-packages/setuptools/_distutils/_macos_compat.py +12 -0
- .venv/Lib/site-packages/setuptools/_distutils/_msvccompiler.py +568 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/__init__.py +25 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/_framework_compat.py +55 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/bdist.py +156 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/bdist_dumb.py +143 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/bdist_rpm.py +614 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/build.py +152 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/build_clib.py +207 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/build_ext.py +788 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/build_py.py +406 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/py37compat.py +31 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/register.py +320 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/sdist.py +530 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/upload.py +206 -0
- .venv/Lib/site-packages/setuptools/_distutils/versionpredicate.py +175 -0
- .venv/Lib/site-packages/setuptools/_vendor/__init__.py +0 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__init__.py +904 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_adapters.py +90 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_collections.py +30 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_compat.py +72 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_functools.py +104 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_itertools.py +73 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_meta.py +49 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_py39compat.py +35 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_text.py +99 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__init__.py +36 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_adapters.py +170 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_common.py +207 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_compat.py +108 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_itertools.py +35 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_legacy.py +120 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_resources/abc.py +170 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_resources/readers.py +120 -0
- .venv/Lib/site-packages/setuptools/_vendor/importlib_resources/simple.py +106 -0
- .venv/Lib/site-packages/setuptools/_vendor/jaraco/__init__.py +0 -0
- .venv/Lib/site-packages/setuptools/_vendor/jaraco/context.py +288 -0
- .venv/Lib/site-packages/setuptools/_vendor/jaraco/functools.py +556 -0
- .venv/Lib/site-packages/setuptools/_vendor/jaraco/text/__init__.py +599 -0
.venv/Lib/site-packages/setuptools-68.2.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
.venv/Lib/site-packages/setuptools-68.2.0.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 2 |
+
of this software and associated documentation files (the "Software"), to
|
| 3 |
+
deal in the Software without restriction, including without limitation the
|
| 4 |
+
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
| 5 |
+
sell copies of the Software, and to permit persons to whom the Software is
|
| 6 |
+
furnished to do so, subject to the following conditions:
|
| 7 |
+
|
| 8 |
+
The above copyright notice and this permission notice shall be included in
|
| 9 |
+
all copies or substantial portions of the Software.
|
| 10 |
+
|
| 11 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 12 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 13 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 14 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 15 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
| 16 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
| 17 |
+
IN THE SOFTWARE.
|
.venv/Lib/site-packages/setuptools-68.2.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: setuptools
|
| 3 |
+
Version: 68.2.0
|
| 4 |
+
Summary: Easily download, build, install, upgrade, and uninstall Python packages
|
| 5 |
+
Home-page: https://github.com/pypa/setuptools
|
| 6 |
+
Author: Python Packaging Authority
|
| 7 |
+
Author-email: distutils-sig@python.org
|
| 8 |
+
Project-URL: Documentation, https://setuptools.pypa.io/
|
| 9 |
+
Project-URL: Changelog, https://setuptools.pypa.io/en/stable/history.html
|
| 10 |
+
Keywords: CPAN PyPI distutils eggs package management
|
| 11 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 12 |
+
Classifier: Intended Audience :: Developers
|
| 13 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 14 |
+
Classifier: Programming Language :: Python :: 3
|
| 15 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 16 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 17 |
+
Classifier: Topic :: System :: Archiving :: Packaging
|
| 18 |
+
Classifier: Topic :: System :: Systems Administration
|
| 19 |
+
Classifier: Topic :: Utilities
|
| 20 |
+
Requires-Python: >=3.8
|
| 21 |
+
License-File: LICENSE
|
| 22 |
+
Provides-Extra: certs
|
| 23 |
+
Provides-Extra: docs
|
| 24 |
+
Requires-Dist: sphinx >=3.5 ; extra == 'docs'
|
| 25 |
+
Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
|
| 26 |
+
Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
|
| 27 |
+
Requires-Dist: furo ; extra == 'docs'
|
| 28 |
+
Requires-Dist: sphinx-lint ; extra == 'docs'
|
| 29 |
+
Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
|
| 30 |
+
Requires-Dist: pygments-github-lexers ==0.0.5 ; extra == 'docs'
|
| 31 |
+
Requires-Dist: sphinx-favicon ; extra == 'docs'
|
| 32 |
+
Requires-Dist: sphinx-inline-tabs ; extra == 'docs'
|
| 33 |
+
Requires-Dist: sphinx-reredirects ; extra == 'docs'
|
| 34 |
+
Requires-Dist: sphinxcontrib-towncrier ; extra == 'docs'
|
| 35 |
+
Requires-Dist: sphinx-notfound-page <2,>=1 ; extra == 'docs'
|
| 36 |
+
Requires-Dist: sphinx-hoverxref <2 ; extra == 'docs'
|
| 37 |
+
Provides-Extra: ssl
|
| 38 |
+
Provides-Extra: testing
|
| 39 |
+
Requires-Dist: pytest >=6 ; extra == 'testing'
|
| 40 |
+
Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
|
| 41 |
+
Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
|
| 42 |
+
Requires-Dist: flake8-2020 ; extra == 'testing'
|
| 43 |
+
Requires-Dist: virtualenv >=13.0.0 ; extra == 'testing'
|
| 44 |
+
Requires-Dist: wheel ; extra == 'testing'
|
| 45 |
+
Requires-Dist: pip >=19.1 ; extra == 'testing'
|
| 46 |
+
Requires-Dist: jaraco.envs >=2.2 ; extra == 'testing'
|
| 47 |
+
Requires-Dist: pytest-xdist ; extra == 'testing'
|
| 48 |
+
Requires-Dist: jaraco.path >=3.2.0 ; extra == 'testing'
|
| 49 |
+
Requires-Dist: build[virtualenv] ; extra == 'testing'
|
| 50 |
+
Requires-Dist: filelock >=3.4.0 ; extra == 'testing'
|
| 51 |
+
Requires-Dist: ini2toml[lite] >=0.9 ; extra == 'testing'
|
| 52 |
+
Requires-Dist: tomli-w >=1.0.0 ; extra == 'testing'
|
| 53 |
+
Requires-Dist: pytest-timeout ; extra == 'testing'
|
| 54 |
+
Provides-Extra: testing-integration
|
| 55 |
+
Requires-Dist: pytest ; extra == 'testing-integration'
|
| 56 |
+
Requires-Dist: pytest-xdist ; extra == 'testing-integration'
|
| 57 |
+
Requires-Dist: pytest-enabler ; extra == 'testing-integration'
|
| 58 |
+
Requires-Dist: virtualenv >=13.0.0 ; extra == 'testing-integration'
|
| 59 |
+
Requires-Dist: tomli ; extra == 'testing-integration'
|
| 60 |
+
Requires-Dist: wheel ; extra == 'testing-integration'
|
| 61 |
+
Requires-Dist: jaraco.path >=3.2.0 ; extra == 'testing-integration'
|
| 62 |
+
Requires-Dist: jaraco.envs >=2.2 ; extra == 'testing-integration'
|
| 63 |
+
Requires-Dist: build[virtualenv] ; extra == 'testing-integration'
|
| 64 |
+
Requires-Dist: filelock >=3.4.0 ; extra == 'testing-integration'
|
| 65 |
+
Requires-Dist: packaging ; extra == 'testing-integration'
|
| 66 |
+
Requires-Dist: pytest-black >=0.3.7 ; (platform_python_implementation != "PyPy") and extra == 'testing'
|
| 67 |
+
Requires-Dist: pytest-cov ; (platform_python_implementation != "PyPy") and extra == 'testing'
|
| 68 |
+
Requires-Dist: pytest-mypy >=0.9.1 ; (platform_python_implementation != "PyPy") and extra == 'testing'
|
| 69 |
+
Requires-Dist: jaraco.develop >=7.21 ; (python_version >= "3.9" and sys_platform != "cygwin") and extra == 'testing'
|
| 70 |
+
Requires-Dist: pytest-ruff ; (sys_platform != "cygwin") and extra == 'testing'
|
| 71 |
+
Requires-Dist: pytest-perf ; (sys_platform != "cygwin") and extra == 'testing'
|
| 72 |
+
|
| 73 |
+
.. image:: https://img.shields.io/pypi/v/setuptools.svg
|
| 74 |
+
:target: https://pypi.org/project/setuptools
|
| 75 |
+
|
| 76 |
+
.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg
|
| 77 |
+
|
| 78 |
+
.. image:: https://github.com/pypa/setuptools/workflows/tests/badge.svg
|
| 79 |
+
:target: https://github.com/pypa/setuptools/actions?query=workflow%3A%22tests%22
|
| 80 |
+
:alt: tests
|
| 81 |
+
|
| 82 |
+
.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
|
| 83 |
+
:target: https://github.com/astral-sh/ruff
|
| 84 |
+
:alt: Ruff
|
| 85 |
+
|
| 86 |
+
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
|
| 87 |
+
:target: https://github.com/psf/black
|
| 88 |
+
:alt: Code style: Black
|
| 89 |
+
|
| 90 |
+
.. image:: https://img.shields.io/readthedocs/setuptools/latest.svg
|
| 91 |
+
:target: https://setuptools.pypa.io
|
| 92 |
+
|
| 93 |
+
.. image:: https://img.shields.io/badge/skeleton-2023-informational
|
| 94 |
+
:target: https://blog.jaraco.com/skeleton
|
| 95 |
+
|
| 96 |
+
.. image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white
|
| 97 |
+
:target: https://codecov.io/gh/pypa/setuptools
|
| 98 |
+
|
| 99 |
+
.. image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat
|
| 100 |
+
:target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme
|
| 101 |
+
|
| 102 |
+
.. image:: https://img.shields.io/discord/803025117553754132
|
| 103 |
+
:target: https://discord.com/channels/803025117553754132/815945031150993468
|
| 104 |
+
:alt: Discord
|
| 105 |
+
|
| 106 |
+
See the `Installation Instructions
|
| 107 |
+
<https://packaging.python.org/installing/>`_ in the Python Packaging
|
| 108 |
+
User's Guide for instructions on installing, upgrading, and uninstalling
|
| 109 |
+
Setuptools.
|
| 110 |
+
|
| 111 |
+
Questions and comments should be directed to `GitHub Discussions
|
| 112 |
+
<https://github.com/pypa/setuptools/discussions>`_.
|
| 113 |
+
Bug reports and especially tested patches may be
|
| 114 |
+
submitted directly to the `bug tracker
|
| 115 |
+
<https://github.com/pypa/setuptools/issues>`_.
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
Code of Conduct
|
| 119 |
+
===============
|
| 120 |
+
|
| 121 |
+
Everyone interacting in the setuptools project's codebases, issue trackers,
|
| 122 |
+
chat rooms, and fora is expected to follow the
|
| 123 |
+
`PSF Code of Conduct <https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md>`_.
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
For Enterprise
|
| 127 |
+
==============
|
| 128 |
+
|
| 129 |
+
Available as part of the Tidelift Subscription.
|
| 130 |
+
|
| 131 |
+
Setuptools and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
|
| 132 |
+
|
| 133 |
+
`Learn more <https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=referral&utm_campaign=github>`_.
|
.venv/Lib/site-packages/setuptools-68.2.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,484 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
distutils-precedence.pth,sha256=JjjOniUA5XKl4N5_rtZmHrVp0baW_LoHsN0iPaX10iQ,151
|
| 2 |
+
_distutils_hack/__init__.py,sha256=RoSaYKfMhRic9rWsYrPxNQBIYs5qllQKgcle9vvE3D4,6299
|
| 3 |
+
_distutils_hack/override.py,sha256=Eu_s-NF6VIZ4Cqd0tbbA5wtWky2IZPNd8et6GLt1mzo,44
|
| 4 |
+
pkg_resources/__init__.py,sha256=UNF7nekS-QAn35ZVSEd1A09LkF88zbHvBqsapABjuvw,109429
|
| 5 |
+
pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 6 |
+
pkg_resources/_vendor/typing_extensions.py,sha256=ipqWiq5AHzrwczt6c26AP05Llh6a5_GaXRpOBqbogHA,80078
|
| 7 |
+
pkg_resources/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
|
| 8 |
+
pkg_resources/_vendor/importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506
|
| 9 |
+
pkg_resources/_vendor/importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504
|
| 10 |
+
pkg_resources/_vendor/importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457
|
| 11 |
+
pkg_resources/_vendor/importlib_resources/_compat.py,sha256=L8HTWyAC_MIKuxWZuw0zvTq5qmUA0ttrvK941OzDKU8,2925
|
| 12 |
+
pkg_resources/_vendor/importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884
|
| 13 |
+
pkg_resources/_vendor/importlib_resources/_legacy.py,sha256=0TKdZixxLWA-xwtAZw4HcpqJmj4Xprx1Zkcty0gTRZY,3481
|
| 14 |
+
pkg_resources/_vendor/importlib_resources/abc.py,sha256=Icr2IJ2QtH7vvAB9vC5WRJ9KBoaDyJa7KUs8McuROzo,5140
|
| 15 |
+
pkg_resources/_vendor/importlib_resources/readers.py,sha256=PZsi5qacr2Qn3KHw4qw3Gm1MzrBblPHoTdjqjH7EKWw,3581
|
| 16 |
+
pkg_resources/_vendor/importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576
|
| 17 |
+
pkg_resources/_vendor/jaraco/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 18 |
+
pkg_resources/_vendor/jaraco/context.py,sha256=vlyDzb_PvZ9H7R9bbTr_CMRnveW5Dc56eC7eyd_GfoA,7460
|
| 19 |
+
pkg_resources/_vendor/jaraco/functools.py,sha256=ggupfjztLyRtNk4pS2JqVrH3lWUX-QbE3wz5PyIKZWE,15056
|
| 20 |
+
pkg_resources/_vendor/jaraco/text/__init__.py,sha256=cN55bFcceW4wTHG5ruv5IuEDRarP-4hBYX8zl94_c30,15526
|
| 21 |
+
pkg_resources/_vendor/more_itertools/__init__.py,sha256=mTzXsWGDHiVW5x8zHzcRu1imUMzrEtJnUhfsN-dBrV4,148
|
| 22 |
+
pkg_resources/_vendor/more_itertools/more.py,sha256=hAluuEi5QOSe0OZfD2_sCwwbfbK5NnAxHg6uvU5AfPU,134976
|
| 23 |
+
pkg_resources/_vendor/more_itertools/recipes.py,sha256=lgw5bP3UoNfvUPhRaz1VIAfRFkF9pKWN-8UB6H0W5Eo,25416
|
| 24 |
+
pkg_resources/_vendor/packaging/__init__.py,sha256=kYVZSmXT6CWInT4UJPDtrSQBAZu8fMuFBxpv5GsDTLk,501
|
| 25 |
+
pkg_resources/_vendor/packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
|
| 26 |
+
pkg_resources/_vendor/packaging/_manylinux.py,sha256=ESGrDEVmBc8jYTtdZRAWiLk72lOzAKWeezFgoJ_MuBc,8926
|
| 27 |
+
pkg_resources/_vendor/packaging/_musllinux.py,sha256=mvPk7FNjjILKRLIdMxR7IvJ1uggLgCszo-L9rjfpi0M,2524
|
| 28 |
+
pkg_resources/_vendor/packaging/_parser.py,sha256=KJQkBh_Xbfb-qsB560YIEItrTpCZaOh4_YMfBtd5XIY,10194
|
| 29 |
+
pkg_resources/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
|
| 30 |
+
pkg_resources/_vendor/packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
|
| 31 |
+
pkg_resources/_vendor/packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208
|
| 32 |
+
pkg_resources/_vendor/packaging/metadata.py,sha256=PjELMLxKG_iu3HWjKAOdKhuNrHfWgpdTF2Q4nObsZeM,16397
|
| 33 |
+
pkg_resources/_vendor/packaging/requirements.py,sha256=hJzvtJyAvENc_VfwfhnOZV1851-VW8JCGh-R96NE4Pc,3287
|
| 34 |
+
pkg_resources/_vendor/packaging/specifiers.py,sha256=ZOpqL_w_Kj6ZF_OWdliQUzhEyHlDbi6989kr-sF5GHs,39206
|
| 35 |
+
pkg_resources/_vendor/packaging/tags.py,sha256=_1gLX8h1SgpjAdYCP9XqU37zRjXtU5ZliGy3IM-WcSM,18106
|
| 36 |
+
pkg_resources/_vendor/packaging/utils.py,sha256=es0cCezKspzriQ-3V88h3yJzxz028euV2sUwM61kE-o,4355
|
| 37 |
+
pkg_resources/_vendor/packaging/version.py,sha256=2NH3E57hzRhn0BV9boUBvgPsxlTqLJeI0EpYQoNvGi0,16326
|
| 38 |
+
pkg_resources/_vendor/platformdirs/__init__.py,sha256=edi2JSKpLCapqir0AW_CjpHtinRE3hf6aDk5-VHggLk,12806
|
| 39 |
+
pkg_resources/_vendor/platformdirs/__main__.py,sha256=VsC0t5m-6f0YVr96PVks93G3EDF8MSNY4KpUMvPahDA,1164
|
| 40 |
+
pkg_resources/_vendor/platformdirs/android.py,sha256=GKizhyS7ESRiU67u8UnBJLm46goau9937EchXWbPBlk,4068
|
| 41 |
+
pkg_resources/_vendor/platformdirs/api.py,sha256=MXKHXOL3eh_-trSok-JUTjAR_zjmmKF3rjREVABjP8s,4910
|
| 42 |
+
pkg_resources/_vendor/platformdirs/macos.py,sha256=-3UXQewbT0yMhMdkzRXfXGAntmLIH7Qt4a9Hlf8I5_Y,2655
|
| 43 |
+
pkg_resources/_vendor/platformdirs/unix.py,sha256=P-WQjSSieE38DXjMDa1t4XHnKJQ5idEaKT0PyXwm8KQ,6911
|
| 44 |
+
pkg_resources/_vendor/platformdirs/version.py,sha256=qaN-fw_htIgKUVXoAuAEVgKxQu3tZ9qE2eiKkWIS7LA,160
|
| 45 |
+
pkg_resources/_vendor/platformdirs/windows.py,sha256=LOrXLgI0CjQldDo2zhOZYGYZ6g4e_cJOCB_pF9aMRWQ,6596
|
| 46 |
+
pkg_resources/extern/__init__.py,sha256=nDtjbrhEaDu388fp4O6BGSpbihZmHh7PoOz2hhFk-Qg,2442
|
| 47 |
+
setuptools/__init__.py,sha256=mpVwlKNmw8XaMmeGQReCoAOsZb1T-iiqa24QEQHWaGk,9214
|
| 48 |
+
setuptools/_core_metadata.py,sha256=08Etw3qFpFdeQM5ilB-kXKGAZwAs_ha0GN5U3x0r1uk,8858
|
| 49 |
+
setuptools/_entry_points.py,sha256=P-Utt8hvMGkkJdw7VPzZ00uijeA9dohUCMTcDbbeQkU,2235
|
| 50 |
+
setuptools/_imp.py,sha256=1Y1gH0NOppV4nbr1eidD5iGQ8UVPfiVZi6rTqrfC06c,2433
|
| 51 |
+
setuptools/_importlib.py,sha256=ZWlYbGHjb-QwRpH3SQ9uuxn_X-F2ihcQCS5HtT_W9lk,1468
|
| 52 |
+
setuptools/_itertools.py,sha256=pZAgXNz6tRPUFnHAaKJ90xAgD0gLPemcE1396Zgz73o,675
|
| 53 |
+
setuptools/_normalization.py,sha256=l_Dx1p6Mm7Q1i_LjkOX1mA-vWk1EvKa9ZC5IOOearTE,4042
|
| 54 |
+
setuptools/_path.py,sha256=5xWH5ZZEJVcp_b0JjcAyTuTX2iz1H3F2Yti7fPIxueU,1056
|
| 55 |
+
setuptools/_reqs.py,sha256=1UTUBFswyoz1BiCQ-ofVlHNBpFYQ1eiNjraQsARoklk,882
|
| 56 |
+
setuptools/archive_util.py,sha256=lRK7l7GkpLJeNqnESJWUfDre4q4wR9x6Z8WD3cIagXc,7331
|
| 57 |
+
setuptools/build_meta.py,sha256=rKW7Zj_JF5HWO7iGm-WG3GM_ugEZtqh2V15qfxGFaPw,20091
|
| 58 |
+
setuptools/cli-32.exe,sha256=MqzBvFQxFsviz_EMuGd3LfLyVP8mNMhwrvC0bEtpb9s,11776
|
| 59 |
+
setuptools/cli-64.exe,sha256=u7PeVwdinmpgoMI4zUd7KPB_AGaYL9qVP6b87DkHOko,14336
|
| 60 |
+
setuptools/cli-arm64.exe,sha256=uafQjaiA36yLz1SOuksG-1m28JsX0zFIoPZhgyiSbGE,13824
|
| 61 |
+
setuptools/cli.exe,sha256=MqzBvFQxFsviz_EMuGd3LfLyVP8mNMhwrvC0bEtpb9s,11776
|
| 62 |
+
setuptools/dep_util.py,sha256=T-z4hg_BDsgQgLxUZERfELAnYGsIs6mS902Irf__uRc,936
|
| 63 |
+
setuptools/depends.py,sha256=b0EQ1bMHwIo5P5KAS_yenG3aKnJNqJxOG1Bfevp-0ac,5518
|
| 64 |
+
setuptools/discovery.py,sha256=-PqkaOszc9o-7LTR1aqOL4SqiyAwbGdURKVlgHuHudg,21147
|
| 65 |
+
setuptools/dist.py,sha256=i54zXAQAC4g3abIn4bxzQAQHRbdprOd924nxghl401k,38626
|
| 66 |
+
setuptools/errors.py,sha256=2uToNIRA7dG995pf8ox8a4r7nJtP62-hpLhzsRirnx0,2464
|
| 67 |
+
setuptools/extension.py,sha256=jpsAdQvCBCkAuvmEXYI90TV4kNGO2Y13NqDr_PrvdhA,5591
|
| 68 |
+
setuptools/glob.py,sha256=Ip2HBUIz5ma7Wo-S_a4XI6m2-N4vDRgfJxtytV13VUE,4868
|
| 69 |
+
setuptools/gui-32.exe,sha256=hdrh6V13hF8stZvKw9Sv50u-TJGpvMW_SnHNQxBNvnw,11776
|
| 70 |
+
setuptools/gui-64.exe,sha256=NHG2FA6txkEid9u-_j_vjDRaDxpZd2CGuAo2GMOoPjs,14336
|
| 71 |
+
setuptools/gui-arm64.exe,sha256=5pT0dDQFyLWSb_RX22_n8aEt7HwWqcOGR4TT9OB64Jc,13824
|
| 72 |
+
setuptools/gui.exe,sha256=hdrh6V13hF8stZvKw9Sv50u-TJGpvMW_SnHNQxBNvnw,11776
|
| 73 |
+
setuptools/installer.py,sha256=IMw5qVCEC4Ojyin8v_ql-TZJkCjf10UOIB-SBcPSPvU,4989
|
| 74 |
+
setuptools/launch.py,sha256=TyPT-Ic1T2EnYvGO26gfNRP4ysBlrhpbRjQxWsiO414,812
|
| 75 |
+
setuptools/logging.py,sha256=JA7DVtLlC3gskysgtORtm9-4UWh9kWr9FjbXbdQsIRo,1239
|
| 76 |
+
setuptools/monkey.py,sha256=H7Rlo2qhA5vZcl4sqpBq8ZbaHWtUB63P5-P3acswQfQ,4782
|
| 77 |
+
setuptools/msvc.py,sha256=OdP64fK5reebikVi60z9hTpHgsSSSRy06v-SJwF6tMM,47495
|
| 78 |
+
setuptools/namespaces.py,sha256=epZT2G6fiQV6l0H--xfm2_s8EOz9H9xv2ceQoAyX2Z4,3073
|
| 79 |
+
setuptools/package_index.py,sha256=1B5ZXXEr5T6b2xqVLVH9v1_xL1do-EMeGw5diYkyhVE,38350
|
| 80 |
+
setuptools/py312compat.py,sha256=6qfRL57v2DWBBQdqv-w_T70KxK0iowZiCLVhESfj36Y,330
|
| 81 |
+
setuptools/sandbox.py,sha256=Xhj-2948bZhytdV_pJDMXAgV7Vg1lBC_7mcYb4DOwRI,14349
|
| 82 |
+
setuptools/script (dev).tmpl,sha256=RUzQzCQUaXtwdLtYHWYbIQmOaES5Brqq1FvUA_tu-5I,218
|
| 83 |
+
setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138
|
| 84 |
+
setuptools/unicode_utils.py,sha256=aOOFo4JGwAsiBttGYDsqFS7YqWQeZ2j6DWiCuctR_00,941
|
| 85 |
+
setuptools/version.py,sha256=WJCeUuyq74Aok2TeK9-OexZOu8XrlQy7-y0BEuWNovQ,161
|
| 86 |
+
setuptools/warnings.py,sha256=e-R_k8T3HYIC2DScA4nzjcwigsXF8rn2lCsp3KUrYAo,3697
|
| 87 |
+
setuptools/wheel.py,sha256=NoYuHzbajNGm9n_Jma4q6-s0yjjgUr6UaOArBSUvCLM,8628
|
| 88 |
+
setuptools/windows_support.py,sha256=hTJZVs5oLGQKIgmAjaFH56bLv9hgQd3WSnfsHpoqQLI,719
|
| 89 |
+
setuptools/_distutils/__init__.py,sha256=swqU6jm29LbH4slGa3UTxYAaMUCLOzPY1qTMa4tv7PE,359
|
| 90 |
+
setuptools/_distutils/_collections.py,sha256=2qMJB2M_i53g0LmeYfD5V3SQ9fx3FScCXdFUS03wfiU,5300
|
| 91 |
+
setuptools/_distutils/_functools.py,sha256=ABZ-Lyw-igKwBFoLF3QYtFmfutwZLiAdWcpRMbcacGU,411
|
| 92 |
+
setuptools/_distutils/_log.py,sha256=zwFOk2ValRHMQa_kCqDXpHnwaqqZzhxGEwuR4zV-dEs,43
|
| 93 |
+
setuptools/_distutils/_macos_compat.py,sha256=-v_Z0M1LEH5k-VhSBBbuz_pDp3nSZ4rzU9E7iIskPDc,239
|
| 94 |
+
setuptools/_distutils/_msvccompiler.py,sha256=sWNC_gUhWzQ0FkCS6bD3Tj2Fvlnk2AwLnP8OvcV_gvQ,19616
|
| 95 |
+
setuptools/_distutils/archive_util.py,sha256=JtMIta8JuFkCXVTHvZhmneAEdIMnpsdX84nOWKF24rk,8572
|
| 96 |
+
setuptools/_distutils/bcppcompiler.py,sha256=IAFbt_mF3q3QFBhHGKHA68K1uNfU4MrkhoAJ0zA9S_k,14721
|
| 97 |
+
setuptools/_distutils/ccompiler.py,sha256=rnLM-1MMQgWm-lMOHz9a7XJ0YARP1xnuCAWkQY0XsDQ,48643
|
| 98 |
+
setuptools/_distutils/cmd.py,sha256=PcjcZszunlBw0FRICIr63LAAc8lUQoqia9GRLePzqc0,17861
|
| 99 |
+
setuptools/_distutils/config.py,sha256=NrQjaUO9B88P-JtOfww3BMt9rSn1TirU4G7u0ut5FrM,4911
|
| 100 |
+
setuptools/_distutils/core.py,sha256=2zrS7rdu7Oe2143xsmCld8H61IbSpwnru9GDeSCQLbY,9397
|
| 101 |
+
setuptools/_distutils/cygwinccompiler.py,sha256=hBv-OShb_uKvLjo_E2uqtQLEJNBBXTFglvf6mzbUN8o,11924
|
| 102 |
+
setuptools/_distutils/debug.py,sha256=N6MrTAqK6l9SVk6tWweR108PM8Ol7qNlfyV-nHcLhsY,139
|
| 103 |
+
setuptools/_distutils/dep_util.py,sha256=9pqhyGw2q2HGGGXAOpbbezj024aAr_47xDfXz5Fas7U,3414
|
| 104 |
+
setuptools/_distutils/dir_util.py,sha256=Ob0omB4OlZZXfFQtalVoIY6CgIrOkD5YZfATYv2DXZg,8072
|
| 105 |
+
setuptools/_distutils/dist.py,sha256=YU6OeLdWPDWMg-GRCeykT21fOp7PxAYn1uwnoRpI-uM,50174
|
| 106 |
+
setuptools/_distutils/errors.py,sha256=ZtBwnhDpQA2bxIazPXNDQ25uNxM4p2omsaSRNpV3rpE,3589
|
| 107 |
+
setuptools/_distutils/extension.py,sha256=F0TBNjYkMmte_Yg1bhKVHXSNWWNFEPIDUgwhuHdkox8,10270
|
| 108 |
+
setuptools/_distutils/fancy_getopt.py,sha256=njv20bPVKKusIRbs8Md1YNWlGZQV1mW5fWPNkdYx-QI,17899
|
| 109 |
+
setuptools/_distutils/file_util.py,sha256=koQCT7uz5wVTVGy-gdsFFPFQO5GfIhc06JUYbIX5V08,8212
|
| 110 |
+
setuptools/_distutils/filelist.py,sha256=rOKJPBvuLSjElfYuOwju95AzR3Ev5lvJoCJvI_XvZ9g,13715
|
| 111 |
+
setuptools/_distutils/log.py,sha256=725W7ISJzoSYNtLnEP1FwZe_IMUn1Xq6NEYwFbXg63k,1201
|
| 112 |
+
setuptools/_distutils/msvc9compiler.py,sha256=X2Xf2g-RMKzb_B4MIihiO3ogyTFjJNV1xRWpZTsbbSA,30188
|
| 113 |
+
setuptools/_distutils/msvccompiler.py,sha256=Vus9UyDuNCT_PfZjwu253wL0v5PiQ9miiMZmdIro5wM,23577
|
| 114 |
+
setuptools/_distutils/py38compat.py,sha256=gZ-NQ5c6ufwVEkJ0BwkbrqG9TvWirVJIrVGqhgvaY-Q,217
|
| 115 |
+
setuptools/_distutils/py39compat.py,sha256=vkxjv22H1bhToalClz3M0UUD8Xr21klbUBTQoVQxx20,639
|
| 116 |
+
setuptools/_distutils/spawn.py,sha256=E6Il74CIINCRjakXUcWqSWjfC_sdp4Qtod0Bw5y_NNQ,3495
|
| 117 |
+
setuptools/_distutils/sysconfig.py,sha256=BbXNQAF9_tErImHCfSori3188FwSw2TUFqLBvU1BLdg,18928
|
| 118 |
+
setuptools/_distutils/text_file.py,sha256=SBgU_IeHYRZMvmmqyE6I8qXAbh1Z-wd60Hf0Yv97Cls,12085
|
| 119 |
+
setuptools/_distutils/unixccompiler.py,sha256=HYO3TXHm5kLGSsIdf9ytVLYCzUpdLQMt4Jd2NN7duzQ,15601
|
| 120 |
+
setuptools/_distutils/util.py,sha256=bef-Z_j0XzPU2E1AHJQNvGYNovSxdiJMa3JIbanQm7g,18099
|
| 121 |
+
setuptools/_distutils/version.py,sha256=9dCa7JcCWXBrfGUsv7Zzvqm-Mrf7yaK6cC5xRzx3iqg,12951
|
| 122 |
+
setuptools/_distutils/versionpredicate.py,sha256=mkg9LtyF3EWox-KnbBx08gKV8zu0ymIl1izIho2-f7k,5205
|
| 123 |
+
setuptools/_distutils/command/__init__.py,sha256=fVUps4DJhvShMAod0y7xl02m46bd7r31irEhNofPrrs,430
|
| 124 |
+
setuptools/_distutils/command/_framework_compat.py,sha256=HW84Z1cWmg4b6aMJvlMI9o6sGZSEH_aWMTlDKstL8lY,1614
|
| 125 |
+
setuptools/_distutils/command/bdist.py,sha256=EpbYBIrW4QTYrA6G8uUJIKZaLmj8w4S5KWnXzmr6hQo,5408
|
| 126 |
+
setuptools/_distutils/command/bdist_dumb.py,sha256=FvvNgx_B7ypjf7rMxFNNBOsuF_Dj_OV8L4dmkULhQKM,4665
|
| 127 |
+
setuptools/_distutils/command/bdist_rpm.py,sha256=QNQku4v38GcOcctHGNbRVoYv5mVMVcexnmCxh9fqpGw,22013
|
| 128 |
+
setuptools/_distutils/command/build.py,sha256=XDgkAsMp_jLX9mj-6ESdf7GK_8RuX9kwILwXOhN1GaM,5584
|
| 129 |
+
setuptools/_distutils/command/build_clib.py,sha256=stRzgT6gdXMTmsEi8PyudEO32ZDC7iP--sdUErcMuOs,7684
|
| 130 |
+
setuptools/_distutils/command/build_ext.py,sha256=2poWttNAhj3Y45ZddgIVMwXjNXAdUcAOO_sc0wh6anQ,31503
|
| 131 |
+
setuptools/_distutils/command/build_py.py,sha256=LK_l_5gnFv6D02YtyJRBp5kE3SWmHVEC7CbBKe2tjk8,16537
|
| 132 |
+
setuptools/_distutils/command/build_scripts.py,sha256=cp6WiXTaEd8LWwxizpnFSmbCOSizPLclAHFFsqxRqqs,5604
|
| 133 |
+
setuptools/_distutils/command/check.py,sha256=f7QOy4LkKUXiRyyti4orzCJX9Z8sY_uOyMYUADADG6g,4872
|
| 134 |
+
setuptools/_distutils/command/clean.py,sha256=VCRg7BPVdLXgtevEi7t_iChJW6k6fOaO0GyqR_m_MRw,2594
|
| 135 |
+
setuptools/_distutils/command/config.py,sha256=FU8kAanpAvaaecBbRZTvZ7lcoxxBXq5_nTufwOyZUXg,13077
|
| 136 |
+
setuptools/_distutils/command/install.py,sha256=5h_6BldPSUPUkYDzdY1t6Jiqaw21yBZZokpkMVaBnyo,30153
|
| 137 |
+
setuptools/_distutils/command/install_data.py,sha256=NgW_xUoUqcBGjGFr2VHrkYFejVqeAmwsGSu_fGQb384,2762
|
| 138 |
+
setuptools/_distutils/command/install_egg_info.py,sha256=Cv69kqrFORuwb1I1owe-IxyK0ZANirqGgiLyxcYSnBI,2788
|
| 139 |
+
setuptools/_distutils/command/install_headers.py,sha256=v-QcVkjaWX5yf0xaup9_KySanVlmd6LhuzEhGpmTiTU,1180
|
| 140 |
+
setuptools/_distutils/command/install_lib.py,sha256=v3we1bymtqvE-j_7yCSnb4a0Jy32s3z1SLZzF91NpjY,8409
|
| 141 |
+
setuptools/_distutils/command/install_scripts.py,sha256=oiYYD6IhTx9F4CQMfz5LQeGT1y5hZrndxbKBYSvzTa8,1932
|
| 142 |
+
setuptools/_distutils/command/py37compat.py,sha256=EoJC8gVYMIv2tA1NpVA2XDyCT1qGp4BEn7aX_5ve1gw,672
|
| 143 |
+
setuptools/_distutils/command/register.py,sha256=q8kKVA-6IPWbgHPBbc8HvWwRi9DXerjnyiMgMG1fu8A,11817
|
| 144 |
+
setuptools/_distutils/command/sdist.py,sha256=JkT1SJQUgtlZyjFmyqx0lOL45tDb9I9Dn38iz9ySb-k,19232
|
| 145 |
+
setuptools/_distutils/command/upload.py,sha256=jsb3Kj3XQtNqwwvtc1WUt_Jk8AEXIehjEXIj3dInv6M,7491
|
| 146 |
+
setuptools/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 147 |
+
setuptools/_vendor/ordered_set.py,sha256=dbaCcs27dyN9gnMWGF5nA_BrVn6Q-NrjKYJpV9_fgBs,15130
|
| 148 |
+
setuptools/_vendor/typing_extensions.py,sha256=1uqi_RSlI7gos4eJB_NEV3d5wQwzTUQHd3_jrkbTo8Q,87149
|
| 149 |
+
setuptools/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
|
| 150 |
+
setuptools/_vendor/importlib_metadata/__init__.py,sha256=fQEsJb7Gs_9Vq9V0xHICB0EFxNRGyxubr4w4ZFmGcxY,26498
|
| 151 |
+
setuptools/_vendor/importlib_metadata/_adapters.py,sha256=i8S6Ib1OQjcILA-l4gkzktMZe18TaeUNI49PLRp6OBU,2454
|
| 152 |
+
setuptools/_vendor/importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743
|
| 153 |
+
setuptools/_vendor/importlib_metadata/_compat.py,sha256=GtdqmFy_ykVSTkz6MdGL2g3V5kxvQKHTWxKZCk5Q59Q,1859
|
| 154 |
+
setuptools/_vendor/importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895
|
| 155 |
+
setuptools/_vendor/importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068
|
| 156 |
+
setuptools/_vendor/importlib_metadata/_meta.py,sha256=v5e1ZDG7yZTH3h7TjbS5bM5p8AGzMPVOu8skDMv4h6k,1165
|
| 157 |
+
setuptools/_vendor/importlib_metadata/_py39compat.py,sha256=2Tk5twb_VgLCY-1NEAQjdZp_S9OFMC-pUzP2isuaPsQ,1098
|
| 158 |
+
setuptools/_vendor/importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166
|
| 159 |
+
setuptools/_vendor/importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506
|
| 160 |
+
setuptools/_vendor/importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504
|
| 161 |
+
setuptools/_vendor/importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457
|
| 162 |
+
setuptools/_vendor/importlib_resources/_compat.py,sha256=L8HTWyAC_MIKuxWZuw0zvTq5qmUA0ttrvK941OzDKU8,2925
|
| 163 |
+
setuptools/_vendor/importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884
|
| 164 |
+
setuptools/_vendor/importlib_resources/_legacy.py,sha256=0TKdZixxLWA-xwtAZw4HcpqJmj4Xprx1Zkcty0gTRZY,3481
|
| 165 |
+
setuptools/_vendor/importlib_resources/abc.py,sha256=Icr2IJ2QtH7vvAB9vC5WRJ9KBoaDyJa7KUs8McuROzo,5140
|
| 166 |
+
setuptools/_vendor/importlib_resources/readers.py,sha256=PZsi5qacr2Qn3KHw4qw3Gm1MzrBblPHoTdjqjH7EKWw,3581
|
| 167 |
+
setuptools/_vendor/importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576
|
| 168 |
+
setuptools/_vendor/jaraco/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 169 |
+
setuptools/_vendor/jaraco/context.py,sha256=vlyDzb_PvZ9H7R9bbTr_CMRnveW5Dc56eC7eyd_GfoA,7460
|
| 170 |
+
setuptools/_vendor/jaraco/functools.py,sha256=0rUJxpJvN1TNlBScfYB2NbFGO1Pv7BeMJwzvqkVqnbY,15053
|
| 171 |
+
setuptools/_vendor/jaraco/text/__init__.py,sha256=KfFGMerrkN_0V0rgtJVx-9dHt3tW7i_uJypjwEcLtC0,15517
|
| 172 |
+
setuptools/_vendor/more_itertools/__init__.py,sha256=C7sXffHTXM3P-iaLPPfqfmDoxOflQMJLcM7ed9p3jak,82
|
| 173 |
+
setuptools/_vendor/more_itertools/more.py,sha256=0rB_mibFR51sq33UlAI_bWfaNdsYNnJr1v6S0CaW7QA,117959
|
| 174 |
+
setuptools/_vendor/more_itertools/recipes.py,sha256=UkNkrsZyqiwgLHANBTmvMhCvaNSvSNYhyOpz_Jc55DY,16256
|
| 175 |
+
setuptools/_vendor/packaging/__init__.py,sha256=kYVZSmXT6CWInT4UJPDtrSQBAZu8fMuFBxpv5GsDTLk,501
|
| 176 |
+
setuptools/_vendor/packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
|
| 177 |
+
setuptools/_vendor/packaging/_manylinux.py,sha256=ESGrDEVmBc8jYTtdZRAWiLk72lOzAKWeezFgoJ_MuBc,8926
|
| 178 |
+
setuptools/_vendor/packaging/_musllinux.py,sha256=mvPk7FNjjILKRLIdMxR7IvJ1uggLgCszo-L9rjfpi0M,2524
|
| 179 |
+
setuptools/_vendor/packaging/_parser.py,sha256=KJQkBh_Xbfb-qsB560YIEItrTpCZaOh4_YMfBtd5XIY,10194
|
| 180 |
+
setuptools/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
|
| 181 |
+
setuptools/_vendor/packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
|
| 182 |
+
setuptools/_vendor/packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208
|
| 183 |
+
setuptools/_vendor/packaging/metadata.py,sha256=PjELMLxKG_iu3HWjKAOdKhuNrHfWgpdTF2Q4nObsZeM,16397
|
| 184 |
+
setuptools/_vendor/packaging/requirements.py,sha256=hJzvtJyAvENc_VfwfhnOZV1851-VW8JCGh-R96NE4Pc,3287
|
| 185 |
+
setuptools/_vendor/packaging/specifiers.py,sha256=ZOpqL_w_Kj6ZF_OWdliQUzhEyHlDbi6989kr-sF5GHs,39206
|
| 186 |
+
setuptools/_vendor/packaging/tags.py,sha256=_1gLX8h1SgpjAdYCP9XqU37zRjXtU5ZliGy3IM-WcSM,18106
|
| 187 |
+
setuptools/_vendor/packaging/utils.py,sha256=es0cCezKspzriQ-3V88h3yJzxz028euV2sUwM61kE-o,4355
|
| 188 |
+
setuptools/_vendor/packaging/version.py,sha256=2NH3E57hzRhn0BV9boUBvgPsxlTqLJeI0EpYQoNvGi0,16326
|
| 189 |
+
setuptools/_vendor/tomli/__init__.py,sha256=JhUwV66DB1g4Hvt1UQCVMdfCu-IgAV8FXmvDU9onxd4,396
|
| 190 |
+
setuptools/_vendor/tomli/_parser.py,sha256=g9-ENaALS-B8dokYpCuzUFalWlog7T-SIYMjLZSWrtM,22633
|
| 191 |
+
setuptools/_vendor/tomli/_re.py,sha256=dbjg5ChZT23Ka9z9DHOXfdtSpPwUfdgMXnj8NOoly-w,2943
|
| 192 |
+
setuptools/_vendor/tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254
|
| 193 |
+
setuptools/command/__init__.py,sha256=HZlSppOB8Vro73ffvP-xrORuMrh4GnVkOqJspFRG8Pg,396
|
| 194 |
+
setuptools/command/_requirestxt.py,sha256=6JAng0wqlHHgVULNp-0bZerp3RhvDaE4jMh2mmkxvd4,4249
|
| 195 |
+
setuptools/command/alias.py,sha256=1holrSsdYxp1Esoa2yfRHLjiYlCRi3jYZy2yWm62YVU,2383
|
| 196 |
+
setuptools/command/bdist_egg.py,sha256=Y_t_cgP-dicS4Fy6ZBZRsa_nrM0u27FyfO__B7M0oSg,16559
|
| 197 |
+
setuptools/command/bdist_rpm.py,sha256=9JLFlvVbcY-ilqbfJtDoDHU5dGQG2iPAWRWkD6T5pQQ,1309
|
| 198 |
+
setuptools/command/build.py,sha256=oRWmv6b8a21B3I5X2Fhbmu79w2hXKxtSZTzmNngyzr8,6784
|
| 199 |
+
setuptools/command/build_clib.py,sha256=rCXGTo2roFQCXMkIH_nNauzqRqjyINNaw8mudyrR5AA,4398
|
| 200 |
+
setuptools/command/build_ext.py,sha256=Xhmu6oSZtNyDqaRZIxGcq_mClH0Gx-_ptchHvOxe_a8,17504
|
| 201 |
+
setuptools/command/build_py.py,sha256=4Kcxif1dW8VYQRwUPZvLYNl58PisJJgN1spQHG12oiY,14997
|
| 202 |
+
setuptools/command/develop.py,sha256=hvaiPtS_-QLZR-ih9sjP6AGkLZU7T9du_DAYcNQJmJ4,6722
|
| 203 |
+
setuptools/command/dist_info.py,sha256=U__5IrmcaT_5lMpZsYXfV1KZW8T8hyu5rRhFBemK_Wg,4242
|
| 204 |
+
setuptools/command/easy_install.py,sha256=4I-WLbxlzPhAYvGv9KhzJfG6KD6AtdaPVc4zgoeIwOs,86493
|
| 205 |
+
setuptools/command/editable_wheel.py,sha256=gyzLPL06C3fyxw2gFNrUjYsvAmb_rwI9rF-VKfT1rkg,33758
|
| 206 |
+
setuptools/command/egg_info.py,sha256=wChND7a0-ilKJDVxj453jiAn9Ew659BcKEyZc8Ja8gA,26399
|
| 207 |
+
setuptools/command/install.py,sha256=MZBFeNiphOdVcw62El5cOmy76oS5LS_AwCRR-2eWRjw,5627
|
| 208 |
+
setuptools/command/install_egg_info.py,sha256=zpDDCmOJspfkEekUON7wU0ABFNW-0uXUZpzpHRYUdiI,2066
|
| 209 |
+
setuptools/command/install_lib.py,sha256=gUEW1ACrDcK_Mq7_RiF3YUlKA-9e-Tq9AcQs7KA-glk,3870
|
| 210 |
+
setuptools/command/install_scripts.py,sha256=n2toonBXHYFZcn2wkZ7eNl15c816kouMiNpNuTjIKSo,2359
|
| 211 |
+
setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628
|
| 212 |
+
setuptools/command/register.py,sha256=kk3DxXCb5lXTvqnhfwx2g6q7iwbUmgTyXUCaBooBOUk,468
|
| 213 |
+
setuptools/command/rotate.py,sha256=2z_6-q4mlnP8KK4E5I61wsFCjBnIAk8RhJxVLXDYGHg,2097
|
| 214 |
+
setuptools/command/saveopts.py,sha256=mVAPMRIGE98gl6eXQ3C2Wo-qPOgl9lbH-Q_YsbLuqeg,657
|
| 215 |
+
setuptools/command/sdist.py,sha256=dVeovgNpONfKOxi32unl2ajJXePACx_pZZ3uRc1Z4h4,7098
|
| 216 |
+
setuptools/command/setopt.py,sha256=CTNgVkgm2yV9c1bO9wem86_M8X6TGyuEtitUjBzGwBc,4927
|
| 217 |
+
setuptools/command/test.py,sha256=y9YIFfW5TOpg6dES2UOy__QvJ7y-26pBifXzuIjhr6s,8101
|
| 218 |
+
setuptools/command/upload.py,sha256=XT3YFVfYPAmA5qhGg0euluU98ftxRUW-PzKcODMLxUs,462
|
| 219 |
+
setuptools/command/upload_docs.py,sha256=NMJyWRxdEV8rdBdPIjoYIqm34RCiE3ihEXzvS0yRbJY,7773
|
| 220 |
+
setuptools/config/__init__.py,sha256=HVZX0i-bM5lIfhej4Vck0o8ZM6W6w6MEXLqCXcC9lYI,1498
|
| 221 |
+
setuptools/config/_apply_pyprojecttoml.py,sha256=cd4zROhZKqDAsWYRZ0vL2fu35iWVPyjFo72yJRyzYjo,14143
|
| 222 |
+
setuptools/config/expand.py,sha256=loOney9Z532Bv2lCrZpLokWR0vYMZrXDotVs76dLY5E,16401
|
| 223 |
+
setuptools/config/pyprojecttoml.py,sha256=HlpXC6uC0lx7EmAMdCOZywoAuHVz5ol7ZULaFMjorqg,17490
|
| 224 |
+
setuptools/config/setupcfg.py,sha256=sVN1wSeUaIFXhkM-V8F-fdB5IwzYMtm2-AMAxmx0ZdY,26184
|
| 225 |
+
setuptools/config/_validate_pyproject/__init__.py,sha256=5YXPW1sabVn5jpZ25sUjeF6ij3_4odJiwUWi4nRD2Dc,1038
|
| 226 |
+
setuptools/config/_validate_pyproject/error_reporting.py,sha256=vWiDs0hjlCBjZ_g4Xszsh97lIP9M4_JaLQ6MCQ26W9U,11266
|
| 227 |
+
setuptools/config/_validate_pyproject/extra_validations.py,sha256=wHzrgfdZUMRPBR1ke1lg5mhqRsBSbjEYOMsuFXQH9jY,1153
|
| 228 |
+
setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py,sha256=w749JgqKi8clBFcObdcbZVqsmF4oJ_QByhZ1SGbUFNw,1612
|
| 229 |
+
setuptools/config/_validate_pyproject/fastjsonschema_validations.py,sha256=YZrDSH0fbVZIeHkAaJD1MtYn19dYCOKlsAcEXuMgegg,274908
|
| 230 |
+
setuptools/config/_validate_pyproject/formats.py,sha256=-3f_VtIrcgY95yILC5-o-jh51Woj9Q0RhL3bmbOjJ-E,9160
|
| 231 |
+
setuptools/extern/__init__.py,sha256=Ym7fkCaybFYoPEZl0fFH4uZBjoMrl8rmOpC617v9EsA,2539
|
| 232 |
+
setuptools-68.2.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
|
| 233 |
+
setuptools-68.2.0.dist-info/METADATA,sha256=qX_MLQnFIKf7lHRVpfnqfgZ5JbCGhzgA5UQvxEWry40,6263
|
| 234 |
+
setuptools-68.2.0.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
|
| 235 |
+
setuptools-68.2.0.dist-info/entry_points.txt,sha256=Fe-UZkzgLTUZQOH94hbLTyP4HxM1nxlMuEZ_rS6zNnE,2676
|
| 236 |
+
setuptools-68.2.0.dist-info/top_level.txt,sha256=d9yL39v_W7qmKDDSH6sT4bE0j_Ls1M3P161OGgdsm4g,41
|
| 237 |
+
setuptools-68.2.0.dist-info/RECORD,,
|
| 238 |
+
setuptools\_distutils\py38compat.cpython-310.pyc,,
|
| 239 |
+
pkg_resources\_vendor\importlib_resources\_adapters.cpython-310.pyc,,
|
| 240 |
+
pkg_resources\_vendor\packaging\__init__.cpython-310.pyc,,
|
| 241 |
+
setuptools\depends.cpython-310.pyc,,
|
| 242 |
+
setuptools\config\_validate_pyproject\__pycache__,,
|
| 243 |
+
setuptools\build_meta.cpython-310.pyc,,
|
| 244 |
+
setuptools\_vendor\__pycache__,,
|
| 245 |
+
setuptools\_distutils\debug.cpython-310.pyc,,
|
| 246 |
+
setuptools\_vendor\importlib_resources\_adapters.cpython-310.pyc,,
|
| 247 |
+
setuptools\command\setopt.cpython-310.pyc,,
|
| 248 |
+
setuptools\_vendor\packaging\__init__.cpython-310.pyc,,
|
| 249 |
+
setuptools\command\install_scripts.cpython-310.pyc,,
|
| 250 |
+
setuptools\_importlib.cpython-310.pyc,,
|
| 251 |
+
setuptools\_vendor\importlib_metadata\_functools.cpython-310.pyc,,
|
| 252 |
+
setuptools\_vendor\jaraco\text\__pycache__,,
|
| 253 |
+
setuptools\_distutils\command\install_data.cpython-310.pyc,,
|
| 254 |
+
_distutils_hack\__pycache__,,
|
| 255 |
+
setuptools\config\setupcfg.cpython-310.pyc,,
|
| 256 |
+
setuptools\command\install_lib.cpython-310.pyc,,
|
| 257 |
+
setuptools\_vendor\importlib_metadata\_compat.cpython-310.pyc,,
|
| 258 |
+
setuptools\_itertools.cpython-310.pyc,,
|
| 259 |
+
setuptools\__pycache__,,
|
| 260 |
+
setuptools\command\__init__.cpython-310.pyc,,
|
| 261 |
+
setuptools\_vendor\importlib_metadata\_meta.cpython-310.pyc,,
|
| 262 |
+
setuptools\_entry_points.cpython-310.pyc,,
|
| 263 |
+
setuptools\_distutils\dir_util.cpython-310.pyc,,
|
| 264 |
+
setuptools\_vendor\importlib_metadata\__init__.cpython-310.pyc,,
|
| 265 |
+
pkg_resources\_vendor\packaging\version.cpython-310.pyc,,
|
| 266 |
+
pkg_resources\_vendor\platformdirs\macos.cpython-310.pyc,,
|
| 267 |
+
setuptools\dep_util.cpython-310.pyc,,
|
| 268 |
+
pkg_resources\_vendor\more_itertools\__init__.cpython-310.pyc,,
|
| 269 |
+
setuptools\_vendor\more_itertools\__init__.cpython-310.pyc,,
|
| 270 |
+
setuptools-68.2.0.virtualenv,,
|
| 271 |
+
pkg_resources\_vendor\zipp.cpython-310.pyc,,
|
| 272 |
+
setuptools\_distutils\__init__.cpython-310.pyc,,
|
| 273 |
+
pkg_resources\_vendor\packaging\metadata.cpython-310.pyc,,
|
| 274 |
+
setuptools\_vendor\packaging\metadata.cpython-310.pyc,,
|
| 275 |
+
setuptools\monkey.cpython-310.pyc,,
|
| 276 |
+
setuptools\extern\__pycache__,,
|
| 277 |
+
setuptools\_vendor\importlib_metadata\_adapters.cpython-310.pyc,,
|
| 278 |
+
setuptools\_distutils\cygwinccompiler.cpython-310.pyc,,
|
| 279 |
+
setuptools\_distutils\command\config.cpython-310.pyc,,
|
| 280 |
+
setuptools\_distutils\command\bdist_rpm.cpython-310.pyc,,
|
| 281 |
+
setuptools\_distutils\command\build_clib.cpython-310.pyc,,
|
| 282 |
+
setuptools\command\_requirestxt.cpython-310.pyc,,
|
| 283 |
+
pkg_resources\_vendor\platformdirs\unix.cpython-310.pyc,,
|
| 284 |
+
pkg_resources\_vendor\platformdirs\__pycache__,,
|
| 285 |
+
setuptools\glob.cpython-310.pyc,,
|
| 286 |
+
setuptools\_vendor\importlib_metadata\_collections.cpython-310.pyc,,
|
| 287 |
+
setuptools\command\upload_docs.cpython-310.pyc,,
|
| 288 |
+
pkg_resources\_vendor\importlib_resources\_common.cpython-310.pyc,,
|
| 289 |
+
pkg_resources\_vendor\jaraco\__pycache__,,
|
| 290 |
+
setuptools\msvc.cpython-310.pyc,,
|
| 291 |
+
setuptools\warnings.cpython-310.pyc,,
|
| 292 |
+
setuptools\command\build_py.cpython-310.pyc,,
|
| 293 |
+
setuptools\_distutils\log.cpython-310.pyc,,
|
| 294 |
+
setuptools\command\install.cpython-310.pyc,,
|
| 295 |
+
pkg_resources\_vendor\typing_extensions.cpython-310.pyc,,
|
| 296 |
+
setuptools\_vendor\ordered_set.cpython-310.pyc,,
|
| 297 |
+
setuptools\_vendor\jaraco\__pycache__,,
|
| 298 |
+
setuptools\_vendor\importlib_resources\_common.cpython-310.pyc,,
|
| 299 |
+
setuptools\_vendor\tomli\_types.cpython-310.pyc,,
|
| 300 |
+
setuptools\_distutils\spawn.cpython-310.pyc,,
|
| 301 |
+
setuptools\_distutils\command\__pycache__,,
|
| 302 |
+
pkg_resources\_vendor\importlib_resources\_legacy.cpython-310.pyc,,
|
| 303 |
+
setuptools\_vendor\importlib_resources\_legacy.cpython-310.pyc,,
|
| 304 |
+
setuptools\_distutils\text_file.cpython-310.pyc,,
|
| 305 |
+
setuptools\_vendor\tomli\_parser.cpython-310.pyc,,
|
| 306 |
+
setuptools\sandbox.cpython-310.pyc,,
|
| 307 |
+
setuptools\_distutils\archive_util.cpython-310.pyc,,
|
| 308 |
+
pkg_resources\_vendor\packaging\markers.cpython-310.pyc,,
|
| 309 |
+
setuptools\_vendor\packaging\markers.cpython-310.pyc,,
|
| 310 |
+
pkg_resources\__pycache__,,
|
| 311 |
+
setuptools\command\test.cpython-310.pyc,,
|
| 312 |
+
pkg_resources\_vendor\packaging\utils.cpython-310.pyc,,
|
| 313 |
+
setuptools\_distutils\unixccompiler.cpython-310.pyc,,
|
| 314 |
+
setuptools\_distutils\command\install_egg_info.cpython-310.pyc,,
|
| 315 |
+
setuptools\_vendor\tomli\__pycache__,,
|
| 316 |
+
setuptools\_reqs.cpython-310.pyc,,
|
| 317 |
+
setuptools\_vendor\packaging\utils.cpython-310.pyc,,
|
| 318 |
+
setuptools\command\upload.cpython-310.pyc,,
|
| 319 |
+
setuptools\_distutils\command\build_ext.cpython-310.pyc,,
|
| 320 |
+
setuptools\_distutils\command\build_scripts.cpython-310.pyc,,
|
| 321 |
+
setuptools-68.2.0.dist-info\__pycache__,,
|
| 322 |
+
pkg_resources\_vendor\packaging\_tokenizer.cpython-310.pyc,,
|
| 323 |
+
setuptools\command\rotate.cpython-310.pyc,,
|
| 324 |
+
setuptools\_distutils\command\sdist.cpython-310.pyc,,
|
| 325 |
+
setuptools\config\_validate_pyproject\__init__.cpython-310.pyc,,
|
| 326 |
+
setuptools\windows_support.cpython-310.pyc,,
|
| 327 |
+
setuptools\_vendor\packaging\_tokenizer.cpython-310.pyc,,
|
| 328 |
+
_distutils_hack\override.cpython-310.pyc,,
|
| 329 |
+
setuptools\config\_apply_pyprojecttoml.cpython-310.pyc,,
|
| 330 |
+
setuptools\command\develop.cpython-310.pyc,,
|
| 331 |
+
setuptools\logging.cpython-310.pyc,,
|
| 332 |
+
setuptools\_distutils\_functools.cpython-310.pyc,,
|
| 333 |
+
setuptools\command\easy_install.cpython-310.pyc,,
|
| 334 |
+
setuptools\_distutils\command\bdist_dumb.cpython-310.pyc,,
|
| 335 |
+
setuptools\_vendor\packaging\version.cpython-310.pyc,,
|
| 336 |
+
setuptools\_core_metadata.cpython-310.pyc,,
|
| 337 |
+
pkg_resources\_vendor\__init__.cpython-310.pyc,,
|
| 338 |
+
setuptools\_distutils\file_util.cpython-310.pyc,,
|
| 339 |
+
setuptools\_distutils\errors.cpython-310.pyc,,
|
| 340 |
+
pkg_resources\_vendor\packaging\requirements.cpython-310.pyc,,
|
| 341 |
+
setuptools\_vendor\zipp.cpython-310.pyc,,
|
| 342 |
+
setuptools\_vendor\jaraco\text\__init__.cpython-310.pyc,,
|
| 343 |
+
setuptools\_vendor\packaging\requirements.cpython-310.pyc,,
|
| 344 |
+
pkg_resources\_vendor\packaging\_parser.cpython-310.pyc,,
|
| 345 |
+
setuptools\_vendor\packaging\_parser.cpython-310.pyc,,
|
| 346 |
+
pkg_resources\_vendor\importlib_resources\abc.cpython-310.pyc,,
|
| 347 |
+
setuptools\_vendor\importlib_resources\abc.cpython-310.pyc,,
|
| 348 |
+
setuptools\_distutils\command\register.cpython-310.pyc,,
|
| 349 |
+
setuptools\py312compat.cpython-310.pyc,,
|
| 350 |
+
pkg_resources\_vendor\packaging\__pycache__,,
|
| 351 |
+
setuptools\_vendor\packaging\__pycache__,,
|
| 352 |
+
setuptools\config\expand.cpython-310.pyc,,
|
| 353 |
+
setuptools\_distutils\dist.cpython-310.pyc,,
|
| 354 |
+
setuptools\command\saveopts.cpython-310.pyc,,
|
| 355 |
+
setuptools\config\_validate_pyproject\extra_validations.cpython-310.pyc,,
|
| 356 |
+
setuptools\_distutils\command\check.cpython-310.pyc,,
|
| 357 |
+
setuptools\_vendor\typing_extensions.cpython-310.pyc,,
|
| 358 |
+
setuptools\_distutils\_msvccompiler.cpython-310.pyc,,
|
| 359 |
+
setuptools\config\pyprojecttoml.cpython-310.pyc,,
|
| 360 |
+
setuptools\_distutils\sysconfig.cpython-310.pyc,,
|
| 361 |
+
setuptools\config\_validate_pyproject\error_reporting.cpython-310.pyc,,
|
| 362 |
+
setuptools\_distutils\command\build.cpython-310.pyc,,
|
| 363 |
+
pkg_resources\_vendor\platformdirs\windows.cpython-310.pyc,,
|
| 364 |
+
setuptools\_vendor\importlib_metadata\_itertools.cpython-310.pyc,,
|
| 365 |
+
setuptools\_distutils\version.cpython-310.pyc,,
|
| 366 |
+
pkg_resources\_vendor\packaging\_structures.cpython-310.pyc,,
|
| 367 |
+
setuptools\_distutils\cmd.cpython-310.pyc,,
|
| 368 |
+
setuptools\unicode_utils.cpython-310.pyc,,
|
| 369 |
+
setuptools\_distutils\_collections.cpython-310.pyc,,
|
| 370 |
+
pkg_resources\_vendor\platformdirs\__main__.cpython-310.pyc,,
|
| 371 |
+
pkg_resources\extern\__init__.cpython-310.pyc,,
|
| 372 |
+
setuptools\_vendor\importlib_metadata\__pycache__,,
|
| 373 |
+
setuptools\_vendor\packaging\_structures.cpython-310.pyc,,
|
| 374 |
+
pkg_resources\_vendor\platformdirs\__init__.cpython-310.pyc,,
|
| 375 |
+
setuptools\_vendor\importlib_metadata\_text.cpython-310.pyc,,
|
| 376 |
+
setuptools\installer.cpython-310.pyc,,
|
| 377 |
+
pkg_resources\_vendor\packaging\tags.cpython-310.pyc,,
|
| 378 |
+
pkg_resources\_vendor\jaraco\text\__init__.cpython-310.pyc,,
|
| 379 |
+
setuptools\_imp.cpython-310.pyc,,
|
| 380 |
+
setuptools\_vendor\packaging\tags.cpython-310.pyc,,
|
| 381 |
+
pkg_resources\_vendor\jaraco\__init__.cpython-310.pyc,,
|
| 382 |
+
setuptools\_vendor\jaraco\__init__.cpython-310.pyc,,
|
| 383 |
+
setuptools\_distutils\command\install_scripts.cpython-310.pyc,,
|
| 384 |
+
setuptools\_distutils\command\install_headers.cpython-310.pyc,,
|
| 385 |
+
setuptools\_distutils\dep_util.cpython-310.pyc,,
|
| 386 |
+
setuptools\_distutils\util.cpython-310.pyc,,
|
| 387 |
+
setuptools\_distutils\msvccompiler.cpython-310.pyc,,
|
| 388 |
+
setuptools\_distutils\command\install_lib.cpython-310.pyc,,
|
| 389 |
+
setuptools\_distutils\versionpredicate.cpython-310.pyc,,
|
| 390 |
+
setuptools\_distutils\command\__init__.cpython-310.pyc,,
|
| 391 |
+
pkg_resources\_vendor\more_itertools\more.cpython-310.pyc,,
|
| 392 |
+
pkg_resources\_vendor\packaging\_manylinux.cpython-310.pyc,,
|
| 393 |
+
setuptools\_vendor\more_itertools\more.cpython-310.pyc,,
|
| 394 |
+
setuptools\_vendor\packaging\_manylinux.cpython-310.pyc,,
|
| 395 |
+
pkg_resources\_vendor\importlib_resources\_itertools.cpython-310.pyc,,
|
| 396 |
+
setuptools\_vendor\importlib_resources\_itertools.cpython-310.pyc,,
|
| 397 |
+
pkg_resources\_vendor\importlib_resources\__pycache__,,
|
| 398 |
+
setuptools\wheel.cpython-310.pyc,,
|
| 399 |
+
setuptools\_vendor\__init__.cpython-310.pyc,,
|
| 400 |
+
setuptools\_vendor\importlib_resources\__pycache__,,
|
| 401 |
+
setuptools\_vendor\importlib_metadata\_py39compat.cpython-310.pyc,,
|
| 402 |
+
pkg_resources\_vendor\jaraco\functools.cpython-310.pyc,,
|
| 403 |
+
setuptools\package_index.cpython-310.pyc,,
|
| 404 |
+
pkg_resources\__init__.cpython-310.pyc,,
|
| 405 |
+
setuptools\_vendor\jaraco\functools.cpython-310.pyc,,
|
| 406 |
+
setuptools\_vendor\tomli\__init__.cpython-310.pyc,,
|
| 407 |
+
setuptools\config\__pycache__,,
|
| 408 |
+
setuptools\command\alias.cpython-310.pyc,,
|
| 409 |
+
setuptools\_distutils\command\clean.cpython-310.pyc,,
|
| 410 |
+
setuptools\errors.cpython-310.pyc,,
|
| 411 |
+
setuptools-68.2.0.dist-info\INSTALLER,,
|
| 412 |
+
setuptools\command\bdist_rpm.cpython-310.pyc,,
|
| 413 |
+
_distutils_hack\__init__.cpython-310.pyc,,
|
| 414 |
+
setuptools\_distutils\command\py37compat.cpython-310.pyc,,
|
| 415 |
+
pkg_resources\_vendor\importlib_resources\readers.cpython-310.pyc,,
|
| 416 |
+
setuptools\_vendor\importlib_resources\readers.cpython-310.pyc,,
|
| 417 |
+
setuptools\__init__.cpython-310.pyc,,
|
| 418 |
+
pkg_resources\_vendor\packaging\_musllinux.cpython-310.pyc,,
|
| 419 |
+
setuptools\_vendor\packaging\_musllinux.cpython-310.pyc,,
|
| 420 |
+
setuptools\_distutils\config.cpython-310.pyc,,
|
| 421 |
+
setuptools\_distutils\extension.cpython-310.pyc,,
|
| 422 |
+
setuptools\dist.cpython-310.pyc,,
|
| 423 |
+
setuptools\_distutils\ccompiler.cpython-310.pyc,,
|
| 424 |
+
setuptools\_distutils\fancy_getopt.cpython-310.pyc,,
|
| 425 |
+
pkg_resources\_vendor\importlib_resources\simple.cpython-310.pyc,,
|
| 426 |
+
setuptools\_vendor\importlib_resources\simple.cpython-310.pyc,,
|
| 427 |
+
setuptools\_distutils\command\_framework_compat.cpython-310.pyc,,
|
| 428 |
+
setuptools\_distutils\command\build_py.cpython-310.pyc,,
|
| 429 |
+
setuptools\_distutils\command\install.cpython-310.pyc,,
|
| 430 |
+
setuptools\command\__pycache__,,
|
| 431 |
+
setuptools\config\_validate_pyproject\fastjsonschema_exceptions.cpython-310.pyc,,
|
| 432 |
+
setuptools\extern\__init__.cpython-310.pyc,,
|
| 433 |
+
setuptools\version.cpython-310.pyc,,
|
| 434 |
+
pkg_resources\_vendor\__pycache__,,
|
| 435 |
+
pkg_resources\_vendor\platformdirs\android.cpython-310.pyc,,
|
| 436 |
+
setuptools\_normalization.cpython-310.pyc,,
|
| 437 |
+
pkg_resources\_vendor\more_itertools\__pycache__,,
|
| 438 |
+
setuptools\command\install_egg_info.cpython-310.pyc,,
|
| 439 |
+
setuptools\_vendor\more_itertools\__pycache__,,
|
| 440 |
+
pkg_resources\_vendor\platformdirs\api.cpython-310.pyc,,
|
| 441 |
+
setuptools\command\bdist_egg.cpython-310.pyc,,
|
| 442 |
+
setuptools\namespaces.cpython-310.pyc,,
|
| 443 |
+
setuptools\_distutils\__pycache__,,
|
| 444 |
+
setuptools\command\build_ext.cpython-310.pyc,,
|
| 445 |
+
setuptools\command\egg_info.cpython-310.pyc,,
|
| 446 |
+
setuptools\launch.cpython-310.pyc,,
|
| 447 |
+
setuptools\command\sdist.cpython-310.pyc,,
|
| 448 |
+
setuptools\_distutils\bcppcompiler.cpython-310.pyc,,
|
| 449 |
+
setuptools\_distutils\py39compat.cpython-310.pyc,,
|
| 450 |
+
setuptools\archive_util.cpython-310.pyc,,
|
| 451 |
+
setuptools\_distutils\command\upload.cpython-310.pyc,,
|
| 452 |
+
setuptools\command\editable_wheel.cpython-310.pyc,,
|
| 453 |
+
setuptools\discovery.cpython-310.pyc,,
|
| 454 |
+
pkg_resources\_vendor\platformdirs\version.cpython-310.pyc,,
|
| 455 |
+
setuptools\command\register.cpython-310.pyc,,
|
| 456 |
+
pkg_resources\_vendor\packaging\_elffile.cpython-310.pyc,,
|
| 457 |
+
setuptools\_vendor\packaging\_elffile.cpython-310.pyc,,
|
| 458 |
+
setuptools\_vendor\tomli\_re.cpython-310.pyc,,
|
| 459 |
+
pkg_resources\extern\__pycache__,,
|
| 460 |
+
setuptools\_distutils\command\bdist.cpython-310.pyc,,
|
| 461 |
+
pkg_resources\_vendor\importlib_resources\_compat.cpython-310.pyc,,
|
| 462 |
+
setuptools\_distutils\filelist.cpython-310.pyc,,
|
| 463 |
+
setuptools\_path.cpython-310.pyc,,
|
| 464 |
+
setuptools\command\build_clib.cpython-310.pyc,,
|
| 465 |
+
setuptools\_vendor\importlib_resources\_compat.cpython-310.pyc,,
|
| 466 |
+
setuptools\_distutils\_macos_compat.cpython-310.pyc,,
|
| 467 |
+
pkg_resources\_vendor\importlib_resources\__init__.cpython-310.pyc,,
|
| 468 |
+
pkg_resources\_vendor\jaraco\text\__pycache__,,
|
| 469 |
+
setuptools\_vendor\importlib_resources\__init__.cpython-310.pyc,,
|
| 470 |
+
setuptools\_distutils\core.cpython-310.pyc,,
|
| 471 |
+
setuptools\_distutils\msvc9compiler.cpython-310.pyc,,
|
| 472 |
+
pkg_resources\_vendor\jaraco\context.cpython-310.pyc,,
|
| 473 |
+
setuptools\config\_validate_pyproject\formats.cpython-310.pyc,,
|
| 474 |
+
setuptools\_vendor\jaraco\context.cpython-310.pyc,,
|
| 475 |
+
pkg_resources\_vendor\more_itertools\recipes.cpython-310.pyc,,
|
| 476 |
+
setuptools\command\build.cpython-310.pyc,,
|
| 477 |
+
setuptools\config\__init__.cpython-310.pyc,,
|
| 478 |
+
setuptools\extension.cpython-310.pyc,,
|
| 479 |
+
setuptools\_distutils\_log.cpython-310.pyc,,
|
| 480 |
+
setuptools\config\_validate_pyproject\fastjsonschema_validations.cpython-310.pyc,,
|
| 481 |
+
setuptools\_vendor\more_itertools\recipes.cpython-310.pyc,,
|
| 482 |
+
setuptools\command\dist_info.cpython-310.pyc,,
|
| 483 |
+
pkg_resources\_vendor\packaging\specifiers.cpython-310.pyc,,
|
| 484 |
+
setuptools\_vendor\packaging\specifiers.cpython-310.pyc,,
|
.venv/Lib/site-packages/setuptools-68.2.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.41.2)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
.venv/Lib/site-packages/setuptools-68.2.0.dist-info/entry_points.txt
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[distutils.commands]
|
| 2 |
+
alias = setuptools.command.alias:alias
|
| 3 |
+
bdist_egg = setuptools.command.bdist_egg:bdist_egg
|
| 4 |
+
bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
|
| 5 |
+
build = setuptools.command.build:build
|
| 6 |
+
build_clib = setuptools.command.build_clib:build_clib
|
| 7 |
+
build_ext = setuptools.command.build_ext:build_ext
|
| 8 |
+
build_py = setuptools.command.build_py:build_py
|
| 9 |
+
develop = setuptools.command.develop:develop
|
| 10 |
+
dist_info = setuptools.command.dist_info:dist_info
|
| 11 |
+
easy_install = setuptools.command.easy_install:easy_install
|
| 12 |
+
editable_wheel = setuptools.command.editable_wheel:editable_wheel
|
| 13 |
+
egg_info = setuptools.command.egg_info:egg_info
|
| 14 |
+
install = setuptools.command.install:install
|
| 15 |
+
install_egg_info = setuptools.command.install_egg_info:install_egg_info
|
| 16 |
+
install_lib = setuptools.command.install_lib:install_lib
|
| 17 |
+
install_scripts = setuptools.command.install_scripts:install_scripts
|
| 18 |
+
rotate = setuptools.command.rotate:rotate
|
| 19 |
+
saveopts = setuptools.command.saveopts:saveopts
|
| 20 |
+
sdist = setuptools.command.sdist:sdist
|
| 21 |
+
setopt = setuptools.command.setopt:setopt
|
| 22 |
+
test = setuptools.command.test:test
|
| 23 |
+
upload_docs = setuptools.command.upload_docs:upload_docs
|
| 24 |
+
|
| 25 |
+
[distutils.setup_keywords]
|
| 26 |
+
dependency_links = setuptools.dist:assert_string_list
|
| 27 |
+
eager_resources = setuptools.dist:assert_string_list
|
| 28 |
+
entry_points = setuptools.dist:check_entry_points
|
| 29 |
+
exclude_package_data = setuptools.dist:check_package_data
|
| 30 |
+
extras_require = setuptools.dist:check_extras
|
| 31 |
+
include_package_data = setuptools.dist:assert_bool
|
| 32 |
+
install_requires = setuptools.dist:check_requirements
|
| 33 |
+
namespace_packages = setuptools.dist:check_nsp
|
| 34 |
+
package_data = setuptools.dist:check_package_data
|
| 35 |
+
packages = setuptools.dist:check_packages
|
| 36 |
+
python_requires = setuptools.dist:check_specifier
|
| 37 |
+
setup_requires = setuptools.dist:check_requirements
|
| 38 |
+
test_loader = setuptools.dist:check_importable
|
| 39 |
+
test_runner = setuptools.dist:check_importable
|
| 40 |
+
test_suite = setuptools.dist:check_test_suite
|
| 41 |
+
tests_require = setuptools.dist:check_requirements
|
| 42 |
+
use_2to3 = setuptools.dist:invalid_unless_false
|
| 43 |
+
zip_safe = setuptools.dist:assert_bool
|
| 44 |
+
|
| 45 |
+
[egg_info.writers]
|
| 46 |
+
PKG-INFO = setuptools.command.egg_info:write_pkg_info
|
| 47 |
+
dependency_links.txt = setuptools.command.egg_info:overwrite_arg
|
| 48 |
+
eager_resources.txt = setuptools.command.egg_info:overwrite_arg
|
| 49 |
+
entry_points.txt = setuptools.command.egg_info:write_entries
|
| 50 |
+
namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
|
| 51 |
+
requires.txt = setuptools.command.egg_info:write_requirements
|
| 52 |
+
top_level.txt = setuptools.command.egg_info:write_toplevel_names
|
| 53 |
+
|
| 54 |
+
[setuptools.finalize_distribution_options]
|
| 55 |
+
keywords = setuptools.dist:Distribution._finalize_setup_keywords
|
| 56 |
+
parent_finalize = setuptools.dist:_Distribution.finalize_options
|
.venv/Lib/site-packages/setuptools-68.2.0.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
_distutils_hack
|
| 2 |
+
pkg_resources
|
| 3 |
+
setuptools
|
.venv/Lib/site-packages/setuptools/_distutils/__init__.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
import importlib
|
| 3 |
+
|
| 4 |
+
__version__, _, _ = sys.version.partition(' ')
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
try:
|
| 8 |
+
# Allow Debian and pkgsrc (only) to customize system
|
| 9 |
+
# behavior. Ref pypa/distutils#2 and pypa/distutils#16.
|
| 10 |
+
# This hook is deprecated and no other environments
|
| 11 |
+
# should use it.
|
| 12 |
+
importlib.import_module('_distutils_system_mod')
|
| 13 |
+
except ImportError:
|
| 14 |
+
pass
|
.venv/Lib/site-packages/setuptools/_distutils/_collections.py
ADDED
|
@@ -0,0 +1,194 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import functools
|
| 3 |
+
import itertools
|
| 4 |
+
import operator
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
# from jaraco.collections 3.5.1
|
| 8 |
+
class DictStack(list, collections.abc.Mapping):
|
| 9 |
+
"""
|
| 10 |
+
A stack of dictionaries that behaves as a view on those dictionaries,
|
| 11 |
+
giving preference to the last.
|
| 12 |
+
|
| 13 |
+
>>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)])
|
| 14 |
+
>>> stack['a']
|
| 15 |
+
2
|
| 16 |
+
>>> stack['b']
|
| 17 |
+
2
|
| 18 |
+
>>> stack['c']
|
| 19 |
+
2
|
| 20 |
+
>>> len(stack)
|
| 21 |
+
3
|
| 22 |
+
>>> stack.push(dict(a=3))
|
| 23 |
+
>>> stack['a']
|
| 24 |
+
3
|
| 25 |
+
>>> set(stack.keys()) == set(['a', 'b', 'c'])
|
| 26 |
+
True
|
| 27 |
+
>>> set(stack.items()) == set([('a', 3), ('b', 2), ('c', 2)])
|
| 28 |
+
True
|
| 29 |
+
>>> dict(**stack) == dict(stack) == dict(a=3, c=2, b=2)
|
| 30 |
+
True
|
| 31 |
+
>>> d = stack.pop()
|
| 32 |
+
>>> stack['a']
|
| 33 |
+
2
|
| 34 |
+
>>> d = stack.pop()
|
| 35 |
+
>>> stack['a']
|
| 36 |
+
1
|
| 37 |
+
>>> stack.get('b', None)
|
| 38 |
+
>>> 'c' in stack
|
| 39 |
+
True
|
| 40 |
+
"""
|
| 41 |
+
|
| 42 |
+
def __iter__(self):
|
| 43 |
+
dicts = list.__iter__(self)
|
| 44 |
+
return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts)))
|
| 45 |
+
|
| 46 |
+
def __getitem__(self, key):
|
| 47 |
+
for scope in reversed(tuple(list.__iter__(self))):
|
| 48 |
+
if key in scope:
|
| 49 |
+
return scope[key]
|
| 50 |
+
raise KeyError(key)
|
| 51 |
+
|
| 52 |
+
push = list.append
|
| 53 |
+
|
| 54 |
+
def __contains__(self, other):
|
| 55 |
+
return collections.abc.Mapping.__contains__(self, other)
|
| 56 |
+
|
| 57 |
+
def __len__(self):
|
| 58 |
+
return len(list(iter(self)))
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
# from jaraco.collections 3.7
|
| 62 |
+
class RangeMap(dict):
|
| 63 |
+
"""
|
| 64 |
+
A dictionary-like object that uses the keys as bounds for a range.
|
| 65 |
+
Inclusion of the value for that range is determined by the
|
| 66 |
+
key_match_comparator, which defaults to less-than-or-equal.
|
| 67 |
+
A value is returned for a key if it is the first key that matches in
|
| 68 |
+
the sorted list of keys.
|
| 69 |
+
|
| 70 |
+
One may supply keyword parameters to be passed to the sort function used
|
| 71 |
+
to sort keys (i.e. key, reverse) as sort_params.
|
| 72 |
+
|
| 73 |
+
Let's create a map that maps 1-3 -> 'a', 4-6 -> 'b'
|
| 74 |
+
|
| 75 |
+
>>> r = RangeMap({3: 'a', 6: 'b'}) # boy, that was easy
|
| 76 |
+
>>> r[1], r[2], r[3], r[4], r[5], r[6]
|
| 77 |
+
('a', 'a', 'a', 'b', 'b', 'b')
|
| 78 |
+
|
| 79 |
+
Even float values should work so long as the comparison operator
|
| 80 |
+
supports it.
|
| 81 |
+
|
| 82 |
+
>>> r[4.5]
|
| 83 |
+
'b'
|
| 84 |
+
|
| 85 |
+
But you'll notice that the way rangemap is defined, it must be open-ended
|
| 86 |
+
on one side.
|
| 87 |
+
|
| 88 |
+
>>> r[0]
|
| 89 |
+
'a'
|
| 90 |
+
>>> r[-1]
|
| 91 |
+
'a'
|
| 92 |
+
|
| 93 |
+
One can close the open-end of the RangeMap by using undefined_value
|
| 94 |
+
|
| 95 |
+
>>> r = RangeMap({0: RangeMap.undefined_value, 3: 'a', 6: 'b'})
|
| 96 |
+
>>> r[0]
|
| 97 |
+
Traceback (most recent call last):
|
| 98 |
+
...
|
| 99 |
+
KeyError: 0
|
| 100 |
+
|
| 101 |
+
One can get the first or last elements in the range by using RangeMap.Item
|
| 102 |
+
|
| 103 |
+
>>> last_item = RangeMap.Item(-1)
|
| 104 |
+
>>> r[last_item]
|
| 105 |
+
'b'
|
| 106 |
+
|
| 107 |
+
.last_item is a shortcut for Item(-1)
|
| 108 |
+
|
| 109 |
+
>>> r[RangeMap.last_item]
|
| 110 |
+
'b'
|
| 111 |
+
|
| 112 |
+
Sometimes it's useful to find the bounds for a RangeMap
|
| 113 |
+
|
| 114 |
+
>>> r.bounds()
|
| 115 |
+
(0, 6)
|
| 116 |
+
|
| 117 |
+
RangeMap supports .get(key, default)
|
| 118 |
+
|
| 119 |
+
>>> r.get(0, 'not found')
|
| 120 |
+
'not found'
|
| 121 |
+
|
| 122 |
+
>>> r.get(7, 'not found')
|
| 123 |
+
'not found'
|
| 124 |
+
|
| 125 |
+
One often wishes to define the ranges by their left-most values,
|
| 126 |
+
which requires use of sort params and a key_match_comparator.
|
| 127 |
+
|
| 128 |
+
>>> r = RangeMap({1: 'a', 4: 'b'},
|
| 129 |
+
... sort_params=dict(reverse=True),
|
| 130 |
+
... key_match_comparator=operator.ge)
|
| 131 |
+
>>> r[1], r[2], r[3], r[4], r[5], r[6]
|
| 132 |
+
('a', 'a', 'a', 'b', 'b', 'b')
|
| 133 |
+
|
| 134 |
+
That wasn't nearly as easy as before, so an alternate constructor
|
| 135 |
+
is provided:
|
| 136 |
+
|
| 137 |
+
>>> r = RangeMap.left({1: 'a', 4: 'b', 7: RangeMap.undefined_value})
|
| 138 |
+
>>> r[1], r[2], r[3], r[4], r[5], r[6]
|
| 139 |
+
('a', 'a', 'a', 'b', 'b', 'b')
|
| 140 |
+
|
| 141 |
+
"""
|
| 142 |
+
|
| 143 |
+
def __init__(self, source, sort_params={}, key_match_comparator=operator.le):
|
| 144 |
+
dict.__init__(self, source)
|
| 145 |
+
self.sort_params = sort_params
|
| 146 |
+
self.match = key_match_comparator
|
| 147 |
+
|
| 148 |
+
@classmethod
|
| 149 |
+
def left(cls, source):
|
| 150 |
+
return cls(
|
| 151 |
+
source, sort_params=dict(reverse=True), key_match_comparator=operator.ge
|
| 152 |
+
)
|
| 153 |
+
|
| 154 |
+
def __getitem__(self, item):
|
| 155 |
+
sorted_keys = sorted(self.keys(), **self.sort_params)
|
| 156 |
+
if isinstance(item, RangeMap.Item):
|
| 157 |
+
result = self.__getitem__(sorted_keys[item])
|
| 158 |
+
else:
|
| 159 |
+
key = self._find_first_match_(sorted_keys, item)
|
| 160 |
+
result = dict.__getitem__(self, key)
|
| 161 |
+
if result is RangeMap.undefined_value:
|
| 162 |
+
raise KeyError(key)
|
| 163 |
+
return result
|
| 164 |
+
|
| 165 |
+
def get(self, key, default=None):
|
| 166 |
+
"""
|
| 167 |
+
Return the value for key if key is in the dictionary, else default.
|
| 168 |
+
If default is not given, it defaults to None, so that this method
|
| 169 |
+
never raises a KeyError.
|
| 170 |
+
"""
|
| 171 |
+
try:
|
| 172 |
+
return self[key]
|
| 173 |
+
except KeyError:
|
| 174 |
+
return default
|
| 175 |
+
|
| 176 |
+
def _find_first_match_(self, keys, item):
|
| 177 |
+
is_match = functools.partial(self.match, item)
|
| 178 |
+
matches = list(filter(is_match, keys))
|
| 179 |
+
if matches:
|
| 180 |
+
return matches[0]
|
| 181 |
+
raise KeyError(item)
|
| 182 |
+
|
| 183 |
+
def bounds(self):
|
| 184 |
+
sorted_keys = sorted(self.keys(), **self.sort_params)
|
| 185 |
+
return (sorted_keys[RangeMap.first_item], sorted_keys[RangeMap.last_item])
|
| 186 |
+
|
| 187 |
+
# some special values for the RangeMap
|
| 188 |
+
undefined_value = type('RangeValueUndefined', (), {})()
|
| 189 |
+
|
| 190 |
+
class Item(int):
|
| 191 |
+
"RangeMap Item"
|
| 192 |
+
|
| 193 |
+
first_item = Item(0)
|
| 194 |
+
last_item = Item(-1)
|
.venv/Lib/site-packages/setuptools/_distutils/_functools.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
# from jaraco.functools 3.5
|
| 5 |
+
def pass_none(func):
|
| 6 |
+
"""
|
| 7 |
+
Wrap func so it's not called if its first param is None
|
| 8 |
+
|
| 9 |
+
>>> print_text = pass_none(print)
|
| 10 |
+
>>> print_text('text')
|
| 11 |
+
text
|
| 12 |
+
>>> print_text(None)
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
@functools.wraps(func)
|
| 16 |
+
def wrapper(param, *args, **kwargs):
|
| 17 |
+
if param is not None:
|
| 18 |
+
return func(param, *args, **kwargs)
|
| 19 |
+
|
| 20 |
+
return wrapper
|
.venv/Lib/site-packages/setuptools/_distutils/_log.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
log = logging.getLogger()
|
.venv/Lib/site-packages/setuptools/_distutils/_macos_compat.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
import importlib
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
def bypass_compiler_fixup(cmd, args):
|
| 6 |
+
return cmd
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
if sys.platform == 'darwin':
|
| 10 |
+
compiler_fixup = importlib.import_module('_osx_support').compiler_fixup
|
| 11 |
+
else:
|
| 12 |
+
compiler_fixup = bypass_compiler_fixup
|
.venv/Lib/site-packages/setuptools/_distutils/_msvccompiler.py
ADDED
|
@@ -0,0 +1,568 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils._msvccompiler
|
| 2 |
+
|
| 3 |
+
Contains MSVCCompiler, an implementation of the abstract CCompiler class
|
| 4 |
+
for Microsoft Visual Studio 2015.
|
| 5 |
+
|
| 6 |
+
The module is compatible with VS 2015 and later. You can find legacy support
|
| 7 |
+
for older versions in distutils.msvc9compiler and distutils.msvccompiler.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
# Written by Perry Stoll
|
| 11 |
+
# hacked by Robin Becker and Thomas Heller to do a better job of
|
| 12 |
+
# finding DevStudio (through the registry)
|
| 13 |
+
# ported to VS 2005 and VS 2008 by Christian Heimes
|
| 14 |
+
# ported to VS 2015 by Steve Dower
|
| 15 |
+
|
| 16 |
+
import os
|
| 17 |
+
import subprocess
|
| 18 |
+
import contextlib
|
| 19 |
+
import warnings
|
| 20 |
+
import unittest.mock as mock
|
| 21 |
+
|
| 22 |
+
with contextlib.suppress(ImportError):
|
| 23 |
+
import winreg
|
| 24 |
+
|
| 25 |
+
from .errors import (
|
| 26 |
+
DistutilsExecError,
|
| 27 |
+
DistutilsPlatformError,
|
| 28 |
+
CompileError,
|
| 29 |
+
LibError,
|
| 30 |
+
LinkError,
|
| 31 |
+
)
|
| 32 |
+
from .ccompiler import CCompiler, gen_lib_options
|
| 33 |
+
from ._log import log
|
| 34 |
+
from .util import get_platform
|
| 35 |
+
|
| 36 |
+
from itertools import count
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def _find_vc2015():
|
| 40 |
+
try:
|
| 41 |
+
key = winreg.OpenKeyEx(
|
| 42 |
+
winreg.HKEY_LOCAL_MACHINE,
|
| 43 |
+
r"Software\Microsoft\VisualStudio\SxS\VC7",
|
| 44 |
+
access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY,
|
| 45 |
+
)
|
| 46 |
+
except OSError:
|
| 47 |
+
log.debug("Visual C++ is not registered")
|
| 48 |
+
return None, None
|
| 49 |
+
|
| 50 |
+
best_version = 0
|
| 51 |
+
best_dir = None
|
| 52 |
+
with key:
|
| 53 |
+
for i in count():
|
| 54 |
+
try:
|
| 55 |
+
v, vc_dir, vt = winreg.EnumValue(key, i)
|
| 56 |
+
except OSError:
|
| 57 |
+
break
|
| 58 |
+
if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir):
|
| 59 |
+
try:
|
| 60 |
+
version = int(float(v))
|
| 61 |
+
except (ValueError, TypeError):
|
| 62 |
+
continue
|
| 63 |
+
if version >= 14 and version > best_version:
|
| 64 |
+
best_version, best_dir = version, vc_dir
|
| 65 |
+
return best_version, best_dir
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def _find_vc2017():
|
| 69 |
+
"""Returns "15, path" based on the result of invoking vswhere.exe
|
| 70 |
+
If no install is found, returns "None, None"
|
| 71 |
+
|
| 72 |
+
The version is returned to avoid unnecessarily changing the function
|
| 73 |
+
result. It may be ignored when the path is not None.
|
| 74 |
+
|
| 75 |
+
If vswhere.exe is not available, by definition, VS 2017 is not
|
| 76 |
+
installed.
|
| 77 |
+
"""
|
| 78 |
+
root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
|
| 79 |
+
if not root:
|
| 80 |
+
return None, None
|
| 81 |
+
|
| 82 |
+
try:
|
| 83 |
+
path = subprocess.check_output(
|
| 84 |
+
[
|
| 85 |
+
os.path.join(
|
| 86 |
+
root, "Microsoft Visual Studio", "Installer", "vswhere.exe"
|
| 87 |
+
),
|
| 88 |
+
"-latest",
|
| 89 |
+
"-prerelease",
|
| 90 |
+
"-requires",
|
| 91 |
+
"Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
|
| 92 |
+
"-property",
|
| 93 |
+
"installationPath",
|
| 94 |
+
"-products",
|
| 95 |
+
"*",
|
| 96 |
+
],
|
| 97 |
+
encoding="mbcs",
|
| 98 |
+
errors="strict",
|
| 99 |
+
).strip()
|
| 100 |
+
except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
|
| 101 |
+
return None, None
|
| 102 |
+
|
| 103 |
+
path = os.path.join(path, "VC", "Auxiliary", "Build")
|
| 104 |
+
if os.path.isdir(path):
|
| 105 |
+
return 15, path
|
| 106 |
+
|
| 107 |
+
return None, None
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
PLAT_SPEC_TO_RUNTIME = {
|
| 111 |
+
'x86': 'x86',
|
| 112 |
+
'x86_amd64': 'x64',
|
| 113 |
+
'x86_arm': 'arm',
|
| 114 |
+
'x86_arm64': 'arm64',
|
| 115 |
+
}
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def _find_vcvarsall(plat_spec):
|
| 119 |
+
# bpo-38597: Removed vcruntime return value
|
| 120 |
+
_, best_dir = _find_vc2017()
|
| 121 |
+
|
| 122 |
+
if not best_dir:
|
| 123 |
+
best_version, best_dir = _find_vc2015()
|
| 124 |
+
|
| 125 |
+
if not best_dir:
|
| 126 |
+
log.debug("No suitable Visual C++ version found")
|
| 127 |
+
return None, None
|
| 128 |
+
|
| 129 |
+
vcvarsall = os.path.join(best_dir, "vcvarsall.bat")
|
| 130 |
+
if not os.path.isfile(vcvarsall):
|
| 131 |
+
log.debug("%s cannot be found", vcvarsall)
|
| 132 |
+
return None, None
|
| 133 |
+
|
| 134 |
+
return vcvarsall, None
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def _get_vc_env(plat_spec):
|
| 138 |
+
if os.getenv("DISTUTILS_USE_SDK"):
|
| 139 |
+
return {key.lower(): value for key, value in os.environ.items()}
|
| 140 |
+
|
| 141 |
+
vcvarsall, _ = _find_vcvarsall(plat_spec)
|
| 142 |
+
if not vcvarsall:
|
| 143 |
+
raise DistutilsPlatformError("Unable to find vcvarsall.bat")
|
| 144 |
+
|
| 145 |
+
try:
|
| 146 |
+
out = subprocess.check_output(
|
| 147 |
+
f'cmd /u /c "{vcvarsall}" {plat_spec} && set',
|
| 148 |
+
stderr=subprocess.STDOUT,
|
| 149 |
+
).decode('utf-16le', errors='replace')
|
| 150 |
+
except subprocess.CalledProcessError as exc:
|
| 151 |
+
log.error(exc.output)
|
| 152 |
+
raise DistutilsPlatformError(f"Error executing {exc.cmd}")
|
| 153 |
+
|
| 154 |
+
env = {
|
| 155 |
+
key.lower(): value
|
| 156 |
+
for key, _, value in (line.partition('=') for line in out.splitlines())
|
| 157 |
+
if key and value
|
| 158 |
+
}
|
| 159 |
+
|
| 160 |
+
return env
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
def _find_exe(exe, paths=None):
|
| 164 |
+
"""Return path to an MSVC executable program.
|
| 165 |
+
|
| 166 |
+
Tries to find the program in several places: first, one of the
|
| 167 |
+
MSVC program search paths from the registry; next, the directories
|
| 168 |
+
in the PATH environment variable. If any of those work, return an
|
| 169 |
+
absolute path that is known to exist. If none of them work, just
|
| 170 |
+
return the original program name, 'exe'.
|
| 171 |
+
"""
|
| 172 |
+
if not paths:
|
| 173 |
+
paths = os.getenv('path').split(os.pathsep)
|
| 174 |
+
for p in paths:
|
| 175 |
+
fn = os.path.join(os.path.abspath(p), exe)
|
| 176 |
+
if os.path.isfile(fn):
|
| 177 |
+
return fn
|
| 178 |
+
return exe
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
# A map keyed by get_platform() return values to values accepted by
|
| 182 |
+
# 'vcvarsall.bat'. Always cross-compile from x86 to work with the
|
| 183 |
+
# lighter-weight MSVC installs that do not include native 64-bit tools.
|
| 184 |
+
PLAT_TO_VCVARS = {
|
| 185 |
+
'win32': 'x86',
|
| 186 |
+
'win-amd64': 'x86_amd64',
|
| 187 |
+
'win-arm32': 'x86_arm',
|
| 188 |
+
'win-arm64': 'x86_arm64',
|
| 189 |
+
}
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
class MSVCCompiler(CCompiler):
|
| 193 |
+
"""Concrete class that implements an interface to Microsoft Visual C++,
|
| 194 |
+
as defined by the CCompiler abstract class."""
|
| 195 |
+
|
| 196 |
+
compiler_type = 'msvc'
|
| 197 |
+
|
| 198 |
+
# Just set this so CCompiler's constructor doesn't barf. We currently
|
| 199 |
+
# don't use the 'set_executables()' bureaucracy provided by CCompiler,
|
| 200 |
+
# as it really isn't necessary for this sort of single-compiler class.
|
| 201 |
+
# Would be nice to have a consistent interface with UnixCCompiler,
|
| 202 |
+
# though, so it's worth thinking about.
|
| 203 |
+
executables = {}
|
| 204 |
+
|
| 205 |
+
# Private class data (need to distinguish C from C++ source for compiler)
|
| 206 |
+
_c_extensions = ['.c']
|
| 207 |
+
_cpp_extensions = ['.cc', '.cpp', '.cxx']
|
| 208 |
+
_rc_extensions = ['.rc']
|
| 209 |
+
_mc_extensions = ['.mc']
|
| 210 |
+
|
| 211 |
+
# Needed for the filename generation methods provided by the
|
| 212 |
+
# base class, CCompiler.
|
| 213 |
+
src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions
|
| 214 |
+
res_extension = '.res'
|
| 215 |
+
obj_extension = '.obj'
|
| 216 |
+
static_lib_extension = '.lib'
|
| 217 |
+
shared_lib_extension = '.dll'
|
| 218 |
+
static_lib_format = shared_lib_format = '%s%s'
|
| 219 |
+
exe_extension = '.exe'
|
| 220 |
+
|
| 221 |
+
def __init__(self, verbose=0, dry_run=0, force=0):
|
| 222 |
+
super().__init__(verbose, dry_run, force)
|
| 223 |
+
# target platform (.plat_name is consistent with 'bdist')
|
| 224 |
+
self.plat_name = None
|
| 225 |
+
self.initialized = False
|
| 226 |
+
|
| 227 |
+
@classmethod
|
| 228 |
+
def _configure(cls, vc_env):
|
| 229 |
+
"""
|
| 230 |
+
Set class-level include/lib dirs.
|
| 231 |
+
"""
|
| 232 |
+
cls.include_dirs = cls._parse_path(vc_env.get('include', ''))
|
| 233 |
+
cls.library_dirs = cls._parse_path(vc_env.get('lib', ''))
|
| 234 |
+
|
| 235 |
+
@staticmethod
|
| 236 |
+
def _parse_path(val):
|
| 237 |
+
return [dir.rstrip(os.sep) for dir in val.split(os.pathsep) if dir]
|
| 238 |
+
|
| 239 |
+
def initialize(self, plat_name=None):
|
| 240 |
+
# multi-init means we would need to check platform same each time...
|
| 241 |
+
assert not self.initialized, "don't init multiple times"
|
| 242 |
+
if plat_name is None:
|
| 243 |
+
plat_name = get_platform()
|
| 244 |
+
# sanity check for platforms to prevent obscure errors later.
|
| 245 |
+
if plat_name not in PLAT_TO_VCVARS:
|
| 246 |
+
raise DistutilsPlatformError(
|
| 247 |
+
f"--plat-name must be one of {tuple(PLAT_TO_VCVARS)}"
|
| 248 |
+
)
|
| 249 |
+
|
| 250 |
+
# Get the vcvarsall.bat spec for the requested platform.
|
| 251 |
+
plat_spec = PLAT_TO_VCVARS[plat_name]
|
| 252 |
+
|
| 253 |
+
vc_env = _get_vc_env(plat_spec)
|
| 254 |
+
if not vc_env:
|
| 255 |
+
raise DistutilsPlatformError(
|
| 256 |
+
"Unable to find a compatible " "Visual Studio installation."
|
| 257 |
+
)
|
| 258 |
+
self._configure(vc_env)
|
| 259 |
+
|
| 260 |
+
self._paths = vc_env.get('path', '')
|
| 261 |
+
paths = self._paths.split(os.pathsep)
|
| 262 |
+
self.cc = _find_exe("cl.exe", paths)
|
| 263 |
+
self.linker = _find_exe("link.exe", paths)
|
| 264 |
+
self.lib = _find_exe("lib.exe", paths)
|
| 265 |
+
self.rc = _find_exe("rc.exe", paths) # resource compiler
|
| 266 |
+
self.mc = _find_exe("mc.exe", paths) # message compiler
|
| 267 |
+
self.mt = _find_exe("mt.exe", paths) # message compiler
|
| 268 |
+
|
| 269 |
+
self.preprocess_options = None
|
| 270 |
+
# bpo-38597: Always compile with dynamic linking
|
| 271 |
+
# Future releases of Python 3.x will include all past
|
| 272 |
+
# versions of vcruntime*.dll for compatibility.
|
| 273 |
+
self.compile_options = ['/nologo', '/O2', '/W3', '/GL', '/DNDEBUG', '/MD']
|
| 274 |
+
|
| 275 |
+
self.compile_options_debug = [
|
| 276 |
+
'/nologo',
|
| 277 |
+
'/Od',
|
| 278 |
+
'/MDd',
|
| 279 |
+
'/Zi',
|
| 280 |
+
'/W3',
|
| 281 |
+
'/D_DEBUG',
|
| 282 |
+
]
|
| 283 |
+
|
| 284 |
+
ldflags = ['/nologo', '/INCREMENTAL:NO', '/LTCG']
|
| 285 |
+
|
| 286 |
+
ldflags_debug = ['/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL']
|
| 287 |
+
|
| 288 |
+
self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1']
|
| 289 |
+
self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1']
|
| 290 |
+
self.ldflags_shared = [
|
| 291 |
+
*ldflags,
|
| 292 |
+
'/DLL',
|
| 293 |
+
'/MANIFEST:EMBED,ID=2',
|
| 294 |
+
'/MANIFESTUAC:NO',
|
| 295 |
+
]
|
| 296 |
+
self.ldflags_shared_debug = [
|
| 297 |
+
*ldflags_debug,
|
| 298 |
+
'/DLL',
|
| 299 |
+
'/MANIFEST:EMBED,ID=2',
|
| 300 |
+
'/MANIFESTUAC:NO',
|
| 301 |
+
]
|
| 302 |
+
self.ldflags_static = [*ldflags]
|
| 303 |
+
self.ldflags_static_debug = [*ldflags_debug]
|
| 304 |
+
|
| 305 |
+
self._ldflags = {
|
| 306 |
+
(CCompiler.EXECUTABLE, None): self.ldflags_exe,
|
| 307 |
+
(CCompiler.EXECUTABLE, False): self.ldflags_exe,
|
| 308 |
+
(CCompiler.EXECUTABLE, True): self.ldflags_exe_debug,
|
| 309 |
+
(CCompiler.SHARED_OBJECT, None): self.ldflags_shared,
|
| 310 |
+
(CCompiler.SHARED_OBJECT, False): self.ldflags_shared,
|
| 311 |
+
(CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug,
|
| 312 |
+
(CCompiler.SHARED_LIBRARY, None): self.ldflags_static,
|
| 313 |
+
(CCompiler.SHARED_LIBRARY, False): self.ldflags_static,
|
| 314 |
+
(CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug,
|
| 315 |
+
}
|
| 316 |
+
|
| 317 |
+
self.initialized = True
|
| 318 |
+
|
| 319 |
+
# -- Worker methods ------------------------------------------------
|
| 320 |
+
|
| 321 |
+
@property
|
| 322 |
+
def out_extensions(self):
|
| 323 |
+
return {
|
| 324 |
+
**super().out_extensions,
|
| 325 |
+
**{
|
| 326 |
+
ext: self.res_extension
|
| 327 |
+
for ext in self._rc_extensions + self._mc_extensions
|
| 328 |
+
},
|
| 329 |
+
}
|
| 330 |
+
|
| 331 |
+
def compile( # noqa: C901
|
| 332 |
+
self,
|
| 333 |
+
sources,
|
| 334 |
+
output_dir=None,
|
| 335 |
+
macros=None,
|
| 336 |
+
include_dirs=None,
|
| 337 |
+
debug=0,
|
| 338 |
+
extra_preargs=None,
|
| 339 |
+
extra_postargs=None,
|
| 340 |
+
depends=None,
|
| 341 |
+
):
|
| 342 |
+
if not self.initialized:
|
| 343 |
+
self.initialize()
|
| 344 |
+
compile_info = self._setup_compile(
|
| 345 |
+
output_dir, macros, include_dirs, sources, depends, extra_postargs
|
| 346 |
+
)
|
| 347 |
+
macros, objects, extra_postargs, pp_opts, build = compile_info
|
| 348 |
+
|
| 349 |
+
compile_opts = extra_preargs or []
|
| 350 |
+
compile_opts.append('/c')
|
| 351 |
+
if debug:
|
| 352 |
+
compile_opts.extend(self.compile_options_debug)
|
| 353 |
+
else:
|
| 354 |
+
compile_opts.extend(self.compile_options)
|
| 355 |
+
|
| 356 |
+
add_cpp_opts = False
|
| 357 |
+
|
| 358 |
+
for obj in objects:
|
| 359 |
+
try:
|
| 360 |
+
src, ext = build[obj]
|
| 361 |
+
except KeyError:
|
| 362 |
+
continue
|
| 363 |
+
if debug:
|
| 364 |
+
# pass the full pathname to MSVC in debug mode,
|
| 365 |
+
# this allows the debugger to find the source file
|
| 366 |
+
# without asking the user to browse for it
|
| 367 |
+
src = os.path.abspath(src)
|
| 368 |
+
|
| 369 |
+
if ext in self._c_extensions:
|
| 370 |
+
input_opt = "/Tc" + src
|
| 371 |
+
elif ext in self._cpp_extensions:
|
| 372 |
+
input_opt = "/Tp" + src
|
| 373 |
+
add_cpp_opts = True
|
| 374 |
+
elif ext in self._rc_extensions:
|
| 375 |
+
# compile .RC to .RES file
|
| 376 |
+
input_opt = src
|
| 377 |
+
output_opt = "/fo" + obj
|
| 378 |
+
try:
|
| 379 |
+
self.spawn([self.rc] + pp_opts + [output_opt, input_opt])
|
| 380 |
+
except DistutilsExecError as msg:
|
| 381 |
+
raise CompileError(msg)
|
| 382 |
+
continue
|
| 383 |
+
elif ext in self._mc_extensions:
|
| 384 |
+
# Compile .MC to .RC file to .RES file.
|
| 385 |
+
# * '-h dir' specifies the directory for the
|
| 386 |
+
# generated include file
|
| 387 |
+
# * '-r dir' specifies the target directory of the
|
| 388 |
+
# generated RC file and the binary message resource
|
| 389 |
+
# it includes
|
| 390 |
+
#
|
| 391 |
+
# For now (since there are no options to change this),
|
| 392 |
+
# we use the source-directory for the include file and
|
| 393 |
+
# the build directory for the RC file and message
|
| 394 |
+
# resources. This works at least for win32all.
|
| 395 |
+
h_dir = os.path.dirname(src)
|
| 396 |
+
rc_dir = os.path.dirname(obj)
|
| 397 |
+
try:
|
| 398 |
+
# first compile .MC to .RC and .H file
|
| 399 |
+
self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src])
|
| 400 |
+
base, _ = os.path.splitext(os.path.basename(src))
|
| 401 |
+
rc_file = os.path.join(rc_dir, base + '.rc')
|
| 402 |
+
# then compile .RC to .RES file
|
| 403 |
+
self.spawn([self.rc, "/fo" + obj, rc_file])
|
| 404 |
+
|
| 405 |
+
except DistutilsExecError as msg:
|
| 406 |
+
raise CompileError(msg)
|
| 407 |
+
continue
|
| 408 |
+
else:
|
| 409 |
+
# how to handle this file?
|
| 410 |
+
raise CompileError(f"Don't know how to compile {src} to {obj}")
|
| 411 |
+
|
| 412 |
+
args = [self.cc] + compile_opts + pp_opts
|
| 413 |
+
if add_cpp_opts:
|
| 414 |
+
args.append('/EHsc')
|
| 415 |
+
args.extend((input_opt, "/Fo" + obj))
|
| 416 |
+
args.extend(extra_postargs)
|
| 417 |
+
|
| 418 |
+
try:
|
| 419 |
+
self.spawn(args)
|
| 420 |
+
except DistutilsExecError as msg:
|
| 421 |
+
raise CompileError(msg)
|
| 422 |
+
|
| 423 |
+
return objects
|
| 424 |
+
|
| 425 |
+
def create_static_lib(
|
| 426 |
+
self, objects, output_libname, output_dir=None, debug=0, target_lang=None
|
| 427 |
+
):
|
| 428 |
+
if not self.initialized:
|
| 429 |
+
self.initialize()
|
| 430 |
+
objects, output_dir = self._fix_object_args(objects, output_dir)
|
| 431 |
+
output_filename = self.library_filename(output_libname, output_dir=output_dir)
|
| 432 |
+
|
| 433 |
+
if self._need_link(objects, output_filename):
|
| 434 |
+
lib_args = objects + ['/OUT:' + output_filename]
|
| 435 |
+
if debug:
|
| 436 |
+
pass # XXX what goes here?
|
| 437 |
+
try:
|
| 438 |
+
log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args))
|
| 439 |
+
self.spawn([self.lib] + lib_args)
|
| 440 |
+
except DistutilsExecError as msg:
|
| 441 |
+
raise LibError(msg)
|
| 442 |
+
else:
|
| 443 |
+
log.debug("skipping %s (up-to-date)", output_filename)
|
| 444 |
+
|
| 445 |
+
def link(
|
| 446 |
+
self,
|
| 447 |
+
target_desc,
|
| 448 |
+
objects,
|
| 449 |
+
output_filename,
|
| 450 |
+
output_dir=None,
|
| 451 |
+
libraries=None,
|
| 452 |
+
library_dirs=None,
|
| 453 |
+
runtime_library_dirs=None,
|
| 454 |
+
export_symbols=None,
|
| 455 |
+
debug=0,
|
| 456 |
+
extra_preargs=None,
|
| 457 |
+
extra_postargs=None,
|
| 458 |
+
build_temp=None,
|
| 459 |
+
target_lang=None,
|
| 460 |
+
):
|
| 461 |
+
if not self.initialized:
|
| 462 |
+
self.initialize()
|
| 463 |
+
objects, output_dir = self._fix_object_args(objects, output_dir)
|
| 464 |
+
fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
|
| 465 |
+
libraries, library_dirs, runtime_library_dirs = fixed_args
|
| 466 |
+
|
| 467 |
+
if runtime_library_dirs:
|
| 468 |
+
self.warn(
|
| 469 |
+
"I don't know what to do with 'runtime_library_dirs': "
|
| 470 |
+
+ str(runtime_library_dirs)
|
| 471 |
+
)
|
| 472 |
+
|
| 473 |
+
lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries)
|
| 474 |
+
if output_dir is not None:
|
| 475 |
+
output_filename = os.path.join(output_dir, output_filename)
|
| 476 |
+
|
| 477 |
+
if self._need_link(objects, output_filename):
|
| 478 |
+
ldflags = self._ldflags[target_desc, debug]
|
| 479 |
+
|
| 480 |
+
export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])]
|
| 481 |
+
|
| 482 |
+
ld_args = (
|
| 483 |
+
ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename]
|
| 484 |
+
)
|
| 485 |
+
|
| 486 |
+
# The MSVC linker generates .lib and .exp files, which cannot be
|
| 487 |
+
# suppressed by any linker switches. The .lib files may even be
|
| 488 |
+
# needed! Make sure they are generated in the temporary build
|
| 489 |
+
# directory. Since they have different names for debug and release
|
| 490 |
+
# builds, they can go into the same directory.
|
| 491 |
+
build_temp = os.path.dirname(objects[0])
|
| 492 |
+
if export_symbols is not None:
|
| 493 |
+
(dll_name, dll_ext) = os.path.splitext(
|
| 494 |
+
os.path.basename(output_filename)
|
| 495 |
+
)
|
| 496 |
+
implib_file = os.path.join(build_temp, self.library_filename(dll_name))
|
| 497 |
+
ld_args.append('/IMPLIB:' + implib_file)
|
| 498 |
+
|
| 499 |
+
if extra_preargs:
|
| 500 |
+
ld_args[:0] = extra_preargs
|
| 501 |
+
if extra_postargs:
|
| 502 |
+
ld_args.extend(extra_postargs)
|
| 503 |
+
|
| 504 |
+
output_dir = os.path.dirname(os.path.abspath(output_filename))
|
| 505 |
+
self.mkpath(output_dir)
|
| 506 |
+
try:
|
| 507 |
+
log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args))
|
| 508 |
+
self.spawn([self.linker] + ld_args)
|
| 509 |
+
except DistutilsExecError as msg:
|
| 510 |
+
raise LinkError(msg)
|
| 511 |
+
else:
|
| 512 |
+
log.debug("skipping %s (up-to-date)", output_filename)
|
| 513 |
+
|
| 514 |
+
def spawn(self, cmd):
|
| 515 |
+
env = dict(os.environ, PATH=self._paths)
|
| 516 |
+
with self._fallback_spawn(cmd, env) as fallback:
|
| 517 |
+
return super().spawn(cmd, env=env)
|
| 518 |
+
return fallback.value
|
| 519 |
+
|
| 520 |
+
@contextlib.contextmanager
|
| 521 |
+
def _fallback_spawn(self, cmd, env):
|
| 522 |
+
"""
|
| 523 |
+
Discovered in pypa/distutils#15, some tools monkeypatch the compiler,
|
| 524 |
+
so the 'env' kwarg causes a TypeError. Detect this condition and
|
| 525 |
+
restore the legacy, unsafe behavior.
|
| 526 |
+
"""
|
| 527 |
+
bag = type('Bag', (), {})()
|
| 528 |
+
try:
|
| 529 |
+
yield bag
|
| 530 |
+
except TypeError as exc:
|
| 531 |
+
if "unexpected keyword argument 'env'" not in str(exc):
|
| 532 |
+
raise
|
| 533 |
+
else:
|
| 534 |
+
return
|
| 535 |
+
warnings.warn("Fallback spawn triggered. Please update distutils monkeypatch.")
|
| 536 |
+
with mock.patch.dict('os.environ', env):
|
| 537 |
+
bag.value = super().spawn(cmd)
|
| 538 |
+
|
| 539 |
+
# -- Miscellaneous methods -----------------------------------------
|
| 540 |
+
# These are all used by the 'gen_lib_options() function, in
|
| 541 |
+
# ccompiler.py.
|
| 542 |
+
|
| 543 |
+
def library_dir_option(self, dir):
|
| 544 |
+
return "/LIBPATH:" + dir
|
| 545 |
+
|
| 546 |
+
def runtime_library_dir_option(self, dir):
|
| 547 |
+
raise DistutilsPlatformError(
|
| 548 |
+
"don't know how to set runtime library search path for MSVC"
|
| 549 |
+
)
|
| 550 |
+
|
| 551 |
+
def library_option(self, lib):
|
| 552 |
+
return self.library_filename(lib)
|
| 553 |
+
|
| 554 |
+
def find_library_file(self, dirs, lib, debug=0):
|
| 555 |
+
# Prefer a debugging library if found (and requested), but deal
|
| 556 |
+
# with it if we don't have one.
|
| 557 |
+
if debug:
|
| 558 |
+
try_names = [lib + "_d", lib]
|
| 559 |
+
else:
|
| 560 |
+
try_names = [lib]
|
| 561 |
+
for dir in dirs:
|
| 562 |
+
for name in try_names:
|
| 563 |
+
libfile = os.path.join(dir, self.library_filename(name))
|
| 564 |
+
if os.path.isfile(libfile):
|
| 565 |
+
return libfile
|
| 566 |
+
else:
|
| 567 |
+
# Oops, didn't find it in *any* of 'dirs'
|
| 568 |
+
return None
|
.venv/Lib/site-packages/setuptools/_distutils/command/__init__.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command
|
| 2 |
+
|
| 3 |
+
Package containing implementation of all the standard Distutils
|
| 4 |
+
commands."""
|
| 5 |
+
|
| 6 |
+
__all__ = [ # noqa: F822
|
| 7 |
+
'build',
|
| 8 |
+
'build_py',
|
| 9 |
+
'build_ext',
|
| 10 |
+
'build_clib',
|
| 11 |
+
'build_scripts',
|
| 12 |
+
'clean',
|
| 13 |
+
'install',
|
| 14 |
+
'install_lib',
|
| 15 |
+
'install_headers',
|
| 16 |
+
'install_scripts',
|
| 17 |
+
'install_data',
|
| 18 |
+
'sdist',
|
| 19 |
+
'register',
|
| 20 |
+
'bdist',
|
| 21 |
+
'bdist_dumb',
|
| 22 |
+
'bdist_rpm',
|
| 23 |
+
'check',
|
| 24 |
+
'upload',
|
| 25 |
+
]
|
.venv/Lib/site-packages/setuptools/_distutils/command/_framework_compat.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Backward compatibility for homebrew builds on macOS.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
import sys
|
| 7 |
+
import os
|
| 8 |
+
import functools
|
| 9 |
+
import subprocess
|
| 10 |
+
import sysconfig
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
@functools.lru_cache()
def enabled():
    """
    Only enabled for Python 3.9 framework homebrew builds
    except ensurepip and venv.
    """
    is_py39 = (3, 9) < sys.version_info < (3, 10)
    is_framework = sys.platform == 'darwin' and sys._framework
    is_homebrew = "Cellar" in sysconfig.get_config_var('projectbase')
    in_venv = sys.prefix != sys.base_prefix
    under_ensurepip = os.environ.get("ENSUREPIP_OPTIONS")
    return (
        is_py39 and is_framework and is_homebrew
        and not in_venv and not under_ensurepip
    )
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
# Install-scheme template strings (sysconfig-style placeholders) that
# redirect scripts, site-packages, and data under the Homebrew prefix
# while leaving stdlib/include locations with the framework build.
schemes = {
    'osx_framework_library': {
        'stdlib': '{installed_base}/{platlibdir}/python{py_version_short}',
        'platstdlib': '{platbase}/{platlibdir}/python{py_version_short}',
        'purelib': '{homebrew_prefix}/lib/python{py_version_short}/site-packages',
        'platlib': '{homebrew_prefix}/{platlibdir}/python{py_version_short}/site-packages',
        'include': '{installed_base}/include/python{py_version_short}{abiflags}',
        'platinclude': '{installed_platbase}/include/python{py_version_short}{abiflags}',
        'scripts': '{homebrew_prefix}/bin',
        'data': '{homebrew_prefix}',
    },
}
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
@functools.lru_cache()
def vars():
    """Return the extra substitution variables for the homebrew scheme.

    Empty when the compatibility shim is disabled.

    NOTE: intentionally shadows the builtin ``vars`` inside this module.
    """
    if not enabled():
        return {}
    prefix = subprocess.check_output(['brew', '--prefix'], text=True).strip()
    return {'homebrew_prefix': prefix}
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def scheme(name):
    """
    Override the selected scheme for posix_prefix.
    """
    should_override = enabled() and name.endswith('_prefix')
    if should_override:
        return 'osx_framework_library'
    return name
|
.venv/Lib/site-packages/setuptools/_distutils/command/bdist.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.bdist
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'bdist' command (create a built [binary]
|
| 4 |
+
distribution)."""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import warnings
|
| 8 |
+
|
| 9 |
+
from ..core import Command
|
| 10 |
+
from ..errors import DistutilsPlatformError, DistutilsOptionError
|
| 11 |
+
from ..util import get_platform
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def show_formats():
    """Print list of available formats (arguments to "--format" option)."""
    from ..fancy_getopt import FancyGetopt

    entries = [
        ("formats=" + fmt, None, bdist.format_commands[fmt][1])
        for fmt in bdist.format_commands
    ]
    FancyGetopt(entries).print_help("List of available distribution formats:")
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class ListCompat(dict):
    # adapter to allow for Setuptools compatibility in format_commands
    def append(self, item):
        # Deliberately discards *item*: the mapping is the source of truth
        # now, so legacy list-style appends only produce a warning.
        warnings.warn(
            """format_commands is now a dict. append is deprecated.""",
            category=DeprecationWarning,
            stacklevel=2,
        )
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class bdist(Command):
    """Create a built (binary) distribution by dispatching to the
    format-specific sub-command for each requested format
    (``bdist_dumb``, ``bdist_rpm``, ...)."""

    description = "create a built (binary) distribution"

    user_options = [
        ('bdist-base=', 'b', "temporary directory for creating built distributions"),
        (
            'plat-name=',
            'p',
            "platform name to embed in generated filenames "
            "(default: %s)" % get_platform(),
        ),
        ('formats=', None, "formats for distribution (comma-separated list)"),
        (
            'dist-dir=',
            'd',
            "directory to put final built distributions in " "[default: dist]",
        ),
        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
        (
            'owner=',
            'u',
            "Owner name used when creating a tar file" " [default: current user]",
        ),
        (
            'group=',
            'g',
            "Group name used when creating a tar file" " [default: current group]",
        ),
    ]

    boolean_options = ['skip-build']

    help_options = [
        ('help-formats', None, "lists available distribution formats", show_formats),
    ]

    # The following commands do not take a format option from bdist
    no_format_option = ('bdist_rpm',)

    # This won't do in reality: will need to distinguish RPM-ish Linux,
    # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
    default_format = {'posix': 'gztar', 'nt': 'zip'}

    # Define commands in preferred order for the --help-formats option.
    # Maps format name -> (sub-command name, human-readable description).
    format_commands = ListCompat(
        {
            'rpm': ('bdist_rpm', "RPM distribution"),
            'gztar': ('bdist_dumb', "gzip'ed tar file"),
            'bztar': ('bdist_dumb', "bzip2'ed tar file"),
            'xztar': ('bdist_dumb', "xz'ed tar file"),
            'ztar': ('bdist_dumb', "compressed tar file"),
            'tar': ('bdist_dumb', "tar file"),
            'zip': ('bdist_dumb', "ZIP file"),
        }
    )

    # for compatibility until consumers only reference format_commands
    format_command = format_commands

    def initialize_options(self):
        """Set every option to its 'undefined' default; real values are
        resolved in finalize_options()."""
        self.bdist_base = None
        self.plat_name = None
        self.formats = None
        self.dist_dir = None
        self.skip_build = 0
        self.group = None
        self.owner = None

    def finalize_options(self):
        """Resolve plat_name, bdist_base, formats, and dist_dir."""
        # have to finalize 'plat_name' before 'bdist_base'
        if self.plat_name is None:
            if self.skip_build:
                # No build step will run, so take the host platform directly.
                self.plat_name = get_platform()
            else:
                self.plat_name = self.get_finalized_command('build').plat_name

        # 'bdist_base' -- parent of per-built-distribution-format
        # temporary directories (eg. we'll probably have
        # "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
        if self.bdist_base is None:
            build_base = self.get_finalized_command('build').build_base
            self.bdist_base = os.path.join(build_base, 'bdist.' + self.plat_name)

        # Accept a comma-separated string as well as a list.
        self.ensure_string_list('formats')
        if self.formats is None:
            try:
                self.formats = [self.default_format[os.name]]
            except KeyError:
                raise DistutilsPlatformError(
                    "don't know how to create built distributions "
                    "on platform %s" % os.name
                )

        if self.dist_dir is None:
            self.dist_dir = "dist"

    def run(self):
        """Validate the requested formats, then run one sub-command per
        format, reusing temp files when a sub-command repeats."""
        # Figure out which sub-commands we need to run.
        commands = []
        for format in self.formats:
            try:
                commands.append(self.format_commands[format][0])
            except KeyError:
                raise DistutilsOptionError("invalid format '%s'" % format)

        # Reinitialize and run each command.
        for i in range(len(self.formats)):
            cmd_name = commands[i]
            sub_cmd = self.reinitialize_command(cmd_name)
            if cmd_name not in self.no_format_option:
                sub_cmd.format = self.formats[i]

            # passing the owner and group names for tar archiving
            if cmd_name == 'bdist_dumb':
                sub_cmd.owner = self.owner
                sub_cmd.group = self.group

            # If we're going to need to run this command again, tell it to
            # keep its temporary files around so subsequent runs go faster.
            if cmd_name in commands[i + 1 :]:
                sub_cmd.keep_temp = 1
            self.run_command(cmd_name)
|
.venv/Lib/site-packages/setuptools/_distutils/command/bdist_dumb.py
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.bdist_dumb
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'bdist_dumb' command (create a "dumb" built
|
| 4 |
+
distribution -- i.e., just an archive to be unpacked under $prefix or
|
| 5 |
+
$exec_prefix)."""
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
from ..core import Command
|
| 9 |
+
from ..util import get_platform
|
| 10 |
+
from ..dir_util import remove_tree, ensure_relative
|
| 11 |
+
from ..errors import DistutilsPlatformError
|
| 12 |
+
from ..sysconfig import get_python_version
|
| 13 |
+
from distutils._log import log
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class bdist_dumb(Command):
    """Create a "dumb" built distribution: perform a full build + install
    into a temporary tree, then archive that tree as-is."""

    description = "create a \"dumb\" built distribution"

    # NOTE(review): 'bdist-dir=' and 'dist-dir=' both declare the short
    # option 'd' -- looks like a long-standing upstream quirk; verify
    # before relying on the short form.
    user_options = [
        ('bdist-dir=', 'd', "temporary directory for creating the distribution"),
        (
            'plat-name=',
            'p',
            "platform name to embed in generated filenames "
            "(default: %s)" % get_platform(),
        ),
        (
            'format=',
            'f',
            "archive format to create (tar, gztar, bztar, xztar, " "ztar, zip)",
        ),
        (
            'keep-temp',
            'k',
            "keep the pseudo-installation tree around after "
            + "creating the distribution archive",
        ),
        ('dist-dir=', 'd', "directory to put final built distributions in"),
        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
        (
            'relative',
            None,
            "build the archive using relative paths " "(default: false)",
        ),
        (
            'owner=',
            'u',
            "Owner name used when creating a tar file" " [default: current user]",
        ),
        (
            'group=',
            'g',
            "Group name used when creating a tar file" " [default: current group]",
        ),
    ]

    boolean_options = ['keep-temp', 'skip-build', 'relative']

    # Per-OS archive format used when --format is not given.
    default_format = {'posix': 'gztar', 'nt': 'zip'}

    def initialize_options(self):
        """Set every option to its 'undefined' default; real values are
        resolved in finalize_options()."""
        self.bdist_dir = None
        self.plat_name = None
        self.format = None
        self.keep_temp = 0
        self.dist_dir = None
        self.skip_build = None
        self.relative = 0
        self.owner = None
        self.group = None

    def finalize_options(self):
        """Derive bdist_dir and format, and inherit shared options from
        the parent 'bdist' command."""
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'dumb')

        if self.format is None:
            try:
                self.format = self.default_format[os.name]
            except KeyError:
                raise DistutilsPlatformError(
                    "don't know how to create dumb built distributions "
                    "on platform %s" % os.name
                )

        # dist_dir / plat_name / skip_build default to the parent bdist's.
        self.set_undefined_options(
            'bdist',
            ('dist_dir', 'dist_dir'),
            ('plat_name', 'plat_name'),
            ('skip_build', 'skip_build'),
        )

    def run(self):
        """Build, install into the temp tree, archive it, and record the
        archive in the distribution's dist_files."""
        if not self.skip_build:
            self.run_command('build')

        # Install into the pseudo-installation tree rooted at bdist_dir.
        install = self.reinitialize_command('install', reinit_subcommands=1)
        install.root = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = 0

        log.info("installing to %s", self.bdist_dir)
        self.run_command('install')

        # And make an archive relative to the root of the
        # pseudo-installation tree.
        archive_basename = "{}.{}".format(
            self.distribution.get_fullname(), self.plat_name
        )

        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            # --relative only makes sense when everything installs under a
            # single base; mixed base/platbase cannot be expressed.
            if self.distribution.has_ext_modules() and (
                install.install_base != install.install_platbase
            ):
                raise DistutilsPlatformError(
                    "can't make a dumb built distribution where "
                    "base and platbase are different (%s, %s)"
                    % (repr(install.install_base), repr(install.install_platbase))
                )
            else:
                archive_root = os.path.join(
                    self.bdist_dir, ensure_relative(install.install_base)
                )

        # Make the archive
        filename = self.make_archive(
            pseudoinstall_root,
            self.format,
            root_dir=archive_root,
            owner=self.owner,
            group=self.group,
        )
        # Archives containing extension modules are tied to one Python
        # version; pure-Python archives are tagged 'any'.
        if self.distribution.has_ext_modules():
            pyversion = get_python_version()
        else:
            pyversion = 'any'
        self.distribution.dist_files.append(('bdist_dumb', pyversion, filename))

        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)
|
.venv/Lib/site-packages/setuptools/_distutils/command/bdist_rpm.py
ADDED
|
@@ -0,0 +1,614 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.bdist_rpm
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'bdist_rpm' command (create RPM source and binary
|
| 4 |
+
distributions)."""
|
| 5 |
+
|
| 6 |
+
import subprocess
|
| 7 |
+
import sys
|
| 8 |
+
import os
|
| 9 |
+
|
| 10 |
+
from ..core import Command
|
| 11 |
+
from ..debug import DEBUG
|
| 12 |
+
from ..file_util import write_file
|
| 13 |
+
from ..errors import (
|
| 14 |
+
DistutilsOptionError,
|
| 15 |
+
DistutilsPlatformError,
|
| 16 |
+
DistutilsFileError,
|
| 17 |
+
DistutilsExecError,
|
| 18 |
+
)
|
| 19 |
+
from ..sysconfig import get_python_version
|
| 20 |
+
from distutils._log import log
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class bdist_rpm(Command):
|
| 24 |
+
description = "create an RPM distribution"
|
| 25 |
+
|
| 26 |
+
user_options = [
|
| 27 |
+
('bdist-base=', None, "base directory for creating built distributions"),
|
| 28 |
+
(
|
| 29 |
+
'rpm-base=',
|
| 30 |
+
None,
|
| 31 |
+
"base directory for creating RPMs (defaults to \"rpm\" under "
|
| 32 |
+
"--bdist-base; must be specified for RPM 2)",
|
| 33 |
+
),
|
| 34 |
+
(
|
| 35 |
+
'dist-dir=',
|
| 36 |
+
'd',
|
| 37 |
+
"directory to put final RPM files in " "(and .spec files if --spec-only)",
|
| 38 |
+
),
|
| 39 |
+
(
|
| 40 |
+
'python=',
|
| 41 |
+
None,
|
| 42 |
+
"path to Python interpreter to hard-code in the .spec file "
|
| 43 |
+
"(default: \"python\")",
|
| 44 |
+
),
|
| 45 |
+
(
|
| 46 |
+
'fix-python',
|
| 47 |
+
None,
|
| 48 |
+
"hard-code the exact path to the current Python interpreter in "
|
| 49 |
+
"the .spec file",
|
| 50 |
+
),
|
| 51 |
+
('spec-only', None, "only regenerate spec file"),
|
| 52 |
+
('source-only', None, "only generate source RPM"),
|
| 53 |
+
('binary-only', None, "only generate binary RPM"),
|
| 54 |
+
('use-bzip2', None, "use bzip2 instead of gzip to create source distribution"),
|
| 55 |
+
# More meta-data: too RPM-specific to put in the setup script,
|
| 56 |
+
# but needs to go in the .spec file -- so we make these options
|
| 57 |
+
# to "bdist_rpm". The idea is that packagers would put this
|
| 58 |
+
# info in setup.cfg, although they are of course free to
|
| 59 |
+
# supply it on the command line.
|
| 60 |
+
(
|
| 61 |
+
'distribution-name=',
|
| 62 |
+
None,
|
| 63 |
+
"name of the (Linux) distribution to which this "
|
| 64 |
+
"RPM applies (*not* the name of the module distribution!)",
|
| 65 |
+
),
|
| 66 |
+
('group=', None, "package classification [default: \"Development/Libraries\"]"),
|
| 67 |
+
('release=', None, "RPM release number"),
|
| 68 |
+
('serial=', None, "RPM serial number"),
|
| 69 |
+
(
|
| 70 |
+
'vendor=',
|
| 71 |
+
None,
|
| 72 |
+
"RPM \"vendor\" (eg. \"Joe Blow <joe@example.com>\") "
|
| 73 |
+
"[default: maintainer or author from setup script]",
|
| 74 |
+
),
|
| 75 |
+
(
|
| 76 |
+
'packager=',
|
| 77 |
+
None,
|
| 78 |
+
"RPM packager (eg. \"Jane Doe <jane@example.net>\") " "[default: vendor]",
|
| 79 |
+
),
|
| 80 |
+
('doc-files=', None, "list of documentation files (space or comma-separated)"),
|
| 81 |
+
('changelog=', None, "RPM changelog"),
|
| 82 |
+
('icon=', None, "name of icon file"),
|
| 83 |
+
('provides=', None, "capabilities provided by this package"),
|
| 84 |
+
('requires=', None, "capabilities required by this package"),
|
| 85 |
+
('conflicts=', None, "capabilities which conflict with this package"),
|
| 86 |
+
('build-requires=', None, "capabilities required to build this package"),
|
| 87 |
+
('obsoletes=', None, "capabilities made obsolete by this package"),
|
| 88 |
+
('no-autoreq', None, "do not automatically calculate dependencies"),
|
| 89 |
+
# Actions to take when building RPM
|
| 90 |
+
('keep-temp', 'k', "don't clean up RPM build directory"),
|
| 91 |
+
('no-keep-temp', None, "clean up RPM build directory [default]"),
|
| 92 |
+
(
|
| 93 |
+
'use-rpm-opt-flags',
|
| 94 |
+
None,
|
| 95 |
+
"compile with RPM_OPT_FLAGS when building from source RPM",
|
| 96 |
+
),
|
| 97 |
+
('no-rpm-opt-flags', None, "do not pass any RPM CFLAGS to compiler"),
|
| 98 |
+
('rpm3-mode', None, "RPM 3 compatibility mode (default)"),
|
| 99 |
+
('rpm2-mode', None, "RPM 2 compatibility mode"),
|
| 100 |
+
# Add the hooks necessary for specifying custom scripts
|
| 101 |
+
('prep-script=', None, "Specify a script for the PREP phase of RPM building"),
|
| 102 |
+
('build-script=', None, "Specify a script for the BUILD phase of RPM building"),
|
| 103 |
+
(
|
| 104 |
+
'pre-install=',
|
| 105 |
+
None,
|
| 106 |
+
"Specify a script for the pre-INSTALL phase of RPM building",
|
| 107 |
+
),
|
| 108 |
+
(
|
| 109 |
+
'install-script=',
|
| 110 |
+
None,
|
| 111 |
+
"Specify a script for the INSTALL phase of RPM building",
|
| 112 |
+
),
|
| 113 |
+
(
|
| 114 |
+
'post-install=',
|
| 115 |
+
None,
|
| 116 |
+
"Specify a script for the post-INSTALL phase of RPM building",
|
| 117 |
+
),
|
| 118 |
+
(
|
| 119 |
+
'pre-uninstall=',
|
| 120 |
+
None,
|
| 121 |
+
"Specify a script for the pre-UNINSTALL phase of RPM building",
|
| 122 |
+
),
|
| 123 |
+
(
|
| 124 |
+
'post-uninstall=',
|
| 125 |
+
None,
|
| 126 |
+
"Specify a script for the post-UNINSTALL phase of RPM building",
|
| 127 |
+
),
|
| 128 |
+
('clean-script=', None, "Specify a script for the CLEAN phase of RPM building"),
|
| 129 |
+
(
|
| 130 |
+
'verify-script=',
|
| 131 |
+
None,
|
| 132 |
+
"Specify a script for the VERIFY phase of the RPM build",
|
| 133 |
+
),
|
| 134 |
+
# Allow a packager to explicitly force an architecture
|
| 135 |
+
('force-arch=', None, "Force an architecture onto the RPM build process"),
|
| 136 |
+
('quiet', 'q', "Run the INSTALL phase of RPM building in quiet mode"),
|
| 137 |
+
]
|
| 138 |
+
|
| 139 |
+
boolean_options = [
|
| 140 |
+
'keep-temp',
|
| 141 |
+
'use-rpm-opt-flags',
|
| 142 |
+
'rpm3-mode',
|
| 143 |
+
'no-autoreq',
|
| 144 |
+
'quiet',
|
| 145 |
+
]
|
| 146 |
+
|
| 147 |
+
negative_opt = {
|
| 148 |
+
'no-keep-temp': 'keep-temp',
|
| 149 |
+
'no-rpm-opt-flags': 'use-rpm-opt-flags',
|
| 150 |
+
'rpm2-mode': 'rpm3-mode',
|
| 151 |
+
}
|
| 152 |
+
|
| 153 |
+
def initialize_options(self):
|
| 154 |
+
self.bdist_base = None
|
| 155 |
+
self.rpm_base = None
|
| 156 |
+
self.dist_dir = None
|
| 157 |
+
self.python = None
|
| 158 |
+
self.fix_python = None
|
| 159 |
+
self.spec_only = None
|
| 160 |
+
self.binary_only = None
|
| 161 |
+
self.source_only = None
|
| 162 |
+
self.use_bzip2 = None
|
| 163 |
+
|
| 164 |
+
self.distribution_name = None
|
| 165 |
+
self.group = None
|
| 166 |
+
self.release = None
|
| 167 |
+
self.serial = None
|
| 168 |
+
self.vendor = None
|
| 169 |
+
self.packager = None
|
| 170 |
+
self.doc_files = None
|
| 171 |
+
self.changelog = None
|
| 172 |
+
self.icon = None
|
| 173 |
+
|
| 174 |
+
self.prep_script = None
|
| 175 |
+
self.build_script = None
|
| 176 |
+
self.install_script = None
|
| 177 |
+
self.clean_script = None
|
| 178 |
+
self.verify_script = None
|
| 179 |
+
self.pre_install = None
|
| 180 |
+
self.post_install = None
|
| 181 |
+
self.pre_uninstall = None
|
| 182 |
+
self.post_uninstall = None
|
| 183 |
+
self.prep = None
|
| 184 |
+
self.provides = None
|
| 185 |
+
self.requires = None
|
| 186 |
+
self.conflicts = None
|
| 187 |
+
self.build_requires = None
|
| 188 |
+
self.obsoletes = None
|
| 189 |
+
|
| 190 |
+
self.keep_temp = 0
|
| 191 |
+
self.use_rpm_opt_flags = 1
|
| 192 |
+
self.rpm3_mode = 1
|
| 193 |
+
self.no_autoreq = 0
|
| 194 |
+
|
| 195 |
+
self.force_arch = None
|
| 196 |
+
self.quiet = 0
|
| 197 |
+
|
| 198 |
+
def finalize_options(self):
|
| 199 |
+
self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
|
| 200 |
+
if self.rpm_base is None:
|
| 201 |
+
if not self.rpm3_mode:
|
| 202 |
+
raise DistutilsOptionError("you must specify --rpm-base in RPM 2 mode")
|
| 203 |
+
self.rpm_base = os.path.join(self.bdist_base, "rpm")
|
| 204 |
+
|
| 205 |
+
if self.python is None:
|
| 206 |
+
if self.fix_python:
|
| 207 |
+
self.python = sys.executable
|
| 208 |
+
else:
|
| 209 |
+
self.python = "python3"
|
| 210 |
+
elif self.fix_python:
|
| 211 |
+
raise DistutilsOptionError(
|
| 212 |
+
"--python and --fix-python are mutually exclusive options"
|
| 213 |
+
)
|
| 214 |
+
|
| 215 |
+
if os.name != 'posix':
|
| 216 |
+
raise DistutilsPlatformError(
|
| 217 |
+
"don't know how to create RPM " "distributions on platform %s" % os.name
|
| 218 |
+
)
|
| 219 |
+
if self.binary_only and self.source_only:
|
| 220 |
+
raise DistutilsOptionError(
|
| 221 |
+
"cannot supply both '--source-only' and '--binary-only'"
|
| 222 |
+
)
|
| 223 |
+
|
| 224 |
+
# don't pass CFLAGS to pure python distributions
|
| 225 |
+
if not self.distribution.has_ext_modules():
|
| 226 |
+
self.use_rpm_opt_flags = 0
|
| 227 |
+
|
| 228 |
+
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
|
| 229 |
+
self.finalize_package_data()
|
| 230 |
+
|
| 231 |
+
def finalize_package_data(self):
|
| 232 |
+
self.ensure_string('group', "Development/Libraries")
|
| 233 |
+
self.ensure_string(
|
| 234 |
+
'vendor',
|
| 235 |
+
"%s <%s>"
|
| 236 |
+
% (self.distribution.get_contact(), self.distribution.get_contact_email()),
|
| 237 |
+
)
|
| 238 |
+
self.ensure_string('packager')
|
| 239 |
+
self.ensure_string_list('doc_files')
|
| 240 |
+
if isinstance(self.doc_files, list):
|
| 241 |
+
for readme in ('README', 'README.txt'):
|
| 242 |
+
if os.path.exists(readme) and readme not in self.doc_files:
|
| 243 |
+
self.doc_files.append(readme)
|
| 244 |
+
|
| 245 |
+
self.ensure_string('release', "1")
|
| 246 |
+
self.ensure_string('serial') # should it be an int?
|
| 247 |
+
|
| 248 |
+
self.ensure_string('distribution_name')
|
| 249 |
+
|
| 250 |
+
self.ensure_string('changelog')
|
| 251 |
+
# Format changelog correctly
|
| 252 |
+
self.changelog = self._format_changelog(self.changelog)
|
| 253 |
+
|
| 254 |
+
self.ensure_filename('icon')
|
| 255 |
+
|
| 256 |
+
self.ensure_filename('prep_script')
|
| 257 |
+
self.ensure_filename('build_script')
|
| 258 |
+
self.ensure_filename('install_script')
|
| 259 |
+
self.ensure_filename('clean_script')
|
| 260 |
+
self.ensure_filename('verify_script')
|
| 261 |
+
self.ensure_filename('pre_install')
|
| 262 |
+
self.ensure_filename('post_install')
|
| 263 |
+
self.ensure_filename('pre_uninstall')
|
| 264 |
+
self.ensure_filename('post_uninstall')
|
| 265 |
+
|
| 266 |
+
# XXX don't forget we punted on summaries and descriptions -- they
|
| 267 |
+
# should be handled here eventually!
|
| 268 |
+
|
| 269 |
+
# Now *this* is some meta-data that belongs in the setup script...
|
| 270 |
+
self.ensure_string_list('provides')
|
| 271 |
+
self.ensure_string_list('requires')
|
| 272 |
+
self.ensure_string_list('conflicts')
|
| 273 |
+
self.ensure_string_list('build_requires')
|
| 274 |
+
self.ensure_string_list('obsoletes')
|
| 275 |
+
|
| 276 |
+
self.ensure_string('force_arch')
|
| 277 |
+
|
| 278 |
+
def run(self):  # noqa: C901
    """Write the spec file, build the sdist, and invoke rpmbuild.

    With --spec-only, only the .spec file is written into dist_dir and
    the method returns early.  Otherwise the RPM directory tree is
    created under rpm_base, rpmbuild is spawned, and the resulting
    source/binary RPMs are moved into dist_dir and recorded in
    distribution.dist_files.
    """
    if DEBUG:
        print("before _get_package_data():")
        print("vendor =", self.vendor)
        print("packager =", self.packager)
        print("doc_files =", self.doc_files)
        print("changelog =", self.changelog)

    # make directories
    if self.spec_only:
        spec_dir = self.dist_dir
        self.mkpath(spec_dir)
    else:
        # standard rpmbuild working tree layout
        rpm_dir = {}
        for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'):
            rpm_dir[d] = os.path.join(self.rpm_base, d)
            self.mkpath(rpm_dir[d])
        spec_dir = rpm_dir['SPECS']

    # Spec file goes into 'dist_dir' if '--spec-only specified',
    # build/rpm.<plat> otherwise.
    spec_path = os.path.join(spec_dir, "%s.spec" % self.distribution.get_name())
    self.execute(
        write_file, (spec_path, self._make_spec_file()), "writing '%s'" % spec_path
    )

    if self.spec_only:  # stop if requested
        return

    # Make a source distribution and copy to SOURCES directory with
    # optional icon.
    # dist_files is saved/restored so the temporary sdist run does not
    # pollute the distribution's record of produced files.
    saved_dist_files = self.distribution.dist_files[:]
    sdist = self.reinitialize_command('sdist')
    if self.use_bzip2:
        sdist.formats = ['bztar']
    else:
        sdist.formats = ['gztar']
    self.run_command('sdist')
    self.distribution.dist_files = saved_dist_files

    source = sdist.get_archive_files()[0]
    source_dir = rpm_dir['SOURCES']
    self.copy_file(source, source_dir)

    if self.icon:
        if os.path.exists(self.icon):
            self.copy_file(self.icon, source_dir)
        else:
            raise DistutilsFileError("icon file '%s' does not exist" % self.icon)

    # build package
    log.info("building RPMs")
    rpm_cmd = ['rpmbuild']

    if self.source_only:  # what kind of RPMs?
        rpm_cmd.append('-bs')
    elif self.binary_only:
        rpm_cmd.append('-bb')
    else:
        rpm_cmd.append('-ba')
    rpm_cmd.extend(['--define', '__python %s' % self.python])
    if self.rpm3_mode:
        rpm_cmd.extend(['--define', '_topdir %s' % os.path.abspath(self.rpm_base)])
    if not self.keep_temp:
        rpm_cmd.append('--clean')

    if self.quiet:
        rpm_cmd.append('--quiet')

    rpm_cmd.append(spec_path)
    # Determine the binary rpm names that should be built out of this spec
    # file
    # Note that some of these may not be really built (if the file
    # list is empty)
    nvr_string = "%{name}-%{version}-%{release}"
    src_rpm = nvr_string + ".src.rpm"
    non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
    q_cmd = r"rpm -q --qf '{} {}\n' --specfile '{}'".format(
        src_rpm,
        non_src_rpm,
        spec_path,
    )

    out = os.popen(q_cmd)
    try:
        binary_rpms = []
        source_rpm = None
        while True:
            line = out.readline()
            if not line:
                break
            # each line is "<src-rpm-name> <binary-rpm-name>"
            ell = line.strip().split()
            assert len(ell) == 2
            binary_rpms.append(ell[1])
            # The source rpm is named after the first entry in the spec file
            if source_rpm is None:
                source_rpm = ell[0]

        # os.popen().close() returns a non-zero status on failure
        status = out.close()
        if status:
            raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))

    finally:
        # harmless second close on the success path; guarantees cleanup
        # when an exception escapes the loop above
        out.close()

    self.spawn(rpm_cmd)

    if not self.dry_run:
        if self.distribution.has_ext_modules():
            pyversion = get_python_version()
        else:
            pyversion = 'any'

        if not self.binary_only:
            srpm = os.path.join(rpm_dir['SRPMS'], source_rpm)
            assert os.path.exists(srpm)
            self.move_file(srpm, self.dist_dir)
            filename = os.path.join(self.dist_dir, source_rpm)
            self.distribution.dist_files.append(('bdist_rpm', pyversion, filename))

        if not self.source_only:
            for rpm in binary_rpms:
                rpm = os.path.join(rpm_dir['RPMS'], rpm)
                if os.path.exists(rpm):
                    self.move_file(rpm, self.dist_dir)
                    filename = os.path.join(self.dist_dir, os.path.basename(rpm))
                    self.distribution.dist_files.append(
                        ('bdist_rpm', pyversion, filename)
                    )
def _dist_path(self, path):
|
| 409 |
+
return os.path.join(self.dist_dir, os.path.basename(path))
|
| 410 |
+
|
| 411 |
+
def _make_spec_file(self):  # noqa: C901
    """Generate the text of an RPM spec file and return it as a
    list of strings (one per line).
    """
    # definitions and headers
    # '-' is illegal in RPM version/release fields, hence the replace
    spec_file = [
        '%define name ' + self.distribution.get_name(),
        '%define version ' + self.distribution.get_version().replace('-', '_'),
        '%define unmangled_version ' + self.distribution.get_version(),
        '%define release ' + self.release.replace('-', '_'),
        '',
        'Summary: ' + (self.distribution.get_description() or "UNKNOWN"),
    ]

    # Workaround for #14443 which affects some RPM based systems such as
    # RHEL6 (and probably derivatives)
    vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}')
    # Generate a potential replacement value for __os_install_post (whilst
    # normalizing the whitespace to simplify the test for whether the
    # invocation of brp-python-bytecompile passes in __python):
    vendor_hook = '\n'.join(
        [' %s \\' % line.strip() for line in vendor_hook.splitlines()]
    )
    problem = "brp-python-bytecompile \\\n"
    fixed = "brp-python-bytecompile %{__python} \\\n"
    fixed_hook = vendor_hook.replace(problem, fixed)
    if fixed_hook != vendor_hook:
        spec_file.append('# Workaround for http://bugs.python.org/issue14443')
        spec_file.append('%define __os_install_post ' + fixed_hook + '\n')

    # put locale summaries into spec file
    # XXX not supported for now (hard to put a dictionary
    # in a config file -- arg!)
    # for locale in self.summaries.keys():
    #     spec_file.append('Summary(%s): %s' % (locale,
    #                                           self.summaries[locale]))

    spec_file.extend(
        [
            'Name: %{name}',
            'Version: %{version}',
            'Release: %{release}',
        ]
    )

    # XXX yuck! this filename is available from the "sdist" command,
    # but only after it has run: and we create the spec file before
    # running "sdist", in case of --spec-only.
    if self.use_bzip2:
        spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
    else:
        spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')

    spec_file.extend(
        [
            'License: ' + (self.distribution.get_license() or "UNKNOWN"),
            'Group: ' + self.group,
            'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
            'Prefix: %{_prefix}',
        ]
    )

    if not self.force_arch:
        # noarch if no extension modules
        if not self.distribution.has_ext_modules():
            spec_file.append('BuildArch: noarch')
    else:
        spec_file.append('BuildArch: %s' % self.force_arch)

    # optional dependency/identity headers; list-valued options are
    # emitted space-separated on a single header line
    for field in (
        'Vendor',
        'Packager',
        'Provides',
        'Requires',
        'Conflicts',
        'Obsoletes',
    ):
        val = getattr(self, field.lower())
        if isinstance(val, list):
            spec_file.append('{}: {}'.format(field, ' '.join(val)))
        elif val is not None:
            spec_file.append('{}: {}'.format(field, val))

    if self.distribution.get_url():
        spec_file.append('Url: ' + self.distribution.get_url())

    if self.distribution_name:
        spec_file.append('Distribution: ' + self.distribution_name)

    if self.build_requires:
        spec_file.append('BuildRequires: ' + ' '.join(self.build_requires))

    if self.icon:
        spec_file.append('Icon: ' + os.path.basename(self.icon))

    if self.no_autoreq:
        spec_file.append('AutoReq: 0')

    spec_file.extend(
        [
            '',
            '%description',
            self.distribution.get_long_description() or "",
        ]
    )

    # put locale descriptions into spec file
    # XXX again, suppressed because config file syntax doesn't
    # easily support this ;-(
    # for locale in self.descriptions.keys():
    #     spec_file.extend([
    #         '',
    #         '%description -l ' + locale,
    #         self.descriptions[locale],
    #     ])

    # rpm scripts
    # figure out default build script
    def_setup_call = "{} {}".format(self.python, os.path.basename(sys.argv[0]))
    def_build = "%s build" % def_setup_call
    if self.use_rpm_opt_flags:
        def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build

    # insert contents of files

    # XXX this is kind of misleading: user-supplied options are files
    # that we open and interpolate into the spec file, but the defaults
    # are just text that we drop in as-is.  Hmmm.

    install_cmd = (
        '%s install -O1 --root=$RPM_BUILD_ROOT ' '--record=INSTALLED_FILES'
    ) % def_setup_call

    # (spec section name, option attribute, default script body)
    script_options = [
        ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
        ('build', 'build_script', def_build),
        ('install', 'install_script', install_cmd),
        ('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
        ('verifyscript', 'verify_script', None),
        ('pre', 'pre_install', None),
        ('post', 'post_install', None),
        ('preun', 'pre_uninstall', None),
        ('postun', 'post_uninstall', None),
    ]

    for rpm_opt, attr, default in script_options:
        # Insert contents of file referred to, if no file is referred to
        # use 'default' as contents of script
        val = getattr(self, attr)
        if val or default:
            spec_file.extend(
                [
                    '',
                    '%' + rpm_opt,
                ]
            )
            if val:
                with open(val) as f:
                    spec_file.extend(f.read().split('\n'))
            else:
                spec_file.append(default)

    # files section
    spec_file.extend(
        [
            '',
            '%files -f INSTALLED_FILES',
            '%defattr(-,root,root)',
        ]
    )

    if self.doc_files:
        spec_file.append('%doc ' + ' '.join(self.doc_files))

    if self.changelog:
        spec_file.extend(
            [
                '',
                '%changelog',
            ]
        )
        spec_file.extend(self.changelog)

    return spec_file
def _format_changelog(self, changelog):
|
| 597 |
+
"""Format the changelog correctly and convert it to a list of strings"""
|
| 598 |
+
if not changelog:
|
| 599 |
+
return changelog
|
| 600 |
+
new_changelog = []
|
| 601 |
+
for line in changelog.strip().split('\n'):
|
| 602 |
+
line = line.strip()
|
| 603 |
+
if line[0] == '*':
|
| 604 |
+
new_changelog.extend(['', line])
|
| 605 |
+
elif line[0] == '-':
|
| 606 |
+
new_changelog.append(line)
|
| 607 |
+
else:
|
| 608 |
+
new_changelog.append(' ' + line)
|
| 609 |
+
|
| 610 |
+
# strip trailing newline inserted by first changelog entry
|
| 611 |
+
if not new_changelog[0]:
|
| 612 |
+
del new_changelog[0]
|
| 613 |
+
|
| 614 |
+
return new_changelog
|
.venv/Lib/site-packages/setuptools/_distutils/command/build.py
ADDED
|
@@ -0,0 +1,152 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.build
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'build' command."""
|
| 4 |
+
|
| 5 |
+
import sys
|
| 6 |
+
import os
|
| 7 |
+
from ..core import Command
|
| 8 |
+
from ..errors import DistutilsOptionError
|
| 9 |
+
from ..util import get_platform
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def show_compilers():
    """List the available compiler types (used by --help-compiler)."""
    # imported lazily, and under an alias so this wrapper's own name
    # does not shadow the real implementation
    from ..ccompiler import show_compilers as _show_compilers

    _show_compilers()
class build(Command):
    """Distutils 'build' command.

    Computes the shared build directories and runs the relevant
    build_* sub-commands (build_py, build_clib, build_ext,
    build_scripts) for this distribution.
    """

    description = "build everything needed to install"

    user_options = [
        ('build-base=', 'b', "base directory for build library"),
        ('build-purelib=', None, "build directory for platform-neutral distributions"),
        ('build-platlib=', None, "build directory for platform-specific distributions"),
        (
            'build-lib=',
            None,
            # fix: closing parenthesis was missing from this help text
            "build directory for all distribution (defaults to either "
            + "build-purelib or build-platlib)",
        ),
        ('build-scripts=', None, "build directory for scripts"),
        ('build-temp=', 't', "temporary build directory"),
        (
            'plat-name=',
            'p',
            "platform name to build for, if supported "
            "(default: %s)" % get_platform(),
        ),
        ('compiler=', 'c', "specify the compiler type"),
        ('parallel=', 'j', "number of parallel build jobs"),
        ('debug', 'g', "compile extensions and libraries with debugging information"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('executable=', 'e', "specify final destination interpreter path (build.py)"),
    ]

    boolean_options = ['debug', 'force']

    help_options = [
        ('help-compiler', None, "list available compilers", show_compilers),
    ]

    def initialize_options(self):
        """Set every option to its pre-finalization default."""
        self.build_base = 'build'
        # these are decided only after 'build_base' has its final value
        # (unless overridden by the user or client)
        self.build_purelib = None
        self.build_platlib = None
        self.build_lib = None
        self.build_temp = None
        self.build_scripts = None
        self.compiler = None
        self.plat_name = None
        self.debug = None
        self.force = 0
        self.executable = None
        self.parallel = None

    def finalize_options(self):  # noqa: C901
        """Derive all build directories and validate user options.

        Raises DistutilsOptionError for --plat-name on non-Windows
        platforms and for a non-integer --parallel value.
        """
        if self.plat_name is None:
            self.plat_name = get_platform()
        else:
            # plat-name only supported for windows (other platforms are
            # supported via ./configure flags, if at all).  Avoid misleading
            # other platforms.
            if os.name != 'nt':
                raise DistutilsOptionError(
                    "--plat-name only supported on Windows (try "
                    "using './configure --help' on your platform)"
                )

        plat_specifier = ".{}-{}".format(self.plat_name, sys.implementation.cache_tag)

        # Make it so Python 2.x and Python 2.x with --with-pydebug don't
        # share the same build directories. Doing so confuses the build
        # process for C modules
        if hasattr(sys, 'gettotalrefcount'):
            plat_specifier += '-pydebug'

        # 'build_purelib' and 'build_platlib' just default to 'lib' and
        # 'lib.<plat>' under the base build directory.  We only use one of
        # them for a given distribution, though --
        if self.build_purelib is None:
            self.build_purelib = os.path.join(self.build_base, 'lib')
        if self.build_platlib is None:
            self.build_platlib = os.path.join(self.build_base, 'lib' + plat_specifier)

        # 'build_lib' is the actual directory that we will use for this
        # particular module distribution -- if user didn't supply it, pick
        # one of 'build_purelib' or 'build_platlib'.
        if self.build_lib is None:
            if self.distribution.has_ext_modules():
                self.build_lib = self.build_platlib
            else:
                self.build_lib = self.build_purelib

        # 'build_temp' -- temporary directory for compiler turds,
        # "build/temp.<plat>"
        if self.build_temp is None:
            self.build_temp = os.path.join(self.build_base, 'temp' + plat_specifier)
        if self.build_scripts is None:
            self.build_scripts = os.path.join(
                self.build_base, 'scripts-%d.%d' % sys.version_info[:2]
            )

        if self.executable is None and sys.executable:
            self.executable = os.path.normpath(sys.executable)

        if isinstance(self.parallel, str):
            try:
                self.parallel = int(self.parallel)
            except ValueError:
                raise DistutilsOptionError("parallel should be an integer")

    def run(self):
        """Run each applicable build sub-command, in declaration order."""
        # Run all relevant sub-commands.  This will be some subset of:
        #  - build_py      - pure Python modules
        #  - build_clib    - standalone C libraries
        #  - build_ext     - Python extensions
        #  - build_scripts - (Python) scripts
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

    # -- Predicates for the sub-command list ---------------------------

    def has_pure_modules(self):
        return self.distribution.has_pure_modules()

    def has_c_libraries(self):
        return self.distribution.has_c_libraries()

    def has_ext_modules(self):
        return self.distribution.has_ext_modules()

    def has_scripts(self):
        return self.distribution.has_scripts()

    sub_commands = [
        ('build_py', has_pure_modules),
        ('build_clib', has_c_libraries),
        ('build_ext', has_ext_modules),
        ('build_scripts', has_scripts),
    ]
.venv/Lib/site-packages/setuptools/_distutils/command/build_clib.py
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.build_clib
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'build_clib' command, to build a C/C++ library
|
| 4 |
+
that is included in the module distribution and needed by an extension
|
| 5 |
+
module."""
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
# XXX this module has *lots* of code ripped-off quite transparently from
|
| 9 |
+
# build_ext.py -- not surprisingly really, as the work required to build
|
| 10 |
+
# a static library from a collection of C source files is not really all
|
| 11 |
+
# that different from what's required to build a shared object file from
|
| 12 |
+
# a collection of C source files. Nevertheless, I haven't done the
|
| 13 |
+
# necessary refactoring to account for the overlap in code between the
|
| 14 |
+
# two modules, mainly because a number of subtle details changed in the
|
| 15 |
+
# cut 'n paste. Sigh.
|
| 16 |
+
|
| 17 |
+
import os
|
| 18 |
+
from ..core import Command
|
| 19 |
+
from ..errors import DistutilsSetupError
|
| 20 |
+
from ..sysconfig import customize_compiler
|
| 21 |
+
from distutils._log import log
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def show_compilers():
    """List the available compiler types (used by --help-compiler)."""
    # imported lazily, under an alias, so this wrapper does not shadow
    # the real implementation it delegates to
    from ..ccompiler import show_compilers as _show_compilers

    _show_compilers()
class build_clib(Command):
    """Distutils 'build_clib' command.

    Builds the C/C++ static libraries declared in the distribution's
    'libraries' option (a list of (name, build_info_dict) 2-tuples),
    typically as inputs for extension modules built by build_ext.
    """

    description = "build C/C++ libraries used by Python extensions"

    user_options = [
        ('build-clib=', 'b', "directory to build C/C++ libraries to"),
        ('build-temp=', 't', "directory to put temporary build by-products"),
        ('debug', 'g', "compile with debugging information"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('compiler=', 'c', "specify the compiler type"),
    ]

    boolean_options = ['debug', 'force']

    help_options = [
        ('help-compiler', None, "list available compilers", show_compilers),
    ]

    def initialize_options(self):
        """Set every option to its pre-finalization default."""
        self.build_clib = None
        self.build_temp = None

        # List of libraries to build
        self.libraries = None

        # Compilation options for all libraries
        self.include_dirs = None
        self.define = None
        self.undef = None
        self.debug = None
        self.force = 0
        self.compiler = None

    def finalize_options(self):
        """Inherit unset options from 'build' and validate 'libraries'."""
        # This might be confusing: both build-clib and build-temp default
        # to build-temp as defined by the "build" command.  This is because
        # I think that C libraries are really just temporary build
        # by-products, at least from the point of view of building Python
        # extensions -- but I want to keep my options open.
        self.set_undefined_options(
            'build',
            ('build_temp', 'build_clib'),
            ('build_temp', 'build_temp'),
            ('compiler', 'compiler'),
            ('debug', 'debug'),
            ('force', 'force'),
        )

        self.libraries = self.distribution.libraries
        if self.libraries:
            self.check_library_list(self.libraries)

        if self.include_dirs is None:
            self.include_dirs = self.distribution.include_dirs or []
        if isinstance(self.include_dirs, str):
            self.include_dirs = self.include_dirs.split(os.pathsep)

        # XXX same as for build_ext -- what about 'self.define' and
        # 'self.undef' ?

    def run(self):
        """Set up a compiler and build every declared library."""
        if not self.libraries:
            return

        # Yech -- this is cut 'n pasted from build_ext.py!
        from ..ccompiler import new_compiler

        self.compiler = new_compiler(
            compiler=self.compiler, dry_run=self.dry_run, force=self.force
        )
        customize_compiler(self.compiler)

        if self.include_dirs is not None:
            self.compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for name, value in self.define:
                self.compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                self.compiler.undefine_macro(macro)

        self.build_libraries(self.libraries)

    def check_library_list(self, libraries):
        """Ensure that the list of libraries is valid.

        `library` is presumably provided as a command option 'libraries'.
        This method checks that it is a list of 2-tuples, where the tuples
        are (library_name, build_info_dict).

        Raise DistutilsSetupError if the structure is invalid anywhere;
        just returns otherwise.
        """
        if not isinstance(libraries, list):
            raise DistutilsSetupError("'libraries' option must be a list of tuples")

        for lib in libraries:
            # BUG FIX: the original condition used 'and', which let
            # non-tuples and wrong-length tuples slip through to the
            # unpacking below (raising TypeError/ValueError instead of a
            # clear DistutilsSetupError).
            if not isinstance(lib, tuple) or len(lib) != 2:
                raise DistutilsSetupError("each element of 'libraries' must be a 2-tuple")

            name, build_info = lib

            if not isinstance(name, str):
                raise DistutilsSetupError(
                    "first element of each tuple in 'libraries' "
                    "must be a string (the library name)"
                )

            if '/' in name or (os.sep != '/' and os.sep in name):
                raise DistutilsSetupError(
                    "bad library name '%s': "
                    "may not contain directory separators" % lib[0]
                )

            if not isinstance(build_info, dict):
                raise DistutilsSetupError(
                    "second element of each tuple in 'libraries' "
                    "must be a dictionary (build info)"
                )

    def get_library_names(self):
        """Return the names of the libraries to build, or None."""
        # Assume the library list is valid -- 'check_library_list()' is
        # called from 'finalize_options()', so it should be!
        if not self.libraries:
            return None

        lib_names = []
        for lib_name, build_info in self.libraries:
            lib_names.append(lib_name)
        return lib_names

    def get_source_files(self):
        """Return every source filename referenced by the library list.

        Raises DistutilsSetupError when a library's build_info lacks a
        valid 'sources' list.
        """
        self.check_library_list(self.libraries)
        filenames = []
        for lib_name, build_info in self.libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'sources' must be present and must be "
                    "a list of source filenames" % lib_name
                )

            filenames.extend(sources)
        return filenames

    def build_libraries(self, libraries):
        """Compile each library's sources and archive them into a static lib."""
        for lib_name, build_info in libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'sources' must be present and must be "
                    "a list of source filenames" % lib_name
                )
            sources = list(sources)

            log.info("building '%s' library", lib_name)

            # First, compile the source code to object files in the library
            # directory.  (This should probably change to putting object
            # files in a temporary build directory.)
            macros = build_info.get('macros')
            include_dirs = build_info.get('include_dirs')
            objects = self.compiler.compile(
                sources,
                output_dir=self.build_temp,
                macros=macros,
                include_dirs=include_dirs,
                debug=self.debug,
            )

            # Now "link" the object files together into a static library.
            # (On Unix at least, this isn't really linking -- it just
            # builds an archive.  Whatever.)
            self.compiler.create_static_lib(
                objects, lib_name, output_dir=self.build_clib, debug=self.debug
            )
.venv/Lib/site-packages/setuptools/_distutils/command/build_ext.py
ADDED
|
@@ -0,0 +1,788 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.build_ext
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'build_ext' command, for building extension
|
| 4 |
+
modules (currently limited to C extensions, should accommodate C++
|
| 5 |
+
extensions ASAP)."""
|
| 6 |
+
|
| 7 |
+
import contextlib
|
| 8 |
+
import os
|
| 9 |
+
import re
|
| 10 |
+
import sys
|
| 11 |
+
from ..core import Command
|
| 12 |
+
from ..errors import (
|
| 13 |
+
DistutilsOptionError,
|
| 14 |
+
DistutilsSetupError,
|
| 15 |
+
CCompilerError,
|
| 16 |
+
DistutilsError,
|
| 17 |
+
CompileError,
|
| 18 |
+
DistutilsPlatformError,
|
| 19 |
+
)
|
| 20 |
+
from ..sysconfig import customize_compiler, get_python_version
|
| 21 |
+
from ..sysconfig import get_config_h_filename
|
| 22 |
+
from ..dep_util import newer_group
|
| 23 |
+
from ..extension import Extension
|
| 24 |
+
from ..util import get_platform
|
| 25 |
+
from distutils._log import log
|
| 26 |
+
from . import py37compat
|
| 27 |
+
|
| 28 |
+
from site import USER_BASE
|
| 29 |
+
|
| 30 |
+
# An extension name is just a dot-separated list of Python NAMEs (ie.
|
| 31 |
+
# the same as a fully-qualified module name).
|
| 32 |
+
extension_name_re = re.compile(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def show_compilers():
    """List the compiler types this platform supports (delegates to ccompiler)."""
    from ..ccompiler import show_compilers as _show

    _show()
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class build_ext(Command):
    """Distutils command that compiles and links C/C++ extension modules."""

    description = "build C/C++ extensions (compile/link to build directory)"

    # XXX thoughts on how to deal with complex command-line options like
    # these, i.e. how to make it so fancy_getopt can suck them off the
    # command line and make it look like setup.py defined the appropriate
    # lists of tuples of what-have-you.
    # - each command needs a callback to process its command-line options
    # - Command.__init__() needs access to its share of the whole
    #   command line (must ultimately come from
    #   Distribution.parse_command_line())
    # - it then calls the current command class' option-parsing
    #   callback to deal with weird options like -D, which have to
    #   parse the option text and churn out some custom data
    #   structure
    # - that data structure (in this case, a list of 2-tuples)
    #   will then be present in the command object by the time
    #   we get to finalize_options() (i.e. the constructor
    #   takes care of both command-line and client options
    #   in between initialize_options() and finalize_options())

    # Help-text suffix shared by the options that take os.pathsep-joined lists.
    sep_by = " (separated by '%s')" % os.pathsep
    # (long name, short name, help text) triples consumed by fancy_getopt.
    user_options = [
        ('build-lib=', 'b', "directory for compiled extension modules"),
        ('build-temp=', 't', "directory for temporary files (build by-products)"),
        (
            'plat-name=',
            'p',
            "platform name to cross-compile for, if supported "
            "(default: %s)" % get_platform(),
        ),
        (
            'inplace',
            'i',
            "ignore build-lib and put compiled extensions into the source "
            + "directory alongside your pure Python modules",
        ),
        (
            'include-dirs=',
            'I',
            "list of directories to search for header files" + sep_by,
        ),
        ('define=', 'D', "C preprocessor macros to define"),
        ('undef=', 'U', "C preprocessor macros to undefine"),
        ('libraries=', 'l', "external C libraries to link with"),
        (
            'library-dirs=',
            'L',
            "directories to search for external C libraries" + sep_by,
        ),
        ('rpath=', 'R', "directories to search for shared C libraries at runtime"),
        ('link-objects=', 'O', "extra explicit link objects to include in the link"),
        ('debug', 'g', "compile/link with debugging information"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('compiler=', 'c', "specify the compiler type"),
        ('parallel=', 'j', "number of parallel build jobs"),
        ('swig-cpp', None, "make SWIG create C++ files (default is C)"),
        ('swig-opts=', None, "list of SWIG command line options"),
        ('swig=', None, "path to the SWIG executable"),
        ('user', None, "add user include, library and rpath"),
    ]

    # Options that are on/off flags (take no argument on the command line).
    boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user']

    # Extra pseudo-options that print information and exit.
    help_options = [
        ('help-compiler', None, "list available compilers", show_compilers),
    ]
|
| 108 |
+
|
| 109 |
+
def initialize_options(self):
    """Reset every command option to its pristine, unset state."""
    # 'inplace' is a boolean flag and defaults to off; every other option
    # starts as None so finalize_options() can detect values the user
    # never supplied and inherit them from the 'build' command.
    self.inplace = 0
    for attr in (
        'extensions',
        'build_lib',
        'plat_name',
        'build_temp',
        'package',
        'include_dirs',
        'define',
        'undef',
        'libraries',
        'library_dirs',
        'rpath',
        'link_objects',
        'debug',
        'force',
        'compiler',
        'swig',
        'swig_cpp',
        'swig_opts',
        'user',
        'parallel',
    ):
        setattr(self, attr, None)
|
| 132 |
+
|
| 133 |
+
def finalize_options(self):  # noqa: C901
    """Compute final values for all build options.

    Inherits unset options from the 'build' command, seeds the include and
    library search paths with the interpreter's own directories (plus
    platform-specific extras for Windows, Cygwin, and shared-libpython
    builds), normalizes string-valued options into lists, and parses the
    -D/-U/--swig-opts/--parallel option strings.

    Raises DistutilsOptionError if --parallel is not an integer.
    """
    from distutils import sysconfig

    # Inherit anything the user didn't set explicitly from 'build'.
    self.set_undefined_options(
        'build',
        ('build_lib', 'build_lib'),
        ('build_temp', 'build_temp'),
        ('compiler', 'compiler'),
        ('debug', 'debug'),
        ('force', 'force'),
        ('parallel', 'parallel'),
        ('plat_name', 'plat_name'),
    )

    if self.package is None:
        self.package = self.distribution.ext_package

    self.extensions = self.distribution.ext_modules

    # Make sure Python's include directories (for Python.h, pyconfig.h,
    # etc.) are in the include search path.
    py_include = sysconfig.get_python_inc()
    plat_py_include = sysconfig.get_python_inc(plat_specific=1)
    if self.include_dirs is None:
        self.include_dirs = self.distribution.include_dirs or []
    if isinstance(self.include_dirs, str):
        # A command-line value arrives as one os.pathsep-joined string.
        self.include_dirs = self.include_dirs.split(os.pathsep)

    # If in a virtualenv, add its include directory
    # Issue 16116
    if sys.exec_prefix != sys.base_exec_prefix:
        self.include_dirs.append(os.path.join(sys.exec_prefix, 'include'))

    # Put the Python "system" include dir at the end, so that
    # any local include dirs take precedence.
    self.include_dirs.extend(py_include.split(os.path.pathsep))
    if plat_py_include != py_include:
        self.include_dirs.extend(plat_py_include.split(os.path.pathsep))

    self.ensure_string_list('libraries')
    self.ensure_string_list('link_objects')

    # Life is easier if we're not forever checking for None, so
    # simplify these options to empty lists if unset
    if self.libraries is None:
        self.libraries = []
    if self.library_dirs is None:
        self.library_dirs = []
    elif isinstance(self.library_dirs, str):
        self.library_dirs = self.library_dirs.split(os.pathsep)

    if self.rpath is None:
        self.rpath = []
    elif isinstance(self.rpath, str):
        self.rpath = self.rpath.split(os.pathsep)

    # for extensions under windows use different directories
    # for Release and Debug builds.
    # also Python's library directory must be appended to library_dirs
    if os.name == 'nt':
        # the 'libs' directory is for binary installs - we assume that
        # must be the *native* platform. But we don't really support
        # cross-compiling via a binary install anyway, so we let it go.
        self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
        if sys.base_exec_prefix != sys.prefix:  # Issue 16116
            self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs'))
        if self.debug:
            self.build_temp = os.path.join(self.build_temp, "Debug")
        else:
            self.build_temp = os.path.join(self.build_temp, "Release")

        # Append the source distribution include and library directories,
        # this allows distutils on windows to work in the source tree
        self.include_dirs.append(os.path.dirname(get_config_h_filename()))
        self.library_dirs.append(sys.base_exec_prefix)

        # Use the .lib files for the correct architecture
        if self.plat_name == 'win32':
            suffix = 'win32'
        else:
            # win-amd64
            suffix = self.plat_name[4:]
        new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
        if suffix:
            new_lib = os.path.join(new_lib, suffix)
        self.library_dirs.append(new_lib)

    # For extensions under Cygwin, Python's library directory must be
    # appended to library_dirs
    if sys.platform[:6] == 'cygwin':
        if not sysconfig.python_build:
            # building third party extensions
            self.library_dirs.append(
                os.path.join(
                    sys.prefix, "lib", "python" + get_python_version(), "config"
                )
            )
        else:
            # building python standard extensions
            self.library_dirs.append('.')

    # For building extensions with a shared Python library,
    # Python's library directory must be appended to library_dirs
    # See Issues: #1600860, #4366
    if sysconfig.get_config_var('Py_ENABLE_SHARED'):
        if not sysconfig.python_build:
            # building third party extensions
            self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
        else:
            # building python standard extensions
            self.library_dirs.append('.')

    # The argument parsing will result in self.define being a string, but
    # it has to be a list of 2-tuples. All the preprocessor symbols
    # specified by the 'define' option will be set to '1'. Multiple
    # symbols can be separated with commas.

    if self.define:
        defines = self.define.split(',')
        self.define = [(symbol, '1') for symbol in defines]

    # The option for macros to undefine is also a string from the
    # option parsing, but has to be a list. Multiple symbols can also
    # be separated with commas here.
    if self.undef:
        self.undef = self.undef.split(',')

    if self.swig_opts is None:
        self.swig_opts = []
    else:
        # SWIG options arrive as one space-separated string.
        self.swig_opts = self.swig_opts.split(' ')

    # Finally add the user include and library directories if requested
    if self.user:
        user_include = os.path.join(USER_BASE, "include")
        user_lib = os.path.join(USER_BASE, "lib")
        if os.path.isdir(user_include):
            self.include_dirs.append(user_include)
        if os.path.isdir(user_lib):
            self.library_dirs.append(user_lib)
            self.rpath.append(user_lib)

    if isinstance(self.parallel, str):
        try:
            self.parallel = int(self.parallel)
        except ValueError:
            raise DistutilsOptionError("parallel should be an integer")
|
| 280 |
+
|
| 281 |
+
def run(self):  # noqa: C901
    """Set up the CCompiler and build all configured extensions.

    Does nothing when no extensions are configured.  Otherwise, folds in
    any C libraries built by 'build_clib', creates and customizes the
    compiler object, pushes all include/define/library/link options into
    it, and delegates the actual work to build_extensions().
    """
    from ..ccompiler import new_compiler

    # 'self.extensions', as supplied by setup.py, is a list of
    # Extension instances. See the documentation for Extension (in
    # distutils.extension) for details.
    #
    # For backwards compatibility with Distutils 0.8.2 and earlier, we
    # also allow the 'extensions' list to be a list of tuples:
    #    (ext_name, build_info)
    # where build_info is a dictionary containing everything that
    # Extension instances do except the name, with a few things being
    # differently named. We convert these 2-tuples to Extension
    # instances as needed.

    if not self.extensions:
        return

    # If we were asked to build any C/C++ libraries, make sure that the
    # directory where we put them is in the library search path for
    # linking extensions.
    if self.distribution.has_c_libraries():
        build_clib = self.get_finalized_command('build_clib')
        self.libraries.extend(build_clib.get_library_names() or [])
        self.library_dirs.append(build_clib.build_clib)

    # Setup the CCompiler object that we'll use to do all the
    # compiling and linking
    self.compiler = new_compiler(
        compiler=self.compiler,
        verbose=self.verbose,
        dry_run=self.dry_run,
        force=self.force,
    )
    customize_compiler(self.compiler)
    # If we are cross-compiling, init the compiler now (if we are not
    # cross-compiling, init would not hurt, but people may rely on
    # late initialization of compiler even if they shouldn't...)
    if os.name == 'nt' and self.plat_name != get_platform():
        self.compiler.initialize(self.plat_name)

    # And make sure that any compile/link-related options (which might
    # come from the command-line or from the setup script) are set in
    # that CCompiler object -- that way, they automatically apply to
    # all compiling and linking done here.
    if self.include_dirs is not None:
        self.compiler.set_include_dirs(self.include_dirs)
    if self.define is not None:
        # 'define' option is a list of (name,value) tuples
        for name, value in self.define:
            self.compiler.define_macro(name, value)
    if self.undef is not None:
        for macro in self.undef:
            self.compiler.undefine_macro(macro)
    if self.libraries is not None:
        self.compiler.set_libraries(self.libraries)
    if self.library_dirs is not None:
        self.compiler.set_library_dirs(self.library_dirs)
    if self.rpath is not None:
        self.compiler.set_runtime_library_dirs(self.rpath)
    if self.link_objects is not None:
        self.compiler.set_link_objects(self.link_objects)

    # Now actually compile and link everything.
    self.build_extensions()
|
| 346 |
+
|
| 347 |
+
def check_extensions_list(self, extensions):  # noqa: C901
    """Ensure that the list of extensions (presumably provided as a
    command option 'extensions') is valid, i.e. it is a list of
    Extension objects.  We also support the old-style list of 2-tuples,
    where the tuples are (ext_name, build_info), which are converted to
    Extension instances here (the list is modified in place).

    Raise DistutilsSetupError if the structure is invalid anywhere;
    just returns otherwise.
    """
    if not isinstance(extensions, list):
        raise DistutilsSetupError(
            "'ext_modules' option must be a list of Extension instances"
        )

    for i, ext in enumerate(extensions):
        if isinstance(ext, Extension):
            continue  # OK! (assume type-checking done
            # by Extension constructor)

        if not isinstance(ext, tuple) or len(ext) != 2:
            raise DistutilsSetupError(
                "each element of 'ext_modules' option must be an "
                "Extension instance or 2-tuple"
            )

        ext_name, build_info = ext

        # Old-style tuples still work but are deprecated; warn the user.
        log.warning(
            "old-style (ext_name, build_info) tuple found in "
            "ext_modules for extension '%s' "
            "-- please convert to Extension instance",
            ext_name,
        )

        if not (isinstance(ext_name, str) and extension_name_re.match(ext_name)):
            raise DistutilsSetupError(
                "first element of each tuple in 'ext_modules' "
                "must be the extension name (a string)"
            )

        if not isinstance(build_info, dict):
            raise DistutilsSetupError(
                "second element of each tuple in 'ext_modules' "
                "must be a dictionary (build info)"
            )

        # OK, the (ext_name, build_info) dict is type-safe: convert it
        # to an Extension instance.
        ext = Extension(ext_name, build_info['sources'])

        # Easy stuff: one-to-one mapping from dict elements to
        # instance attributes.
        for key in (
            'include_dirs',
            'library_dirs',
            'libraries',
            'extra_objects',
            'extra_compile_args',
            'extra_link_args',
        ):
            val = build_info.get(key)
            if val is not None:
                setattr(ext, key, val)

        # Medium-easy stuff: same syntax/semantics, different names.
        ext.runtime_library_dirs = build_info.get('rpath')
        if 'def_file' in build_info:
            log.warning(
                "'def_file' element of build info dict " "no longer supported"
            )

        # Non-trivial stuff: 'macros' split into 'define_macros'
        # and 'undef_macros'.  1-tuples undefine, 2-tuples define.
        macros = build_info.get('macros')
        if macros:
            ext.define_macros = []
            ext.undef_macros = []
            for macro in macros:
                if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
                    raise DistutilsSetupError(
                        "'macros' element of build info dict "
                        "must be 1- or 2-tuple"
                    )
                if len(macro) == 1:
                    ext.undef_macros.append(macro[0])
                elif len(macro) == 2:
                    ext.define_macros.append(macro)

        # Replace the tuple with the freshly built Extension, in place.
        extensions[i] = ext
|
| 437 |
+
|
| 438 |
+
def get_source_files(self):
    """Return every source filename referenced by the extension list."""
    self.check_extensions_list(self.extensions)

    # Wouldn't it be neat if we knew the names of header files too...
    return [source for ext in self.extensions for source in ext.sources]
|
| 446 |
+
|
| 447 |
+
def get_outputs(self):
    """Return the full paths of the extension modules this command builds."""
    # Sanity check the 'extensions' list -- can't assume this runs in the
    # same invocation as a 'build_extensions()' call (in fact, we can
    # probably assume that it *isn't*!).
    self.check_extensions_list(self.extensions)

    # Note that this deliberately ignores the 'inplace' flag and reports
    # everything as going into the "build" tree.
    return [self.get_ext_fullpath(ext.name) for ext in self.extensions]
|
| 460 |
+
|
| 461 |
+
def build_extensions(self):
    """Validate the extension list, then build serially or in parallel."""
    # First, sanity-check the 'extensions' list
    self.check_extensions_list(self.extensions)
    builder = (
        self._build_extensions_parallel
        if self.parallel
        else self._build_extensions_serial
    )
    builder()
|
| 468 |
+
|
| 469 |
+
def _build_extensions_parallel(self):
|
| 470 |
+
workers = self.parallel
|
| 471 |
+
if self.parallel is True:
|
| 472 |
+
workers = os.cpu_count() # may return None
|
| 473 |
+
try:
|
| 474 |
+
from concurrent.futures import ThreadPoolExecutor
|
| 475 |
+
except ImportError:
|
| 476 |
+
workers = None
|
| 477 |
+
|
| 478 |
+
if workers is None:
|
| 479 |
+
self._build_extensions_serial()
|
| 480 |
+
return
|
| 481 |
+
|
| 482 |
+
with ThreadPoolExecutor(max_workers=workers) as executor:
|
| 483 |
+
futures = [
|
| 484 |
+
executor.submit(self.build_extension, ext) for ext in self.extensions
|
| 485 |
+
]
|
| 486 |
+
for ext, fut in zip(self.extensions, futures):
|
| 487 |
+
with self._filter_build_errors(ext):
|
| 488 |
+
fut.result()
|
| 489 |
+
|
| 490 |
+
def _build_extensions_serial(self):
|
| 491 |
+
for ext in self.extensions:
|
| 492 |
+
with self._filter_build_errors(ext):
|
| 493 |
+
self.build_extension(ext)
|
| 494 |
+
|
| 495 |
+
@contextlib.contextmanager
|
| 496 |
+
def _filter_build_errors(self, ext):
|
| 497 |
+
try:
|
| 498 |
+
yield
|
| 499 |
+
except (CCompilerError, DistutilsError, CompileError) as e:
|
| 500 |
+
if not ext.optional:
|
| 501 |
+
raise
|
| 502 |
+
self.warn('building extension "{}" failed: {}'.format(ext.name, e))
|
| 503 |
+
|
| 504 |
+
def build_extension(self, ext):
    """Compile and link a single Extension into its output file.

    Skips the build entirely when neither sources nor declared depends
    are newer than the existing output (unless self.force).  Raises
    DistutilsSetupError if ext.sources is missing or not a list/tuple.
    """
    sources = ext.sources
    if sources is None or not isinstance(sources, (list, tuple)):
        raise DistutilsSetupError(
            "in 'ext_modules' option (extension '%s'), "
            "'sources' must be present and must be "
            "a list of source filenames" % ext.name
        )
    # sort to make the resulting .so file build reproducible
    sources = sorted(sources)

    ext_path = self.get_ext_fullpath(ext.name)
    depends = sources + ext.depends
    if not (self.force or newer_group(depends, ext_path, 'newer')):
        log.debug("skipping '%s' extension (up-to-date)", ext.name)
        return
    else:
        log.info("building '%s' extension", ext.name)

    # First, scan the sources for SWIG definition files (.i), run
    # SWIG on 'em to create .c files, and modify the sources list
    # accordingly.
    sources = self.swig_sources(sources, ext)

    # Next, compile the source code to object files.

    # XXX not honouring 'define_macros' or 'undef_macros' -- the
    # CCompiler API needs to change to accommodate this, and I
    # want to do one thing at a time!

    # Two possible sources for extra compiler arguments:
    #   - 'extra_compile_args' in Extension object
    #   - CFLAGS environment variable (not particularly
    #     elegant, but people seem to expect it and I
    #     guess it's useful)
    # The environment variable should take precedence, and
    # any sensible compiler will give precedence to later
    # command line args.  Hence we combine them in order:
    extra_args = ext.extra_compile_args or []

    # Copy define_macros, then append 1-tuples for each macro to undefine.
    macros = ext.define_macros[:]
    for undef in ext.undef_macros:
        macros.append((undef,))

    objects = self.compiler.compile(
        sources,
        output_dir=self.build_temp,
        macros=macros,
        include_dirs=ext.include_dirs,
        debug=self.debug,
        extra_postargs=extra_args,
        depends=ext.depends,
    )

    # XXX outdated variable, kept here in case third-part code
    # needs it.
    self._built_objects = objects[:]

    # Now link the object files together into a "shared object" --
    # of course, first we have to figure out all the other things
    # that go into the mix.
    if ext.extra_objects:
        objects.extend(ext.extra_objects)
    extra_args = ext.extra_link_args or []

    # Detect target language, if not provided
    language = ext.language or self.compiler.detect_language(sources)

    self.compiler.link_shared_object(
        objects,
        ext_path,
        libraries=self.get_libraries(ext),
        library_dirs=ext.library_dirs,
        runtime_library_dirs=ext.runtime_library_dirs,
        extra_postargs=extra_args,
        export_symbols=self.get_export_symbols(ext),
        debug=self.debug,
        build_temp=self.build_temp,
        target_lang=language,
    )
|
| 584 |
+
|
| 585 |
+
def swig_sources(self, sources, extension):
    """Walk 'sources' looking for SWIG interface (.i) files, run SWIG on
    each one found, and return a modified sources list with the .i files
    replaced by the generated C (or C++) wrapper files.
    """
    wrapped = []
    interface_files = []
    targets = {}

    # XXX the generated C/C++ files land in the source tree, which is
    # fine for developers who want to distribute the generated source --
    # but there should be an option to put SWIG output in the temp dir.

    if self.swig_cpp:
        log.warning("--swig-cpp is deprecated - use --swig-opts=-c++")

    want_cpp = (
        self.swig_cpp
        or ('-c++' in self.swig_opts)
        or ('-c++' in extension.swig_opts)
    )
    target_ext = '.cpp' if want_cpp else '.c'

    for src in sources:
        base, ext = os.path.splitext(src)
        if ext == ".i":  # SWIG interface file
            generated = base + '_wrap' + target_ext
            wrapped.append(generated)
            interface_files.append(src)
            targets[src] = generated
        else:
            wrapped.append(src)

    if not interface_files:
        return wrapped

    swig = self.swig or self.find_swig()
    swig_cmd = [swig, "-python"]
    swig_cmd.extend(self.swig_opts)
    if self.swig_cpp:
        swig_cmd.append("-c++")

    # Do not override commandline arguments
    if not self.swig_opts:
        for opt in extension.swig_opts:
            swig_cmd.append(opt)

    for src in interface_files:
        target = targets[src]
        log.info("swigging %s to %s", src, target)
        self.spawn(swig_cmd + ["-o", target, src])

    return wrapped
|
| 641 |
+
|
| 642 |
+
def find_swig(self):
    """Return the name of the SWIG executable.

    On Unix this is simply "swig" (expected to be found on the PATH).
    On Windows, probe the conventional install directories first and
    fall back to relying on the PATH, as on Unix.
    """
    if os.name == "posix":
        return "swig"
    if os.name == "nt":
        # Probe the standard Windows install locations (newest first);
        # if none exists, assume swig.exe is reachable via PATH.
        candidates = (
            os.path.join("c:\\swig%s" % vers, "swig.exe")
            for vers in ("1.3", "1.2", "1.1")
        )
        for candidate in candidates:
            if os.path.isfile(candidate):
                return candidate
        return "swig.exe"
    raise DistutilsPlatformError(
        "I don't know how to find (much less run) SWIG "
        "on platform '%s'" % os.name
    )
|
| 664 |
+
|
| 665 |
+
# -- Name generators -----------------------------------------------
|
| 666 |
+
# (extension names, filenames, whatever)
|
| 667 |
+
def get_ext_fullpath(self, ext_name):
    """Returns the path of the filename for a given extension.

    The file is located in `build_lib` or directly in the package
    (inplace option).
    """
    fullname = self.get_ext_fullname(ext_name)
    parts = fullname.split('.')
    filename = self.get_ext_filename(parts[-1])

    if not self.inplace:
        # regular build: build_lib/package/path/filename
        relative = os.path.join(*parts[:-1], filename)
        return os.path.join(self.build_lib, relative)

    # inplace build: locate the package directory on disk by asking the
    # (finalized) build_py command, then drop the file straight in it.
    package = '.'.join(parts[:-1])
    build_py = self.get_finalized_command('build_py')
    package_dir = os.path.abspath(build_py.get_package_dir(package))

    # package_dir/filename
    return os.path.join(package_dir, filename)
|
| 693 |
+
|
| 694 |
+
def get_ext_fullname(self, ext_name):
    """Return ``ext_name`` prefixed with the command's package, if any."""
    if self.package is None:
        return ext_name
    return '{}.{}'.format(self.package, ext_name)
|
| 702 |
+
|
| 703 |
+
def get_ext_filename(self, ext_name):
    r"""Convert the name of an extension (eg. "foo.bar") into the name
    of the file from which it will be loaded (eg. "foo/bar.so", or
    "foo\bar.pyd").
    """
    from ..sysconfig import get_config_var

    # EXT_SUFFIX already carries the platform-specific tag and extension.
    suffix = get_config_var('EXT_SUFFIX')
    return os.path.join(*ext_name.split('.')) + suffix
|
| 713 |
+
|
| 714 |
+
def get_export_symbols(self, ext):
    """Return the list of symbols that a shared extension has to
    export.  This either uses 'ext.export_symbols' or, if it's not
    provided, "PyInit_" + module_name.  Only relevant on Windows, where
    the .pyd file (DLL) must export the module "PyInit_" function.
    """
    module_name = ext.name.split('.')[-1]
    try:
        # Unicode module name support as defined in PEP-489
        # https://peps.python.org/pep-0489/#export-hook-name
        module_name.encode('ascii')
    except UnicodeEncodeError:
        punycoded = module_name.encode('punycode').replace(b'-', b'_')
        suffix = 'U_' + punycoded.decode('ascii')
    else:
        suffix = '_' + module_name
    initfunc_name = "PyInit" + suffix
    # Ensure the init hook is exported exactly once (the list may already
    # contain it from a previous call or from user configuration).
    if initfunc_name not in ext.export_symbols:
        ext.export_symbols.append(initfunc_name)
    return ext.export_symbols
|
| 734 |
+
|
| 735 |
+
def get_libraries(self, ext):  # noqa: C901
    """Return the list of libraries to link against when building a
    shared extension.  On most platforms, this is just 'ext.libraries';
    on Windows, we add the Python library (eg. python20.dll).
    """
    # The python library is always needed on Windows.  For MSVC, this
    # is redundant, since the library is mentioned in a pragma in
    # pyconfig.h that MSVC groks.  The other Windows compilers all seem
    # to need it mentioned explicitly, though, so that's what we do.
    # Append '_d' to the python import library on debug builds.
    if sys.platform == "win32":
        from .._msvccompiler import MSVCCompiler

        if not isinstance(self.compiler, MSVCCompiler):
            # e.g. "python312" (major/minor extracted from hexversion),
            # or "python312_d" for debug builds.
            template = "python%d%d"
            if self.debug:
                template = template + '_d'
            pythonlib = template % (
                sys.hexversion >> 24,
                (sys.hexversion >> 16) & 0xFF,
            )
            # don't extend ext.libraries, it may be shared with other
            # extensions, it is a reference to the original list
            return ext.libraries + [pythonlib]
        # NOTE(review): for MSVC on win32 this falls through to the final
        # return below (py37compat.pythonlib()) — intentional, since the
        # pyconfig.h pragma supplies the library for MSVC.
    else:
        # On Android only the main executable and LD_PRELOADs are considered
        # to be RTLD_GLOBAL, all the dependencies of the main executable
        # remain RTLD_LOCAL and so the shared libraries must be linked with
        # libpython when python is built with a shared python library (issue
        # bpo-21536).
        # On Cygwin (and if required, other POSIX-like platforms based on
        # Windows like MinGW) it is simply necessary that all symbols in
        # shared libraries are resolved at link time.
        from ..sysconfig import get_config_var

        link_libpython = False
        if get_config_var('Py_ENABLE_SHARED'):
            # A native build on an Android device or on Cygwin
            if hasattr(sys, 'getandroidapilevel'):
                link_libpython = True
            elif sys.platform == 'cygwin':
                link_libpython = True
            elif '_PYTHON_HOST_PLATFORM' in os.environ:
                # We are cross-compiling for one of the relevant platforms
                if get_config_var('ANDROID_API_LEVEL') != 0:
                    link_libpython = True
                elif get_config_var('MACHDEP') == 'cygwin':
                    link_libpython = True

        if link_libpython:
            # LDVERSION includes ABI flags, e.g. "3.12" / "3.7m".
            ldversion = get_config_var('LDVERSION')
            return ext.libraries + ['python' + ldversion]

    # Fallback: py37compat.pythonlib() supplies the Python library name
    # only on interpreters < 3.8 (empty list otherwise).
    return ext.libraries + py37compat.pythonlib()
|
.venv/Lib/site-packages/setuptools/_distutils/command/build_py.py
ADDED
|
@@ -0,0 +1,406 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.build_py
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'build_py' command."""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import importlib.util
|
| 7 |
+
import sys
|
| 8 |
+
import glob
|
| 9 |
+
|
| 10 |
+
from ..core import Command
|
| 11 |
+
from ..errors import DistutilsOptionError, DistutilsFileError
|
| 12 |
+
from ..util import convert_path
|
| 13 |
+
from distutils._log import log
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class build_py(Command):
    description = "\"build\" pure Python modules (copy to build directory)"

    user_options = [
        ('build-lib=', 'd', "directory to \"build\" (copy) to"),
        ('compile', 'c', "compile .py to .pyc"),
        ('no-compile', None, "don't compile .py files [default]"),
        (
            'optimize=',
            'O',
            "also compile with optimization: -O1 for \"python -O\", "
            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]",
        ),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
    ]

    boolean_options = ['compile', 'force']
    negative_opt = {'no-compile': 'compile'}

    def initialize_options(self):
        """Set all command options to their pre-finalization defaults."""
        self.build_lib = None
        self.py_modules = None
        self.package = None
        self.package_data = None
        self.package_dir = None
        self.compile = 0
        self.optimize = 0
        self.force = None

    def finalize_options(self):
        """Resolve options from the 'build' command and the distribution."""
        self.set_undefined_options(
            'build', ('build_lib', 'build_lib'), ('force', 'force')
        )

        # Get the distribution options that are aliases for build_py
        # options -- list of packages and list of modules.
        self.packages = self.distribution.packages
        self.py_modules = self.distribution.py_modules
        self.package_data = self.distribution.package_data
        self.package_dir = {}
        if self.distribution.package_dir:
            for name, path in self.distribution.package_dir.items():
                self.package_dir[name] = convert_path(path)
        self.data_files = self.get_data_files()

        # Ick, copied straight from install_lib.py (fancy_getopt needs a
        # type system!  Hell, *everything* needs a type system!!!)
        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                assert 0 <= self.optimize <= 2
            except (ValueError, AssertionError):
                raise DistutilsOptionError("optimize must be 0, 1, or 2")

    def run(self):
        """Copy modules (and package data) to build_lib; optionally compile."""
        # XXX copy_file by default preserves atime and mtime.  IMHO this is
        # the right thing to do, but perhaps it should be an option -- in
        # particular, a site administrator might want installed files to
        # reflect the time of installation rather than the last
        # modification time before the installed release.

        # XXX copy_file by default preserves mode, which appears to be the
        # wrong thing to do: if a file is read-only in the working
        # directory, we want it to be installed read/write so that the next
        # installation of the same module distribution can overwrite it
        # without problems.  (This might be a Unix-specific issue.)  Thus
        # we turn off 'preserve_mode' when copying to the build directory,
        # since the build directory is supposed to be exactly what the
        # installation will look like (ie. we preserve mode when
        # installing).

        # Two options control which modules will be installed: 'packages'
        # and 'py_modules'.  The former lets us work with whole packages, not
        # specifying individual modules at all; the latter is for
        # specifying modules one-at-a-time.

        if self.py_modules:
            self.build_modules()
        if self.packages:
            self.build_packages()
            self.build_package_data()

        self.byte_compile(self.get_outputs(include_bytecode=0))

    def get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        data = []
        if not self.packages:
            return data
        for package in self.packages:
            # Locate package source directory
            src_dir = self.get_package_dir(package)

            # Compute package build directory
            build_dir = os.path.join(*([self.build_lib] + package.split('.')))

            # Length of path to strip from found files
            plen = 0
            if src_dir:
                plen = len(src_dir) + 1

            # Strip directory from globbed filenames
            filenames = [file[plen:] for file in self.find_data_files(package, src_dir)]
            data.append((package, src_dir, build_dir, filenames))
        return data

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        # patterns under the '' key apply to every package
        globs = self.package_data.get('', []) + self.package_data.get(package, [])
        files = []
        for pattern in globs:
            # Each pattern has to be converted to a platform-specific path
            filelist = glob.glob(
                os.path.join(glob.escape(src_dir), convert_path(pattern))
            )
            # Files that match more than one pattern are only added once
            files.extend(
                [fn for fn in filelist if fn not in files and os.path.isfile(fn)]
            )
        return files

    def build_package_data(self):
        """Copy data files into build directory"""
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                self.copy_file(
                    os.path.join(src_dir, filename), target, preserve_mode=False
                )

    def get_package_dir(self, package):
        """Return the directory, relative to the top of the source
        distribution, where package 'package' should be found
        (at least according to the 'package_dir' option, if any)."""
        path = package.split('.')

        if not self.package_dir:
            if path:
                return os.path.join(*path)
            else:
                return ''
        else:
            # Walk from the most-specific prefix of 'path' toward the root,
            # looking for an entry in package_dir; un-matched components
            # accumulate in 'tail'.
            tail = []
            while path:
                try:
                    pdir = self.package_dir['.'.join(path)]
                except KeyError:
                    tail.insert(0, path[-1])
                    del path[-1]
                else:
                    tail.insert(0, pdir)
                    return os.path.join(*tail)
            else:
                # Oops, got all the way through 'path' without finding a
                # match in package_dir.  If package_dir defines a directory
                # for the root (nameless) package, then fallback on it;
                # otherwise, we might as well have not consulted
                # package_dir at all, as we just use the directory implied
                # by 'tail' (which should be the same as the original value
                # of 'path' at this point).
                pdir = self.package_dir.get('')
                if pdir is not None:
                    tail.insert(0, pdir)

                if tail:
                    return os.path.join(*tail)
                else:
                    return ''

    def check_package(self, package, package_dir):
        """Validate 'package_dir'; return its __init__.py path or None."""
        # Empty dir name means current directory, which we can probably
        # assume exists.  Also, os.path.exists and isdir don't know about
        # my "empty string means current dir" convention, so we have to
        # circumvent them.
        if package_dir != "":
            if not os.path.exists(package_dir):
                raise DistutilsFileError(
                    "package directory '%s' does not exist" % package_dir
                )
            if not os.path.isdir(package_dir):
                raise DistutilsFileError(
                    "supposed package directory '%s' exists, "
                    "but is not a directory" % package_dir
                )

        # Directories without __init__.py are namespace packages (PEP 420).
        if package:
            init_py = os.path.join(package_dir, "__init__.py")
            if os.path.isfile(init_py):
                return init_py

        # Either not in a package at all (__init__.py not expected), or
        # __init__.py doesn't exist -- so don't return the filename.
        return None

    def check_module(self, module, module_file):
        """Warn and return False if 'module_file' does not exist."""
        if not os.path.isfile(module_file):
            log.warning("file %s (for module %s) not found", module_file, module)
            return False
        else:
            return True

    def find_package_modules(self, package, package_dir):
        """Return (package, module, filename) tuples for all .py files
        in 'package_dir', excluding the setup script itself."""
        self.check_package(package, package_dir)
        module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py"))
        modules = []
        setup_script = os.path.abspath(self.distribution.script_name)

        for f in module_files:
            abs_f = os.path.abspath(f)
            if abs_f != setup_script:
                module = os.path.splitext(os.path.basename(f))[0]
                modules.append((package, module, f))
            else:
                self.debug_print("excluding %s" % setup_script)
        return modules

    def find_modules(self):
        """Finds individually-specified Python modules, ie. those listed by
        module name in 'self.py_modules'.  Returns a list of tuples (package,
        module_base, filename): 'package' is a tuple of the path through
        package-space to the module; 'module_base' is the bare (no
        packages, no dots) module name, and 'filename' is the path to the
        ".py" file (relative to the distribution root) that implements the
        module.
        """
        # Map package names to tuples of useful info about the package:
        #    (package_dir, checked)
        # package_dir - the directory where we'll find source files for
        #   this package
        # checked - true if we have checked that the package directory
        #   is valid (exists, contains __init__.py, ... ?)
        packages = {}

        # List of (package, module, filename) tuples to return
        modules = []

        # We treat modules-in-packages almost the same as toplevel modules,
        # just the "package" for a toplevel is empty (either an empty
        # string or empty list, depending on context).  Differences:
        #   - don't check for __init__.py in directory for empty package
        for module in self.py_modules:
            path = module.split('.')
            package = '.'.join(path[0:-1])
            module_base = path[-1]

            try:
                (package_dir, checked) = packages[package]
            except KeyError:
                package_dir = self.get_package_dir(package)
                checked = 0

            if not checked:
                init_py = self.check_package(package, package_dir)
                packages[package] = (package_dir, 1)
                if init_py:
                    modules.append((package, "__init__", init_py))

            # XXX perhaps we should also check for just .pyc files
            # (so greedy closed-source bastards can distribute Python
            # modules too)
            module_file = os.path.join(package_dir, module_base + ".py")
            if not self.check_module(module, module_file):
                continue

            modules.append((package, module_base, module_file))

        return modules

    def find_all_modules(self):
        """Compute the list of all modules that will be built, whether
        they are specified one-module-at-a-time ('self.py_modules') or
        by whole packages ('self.packages').  Return a list of tuples
        (package, module, module_file), just like 'find_modules()' and
        'find_package_modules()' do."""
        modules = []
        if self.py_modules:
            modules.extend(self.find_modules())
        if self.packages:
            for package in self.packages:
                package_dir = self.get_package_dir(package)
                m = self.find_package_modules(package, package_dir)
                modules.extend(m)
        return modules

    def get_source_files(self):
        """Return the source filenames of every module to be built."""
        return [module[-1] for module in self.find_all_modules()]

    def get_module_outfile(self, build_dir, package, module):
        """Return the build-tree path for 'module' of 'package' (a sequence
        of package-name components)."""
        outfile_path = [build_dir] + list(package) + [module + ".py"]
        return os.path.join(*outfile_path)

    def get_outputs(self, include_bytecode=1):
        """Return the list of files this command would produce, optionally
        including .pyc/.pyo bytecode files."""
        modules = self.find_all_modules()
        outputs = []
        for package, module, module_file in modules:
            package = package.split('.')
            filename = self.get_module_outfile(self.build_lib, package, module)
            outputs.append(filename)
            if include_bytecode:
                if self.compile:
                    outputs.append(
                        importlib.util.cache_from_source(filename, optimization='')
                    )
                if self.optimize > 0:
                    outputs.append(
                        importlib.util.cache_from_source(
                            filename, optimization=self.optimize
                        )
                    )

        outputs += [
            os.path.join(build_dir, filename)
            for package, src_dir, build_dir, filenames in self.data_files
            for filename in filenames
        ]

        return outputs

    def build_module(self, module, module_file, package):
        """Copy 'module_file' into the build tree under 'package'."""
        if isinstance(package, str):
            package = package.split('.')
        elif not isinstance(package, (list, tuple)):
            raise TypeError(
                "'package' must be a string (dot-separated), list, or tuple"
            )

        # Now put the module source file into the "build" area -- this is
        # easy, we just copy it somewhere under self.build_lib (the build
        # directory for Python source).
        outfile = self.get_module_outfile(self.build_lib, package, module)
        dir = os.path.dirname(outfile)
        self.mkpath(dir)
        return self.copy_file(module_file, outfile, preserve_mode=0)

    def build_modules(self):
        """Build each individually-listed module ('py_modules')."""
        modules = self.find_modules()
        for package, module, module_file in modules:
            # Now "build" the module -- ie. copy the source file to
            # self.build_lib (the build directory for Python source).
            # (Actually, it gets copied to the directory for this package
            # under self.build_lib.)
            self.build_module(module, module_file, package)

    def build_packages(self):
        """Build every module of every listed package."""
        for package in self.packages:
            # Get list of (package, module, module_file) tuples based on
            # scanning the package directory.  'package' is only included
            # in the tuple so that 'find_modules()' and
            # 'find_package_tuples()' have a consistent interface; it's
            # ignored here (apart from a sanity check).  Also, 'module' is
            # the *unqualified* module name (ie. no dots, no package -- we
            # already know its package!), and 'module_file' is the path to
            # the .py file, relative to the current directory
            # (ie. including 'package_dir').
            package_dir = self.get_package_dir(package)
            modules = self.find_package_modules(package, package_dir)

            # Now loop over the modules we found, "building" each one (just
            # copy it to self.build_lib).
            for package_, module, module_file in modules:
                assert package == package_
                self.build_module(module, module_file, package)

    def byte_compile(self, files):
        """Byte-compile 'files' (in place under build_lib), honoring the
        'compile' and 'optimize' options."""
        if sys.dont_write_bytecode:
            self.warn('byte-compiling is disabled, skipping.')
            return

        from ..util import byte_compile

        prefix = self.build_lib
        if prefix[-1] != os.sep:
            prefix = prefix + os.sep

        # XXX this code is essentially the same as the 'byte_compile()
        # method of the "install_lib" command, except for the determination
        # of the 'prefix' string.  Hmmm.
        if self.compile:
            byte_compile(
                files, optimize=0, force=self.force, prefix=prefix, dry_run=self.dry_run
            )
        if self.optimize > 0:
            byte_compile(
                files,
                optimize=self.optimize,
                force=self.force,
                prefix=prefix,
                dry_run=self.dry_run,
            )
|
.venv/Lib/site-packages/setuptools/_distutils/command/py37compat.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def _pythonlib_compat():
|
| 5 |
+
"""
|
| 6 |
+
On Python 3.7 and earlier, distutils would include the Python
|
| 7 |
+
library. See pypa/distutils#9.
|
| 8 |
+
"""
|
| 9 |
+
from distutils import sysconfig
|
| 10 |
+
|
| 11 |
+
if not sysconfig.get_config_var('Py_ENABLED_SHARED'):
|
| 12 |
+
return
|
| 13 |
+
|
| 14 |
+
yield 'python{}.{}{}'.format(
|
| 15 |
+
sys.hexversion >> 24,
|
| 16 |
+
(sys.hexversion >> 16) & 0xFF,
|
| 17 |
+
sysconfig.get_config_var('ABIFLAGS'),
|
| 18 |
+
)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def compose(f1, f2):
    """Return a callable that applies ``f2`` first, then ``f1``."""

    def composed(*args, **kwargs):
        return f1(f2(*args, **kwargs))

    return composed
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
# ``pythonlib()`` returns the extra Python link-library names.  The
# Python-3.7 shim is only wired in where the old implicit linking applied:
# CPython < 3.8, excluding macOS and AIX.  Everywhere else it degrades to
# plain ``list`` (called with no iterable -> empty list).
if (
    sys.version_info < (3, 8)
    and sys.platform != 'darwin'
    and sys.platform[:3] != 'aix'
):
    pythonlib = compose(list, _pythonlib_compat)
else:
    pythonlib = list
|
.venv/Lib/site-packages/setuptools/_distutils/command/register.py
ADDED
|
@@ -0,0 +1,320 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.register
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'register' command (register with the repository).
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
# created 2002/10/21, Richard Jones
|
| 7 |
+
|
| 8 |
+
import getpass
|
| 9 |
+
import io
|
| 10 |
+
import logging
|
| 11 |
+
import urllib.parse
|
| 12 |
+
import urllib.request
|
| 13 |
+
from warnings import warn
|
| 14 |
+
|
| 15 |
+
from ..core import PyPIRCCommand
|
| 16 |
+
from distutils._log import log
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class register(PyPIRCCommand):
|
| 20 |
+
description = "register the distribution with the Python package index"
|
| 21 |
+
user_options = PyPIRCCommand.user_options + [
|
| 22 |
+
('list-classifiers', None, 'list the valid Trove classifiers'),
|
| 23 |
+
(
|
| 24 |
+
'strict',
|
| 25 |
+
None,
|
| 26 |
+
'Will stop the registering if the meta-data are not fully compliant',
|
| 27 |
+
),
|
| 28 |
+
]
|
| 29 |
+
boolean_options = PyPIRCCommand.boolean_options + [
|
| 30 |
+
'verify',
|
| 31 |
+
'list-classifiers',
|
| 32 |
+
'strict',
|
| 33 |
+
]
|
| 34 |
+
|
| 35 |
+
sub_commands = [('check', lambda self: True)]
|
| 36 |
+
|
| 37 |
+
def initialize_options(self):
    """Set the register-specific options to their defaults."""
    PyPIRCCommand.initialize_options(self)
    # both flags default to "off"
    self.list_classifiers = self.strict = 0
|
| 41 |
+
|
| 42 |
+
def finalize_options(self):
    """Finish option processing and forward settings to `check`."""
    PyPIRCCommand.finalize_options(self)
    # setting options for the `check` subcommand
    self.distribution.command_options['check'] = {
        'strict': ('register', self.strict),
        'restructuredtext': ('register', 1),
    }
|
| 50 |
+
|
| 51 |
+
def run(self):
    """Run subcommands, then verify, list classifiers, or register."""
    self.finalize_options()
    self._set_config()

    # Run sub commands (e.g. `check`) before talking to the server.
    for name in self.get_sub_commands():
        self.run_command(name)

    # pick the single action to perform
    if self.dry_run:
        action = self.verify_metadata
    elif self.list_classifiers:
        action = self.classifiers
    else:
        action = self.send_metadata
    action()
|
| 65 |
+
|
| 66 |
+
def check_metadata(self):
|
| 67 |
+
"""Deprecated API."""
|
| 68 |
+
warn(
|
| 69 |
+
"distutils.command.register.check_metadata is deprecated; "
|
| 70 |
+
"use the check command instead",
|
| 71 |
+
DeprecationWarning,
|
| 72 |
+
)
|
| 73 |
+
check = self.distribution.get_command_obj('check')
|
| 74 |
+
check.ensure_finalized()
|
| 75 |
+
check.strict = self.strict
|
| 76 |
+
check.restructuredtext = 1
|
| 77 |
+
check.run()
|
| 78 |
+
|
| 79 |
+
def _set_config(self):
|
| 80 |
+
'''Reads the configuration file and set attributes.'''
|
| 81 |
+
config = self._read_pypirc()
|
| 82 |
+
if config != {}:
|
| 83 |
+
self.username = config['username']
|
| 84 |
+
self.password = config['password']
|
| 85 |
+
self.repository = config['repository']
|
| 86 |
+
self.realm = config['realm']
|
| 87 |
+
self.has_config = True
|
| 88 |
+
else:
|
| 89 |
+
if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
|
| 90 |
+
raise ValueError('%s not found in .pypirc' % self.repository)
|
| 91 |
+
if self.repository == 'pypi':
|
| 92 |
+
self.repository = self.DEFAULT_REPOSITORY
|
| 93 |
+
self.has_config = False
|
| 94 |
+
|
| 95 |
+
def classifiers(self):
|
| 96 |
+
'''Fetch the list of classifiers from the server.'''
|
| 97 |
+
url = self.repository + '?:action=list_classifiers'
|
| 98 |
+
response = urllib.request.urlopen(url)
|
| 99 |
+
log.info(self._read_pypi_response(response))
|
| 100 |
+
|
| 101 |
+
def verify_metadata(self):
|
| 102 |
+
'''Send the metadata to the package index server to be checked.'''
|
| 103 |
+
# send the info to the server and report the result
|
| 104 |
+
(code, result) = self.post_to_server(self.build_post_data('verify'))
|
| 105 |
+
log.info('Server response (%s): %s', code, result)
|
| 106 |
+
|
| 107 |
+
    def send_metadata(self):  # noqa: C901
        '''Send the metadata to the package index server.

        Well, do the following:
        1. figure who the user is, and then
        2. send the data as a Basic auth'ed POST.

        First we try to read the username/password from $HOME/.pypirc,
        which is a ConfigParser-formatted file with a section
        [distutils] containing username and password entries (both
        in clear text). Eg:

            [distutils]
            index-servers =
                pypi

            [pypi]
            username: fred
            password: sekrit

        Otherwise, to figure who the user is, we offer the user three
        choices:

         1. use existing login,
         2. register as a new user, or
         3. set the password to a random string and email the user.

        '''
        # see if we can short-cut and get the username/password from the
        # config
        if self.has_config:
            # Credentials came from .pypirc: act as "existing login".
            choice = '1'
            username = self.username
            password = self.password
        else:
            # 'x' is deliberately invalid so the prompt loop below runs.
            choice = 'x'
            username = password = ''

        # get the user's login info
        choices = '1 2 3 4'.split()
        while choice not in choices:
            self.announce(
                '''\
We need to know who you are, so please choose either:
 1. use your existing login,
 2. register as a new user,
 3. have the server generate a new password for you (and email it to you), or
 4. quit
Your selection [default 1]: ''',
                logging.INFO,
            )
            choice = input()
            if not choice:
                choice = '1'
            elif choice not in choices:
                print('Please choose one of the four options!')

        if choice == '1':
            # get the username and password
            while not username:
                username = input('Username: ')
            while not password:
                password = getpass.getpass('Password: ')

            # set up the authentication
            auth = urllib.request.HTTPPasswordMgr()
            # urlparse()[1] is the network-location (host[:port]) component.
            host = urllib.parse.urlparse(self.repository)[1]
            auth.add_password(self.realm, host, username, password)
            # send the info to the server and report the result
            code, result = self.post_to_server(self.build_post_data('submit'), auth)
            self.announce('Server response ({}): {}'.format(code, result), logging.INFO)

            # possibly save the login
            if code == 200:
                if self.has_config:
                    # sharing the password in the distribution instance
                    # so the upload command can reuse it
                    self.distribution.password = password
                else:
                    # Offer to persist the credentials to .pypirc.
                    self.announce(
                        (
                            'I can store your PyPI login so future '
                            'submissions will be faster.'
                        ),
                        logging.INFO,
                    )
                    self.announce(
                        '(the login will be stored in %s)' % self._get_rc_file(),
                        logging.INFO,
                    )
                    choice = 'X'
                    while choice.lower() not in 'yn':
                        choice = input('Save your login (y/N)?')
                        if not choice:
                            choice = 'n'
                    if choice.lower() == 'y':
                        self._store_pypirc(username, password)

        elif choice == '2':
            # Register a new user account on the index.
            data = {':action': 'user'}
            data['name'] = data['password'] = data['email'] = ''
            # 'confirm' starts as None (not '') so the password loop runs once.
            data['confirm'] = None
            while not data['name']:
                data['name'] = input('Username: ')
            while data['password'] != data['confirm']:
                while not data['password']:
                    data['password'] = getpass.getpass('Password: ')
                while not data['confirm']:
                    data['confirm'] = getpass.getpass(' Confirm: ')
                if data['password'] != data['confirm']:
                    # Mismatch: reset both and ask again.
                    data['password'] = ''
                    data['confirm'] = None
                    print("Password and confirm don't match!")
            while not data['email']:
                data['email'] = input(' EMail: ')
            code, result = self.post_to_server(data)
            if code != 200:
                log.info('Server response (%s): %s', code, result)
            else:
                log.info('You will receive an email shortly.')
                log.info('Follow the instructions in it to ' 'complete registration.')
        elif choice == '3':
            # Ask the server to reset the password and email it to the user.
            data = {':action': 'password_reset'}
            data['email'] = ''
            while not data['email']:
                data['email'] = input('Your email address: ')
            code, result = self.post_to_server(data)
            log.info('Server response (%s): %s', code, result)
|
| 235 |
+
|
| 236 |
+
def build_post_data(self, action):
|
| 237 |
+
# figure the data to send - the metadata plus some additional
|
| 238 |
+
# information used by the package server
|
| 239 |
+
meta = self.distribution.metadata
|
| 240 |
+
data = {
|
| 241 |
+
':action': action,
|
| 242 |
+
'metadata_version': '1.0',
|
| 243 |
+
'name': meta.get_name(),
|
| 244 |
+
'version': meta.get_version(),
|
| 245 |
+
'summary': meta.get_description(),
|
| 246 |
+
'home_page': meta.get_url(),
|
| 247 |
+
'author': meta.get_contact(),
|
| 248 |
+
'author_email': meta.get_contact_email(),
|
| 249 |
+
'license': meta.get_licence(),
|
| 250 |
+
'description': meta.get_long_description(),
|
| 251 |
+
'keywords': meta.get_keywords(),
|
| 252 |
+
'platform': meta.get_platforms(),
|
| 253 |
+
'classifiers': meta.get_classifiers(),
|
| 254 |
+
'download_url': meta.get_download_url(),
|
| 255 |
+
# PEP 314
|
| 256 |
+
'provides': meta.get_provides(),
|
| 257 |
+
'requires': meta.get_requires(),
|
| 258 |
+
'obsoletes': meta.get_obsoletes(),
|
| 259 |
+
}
|
| 260 |
+
if data['provides'] or data['requires'] or data['obsoletes']:
|
| 261 |
+
data['metadata_version'] = '1.1'
|
| 262 |
+
return data
|
| 263 |
+
|
| 264 |
+
def post_to_server(self, data, auth=None): # noqa: C901
|
| 265 |
+
'''Post a query to the server, and return a string response.'''
|
| 266 |
+
if 'name' in data:
|
| 267 |
+
self.announce(
|
| 268 |
+
'Registering {} to {}'.format(data['name'], self.repository),
|
| 269 |
+
logging.INFO,
|
| 270 |
+
)
|
| 271 |
+
# Build up the MIME payload for the urllib2 POST data
|
| 272 |
+
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
|
| 273 |
+
sep_boundary = '\n--' + boundary
|
| 274 |
+
end_boundary = sep_boundary + '--'
|
| 275 |
+
body = io.StringIO()
|
| 276 |
+
for key, value in data.items():
|
| 277 |
+
# handle multiple entries for the same name
|
| 278 |
+
if type(value) not in (type([]), type(())):
|
| 279 |
+
value = [value]
|
| 280 |
+
for value in value:
|
| 281 |
+
value = str(value)
|
| 282 |
+
body.write(sep_boundary)
|
| 283 |
+
body.write('\nContent-Disposition: form-data; name="%s"' % key)
|
| 284 |
+
body.write("\n\n")
|
| 285 |
+
body.write(value)
|
| 286 |
+
if value and value[-1] == '\r':
|
| 287 |
+
body.write('\n') # write an extra newline (lurve Macs)
|
| 288 |
+
body.write(end_boundary)
|
| 289 |
+
body.write("\n")
|
| 290 |
+
body = body.getvalue().encode("utf-8")
|
| 291 |
+
|
| 292 |
+
# build the Request
|
| 293 |
+
headers = {
|
| 294 |
+
'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'
|
| 295 |
+
% boundary,
|
| 296 |
+
'Content-length': str(len(body)),
|
| 297 |
+
}
|
| 298 |
+
req = urllib.request.Request(self.repository, body, headers)
|
| 299 |
+
|
| 300 |
+
# handle HTTP and include the Basic Auth handler
|
| 301 |
+
opener = urllib.request.build_opener(
|
| 302 |
+
urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
|
| 303 |
+
)
|
| 304 |
+
data = ''
|
| 305 |
+
try:
|
| 306 |
+
result = opener.open(req)
|
| 307 |
+
except urllib.error.HTTPError as e:
|
| 308 |
+
if self.show_response:
|
| 309 |
+
data = e.fp.read()
|
| 310 |
+
result = e.code, e.msg
|
| 311 |
+
except urllib.error.URLError as e:
|
| 312 |
+
result = 500, str(e)
|
| 313 |
+
else:
|
| 314 |
+
if self.show_response:
|
| 315 |
+
data = self._read_pypi_response(result)
|
| 316 |
+
result = 200, 'OK'
|
| 317 |
+
if self.show_response:
|
| 318 |
+
msg = '\n'.join(('-' * 75, data, '-' * 75))
|
| 319 |
+
self.announce(msg, logging.INFO)
|
| 320 |
+
return result
|
.venv/Lib/site-packages/setuptools/_distutils/command/sdist.py
ADDED
|
@@ -0,0 +1,530 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.sdist
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'sdist' command (create a source distribution)."""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import sys
|
| 7 |
+
from glob import glob
|
| 8 |
+
from warnings import warn
|
| 9 |
+
|
| 10 |
+
from ..core import Command
|
| 11 |
+
from distutils import dir_util
|
| 12 |
+
from distutils import file_util
|
| 13 |
+
from distutils import archive_util
|
| 14 |
+
from ..text_file import TextFile
|
| 15 |
+
from ..filelist import FileList
|
| 16 |
+
from distutils._log import log
|
| 17 |
+
from ..util import convert_path
|
| 18 |
+
from ..errors import DistutilsOptionError, DistutilsTemplateError
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def show_formats():
    """Print all possible values for the 'formats' option (used by
    the "--help-formats" command-line option).
    """
    from ..fancy_getopt import FancyGetopt
    from ..archive_util import ARCHIVE_FORMATS

    # One (option, short, description) triple per registered archive format,
    # in sorted order.
    formats = sorted(
        ("formats=" + name, None, ARCHIVE_FORMATS[name][2])
        for name in ARCHIVE_FORMATS
    )
    FancyGetopt(formats).print_help("List of available source distribution formats:")
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class sdist(Command):
    # One-line help shown by `setup.py --help-commands`.
    description = "create a source distribution (tarball, zip file, etc.)"
|
| 37 |
+
|
| 38 |
+
    def checking_metadata(self):
        """Callable used for the check sub-command.

        Placed here so user_options can view it"""
        # Truthy when the 'metadata-check' option is enabled (the default).
        return self.metadata_check
|
| 43 |
+
|
| 44 |
+
    # Command-line options: (long name, short name, help text) triples.
    user_options = [
        ('template=', 't', "name of manifest template file [default: MANIFEST.in]"),
        ('manifest=', 'm', "name of manifest file [default: MANIFEST]"),
        (
            'use-defaults',
            None,
            "include the default file set in the manifest "
            "[default; disable with --no-defaults]",
        ),
        ('no-defaults', None, "don't include the default file set"),
        (
            'prune',
            None,
            "specifically exclude files/directories that should not be "
            "distributed (build tree, RCS/CVS dirs, etc.) "
            "[default; disable with --no-prune]",
        ),
        ('no-prune', None, "don't automatically exclude anything"),
        (
            'manifest-only',
            'o',
            "just regenerate the manifest and then stop " "(implies --force-manifest)",
        ),
        (
            'force-manifest',
            'f',
            "forcibly regenerate the manifest and carry on as usual. "
            "Deprecated: now the manifest is always regenerated.",
        ),
        ('formats=', None, "formats for source distribution (comma-separated list)"),
        (
            'keep-temp',
            'k',
            "keep the distribution tree around after creating " + "archive file(s)",
        ),
        (
            'dist-dir=',
            'd',
            "directory to put the source distribution archive(s) in " "[default: dist]",
        ),
        (
            'metadata-check',
            None,
            "Ensure that all required elements of meta-data "
            "are supplied. Warn if any missing. [default]",
        ),
        (
            'owner=',
            'u',
            "Owner name used when creating a tar file [default: current user]",
        ),
        (
            'group=',
            'g',
            "Group name used when creating a tar file [default: current group]",
        ),
    ]

    # Options that are flags (take no value on the command line).
    boolean_options = [
        'use-defaults',
        'prune',
        'manifest-only',
        'force-manifest',
        'keep-temp',
        'metadata-check',
    ]

    help_options = [
        ('help-formats', None, "list available distribution formats", show_formats),
    ]

    # Maps each negative option to the positive option it disables.
    negative_opt = {'no-defaults': 'use-defaults', 'no-prune': 'prune'}

    # 'check' runs first whenever metadata checking is enabled.
    sub_commands = [('check', checking_metadata)]

    # Recognised README variants, tried in order by _add_defaults_standards.
    READMES = ('README', 'README.txt', 'README.rst')
def initialize_options(self):
|
| 122 |
+
# 'template' and 'manifest' are, respectively, the names of
|
| 123 |
+
# the manifest template and manifest file.
|
| 124 |
+
self.template = None
|
| 125 |
+
self.manifest = None
|
| 126 |
+
|
| 127 |
+
# 'use_defaults': if true, we will include the default file set
|
| 128 |
+
# in the manifest
|
| 129 |
+
self.use_defaults = 1
|
| 130 |
+
self.prune = 1
|
| 131 |
+
|
| 132 |
+
self.manifest_only = 0
|
| 133 |
+
self.force_manifest = 0
|
| 134 |
+
|
| 135 |
+
self.formats = ['gztar']
|
| 136 |
+
self.keep_temp = 0
|
| 137 |
+
self.dist_dir = None
|
| 138 |
+
|
| 139 |
+
self.archive_files = None
|
| 140 |
+
self.metadata_check = 1
|
| 141 |
+
self.owner = None
|
| 142 |
+
self.group = None
|
| 143 |
+
|
| 144 |
+
def finalize_options(self):
|
| 145 |
+
if self.manifest is None:
|
| 146 |
+
self.manifest = "MANIFEST"
|
| 147 |
+
if self.template is None:
|
| 148 |
+
self.template = "MANIFEST.in"
|
| 149 |
+
|
| 150 |
+
self.ensure_string_list('formats')
|
| 151 |
+
|
| 152 |
+
bad_format = archive_util.check_archive_formats(self.formats)
|
| 153 |
+
if bad_format:
|
| 154 |
+
raise DistutilsOptionError("unknown archive format '%s'" % bad_format)
|
| 155 |
+
|
| 156 |
+
if self.dist_dir is None:
|
| 157 |
+
self.dist_dir = "dist"
|
| 158 |
+
|
| 159 |
+
def run(self):
|
| 160 |
+
# 'filelist' contains the list of files that will make up the
|
| 161 |
+
# manifest
|
| 162 |
+
self.filelist = FileList()
|
| 163 |
+
|
| 164 |
+
# Run sub commands
|
| 165 |
+
for cmd_name in self.get_sub_commands():
|
| 166 |
+
self.run_command(cmd_name)
|
| 167 |
+
|
| 168 |
+
# Do whatever it takes to get the list of files to process
|
| 169 |
+
# (process the manifest template, read an existing manifest,
|
| 170 |
+
# whatever). File list is accumulated in 'self.filelist'.
|
| 171 |
+
self.get_file_list()
|
| 172 |
+
|
| 173 |
+
# If user just wanted us to regenerate the manifest, stop now.
|
| 174 |
+
if self.manifest_only:
|
| 175 |
+
return
|
| 176 |
+
|
| 177 |
+
# Otherwise, go ahead and create the source distribution tarball,
|
| 178 |
+
# or zipfile, or whatever.
|
| 179 |
+
self.make_distribution()
|
| 180 |
+
|
| 181 |
+
def check_metadata(self):
|
| 182 |
+
"""Deprecated API."""
|
| 183 |
+
warn(
|
| 184 |
+
"distutils.command.sdist.check_metadata is deprecated, \
|
| 185 |
+
use the check command instead",
|
| 186 |
+
PendingDeprecationWarning,
|
| 187 |
+
)
|
| 188 |
+
check = self.distribution.get_command_obj('check')
|
| 189 |
+
check.ensure_finalized()
|
| 190 |
+
check.run()
|
| 191 |
+
|
| 192 |
+
def get_file_list(self):
|
| 193 |
+
"""Figure out the list of files to include in the source
|
| 194 |
+
distribution, and put it in 'self.filelist'. This might involve
|
| 195 |
+
reading the manifest template (and writing the manifest), or just
|
| 196 |
+
reading the manifest, or just using the default file set -- it all
|
| 197 |
+
depends on the user's options.
|
| 198 |
+
"""
|
| 199 |
+
# new behavior when using a template:
|
| 200 |
+
# the file list is recalculated every time because
|
| 201 |
+
# even if MANIFEST.in or setup.py are not changed
|
| 202 |
+
# the user might have added some files in the tree that
|
| 203 |
+
# need to be included.
|
| 204 |
+
#
|
| 205 |
+
# This makes --force the default and only behavior with templates.
|
| 206 |
+
template_exists = os.path.isfile(self.template)
|
| 207 |
+
if not template_exists and self._manifest_is_not_generated():
|
| 208 |
+
self.read_manifest()
|
| 209 |
+
self.filelist.sort()
|
| 210 |
+
self.filelist.remove_duplicates()
|
| 211 |
+
return
|
| 212 |
+
|
| 213 |
+
if not template_exists:
|
| 214 |
+
self.warn(
|
| 215 |
+
("manifest template '%s' does not exist " + "(using default file list)")
|
| 216 |
+
% self.template
|
| 217 |
+
)
|
| 218 |
+
self.filelist.findall()
|
| 219 |
+
|
| 220 |
+
if self.use_defaults:
|
| 221 |
+
self.add_defaults()
|
| 222 |
+
|
| 223 |
+
if template_exists:
|
| 224 |
+
self.read_template()
|
| 225 |
+
|
| 226 |
+
if self.prune:
|
| 227 |
+
self.prune_file_list()
|
| 228 |
+
|
| 229 |
+
self.filelist.sort()
|
| 230 |
+
self.filelist.remove_duplicates()
|
| 231 |
+
self.write_manifest()
|
| 232 |
+
|
| 233 |
+
def add_defaults(self):
|
| 234 |
+
"""Add all the default files to self.filelist:
|
| 235 |
+
- README or README.txt
|
| 236 |
+
- setup.py
|
| 237 |
+
- tests/test*.py and test/test*.py
|
| 238 |
+
- all pure Python modules mentioned in setup script
|
| 239 |
+
- all files pointed by package_data (build_py)
|
| 240 |
+
- all files defined in data_files.
|
| 241 |
+
- all files defined as scripts.
|
| 242 |
+
- all C sources listed as part of extensions or C libraries
|
| 243 |
+
in the setup script (doesn't catch C headers!)
|
| 244 |
+
Warns if (README or README.txt) or setup.py are missing; everything
|
| 245 |
+
else is optional.
|
| 246 |
+
"""
|
| 247 |
+
self._add_defaults_standards()
|
| 248 |
+
self._add_defaults_optional()
|
| 249 |
+
self._add_defaults_python()
|
| 250 |
+
self._add_defaults_data_files()
|
| 251 |
+
self._add_defaults_ext()
|
| 252 |
+
self._add_defaults_c_libs()
|
| 253 |
+
self._add_defaults_scripts()
|
| 254 |
+
|
| 255 |
+
@staticmethod
|
| 256 |
+
def _cs_path_exists(fspath):
|
| 257 |
+
"""
|
| 258 |
+
Case-sensitive path existence check
|
| 259 |
+
|
| 260 |
+
>>> sdist._cs_path_exists(__file__)
|
| 261 |
+
True
|
| 262 |
+
>>> sdist._cs_path_exists(__file__.upper())
|
| 263 |
+
False
|
| 264 |
+
"""
|
| 265 |
+
if not os.path.exists(fspath):
|
| 266 |
+
return False
|
| 267 |
+
# make absolute so we always have a directory
|
| 268 |
+
abspath = os.path.abspath(fspath)
|
| 269 |
+
directory, filename = os.path.split(abspath)
|
| 270 |
+
return filename in os.listdir(directory)
|
| 271 |
+
|
| 272 |
+
def _add_defaults_standards(self):
|
| 273 |
+
standards = [self.READMES, self.distribution.script_name]
|
| 274 |
+
for fn in standards:
|
| 275 |
+
if isinstance(fn, tuple):
|
| 276 |
+
alts = fn
|
| 277 |
+
got_it = False
|
| 278 |
+
for fn in alts:
|
| 279 |
+
if self._cs_path_exists(fn):
|
| 280 |
+
got_it = True
|
| 281 |
+
self.filelist.append(fn)
|
| 282 |
+
break
|
| 283 |
+
|
| 284 |
+
if not got_it:
|
| 285 |
+
self.warn(
|
| 286 |
+
"standard file not found: should have one of " + ', '.join(alts)
|
| 287 |
+
)
|
| 288 |
+
else:
|
| 289 |
+
if self._cs_path_exists(fn):
|
| 290 |
+
self.filelist.append(fn)
|
| 291 |
+
else:
|
| 292 |
+
self.warn("standard file '%s' not found" % fn)
|
| 293 |
+
|
| 294 |
+
def _add_defaults_optional(self):
|
| 295 |
+
optional = ['tests/test*.py', 'test/test*.py', 'setup.cfg']
|
| 296 |
+
for pattern in optional:
|
| 297 |
+
files = filter(os.path.isfile, glob(pattern))
|
| 298 |
+
self.filelist.extend(files)
|
| 299 |
+
|
| 300 |
+
def _add_defaults_python(self):
|
| 301 |
+
# build_py is used to get:
|
| 302 |
+
# - python modules
|
| 303 |
+
# - files defined in package_data
|
| 304 |
+
build_py = self.get_finalized_command('build_py')
|
| 305 |
+
|
| 306 |
+
# getting python files
|
| 307 |
+
if self.distribution.has_pure_modules():
|
| 308 |
+
self.filelist.extend(build_py.get_source_files())
|
| 309 |
+
|
| 310 |
+
# getting package_data files
|
| 311 |
+
# (computed in build_py.data_files by build_py.finalize_options)
|
| 312 |
+
for pkg, src_dir, build_dir, filenames in build_py.data_files:
|
| 313 |
+
for filename in filenames:
|
| 314 |
+
self.filelist.append(os.path.join(src_dir, filename))
|
| 315 |
+
|
| 316 |
+
def _add_defaults_data_files(self):
|
| 317 |
+
# getting distribution.data_files
|
| 318 |
+
if self.distribution.has_data_files():
|
| 319 |
+
for item in self.distribution.data_files:
|
| 320 |
+
if isinstance(item, str):
|
| 321 |
+
# plain file
|
| 322 |
+
item = convert_path(item)
|
| 323 |
+
if os.path.isfile(item):
|
| 324 |
+
self.filelist.append(item)
|
| 325 |
+
else:
|
| 326 |
+
# a (dirname, filenames) tuple
|
| 327 |
+
dirname, filenames = item
|
| 328 |
+
for f in filenames:
|
| 329 |
+
f = convert_path(f)
|
| 330 |
+
if os.path.isfile(f):
|
| 331 |
+
self.filelist.append(f)
|
| 332 |
+
|
| 333 |
+
def _add_defaults_ext(self):
|
| 334 |
+
if self.distribution.has_ext_modules():
|
| 335 |
+
build_ext = self.get_finalized_command('build_ext')
|
| 336 |
+
self.filelist.extend(build_ext.get_source_files())
|
| 337 |
+
|
| 338 |
+
def _add_defaults_c_libs(self):
|
| 339 |
+
if self.distribution.has_c_libraries():
|
| 340 |
+
build_clib = self.get_finalized_command('build_clib')
|
| 341 |
+
self.filelist.extend(build_clib.get_source_files())
|
| 342 |
+
|
| 343 |
+
def _add_defaults_scripts(self):
|
| 344 |
+
if self.distribution.has_scripts():
|
| 345 |
+
build_scripts = self.get_finalized_command('build_scripts')
|
| 346 |
+
self.filelist.extend(build_scripts.get_source_files())
|
| 347 |
+
|
| 348 |
+
def read_template(self):
|
| 349 |
+
"""Read and parse manifest template file named by self.template.
|
| 350 |
+
|
| 351 |
+
(usually "MANIFEST.in") The parsing and processing is done by
|
| 352 |
+
'self.filelist', which updates itself accordingly.
|
| 353 |
+
"""
|
| 354 |
+
log.info("reading manifest template '%s'", self.template)
|
| 355 |
+
template = TextFile(
|
| 356 |
+
self.template,
|
| 357 |
+
strip_comments=1,
|
| 358 |
+
skip_blanks=1,
|
| 359 |
+
join_lines=1,
|
| 360 |
+
lstrip_ws=1,
|
| 361 |
+
rstrip_ws=1,
|
| 362 |
+
collapse_join=1,
|
| 363 |
+
)
|
| 364 |
+
|
| 365 |
+
try:
|
| 366 |
+
while True:
|
| 367 |
+
line = template.readline()
|
| 368 |
+
if line is None: # end of file
|
| 369 |
+
break
|
| 370 |
+
|
| 371 |
+
try:
|
| 372 |
+
self.filelist.process_template_line(line)
|
| 373 |
+
# the call above can raise a DistutilsTemplateError for
|
| 374 |
+
# malformed lines, or a ValueError from the lower-level
|
| 375 |
+
# convert_path function
|
| 376 |
+
except (DistutilsTemplateError, ValueError) as msg:
|
| 377 |
+
self.warn(
|
| 378 |
+
"%s, line %d: %s"
|
| 379 |
+
% (template.filename, template.current_line, msg)
|
| 380 |
+
)
|
| 381 |
+
finally:
|
| 382 |
+
template.close()
|
| 383 |
+
|
| 384 |
+
def prune_file_list(self):
|
| 385 |
+
"""Prune off branches that might slip into the file list as created
|
| 386 |
+
by 'read_template()', but really don't belong there:
|
| 387 |
+
* the build tree (typically "build")
|
| 388 |
+
* the release tree itself (only an issue if we ran "sdist"
|
| 389 |
+
previously with --keep-temp, or it aborted)
|
| 390 |
+
* any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
|
| 391 |
+
"""
|
| 392 |
+
build = self.get_finalized_command('build')
|
| 393 |
+
base_dir = self.distribution.get_fullname()
|
| 394 |
+
|
| 395 |
+
self.filelist.exclude_pattern(None, prefix=build.build_base)
|
| 396 |
+
self.filelist.exclude_pattern(None, prefix=base_dir)
|
| 397 |
+
|
| 398 |
+
if sys.platform == 'win32':
|
| 399 |
+
seps = r'/|\\'
|
| 400 |
+
else:
|
| 401 |
+
seps = '/'
|
| 402 |
+
|
| 403 |
+
vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', '_darcs']
|
| 404 |
+
vcs_ptrn = r'(^|{})({})({}).*'.format(seps, '|'.join(vcs_dirs), seps)
|
| 405 |
+
self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
|
| 406 |
+
|
| 407 |
+
def write_manifest(self):
|
| 408 |
+
"""Write the file list in 'self.filelist' (presumably as filled in
|
| 409 |
+
by 'add_defaults()' and 'read_template()') to the manifest file
|
| 410 |
+
named by 'self.manifest'.
|
| 411 |
+
"""
|
| 412 |
+
if self._manifest_is_not_generated():
|
| 413 |
+
log.info(
|
| 414 |
+
"not writing to manually maintained "
|
| 415 |
+
"manifest file '%s'" % self.manifest
|
| 416 |
+
)
|
| 417 |
+
return
|
| 418 |
+
|
| 419 |
+
content = self.filelist.files[:]
|
| 420 |
+
content.insert(0, '# file GENERATED by distutils, do NOT edit')
|
| 421 |
+
self.execute(
|
| 422 |
+
file_util.write_file,
|
| 423 |
+
(self.manifest, content),
|
| 424 |
+
"writing manifest file '%s'" % self.manifest,
|
| 425 |
+
)
|
| 426 |
+
|
| 427 |
+
def _manifest_is_not_generated(self):
|
| 428 |
+
# check for special comment used in 3.1.3 and higher
|
| 429 |
+
if not os.path.isfile(self.manifest):
|
| 430 |
+
return False
|
| 431 |
+
|
| 432 |
+
fp = open(self.manifest)
|
| 433 |
+
try:
|
| 434 |
+
first_line = fp.readline()
|
| 435 |
+
finally:
|
| 436 |
+
fp.close()
|
| 437 |
+
return first_line != '# file GENERATED by distutils, do NOT edit\n'
|
| 438 |
+
|
| 439 |
+
def read_manifest(self):
|
| 440 |
+
"""Read the manifest file (named by 'self.manifest') and use it to
|
| 441 |
+
fill in 'self.filelist', the list of files to include in the source
|
| 442 |
+
distribution.
|
| 443 |
+
"""
|
| 444 |
+
log.info("reading manifest file '%s'", self.manifest)
|
| 445 |
+
with open(self.manifest) as manifest:
|
| 446 |
+
for line in manifest:
|
| 447 |
+
# ignore comments and blank lines
|
| 448 |
+
line = line.strip()
|
| 449 |
+
if line.startswith('#') or not line:
|
| 450 |
+
continue
|
| 451 |
+
self.filelist.append(line)
|
| 452 |
+
|
| 453 |
+
def make_release_tree(self, base_dir, files):
    """Create the directory tree that will become the source
    distribution archive.

    All directories implied by the filenames in 'files' are created
    under 'base_dir', and each file is hard linked (or copied when hard
    linking is unavailable) into place.  The result duplicates the
    developer's source tree under a directory named after the
    distribution, containing only the files to be distributed.
    """
    # Create every directory needed under 'base_dir'; mkpath() keeps us
    # alive even when the manifest happens to be empty.
    self.mkpath(base_dir)
    dir_util.create_tree(base_dir, files, dry_run=self.dry_run)

    # Prefer hard links where os.link exists; the release tree is
    # normally blown away after the archives are built, so links are
    # both cheap and safe.  Otherwise fall back to copying.
    if hasattr(os, 'link'):
        link = 'hard'
        msg = "making hard links in %s..." % base_dir
    else:
        link = None
        msg = "copying files to %s..." % base_dir

    if not files:
        log.warning("no files to distribute -- empty manifest?")
    else:
        log.info(msg)
        for file in files:
            if not os.path.isfile(file):
                log.warning("'%s' not a regular file -- skipping", file)
                continue
            self.copy_file(file, os.path.join(base_dir, file), link=link)

    self.distribution.metadata.write_pkg_info(base_dir)
|
| 494 |
+
|
| 495 |
+
def make_distribution(self):
    """Create the source distribution(s).

    Builds the release tree via 'make_release_tree()', produces one
    archive per entry in 'self.formats', records the archive names for
    later retrieval by 'get_archive_files()', and finally removes the
    release tree unless 'self.keep_temp' is true.
    """
    # Don't warn about missing meta-data here -- should be (and is!)
    # done elsewhere.
    base_dir = self.distribution.get_fullname()
    base_name = os.path.join(self.dist_dir, base_dir)

    self.make_release_tree(base_dir, self.filelist.files)

    # Move 'tar' to the end of the list: the tar archive must be created
    # last to avoid being overwritten or removed by another format.
    formats = self.formats
    if 'tar' in formats:
        formats.append(formats.pop(formats.index('tar')))

    archive_files = []  # names of the archives we create
    for fmt in formats:
        archive = self.make_archive(
            base_name, fmt, base_dir=base_dir, owner=self.owner, group=self.group
        )
        archive_files.append(archive)
        self.distribution.dist_files.append(('sdist', '', archive))

    self.archive_files = archive_files

    if not self.keep_temp:
        dir_util.remove_tree(base_dir, dry_run=self.dry_run)
|
| 525 |
+
|
| 526 |
+
def get_archive_files(self):
    """Return the archive files created by the last run of this command,
    or None if the command hasn't run yet.
    """
    return self.archive_files
|
.venv/Lib/site-packages/setuptools/_distutils/command/upload.py
ADDED
|
@@ -0,0 +1,206 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
distutils.command.upload
|
| 3 |
+
|
| 4 |
+
Implements the Distutils 'upload' subcommand (upload package to a package
|
| 5 |
+
index).
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
import io
|
| 10 |
+
import hashlib
|
| 11 |
+
import logging
|
| 12 |
+
from base64 import standard_b64encode
|
| 13 |
+
from urllib.request import urlopen, Request, HTTPError
|
| 14 |
+
from urllib.parse import urlparse
|
| 15 |
+
from ..errors import DistutilsError, DistutilsOptionError
|
| 16 |
+
from ..core import PyPIRCCommand
|
| 17 |
+
from ..spawn import spawn
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
|
| 21 |
+
# https://bugs.python.org/issue40698
|
| 22 |
+
_FILE_CONTENT_DIGESTS = {
|
| 23 |
+
"md5_digest": getattr(hashlib, "md5", None),
|
| 24 |
+
"sha256_digest": getattr(hashlib, "sha256", None),
|
| 25 |
+
"blake2_256_digest": getattr(hashlib, "blake2b", None),
|
| 26 |
+
}
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class upload(PyPIRCCommand):
    """Implement the Distutils 'upload' command: push built distribution
    files (with their metadata) to a package index, optionally GPG-signing
    them first.
    """

    description = "upload binary package to PyPI"

    user_options = PyPIRCCommand.user_options + [
        ('sign', 's', 'sign files to upload using gpg'),
        ('identity=', 'i', 'GPG identity used to sign files'),
    ]

    boolean_options = PyPIRCCommand.boolean_options + ['sign']

    def initialize_options(self):
        """Set default values for all command options."""
        PyPIRCCommand.initialize_options(self)
        self.username = ''
        self.password = ''
        self.show_response = 0
        self.sign = False
        self.identity = None

    def finalize_options(self):
        """Resolve credentials and repository settings from .pypirc and,
        as a password fallback, from the distribution object (set by a
        preceding 'register' command).
        """
        PyPIRCCommand.finalize_options(self)
        if self.identity and not self.sign:
            raise DistutilsOptionError("Must use --sign for --identity to have meaning")
        config = self._read_pypirc()
        if config != {}:
            self.username = config['username']
            self.password = config['password']
            self.repository = config['repository']
            self.realm = config['realm']

        # getting the password from the distribution
        # if previously set by the register command
        if not self.password and self.distribution.password:
            self.password = self.distribution.password

    def run(self):
        """Upload every file recorded in 'self.distribution.dist_files'.

        Raises DistutilsOptionError when no files were built in the same
        invocation (upload cannot run standalone).
        """
        if not self.distribution.dist_files:
            msg = (
                "Must create and upload files in one command "
                "(e.g. setup.py sdist upload)"
            )
            raise DistutilsOptionError(msg)
        for command, pyversion, filename in self.distribution.dist_files:
            self.upload_file(command, pyversion, filename)

    def upload_file(self, command, pyversion, filename):  # noqa: C901
        """POST one distribution file plus release metadata to the index.

        :param command: the command that built the file (e.g. 'sdist')
        :param pyversion: target Python version string (may be empty)
        :param filename: path of the archive to upload
        :raises DistutilsError: when the server returns a non-200 status
        """
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args, dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release.  The context manager replaces the
        # original try/finally so the handle is always closed.
        with open(filename, 'rb') as f:
            content = f.read()

        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protocol_version': '1',
            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),
            # file content
            'content': (os.path.basename(filename), content),
            'filetype': command,
            'pyversion': pyversion,
            # additional meta-data
            'metadata_version': '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
        }

        data['comment'] = ''

        # file content digests
        for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
            if digest_cons is None:
                continue
            try:
                data[digest_name] = digest_cons(content).hexdigest()
            except ValueError:
                # hash digest not available or blocked by security policy
                pass

        if self.sign:
            with open(filename + ".asc", "rb") as f:
                data['gpg_signature'] = (os.path.basename(filename) + ".asc", f.read())

        # set up the authentication
        # The exact encoding of the authentication string is debated.
        # Anyway PyPI only accepts ascii for both username or password.
        user_pass = (self.username + ":" + self.password).encode('ascii')
        auth = "Basic " + standard_b64encode(user_pass).decode('ascii')

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = b'\r\n--' + boundary.encode('ascii')
        end_boundary = sep_boundary + b'--\r\n'
        body = io.BytesIO()
        for key, value in data.items():
            title = '\r\nContent-Disposition: form-data; name="%s"' % key
            # handle multiple entries for the same name; 'part' is renamed
            # from the original inner 'value', which shadowed its own
            # iterable ('for value in value')
            if not isinstance(value, list):
                value = [value]
            for part in value:
                if type(part) is tuple:
                    title += '; filename="%s"' % part[0]
                    part = part[1]
                else:
                    part = str(part).encode('utf-8')
                body.write(sep_boundary)
                body.write(title.encode('utf-8'))
                body.write(b"\r\n\r\n")
                body.write(part)
        body.write(end_boundary)
        body = body.getvalue()

        msg = "Submitting {} to {}".format(filename, self.repository)
        self.announce(msg, logging.INFO)

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s' % boundary,
            'Content-length': str(len(body)),
            'Authorization': auth,
        }

        request = Request(self.repository, data=body, headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
        except HTTPError as e:
            status = e.code
            reason = e.msg
        except OSError as e:
            self.announce(str(e), logging.ERROR)
            raise

        if status == 200:
            self.announce(
                'Server response ({}): {}'.format(status, reason), logging.INFO
            )
            if self.show_response:
                text = self._read_pypi_response(result)
                msg = '\n'.join(('-' * 75, text, '-' * 75))
                self.announce(msg, logging.INFO)
        else:
            msg = 'Upload failed ({}): {}'.format(status, reason)
            self.announce(msg, logging.ERROR)
            raise DistutilsError(msg)
|
.venv/Lib/site-packages/setuptools/_distutils/versionpredicate.py
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Module for parsing and testing package version predicate strings.
|
| 2 |
+
"""
|
| 3 |
+
import re
|
| 4 |
+
from . import version
|
| 5 |
+
import operator
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
# Grammar helpers for version predicate strings:
#   re_validPackage     -> (package name)(rest of string)
#   re_paren            -> the (list) inside one pair of parentheses
#   re_splitComparison  -> (comparison operator)(version string)
re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", re.ASCII)

re_paren = re.compile(r"^\s*\((.*)\)\s*$")
re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def splitUp(pred):
    """Parse a single version comparison such as '>= 1.0'.

    Return (comparison string, StrictVersion).  Raises ValueError when
    the string does not match the '<op> <version>' grammar.
    """
    match = re_splitComparison.match(pred)
    if match is None:
        raise ValueError("bad package restriction syntax: %r" % pred)
    comp, ver_text = match.groups()
    # StrictVersion emits a deprecation warning; silence the known one.
    with version.suppress_known_deprecation():
        parsed = version.StrictVersion(ver_text)
    return (comp, parsed)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
# Dispatch table mapping each comparison token to the matching
# rich-comparison function from the operator module.
compmap = {
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    ">": operator.gt,
    ">=": operator.ge,
    "!=": operator.ne,
}
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class VersionPredicate:
    """Parse and test package version predicates.

    >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)')
    >>> v.name
    'pyepat.abc'
    >>> print(v)
    pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3)
    >>> v.satisfied_by('1.1')
    True
    >>> v.satisfied_by('1555.1b3')
    False

    Extra whitespace is tolerated:

    >>> v = VersionPredicate(' pat( == 0.1 ) ')
    >>> v.name
    'pat'
    >>> v.satisfied_by('0.1')
    True
    >>> v.satisfied_by('0.2')
    False

    Version numbers not acceptable to `StrictVersion`, and package names
    that are not legal dotted module names, raise `ValueError`:

    >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)')
    Traceback (most recent call last):
    ...
    ValueError: invalid version number '1.2zb3'

    >>> v = VersionPredicate('foo-bar')
    Traceback (most recent call last):
    ...
    ValueError: expected parenthesized list: '-bar'
    """

    def __init__(self, versionPredicateStr):
        """Parse a version predicate string.

        Fields set on self:
          name: package name
          pred: list of (comparison string, StrictVersion)
        """
        versionPredicateStr = versionPredicateStr.strip()
        if not versionPredicateStr:
            raise ValueError("empty package restriction")
        match = re_validPackage.match(versionPredicateStr)
        if not match:
            raise ValueError("bad package name in %r" % versionPredicateStr)
        self.name, paren = match.groups()
        paren = paren.strip()
        if paren:
            match = re_paren.match(paren)
            if not match:
                raise ValueError("expected parenthesized list: %r" % paren)
            # Renamed from 'str', which shadowed the builtin.
            pred_str = match.groups()[0]
            self.pred = [splitUp(aPred) for aPred in pred_str.split(",")]
            if not self.pred:
                raise ValueError("empty parenthesized list in %r" % versionPredicateStr)
        else:
            self.pred = []

    def __str__(self):
        """Return a normalized, human-readable form of the predicate."""
        if self.pred:
            seq = [cond + " " + str(ver) for cond, ver in self.pred]
            return self.name + " (" + ", ".join(seq) + ")"
        return self.name

    def satisfied_by(self, version):
        """True if version is compatible with all the predicates in self.

        The parameter version must be acceptable to the StrictVersion
        constructor.  It may be either a string or StrictVersion.
        """
        return all(compmap[cond](version, ver) for cond, ver in self.pred)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
# Cache for the lazily-compiled regex used by split_provision().
_provision_rx = None
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def split_provision(value):
    """Return the name and optional version number of a provision.

    The version number, if given, will be returned as a `StrictVersion`
    instance, otherwise it will be `None`.

    >>> split_provision('mypkg')
    ('mypkg', None)
    >>> split_provision(' mypkg( 1.2 ) ')
    ('mypkg', StrictVersion ('1.2'))
    """
    global _provision_rx
    # Compile on first use and cache at module level.
    if _provision_rx is None:
        _provision_rx = re.compile(
            r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$", re.ASCII
        )
    m = _provision_rx.match(value.strip())
    if m is None:
        raise ValueError("illegal provides specification: %r" % value)
    ver = m.group(2) or None
    if ver:
        with version.suppress_known_deprecation():
            ver = version.StrictVersion(ver)
    return m.group(1), ver
|
.venv/Lib/site-packages/setuptools/_vendor/__init__.py
ADDED
|
File without changes
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__init__.py
ADDED
|
@@ -0,0 +1,904 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import re
|
| 3 |
+
import abc
|
| 4 |
+
import csv
|
| 5 |
+
import sys
|
| 6 |
+
from .. import zipp
|
| 7 |
+
import email
|
| 8 |
+
import pathlib
|
| 9 |
+
import operator
|
| 10 |
+
import textwrap
|
| 11 |
+
import warnings
|
| 12 |
+
import functools
|
| 13 |
+
import itertools
|
| 14 |
+
import posixpath
|
| 15 |
+
import collections
|
| 16 |
+
|
| 17 |
+
from . import _adapters, _meta, _py39compat
|
| 18 |
+
from ._collections import FreezableDefaultDict, Pair
|
| 19 |
+
from ._compat import (
|
| 20 |
+
NullFinder,
|
| 21 |
+
install,
|
| 22 |
+
pypy_partial,
|
| 23 |
+
)
|
| 24 |
+
from ._functools import method_cache, pass_none
|
| 25 |
+
from ._itertools import always_iterable, unique_everseen
|
| 26 |
+
from ._meta import PackageMetadata, SimplePath
|
| 27 |
+
|
| 28 |
+
from contextlib import suppress
|
| 29 |
+
from importlib import import_module
|
| 30 |
+
from importlib.abc import MetaPathFinder
|
| 31 |
+
from itertools import starmap
|
| 32 |
+
from typing import List, Mapping, Optional
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
__all__ = [
|
| 36 |
+
'Distribution',
|
| 37 |
+
'DistributionFinder',
|
| 38 |
+
'PackageMetadata',
|
| 39 |
+
'PackageNotFoundError',
|
| 40 |
+
'distribution',
|
| 41 |
+
'distributions',
|
| 42 |
+
'entry_points',
|
| 43 |
+
'files',
|
| 44 |
+
'metadata',
|
| 45 |
+
'packages_distributions',
|
| 46 |
+
'requires',
|
| 47 |
+
'version',
|
| 48 |
+
]
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class PackageNotFoundError(ModuleNotFoundError):
    """Raised when no metadata can be found for the requested package."""

    def __str__(self):
        return f"No package metadata was found for {self.name}"

    @property
    def name(self):
        # The package name is the sole positional argument; strict
        # one-element unpacking mirrors the original contract.
        (pkg_name,) = self.args
        return pkg_name
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class Sectioned:
    """
    A simple entry point config parser for performance

    ``read`` yields ``Pair(section_name, line)`` for every retained line;
    ``section_pairs`` additionally parses each line into ``Pair(key, value)``
    and drops lines appearing before any section header.

    >>> for item in Sectioned.read(Sectioned._sample):
    ...     print(item)
    Pair(name='sec1', value='# comments ignored')
    Pair(name='sec1', value='a = 1')
    Pair(name='sec1', value='b = 2')
    Pair(name='sec2', value='a = 2')

    >>> res = Sectioned.section_pairs(Sectioned._sample)
    >>> item = next(res)
    >>> item.name
    'sec1'
    >>> item.value
    Pair(name='a', value='1')
    """

    _sample = textwrap.dedent(
        """
        [sec1]
        # comments ignored
        a = 1
        b = 2

        [sec2]
        a = 2
        """
    ).lstrip()

    @classmethod
    def section_pairs(cls, text):
        # Only valid (non-blank, non-comment) lines under a named section
        # survive; each line's value is parsed into a key/value Pair.
        return (
            section._replace(value=Pair.parse(section.value))
            for section in cls.read(text, filter_=cls.valid)
            if section.name is not None
        )

    @staticmethod
    def read(text, filter_=None):
        current = None
        for stripped in filter(filter_, map(str.strip, text.splitlines())):
            # A '[name]' line switches the current section instead of
            # being yielded itself.
            if stripped.startswith('[') and stripped.endswith(']'):
                current = stripped.strip('[]')
            else:
                yield Pair(current, stripped)

    @staticmethod
    def valid(line):
        # Truthy for non-empty lines that are not comments.
        return line and not line.startswith('#')
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
class DeprecatedTuple:
    """
    Provide subscript item access for backward compatibility.

    Subclasses expose their legacy tuple form via ``_key()``; indexing an
    instance warns once per access and delegates to that tuple.

    >>> recwarn = getfixture('recwarn')
    >>> ep = EntryPoint(name='name', value='value', group='group')
    >>> ep[:]
    ('name', 'value', 'group')
    >>> ep[0]
    'name'
    >>> len(recwarn)
    1
    """

    # Do not remove prior to 2023-05-01 or Python 3.13
    _warn = functools.partial(
        warnings.warn,
        "EntryPoint tuple interface is deprecated. Access members by name.",
        DeprecationWarning,
        stacklevel=pypy_partial(2),
    )

    def __getitem__(self, item):
        # Warn on every tuple-style access, then defer to the legacy key.
        self._warn()
        return self._key()[item]
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
class EntryPoint(DeprecatedTuple):
|
| 156 |
+
"""An entry point as defined by Python packaging conventions.
|
| 157 |
+
|
| 158 |
+
See `the packaging docs on entry points
|
| 159 |
+
<https://packaging.python.org/specifications/entry-points/>`_
|
| 160 |
+
for more information.
|
| 161 |
+
|
| 162 |
+
>>> ep = EntryPoint(
|
| 163 |
+
... name=None, group=None, value='package.module:attr [extra1, extra2]')
|
| 164 |
+
>>> ep.module
|
| 165 |
+
'package.module'
|
| 166 |
+
>>> ep.attr
|
| 167 |
+
'attr'
|
| 168 |
+
>>> ep.extras
|
| 169 |
+
['extra1', 'extra2']
|
| 170 |
+
"""
|
| 171 |
+
|
| 172 |
+
pattern = re.compile(
|
| 173 |
+
r'(?P<module>[\w.]+)\s*'
|
| 174 |
+
r'(:\s*(?P<attr>[\w.]+)\s*)?'
|
| 175 |
+
r'((?P<extras>\[.*\])\s*)?$'
|
| 176 |
+
)
|
| 177 |
+
"""
|
| 178 |
+
A regular expression describing the syntax for an entry point,
|
| 179 |
+
which might look like:
|
| 180 |
+
|
| 181 |
+
- module
|
| 182 |
+
- package.module
|
| 183 |
+
- package.module:attribute
|
| 184 |
+
- package.module:object.attribute
|
| 185 |
+
- package.module:attr [extra1, extra2]
|
| 186 |
+
|
| 187 |
+
Other combinations are possible as well.
|
| 188 |
+
|
| 189 |
+
The expression is lenient about whitespace around the ':',
|
| 190 |
+
following the attr, and following any extras.
|
| 191 |
+
"""
|
| 192 |
+
|
| 193 |
+
name: str
|
| 194 |
+
value: str
|
| 195 |
+
group: str
|
| 196 |
+
|
| 197 |
+
dist: Optional['Distribution'] = None
|
| 198 |
+
|
| 199 |
+
def __init__(self, name, value, group):
|
| 200 |
+
vars(self).update(name=name, value=value, group=group)
|
| 201 |
+
|
| 202 |
+
def load(self):
|
| 203 |
+
"""Load the entry point from its definition. If only a module
|
| 204 |
+
is indicated by the value, return that module. Otherwise,
|
| 205 |
+
return the named object.
|
| 206 |
+
"""
|
| 207 |
+
match = self.pattern.match(self.value)
|
| 208 |
+
module = import_module(match.group('module'))
|
| 209 |
+
attrs = filter(None, (match.group('attr') or '').split('.'))
|
| 210 |
+
return functools.reduce(getattr, attrs, module)
|
| 211 |
+
|
| 212 |
+
@property
|
| 213 |
+
def module(self):
|
| 214 |
+
match = self.pattern.match(self.value)
|
| 215 |
+
return match.group('module')
|
| 216 |
+
|
| 217 |
+
@property
|
| 218 |
+
def attr(self):
|
| 219 |
+
match = self.pattern.match(self.value)
|
| 220 |
+
return match.group('attr')
|
| 221 |
+
|
| 222 |
+
@property
|
| 223 |
+
def extras(self):
|
| 224 |
+
match = self.pattern.match(self.value)
|
| 225 |
+
return re.findall(r'\w+', match.group('extras') or '')
|
| 226 |
+
|
| 227 |
+
def _for(self, dist):
|
| 228 |
+
vars(self).update(dist=dist)
|
| 229 |
+
return self
|
| 230 |
+
|
| 231 |
+
def matches(self, **params):
|
| 232 |
+
"""
|
| 233 |
+
EntryPoint matches the given parameters.
|
| 234 |
+
|
| 235 |
+
>>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]')
|
| 236 |
+
>>> ep.matches(group='foo')
|
| 237 |
+
True
|
| 238 |
+
>>> ep.matches(name='bar', value='bing:bong [extra1, extra2]')
|
| 239 |
+
True
|
| 240 |
+
>>> ep.matches(group='foo', name='other')
|
| 241 |
+
False
|
| 242 |
+
>>> ep.matches()
|
| 243 |
+
True
|
| 244 |
+
>>> ep.matches(extras=['extra1', 'extra2'])
|
| 245 |
+
True
|
| 246 |
+
>>> ep.matches(module='bing')
|
| 247 |
+
True
|
| 248 |
+
>>> ep.matches(attr='bong')
|
| 249 |
+
True
|
| 250 |
+
"""
|
| 251 |
+
attrs = (getattr(self, param) for param in params)
|
| 252 |
+
return all(map(operator.eq, params.values(), attrs))
|
| 253 |
+
|
| 254 |
+
def _key(self):
|
| 255 |
+
return self.name, self.value, self.group
|
| 256 |
+
|
| 257 |
+
def __lt__(self, other):
|
| 258 |
+
return self._key() < other._key()
|
| 259 |
+
|
| 260 |
+
def __eq__(self, other):
|
| 261 |
+
return self._key() == other._key()
|
| 262 |
+
|
| 263 |
+
def __setattr__(self, name, value):
|
| 264 |
+
raise AttributeError("EntryPoint objects are immutable.")
|
| 265 |
+
|
| 266 |
+
def __repr__(self):
|
| 267 |
+
return (
|
| 268 |
+
f'EntryPoint(name={self.name!r}, value={self.value!r}, '
|
| 269 |
+
f'group={self.group!r})'
|
| 270 |
+
)
|
| 271 |
+
|
| 272 |
+
def __hash__(self):
|
| 273 |
+
return hash(self._key())
|
| 274 |
+
|
| 275 |
+
|
| 276 |
+
class EntryPoints(tuple):
|
| 277 |
+
"""
|
| 278 |
+
An immutable collection of selectable EntryPoint objects.
|
| 279 |
+
"""
|
| 280 |
+
|
| 281 |
+
__slots__ = ()
|
| 282 |
+
|
| 283 |
+
def __getitem__(self, name): # -> EntryPoint:
|
| 284 |
+
"""
|
| 285 |
+
Get the EntryPoint in self matching name.
|
| 286 |
+
"""
|
| 287 |
+
try:
|
| 288 |
+
return next(iter(self.select(name=name)))
|
| 289 |
+
except StopIteration:
|
| 290 |
+
raise KeyError(name)
|
| 291 |
+
|
| 292 |
+
def select(self, **params):
|
| 293 |
+
"""
|
| 294 |
+
Select entry points from self that match the
|
| 295 |
+
given parameters (typically group and/or name).
|
| 296 |
+
"""
|
| 297 |
+
return EntryPoints(ep for ep in self if _py39compat.ep_matches(ep, **params))
|
| 298 |
+
|
| 299 |
+
@property
|
| 300 |
+
def names(self):
|
| 301 |
+
"""
|
| 302 |
+
Return the set of all names of all entry points.
|
| 303 |
+
"""
|
| 304 |
+
return {ep.name for ep in self}
|
| 305 |
+
|
| 306 |
+
@property
|
| 307 |
+
def groups(self):
|
| 308 |
+
"""
|
| 309 |
+
Return the set of all groups of all entry points.
|
| 310 |
+
"""
|
| 311 |
+
return {ep.group for ep in self}
|
| 312 |
+
|
| 313 |
+
@classmethod
|
| 314 |
+
def _from_text_for(cls, text, dist):
|
| 315 |
+
return cls(ep._for(dist) for ep in cls._from_text(text))
|
| 316 |
+
|
| 317 |
+
@staticmethod
|
| 318 |
+
def _from_text(text):
|
| 319 |
+
return (
|
| 320 |
+
EntryPoint(name=item.value.name, value=item.value.value, group=item.name)
|
| 321 |
+
for item in Sectioned.section_pairs(text or '')
|
| 322 |
+
)
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
class PackagePath(pathlib.PurePosixPath):
|
| 326 |
+
"""A reference to a path in a package"""
|
| 327 |
+
|
| 328 |
+
def read_text(self, encoding='utf-8'):
|
| 329 |
+
with self.locate().open(encoding=encoding) as stream:
|
| 330 |
+
return stream.read()
|
| 331 |
+
|
| 332 |
+
def read_binary(self):
|
| 333 |
+
with self.locate().open('rb') as stream:
|
| 334 |
+
return stream.read()
|
| 335 |
+
|
| 336 |
+
def locate(self):
|
| 337 |
+
"""Return a path-like object for this path"""
|
| 338 |
+
return self.dist.locate_file(self)
|
| 339 |
+
|
| 340 |
+
|
| 341 |
+
class FileHash:
|
| 342 |
+
def __init__(self, spec):
|
| 343 |
+
self.mode, _, self.value = spec.partition('=')
|
| 344 |
+
|
| 345 |
+
def __repr__(self):
|
| 346 |
+
return f'<FileHash mode: {self.mode} value: {self.value}>'
|
| 347 |
+
|
| 348 |
+
|
| 349 |
+
class Distribution(metaclass=abc.ABCMeta):
|
| 350 |
+
"""A Python distribution package."""
|
| 351 |
+
|
| 352 |
+
@abc.abstractmethod
|
| 353 |
+
def read_text(self, filename):
|
| 354 |
+
"""Attempt to load metadata file given by the name.
|
| 355 |
+
|
| 356 |
+
:param filename: The name of the file in the distribution info.
|
| 357 |
+
:return: The text if found, otherwise None.
|
| 358 |
+
"""
|
| 359 |
+
|
| 360 |
+
@abc.abstractmethod
|
| 361 |
+
def locate_file(self, path):
|
| 362 |
+
"""
|
| 363 |
+
Given a path to a file in this distribution, return a path
|
| 364 |
+
to it.
|
| 365 |
+
"""
|
| 366 |
+
|
| 367 |
+
@classmethod
|
| 368 |
+
def from_name(cls, name: str):
|
| 369 |
+
"""Return the Distribution for the given package name.
|
| 370 |
+
|
| 371 |
+
:param name: The name of the distribution package to search for.
|
| 372 |
+
:return: The Distribution instance (or subclass thereof) for the named
|
| 373 |
+
package, if found.
|
| 374 |
+
:raises PackageNotFoundError: When the named package's distribution
|
| 375 |
+
metadata cannot be found.
|
| 376 |
+
:raises ValueError: When an invalid value is supplied for name.
|
| 377 |
+
"""
|
| 378 |
+
if not name:
|
| 379 |
+
raise ValueError("A distribution name is required.")
|
| 380 |
+
try:
|
| 381 |
+
return next(cls.discover(name=name))
|
| 382 |
+
except StopIteration:
|
| 383 |
+
raise PackageNotFoundError(name)
|
| 384 |
+
|
| 385 |
+
@classmethod
|
| 386 |
+
def discover(cls, **kwargs):
|
| 387 |
+
"""Return an iterable of Distribution objects for all packages.
|
| 388 |
+
|
| 389 |
+
Pass a ``context`` or pass keyword arguments for constructing
|
| 390 |
+
a context.
|
| 391 |
+
|
| 392 |
+
:context: A ``DistributionFinder.Context`` object.
|
| 393 |
+
:return: Iterable of Distribution objects for all packages.
|
| 394 |
+
"""
|
| 395 |
+
context = kwargs.pop('context', None)
|
| 396 |
+
if context and kwargs:
|
| 397 |
+
raise ValueError("cannot accept context and kwargs")
|
| 398 |
+
context = context or DistributionFinder.Context(**kwargs)
|
| 399 |
+
return itertools.chain.from_iterable(
|
| 400 |
+
resolver(context) for resolver in cls._discover_resolvers()
|
| 401 |
+
)
|
| 402 |
+
|
| 403 |
+
@staticmethod
|
| 404 |
+
def at(path):
|
| 405 |
+
"""Return a Distribution for the indicated metadata path
|
| 406 |
+
|
| 407 |
+
:param path: a string or path-like object
|
| 408 |
+
:return: a concrete Distribution instance for the path
|
| 409 |
+
"""
|
| 410 |
+
return PathDistribution(pathlib.Path(path))
|
| 411 |
+
|
| 412 |
+
@staticmethod
|
| 413 |
+
def _discover_resolvers():
|
| 414 |
+
"""Search the meta_path for resolvers."""
|
| 415 |
+
declared = (
|
| 416 |
+
getattr(finder, 'find_distributions', None) for finder in sys.meta_path
|
| 417 |
+
)
|
| 418 |
+
return filter(None, declared)
|
| 419 |
+
|
| 420 |
+
@property
|
| 421 |
+
def metadata(self) -> _meta.PackageMetadata:
|
| 422 |
+
"""Return the parsed metadata for this Distribution.
|
| 423 |
+
|
| 424 |
+
The returned object will have keys that name the various bits of
|
| 425 |
+
metadata. See PEP 566 for details.
|
| 426 |
+
"""
|
| 427 |
+
text = (
|
| 428 |
+
self.read_text('METADATA')
|
| 429 |
+
or self.read_text('PKG-INFO')
|
| 430 |
+
# This last clause is here to support old egg-info files. Its
|
| 431 |
+
# effect is to just end up using the PathDistribution's self._path
|
| 432 |
+
# (which points to the egg-info file) attribute unchanged.
|
| 433 |
+
or self.read_text('')
|
| 434 |
+
)
|
| 435 |
+
return _adapters.Message(email.message_from_string(text))
|
| 436 |
+
|
| 437 |
+
@property
|
| 438 |
+
def name(self):
|
| 439 |
+
"""Return the 'Name' metadata for the distribution package."""
|
| 440 |
+
return self.metadata['Name']
|
| 441 |
+
|
| 442 |
+
@property
|
| 443 |
+
def _normalized_name(self):
|
| 444 |
+
"""Return a normalized version of the name."""
|
| 445 |
+
return Prepared.normalize(self.name)
|
| 446 |
+
|
| 447 |
+
@property
|
| 448 |
+
def version(self):
|
| 449 |
+
"""Return the 'Version' metadata for the distribution package."""
|
| 450 |
+
return self.metadata['Version']
|
| 451 |
+
|
| 452 |
+
@property
|
| 453 |
+
def entry_points(self):
|
| 454 |
+
return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self)
|
| 455 |
+
|
| 456 |
+
@property
|
| 457 |
+
def files(self):
|
| 458 |
+
"""Files in this distribution.
|
| 459 |
+
|
| 460 |
+
:return: List of PackagePath for this distribution or None
|
| 461 |
+
|
| 462 |
+
Result is `None` if the metadata file that enumerates files
|
| 463 |
+
(i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
|
| 464 |
+
missing.
|
| 465 |
+
Result may be empty if the metadata exists but is empty.
|
| 466 |
+
"""
|
| 467 |
+
|
| 468 |
+
def make_file(name, hash=None, size_str=None):
|
| 469 |
+
result = PackagePath(name)
|
| 470 |
+
result.hash = FileHash(hash) if hash else None
|
| 471 |
+
result.size = int(size_str) if size_str else None
|
| 472 |
+
result.dist = self
|
| 473 |
+
return result
|
| 474 |
+
|
| 475 |
+
@pass_none
|
| 476 |
+
def make_files(lines):
|
| 477 |
+
return list(starmap(make_file, csv.reader(lines)))
|
| 478 |
+
|
| 479 |
+
return make_files(self._read_files_distinfo() or self._read_files_egginfo())
|
| 480 |
+
|
| 481 |
+
def _read_files_distinfo(self):
|
| 482 |
+
"""
|
| 483 |
+
Read the lines of RECORD
|
| 484 |
+
"""
|
| 485 |
+
text = self.read_text('RECORD')
|
| 486 |
+
return text and text.splitlines()
|
| 487 |
+
|
| 488 |
+
def _read_files_egginfo(self):
|
| 489 |
+
"""
|
| 490 |
+
SOURCES.txt might contain literal commas, so wrap each line
|
| 491 |
+
in quotes.
|
| 492 |
+
"""
|
| 493 |
+
text = self.read_text('SOURCES.txt')
|
| 494 |
+
return text and map('"{}"'.format, text.splitlines())
|
| 495 |
+
|
| 496 |
+
@property
|
| 497 |
+
def requires(self):
|
| 498 |
+
"""Generated requirements specified for this Distribution"""
|
| 499 |
+
reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
|
| 500 |
+
return reqs and list(reqs)
|
| 501 |
+
|
| 502 |
+
def _read_dist_info_reqs(self):
|
| 503 |
+
return self.metadata.get_all('Requires-Dist')
|
| 504 |
+
|
| 505 |
+
def _read_egg_info_reqs(self):
|
| 506 |
+
source = self.read_text('requires.txt')
|
| 507 |
+
return pass_none(self._deps_from_requires_text)(source)
|
| 508 |
+
|
| 509 |
+
@classmethod
|
| 510 |
+
def _deps_from_requires_text(cls, source):
|
| 511 |
+
return cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source))
|
| 512 |
+
|
| 513 |
+
@staticmethod
|
| 514 |
+
def _convert_egg_info_reqs_to_simple_reqs(sections):
|
| 515 |
+
"""
|
| 516 |
+
Historically, setuptools would solicit and store 'extra'
|
| 517 |
+
requirements, including those with environment markers,
|
| 518 |
+
in separate sections. More modern tools expect each
|
| 519 |
+
dependency to be defined separately, with any relevant
|
| 520 |
+
extras and environment markers attached directly to that
|
| 521 |
+
requirement. This method converts the former to the
|
| 522 |
+
latter. See _test_deps_from_requires_text for an example.
|
| 523 |
+
"""
|
| 524 |
+
|
| 525 |
+
def make_condition(name):
|
| 526 |
+
return name and f'extra == "{name}"'
|
| 527 |
+
|
| 528 |
+
def quoted_marker(section):
|
| 529 |
+
section = section or ''
|
| 530 |
+
extra, sep, markers = section.partition(':')
|
| 531 |
+
if extra and markers:
|
| 532 |
+
markers = f'({markers})'
|
| 533 |
+
conditions = list(filter(None, [markers, make_condition(extra)]))
|
| 534 |
+
return '; ' + ' and '.join(conditions) if conditions else ''
|
| 535 |
+
|
| 536 |
+
def url_req_space(req):
|
| 537 |
+
"""
|
| 538 |
+
PEP 508 requires a space between the url_spec and the quoted_marker.
|
| 539 |
+
Ref python/importlib_metadata#357.
|
| 540 |
+
"""
|
| 541 |
+
# '@' is uniquely indicative of a url_req.
|
| 542 |
+
return ' ' * ('@' in req)
|
| 543 |
+
|
| 544 |
+
for section in sections:
|
| 545 |
+
space = url_req_space(section.value)
|
| 546 |
+
yield section.value + space + quoted_marker(section.name)
|
| 547 |
+
|
| 548 |
+
|
| 549 |
+
class DistributionFinder(MetaPathFinder):
|
| 550 |
+
"""
|
| 551 |
+
A MetaPathFinder capable of discovering installed distributions.
|
| 552 |
+
"""
|
| 553 |
+
|
| 554 |
+
class Context:
|
| 555 |
+
"""
|
| 556 |
+
Keyword arguments presented by the caller to
|
| 557 |
+
``distributions()`` or ``Distribution.discover()``
|
| 558 |
+
to narrow the scope of a search for distributions
|
| 559 |
+
in all DistributionFinders.
|
| 560 |
+
|
| 561 |
+
Each DistributionFinder may expect any parameters
|
| 562 |
+
and should attempt to honor the canonical
|
| 563 |
+
parameters defined below when appropriate.
|
| 564 |
+
"""
|
| 565 |
+
|
| 566 |
+
name = None
|
| 567 |
+
"""
|
| 568 |
+
Specific name for which a distribution finder should match.
|
| 569 |
+
A name of ``None`` matches all distributions.
|
| 570 |
+
"""
|
| 571 |
+
|
| 572 |
+
def __init__(self, **kwargs):
|
| 573 |
+
vars(self).update(kwargs)
|
| 574 |
+
|
| 575 |
+
@property
|
| 576 |
+
def path(self):
|
| 577 |
+
"""
|
| 578 |
+
The sequence of directory path that a distribution finder
|
| 579 |
+
should search.
|
| 580 |
+
|
| 581 |
+
Typically refers to Python installed package paths such as
|
| 582 |
+
"site-packages" directories and defaults to ``sys.path``.
|
| 583 |
+
"""
|
| 584 |
+
return vars(self).get('path', sys.path)
|
| 585 |
+
|
| 586 |
+
@abc.abstractmethod
|
| 587 |
+
def find_distributions(self, context=Context()):
|
| 588 |
+
"""
|
| 589 |
+
Find distributions.
|
| 590 |
+
|
| 591 |
+
Return an iterable of all Distribution instances capable of
|
| 592 |
+
loading the metadata for packages matching the ``context``,
|
| 593 |
+
a DistributionFinder.Context instance.
|
| 594 |
+
"""
|
| 595 |
+
|
| 596 |
+
|
| 597 |
+
class FastPath:
|
| 598 |
+
"""
|
| 599 |
+
Micro-optimized class for searching a path for
|
| 600 |
+
children.
|
| 601 |
+
|
| 602 |
+
>>> FastPath('').children()
|
| 603 |
+
['...']
|
| 604 |
+
"""
|
| 605 |
+
|
| 606 |
+
@functools.lru_cache() # type: ignore
|
| 607 |
+
def __new__(cls, root):
|
| 608 |
+
return super().__new__(cls)
|
| 609 |
+
|
| 610 |
+
def __init__(self, root):
|
| 611 |
+
self.root = root
|
| 612 |
+
|
| 613 |
+
def joinpath(self, child):
|
| 614 |
+
return pathlib.Path(self.root, child)
|
| 615 |
+
|
| 616 |
+
def children(self):
|
| 617 |
+
with suppress(Exception):
|
| 618 |
+
return os.listdir(self.root or '.')
|
| 619 |
+
with suppress(Exception):
|
| 620 |
+
return self.zip_children()
|
| 621 |
+
return []
|
| 622 |
+
|
| 623 |
+
def zip_children(self):
|
| 624 |
+
zip_path = zipp.Path(self.root)
|
| 625 |
+
names = zip_path.root.namelist()
|
| 626 |
+
self.joinpath = zip_path.joinpath
|
| 627 |
+
|
| 628 |
+
return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names)
|
| 629 |
+
|
| 630 |
+
def search(self, name):
|
| 631 |
+
return self.lookup(self.mtime).search(name)
|
| 632 |
+
|
| 633 |
+
@property
|
| 634 |
+
def mtime(self):
|
| 635 |
+
with suppress(OSError):
|
| 636 |
+
return os.stat(self.root).st_mtime
|
| 637 |
+
self.lookup.cache_clear()
|
| 638 |
+
|
| 639 |
+
@method_cache
|
| 640 |
+
def lookup(self, mtime):
|
| 641 |
+
return Lookup(self)
|
| 642 |
+
|
| 643 |
+
|
| 644 |
+
class Lookup:
|
| 645 |
+
def __init__(self, path: FastPath):
|
| 646 |
+
base = os.path.basename(path.root).lower()
|
| 647 |
+
base_is_egg = base.endswith(".egg")
|
| 648 |
+
self.infos = FreezableDefaultDict(list)
|
| 649 |
+
self.eggs = FreezableDefaultDict(list)
|
| 650 |
+
|
| 651 |
+
for child in path.children():
|
| 652 |
+
low = child.lower()
|
| 653 |
+
if low.endswith((".dist-info", ".egg-info")):
|
| 654 |
+
# rpartition is faster than splitext and suitable for this purpose.
|
| 655 |
+
name = low.rpartition(".")[0].partition("-")[0]
|
| 656 |
+
normalized = Prepared.normalize(name)
|
| 657 |
+
self.infos[normalized].append(path.joinpath(child))
|
| 658 |
+
elif base_is_egg and low == "egg-info":
|
| 659 |
+
name = base.rpartition(".")[0].partition("-")[0]
|
| 660 |
+
legacy_normalized = Prepared.legacy_normalize(name)
|
| 661 |
+
self.eggs[legacy_normalized].append(path.joinpath(child))
|
| 662 |
+
|
| 663 |
+
self.infos.freeze()
|
| 664 |
+
self.eggs.freeze()
|
| 665 |
+
|
| 666 |
+
def search(self, prepared):
|
| 667 |
+
infos = (
|
| 668 |
+
self.infos[prepared.normalized]
|
| 669 |
+
if prepared
|
| 670 |
+
else itertools.chain.from_iterable(self.infos.values())
|
| 671 |
+
)
|
| 672 |
+
eggs = (
|
| 673 |
+
self.eggs[prepared.legacy_normalized]
|
| 674 |
+
if prepared
|
| 675 |
+
else itertools.chain.from_iterable(self.eggs.values())
|
| 676 |
+
)
|
| 677 |
+
return itertools.chain(infos, eggs)
|
| 678 |
+
|
| 679 |
+
|
| 680 |
+
class Prepared:
|
| 681 |
+
"""
|
| 682 |
+
A prepared search for metadata on a possibly-named package.
|
| 683 |
+
"""
|
| 684 |
+
|
| 685 |
+
normalized = None
|
| 686 |
+
legacy_normalized = None
|
| 687 |
+
|
| 688 |
+
def __init__(self, name):
|
| 689 |
+
self.name = name
|
| 690 |
+
if name is None:
|
| 691 |
+
return
|
| 692 |
+
self.normalized = self.normalize(name)
|
| 693 |
+
self.legacy_normalized = self.legacy_normalize(name)
|
| 694 |
+
|
| 695 |
+
@staticmethod
|
| 696 |
+
def normalize(name):
|
| 697 |
+
"""
|
| 698 |
+
PEP 503 normalization plus dashes as underscores.
|
| 699 |
+
"""
|
| 700 |
+
return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_')
|
| 701 |
+
|
| 702 |
+
@staticmethod
|
| 703 |
+
def legacy_normalize(name):
|
| 704 |
+
"""
|
| 705 |
+
Normalize the package name as found in the convention in
|
| 706 |
+
older packaging tools versions and specs.
|
| 707 |
+
"""
|
| 708 |
+
return name.lower().replace('-', '_')
|
| 709 |
+
|
| 710 |
+
def __bool__(self):
|
| 711 |
+
return bool(self.name)
|
| 712 |
+
|
| 713 |
+
|
| 714 |
+
@install
|
| 715 |
+
class MetadataPathFinder(NullFinder, DistributionFinder):
|
| 716 |
+
"""A degenerate finder for distribution packages on the file system.
|
| 717 |
+
|
| 718 |
+
This finder supplies only a find_distributions() method for versions
|
| 719 |
+
of Python that do not have a PathFinder find_distributions().
|
| 720 |
+
"""
|
| 721 |
+
|
| 722 |
+
def find_distributions(self, context=DistributionFinder.Context()):
|
| 723 |
+
"""
|
| 724 |
+
Find distributions.
|
| 725 |
+
|
| 726 |
+
Return an iterable of all Distribution instances capable of
|
| 727 |
+
loading the metadata for packages matching ``context.name``
|
| 728 |
+
(or all names if ``None`` indicated) along the paths in the list
|
| 729 |
+
of directories ``context.path``.
|
| 730 |
+
"""
|
| 731 |
+
found = self._search_paths(context.name, context.path)
|
| 732 |
+
return map(PathDistribution, found)
|
| 733 |
+
|
| 734 |
+
@classmethod
|
| 735 |
+
def _search_paths(cls, name, paths):
|
| 736 |
+
"""Find metadata directories in paths heuristically."""
|
| 737 |
+
prepared = Prepared(name)
|
| 738 |
+
return itertools.chain.from_iterable(
|
| 739 |
+
path.search(prepared) for path in map(FastPath, paths)
|
| 740 |
+
)
|
| 741 |
+
|
| 742 |
+
def invalidate_caches(cls):
|
| 743 |
+
FastPath.__new__.cache_clear()
|
| 744 |
+
|
| 745 |
+
|
| 746 |
+
class PathDistribution(Distribution):
|
| 747 |
+
def __init__(self, path: SimplePath):
|
| 748 |
+
"""Construct a distribution.
|
| 749 |
+
|
| 750 |
+
:param path: SimplePath indicating the metadata directory.
|
| 751 |
+
"""
|
| 752 |
+
self._path = path
|
| 753 |
+
|
| 754 |
+
def read_text(self, filename):
|
| 755 |
+
with suppress(
|
| 756 |
+
FileNotFoundError,
|
| 757 |
+
IsADirectoryError,
|
| 758 |
+
KeyError,
|
| 759 |
+
NotADirectoryError,
|
| 760 |
+
PermissionError,
|
| 761 |
+
):
|
| 762 |
+
return self._path.joinpath(filename).read_text(encoding='utf-8')
|
| 763 |
+
|
| 764 |
+
read_text.__doc__ = Distribution.read_text.__doc__
|
| 765 |
+
|
| 766 |
+
def locate_file(self, path):
|
| 767 |
+
return self._path.parent / path
|
| 768 |
+
|
| 769 |
+
@property
|
| 770 |
+
def _normalized_name(self):
|
| 771 |
+
"""
|
| 772 |
+
Performance optimization: where possible, resolve the
|
| 773 |
+
normalized name from the file system path.
|
| 774 |
+
"""
|
| 775 |
+
stem = os.path.basename(str(self._path))
|
| 776 |
+
return (
|
| 777 |
+
pass_none(Prepared.normalize)(self._name_from_stem(stem))
|
| 778 |
+
or super()._normalized_name
|
| 779 |
+
)
|
| 780 |
+
|
| 781 |
+
@staticmethod
|
| 782 |
+
def _name_from_stem(stem):
|
| 783 |
+
"""
|
| 784 |
+
>>> PathDistribution._name_from_stem('foo-3.0.egg-info')
|
| 785 |
+
'foo'
|
| 786 |
+
>>> PathDistribution._name_from_stem('CherryPy-3.0.dist-info')
|
| 787 |
+
'CherryPy'
|
| 788 |
+
>>> PathDistribution._name_from_stem('face.egg-info')
|
| 789 |
+
'face'
|
| 790 |
+
>>> PathDistribution._name_from_stem('foo.bar')
|
| 791 |
+
"""
|
| 792 |
+
filename, ext = os.path.splitext(stem)
|
| 793 |
+
if ext not in ('.dist-info', '.egg-info'):
|
| 794 |
+
return
|
| 795 |
+
name, sep, rest = filename.partition('-')
|
| 796 |
+
return name
|
| 797 |
+
|
| 798 |
+
|
| 799 |
+
def distribution(distribution_name):
|
| 800 |
+
"""Get the ``Distribution`` instance for the named package.
|
| 801 |
+
|
| 802 |
+
:param distribution_name: The name of the distribution package as a string.
|
| 803 |
+
:return: A ``Distribution`` instance (or subclass thereof).
|
| 804 |
+
"""
|
| 805 |
+
return Distribution.from_name(distribution_name)
|
| 806 |
+
|
| 807 |
+
|
| 808 |
+
def distributions(**kwargs):
|
| 809 |
+
"""Get all ``Distribution`` instances in the current environment.
|
| 810 |
+
|
| 811 |
+
:return: An iterable of ``Distribution`` instances.
|
| 812 |
+
"""
|
| 813 |
+
return Distribution.discover(**kwargs)
|
| 814 |
+
|
| 815 |
+
|
| 816 |
+
def metadata(distribution_name) -> _meta.PackageMetadata:
|
| 817 |
+
"""Get the metadata for the named package.
|
| 818 |
+
|
| 819 |
+
:param distribution_name: The name of the distribution package to query.
|
| 820 |
+
:return: A PackageMetadata containing the parsed metadata.
|
| 821 |
+
"""
|
| 822 |
+
return Distribution.from_name(distribution_name).metadata
|
| 823 |
+
|
| 824 |
+
|
| 825 |
+
def version(distribution_name):
|
| 826 |
+
"""Get the version string for the named package.
|
| 827 |
+
|
| 828 |
+
:param distribution_name: The name of the distribution package to query.
|
| 829 |
+
:return: The version string for the package as defined in the package's
|
| 830 |
+
"Version" metadata key.
|
| 831 |
+
"""
|
| 832 |
+
return distribution(distribution_name).version
|
| 833 |
+
|
| 834 |
+
|
| 835 |
+
_unique = functools.partial(
|
| 836 |
+
unique_everseen,
|
| 837 |
+
key=_py39compat.normalized_name,
|
| 838 |
+
)
|
| 839 |
+
"""
|
| 840 |
+
Wrapper for ``distributions`` to return unique distributions by name.
|
| 841 |
+
"""
|
| 842 |
+
|
| 843 |
+
|
| 844 |
+
def entry_points(**params) -> EntryPoints:
|
| 845 |
+
"""Return EntryPoint objects for all installed packages.
|
| 846 |
+
|
| 847 |
+
Pass selection parameters (group or name) to filter the
|
| 848 |
+
result to entry points matching those properties (see
|
| 849 |
+
EntryPoints.select()).
|
| 850 |
+
|
| 851 |
+
:return: EntryPoints for all installed packages.
|
| 852 |
+
"""
|
| 853 |
+
eps = itertools.chain.from_iterable(
|
| 854 |
+
dist.entry_points for dist in _unique(distributions())
|
| 855 |
+
)
|
| 856 |
+
return EntryPoints(eps).select(**params)
|
| 857 |
+
|
| 858 |
+
|
| 859 |
+
def files(distribution_name):
|
| 860 |
+
"""Return a list of files for the named package.
|
| 861 |
+
|
| 862 |
+
:param distribution_name: The name of the distribution package to query.
|
| 863 |
+
:return: List of files composing the distribution.
|
| 864 |
+
"""
|
| 865 |
+
return distribution(distribution_name).files
|
| 866 |
+
|
| 867 |
+
|
| 868 |
+
def requires(distribution_name):
|
| 869 |
+
"""
|
| 870 |
+
Return a list of requirements for the named package.
|
| 871 |
+
|
| 872 |
+
:return: An iterator of requirements, suitable for
|
| 873 |
+
packaging.requirement.Requirement.
|
| 874 |
+
"""
|
| 875 |
+
return distribution(distribution_name).requires
|
| 876 |
+
|
| 877 |
+
|
| 878 |
+
def packages_distributions() -> Mapping[str, List[str]]:
|
| 879 |
+
"""
|
| 880 |
+
Return a mapping of top-level packages to their
|
| 881 |
+
distributions.
|
| 882 |
+
|
| 883 |
+
>>> import collections.abc
|
| 884 |
+
>>> pkgs = packages_distributions()
|
| 885 |
+
>>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values())
|
| 886 |
+
True
|
| 887 |
+
"""
|
| 888 |
+
pkg_to_dist = collections.defaultdict(list)
|
| 889 |
+
for dist in distributions():
|
| 890 |
+
for pkg in _top_level_declared(dist) or _top_level_inferred(dist):
|
| 891 |
+
pkg_to_dist[pkg].append(dist.metadata['Name'])
|
| 892 |
+
return dict(pkg_to_dist)
|
| 893 |
+
|
| 894 |
+
|
| 895 |
+
def _top_level_declared(dist):
|
| 896 |
+
return (dist.read_text('top_level.txt') or '').split()
|
| 897 |
+
|
| 898 |
+
|
| 899 |
+
def _top_level_inferred(dist):
|
| 900 |
+
return {
|
| 901 |
+
f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name
|
| 902 |
+
for f in always_iterable(dist.files)
|
| 903 |
+
if f.suffix == ".py"
|
| 904 |
+
}
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_adapters.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import warnings
|
| 3 |
+
import re
|
| 4 |
+
import textwrap
|
| 5 |
+
import email.message
|
| 6 |
+
|
| 7 |
+
from ._text import FoldedCase
|
| 8 |
+
from ._compat import pypy_partial
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
# Do not remove prior to 2024-01-01 or Python 3.14
|
| 12 |
+
_warn = functools.partial(
|
| 13 |
+
warnings.warn,
|
| 14 |
+
"Implicit None on return values is deprecated and will raise KeyErrors.",
|
| 15 |
+
DeprecationWarning,
|
| 16 |
+
stacklevel=pypy_partial(2),
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class Message(email.message.Message):
|
| 21 |
+
multiple_use_keys = set(
|
| 22 |
+
map(
|
| 23 |
+
FoldedCase,
|
| 24 |
+
[
|
| 25 |
+
'Classifier',
|
| 26 |
+
'Obsoletes-Dist',
|
| 27 |
+
'Platform',
|
| 28 |
+
'Project-URL',
|
| 29 |
+
'Provides-Dist',
|
| 30 |
+
'Provides-Extra',
|
| 31 |
+
'Requires-Dist',
|
| 32 |
+
'Requires-External',
|
| 33 |
+
'Supported-Platform',
|
| 34 |
+
'Dynamic',
|
| 35 |
+
],
|
| 36 |
+
)
|
| 37 |
+
)
|
| 38 |
+
"""
|
| 39 |
+
Keys that may be indicated multiple times per PEP 566.
|
| 40 |
+
"""
|
| 41 |
+
|
| 42 |
+
def __new__(cls, orig: email.message.Message):
|
| 43 |
+
res = super().__new__(cls)
|
| 44 |
+
vars(res).update(vars(orig))
|
| 45 |
+
return res
|
| 46 |
+
|
| 47 |
+
def __init__(self, *args, **kwargs):
|
| 48 |
+
self._headers = self._repair_headers()
|
| 49 |
+
|
| 50 |
+
# suppress spurious error from mypy
|
| 51 |
+
def __iter__(self):
|
| 52 |
+
return super().__iter__()
|
| 53 |
+
|
| 54 |
+
def __getitem__(self, item):
|
| 55 |
+
"""
|
| 56 |
+
Warn users that a ``KeyError`` can be expected when a
|
| 57 |
+
mising key is supplied. Ref python/importlib_metadata#371.
|
| 58 |
+
"""
|
| 59 |
+
res = super().__getitem__(item)
|
| 60 |
+
if res is None:
|
| 61 |
+
_warn()
|
| 62 |
+
return res
|
| 63 |
+
|
| 64 |
+
def _repair_headers(self):
|
| 65 |
+
def redent(value):
|
| 66 |
+
"Correct for RFC822 indentation"
|
| 67 |
+
if not value or '\n' not in value:
|
| 68 |
+
return value
|
| 69 |
+
return textwrap.dedent(' ' * 8 + value)
|
| 70 |
+
|
| 71 |
+
headers = [(key, redent(value)) for key, value in vars(self)['_headers']]
|
| 72 |
+
if self._payload:
|
| 73 |
+
headers.append(('Description', self.get_payload()))
|
| 74 |
+
return headers
|
| 75 |
+
|
| 76 |
+
@property
|
| 77 |
+
def json(self):
|
| 78 |
+
"""
|
| 79 |
+
Convert PackageMetadata to a JSON-compatible format
|
| 80 |
+
per PEP 0566.
|
| 81 |
+
"""
|
| 82 |
+
|
| 83 |
+
def transform(key):
|
| 84 |
+
value = self.get_all(key) if key in self.multiple_use_keys else self[key]
|
| 85 |
+
if key == 'Keywords':
|
| 86 |
+
value = re.split(r'\s+', value)
|
| 87 |
+
tk = key.lower().replace('-', '_')
|
| 88 |
+
return tk, value
|
| 89 |
+
|
| 90 |
+
return dict(map(transform, map(FoldedCase, self)))
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_collections.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
# from jaraco.collections 3.3
|
| 5 |
+
class FreezableDefaultDict(collections.defaultdict):
|
| 6 |
+
"""
|
| 7 |
+
Often it is desirable to prevent the mutation of
|
| 8 |
+
a default dict after its initial construction, such
|
| 9 |
+
as to prevent mutation during iteration.
|
| 10 |
+
|
| 11 |
+
>>> dd = FreezableDefaultDict(list)
|
| 12 |
+
>>> dd[0].append('1')
|
| 13 |
+
>>> dd.freeze()
|
| 14 |
+
>>> dd[1]
|
| 15 |
+
[]
|
| 16 |
+
>>> len(dd)
|
| 17 |
+
1
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
def __missing__(self, key):
|
| 21 |
+
return getattr(self, '_frozen', super().__missing__)(key)
|
| 22 |
+
|
| 23 |
+
def freeze(self):
|
| 24 |
+
self._frozen = lambda key: self.default_factory()
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class Pair(collections.namedtuple('Pair', 'name value')):
|
| 28 |
+
@classmethod
|
| 29 |
+
def parse(cls, text):
|
| 30 |
+
return cls(*map(str.strip, text.split("=", 1)))
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_compat.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
import platform
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
__all__ = ['install', 'NullFinder', 'Protocol']
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
try:
|
| 9 |
+
from typing import Protocol
|
| 10 |
+
except ImportError: # pragma: no cover
|
| 11 |
+
# Python 3.7 compatibility
|
| 12 |
+
from ..typing_extensions import Protocol # type: ignore
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def install(cls):
|
| 16 |
+
"""
|
| 17 |
+
Class decorator for installation on sys.meta_path.
|
| 18 |
+
|
| 19 |
+
Adds the backport DistributionFinder to sys.meta_path and
|
| 20 |
+
attempts to disable the finder functionality of the stdlib
|
| 21 |
+
DistributionFinder.
|
| 22 |
+
"""
|
| 23 |
+
sys.meta_path.append(cls())
|
| 24 |
+
disable_stdlib_finder()
|
| 25 |
+
return cls
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def disable_stdlib_finder():
|
| 29 |
+
"""
|
| 30 |
+
Give the backport primacy for discovering path-based distributions
|
| 31 |
+
by monkey-patching the stdlib O_O.
|
| 32 |
+
|
| 33 |
+
See #91 for more background for rationale on this sketchy
|
| 34 |
+
behavior.
|
| 35 |
+
"""
|
| 36 |
+
|
| 37 |
+
def matches(finder):
|
| 38 |
+
return getattr(
|
| 39 |
+
finder, '__module__', None
|
| 40 |
+
) == '_frozen_importlib_external' and hasattr(finder, 'find_distributions')
|
| 41 |
+
|
| 42 |
+
for finder in filter(matches, sys.meta_path): # pragma: nocover
|
| 43 |
+
del finder.find_distributions
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
class NullFinder:
|
| 47 |
+
"""
|
| 48 |
+
A "Finder" (aka "MetaClassFinder") that never finds any modules,
|
| 49 |
+
but may find distributions.
|
| 50 |
+
"""
|
| 51 |
+
|
| 52 |
+
@staticmethod
|
| 53 |
+
def find_spec(*args, **kwargs):
|
| 54 |
+
return None
|
| 55 |
+
|
| 56 |
+
# In Python 2, the import system requires finders
|
| 57 |
+
# to have a find_module() method, but this usage
|
| 58 |
+
# is deprecated in Python 3 in favor of find_spec().
|
| 59 |
+
# For the purposes of this finder (i.e. being present
|
| 60 |
+
# on sys.meta_path but having no other import
|
| 61 |
+
# system functionality), the two methods are identical.
|
| 62 |
+
find_module = find_spec
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def pypy_partial(val):
|
| 66 |
+
"""
|
| 67 |
+
Adjust for variable stacklevel on partial under PyPy.
|
| 68 |
+
|
| 69 |
+
Workaround for #327.
|
| 70 |
+
"""
|
| 71 |
+
is_pypy = platform.python_implementation() == 'PyPy'
|
| 72 |
+
return val + is_pypy
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_functools.py
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import types
|
| 2 |
+
import functools
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
# from jaraco.functools 3.3
|
| 6 |
+
def method_cache(method, cache_wrapper=None):
|
| 7 |
+
"""
|
| 8 |
+
Wrap lru_cache to support storing the cache data in the object instances.
|
| 9 |
+
|
| 10 |
+
Abstracts the common paradigm where the method explicitly saves an
|
| 11 |
+
underscore-prefixed protected property on first call and returns that
|
| 12 |
+
subsequently.
|
| 13 |
+
|
| 14 |
+
>>> class MyClass:
|
| 15 |
+
... calls = 0
|
| 16 |
+
...
|
| 17 |
+
... @method_cache
|
| 18 |
+
... def method(self, value):
|
| 19 |
+
... self.calls += 1
|
| 20 |
+
... return value
|
| 21 |
+
|
| 22 |
+
>>> a = MyClass()
|
| 23 |
+
>>> a.method(3)
|
| 24 |
+
3
|
| 25 |
+
>>> for x in range(75):
|
| 26 |
+
... res = a.method(x)
|
| 27 |
+
>>> a.calls
|
| 28 |
+
75
|
| 29 |
+
|
| 30 |
+
Note that the apparent behavior will be exactly like that of lru_cache
|
| 31 |
+
except that the cache is stored on each instance, so values in one
|
| 32 |
+
instance will not flush values from another, and when an instance is
|
| 33 |
+
deleted, so are the cached values for that instance.
|
| 34 |
+
|
| 35 |
+
>>> b = MyClass()
|
| 36 |
+
>>> for x in range(35):
|
| 37 |
+
... res = b.method(x)
|
| 38 |
+
>>> b.calls
|
| 39 |
+
35
|
| 40 |
+
>>> a.method(0)
|
| 41 |
+
0
|
| 42 |
+
>>> a.calls
|
| 43 |
+
75
|
| 44 |
+
|
| 45 |
+
Note that if method had been decorated with ``functools.lru_cache()``,
|
| 46 |
+
a.calls would have been 76 (due to the cached value of 0 having been
|
| 47 |
+
flushed by the 'b' instance).
|
| 48 |
+
|
| 49 |
+
Clear the cache with ``.cache_clear()``
|
| 50 |
+
|
| 51 |
+
>>> a.method.cache_clear()
|
| 52 |
+
|
| 53 |
+
Same for a method that hasn't yet been called.
|
| 54 |
+
|
| 55 |
+
>>> c = MyClass()
|
| 56 |
+
>>> c.method.cache_clear()
|
| 57 |
+
|
| 58 |
+
Another cache wrapper may be supplied:
|
| 59 |
+
|
| 60 |
+
>>> cache = functools.lru_cache(maxsize=2)
|
| 61 |
+
>>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache)
|
| 62 |
+
>>> a = MyClass()
|
| 63 |
+
>>> a.method2()
|
| 64 |
+
3
|
| 65 |
+
|
| 66 |
+
Caution - do not subsequently wrap the method with another decorator, such
|
| 67 |
+
as ``@property``, which changes the semantics of the function.
|
| 68 |
+
|
| 69 |
+
See also
|
| 70 |
+
http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
|
| 71 |
+
for another implementation and additional justification.
|
| 72 |
+
"""
|
| 73 |
+
cache_wrapper = cache_wrapper or functools.lru_cache()
|
| 74 |
+
|
| 75 |
+
def wrapper(self, *args, **kwargs):
|
| 76 |
+
# it's the first call, replace the method with a cached, bound method
|
| 77 |
+
bound_method = types.MethodType(method, self)
|
| 78 |
+
cached_method = cache_wrapper(bound_method)
|
| 79 |
+
setattr(self, method.__name__, cached_method)
|
| 80 |
+
return cached_method(*args, **kwargs)
|
| 81 |
+
|
| 82 |
+
# Support cache clear even before cache has been created.
|
| 83 |
+
wrapper.cache_clear = lambda: None
|
| 84 |
+
|
| 85 |
+
return wrapper
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
# From jaraco.functools 3.3
|
| 89 |
+
def pass_none(func):
|
| 90 |
+
"""
|
| 91 |
+
Wrap func so it's not called if its first param is None
|
| 92 |
+
|
| 93 |
+
>>> print_text = pass_none(print)
|
| 94 |
+
>>> print_text('text')
|
| 95 |
+
text
|
| 96 |
+
>>> print_text(None)
|
| 97 |
+
"""
|
| 98 |
+
|
| 99 |
+
@functools.wraps(func)
|
| 100 |
+
def wrapper(param, *args, **kwargs):
|
| 101 |
+
if param is not None:
|
| 102 |
+
return func(param, *args, **kwargs)
|
| 103 |
+
|
| 104 |
+
return wrapper
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_itertools.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import filterfalse
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def unique_everseen(iterable, key=None):
|
| 5 |
+
"List unique elements, preserving order. Remember all elements ever seen."
|
| 6 |
+
# unique_everseen('AAAABBBCCDAABBB') --> A B C D
|
| 7 |
+
# unique_everseen('ABBCcAD', str.lower) --> A B C D
|
| 8 |
+
seen = set()
|
| 9 |
+
seen_add = seen.add
|
| 10 |
+
if key is None:
|
| 11 |
+
for element in filterfalse(seen.__contains__, iterable):
|
| 12 |
+
seen_add(element)
|
| 13 |
+
yield element
|
| 14 |
+
else:
|
| 15 |
+
for element in iterable:
|
| 16 |
+
k = key(element)
|
| 17 |
+
if k not in seen:
|
| 18 |
+
seen_add(k)
|
| 19 |
+
yield element
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
# copied from more_itertools 8.8
|
| 23 |
+
def always_iterable(obj, base_type=(str, bytes)):
|
| 24 |
+
"""If *obj* is iterable, return an iterator over its items::
|
| 25 |
+
|
| 26 |
+
>>> obj = (1, 2, 3)
|
| 27 |
+
>>> list(always_iterable(obj))
|
| 28 |
+
[1, 2, 3]
|
| 29 |
+
|
| 30 |
+
If *obj* is not iterable, return a one-item iterable containing *obj*::
|
| 31 |
+
|
| 32 |
+
>>> obj = 1
|
| 33 |
+
>>> list(always_iterable(obj))
|
| 34 |
+
[1]
|
| 35 |
+
|
| 36 |
+
If *obj* is ``None``, return an empty iterable:
|
| 37 |
+
|
| 38 |
+
>>> obj = None
|
| 39 |
+
>>> list(always_iterable(None))
|
| 40 |
+
[]
|
| 41 |
+
|
| 42 |
+
By default, binary and text strings are not considered iterable::
|
| 43 |
+
|
| 44 |
+
>>> obj = 'foo'
|
| 45 |
+
>>> list(always_iterable(obj))
|
| 46 |
+
['foo']
|
| 47 |
+
|
| 48 |
+
If *base_type* is set, objects for which ``isinstance(obj, base_type)``
|
| 49 |
+
returns ``True`` won't be considered iterable.
|
| 50 |
+
|
| 51 |
+
>>> obj = {'a': 1}
|
| 52 |
+
>>> list(always_iterable(obj)) # Iterate over the dict's keys
|
| 53 |
+
['a']
|
| 54 |
+
>>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit
|
| 55 |
+
[{'a': 1}]
|
| 56 |
+
|
| 57 |
+
Set *base_type* to ``None`` to avoid any special handling and treat objects
|
| 58 |
+
Python considers iterable as iterable:
|
| 59 |
+
|
| 60 |
+
>>> obj = 'foo'
|
| 61 |
+
>>> list(always_iterable(obj, base_type=None))
|
| 62 |
+
['f', 'o', 'o']
|
| 63 |
+
"""
|
| 64 |
+
if obj is None:
|
| 65 |
+
return iter(())
|
| 66 |
+
|
| 67 |
+
if (base_type is not None) and isinstance(obj, base_type):
|
| 68 |
+
return iter((obj,))
|
| 69 |
+
|
| 70 |
+
try:
|
| 71 |
+
return iter(obj)
|
| 72 |
+
except TypeError:
|
| 73 |
+
return iter((obj,))
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_meta.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from ._compat import Protocol
|
| 2 |
+
from typing import Any, Dict, Iterator, List, TypeVar, Union
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
_T = TypeVar("_T")
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class PackageMetadata(Protocol):
|
| 9 |
+
def __len__(self) -> int:
|
| 10 |
+
... # pragma: no cover
|
| 11 |
+
|
| 12 |
+
def __contains__(self, item: str) -> bool:
|
| 13 |
+
... # pragma: no cover
|
| 14 |
+
|
| 15 |
+
def __getitem__(self, key: str) -> str:
|
| 16 |
+
... # pragma: no cover
|
| 17 |
+
|
| 18 |
+
def __iter__(self) -> Iterator[str]:
|
| 19 |
+
... # pragma: no cover
|
| 20 |
+
|
| 21 |
+
def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
|
| 22 |
+
"""
|
| 23 |
+
Return all values associated with a possibly multi-valued key.
|
| 24 |
+
"""
|
| 25 |
+
|
| 26 |
+
@property
|
| 27 |
+
def json(self) -> Dict[str, Union[str, List[str]]]:
|
| 28 |
+
"""
|
| 29 |
+
A JSON-compatible form of the metadata.
|
| 30 |
+
"""
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class SimplePath(Protocol[_T]):
|
| 34 |
+
"""
|
| 35 |
+
A minimal subset of pathlib.Path required by PathDistribution.
|
| 36 |
+
"""
|
| 37 |
+
|
| 38 |
+
def joinpath(self) -> _T:
|
| 39 |
+
... # pragma: no cover
|
| 40 |
+
|
| 41 |
+
def __truediv__(self, other: Union[str, _T]) -> _T:
|
| 42 |
+
... # pragma: no cover
|
| 43 |
+
|
| 44 |
+
@property
|
| 45 |
+
def parent(self) -> _T:
|
| 46 |
+
... # pragma: no cover
|
| 47 |
+
|
| 48 |
+
def read_text(self) -> str:
|
| 49 |
+
... # pragma: no cover
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_py39compat.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Compatibility layer with Python 3.8/3.9
|
| 3 |
+
"""
|
| 4 |
+
from typing import TYPE_CHECKING, Any, Optional
|
| 5 |
+
|
| 6 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 7 |
+
# Prevent circular imports on runtime.
|
| 8 |
+
from . import Distribution, EntryPoint
|
| 9 |
+
else:
|
| 10 |
+
Distribution = EntryPoint = Any
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def normalized_name(dist: Distribution) -> Optional[str]:
|
| 14 |
+
"""
|
| 15 |
+
Honor name normalization for distributions that don't provide ``_normalized_name``.
|
| 16 |
+
"""
|
| 17 |
+
try:
|
| 18 |
+
return dist._normalized_name
|
| 19 |
+
except AttributeError:
|
| 20 |
+
from . import Prepared # -> delay to prevent circular imports.
|
| 21 |
+
|
| 22 |
+
return Prepared.normalize(getattr(dist, "name", None) or dist.metadata['Name'])
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def ep_matches(ep: EntryPoint, **params) -> bool:
|
| 26 |
+
"""
|
| 27 |
+
Workaround for ``EntryPoint`` objects without the ``matches`` method.
|
| 28 |
+
"""
|
| 29 |
+
try:
|
| 30 |
+
return ep.matches(**params)
|
| 31 |
+
except AttributeError:
|
| 32 |
+
from . import EntryPoint # -> delay to prevent circular imports.
|
| 33 |
+
|
| 34 |
+
# Reconstruct the EntryPoint object to make sure it is compatible.
|
| 35 |
+
return EntryPoint(ep.name, ep.value, ep.group).matches(**params)
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_text.py
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
|
| 3 |
+
from ._functools import method_cache
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
# from jaraco.text 3.5
|
| 7 |
+
class FoldedCase(str):
|
| 8 |
+
"""
|
| 9 |
+
A case insensitive string class; behaves just like str
|
| 10 |
+
except compares equal when the only variation is case.
|
| 11 |
+
|
| 12 |
+
>>> s = FoldedCase('hello world')
|
| 13 |
+
|
| 14 |
+
>>> s == 'Hello World'
|
| 15 |
+
True
|
| 16 |
+
|
| 17 |
+
>>> 'Hello World' == s
|
| 18 |
+
True
|
| 19 |
+
|
| 20 |
+
>>> s != 'Hello World'
|
| 21 |
+
False
|
| 22 |
+
|
| 23 |
+
>>> s.index('O')
|
| 24 |
+
4
|
| 25 |
+
|
| 26 |
+
>>> s.split('O')
|
| 27 |
+
['hell', ' w', 'rld']
|
| 28 |
+
|
| 29 |
+
>>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
|
| 30 |
+
['alpha', 'Beta', 'GAMMA']
|
| 31 |
+
|
| 32 |
+
Sequence membership is straightforward.
|
| 33 |
+
|
| 34 |
+
>>> "Hello World" in [s]
|
| 35 |
+
True
|
| 36 |
+
>>> s in ["Hello World"]
|
| 37 |
+
True
|
| 38 |
+
|
| 39 |
+
You may test for set inclusion, but candidate and elements
|
| 40 |
+
must both be folded.
|
| 41 |
+
|
| 42 |
+
>>> FoldedCase("Hello World") in {s}
|
| 43 |
+
True
|
| 44 |
+
>>> s in {FoldedCase("Hello World")}
|
| 45 |
+
True
|
| 46 |
+
|
| 47 |
+
String inclusion works as long as the FoldedCase object
|
| 48 |
+
is on the right.
|
| 49 |
+
|
| 50 |
+
>>> "hello" in FoldedCase("Hello World")
|
| 51 |
+
True
|
| 52 |
+
|
| 53 |
+
But not if the FoldedCase object is on the left:
|
| 54 |
+
|
| 55 |
+
>>> FoldedCase('hello') in 'Hello World'
|
| 56 |
+
False
|
| 57 |
+
|
| 58 |
+
In that case, use in_:
|
| 59 |
+
|
| 60 |
+
>>> FoldedCase('hello').in_('Hello World')
|
| 61 |
+
True
|
| 62 |
+
|
| 63 |
+
>>> FoldedCase('hello') > FoldedCase('Hello')
|
| 64 |
+
False
|
| 65 |
+
"""
|
| 66 |
+
|
| 67 |
+
def __lt__(self, other):
|
| 68 |
+
return self.lower() < other.lower()
|
| 69 |
+
|
| 70 |
+
def __gt__(self, other):
|
| 71 |
+
return self.lower() > other.lower()
|
| 72 |
+
|
| 73 |
+
def __eq__(self, other):
|
| 74 |
+
return self.lower() == other.lower()
|
| 75 |
+
|
| 76 |
+
def __ne__(self, other):
|
| 77 |
+
return self.lower() != other.lower()
|
| 78 |
+
|
| 79 |
+
def __hash__(self):
|
| 80 |
+
return hash(self.lower())
|
| 81 |
+
|
| 82 |
+
def __contains__(self, other):
|
| 83 |
+
return super().lower().__contains__(other.lower())
|
| 84 |
+
|
| 85 |
+
def in_(self, other):
|
| 86 |
+
"Does self appear in other?"
|
| 87 |
+
return self in FoldedCase(other)
|
| 88 |
+
|
| 89 |
+
# cache lower since it's likely to be called frequently.
|
| 90 |
+
@method_cache
|
| 91 |
+
def lower(self):
|
| 92 |
+
return super().lower()
|
| 93 |
+
|
| 94 |
+
def index(self, sub):
|
| 95 |
+
return self.lower().index(sub.lower())
|
| 96 |
+
|
| 97 |
+
def split(self, splitter=' ', maxsplit=0):
|
| 98 |
+
pattern = re.compile(re.escape(splitter), re.I)
|
| 99 |
+
return pattern.split(self, maxsplit)
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__init__.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Read resources contained within a package."""
|
| 2 |
+
|
| 3 |
+
from ._common import (
|
| 4 |
+
as_file,
|
| 5 |
+
files,
|
| 6 |
+
Package,
|
| 7 |
+
)
|
| 8 |
+
|
| 9 |
+
from ._legacy import (
|
| 10 |
+
contents,
|
| 11 |
+
open_binary,
|
| 12 |
+
read_binary,
|
| 13 |
+
open_text,
|
| 14 |
+
read_text,
|
| 15 |
+
is_resource,
|
| 16 |
+
path,
|
| 17 |
+
Resource,
|
| 18 |
+
)
|
| 19 |
+
|
| 20 |
+
from .abc import ResourceReader
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
__all__ = [
|
| 24 |
+
'Package',
|
| 25 |
+
'Resource',
|
| 26 |
+
'ResourceReader',
|
| 27 |
+
'as_file',
|
| 28 |
+
'contents',
|
| 29 |
+
'files',
|
| 30 |
+
'is_resource',
|
| 31 |
+
'open_binary',
|
| 32 |
+
'open_text',
|
| 33 |
+
'path',
|
| 34 |
+
'read_binary',
|
| 35 |
+
'read_text',
|
| 36 |
+
]
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_adapters.py
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from contextlib import suppress
|
| 2 |
+
from io import TextIOWrapper
|
| 3 |
+
|
| 4 |
+
from . import abc
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class SpecLoaderAdapter:
|
| 8 |
+
"""
|
| 9 |
+
Adapt a package spec to adapt the underlying loader.
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
def __init__(self, spec, adapter=lambda spec: spec.loader):
|
| 13 |
+
self.spec = spec
|
| 14 |
+
self.loader = adapter(spec)
|
| 15 |
+
|
| 16 |
+
def __getattr__(self, name):
|
| 17 |
+
return getattr(self.spec, name)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class TraversableResourcesLoader:
|
| 21 |
+
"""
|
| 22 |
+
Adapt a loader to provide TraversableResources.
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
def __init__(self, spec):
|
| 26 |
+
self.spec = spec
|
| 27 |
+
|
| 28 |
+
def get_resource_reader(self, name):
|
| 29 |
+
return CompatibilityFiles(self.spec)._native()
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def _io_wrapper(file, mode='r', *args, **kwargs):
|
| 33 |
+
if mode == 'r':
|
| 34 |
+
return TextIOWrapper(file, *args, **kwargs)
|
| 35 |
+
elif mode == 'rb':
|
| 36 |
+
return file
|
| 37 |
+
raise ValueError(
|
| 38 |
+
"Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode)
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class CompatibilityFiles:
|
| 43 |
+
"""
|
| 44 |
+
Adapter for an existing or non-existent resource reader
|
| 45 |
+
to provide a compatibility .files().
|
| 46 |
+
"""
|
| 47 |
+
|
| 48 |
+
class SpecPath(abc.Traversable):
|
| 49 |
+
"""
|
| 50 |
+
Path tied to a module spec.
|
| 51 |
+
Can be read and exposes the resource reader children.
|
| 52 |
+
"""
|
| 53 |
+
|
| 54 |
+
def __init__(self, spec, reader):
|
| 55 |
+
self._spec = spec
|
| 56 |
+
self._reader = reader
|
| 57 |
+
|
| 58 |
+
def iterdir(self):
|
| 59 |
+
if not self._reader:
|
| 60 |
+
return iter(())
|
| 61 |
+
return iter(
|
| 62 |
+
CompatibilityFiles.ChildPath(self._reader, path)
|
| 63 |
+
for path in self._reader.contents()
|
| 64 |
+
)
|
| 65 |
+
|
| 66 |
+
def is_file(self):
|
| 67 |
+
return False
|
| 68 |
+
|
| 69 |
+
is_dir = is_file
|
| 70 |
+
|
| 71 |
+
def joinpath(self, other):
|
| 72 |
+
if not self._reader:
|
| 73 |
+
return CompatibilityFiles.OrphanPath(other)
|
| 74 |
+
return CompatibilityFiles.ChildPath(self._reader, other)
|
| 75 |
+
|
| 76 |
+
@property
|
| 77 |
+
def name(self):
|
| 78 |
+
return self._spec.name
|
| 79 |
+
|
| 80 |
+
def open(self, mode='r', *args, **kwargs):
|
| 81 |
+
return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs)
|
| 82 |
+
|
| 83 |
+
class ChildPath(abc.Traversable):
|
| 84 |
+
"""
|
| 85 |
+
Path tied to a resource reader child.
|
| 86 |
+
Can be read but doesn't expose any meaningful children.
|
| 87 |
+
"""
|
| 88 |
+
|
| 89 |
+
def __init__(self, reader, name):
|
| 90 |
+
self._reader = reader
|
| 91 |
+
self._name = name
|
| 92 |
+
|
| 93 |
+
def iterdir(self):
|
| 94 |
+
return iter(())
|
| 95 |
+
|
| 96 |
+
def is_file(self):
|
| 97 |
+
return self._reader.is_resource(self.name)
|
| 98 |
+
|
| 99 |
+
def is_dir(self):
|
| 100 |
+
return not self.is_file()
|
| 101 |
+
|
| 102 |
+
def joinpath(self, other):
|
| 103 |
+
return CompatibilityFiles.OrphanPath(self.name, other)
|
| 104 |
+
|
| 105 |
+
@property
|
| 106 |
+
def name(self):
|
| 107 |
+
return self._name
|
| 108 |
+
|
| 109 |
+
def open(self, mode='r', *args, **kwargs):
|
| 110 |
+
return _io_wrapper(
|
| 111 |
+
self._reader.open_resource(self.name), mode, *args, **kwargs
|
| 112 |
+
)
|
| 113 |
+
|
| 114 |
+
class OrphanPath(abc.Traversable):
|
| 115 |
+
"""
|
| 116 |
+
Orphan path, not tied to a module spec or resource reader.
|
| 117 |
+
Can't be read and doesn't expose any meaningful children.
|
| 118 |
+
"""
|
| 119 |
+
|
| 120 |
+
def __init__(self, *path_parts):
|
| 121 |
+
if len(path_parts) < 1:
|
| 122 |
+
raise ValueError('Need at least one path part to construct a path')
|
| 123 |
+
self._path = path_parts
|
| 124 |
+
|
| 125 |
+
def iterdir(self):
|
| 126 |
+
return iter(())
|
| 127 |
+
|
| 128 |
+
def is_file(self):
|
| 129 |
+
return False
|
| 130 |
+
|
| 131 |
+
is_dir = is_file
|
| 132 |
+
|
| 133 |
+
def joinpath(self, other):
|
| 134 |
+
return CompatibilityFiles.OrphanPath(*self._path, other)
|
| 135 |
+
|
| 136 |
+
@property
|
| 137 |
+
def name(self):
|
| 138 |
+
return self._path[-1]
|
| 139 |
+
|
| 140 |
+
def open(self, mode='r', *args, **kwargs):
|
| 141 |
+
raise FileNotFoundError("Can't open orphan path")
|
| 142 |
+
|
| 143 |
+
def __init__(self, spec):
|
| 144 |
+
self.spec = spec
|
| 145 |
+
|
| 146 |
+
@property
|
| 147 |
+
def _reader(self):
|
| 148 |
+
with suppress(AttributeError):
|
| 149 |
+
return self.spec.loader.get_resource_reader(self.spec.name)
|
| 150 |
+
|
| 151 |
+
def _native(self):
|
| 152 |
+
"""
|
| 153 |
+
Return the native reader if it supports files().
|
| 154 |
+
"""
|
| 155 |
+
reader = self._reader
|
| 156 |
+
return reader if hasattr(reader, 'files') else self
|
| 157 |
+
|
| 158 |
+
def __getattr__(self, attr):
|
| 159 |
+
return getattr(self._reader, attr)
|
| 160 |
+
|
| 161 |
+
def files(self):
|
| 162 |
+
return CompatibilityFiles.SpecPath(self.spec, self._reader)
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
def wrap_spec(package):
|
| 166 |
+
"""
|
| 167 |
+
Construct a package spec with traversable compatibility
|
| 168 |
+
on the spec/loader/reader.
|
| 169 |
+
"""
|
| 170 |
+
return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_common.py
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import pathlib
|
| 3 |
+
import tempfile
|
| 4 |
+
import functools
|
| 5 |
+
import contextlib
|
| 6 |
+
import types
|
| 7 |
+
import importlib
|
| 8 |
+
import inspect
|
| 9 |
+
import warnings
|
| 10 |
+
import itertools
|
| 11 |
+
|
| 12 |
+
from typing import Union, Optional, cast
|
| 13 |
+
from .abc import ResourceReader, Traversable
|
| 14 |
+
|
| 15 |
+
from ._compat import wrap_spec
|
| 16 |
+
|
| 17 |
+
Package = Union[types.ModuleType, str]
|
| 18 |
+
Anchor = Package
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def package_to_anchor(func):
|
| 22 |
+
"""
|
| 23 |
+
Replace 'package' parameter as 'anchor' and warn about the change.
|
| 24 |
+
|
| 25 |
+
Other errors should fall through.
|
| 26 |
+
|
| 27 |
+
>>> files('a', 'b')
|
| 28 |
+
Traceback (most recent call last):
|
| 29 |
+
TypeError: files() takes from 0 to 1 positional arguments but 2 were given
|
| 30 |
+
"""
|
| 31 |
+
undefined = object()
|
| 32 |
+
|
| 33 |
+
@functools.wraps(func)
|
| 34 |
+
def wrapper(anchor=undefined, package=undefined):
|
| 35 |
+
if package is not undefined:
|
| 36 |
+
if anchor is not undefined:
|
| 37 |
+
return func(anchor, package)
|
| 38 |
+
warnings.warn(
|
| 39 |
+
"First parameter to files is renamed to 'anchor'",
|
| 40 |
+
DeprecationWarning,
|
| 41 |
+
stacklevel=2,
|
| 42 |
+
)
|
| 43 |
+
return func(package)
|
| 44 |
+
elif anchor is undefined:
|
| 45 |
+
return func()
|
| 46 |
+
return func(anchor)
|
| 47 |
+
|
| 48 |
+
return wrapper
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
@package_to_anchor
|
| 52 |
+
def files(anchor: Optional[Anchor] = None) -> Traversable:
|
| 53 |
+
"""
|
| 54 |
+
Get a Traversable resource for an anchor.
|
| 55 |
+
"""
|
| 56 |
+
return from_package(resolve(anchor))
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]:
|
| 60 |
+
"""
|
| 61 |
+
Return the package's loader if it's a ResourceReader.
|
| 62 |
+
"""
|
| 63 |
+
# We can't use
|
| 64 |
+
# a issubclass() check here because apparently abc.'s __subclasscheck__()
|
| 65 |
+
# hook wants to create a weak reference to the object, but
|
| 66 |
+
# zipimport.zipimporter does not support weak references, resulting in a
|
| 67 |
+
# TypeError. That seems terrible.
|
| 68 |
+
spec = package.__spec__
|
| 69 |
+
reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore
|
| 70 |
+
if reader is None:
|
| 71 |
+
return None
|
| 72 |
+
return reader(spec.name) # type: ignore
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
@functools.singledispatch
|
| 76 |
+
def resolve(cand: Optional[Anchor]) -> types.ModuleType:
|
| 77 |
+
return cast(types.ModuleType, cand)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
@resolve.register
|
| 81 |
+
def _(cand: str) -> types.ModuleType:
|
| 82 |
+
return importlib.import_module(cand)
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
@resolve.register
|
| 86 |
+
def _(cand: None) -> types.ModuleType:
|
| 87 |
+
return resolve(_infer_caller().f_globals['__name__'])
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def _infer_caller():
|
| 91 |
+
"""
|
| 92 |
+
Walk the stack and find the frame of the first caller not in this module.
|
| 93 |
+
"""
|
| 94 |
+
|
| 95 |
+
def is_this_file(frame_info):
|
| 96 |
+
return frame_info.filename == __file__
|
| 97 |
+
|
| 98 |
+
def is_wrapper(frame_info):
|
| 99 |
+
return frame_info.function == 'wrapper'
|
| 100 |
+
|
| 101 |
+
not_this_file = itertools.filterfalse(is_this_file, inspect.stack())
|
| 102 |
+
# also exclude 'wrapper' due to singledispatch in the call stack
|
| 103 |
+
callers = itertools.filterfalse(is_wrapper, not_this_file)
|
| 104 |
+
return next(callers).frame
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def from_package(package: types.ModuleType):
|
| 108 |
+
"""
|
| 109 |
+
Return a Traversable object for the given package.
|
| 110 |
+
|
| 111 |
+
"""
|
| 112 |
+
spec = wrap_spec(package)
|
| 113 |
+
reader = spec.loader.get_resource_reader(spec.name)
|
| 114 |
+
return reader.files()
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
@contextlib.contextmanager
|
| 118 |
+
def _tempfile(
|
| 119 |
+
reader,
|
| 120 |
+
suffix='',
|
| 121 |
+
# gh-93353: Keep a reference to call os.remove() in late Python
|
| 122 |
+
# finalization.
|
| 123 |
+
*,
|
| 124 |
+
_os_remove=os.remove,
|
| 125 |
+
):
|
| 126 |
+
# Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
|
| 127 |
+
# blocks due to the need to close the temporary file to work on Windows
|
| 128 |
+
# properly.
|
| 129 |
+
fd, raw_path = tempfile.mkstemp(suffix=suffix)
|
| 130 |
+
try:
|
| 131 |
+
try:
|
| 132 |
+
os.write(fd, reader())
|
| 133 |
+
finally:
|
| 134 |
+
os.close(fd)
|
| 135 |
+
del reader
|
| 136 |
+
yield pathlib.Path(raw_path)
|
| 137 |
+
finally:
|
| 138 |
+
try:
|
| 139 |
+
_os_remove(raw_path)
|
| 140 |
+
except FileNotFoundError:
|
| 141 |
+
pass
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def _temp_file(path):
|
| 145 |
+
return _tempfile(path.read_bytes, suffix=path.name)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def _is_present_dir(path: Traversable) -> bool:
|
| 149 |
+
"""
|
| 150 |
+
Some Traversables implement ``is_dir()`` to raise an
|
| 151 |
+
exception (i.e. ``FileNotFoundError``) when the
|
| 152 |
+
directory doesn't exist. This function wraps that call
|
| 153 |
+
to always return a boolean and only return True
|
| 154 |
+
if there's a dir and it exists.
|
| 155 |
+
"""
|
| 156 |
+
with contextlib.suppress(FileNotFoundError):
|
| 157 |
+
return path.is_dir()
|
| 158 |
+
return False
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
@functools.singledispatch
|
| 162 |
+
def as_file(path):
|
| 163 |
+
"""
|
| 164 |
+
Given a Traversable object, return that object as a
|
| 165 |
+
path on the local file system in a context manager.
|
| 166 |
+
"""
|
| 167 |
+
return _temp_dir(path) if _is_present_dir(path) else _temp_file(path)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
@as_file.register(pathlib.Path)
|
| 171 |
+
@contextlib.contextmanager
|
| 172 |
+
def _(path):
|
| 173 |
+
"""
|
| 174 |
+
Degenerate behavior for pathlib.Path objects.
|
| 175 |
+
"""
|
| 176 |
+
yield path
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
@contextlib.contextmanager
|
| 180 |
+
def _temp_path(dir: tempfile.TemporaryDirectory):
|
| 181 |
+
"""
|
| 182 |
+
Wrap tempfile.TemporyDirectory to return a pathlib object.
|
| 183 |
+
"""
|
| 184 |
+
with dir as result:
|
| 185 |
+
yield pathlib.Path(result)
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
@contextlib.contextmanager
def _temp_dir(path):
    """
    Replicate the whole tree of a traversable directory onto the real
    file system inside a context manager, yielding the copied root.
    """
    assert path.is_dir()
    with _temp_path(tempfile.TemporaryDirectory()) as staging:
        yield _write_contents(staging, path)
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
def _write_contents(target, source):
|
| 200 |
+
child = target.joinpath(source.name)
|
| 201 |
+
if source.is_dir():
|
| 202 |
+
child.mkdir()
|
| 203 |
+
for item in source.iterdir():
|
| 204 |
+
_write_contents(child, item)
|
| 205 |
+
else:
|
| 206 |
+
child.write_bytes(source.read_bytes())
|
| 207 |
+
return child
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_compat.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# flake8: noqa

import abc
import os
import sys
import pathlib
from contextlib import suppress
from typing import Union


# zipfile.Path is only complete enough for our needs on 3.10+; older
# interpreters use the vendored zipp backport instead.
if sys.version_info >= (3, 10):
    from zipfile import Path as ZipPath  # type: ignore
else:
    from ..zipp import Path as ZipPath  # type: ignore


# typing.runtime_checkable may be missing on old interpreters; fall
# back to an identity decorator so class definitions still work.
try:
    from typing import runtime_checkable  # type: ignore
except ImportError:

    def runtime_checkable(cls):  # type: ignore
        return cls


# Likewise substitute abc.ABC when typing.Protocol is unavailable.
try:
    from typing import Protocol  # type: ignore
except ImportError:
    Protocol = abc.ABC  # type: ignore
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class TraversableResourcesLoader:
    """
    Adapt loaders to provide TraversableResources and other
    compatibility.

    Used primarily for Python 3.9 and earlier where the native
    loaders do not yet implement TraversableResources.
    """

    def __init__(self, spec):
        self.spec = spec

    @property
    def path(self):
        # File-system origin of the wrapped module spec (may be None).
        return self.spec.origin

    def get_resource_reader(self, name):
        """Return the most capable resource reader available for this spec."""
        from . import readers, _adapters

        def _zip_reader(spec):
            # AttributeError: loader lacks zipimport attributes.
            with suppress(AttributeError):
                return readers.ZipReader(spec.loader, spec.name)

        def _namespace_reader(spec):
            # AttributeError/ValueError: not a namespace package.
            with suppress(AttributeError, ValueError):
                return readers.NamespaceReader(spec.submodule_search_locations)

        def _available_reader(spec):
            # AttributeError: loader does not supply get_resource_reader.
            with suppress(AttributeError):
                return spec.loader.get_resource_reader(spec.name)

        def _native_reader(spec):
            reader = _available_reader(spec)
            return reader if hasattr(reader, 'files') else None

        def _file_reader(spec):
            try:
                path = pathlib.Path(self.path)
            except TypeError:
                # origin was None - not a file-backed module.
                return None
            if path.exists():
                return readers.FileReader(self)

        return (
            # native reader if it supplies 'files'
            _native_reader(self.spec)
            or
            # local ZipReader if a zip module
            _zip_reader(self.spec)
            or
            # local NamespaceReader if a namespace module
            _namespace_reader(self.spec)
            or
            # local FileReader
            _file_reader(self.spec)
            # fallback - adapt the spec ResourceReader to TraversableReader
            or _adapters.CompatibilityFiles(self.spec)
        )
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def wrap_spec(package):
    """
    Construct a package spec with traversable compatibility
    on the spec/loader/reader.

    Supersedes _adapters.wrap_spec to use TraversableResourcesLoader
    from above for older Python compatibility (<3.10).
    """
    from . import _adapters

    spec = package.__spec__
    return _adapters.SpecLoaderAdapter(spec, TraversableResourcesLoader)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
# Alias for arguments accepting a string or a path-like object.
if sys.version_info >= (3, 9):
    StrPath = Union[str, os.PathLike[str]]
else:
    # PathLike is only subscriptable at runtime in 3.9+
    StrPath = Union[str, "os.PathLike[str]"]
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_itertools.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import filterfalse
|
| 2 |
+
|
| 3 |
+
from typing import (
|
| 4 |
+
Callable,
|
| 5 |
+
Iterable,
|
| 6 |
+
Iterator,
|
| 7 |
+
Optional,
|
| 8 |
+
Set,
|
| 9 |
+
TypeVar,
|
| 10 |
+
Union,
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
# Type and type variable definitions
_T = TypeVar('_T')  # element type of the input iterable
_U = TypeVar('_U')  # type produced by the optional key callable
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def unique_everseen(
    iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None
) -> Iterator[_T]:
    "List unique elements, preserving order. Remember all elements ever seen."
    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
    # unique_everseen('ABBCcAD', str.lower) --> A B C D
    seen: Set[Union[_T, _U]] = set()
    if key is None:
        # Without a key, filterfalse skips already-seen elements at C speed.
        for element in filterfalse(seen.__contains__, iterable):
            seen.add(element)
            yield element
        return
    for element in iterable:
        marker = key(element)
        if marker in seen:
            continue
        seen.add(marker)
        yield element
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_legacy.py
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import os
|
| 3 |
+
import pathlib
|
| 4 |
+
import types
|
| 5 |
+
import warnings
|
| 6 |
+
|
| 7 |
+
from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any
|
| 8 |
+
|
| 9 |
+
from . import _common
|
| 10 |
+
|
| 11 |
+
# A package may be referenced by the module object itself or by name.
Package = Union[types.ModuleType, str]
# Resources are referenced by plain file name (no path separators).
Resource = str
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def deprecated(func):
    """Decorator: emit a DeprecationWarning steering callers to files()."""
    message = (
        f"{func.__name__} is deprecated. Use files() instead. "
        "Refer to https://importlib-resources.readthedocs.io"
        "/en/latest/using.html#migrating-from-legacy for migration advice."
    )

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # stacklevel=2 attributes the warning to the caller, not the wrapper.
        warnings.warn(message, DeprecationWarning, stacklevel=2)
        return func(*args, **kwargs)

    return wrapper
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def normalize_path(path: Any) -> str:
    """Normalize a path by ensuring it is a string.

    If the resulting string contains path separators, an exception is raised.
    """
    text = str(path)
    parent, file_name = os.path.split(text)
    if parent:
        # Separators would allow escaping the package directory.
        raise ValueError(f'{path!r} must be only a file name')
    return file_name
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
@deprecated
def open_binary(package: Package, resource: Resource) -> BinaryIO:
    """Return a file-like object opened for binary reading of the resource."""
    # normalize_path rejects resource names containing path separators.
    return (_common.files(package) / normalize_path(resource)).open('rb')
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
@deprecated
def read_binary(package: Package, resource: Resource) -> bytes:
    """Return the binary contents of the resource."""
    # normalize_path rejects resource names containing path separators.
    return (_common.files(package) / normalize_path(resource)).read_bytes()
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
@deprecated
def open_text(
    package: Package,
    resource: Resource,
    encoding: str = 'utf-8',
    errors: str = 'strict',
) -> TextIO:
    """Return a file-like object opened for text reading of the resource."""
    # Defaults mirror bytes.decode(): UTF-8 with strict error handling.
    return (_common.files(package) / normalize_path(resource)).open(
        'r', encoding=encoding, errors=errors
    )
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
@deprecated
def read_text(
    package: Package,
    resource: Resource,
    encoding: str = 'utf-8',
    errors: str = 'strict',
) -> str:
    """Return the decoded string of the resource.

    The decoding-related arguments have the same semantics as those of
    bytes.decode().
    """
    # Delegate to open_text so validation and decoding live in one place.
    with open_text(package, resource, encoding, errors) as fp:
        return fp.read()
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
@deprecated
def contents(package: Package) -> Iterable[str]:
    """Return an iterable of entries in `package`.

    Note that not all entries are resources. Specifically, directories are
    not considered resources. Use `is_resource()` on each entry returned here
    to check if it is a resource or not.
    """
    # Materialized as a list so callers may iterate it more than once.
    return [path.name for path in _common.files(package).iterdir()]
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
@deprecated
def is_resource(package: Package, name: str) -> bool:
    """True if `name` is a resource inside `package`.

    Directories are *not* resources.
    """
    resource = normalize_path(name)
    for entry in _common.files(package).iterdir():
        if entry.name == resource and entry.is_file():
            return True
    return False
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
@deprecated
def path(
    package: Package,
    resource: Resource,
) -> ContextManager[pathlib.Path]:
    """A context manager providing a file path object to the resource.

    If the resource does not already exist on its own on the file system,
    a temporary file will be created. If the file was created, the file
    will be deleted upon exiting the context manager (no exception is
    raised if the file was deleted prior to the context manager
    exiting).
    """
    # as_file transparently handles both on-disk and packed resources.
    return _common.as_file(_common.files(package) / normalize_path(resource))
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_resources/abc.py
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import io
|
| 3 |
+
import itertools
|
| 4 |
+
import pathlib
|
| 5 |
+
from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional
|
| 6 |
+
|
| 7 |
+
from ._compat import runtime_checkable, Protocol, StrPath
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
__all__ = ["ResourceReader", "Traversable", "TraversableResources"]
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class ResourceReader(metaclass=abc.ABCMeta):
    """Abstract base class for loaders to provide resource reading support."""

    @abc.abstractmethod
    def open_resource(self, resource: Text) -> BinaryIO:
        """Return an opened, file-like object for binary reading.

        The 'resource' argument is expected to represent only a file name.
        If the resource cannot be found, FileNotFoundError is raised.
        """
        # This deliberately raises FileNotFoundError instead of
        # NotImplementedError so that if this method is accidentally called,
        # it'll still do the right thing.
        raise FileNotFoundError

    @abc.abstractmethod
    def resource_path(self, resource: Text) -> Text:
        """Return the file system path to the specified resource.

        The 'resource' argument is expected to represent only a file name.
        If the resource does not exist on the file system, raise
        FileNotFoundError.
        """
        # This deliberately raises FileNotFoundError instead of
        # NotImplementedError so that if this method is accidentally called,
        # it'll still do the right thing.
        raise FileNotFoundError

    @abc.abstractmethod
    def is_resource(self, path: Text) -> bool:
        """Return True if the named 'path' is a resource.

        Files are resources, directories are not.
        """
        # Same rationale as above: FileNotFoundError is the safe default.
        raise FileNotFoundError

    @abc.abstractmethod
    def contents(self) -> Iterable[str]:
        """Return an iterable of entries in `package`."""
        # Same rationale as above: FileNotFoundError is the safe default.
        raise FileNotFoundError
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class TraversalError(Exception):
    """Raised when Traversable.joinpath cannot resolve a target segment."""

    pass
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
@runtime_checkable
class Traversable(Protocol):
    """
    An object with a subset of pathlib.Path methods suitable for
    traversing directories and opening files.

    Any exceptions that occur when accessing the backing resource
    may propagate unaltered.
    """

    @abc.abstractmethod
    def iterdir(self) -> Iterator["Traversable"]:
        """
        Yield Traversable objects in self
        """

    def read_bytes(self) -> bytes:
        """
        Read contents of self as bytes
        """
        with self.open('rb') as strm:
            return strm.read()

    def read_text(self, encoding: Optional[str] = None) -> str:
        """
        Read contents of self as text
        """
        with self.open(encoding=encoding) as strm:
            return strm.read()

    @abc.abstractmethod
    def is_dir(self) -> bool:
        """
        Return True if self is a directory
        """

    @abc.abstractmethod
    def is_file(self) -> bool:
        """
        Return True if self is a file
        """

    def joinpath(self, *descendants: StrPath) -> "Traversable":
        """
        Return Traversable resolved with any descendants applied.

        Each descendant should be a path segment relative to self
        and each may contain multiple levels separated by
        ``posixpath.sep`` (``/``).

        Raises TraversalError when a segment cannot be matched.
        """
        if not descendants:
            return self
        # Flatten all descendants into individual path segments.
        names = itertools.chain.from_iterable(
            path.parts for path in map(pathlib.PurePosixPath, descendants)
        )
        # Resolve only the first segment here; recurse for the rest.
        target = next(names)
        matches = (
            traversable for traversable in self.iterdir() if traversable.name == target
        )
        try:
            match = next(matches)
        except StopIteration:
            raise TraversalError(
                "Target not found during traversal.", target, list(names)
            )
        return match.joinpath(*names)

    def __truediv__(self, child: StrPath) -> "Traversable":
        """
        Return Traversable child in self
        """
        return self.joinpath(child)

    @abc.abstractmethod
    def open(self, mode='r', *args, **kwargs):
        """
        mode may be 'r' or 'rb' to open as text or binary. Return a handle
        suitable for reading (same as pathlib.Path.open).

        When opening as text, accepts encoding parameters such as those
        accepted by io.TextIOWrapper.
        """

    @property
    @abc.abstractmethod
    def name(self) -> str:
        """
        The base name of this object without any parent references.
        """
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
class TraversableResources(ResourceReader):
    """
    The required interface for providing traversable
    resources.
    """

    @abc.abstractmethod
    def files(self) -> "Traversable":
        """Return a Traversable object for the loaded package."""

    def open_resource(self, resource: StrPath) -> io.BufferedReader:
        # Resources are always opened in binary mode here.
        return self.files().joinpath(resource).open('rb')

    def resource_path(self, resource: Any) -> NoReturn:
        # Traversable resources need not exist on the real file system,
        # so a concrete path cannot be promised.
        raise FileNotFoundError(resource)

    def is_resource(self, path: StrPath) -> bool:
        return self.files().joinpath(path).is_file()

    def contents(self) -> Iterator[str]:
        return (item.name for item in self.files().iterdir())
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_resources/readers.py
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import pathlib
|
| 3 |
+
import operator
|
| 4 |
+
|
| 5 |
+
from . import abc
|
| 6 |
+
|
| 7 |
+
from ._itertools import unique_everseen
|
| 8 |
+
from ._compat import ZipPath
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def remove_duplicates(items):
    """Iterate *items* with later duplicates dropped, order preserved."""
    return iter(collections.OrderedDict.fromkeys(items))
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class FileReader(abc.TraversableResources):
    """TraversableResources backed by a real directory on disk."""

    def __init__(self, loader):
        # Resources live alongside the module the loader points at.
        self.path = pathlib.Path(loader.path).parent

    def resource_path(self, resource):
        """
        Return the file system path to prevent
        `resources.path()` from creating a temporary
        copy.
        """
        return str(self.path / resource)

    def files(self):
        return self.path
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class ZipReader(abc.TraversableResources):
    """TraversableResources over the contents of a zip archive."""

    def __init__(self, loader, module):
        # Only the unqualified (rightmost) module name contributes to the
        # in-archive prefix.
        name = module.rpartition('.')[2]
        self.prefix = loader.prefix.replace('\\', '/') + name + '/'
        self.archive = loader.archive

    def open_resource(self, resource):
        # zipfile raises KeyError for missing members; translate to the
        # FileNotFoundError callers of a resource reader expect.
        try:
            return super().open_resource(resource)
        except KeyError as exc:
            raise FileNotFoundError(exc.args[0])

    def is_resource(self, path):
        # workaround for `zipfile.Path.is_file` returning true
        # for non-existent paths.
        target = self.files().joinpath(path)
        return target.is_file() and target.exists()

    def files(self):
        return ZipPath(self.archive, self.prefix)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class MultiplexedPath(abc.Traversable):
    """
    Given a series of Traversable objects, implement a merged
    version of the interface across all objects. Useful for
    namespace packages which may be multihomed at a single
    name.
    """

    def __init__(self, *paths):
        self._paths = list(map(pathlib.Path, remove_duplicates(paths)))
        if not self._paths:
            message = 'MultiplexedPath must contain at least one path'
            raise FileNotFoundError(message)
        if not all(path.is_dir() for path in self._paths):
            raise NotADirectoryError('MultiplexedPath only supports directories')

    def iterdir(self):
        # Merge children across all homes; first occurrence of a name wins.
        files = (file for path in self._paths for file in path.iterdir())
        return unique_everseen(files, key=operator.attrgetter('name'))

    def read_bytes(self):
        raise FileNotFoundError(f'{self} is not a file')

    def read_text(self, *args, **kwargs):
        raise FileNotFoundError(f'{self} is not a file')

    def is_dir(self):
        # By construction (see __init__), this always aggregates directories.
        return True

    def is_file(self):
        return False

    def joinpath(self, *descendants):
        try:
            return super().joinpath(*descendants)
        except abc.TraversalError:
            # One of the paths did not resolve (a directory does not exist).
            # Just return something that will not exist.
            return self._paths[0].joinpath(*descendants)

    def open(self, *args, **kwargs):
        raise FileNotFoundError(f'{self} is not a file')

    @property
    def name(self):
        # All homes share a name; the first is as good as any.
        return self._paths[0].name

    def __repr__(self):
        paths = ', '.join(f"'{path}'" for path in self._paths)
        return f'MultiplexedPath({paths})'
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
class NamespaceReader(abc.TraversableResources):
    """TraversableResources for (possibly multihomed) namespace packages."""

    def __init__(self, namespace_path):
        # Guard against non-namespace paths; the import system's
        # _NamespacePath reprs with that name, which is the cheapest
        # available check here.
        if 'NamespacePath' not in str(namespace_path):
            raise ValueError('Invalid path')
        self.path = MultiplexedPath(*list(namespace_path))

    def resource_path(self, resource):
        """
        Return the file system path to prevent
        `resources.path()` from creating a temporary
        copy.
        """
        return str(self.path.joinpath(resource))

    def files(self):
        return self.path
|
.venv/Lib/site-packages/setuptools/_vendor/importlib_resources/simple.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Interface adapters for low-level readers.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import abc
|
| 6 |
+
import io
|
| 7 |
+
import itertools
|
| 8 |
+
from typing import BinaryIO, List
|
| 9 |
+
|
| 10 |
+
from .abc import Traversable, TraversableResources
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class SimpleReader(abc.ABC):
    """
    The minimum, low-level interface required from a resource
    provider.
    """

    @property
    @abc.abstractmethod
    def package(self) -> str:
        """
        The name of the package for which this reader loads resources.
        """

    @abc.abstractmethod
    def children(self) -> List['SimpleReader']:
        """
        Obtain an iterable of SimpleReader for available
        child containers (e.g. directories).
        """

    @abc.abstractmethod
    def resources(self) -> List[str]:
        """
        Obtain available named resources for this virtual package.
        """

    @abc.abstractmethod
    def open_binary(self, resource: str) -> BinaryIO:
        """
        Obtain a File-like for a named resource.
        """

    @property
    def name(self):
        # The unqualified (rightmost) segment of the dotted package name.
        return self.package.split('.')[-1]
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class ResourceContainer(Traversable):
    """
    Traversable container for a package's resources via its reader.
    """

    def __init__(self, reader: SimpleReader):
        self.reader = reader

    def is_dir(self):
        return True

    def is_file(self):
        return False

    def iterdir(self):
        # NOTE(review): `self.reader.resources` is iterated without being
        # called, so concrete readers are expected to expose it as an
        # iterable attribute/property - confirm against implementations.
        files = (ResourceHandle(self, name) for name in self.reader.resources)
        dirs = (ResourceContainer(child) for child in self.reader.children())
        return itertools.chain(files, dirs)

    def open(self, *args, **kwargs):
        raise IsADirectoryError()
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class ResourceHandle(Traversable):
    """
    Handle to a named resource in a ResourceReader.
    """

    def __init__(self, parent: ResourceContainer, name: str):
        self.parent = parent
        self.name = name  # type: ignore

    def is_file(self):
        return True

    def is_dir(self):
        return False

    def open(self, mode='r', *args, **kwargs):
        """
        Open the resource for reading.

        With 'b' in *mode* the raw binary stream is returned; otherwise
        it is wrapped in io.TextIOWrapper for text decoding, forwarding
        any extra arguments (e.g. encoding, errors).
        """
        stream = self.parent.reader.open_binary(self.name)
        if 'b' not in mode:
            # Bug fix: the binary stream must be supplied as TextIOWrapper's
            # buffer; previously it was dropped (TextIOWrapper(*args, **kwargs)),
            # which made text-mode opens fail or return an unrelated wrapper.
            stream = io.TextIOWrapper(stream, *args, **kwargs)
        return stream

    def joinpath(self, name):
        raise RuntimeError("Cannot traverse into a resource")
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class TraversableReader(TraversableResources, SimpleReader):
    """
    A TraversableResources based on SimpleReader. Resource providers
    may derive from this class to provide the TraversableResources
    interface by supplying the SimpleReader interface.
    """

    def files(self):
        # Expose this reader itself as the traversable root directory.
        return ResourceContainer(self)
|
.venv/Lib/site-packages/setuptools/_vendor/jaraco/__init__.py
ADDED
|
File without changes
|
.venv/Lib/site-packages/setuptools/_vendor/jaraco/context.py
ADDED
|
@@ -0,0 +1,288 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import subprocess
|
| 3 |
+
import contextlib
|
| 4 |
+
import functools
|
| 5 |
+
import tempfile
|
| 6 |
+
import shutil
|
| 7 |
+
import operator
|
| 8 |
+
import warnings
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@contextlib.contextmanager
def pushd(dir):
    """
    Change the working directory to *dir* for the duration of the context.

    >>> tmp_path = getfixture('tmp_path')
    >>> with pushd(tmp_path):
    ...     assert os.getcwd() == os.fspath(tmp_path)
    >>> assert os.getcwd() != os.fspath(tmp_path)
    """

    previous = os.getcwd()
    os.chdir(dir)
    try:
        yield dir
    finally:
        os.chdir(previous)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
@contextlib.contextmanager
def tarball_context(url, target_dir=None, runner=None, pushd=pushd):
    """
    Get a tarball, extract it, change to that directory, yield, then
    clean up.
    `runner` is the function to invoke commands.
    `pushd` is a context manager for changing the directory.
    """
    if target_dir is None:
        target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '')
    if runner is None:
        runner = functools.partial(subprocess.check_call, shell=True)
    else:
        warnings.warn("runner parameter is deprecated", DeprecationWarning)
    # In the tar command, use --strip-components=1 to strip the first path
    # component and use -C to cause the files to be extracted to
    # {target_dir}. This ensures we always know where the files were
    # extracted.
    # NOTE(review): commands are assembled via string formatting and run
    # with shell=True - only invoke this with trusted `url` values.
    runner('mkdir {target_dir}'.format(**vars()))
    try:
        getter = 'wget {url} -O -'
        extract = 'tar x{compression} --strip-components=1 -C {target_dir}'
        cmd = ' | '.join((getter, extract))
        runner(cmd.format(compression=infer_compression(url), **vars()))
        with pushd(target_dir):
            yield target_dir
    finally:
        # Best-effort removal of the extraction directory on exit.
        runner('rm -Rf {target_dir}'.format(**vars()))
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def infer_compression(url):
    """
    Given a URL or filename, infer the compression code for tar.

    >>> infer_compression('http://foo/bar.tar.gz')
    'z'
    >>> infer_compression('http://foo/bar.tgz')
    'z'
    >>> infer_compression('file.bz')
    'j'
    >>> infer_compression('file.xz')
    'J'
    """
    # Cheat: only the last two characters of the name are inspected,
    # defaulting to gzip ('z') when nothing matches.
    return {'gz': 'z', 'bz': 'j', 'xz': 'J'}.get(url[-2:], 'z')
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
@contextlib.contextmanager
def temp_dir(remover=shutil.rmtree):
    """
    Create a temporary directory context. Pass a custom remover
    to override the removal behavior.

    >>> import pathlib
    >>> with temp_dir() as the_dir:
    ...     assert os.path.isdir(the_dir)
    ...     _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents')
    >>> assert not os.path.exists(the_dir)
    """
    created = tempfile.mkdtemp()
    try:
        yield created
    finally:
        # The remover receives the directory path; the default deletes it.
        remover(created)
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
@contextlib.contextmanager
def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
    """
    Check out the repo indicated by url.

    If dest_ctx is supplied, it should be a context manager
    to yield the target directory for the check out.

    The VCS tool is inferred from the URL: 'git' when the URL mentions
    git, otherwise 'hg'. When *quiet*, the clone's stdout is discarded.
    """
    exe = 'git' if 'git' in url else 'hg'
    with dest_ctx() as repo_dir:
        cmd = [exe, 'clone', url, repo_dir]
        if branch:
            cmd.extend(['--branch', branch])
        # Use subprocess.DEVNULL rather than open(os.path.devnull, 'w'):
        # the original file handle was never closed (resource leak).
        stdout = subprocess.DEVNULL if quiet else None
        subprocess.check_call(cmd, stdout=stdout)
        yield repo_dir
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
@contextlib.contextmanager
def null():
    """
    A do-nothing context manager; stands in where a meaningful
    context is optional.

    >>> with null() as value:
    ...     assert value is None
    """
    yield None
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
class ExceptionTrap:
    """
    Context manager that swallows the configured exception types and
    records whether (and which) one occurred.

    >>> with ExceptionTrap() as trap:
    ...     raise Exception()
    >>> bool(trap)
    True

    >>> with ExceptionTrap() as trap:
    ...     pass
    >>> bool(trap)
    False

    >>> with ExceptionTrap(ValueError) as trap:
    ...     raise ValueError("1 + 1 is not 3")
    >>> bool(trap)
    True
    >>> trap.value
    ValueError('1 + 1 is not 3')
    >>> trap.tb
    <traceback object at ...>

    Exceptions outside the configured types propagate unchanged:

    >>> with ExceptionTrap(ValueError) as trap:
    ...     raise Exception()
    Traceback (most recent call last):
    ...
    Exception

    >>> bool(trap)
    False
    """

    # Default: nothing trapped yet — (type, value, traceback) all None.
    exc_info = None, None, None

    def __init__(self, exceptions=(Exception,)):
        self.exceptions = exceptions

    def __enter__(self):
        return self

    @property
    def type(self):
        """Class of the trapped exception, or None."""
        return self.exc_info[0]

    @property
    def value(self):
        """The trapped exception instance, or None."""
        return self.exc_info[1]

    @property
    def tb(self):
        """Traceback of the trapped exception, or None."""
        return self.exc_info[2]

    def __exit__(self, *exc_info):
        exc_type = exc_info[0]
        caught = exc_type is not None and issubclass(exc_type, self.exceptions)
        if caught:
            self.exc_info = exc_info
        # Truthy return suppresses the exception (i.e. it was trapped).
        return caught

    def __bool__(self):
        return bool(self.type)

    def raises(self, func, *, _test=bool):
        """
        Wrap *func* so calls return the truth value of the trap
        (True when the trapped exception occurred).

        First, give the decorator an alias to support Python 3.8
        Syntax.

        >>> raises = ExceptionTrap(ValueError).raises

        Now decorate a function that always fails.

        >>> @raises
        ... def fail():
        ...     raise ValueError('failed')
        >>> fail()
        True
        """

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # A fresh trap per call keeps invocations independent.
            with ExceptionTrap(self.exceptions) as trap:
                func(*args, **kwargs)
            return _test(trap)

        return wrapper

    def passes(self, func):
        """
        Wrap *func* so calls return True when NO trapped exception
        occurred (the inverse of :meth:`raises`).

        First, give the decorator an alias to support Python 3.8
        Syntax.

        >>> passes = ExceptionTrap(ValueError).passes

        Now decorate a function that always fails.

        >>> @passes
        ... def fail():
        ...     raise ValueError('failed')

        >>> fail()
        False
        """
        return self.raises(func, _test=operator.not_)
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
class suppress(contextlib.suppress, contextlib.ContextDecorator):
    """
    A version of contextlib.suppress with decorator support.

    Mixing in ContextDecorator lets an instance be applied as a function
    decorator in addition to being used as a context manager.

    >>> @suppress(KeyError)
    ... def key_error():
    ...     {}['']
    >>> key_error()
    """
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
class on_interrupt(contextlib.ContextDecorator):
    """
    Replace a KeyboardInterrupt with SystemExit(1).

    Actions: ``'error'`` converts the interrupt to ``SystemExit(code)``,
    ``'suppress'`` swallows it, ``'ignore'`` lets it propagate.

    >>> def do_interrupt():
    ...     raise KeyboardInterrupt()
    >>> on_interrupt('error')(do_interrupt)()
    Traceback (most recent call last):
    ...
    SystemExit: 1
    >>> on_interrupt('error', code=255)(do_interrupt)()
    Traceback (most recent call last):
    ...
    SystemExit: 255
    >>> on_interrupt('suppress')(do_interrupt)()
    >>> with __import__('pytest').raises(KeyboardInterrupt):
    ...     on_interrupt('ignore')(do_interrupt)()
    """

    def __init__(
        self,
        action='error',
        # py3.7 compat
        # /,
        code=1,
    ):
        self.action = action
        self.code = code

    def __enter__(self):
        return self

    def __exit__(self, exctype, excinst, exctb):
        # Other exceptions (and 'ignore') propagate unchanged.
        if exctype is not KeyboardInterrupt or self.action == 'ignore':
            return None
        if self.action == 'error':
            raise SystemExit(self.code) from excinst
        # Truthy return suppresses the KeyboardInterrupt.
        return self.action == 'suppress'
|
.venv/Lib/site-packages/setuptools/_vendor/jaraco/functools.py
ADDED
|
@@ -0,0 +1,556 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import time
|
| 3 |
+
import inspect
|
| 4 |
+
import collections
|
| 5 |
+
import types
|
| 6 |
+
import itertools
|
| 7 |
+
import warnings
|
| 8 |
+
|
| 9 |
+
import setuptools.extern.more_itertools
|
| 10 |
+
|
| 11 |
+
from typing import Callable, TypeVar
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# Type variable bound to any callable, so decorators below can declare
# that they return the same callable type they were given.
CallableT = TypeVar("CallableT", bound=Callable[..., object])
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def compose(*funcs):
    """
    Compose any number of unary functions into a single unary function.

    >>> import textwrap
    >>> expected = str.strip(textwrap.dedent(compose.__doc__))
    >>> strip_and_dedent = compose(str.strip, textwrap.dedent)
    >>> strip_and_dedent(compose.__doc__) == expected
    True

    Compose also allows the innermost function to take arbitrary arguments.

    >>> round_three = lambda x: round(x, ndigits=3)
    >>> f = compose(round_three, int.__truediv__)
    >>> [f(3*x, x+1) for x in range(1,10)]
    [1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7]
    """

    def pair(outer, inner):
        # Right function runs first; its result feeds the left one.
        return lambda *args, **kwargs: outer(inner(*args, **kwargs))

    return functools.reduce(pair, funcs)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def method_caller(method_name, *args, **kwargs):
    """
    Return a function that will call a named method on the
    target object with optional positional and keyword
    arguments.

    >>> lower = method_caller('lower')
    >>> lower('MyString')
    'mystring'
    """

    def call_method(target):
        # Look the method up on each target at call time.
        return getattr(target, method_name)(*args, **kwargs)

    return call_method
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def once(func):
    """
    Decorate func so it's only ever called the first time.

    This decorator can ensure that an expensive or non-idempotent function
    will not be expensive on subsequent calls and is idempotent.

    >>> add_three = once(lambda a: a+3)
    >>> add_three(3)
    6
    >>> add_three(9)
    6
    >>> add_three('12')
    6

    To reset the stored value, simply clear the property ``saved_result``.

    >>> del add_three.saved_result
    >>> add_three(9)
    12
    >>> add_three(8)
    12

    Or invoke 'reset()' on it.

    >>> add_three.reset()
    >>> add_three(-3)
    0
    >>> add_three(0)
    0
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # The first call stores its result on the wrapper; later calls
        # (with any arguments) return that stored value.
        try:
            return wrapper.saved_result
        except AttributeError:
            wrapper.saved_result = func(*args, **kwargs)
        return wrapper.saved_result

    wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result')
    return wrapper
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def method_cache(
    method: CallableT,
    cache_wrapper: Callable[
        [CallableT], CallableT
    ] = functools.lru_cache(),  # type: ignore[assignment]
) -> CallableT:
    """
    Wrap lru_cache to support storing the cache data in the object instances.

    Abstracts the common paradigm where the method explicitly saves an
    underscore-prefixed protected property on first call and returns that
    subsequently.

    >>> class MyClass:
    ...     calls = 0
    ...
    ...     @method_cache
    ...     def method(self, value):
    ...         self.calls += 1
    ...         return value

    >>> a = MyClass()
    >>> a.method(3)
    3
    >>> for x in range(75):
    ...     res = a.method(x)
    >>> a.calls
    75

    Note that the apparent behavior will be exactly like that of lru_cache
    except that the cache is stored on each instance, so values in one
    instance will not flush values from another, and when an instance is
    deleted, so are the cached values for that instance.

    >>> b = MyClass()
    >>> for x in range(35):
    ...     res = b.method(x)
    >>> b.calls
    35
    >>> a.method(0)
    0
    >>> a.calls
    75

    Note that if method had been decorated with ``functools.lru_cache()``,
    a.calls would have been 76 (due to the cached value of 0 having been
    flushed by the 'b' instance).

    Clear the cache with ``.cache_clear()``

    >>> a.method.cache_clear()

    Same for a method that hasn't yet been called.

    >>> c = MyClass()
    >>> c.method.cache_clear()

    Another cache wrapper may be supplied:

    >>> cache = functools.lru_cache(maxsize=2)
    >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache)
    >>> a = MyClass()
    >>> a.method2()
    3

    Caution - do not subsequently wrap the method with another decorator, such
    as ``@property``, which changes the semantics of the function.

    See also
    http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
    for another implementation and additional justification.
    """

    def wrapper(self: object, *args: object, **kwargs: object) -> object:
        # it's the first call, replace the method with a cached, bound method
        bound_method: CallableT = types.MethodType(  # type: ignore[assignment]
            method, self
        )
        cached_method = cache_wrapper(bound_method)
        # Shadow the class attribute on the instance so subsequent lookups
        # hit the per-instance cached method directly (wrapper runs once).
        setattr(self, method.__name__, cached_method)
        return cached_method(*args, **kwargs)

    # Support cache clear even before cache has been created.
    wrapper.cache_clear = lambda: None  # type: ignore[attr-defined]

    # Special (dunder) methods bypass instance attributes, so those are
    # handled by _special_method_cache instead of the wrapper above.
    return (  # type: ignore[return-value]
        _special_method_cache(method, cache_wrapper) or wrapper
    )
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def _special_method_cache(method, cache_wrapper):
|
| 192 |
+
"""
|
| 193 |
+
Because Python treats special methods differently, it's not
|
| 194 |
+
possible to use instance attributes to implement the cached
|
| 195 |
+
methods.
|
| 196 |
+
|
| 197 |
+
Instead, install the wrapper method under a different name
|
| 198 |
+
and return a simple proxy to that wrapper.
|
| 199 |
+
|
| 200 |
+
https://github.com/jaraco/jaraco.functools/issues/5
|
| 201 |
+
"""
|
| 202 |
+
name = method.__name__
|
| 203 |
+
special_names = '__getattr__', '__getitem__'
|
| 204 |
+
if name not in special_names:
|
| 205 |
+
return
|
| 206 |
+
|
| 207 |
+
wrapper_name = '__cached' + name
|
| 208 |
+
|
| 209 |
+
def proxy(self, *args, **kwargs):
|
| 210 |
+
if wrapper_name not in vars(self):
|
| 211 |
+
bound = types.MethodType(method, self)
|
| 212 |
+
cache = cache_wrapper(bound)
|
| 213 |
+
setattr(self, wrapper_name, cache)
|
| 214 |
+
else:
|
| 215 |
+
cache = getattr(self, wrapper_name)
|
| 216 |
+
return cache(*args, **kwargs)
|
| 217 |
+
|
| 218 |
+
return proxy
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
def apply(transform):
    """
    Decorate a function with a transform function that is
    invoked on results returned from the decorated function.

    >>> @apply(reversed)
    ... def get_numbers(start):
    ...     "doc for get_numbers"
    ...     return range(start, start+3)
    >>> list(get_numbers(4))
    [6, 5, 4]
    >>> get_numbers.__doc__
    'doc for get_numbers'
    """

    def wrap(func):
        # Inline composition: run func, then pass its result through
        # transform, preserving func's metadata.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            return transform(func(*args, **kwargs))

        return wrapper

    return wrap
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
def result_invoke(action):
    r"""
    Decorate a function with an action function that is
    invoked on the results returned from the decorated
    function (for its side-effect), then return the original
    result.

    >>> @result_invoke(print)
    ... def add_two(a, b):
    ...     return a + b
    >>> x = add_two(2, 3)
    5
    >>> x
    5
    """

    def wrap(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            outcome = func(*args, **kwargs)
            # action sees the result but cannot replace it.
            action(outcome)
            return outcome

        return wrapper

    return wrap
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
def invoke(f, *args, **kwargs):
    """
    Call *f* once with the given arguments, then return *f* itself.

    Used as a decorator, this makes explicit the intent to define a
    function and immediately execute it for its side effect, while the
    definition remains available for later reuse or testing. It avoids
    repeating the name and keeps the behavior scoped, documented, and
    logically separated from the surrounding module.

    >>> @invoke
    ... def func(): print("called")
    called
    >>> func()
    called

    Use functools.partial to pass parameters to the initial call

    >>> @functools.partial(invoke, name='bingo')
    ... def func(name): print("called with", name)
    called with bingo
    """
    f(*args, **kwargs)
    return f
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
def call_aside(*args, **kwargs):
    """
    Deprecated name for invoke.
    """
    # Warn here (not at import) so only actual callers are nudged to migrate.
    warnings.warn("call_aside is deprecated, use invoke", DeprecationWarning)
    return invoke(*args, **kwargs)
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
class Throttler:
    """
    Rate-limit a function (or other callable).

    Calls are spaced at least ``1 / max_rate`` seconds apart; the default
    rate of infinity imposes no delay.
    """

    def __init__(self, func, max_rate=float('Inf')):
        # Unwrap an already-throttled callable so throttlers don't nest.
        if isinstance(func, Throttler):
            func = func.func
        self.func = func
        self.max_rate = max_rate
        self.reset()

    def reset(self):
        # Epoch 0 guarantees the first call is never delayed.
        self.last_called = 0

    def __call__(self, *args, **kwargs):
        self._wait()
        return self.func(*args, **kwargs)

    def _wait(self):
        "ensure at least 1/max_rate seconds from last call"
        pause = 1 / self.max_rate - (time.time() - self.last_called)
        time.sleep(max(0, pause))
        self.last_called = time.time()

    def __get__(self, obj, type=None):
        # Descriptor support: throttle bound-method access as well.
        return first_invoke(self._wait, functools.partial(self.func, obj))
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
def first_invoke(func1, func2):
    """
    Return a function that when invoked will invoke func1 without
    any parameters (for its side-effect) and then invoke func2
    with whatever parameters were passed, returning its result.
    """

    def chained(*args, **kwargs):
        func1()
        return func2(*args, **kwargs)

    return chained
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
    """
    Given a callable func, trap the indicated exceptions
    for up to 'retries' times, invoking cleanup on the
    exception. On the final attempt, allow any exceptions
    to propagate.
    """
    # retries may be float('inf') for unbounded retrying.
    attempts = itertools.count() if retries == float('inf') else range(retries)
    for _ in attempts:
        try:
            return func()
        except trap:
            cleanup()

    # Last chance: exceptions now propagate to the caller.
    return func()
|
| 379 |
+
|
| 380 |
+
|
| 381 |
+
def retry(*r_args, **r_kwargs):
    """
    Decorator wrapper for retry_call. Accepts arguments to retry_call
    except func and then returns a decorator for the decorated function.

    Ex:

    >>> @retry(retries=3)
    ... def my_func(a, b):
    ...     "this is my funk"
    ...     print(a, b)
    >>> my_func.__doc__
    'this is my funk'
    """

    def decorate(func):
        @functools.wraps(func)
        def wrapper(*f_args, **f_kwargs):
            # Freeze the call's arguments, then delegate retry policy
            # entirely to retry_call.
            bound = functools.partial(func, *f_args, **f_kwargs)
            return retry_call(bound, *r_args, **r_kwargs)

        return wrapper

    return decorate
|
| 405 |
+
|
| 406 |
+
|
| 407 |
+
def print_yielded(func):
    """
    Convert a generator into a function that prints all yielded elements

    >>> @print_yielded
    ... def x():
    ...     yield 3; yield None
    >>> x()
    3
    None
    """
    print_all = functools.partial(map, print)
    # The module imports ``setuptools.extern.more_itertools``, which binds
    # only the ``setuptools`` name — the bare ``more_itertools`` reference
    # was a NameError. Use the fully-qualified vendored name.
    print_results = compose(
        setuptools.extern.more_itertools.consume, print_all, func
    )
    return functools.wraps(func)(print_results)
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
def pass_none(func):
    """
    Wrap func so it's not called if its first param is None

    >>> print_text = pass_none(print)
    >>> print_text('text')
    text
    >>> print_text(None)
    """

    @functools.wraps(func)
    def wrapper(param, *args, **kwargs):
        # Guard clause: a None first argument short-circuits to None.
        if param is None:
            return None
        return func(param, *args, **kwargs)

    return wrapper
|
| 439 |
+
|
| 440 |
+
|
| 441 |
+
def assign_params(func, namespace):
    """
    Assign parameters from namespace where func solicits.

    >>> def func(x, y=3):
    ...     print(x, y)
    >>> assigned = assign_params(func, dict(x=2, z=4))
    >>> assigned()
    2 3

    The usual errors are raised if a function doesn't receive
    its required parameters:

    >>> assigned = assign_params(func, dict(y=3, z=4))
    >>> assigned()
    Traceback (most recent call last):
    TypeError: func() ...argument...

    It even works on methods:

    >>> class Handler:
    ...     def meth(self, arg):
    ...         print(arg)
    >>> assign_params(Handler().meth, dict(arg='crystal', foo='clear'))()
    crystal
    """
    # Only forward namespace entries the function actually declares.
    accepted = inspect.signature(func).parameters.keys()
    call_ns = {name: namespace[name] for name in accepted if name in namespace}
    return functools.partial(func, **call_ns)
|
| 471 |
+
|
| 472 |
+
|
| 473 |
+
def save_method_args(method):
    """
    Wrap a method such that when it is called, the args and kwargs are
    saved on the method.

    >>> class MyClass:
    ...     @save_method_args
    ...     def method(self, a, b):
    ...         print(a, b)
    >>> my_ob = MyClass()
    >>> my_ob.method(1, 2)
    1 2
    >>> my_ob._saved_method.args
    (1, 2)
    >>> my_ob._saved_method.kwargs
    {}
    >>> my_ob.method(a=3, b='foo')
    3 foo
    >>> my_ob._saved_method.args
    ()
    >>> my_ob._saved_method.kwargs == dict(a=3, b='foo')
    True

    The arguments are stored on the instance, allowing for
    different instance to save different args.

    >>> your_ob = MyClass()
    >>> your_ob.method({str('x'): 3}, b=[4])
    {'x': 3} [4]
    >>> your_ob._saved_method.args
    ({'x': 3},)
    >>> my_ob._saved_method.args
    ()
    """
    args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')

    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        # Record the call on the instance under '_saved_<method name>'
        # before delegating to the wrapped method.
        saved = args_and_kwargs(args, kwargs)
        setattr(self, '_saved_' + method.__name__, saved)
        return method(self, *args, **kwargs)

    return wrapper
|
| 517 |
+
|
| 518 |
+
|
| 519 |
+
def except_(*exceptions, replace=None, use=None):
    """
    Replace the indicated exceptions, if raised, with the indicated
    literal replacement or evaluated expression (if present).

    >>> safe_int = except_(ValueError)(int)
    >>> safe_int('five')
    >>> safe_int('5')
    5

    Specify a literal replacement with ``replace``.

    >>> safe_int_r = except_(ValueError, replace=0)(int)
    >>> safe_int_r('five')
    0

    Provide an expression to ``use`` to pass through particular parameters.

    >>> safe_int_pt = except_(ValueError, use='args[0]')(int)
    >>> safe_int_pt('five')
    'five'

    """

    def decorate(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except exceptions:
                try:
                    # ``use`` is evaluated against this frame's locals, so
                    # expressions like 'args[0]' see the call's arguments.
                    # NOTE: eval of a caller-supplied string — callers must
                    # only pass trusted expressions.
                    return eval(use)
                except TypeError:
                    # use is None (eval(None) raises TypeError): fall back
                    # to the literal replacement.
                    return replace

        return wrapper

    return decorate
|
.venv/Lib/site-packages/setuptools/_vendor/jaraco/text/__init__.py
ADDED
|
@@ -0,0 +1,599 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
import itertools
|
| 3 |
+
import textwrap
|
| 4 |
+
import functools
|
| 5 |
+
|
| 6 |
+
try:
|
| 7 |
+
from importlib.resources import files # type: ignore
|
| 8 |
+
except ImportError: # pragma: nocover
|
| 9 |
+
from setuptools.extern.importlib_resources import files # type: ignore
|
| 10 |
+
|
| 11 |
+
from setuptools.extern.jaraco.functools import compose, method_cache
|
| 12 |
+
from setuptools.extern.jaraco.context import ExceptionTrap
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def substitution(old, new):
    """
    Return a callable that replaces ``old`` with ``new`` in the string
    it is given.
    """

    def replace_in(text):
        return text.replace(old, new)

    return replace_in


def multi_substitution(*substitutions):
    """
    Take a sequence of pairs specifying substitutions and create a
    function that applies those substitutions, in order, to a string.

    >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo')
    'baz'
    """
    subs = tuple(itertools.starmap(substitution, substitutions))

    def apply_all(text):
        # fold each substitution over the text, first pair first
        return functools.reduce(lambda acc, sub: sub(acc), subs, text)

    return apply_all
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class FoldedCase(str):
    """
    A case-insensitive string class; behaves just like str
    except compares equal when the only variation is case.

    >>> s = FoldedCase('hello world')
    >>> s == 'Hello World'
    True
    >>> 'Hello World' == s
    True
    >>> s != 'Hello World'
    False
    >>> s.index('O')
    4
    >>> s.split('O')
    ['hell', ' w', 'rld']
    >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
    ['alpha', 'Beta', 'GAMMA']

    Set membership requires candidate and elements both be folded.

    >>> FoldedCase("Hello World") in {s}
    True

    String inclusion works as long as the FoldedCase object
    is on the right.

    >>> "hello" in FoldedCase("Hello World")
    True

    But not if the FoldedCase object is on the left; use ``in_`` there.

    >>> FoldedCase('hello').in_('Hello World')
    True

    >>> FoldedCase('hello') > FoldedCase('Hello')
    False
    """

    def __lt__(self, other):
        return self.lower() < other.lower()

    def __gt__(self, other):
        return self.lower() > other.lower()

    def __eq__(self, other):
        return self.lower() == other.lower()

    def __ne__(self, other):
        return self.lower() != other.lower()

    def __hash__(self):
        # hash on the folded form so equal-ignoring-case values collide
        return hash(self.lower())

    def __contains__(self, other):
        return other.lower() in super().lower()

    def in_(self, other):
        "Does self appear in other?"
        return self in FoldedCase(other)

    # cache lower() since it's likely to be called frequently.
    @method_cache
    def lower(self):
        return super().lower()

    def index(self, sub):
        "Case-insensitive index."
        return self.lower().index(sub.lower())

    def split(self, splitter=' ', maxsplit=0):
        "Case-insensitive split."
        pattern = re.compile(re.escape(splitter), re.I)
        return pattern.split(self, maxsplit)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def is_decodable(value):
    r"""
    Return True if the supplied value is decodable (using the default
    encoding).

    >>> is_decodable(b'\xff')
    False
    >>> is_decodable(b'\x32')
    True
    """
    # Plain EAFP check; replaces the jaraco.context.ExceptionTrap helper
    # the vendored original used, with identical behavior and no extra
    # dependency.
    try:
        value.decode()
    except UnicodeDecodeError:
        return False
    return True


def is_binary(value):
    r"""
    Return True if the value appears to be binary (that is, it's a byte
    string and isn't decodable).

    >>> is_binary(b'\xff')
    True
    >>> is_binary('\xff')
    False
    """
    return isinstance(value, bytes) and not is_decodable(value)
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
def trim(s):
    r"""
    Trim something like a docstring to remove the whitespace that
    is common due to indentation and formatting.

    >>> trim("\n\tfoo = bar\n\t\tbar = baz\n")
    'foo = bar\n\tbar = baz'
    """
    dedented = textwrap.dedent(s)
    return dedented.strip()
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
def wrap(s):
    """
    Wrap lines of text, retaining existing newlines as
    paragraph markers.

    Each input line is treated as its own paragraph, wrapped to the
    textwrap default width; wrapped paragraphs are joined with blank
    lines.
    """
    wrapped_paragraphs = [
        '\n'.join(textwrap.wrap(paragraph)) for paragraph in s.splitlines()
    ]
    return '\n\n'.join(wrapped_paragraphs)
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
def unwrap(s):
    r"""
    Given a multi-line string, return an unwrapped version.

    Paragraphs (runs of lines separated by blank lines) are each
    collapsed onto a single line; the collapsed paragraphs are then
    joined with single newlines.
    """
    paragraphs = re.split(r'\n\n+', s)
    flattened = [paragraph.replace('\n', ' ') for paragraph in paragraphs]
    return '\n'.join(flattened)
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
class Splitter(object):
    """
    Callable that splits a string with the same arguments on every call.

    >>> s = Splitter(',')
    >>> s('hello, world, this is your, master calling')
    ['hello', ' world', ' this is your', ' master calling']
    """

    def __init__(self, *args):
        # remember the split() arguments for reuse
        self.args = args

    def __call__(self, s):
        return s.split(*self.args)
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
def indent(string, prefix=' ' * 4):
    """
    Prepend ``prefix`` (four spaces by default) to ``string``.

    >>> indent('foo')
    '    foo'
    """
    return prefix + string
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
class WordSet(tuple):
    """
    Given an identifier, return the words that identifier represents,
    whether in camel case, underscore-separated, etc.

    >>> WordSet.parse("camelCase")
    ('camel', 'Case')
    >>> WordSet.parse("under_sep")
    ('under', 'sep')

    Acronyms should be retained

    >>> WordSet.parse("firstSNL")
    ('first', 'SNL')
    >>> WordSet.parse("myABCClass")
    ('my', 'ABC', 'Class')

    The result is a WordSet, so you can get the form you need.

    >>> WordSet.parse("myABCClass").underscore_separated()
    'my_ABC_Class'
    >>> WordSet.parse('a-command').camel_case()
    'ACommand'
    >>> WordSet.parse('someIdentifier').lowered().space_separated()
    'some identifier'

    Slices of the result return another WordSet.

    >>> WordSet.parse('taken-out-of-context')[1:].underscore_separated()
    'out_of_context'

    >>> WordSet.parse('figured it out').headless_camel_case()
    'figuredItOut'
    >>> WordSet.parse('figured it out').dash_separated()
    'figured-it-out'
    """

    # a word is either an (optionally capitalized) lowercase run, or an
    # acronym: an uppercase run not immediately followed by a lowercase
    # letter (so the last cap of "ABCClass" starts the next word)
    _pattern = re.compile('([A-Z]?[a-z]+)|([A-Z]+(?![a-z]))')

    def capitalized(self):
        "Return a new WordSet with each word capitalized."
        return WordSet(word.capitalize() for word in self)

    def lowered(self):
        "Return a new WordSet with each word lowercased."
        return WordSet(word.lower() for word in self)

    def camel_case(self):
        "Join the words in CamelCase."
        return ''.join(self.capitalized())

    def headless_camel_case(self):
        "Join the words in camelCase (first word lowercased)."
        words = iter(self)
        first = next(words).lower()
        new_words = itertools.chain((first,), WordSet(words).camel_case())
        return ''.join(new_words)

    def underscore_separated(self):
        "Join the words with underscores."
        return '_'.join(self)

    def dash_separated(self):
        "Join the words with dashes."
        return '-'.join(self)

    def space_separated(self):
        "Join the words with spaces."
        return ' '.join(self)

    def trim_right(self, item):
        """
        Remove the item from the end of the set.

        >>> WordSet.parse('foo bar').trim_right('foo')
        ('foo', 'bar')
        >>> WordSet.parse('foo bar').trim_right('bar')
        ('foo',)
        >>> WordSet.parse('').trim_right('bar')
        ()
        """
        if self and self[-1] == item:
            return self[:-1]
        return self

    def trim_left(self, item):
        """
        Remove the item from the beginning of the set.

        >>> WordSet.parse('foo bar').trim_left('foo')
        ('bar',)
        >>> WordSet.parse('foo bar').trim_left('bar')
        ('foo', 'bar')
        >>> WordSet.parse('').trim_left('bar')
        ()
        """
        if self and self[0] == item:
            return self[1:]
        return self

    def trim(self, item):
        """
        Remove the item from both ends of the set.

        >>> WordSet.parse('foo bar').trim('foo')
        ('bar',)
        """
        return self.trim_left(item).trim_right(item)

    def __getitem__(self, item):
        result = super().__getitem__(item)
        # slicing should preserve the WordSet type
        if isinstance(item, slice):
            result = WordSet(result)
        return result

    @classmethod
    def parse(cls, identifier):
        "Split ``identifier`` into its component words."
        matches = cls._pattern.finditer(identifier)
        return WordSet(match.group(0) for match in matches)

    @classmethod
    def from_class_name(cls, subject):
        "Parse the name of ``subject``'s class."
        return cls.parse(subject.__class__.__name__)
|
| 379 |
+
|
| 380 |
+
|
| 381 |
+
# for backward compatibility
|
| 382 |
+
words = WordSet.parse
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
def simple_html_strip(s):
    r"""
    Remove HTML from the string `s`.

    >>> str(simple_html_strip(''))
    ''

    >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise'))
    A stormy day in paradise

    >>> print(simple_html_strip('What about<br/>\nmultiple lines?'))
    What about
    multiple lines?
    """
    # alternation matches, in order: comments, tags, runs of plain text;
    # only the plain-text group (3) is kept
    html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL)
    fragments = (match.group(3) or '' for match in html_stripper.finditer(s))
    return ''.join(fragments)
|
| 405 |
+
|
| 406 |
+
|
| 407 |
+
class SeparatedValues(str):
    """
    A string separated by a separator. Overrides __iter__ for getting
    the values.

    >>> list(SeparatedValues('a,b,c'))
    ['a', 'b', 'c']

    Whitespace is stripped and empty values are discarded.

    >>> list(SeparatedValues(' a, b , c, '))
    ['a', 'b', 'c']
    """

    separator = ','

    def __iter__(self):
        stripped = (part.strip() for part in self.split(self.separator))
        # filter(None, ...) discards the empty values
        return filter(None, stripped)
|
| 426 |
+
|
| 427 |
+
|
| 428 |
+
class Stripper:
    r"""
    Given a series of lines, find the common prefix and strip it from them.

    >>> lines = [
    ...     'abcdefg\n',
    ...     'abc\n',
    ...     'abcde\n',
    ... ]
    >>> res = Stripper.strip_prefix(lines)
    >>> res.prefix
    'abc'
    >>> list(res.lines)
    ['defg\n', '\n', 'de\n']

    If no prefix is common, nothing should be stripped.

    >>> res = Stripper.strip_prefix(['abcd\n', '1234\n'])
    >>> res.prefix
    ''
    >>> list(res.lines)
    ['abcd\n', '1234\n']
    """

    def __init__(self, prefix, lines):
        self.prefix = prefix
        # strip lazily: each line is transformed as it is consumed
        self.lines = map(self, lines)

    @classmethod
    def strip_prefix(cls, lines):
        "Compute the common prefix of ``lines`` and return a Stripper."
        prefix_lines, lines = itertools.tee(lines)
        prefix = functools.reduce(cls.common_prefix, prefix_lines)
        return cls(prefix, lines)

    def __call__(self, line):
        if not self.prefix:
            return line
        # keep only what follows the (leading) prefix
        return line.partition(self.prefix)[2]

    @staticmethod
    def common_prefix(s1, s2):
        """
        Return the common prefix of two lines.
        """
        index = min(len(s1), len(s2))
        while s1[:index] != s2[:index]:
            index -= 1
        return s1[:index]
|
| 480 |
+
|
| 481 |
+
|
| 482 |
+
def remove_prefix(text, prefix):
    """
    Remove ``prefix`` from the start of ``text`` if present.

    >>> remove_prefix('underwhelming performance', 'underwhelming ')
    'performance'

    >>> remove_prefix('something special', 'sample')
    'something special'

    A non-leading occurrence of ``prefix`` is left intact.

    >>> remove_prefix('abc-abc', '-abc')
    'abc-abc'
    """
    # The previous rpartition-based implementation also discarded all
    # characters *before* a non-leading occurrence of the prefix (and
    # raised on an empty prefix); only strip a true leading prefix.
    if prefix and text.startswith(prefix):
        return text[len(prefix):]
    return text
|
| 494 |
+
|
| 495 |
+
|
| 496 |
+
def remove_suffix(text, suffix):
    """
    Remove ``suffix`` from the end of ``text`` if present.

    >>> remove_suffix('name.git', '.git')
    'name'

    >>> remove_suffix('something special', 'sample')
    'something special'

    Only a single trailing occurrence is removed.

    >>> remove_suffix('name.git.git', '.git')
    'name.git'
    """
    # The previous partition-based implementation truncated at the
    # *first* occurrence of the suffix anywhere in the text; only strip
    # a true trailing suffix.
    if suffix and text.endswith(suffix):
        return text[: -len(suffix)]
    return text
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
def normalize_newlines(text):
    r"""
    Replace alternate newlines with the canonical newline.

    >>> normalize_newlines('Lorem Ipsum\u2029')
    'Lorem Ipsum\n'
    >>> normalize_newlines('Lorem Ipsum\r\n')
    'Lorem Ipsum\n'
    >>> normalize_newlines('Lorem Ipsum\x85')
    'Lorem Ipsum\n'
    """
    # '\r\n' must come first in the alternation so it is consumed as a
    # single newline rather than two.
    newlines = ['\r\n', '\r', '\n', '\u0085', '\u2028', '\u2029']
    pattern = '|'.join(newlines)
    return re.sub(pattern, '\n', text)
|
| 524 |
+
|
| 525 |
+
|
| 526 |
+
def _nonblank(line):
    "Truthy for lines that are neither empty nor comments."
    return line and not line.startswith('#')


@functools.singledispatch
def yield_lines(iterable):
    r"""
    Yield valid lines of a string or iterable.

    >>> list(yield_lines(''))
    []
    >>> list(yield_lines(['foo', 'bar']))
    ['foo', 'bar']
    >>> list(yield_lines('foo\nbar'))
    ['foo', 'bar']
    >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
    ['foo', 'baz #comment']
    >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
    ['foo', 'bar', 'baz', 'bing']
    """
    # generic case: flatten by recursing into each element
    return itertools.chain.from_iterable(map(yield_lines, iterable))


@yield_lines.register(str)
def _(text):
    # string case: strip each line and drop blanks/comments
    return filter(_nonblank, map(str.strip, text.splitlines()))
|
| 552 |
+
|
| 553 |
+
|
| 554 |
+
def drop_comment(line):
    """
    Drop comments.

    >>> drop_comment('foo # bar')
    'foo'

    A hash without a space may be in a URL.

    >>> drop_comment('http://example.com/foo#bar')
    'http://example.com/foo#bar'
    """
    before, _sep, _comment = line.partition(' #')
    return before
|
| 567 |
+
|
| 568 |
+
|
| 569 |
+
def join_continuation(lines):
    r"""
    Join lines continued by a trailing backslash.

    >>> list(join_continuation(['foo \\', 'bar', 'baz']))
    ['foobar', 'baz']
    >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
    ['foobarbaz']

    Not sure why, but...
    The character preceding the backslash is also elided.

    >>> list(join_continuation(['goo\\', 'dly']))
    ['godly']

    A terrible idea, but...
    If no line is available to continue, suppress the lines.

    >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
    ['foo']
    """
    lines = iter(lines)
    for item in lines:
        while item.endswith('\\'):
            try:
                # drop the backslash AND the character before it,
                # strip, then splice in the continuation line
                item = item[:-2].strip() + next(lines)
            except StopIteration:
                # dangling continuation with nothing left to join:
                # suppress the partial line entirely
                return
        yield item
|