Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- llava/lib/python3.10/distutils/__pycache__/ccompiler.cpython-310.pyc +0 -0
- llava/lib/python3.10/distutils/__pycache__/config.cpython-310.pyc +0 -0
- llava/lib/python3.10/distutils/__pycache__/util.cpython-310.pyc +0 -0
- llava/lib/python3.10/distutils/command/__init__.py +30 -0
- llava/lib/python3.10/distutils/command/__pycache__/bdist_dumb.cpython-310.pyc +0 -0
- llava/lib/python3.10/distutils/command/__pycache__/build_clib.cpython-310.pyc +0 -0
- llava/lib/python3.10/distutils/command/__pycache__/check.cpython-310.pyc +0 -0
- llava/lib/python3.10/distutils/command/__pycache__/sdist.cpython-310.pyc +0 -0
- llava/lib/python3.10/distutils/command/bdist_dumb.py +123 -0
- llava/lib/python3.10/distutils/command/bdist_rpm.py +579 -0
- llava/lib/python3.10/distutils/command/build_ext.py +754 -0
- llava/lib/python3.10/distutils/command/build_py.py +416 -0
- llava/lib/python3.10/distutils/command/clean.py +76 -0
- llava/lib/python3.10/distutils/command/command_template +33 -0
- llava/lib/python3.10/distutils/command/config.py +344 -0
- llava/lib/python3.10/distutils/command/install_headers.py +47 -0
- llava/lib/python3.10/distutils/command/install_lib.py +217 -0
- llava/lib/python3.10/distutils/command/install_scripts.py +60 -0
- llava/lib/python3.10/distutils/command/register.py +304 -0
- llava/lib/python3.10/distutils/command/sdist.py +494 -0
- llava/lib/python3.10/distutils/tests/__pycache__/test_bdist_msi.cpython-310.pyc +0 -0
- llava/lib/python3.10/distutils/tests/__pycache__/test_bdist_rpm.cpython-310.pyc +0 -0
- llava/lib/python3.10/distutils/tests/__pycache__/test_cygwinccompiler.cpython-310.pyc +0 -0
- llava/lib/python3.10/distutils/tests/__pycache__/test_extension.cpython-310.pyc +0 -0
- llava/lib/python3.10/distutils/tests/__pycache__/test_file_util.cpython-310.pyc +0 -0
- llava/lib/python3.10/distutils/tests/__pycache__/test_filelist.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_cast_Short_compositeimplicitautograd_dispatch.h +23 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_cdist_backward_cuda_dispatch.h +23 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_copy_from_and_resize_native.h +21 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_cslt_sparse_mm_search_cuda_dispatch.h +23 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_fft_c2r_ops.h +39 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_log2_native.h +25 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_pow_ops.h +127 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_sqrt_native.h +25 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_lstm_mps.h +39 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_native_batch_norm_legit_cuda_dispatch.h +28 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_nested_get_values_ops.h +28 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_efficient_attention_native.h +22 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/acosh_meta_dispatch.h +26 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/addbmm.h +39 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/argsort.h +49 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/avg_pool2d_backward_compositeexplicitautogradnonfunctional_dispatch.h +23 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/concat.h +53 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/conj_physical_compositeimplicitautograd_dispatch.h +23 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/conv1d_compositeimplicitautograd_dispatch.h +26 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/empty_like_compositeexplicitautograd_dispatch.h +26 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/empty_quantized_compositeexplicitautograd_dispatch.h +24 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/empty_quantized_ops.h +39 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/hann_window_compositeexplicitautograd_dispatch.h +30 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/hinge_embedding_loss_native.h +21 -0
llava/lib/python3.10/distutils/__pycache__/ccompiler.cpython-310.pyc
ADDED
|
Binary file (33.3 kB). View file
|
|
|
llava/lib/python3.10/distutils/__pycache__/config.cpython-310.pyc
ADDED
|
Binary file (3.79 kB). View file
|
|
|
llava/lib/python3.10/distutils/__pycache__/util.cpython-310.pyc
ADDED
|
Binary file (15.6 kB). View file
|
|
|
llava/lib/python3.10/distutils/command/__init__.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command
|
| 2 |
+
|
| 3 |
+
Package containing implementation of all the standard Distutils
|
| 4 |
+
commands."""
|
| 5 |
+
|
| 6 |
+
__all__ = ['build',
|
| 7 |
+
'build_py',
|
| 8 |
+
'build_ext',
|
| 9 |
+
'build_clib',
|
| 10 |
+
'build_scripts',
|
| 11 |
+
'clean',
|
| 12 |
+
'install',
|
| 13 |
+
'install_lib',
|
| 14 |
+
'install_headers',
|
| 15 |
+
'install_scripts',
|
| 16 |
+
'install_data',
|
| 17 |
+
'sdist',
|
| 18 |
+
'register',
|
| 19 |
+
'bdist',
|
| 20 |
+
'bdist_dumb',
|
| 21 |
+
'bdist_rpm',
|
| 22 |
+
'check',
|
| 23 |
+
'upload',
|
| 24 |
+
# These two are reserved for future use:
|
| 25 |
+
#'bdist_sdux',
|
| 26 |
+
#'bdist_pkgtool',
|
| 27 |
+
# Note:
|
| 28 |
+
# bdist_packager is not included because it only provides
|
| 29 |
+
# an abstract base class
|
| 30 |
+
]
|
llava/lib/python3.10/distutils/command/__pycache__/bdist_dumb.cpython-310.pyc
ADDED
|
Binary file (3.58 kB). View file
|
|
|
llava/lib/python3.10/distutils/command/__pycache__/build_clib.cpython-310.pyc
ADDED
|
Binary file (5.07 kB). View file
|
|
|
llava/lib/python3.10/distutils/command/__pycache__/check.cpython-310.pyc
ADDED
|
Binary file (5.21 kB). View file
|
|
|
llava/lib/python3.10/distutils/command/__pycache__/sdist.cpython-310.pyc
ADDED
|
Binary file (14.7 kB). View file
|
|
|
llava/lib/python3.10/distutils/command/bdist_dumb.py
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.bdist_dumb
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'bdist_dumb' command (create a "dumb" built
|
| 4 |
+
distribution -- i.e., just an archive to be unpacked under $prefix or
|
| 5 |
+
$exec_prefix)."""
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
from distutils.core import Command
|
| 9 |
+
from distutils.util import get_platform
|
| 10 |
+
from distutils.dir_util import remove_tree, ensure_relative
|
| 11 |
+
from distutils.errors import *
|
| 12 |
+
from distutils.sysconfig import get_python_version
|
| 13 |
+
from distutils import log
|
| 14 |
+
|
| 15 |
+
class bdist_dumb(Command):
|
| 16 |
+
|
| 17 |
+
description = "create a \"dumb\" built distribution"
|
| 18 |
+
|
| 19 |
+
user_options = [('bdist-dir=', 'd',
|
| 20 |
+
"temporary directory for creating the distribution"),
|
| 21 |
+
('plat-name=', 'p',
|
| 22 |
+
"platform name to embed in generated filenames "
|
| 23 |
+
"(default: %s)" % get_platform()),
|
| 24 |
+
('format=', 'f',
|
| 25 |
+
"archive format to create (tar, gztar, bztar, xztar, "
|
| 26 |
+
"ztar, zip)"),
|
| 27 |
+
('keep-temp', 'k',
|
| 28 |
+
"keep the pseudo-installation tree around after " +
|
| 29 |
+
"creating the distribution archive"),
|
| 30 |
+
('dist-dir=', 'd',
|
| 31 |
+
"directory to put final built distributions in"),
|
| 32 |
+
('skip-build', None,
|
| 33 |
+
"skip rebuilding everything (for testing/debugging)"),
|
| 34 |
+
('relative', None,
|
| 35 |
+
"build the archive using relative paths "
|
| 36 |
+
"(default: false)"),
|
| 37 |
+
('owner=', 'u',
|
| 38 |
+
"Owner name used when creating a tar file"
|
| 39 |
+
" [default: current user]"),
|
| 40 |
+
('group=', 'g',
|
| 41 |
+
"Group name used when creating a tar file"
|
| 42 |
+
" [default: current group]"),
|
| 43 |
+
]
|
| 44 |
+
|
| 45 |
+
boolean_options = ['keep-temp', 'skip-build', 'relative']
|
| 46 |
+
|
| 47 |
+
default_format = { 'posix': 'gztar',
|
| 48 |
+
'nt': 'zip' }
|
| 49 |
+
|
| 50 |
+
def initialize_options(self):
|
| 51 |
+
self.bdist_dir = None
|
| 52 |
+
self.plat_name = None
|
| 53 |
+
self.format = None
|
| 54 |
+
self.keep_temp = 0
|
| 55 |
+
self.dist_dir = None
|
| 56 |
+
self.skip_build = None
|
| 57 |
+
self.relative = 0
|
| 58 |
+
self.owner = None
|
| 59 |
+
self.group = None
|
| 60 |
+
|
| 61 |
+
def finalize_options(self):
|
| 62 |
+
if self.bdist_dir is None:
|
| 63 |
+
bdist_base = self.get_finalized_command('bdist').bdist_base
|
| 64 |
+
self.bdist_dir = os.path.join(bdist_base, 'dumb')
|
| 65 |
+
|
| 66 |
+
if self.format is None:
|
| 67 |
+
try:
|
| 68 |
+
self.format = self.default_format[os.name]
|
| 69 |
+
except KeyError:
|
| 70 |
+
raise DistutilsPlatformError(
|
| 71 |
+
"don't know how to create dumb built distributions "
|
| 72 |
+
"on platform %s" % os.name)
|
| 73 |
+
|
| 74 |
+
self.set_undefined_options('bdist',
|
| 75 |
+
('dist_dir', 'dist_dir'),
|
| 76 |
+
('plat_name', 'plat_name'),
|
| 77 |
+
('skip_build', 'skip_build'))
|
| 78 |
+
|
| 79 |
+
def run(self):
|
| 80 |
+
if not self.skip_build:
|
| 81 |
+
self.run_command('build')
|
| 82 |
+
|
| 83 |
+
install = self.reinitialize_command('install', reinit_subcommands=1)
|
| 84 |
+
install.root = self.bdist_dir
|
| 85 |
+
install.skip_build = self.skip_build
|
| 86 |
+
install.warn_dir = 0
|
| 87 |
+
|
| 88 |
+
log.info("installing to %s", self.bdist_dir)
|
| 89 |
+
self.run_command('install')
|
| 90 |
+
|
| 91 |
+
# And make an archive relative to the root of the
|
| 92 |
+
# pseudo-installation tree.
|
| 93 |
+
archive_basename = "%s.%s" % (self.distribution.get_fullname(),
|
| 94 |
+
self.plat_name)
|
| 95 |
+
|
| 96 |
+
pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
|
| 97 |
+
if not self.relative:
|
| 98 |
+
archive_root = self.bdist_dir
|
| 99 |
+
else:
|
| 100 |
+
if (self.distribution.has_ext_modules() and
|
| 101 |
+
(install.install_base != install.install_platbase)):
|
| 102 |
+
raise DistutilsPlatformError(
|
| 103 |
+
"can't make a dumb built distribution where "
|
| 104 |
+
"base and platbase are different (%s, %s)"
|
| 105 |
+
% (repr(install.install_base),
|
| 106 |
+
repr(install.install_platbase)))
|
| 107 |
+
else:
|
| 108 |
+
archive_root = os.path.join(self.bdist_dir,
|
| 109 |
+
ensure_relative(install.install_base))
|
| 110 |
+
|
| 111 |
+
# Make the archive
|
| 112 |
+
filename = self.make_archive(pseudoinstall_root,
|
| 113 |
+
self.format, root_dir=archive_root,
|
| 114 |
+
owner=self.owner, group=self.group)
|
| 115 |
+
if self.distribution.has_ext_modules():
|
| 116 |
+
pyversion = get_python_version()
|
| 117 |
+
else:
|
| 118 |
+
pyversion = 'any'
|
| 119 |
+
self.distribution.dist_files.append(('bdist_dumb', pyversion,
|
| 120 |
+
filename))
|
| 121 |
+
|
| 122 |
+
if not self.keep_temp:
|
| 123 |
+
remove_tree(self.bdist_dir, dry_run=self.dry_run)
|
llava/lib/python3.10/distutils/command/bdist_rpm.py
ADDED
|
@@ -0,0 +1,579 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.bdist_rpm
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'bdist_rpm' command (create RPM source and binary
|
| 4 |
+
distributions)."""
|
| 5 |
+
|
| 6 |
+
import subprocess, sys, os
|
| 7 |
+
from distutils.core import Command
|
| 8 |
+
from distutils.debug import DEBUG
|
| 9 |
+
from distutils.file_util import write_file
|
| 10 |
+
from distutils.errors import *
|
| 11 |
+
from distutils.sysconfig import get_python_version
|
| 12 |
+
from distutils import log
|
| 13 |
+
|
| 14 |
+
class bdist_rpm(Command):
|
| 15 |
+
|
| 16 |
+
description = "create an RPM distribution"
|
| 17 |
+
|
| 18 |
+
user_options = [
|
| 19 |
+
('bdist-base=', None,
|
| 20 |
+
"base directory for creating built distributions"),
|
| 21 |
+
('rpm-base=', None,
|
| 22 |
+
"base directory for creating RPMs (defaults to \"rpm\" under "
|
| 23 |
+
"--bdist-base; must be specified for RPM 2)"),
|
| 24 |
+
('dist-dir=', 'd',
|
| 25 |
+
"directory to put final RPM files in "
|
| 26 |
+
"(and .spec files if --spec-only)"),
|
| 27 |
+
('python=', None,
|
| 28 |
+
"path to Python interpreter to hard-code in the .spec file "
|
| 29 |
+
"(default: \"python\")"),
|
| 30 |
+
('fix-python', None,
|
| 31 |
+
"hard-code the exact path to the current Python interpreter in "
|
| 32 |
+
"the .spec file"),
|
| 33 |
+
('spec-only', None,
|
| 34 |
+
"only regenerate spec file"),
|
| 35 |
+
('source-only', None,
|
| 36 |
+
"only generate source RPM"),
|
| 37 |
+
('binary-only', None,
|
| 38 |
+
"only generate binary RPM"),
|
| 39 |
+
('use-bzip2', None,
|
| 40 |
+
"use bzip2 instead of gzip to create source distribution"),
|
| 41 |
+
|
| 42 |
+
# More meta-data: too RPM-specific to put in the setup script,
|
| 43 |
+
# but needs to go in the .spec file -- so we make these options
|
| 44 |
+
# to "bdist_rpm". The idea is that packagers would put this
|
| 45 |
+
# info in setup.cfg, although they are of course free to
|
| 46 |
+
# supply it on the command line.
|
| 47 |
+
('distribution-name=', None,
|
| 48 |
+
"name of the (Linux) distribution to which this "
|
| 49 |
+
"RPM applies (*not* the name of the module distribution!)"),
|
| 50 |
+
('group=', None,
|
| 51 |
+
"package classification [default: \"Development/Libraries\"]"),
|
| 52 |
+
('release=', None,
|
| 53 |
+
"RPM release number"),
|
| 54 |
+
('serial=', None,
|
| 55 |
+
"RPM serial number"),
|
| 56 |
+
('vendor=', None,
|
| 57 |
+
"RPM \"vendor\" (eg. \"Joe Blow <joe@example.com>\") "
|
| 58 |
+
"[default: maintainer or author from setup script]"),
|
| 59 |
+
('packager=', None,
|
| 60 |
+
"RPM packager (eg. \"Jane Doe <jane@example.net>\") "
|
| 61 |
+
"[default: vendor]"),
|
| 62 |
+
('doc-files=', None,
|
| 63 |
+
"list of documentation files (space or comma-separated)"),
|
| 64 |
+
('changelog=', None,
|
| 65 |
+
"RPM changelog"),
|
| 66 |
+
('icon=', None,
|
| 67 |
+
"name of icon file"),
|
| 68 |
+
('provides=', None,
|
| 69 |
+
"capabilities provided by this package"),
|
| 70 |
+
('requires=', None,
|
| 71 |
+
"capabilities required by this package"),
|
| 72 |
+
('conflicts=', None,
|
| 73 |
+
"capabilities which conflict with this package"),
|
| 74 |
+
('build-requires=', None,
|
| 75 |
+
"capabilities required to build this package"),
|
| 76 |
+
('obsoletes=', None,
|
| 77 |
+
"capabilities made obsolete by this package"),
|
| 78 |
+
('no-autoreq', None,
|
| 79 |
+
"do not automatically calculate dependencies"),
|
| 80 |
+
|
| 81 |
+
# Actions to take when building RPM
|
| 82 |
+
('keep-temp', 'k',
|
| 83 |
+
"don't clean up RPM build directory"),
|
| 84 |
+
('no-keep-temp', None,
|
| 85 |
+
"clean up RPM build directory [default]"),
|
| 86 |
+
('use-rpm-opt-flags', None,
|
| 87 |
+
"compile with RPM_OPT_FLAGS when building from source RPM"),
|
| 88 |
+
('no-rpm-opt-flags', None,
|
| 89 |
+
"do not pass any RPM CFLAGS to compiler"),
|
| 90 |
+
('rpm3-mode', None,
|
| 91 |
+
"RPM 3 compatibility mode (default)"),
|
| 92 |
+
('rpm2-mode', None,
|
| 93 |
+
"RPM 2 compatibility mode"),
|
| 94 |
+
|
| 95 |
+
# Add the hooks necessary for specifying custom scripts
|
| 96 |
+
('prep-script=', None,
|
| 97 |
+
"Specify a script for the PREP phase of RPM building"),
|
| 98 |
+
('build-script=', None,
|
| 99 |
+
"Specify a script for the BUILD phase of RPM building"),
|
| 100 |
+
|
| 101 |
+
('pre-install=', None,
|
| 102 |
+
"Specify a script for the pre-INSTALL phase of RPM building"),
|
| 103 |
+
('install-script=', None,
|
| 104 |
+
"Specify a script for the INSTALL phase of RPM building"),
|
| 105 |
+
('post-install=', None,
|
| 106 |
+
"Specify a script for the post-INSTALL phase of RPM building"),
|
| 107 |
+
|
| 108 |
+
('pre-uninstall=', None,
|
| 109 |
+
"Specify a script for the pre-UNINSTALL phase of RPM building"),
|
| 110 |
+
('post-uninstall=', None,
|
| 111 |
+
"Specify a script for the post-UNINSTALL phase of RPM building"),
|
| 112 |
+
|
| 113 |
+
('clean-script=', None,
|
| 114 |
+
"Specify a script for the CLEAN phase of RPM building"),
|
| 115 |
+
|
| 116 |
+
('verify-script=', None,
|
| 117 |
+
"Specify a script for the VERIFY phase of the RPM build"),
|
| 118 |
+
|
| 119 |
+
# Allow a packager to explicitly force an architecture
|
| 120 |
+
('force-arch=', None,
|
| 121 |
+
"Force an architecture onto the RPM build process"),
|
| 122 |
+
|
| 123 |
+
('quiet', 'q',
|
| 124 |
+
"Run the INSTALL phase of RPM building in quiet mode"),
|
| 125 |
+
]
|
| 126 |
+
|
| 127 |
+
boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode',
|
| 128 |
+
'no-autoreq', 'quiet']
|
| 129 |
+
|
| 130 |
+
negative_opt = {'no-keep-temp': 'keep-temp',
|
| 131 |
+
'no-rpm-opt-flags': 'use-rpm-opt-flags',
|
| 132 |
+
'rpm2-mode': 'rpm3-mode'}
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def initialize_options(self):
|
| 136 |
+
self.bdist_base = None
|
| 137 |
+
self.rpm_base = None
|
| 138 |
+
self.dist_dir = None
|
| 139 |
+
self.python = None
|
| 140 |
+
self.fix_python = None
|
| 141 |
+
self.spec_only = None
|
| 142 |
+
self.binary_only = None
|
| 143 |
+
self.source_only = None
|
| 144 |
+
self.use_bzip2 = None
|
| 145 |
+
|
| 146 |
+
self.distribution_name = None
|
| 147 |
+
self.group = None
|
| 148 |
+
self.release = None
|
| 149 |
+
self.serial = None
|
| 150 |
+
self.vendor = None
|
| 151 |
+
self.packager = None
|
| 152 |
+
self.doc_files = None
|
| 153 |
+
self.changelog = None
|
| 154 |
+
self.icon = None
|
| 155 |
+
|
| 156 |
+
self.prep_script = None
|
| 157 |
+
self.build_script = None
|
| 158 |
+
self.install_script = None
|
| 159 |
+
self.clean_script = None
|
| 160 |
+
self.verify_script = None
|
| 161 |
+
self.pre_install = None
|
| 162 |
+
self.post_install = None
|
| 163 |
+
self.pre_uninstall = None
|
| 164 |
+
self.post_uninstall = None
|
| 165 |
+
self.prep = None
|
| 166 |
+
self.provides = None
|
| 167 |
+
self.requires = None
|
| 168 |
+
self.conflicts = None
|
| 169 |
+
self.build_requires = None
|
| 170 |
+
self.obsoletes = None
|
| 171 |
+
|
| 172 |
+
self.keep_temp = 0
|
| 173 |
+
self.use_rpm_opt_flags = 1
|
| 174 |
+
self.rpm3_mode = 1
|
| 175 |
+
self.no_autoreq = 0
|
| 176 |
+
|
| 177 |
+
self.force_arch = None
|
| 178 |
+
self.quiet = 0
|
| 179 |
+
|
| 180 |
+
def finalize_options(self):
|
| 181 |
+
self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
|
| 182 |
+
if self.rpm_base is None:
|
| 183 |
+
if not self.rpm3_mode:
|
| 184 |
+
raise DistutilsOptionError(
|
| 185 |
+
"you must specify --rpm-base in RPM 2 mode")
|
| 186 |
+
self.rpm_base = os.path.join(self.bdist_base, "rpm")
|
| 187 |
+
|
| 188 |
+
if self.python is None:
|
| 189 |
+
if self.fix_python:
|
| 190 |
+
self.python = sys.executable
|
| 191 |
+
else:
|
| 192 |
+
self.python = "python3"
|
| 193 |
+
elif self.fix_python:
|
| 194 |
+
raise DistutilsOptionError(
|
| 195 |
+
"--python and --fix-python are mutually exclusive options")
|
| 196 |
+
|
| 197 |
+
if os.name != 'posix':
|
| 198 |
+
raise DistutilsPlatformError("don't know how to create RPM "
|
| 199 |
+
"distributions on platform %s" % os.name)
|
| 200 |
+
if self.binary_only and self.source_only:
|
| 201 |
+
raise DistutilsOptionError(
|
| 202 |
+
"cannot supply both '--source-only' and '--binary-only'")
|
| 203 |
+
|
| 204 |
+
# don't pass CFLAGS to pure python distributions
|
| 205 |
+
if not self.distribution.has_ext_modules():
|
| 206 |
+
self.use_rpm_opt_flags = 0
|
| 207 |
+
|
| 208 |
+
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
|
| 209 |
+
self.finalize_package_data()
|
| 210 |
+
|
| 211 |
+
def finalize_package_data(self):
|
| 212 |
+
self.ensure_string('group', "Development/Libraries")
|
| 213 |
+
self.ensure_string('vendor',
|
| 214 |
+
"%s <%s>" % (self.distribution.get_contact(),
|
| 215 |
+
self.distribution.get_contact_email()))
|
| 216 |
+
self.ensure_string('packager')
|
| 217 |
+
self.ensure_string_list('doc_files')
|
| 218 |
+
if isinstance(self.doc_files, list):
|
| 219 |
+
for readme in ('README', 'README.txt'):
|
| 220 |
+
if os.path.exists(readme) and readme not in self.doc_files:
|
| 221 |
+
self.doc_files.append(readme)
|
| 222 |
+
|
| 223 |
+
self.ensure_string('release', "1")
|
| 224 |
+
self.ensure_string('serial') # should it be an int?
|
| 225 |
+
|
| 226 |
+
self.ensure_string('distribution_name')
|
| 227 |
+
|
| 228 |
+
self.ensure_string('changelog')
|
| 229 |
+
# Format changelog correctly
|
| 230 |
+
self.changelog = self._format_changelog(self.changelog)
|
| 231 |
+
|
| 232 |
+
self.ensure_filename('icon')
|
| 233 |
+
|
| 234 |
+
self.ensure_filename('prep_script')
|
| 235 |
+
self.ensure_filename('build_script')
|
| 236 |
+
self.ensure_filename('install_script')
|
| 237 |
+
self.ensure_filename('clean_script')
|
| 238 |
+
self.ensure_filename('verify_script')
|
| 239 |
+
self.ensure_filename('pre_install')
|
| 240 |
+
self.ensure_filename('post_install')
|
| 241 |
+
self.ensure_filename('pre_uninstall')
|
| 242 |
+
self.ensure_filename('post_uninstall')
|
| 243 |
+
|
| 244 |
+
# XXX don't forget we punted on summaries and descriptions -- they
|
| 245 |
+
# should be handled here eventually!
|
| 246 |
+
|
| 247 |
+
# Now *this* is some meta-data that belongs in the setup script...
|
| 248 |
+
self.ensure_string_list('provides')
|
| 249 |
+
self.ensure_string_list('requires')
|
| 250 |
+
self.ensure_string_list('conflicts')
|
| 251 |
+
self.ensure_string_list('build_requires')
|
| 252 |
+
self.ensure_string_list('obsoletes')
|
| 253 |
+
|
| 254 |
+
self.ensure_string('force_arch')
|
| 255 |
+
|
| 256 |
+
def run(self):
|
| 257 |
+
if DEBUG:
|
| 258 |
+
print("before _get_package_data():")
|
| 259 |
+
print("vendor =", self.vendor)
|
| 260 |
+
print("packager =", self.packager)
|
| 261 |
+
print("doc_files =", self.doc_files)
|
| 262 |
+
print("changelog =", self.changelog)
|
| 263 |
+
|
| 264 |
+
# make directories
|
| 265 |
+
if self.spec_only:
|
| 266 |
+
spec_dir = self.dist_dir
|
| 267 |
+
self.mkpath(spec_dir)
|
| 268 |
+
else:
|
| 269 |
+
rpm_dir = {}
|
| 270 |
+
for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'):
|
| 271 |
+
rpm_dir[d] = os.path.join(self.rpm_base, d)
|
| 272 |
+
self.mkpath(rpm_dir[d])
|
| 273 |
+
spec_dir = rpm_dir['SPECS']
|
| 274 |
+
|
| 275 |
+
# Spec file goes into 'dist_dir' if '--spec-only specified',
|
| 276 |
+
# build/rpm.<plat> otherwise.
|
| 277 |
+
spec_path = os.path.join(spec_dir,
|
| 278 |
+
"%s.spec" % self.distribution.get_name())
|
| 279 |
+
self.execute(write_file,
|
| 280 |
+
(spec_path,
|
| 281 |
+
self._make_spec_file()),
|
| 282 |
+
"writing '%s'" % spec_path)
|
| 283 |
+
|
| 284 |
+
if self.spec_only: # stop if requested
|
| 285 |
+
return
|
| 286 |
+
|
| 287 |
+
# Make a source distribution and copy to SOURCES directory with
|
| 288 |
+
# optional icon.
|
| 289 |
+
saved_dist_files = self.distribution.dist_files[:]
|
| 290 |
+
sdist = self.reinitialize_command('sdist')
|
| 291 |
+
if self.use_bzip2:
|
| 292 |
+
sdist.formats = ['bztar']
|
| 293 |
+
else:
|
| 294 |
+
sdist.formats = ['gztar']
|
| 295 |
+
self.run_command('sdist')
|
| 296 |
+
self.distribution.dist_files = saved_dist_files
|
| 297 |
+
|
| 298 |
+
source = sdist.get_archive_files()[0]
|
| 299 |
+
source_dir = rpm_dir['SOURCES']
|
| 300 |
+
self.copy_file(source, source_dir)
|
| 301 |
+
|
| 302 |
+
if self.icon:
|
| 303 |
+
if os.path.exists(self.icon):
|
| 304 |
+
self.copy_file(self.icon, source_dir)
|
| 305 |
+
else:
|
| 306 |
+
raise DistutilsFileError(
|
| 307 |
+
"icon file '%s' does not exist" % self.icon)
|
| 308 |
+
|
| 309 |
+
# build package
|
| 310 |
+
log.info("building RPMs")
|
| 311 |
+
rpm_cmd = ['rpmbuild']
|
| 312 |
+
|
| 313 |
+
if self.source_only: # what kind of RPMs?
|
| 314 |
+
rpm_cmd.append('-bs')
|
| 315 |
+
elif self.binary_only:
|
| 316 |
+
rpm_cmd.append('-bb')
|
| 317 |
+
else:
|
| 318 |
+
rpm_cmd.append('-ba')
|
| 319 |
+
rpm_cmd.extend(['--define', '__python %s' % self.python])
|
| 320 |
+
if self.rpm3_mode:
|
| 321 |
+
rpm_cmd.extend(['--define',
|
| 322 |
+
'_topdir %s' % os.path.abspath(self.rpm_base)])
|
| 323 |
+
if not self.keep_temp:
|
| 324 |
+
rpm_cmd.append('--clean')
|
| 325 |
+
|
| 326 |
+
if self.quiet:
|
| 327 |
+
rpm_cmd.append('--quiet')
|
| 328 |
+
|
| 329 |
+
rpm_cmd.append(spec_path)
|
| 330 |
+
# Determine the binary rpm names that should be built out of this spec
|
| 331 |
+
# file
|
| 332 |
+
# Note that some of these may not be really built (if the file
|
| 333 |
+
# list is empty)
|
| 334 |
+
nvr_string = "%{name}-%{version}-%{release}"
|
| 335 |
+
src_rpm = nvr_string + ".src.rpm"
|
| 336 |
+
non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
|
| 337 |
+
q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % (
|
| 338 |
+
src_rpm, non_src_rpm, spec_path)
|
| 339 |
+
|
| 340 |
+
out = os.popen(q_cmd)
|
| 341 |
+
try:
|
| 342 |
+
binary_rpms = []
|
| 343 |
+
source_rpm = None
|
| 344 |
+
while True:
|
| 345 |
+
line = out.readline()
|
| 346 |
+
if not line:
|
| 347 |
+
break
|
| 348 |
+
l = line.strip().split()
|
| 349 |
+
assert(len(l) == 2)
|
| 350 |
+
binary_rpms.append(l[1])
|
| 351 |
+
# The source rpm is named after the first entry in the spec file
|
| 352 |
+
if source_rpm is None:
|
| 353 |
+
source_rpm = l[0]
|
| 354 |
+
|
| 355 |
+
status = out.close()
|
| 356 |
+
if status:
|
| 357 |
+
raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))
|
| 358 |
+
|
| 359 |
+
finally:
|
| 360 |
+
out.close()
|
| 361 |
+
|
| 362 |
+
self.spawn(rpm_cmd)
|
| 363 |
+
|
| 364 |
+
if not self.dry_run:
|
| 365 |
+
if self.distribution.has_ext_modules():
|
| 366 |
+
pyversion = get_python_version()
|
| 367 |
+
else:
|
| 368 |
+
pyversion = 'any'
|
| 369 |
+
|
| 370 |
+
if not self.binary_only:
|
| 371 |
+
srpm = os.path.join(rpm_dir['SRPMS'], source_rpm)
|
| 372 |
+
assert(os.path.exists(srpm))
|
| 373 |
+
self.move_file(srpm, self.dist_dir)
|
| 374 |
+
filename = os.path.join(self.dist_dir, source_rpm)
|
| 375 |
+
self.distribution.dist_files.append(
|
| 376 |
+
('bdist_rpm', pyversion, filename))
|
| 377 |
+
|
| 378 |
+
if not self.source_only:
|
| 379 |
+
for rpm in binary_rpms:
|
| 380 |
+
rpm = os.path.join(rpm_dir['RPMS'], rpm)
|
| 381 |
+
if os.path.exists(rpm):
|
| 382 |
+
self.move_file(rpm, self.dist_dir)
|
| 383 |
+
filename = os.path.join(self.dist_dir,
|
| 384 |
+
os.path.basename(rpm))
|
| 385 |
+
self.distribution.dist_files.append(
|
| 386 |
+
('bdist_rpm', pyversion, filename))
|
| 387 |
+
|
| 388 |
+
def _dist_path(self, path):
|
| 389 |
+
return os.path.join(self.dist_dir, os.path.basename(path))
|
| 390 |
+
|
| 391 |
+
    def _make_spec_file(self):
        """Generate the text of an RPM spec file and return it as a
        list of strings (one per line).

        The spec is assembled strictly in RPM section order: %define
        headers, package tags, %description, scripts, %files, %changelog.
        """
        # definitions and headers
        spec_file = [
            '%define name ' + self.distribution.get_name(),
            '%define version ' + self.distribution.get_version().replace('-','_'),
            '%define unmangled_version ' + self.distribution.get_version(),
            '%define release ' + self.release.replace('-','_'),
            '',
            'Summary: ' + self.distribution.get_description(),
            ]

        # Workaround for #14443 which affects some RPM based systems such as
        # RHEL6 (and probably derivatives)
        # Query the system's current value of the __os_install_post macro.
        vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}')
        # Generate a potential replacement value for __os_install_post (whilst
        # normalizing the whitespace to simplify the test for whether the
        # invocation of brp-python-bytecompile passes in __python):
        vendor_hook = '\n'.join(['  %s \\' % line.strip()
                                 for line in vendor_hook.splitlines()])
        problem = "brp-python-bytecompile \\\n"
        fixed = "brp-python-bytecompile %{__python} \\\n"
        fixed_hook = vendor_hook.replace(problem, fixed)
        # Only override the macro if the replacement actually changed it.
        if fixed_hook != vendor_hook:
            spec_file.append('# Workaround for http://bugs.python.org/issue14443')
            spec_file.append('%define __os_install_post ' + fixed_hook + '\n')

        # put locale summaries into spec file
        # XXX not supported for now (hard to put a dictionary
        # in a config file -- arg!)
        #for locale in self.summaries.keys():
        #    spec_file.append('Summary(%s): %s' % (locale,
        #                                          self.summaries[locale]))

        spec_file.extend([
            'Name: %{name}',
            'Version: %{version}',
            'Release: %{release}',])

        # XXX yuck! this filename is available from the "sdist" command,
        # but only after it has run: and we create the spec file before
        # running "sdist", in case of --spec-only.
        if self.use_bzip2:
            spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
        else:
            spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')

        spec_file.extend([
            'License: ' + self.distribution.get_license(),
            'Group: ' + self.group,
            'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
            'Prefix: %{_prefix}', ])

        if not self.force_arch:
            # noarch if no extension modules
            if not self.distribution.has_ext_modules():
                spec_file.append('BuildArch: noarch')
        else:
            spec_file.append( 'BuildArch: %s' % self.force_arch )

        # Optional single-valued or list-valued package tags; a list is
        # joined with spaces, None means "omit the tag entirely".
        for field in ('Vendor',
                      'Packager',
                      'Provides',
                      'Requires',
                      'Conflicts',
                      'Obsoletes',
                      ):
            val = getattr(self, field.lower())
            if isinstance(val, list):
                spec_file.append('%s: %s' % (field, ' '.join(val)))
            elif val is not None:
                spec_file.append('%s: %s' % (field, val))


        # 'UNKNOWN' is distutils' placeholder for a missing url.
        if self.distribution.get_url() != 'UNKNOWN':
            spec_file.append('Url: ' + self.distribution.get_url())

        if self.distribution_name:
            spec_file.append('Distribution: ' + self.distribution_name)

        if self.build_requires:
            spec_file.append('BuildRequires: ' +
                             ' '.join(self.build_requires))

        if self.icon:
            spec_file.append('Icon: ' + os.path.basename(self.icon))

        if self.no_autoreq:
            spec_file.append('AutoReq: 0')

        spec_file.extend([
            '',
            '%description',
            self.distribution.get_long_description()
            ])

        # put locale descriptions into spec file
        # XXX again, suppressed because config file syntax doesn't
        # easily support this ;-(
        #for locale in self.descriptions.keys():
        #    spec_file.extend([
        #        '',
        #        '%description -l ' + locale,
        #        self.descriptions[locale],
        #        ])

        # rpm scripts
        # figure out default build script
        def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0]))
        def_build = "%s build" % def_setup_call
        if self.use_rpm_opt_flags:
            def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build

        # insert contents of files

        # XXX this is kind of misleading: user-supplied options are files
        # that we open and interpolate into the spec file, but the defaults
        # are just text that we drop in as-is.  Hmmm.

        install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT '
                       '--record=INSTALLED_FILES') % def_setup_call

        # (RPM section name, command attribute holding a script *filename*,
        #  default script text used when the attribute is unset)
        script_options = [
            ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
            ('build', 'build_script', def_build),
            ('install', 'install_script', install_cmd),
            ('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
            ('verifyscript', 'verify_script', None),
            ('pre', 'pre_install', None),
            ('post', 'post_install', None),
            ('preun', 'pre_uninstall', None),
            ('postun', 'post_uninstall', None),
        ]

        for (rpm_opt, attr, default) in script_options:
            # Insert contents of file referred to, if no file is referred to
            # use 'default' as contents of script
            val = getattr(self, attr)
            if val or default:
                spec_file.extend([
                    '',
                    '%' + rpm_opt,])
                if val:
                    with open(val) as f:
                        spec_file.extend(f.read().split('\n'))
                else:
                    spec_file.append(default)


        # files section
        spec_file.extend([
            '',
            '%files -f INSTALLED_FILES',
            '%defattr(-,root,root)',
            ])

        if self.doc_files:
            spec_file.append('%doc ' + ' '.join(self.doc_files))

        if self.changelog:
            # self.changelog has already been normalized by
            # _format_changelog() during finalize_options.
            spec_file.extend([
                '',
                '%changelog',])
            spec_file.extend(self.changelog)

        return spec_file
| 559 |
+
|
| 560 |
+
def _format_changelog(self, changelog):
|
| 561 |
+
"""Format the changelog correctly and convert it to a list of strings
|
| 562 |
+
"""
|
| 563 |
+
if not changelog:
|
| 564 |
+
return changelog
|
| 565 |
+
new_changelog = []
|
| 566 |
+
for line in changelog.strip().split('\n'):
|
| 567 |
+
line = line.strip()
|
| 568 |
+
if line[0] == '*':
|
| 569 |
+
new_changelog.extend(['', line])
|
| 570 |
+
elif line[0] == '-':
|
| 571 |
+
new_changelog.append(line)
|
| 572 |
+
else:
|
| 573 |
+
new_changelog.append(' ' + line)
|
| 574 |
+
|
| 575 |
+
# strip trailing newline inserted by first changelog entry
|
| 576 |
+
if not new_changelog[0]:
|
| 577 |
+
del new_changelog[0]
|
| 578 |
+
|
| 579 |
+
return new_changelog
|
llava/lib/python3.10/distutils/command/build_ext.py
ADDED
|
@@ -0,0 +1,754 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.build_ext
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'build_ext' command, for building extension
|
| 4 |
+
modules (currently limited to C extensions, should accommodate C++
|
| 5 |
+
extensions ASAP)."""
|
| 6 |
+
|
| 7 |
+
import contextlib
|
| 8 |
+
import os
|
| 9 |
+
import re
|
| 10 |
+
import sys
|
| 11 |
+
from distutils.core import Command
|
| 12 |
+
from distutils.errors import *
|
| 13 |
+
from distutils.sysconfig import customize_compiler, get_python_version
|
| 14 |
+
from distutils.sysconfig import get_config_h_filename
|
| 15 |
+
from distutils.dep_util import newer_group
|
| 16 |
+
from distutils.extension import Extension
|
| 17 |
+
from distutils.util import get_platform
|
| 18 |
+
from distutils import log
|
| 19 |
+
|
| 20 |
+
from site import USER_BASE
|
| 21 |
+
|
| 22 |
+
# An extension name is a dot-separated sequence of Python identifiers,
# i.e. the same form as a fully-qualified module name.
extension_name_re = re.compile(
    r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def show_compilers ():
    """Print the list of available compiler types (for --help-compiler).

    Delegates to distutils.ccompiler.show_compilers; imported locally to
    avoid a module-level import cycle with ccompiler.
    """
    from distutils.ccompiler import show_compilers
    show_compilers()
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class build_ext(Command):
    # Command that compiles and links C/C++ extension modules into the
    # build directory (or in place with --inplace).  Only the class-level
    # option tables are defined here; behavior lives in the methods below.

    description = "build C/C++ extensions (compile/link to build directory)"

    # XXX thoughts on how to deal with complex command-line options like
    # these, i.e. how to make it so fancy_getopt can suck them off the
    # command line and make it look like setup.py defined the appropriate
    # lists of tuples of what-have-you.
    #   - each command needs a callback to process its command-line options
    #   - Command.__init__() needs access to its share of the whole
    #     command line (must ultimately come from
    #     Distribution.parse_command_line())
    #   - it then calls the current command class' option-parsing
    #     callback to deal with weird options like -D, which have to
    #     parse the option text and churn out some custom data
    #     structure
    #   - that data structure (in this case, a list of 2-tuples)
    #     will then be present in the command object by the time
    #     we get to finalize_options() (i.e. the constructor
    #     takes care of both command-line and client options
    #     in between initialize_options() and finalize_options())

    # Suffix for help strings of options that take a path-list value.
    sep_by = " (separated by '%s')" % os.pathsep
    # (long name, short name, help text) triples consumed by fancy_getopt.
    user_options = [
        ('build-lib=', 'b',
         "directory for compiled extension modules"),
        ('build-temp=', 't',
         "directory for temporary files (build by-products)"),
        ('plat-name=', 'p',
         "platform name to cross-compile for, if supported "
         "(default: %s)" % get_platform()),
        ('inplace', 'i',
         "ignore build-lib and put compiled extensions into the source " +
         "directory alongside your pure Python modules"),
        ('include-dirs=', 'I',
         "list of directories to search for header files" + sep_by),
        ('define=', 'D',
         "C preprocessor macros to define"),
        ('undef=', 'U',
         "C preprocessor macros to undefine"),
        ('libraries=', 'l',
         "external C libraries to link with"),
        ('library-dirs=', 'L',
         "directories to search for external C libraries" + sep_by),
        ('rpath=', 'R',
         "directories to search for shared C libraries at runtime"),
        ('link-objects=', 'O',
         "extra explicit link objects to include in the link"),
        ('debug', 'g',
         "compile/link with debugging information"),
        ('force', 'f',
         "forcibly build everything (ignore file timestamps)"),
        ('compiler=', 'c',
         "specify the compiler type"),
        ('parallel=', 'j',
         "number of parallel build jobs"),
        ('swig-cpp', None,
         "make SWIG create C++ files (default is C)"),
        ('swig-opts=', None,
         "list of SWIG command line options"),
        ('swig=', None,
         "path to the SWIG executable"),
        ('user', None,
         "add user include, library and rpath")
        ]

    # Options that are flags (no value) for fancy_getopt.
    boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user']

    # Options that print information and exit instead of building.
    help_options = [
        ('help-compiler', None,
         "list available compilers", show_compilers),
        ]
| 105 |
+
|
| 106 |
+
def initialize_options(self):
|
| 107 |
+
self.extensions = None
|
| 108 |
+
self.build_lib = None
|
| 109 |
+
self.plat_name = None
|
| 110 |
+
self.build_temp = None
|
| 111 |
+
self.inplace = 0
|
| 112 |
+
self.package = None
|
| 113 |
+
|
| 114 |
+
self.include_dirs = None
|
| 115 |
+
self.define = None
|
| 116 |
+
self.undef = None
|
| 117 |
+
self.libraries = None
|
| 118 |
+
self.library_dirs = None
|
| 119 |
+
self.rpath = None
|
| 120 |
+
self.link_objects = None
|
| 121 |
+
self.debug = None
|
| 122 |
+
self.force = None
|
| 123 |
+
self.compiler = None
|
| 124 |
+
self.swig = None
|
| 125 |
+
self.swig_cpp = None
|
| 126 |
+
self.swig_opts = None
|
| 127 |
+
self.user = None
|
| 128 |
+
self.parallel = None
|
| 129 |
+
|
| 130 |
+
    def finalize_options(self):
        """Resolve all option values: inherit from 'build', normalize
        string options to lists, and add the platform-specific include
        and library search paths."""
        from distutils import sysconfig

        # Inherit unset options from the 'build' command.
        self.set_undefined_options('build',
                                   ('build_lib', 'build_lib'),
                                   ('build_temp', 'build_temp'),
                                   ('compiler', 'compiler'),
                                   ('debug', 'debug'),
                                   ('force', 'force'),
                                   ('parallel', 'parallel'),
                                   ('plat_name', 'plat_name'),
                                   )

        if self.package is None:
            self.package = self.distribution.ext_package

        self.extensions = self.distribution.ext_modules

        # Make sure Python's include directories (for Python.h, pyconfig.h,
        # etc.) are in the include search path.
        py_include = sysconfig.get_python_inc()
        plat_py_include = sysconfig.get_python_inc(plat_specific=1)
        if self.include_dirs is None:
            self.include_dirs = self.distribution.include_dirs or []
        if isinstance(self.include_dirs, str):
            self.include_dirs = self.include_dirs.split(os.pathsep)

        # If in a virtualenv, add its include directory
        # Issue 16116
        if sys.exec_prefix != sys.base_exec_prefix:
            self.include_dirs.append(os.path.join(sys.exec_prefix, 'include'))

        # Put the Python "system" include dir at the end, so that
        # any local include dirs take precedence.
        self.include_dirs.extend(py_include.split(os.path.pathsep))
        if plat_py_include != py_include:
            self.include_dirs.extend(
                plat_py_include.split(os.path.pathsep))

        self.ensure_string_list('libraries')
        self.ensure_string_list('link_objects')

        # Life is easier if we're not forever checking for None, so
        # simplify these options to empty lists if unset
        if self.libraries is None:
            self.libraries = []
        if self.library_dirs is None:
            self.library_dirs = []
        elif isinstance(self.library_dirs, str):
            self.library_dirs = self.library_dirs.split(os.pathsep)

        if self.rpath is None:
            self.rpath = []
        elif isinstance(self.rpath, str):
            self.rpath = self.rpath.split(os.pathsep)

        # for extensions under windows use different directories
        # for Release and Debug builds.
        # also Python's library directory must be appended to library_dirs
        if os.name == 'nt':
            # the 'libs' directory is for binary installs - we assume that
            # must be the *native* platform.  But we don't really support
            # cross-compiling via a binary install anyway, so we let it go.
            self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
            if sys.base_exec_prefix != sys.prefix:  # Issue 16116
                self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs'))
            if self.debug:
                self.build_temp = os.path.join(self.build_temp, "Debug")
            else:
                self.build_temp = os.path.join(self.build_temp, "Release")

            # Append the source distribution include and library directories,
            # this allows distutils on windows to work in the source tree
            self.include_dirs.append(os.path.dirname(get_config_h_filename()))
            _sys_home = getattr(sys, '_home', None)
            if _sys_home:
                self.library_dirs.append(_sys_home)

            # Use the .lib files for the correct architecture
            if self.plat_name == 'win32':
                suffix = 'win32'
            else:
                # win-amd64
                suffix = self.plat_name[4:]
            new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
            if suffix:
                new_lib = os.path.join(new_lib, suffix)
            self.library_dirs.append(new_lib)

        # For extensions under Cygwin, Python's library directory must be
        # appended to library_dirs
        if sys.platform[:6] == 'cygwin':
            if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
                # building third party extensions
                self.library_dirs.append(os.path.join(sys.prefix, "lib",
                                                      "python" + get_python_version(),
                                                      "config"))
            else:
                # building python standard extensions
                self.library_dirs.append('.')

        # For building extensions with a shared Python library,
        # Python's library directory must be appended to library_dirs
        # See Issues: #1600860, #4366
        if (sysconfig.get_config_var('Py_ENABLE_SHARED')):
            if not sysconfig.python_build:
                # building third party extensions
                self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
            else:
                # building python standard extensions
                self.library_dirs.append('.')

        # The argument parsing will result in self.define being a string, but
        # it has to be a list of 2-tuples.  All the preprocessor symbols
        # specified by the 'define' option will be set to '1'.  Multiple
        # symbols can be separated with commas.

        if self.define:
            defines = self.define.split(',')
            self.define = [(symbol, '1') for symbol in defines]

        # The option for macros to undefine is also a string from the
        # option parsing, but has to be a list.  Multiple symbols can also
        # be separated with commas here.
        if self.undef:
            self.undef = self.undef.split(',')

        if self.swig_opts is None:
            self.swig_opts = []
        else:
            self.swig_opts = self.swig_opts.split(' ')

        # Finally add the user include and library directories if requested
        if self.user:
            user_include = os.path.join(USER_BASE, "include")
            user_lib = os.path.join(USER_BASE, "lib")
            if os.path.isdir(user_include):
                self.include_dirs.append(user_include)
            if os.path.isdir(user_lib):
                self.library_dirs.append(user_lib)
                self.rpath.append(user_lib)

        # 'parallel' may arrive as a string from the command line.
        if isinstance(self.parallel, str):
            try:
                self.parallel = int(self.parallel)
            except ValueError:
                raise DistutilsOptionError("parallel should be an integer")
| 277 |
+
|
| 278 |
+
    def run(self):
        """Set up the CCompiler and build every configured extension."""
        from distutils.ccompiler import new_compiler

        # 'self.extensions', as supplied by setup.py, is a list of
        # Extension instances.  See the documentation for Extension (in
        # distutils.extension) for details.
        #
        # For backwards compatibility with Distutils 0.8.2 and earlier, we
        # also allow the 'extensions' list to be a list of tuples:
        #    (ext_name, build_info)
        # where build_info is a dictionary containing everything that
        # Extension instances do except the name, with a few things being
        # differently named.  We convert these 2-tuples to Extension
        # instances as needed.

        if not self.extensions:
            return

        # If we were asked to build any C/C++ libraries, make sure that the
        # directory where we put them is in the library search path for
        # linking extensions.
        if self.distribution.has_c_libraries():
            build_clib = self.get_finalized_command('build_clib')
            self.libraries.extend(build_clib.get_library_names() or [])
            self.library_dirs.append(build_clib.build_clib)

        # Setup the CCompiler object that we'll use to do all the
        # compiling and linking
        self.compiler = new_compiler(compiler=self.compiler,
                                     verbose=self.verbose,
                                     dry_run=self.dry_run,
                                     force=self.force)
        customize_compiler(self.compiler)
        # If we are cross-compiling, init the compiler now (if we are not
        # cross-compiling, init would not hurt, but people may rely on
        # late initialization of compiler even if they shouldn't...)
        if os.name == 'nt' and self.plat_name != get_platform():
            self.compiler.initialize(self.plat_name)

        # And make sure that any compile/link-related options (which might
        # come from the command-line or from the setup script) are set in
        # that CCompiler object -- that way, they automatically apply to
        # all compiling and linking done here.
        if self.include_dirs is not None:
            self.compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for (name, value) in self.define:
                self.compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                self.compiler.undefine_macro(macro)
        if self.libraries is not None:
            self.compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            self.compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            self.compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            self.compiler.set_link_objects(self.link_objects)

        # Now actually compile and link everything.
        self.build_extensions()
| 341 |
+
|
| 342 |
+
    def check_extensions_list(self, extensions):
        """Ensure that the list of extensions (presumably provided as a
        command option 'extensions') is valid, i.e. it is a list of
        Extension objects.  We also support the old-style list of 2-tuples,
        where the tuples are (ext_name, build_info), which are converted to
        Extension instances here.

        Note: converts old-style tuples to Extension instances *in place*
        (mutates the 'extensions' list).

        Raise DistutilsSetupError if the structure is invalid anywhere;
        just returns otherwise.
        """
        if not isinstance(extensions, list):
            raise DistutilsSetupError(
                  "'ext_modules' option must be a list of Extension instances")

        for i, ext in enumerate(extensions):
            if isinstance(ext, Extension):
                continue                # OK! (assume type-checking done
                                        # by Extension constructor)

            if not isinstance(ext, tuple) or len(ext) != 2:
                raise DistutilsSetupError(
                       "each element of 'ext_modules' option must be an "
                       "Extension instance or 2-tuple")

            ext_name, build_info = ext

            log.warn("old-style (ext_name, build_info) tuple found in "
                     "ext_modules for extension '%s' "
                     "-- please convert to Extension instance", ext_name)

            if not (isinstance(ext_name, str) and
                    extension_name_re.match(ext_name)):
                raise DistutilsSetupError(
                       "first element of each tuple in 'ext_modules' "
                       "must be the extension name (a string)")

            if not isinstance(build_info, dict):
                raise DistutilsSetupError(
                       "second element of each tuple in 'ext_modules' "
                       "must be a dictionary (build info)")

            # OK, the (ext_name, build_info) dict is type-safe: convert it
            # to an Extension instance.
            ext = Extension(ext_name, build_info['sources'])

            # Easy stuff: one-to-one mapping from dict elements to
            # instance attributes.
            for key in ('include_dirs', 'library_dirs', 'libraries',
                        'extra_objects', 'extra_compile_args',
                        'extra_link_args'):
                val = build_info.get(key)
                if val is not None:
                    setattr(ext, key, val)

            # Medium-easy stuff: same syntax/semantics, different names.
            ext.runtime_library_dirs = build_info.get('rpath')
            if 'def_file' in build_info:
                log.warn("'def_file' element of build info dict "
                         "no longer supported")

            # Non-trivial stuff: 'macros' split into 'define_macros'
            # and 'undef_macros'.  1-tuples undefine, 2-tuples define.
            macros = build_info.get('macros')
            if macros:
                ext.define_macros = []
                ext.undef_macros = []
                for macro in macros:
                    if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
                        raise DistutilsSetupError(
                               "'macros' element of build info dict "
                               "must be 1- or 2-tuple")
                    if len(macro) == 1:
                        ext.undef_macros.append(macro[0])
                    elif len(macro) == 2:
                        ext.define_macros.append(macro)

            # Replace the tuple with the converted Extension in place.
            extensions[i] = ext
| 419 |
+
|
| 420 |
+
def get_source_files(self):
|
| 421 |
+
self.check_extensions_list(self.extensions)
|
| 422 |
+
filenames = []
|
| 423 |
+
|
| 424 |
+
# Wouldn't it be neat if we knew the names of header files too...
|
| 425 |
+
for ext in self.extensions:
|
| 426 |
+
filenames.extend(ext.sources)
|
| 427 |
+
return filenames
|
| 428 |
+
|
| 429 |
+
def get_outputs(self):
|
| 430 |
+
# Sanity check the 'extensions' list -- can't assume this is being
|
| 431 |
+
# done in the same run as a 'build_extensions()' call (in fact, we
|
| 432 |
+
# can probably assume that it *isn't*!).
|
| 433 |
+
self.check_extensions_list(self.extensions)
|
| 434 |
+
|
| 435 |
+
# And build the list of output (built) filenames. Note that this
|
| 436 |
+
# ignores the 'inplace' flag, and assumes everything goes in the
|
| 437 |
+
# "build" tree.
|
| 438 |
+
outputs = []
|
| 439 |
+
for ext in self.extensions:
|
| 440 |
+
outputs.append(self.get_ext_fullpath(ext.name))
|
| 441 |
+
return outputs
|
| 442 |
+
|
| 443 |
+
def build_extensions(self):
|
| 444 |
+
# First, sanity-check the 'extensions' list
|
| 445 |
+
self.check_extensions_list(self.extensions)
|
| 446 |
+
if self.parallel:
|
| 447 |
+
self._build_extensions_parallel()
|
| 448 |
+
else:
|
| 449 |
+
self._build_extensions_serial()
|
| 450 |
+
|
| 451 |
+
    def _build_extensions_parallel(self):
        # Build extensions concurrently with a thread pool.
        # Falls back to serial building when the worker count cannot be
        # determined (parallel=True with cpu_count() returning None) or
        # when concurrent.futures is unavailable.
        workers = self.parallel
        if self.parallel is True:
            workers = os.cpu_count() # may return None
        try:
            from concurrent.futures import ThreadPoolExecutor
        except ImportError:
            workers = None

        if workers is None:
            self._build_extensions_serial()
            return

        with ThreadPoolExecutor(max_workers=workers) as executor:
            # Submit everything first, then collect results in order so
            # error reporting matches the extension order.
            futures = [executor.submit(self.build_extension, ext)
                       for ext in self.extensions]
            for ext, fut in zip(self.extensions, futures):
                with self._filter_build_errors(ext):
                    fut.result()
| 470 |
+
|
| 471 |
+
def _build_extensions_serial(self):
|
| 472 |
+
for ext in self.extensions:
|
| 473 |
+
with self._filter_build_errors(ext):
|
| 474 |
+
self.build_extension(ext)
|
| 475 |
+
|
| 476 |
+
    @contextlib.contextmanager
    def _filter_build_errors(self, ext):
        # Context manager wrapping the build of a single extension:
        # build failures of an *optional* extension are downgraded to a
        # warning; failures of a required extension propagate unchanged.
        try:
            yield
        except (CCompilerError, DistutilsError, CompileError) as e:
            if not ext.optional:
                raise
            self.warn('building extension "%s" failed: %s' %
                      (ext.name, e))
| 485 |
+
|
| 486 |
+
def build_extension(self, ext):
    """Compile and link one Extension object into its shared-object file.

    Raises DistutilsSetupError when 'ext.sources' is missing or is not
    a list/tuple.  Skips the build entirely when the output file is
    newer than every source and dependency (unless self.force is set).
    """
    sources = ext.sources
    if sources is None or not isinstance(sources, (list, tuple)):
        raise DistutilsSetupError(
              "in 'ext_modules' option (extension '%s'), "
              "'sources' must be present and must be "
              "a list of source filenames" % ext.name)
    # sort to make the resulting .so file build reproducible
    sources = sorted(sources)

    ext_path = self.get_ext_fullpath(ext.name)
    depends = sources + ext.depends
    if not (self.force or newer_group(depends, ext_path, 'newer')):
        log.debug("skipping '%s' extension (up-to-date)", ext.name)
        return
    else:
        log.info("building '%s' extension", ext.name)

    # First, scan the sources for SWIG definition files (.i), run
    # SWIG on 'em to create .c files, and modify the sources list
    # accordingly.
    sources = self.swig_sources(sources, ext)

    # Next, compile the source code to object files.

    # XXX not honouring 'define_macros' or 'undef_macros' -- the
    # CCompiler API needs to change to accommodate this, and I
    # want to do one thing at a time!

    # Two possible sources for extra compiler arguments:
    #   - 'extra_compile_args' in Extension object
    #   - CFLAGS environment variable (not particularly
    #     elegant, but people seem to expect it and I
    #     guess it's useful)
    # The environment variable should take precedence, and
    # any sensible compiler will give precedence to later
    # command line args.  Hence we combine them in order:
    extra_args = ext.extra_compile_args or []

    macros = ext.define_macros[:]
    for undef in ext.undef_macros:
        # a 1-tuple (as opposed to a (name, value) pair) tells the
        # compiler to *undefine* that macro
        macros.append((undef,))

    objects = self.compiler.compile(sources,
                                     output_dir=self.build_temp,
                                     macros=macros,
                                     include_dirs=ext.include_dirs,
                                     debug=self.debug,
                                     extra_postargs=extra_args,
                                     depends=ext.depends)

    # XXX outdated variable, kept here in case third-part code
    # needs it.
    self._built_objects = objects[:]

    # Now link the object files together into a "shared object" --
    # of course, first we have to figure out all the other things
    # that go into the mix.
    if ext.extra_objects:
        objects.extend(ext.extra_objects)
    # note: 'extra_args' is reused -- from here on it holds *link*
    # arguments, not compile arguments
    extra_args = ext.extra_link_args or []

    # Detect target language, if not provided
    language = ext.language or self.compiler.detect_language(sources)

    self.compiler.link_shared_object(
        objects, ext_path,
        libraries=self.get_libraries(ext),
        library_dirs=ext.library_dirs,
        runtime_library_dirs=ext.runtime_library_dirs,
        extra_postargs=extra_args,
        export_symbols=self.get_export_symbols(ext),
        debug=self.debug,
        build_temp=self.build_temp,
        target_lang=language)
|
| 561 |
+
|
| 562 |
+
def swig_sources(self, sources, extension):
    """Walk the list of source files in 'sources', looking for SWIG
    interface (.i) files.  Run SWIG on all that are found, and
    return a modified 'sources' list with SWIG source files replaced
    by the generated C (or C++) files.
    """
    new_sources = []
    swig_sources = []
    # maps each .i file to the wrapper file SWIG will generate for it
    swig_targets = {}

    # XXX this drops generated C/C++ files into the source tree, which
    # is fine for developers who want to distribute the generated
    # source -- but there should be an option to put SWIG output in
    # the temp dir.

    if self.swig_cpp:
        log.warn("--swig-cpp is deprecated - use --swig-opts=-c++")

    # C++ wrappers are produced when requested either via the
    # deprecated --swig-cpp flag or a '-c++' option on the command
    # line or the extension itself.
    if self.swig_cpp or ('-c++' in self.swig_opts) or \
       ('-c++' in extension.swig_opts):
        target_ext = '.cpp'
    else:
        target_ext = '.c'

    for source in sources:
        (base, ext) = os.path.splitext(source)
        if ext == ".i":             # SWIG interface file
            new_sources.append(base + '_wrap' + target_ext)
            swig_sources.append(source)
            swig_targets[source] = new_sources[-1]
        else:
            new_sources.append(source)

    if not swig_sources:
        return new_sources

    swig = self.swig or self.find_swig()
    swig_cmd = [swig, "-python"]
    swig_cmd.extend(self.swig_opts)
    if self.swig_cpp:
        swig_cmd.append("-c++")

    # Do not override commandline arguments
    if not self.swig_opts:
        for o in extension.swig_opts:
            swig_cmd.append(o)

    for source in swig_sources:
        target = swig_targets[source]
        log.info("swigging %s to %s", source, target)
        # '-o target' makes SWIG write the wrapper to the path we
        # already recorded in 'new_sources'
        self.spawn(swig_cmd + ["-o", target, source])

    return new_sources
|
| 615 |
+
|
| 616 |
+
def find_swig(self):
    """Return the name of the SWIG executable.  On Unix, this is
    just "swig" -- it should be in the PATH.  Tries a bit harder on
    Windows.
    """
    if os.name == "posix":
        return "swig"
    if os.name == "nt":
        # Probe SWIG's conventional install directories on Windows;
        # when nothing is found, fall back to assuming swig.exe is on
        # the PATH, just like on Unix.
        for version in ("1.3", "1.2", "1.1"):
            candidate = os.path.join("c:\\swig%s" % version, "swig.exe")
            if os.path.isfile(candidate):
                return candidate
        return "swig.exe"
    raise DistutilsPlatformError(
          "I don't know how to find (much less run) SWIG "
          "on platform '%s'" % os.name)
|
| 637 |
+
|
| 638 |
+
# -- Name generators -----------------------------------------------
|
| 639 |
+
# (extension names, filenames, whatever)
|
| 640 |
+
def get_ext_fullpath(self, ext_name):
    """Returns the path of the filename for a given extension.

    The file is located in `build_lib` or directly in the package
    (inplace option).
    """
    fullname = self.get_ext_fullname(ext_name)
    modpath = fullname.split('.')
    filename = self.get_ext_filename(modpath[-1])

    if self.inplace:
        # The inplace option requires locating the package directory;
        # delegate that to the (finalized) build_py command and return
        # package_dir/filename.
        package = '.'.join(modpath[0:-1])
        build_py = self.get_finalized_command('build_py')
        package_dir = os.path.abspath(build_py.get_package_dir(package))
        return os.path.join(package_dir, filename)

    # Normal build: build_dir/package/path/filename
    relative = os.path.join(*modpath[:-1] + [filename])
    return os.path.join(self.build_lib, relative)
|
| 666 |
+
|
| 667 |
+
def get_ext_fullname(self, ext_name):
    """Return the full name of the extension.

    When 'self.package' is set, the name is prefixed with
    "<package>."; otherwise 'ext_name' comes back unchanged.
    """
    if self.package is None:
        return ext_name
    return '%s.%s' % (self.package, ext_name)
|
| 675 |
+
|
| 676 |
+
def get_ext_filename(self, ext_name):
    r"""Convert the name of an extension (eg. "foo.bar") into the name
    of the file from which it will be loaded (eg. "foo/bar.so", or
    "foo\bar.pyd").
    """
    from distutils.sysconfig import get_config_var
    suffix = get_config_var('EXT_SUFFIX')
    parts = ext_name.split('.')
    return os.path.join(*parts) + suffix
|
| 685 |
+
|
| 686 |
+
def get_export_symbols(self, ext):
    """Return the list of symbols that a shared extension has to
    export.  This either uses 'ext.export_symbols' or, if it's not
    provided, "PyInit_" + module_name.  Only relevant on Windows, where
    the .pyd file (DLL) must export the module "PyInit_" function.
    """
    tail = '_' + ext.name.split('.')[-1]
    try:
        # Unicode module name support as defined in PEP-489
        # https://www.python.org/dev/peps/pep-0489/#export-hook-name
        tail.encode('ascii')
    except UnicodeEncodeError:
        tail = 'U' + tail.encode('punycode').replace(b'-', b'_').decode('ascii')

    init_symbol = "PyInit" + tail
    if init_symbol not in ext.export_symbols:
        ext.export_symbols.append(init_symbol)
    return ext.export_symbols
|
| 704 |
+
|
| 705 |
+
def get_libraries(self, ext):
    """Return the list of libraries to link against when building a
    shared extension.  On most platforms, this is just 'ext.libraries';
    on Windows, we add the Python library (eg. python20.dll).
    """
    # The python library is always needed on Windows.  For MSVC, this
    # is redundant, since the library is mentioned in a pragma in
    # pyconfig.h that MSVC groks.  The other Windows compilers all seem
    # to need it mentioned explicitly, though, so that's what we do.
    # Append '_d' to the python import library on debug builds.
    if sys.platform == "win32":
        from distutils._msvccompiler import MSVCCompiler
        if not isinstance(self.compiler, MSVCCompiler):
            template = "python%d%d"
            if self.debug:
                template = template + '_d'
            # major/minor version extracted from sys.hexversion,
            # e.g. "python39" (plus '_d' for debug builds)
            pythonlib = (template %
                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
            # don't extend ext.libraries, it may be shared with other
            # extensions, it is a reference to the original list
            return ext.libraries + [pythonlib]
    else:
        # On Android only the main executable and LD_PRELOADs are considered
        # to be RTLD_GLOBAL, all the dependencies of the main executable
        # remain RTLD_LOCAL and so the shared libraries must be linked with
        # libpython when python is built with a shared python library (issue
        # bpo-21536).
        # On Cygwin (and if required, other POSIX-like platforms based on
        # Windows like MinGW) it is simply necessary that all symbols in
        # shared libraries are resolved at link time.
        from distutils.sysconfig import get_config_var
        link_libpython = False
        if get_config_var('Py_ENABLE_SHARED'):
            # A native build on an Android device or on Cygwin
            if hasattr(sys, 'getandroidapilevel'):
                link_libpython = True
            elif sys.platform == 'cygwin':
                link_libpython = True
            elif '_PYTHON_HOST_PLATFORM' in os.environ:
                # We are cross-compiling for one of the relevant platforms
                if get_config_var('ANDROID_API_LEVEL') != 0:
                    link_libpython = True
                elif get_config_var('MACHDEP') == 'cygwin':
                    link_libpython = True

        if link_libpython:
            # e.g. 'python3.9' / 'python3.9d' -- LDVERSION includes any
            # ABI flags
            ldversion = get_config_var('LDVERSION')
            return ext.libraries + ['python' + ldversion]

    # Default: no extra library beyond what the extension asked for.
    return ext.libraries
|
llava/lib/python3.10/distutils/command/build_py.py
ADDED
|
@@ -0,0 +1,416 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.build_py
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'build_py' command."""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import importlib.util
|
| 7 |
+
import sys
|
| 8 |
+
import glob
|
| 9 |
+
|
| 10 |
+
from distutils.core import Command
|
| 11 |
+
from distutils.errors import *
|
| 12 |
+
from distutils.util import convert_path, Mixin2to3
|
| 13 |
+
from distutils import log
|
| 14 |
+
|
| 15 |
+
class build_py (Command):
    """Build pure Python modules by copying them (and their package
    data files) into the build directory, optionally byte-compiling
    the copies."""

    description = "\"build\" pure Python modules (copy to build directory)"

    user_options = [
        ('build-lib=', 'd', "directory to \"build\" (copy) to"),
        ('compile', 'c', "compile .py to .pyc"),
        ('no-compile', None, "don't compile .py files [default]"),
        ('optimize=', 'O',
         "also compile with optimization: -O1 for \"python -O\", "
         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ]

    boolean_options = ['compile', 'force']
    negative_opt = {'no-compile' : 'compile'}

    def initialize_options(self):
        # All options start unset/off; finalize_options() fills them in
        # from the 'build' command and the distribution object.
        self.build_lib = None
        self.py_modules = None
        self.package = None
        self.package_data = None
        self.package_dir = None
        self.compile = 0
        self.optimize = 0
        self.force = None

    def finalize_options(self):
        """Resolve unset options from the 'build' command and the
        distribution, and validate 'optimize'."""
        self.set_undefined_options('build',
                                   ('build_lib', 'build_lib'),
                                   ('force', 'force'))

        # Get the distribution options that are aliases for build_py
        # options -- list of packages and list of modules.
        self.packages = self.distribution.packages
        self.py_modules = self.distribution.py_modules
        self.package_data = self.distribution.package_data
        self.package_dir = {}
        if self.distribution.package_dir:
            for name, path in self.distribution.package_dir.items():
                self.package_dir[name] = convert_path(path)
        self.data_files = self.get_data_files()

        # Ick, copied straight from install_lib.py (fancy_getopt needs a
        # type system!  Hell, *everything* needs a type system!!!)
        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                assert 0 <= self.optimize <= 2
            except (ValueError, AssertionError):
                raise DistutilsOptionError("optimize must be 0, 1, or 2")

    def run(self):
        """Copy modules and package data to the build directory, then
        byte-compile if requested."""
        # XXX copy_file by default preserves atime and mtime.  IMHO this is
        # the right thing to do, but perhaps it should be an option -- in
        # particular, a site administrator might want installed files to
        # reflect the time of installation rather than the last
        # modification time before the installed release.

        # XXX copy_file by default preserves mode, which appears to be the
        # wrong thing to do: if a file is read-only in the working
        # directory, we want it to be installed read/write so that the next
        # installation of the same module distribution can overwrite it
        # without problems.  (This might be a Unix-specific issue.)  Thus
        # we turn off 'preserve_mode' when copying to the build directory,
        # since the build directory is supposed to be exactly what the
        # installation will look like (ie. we preserve mode when
        # installing).

        # Two options control which modules will be installed: 'packages'
        # and 'py_modules'.  The former lets us work with whole packages, not
        # specifying individual modules at all; the latter is for
        # specifying modules one-at-a-time.

        if self.py_modules:
            self.build_modules()
        if self.packages:
            self.build_packages()
            self.build_package_data()

        self.byte_compile(self.get_outputs(include_bytecode=0))

    def get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        data = []
        if not self.packages:
            return data
        for package in self.packages:
            # Locate package source directory
            src_dir = self.get_package_dir(package)

            # Compute package build directory
            build_dir = os.path.join(*([self.build_lib] + package.split('.')))

            # Length of path to strip from found files
            plen = 0
            if src_dir:
                plen = len(src_dir)+1

            # Strip directory from globbed filenames
            filenames = [
                file[plen:] for file in self.find_data_files(package, src_dir)
                ]
            data.append((package, src_dir, build_dir, filenames))
        return data

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        globs = (self.package_data.get('', [])
                 + self.package_data.get(package, []))
        files = []
        for pattern in globs:
            # Each pattern has to be converted to a platform-specific path
            filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern)))
            # Files that match more than one pattern are only added once
            files.extend([fn for fn in filelist if fn not in files
                and os.path.isfile(fn)])
        return files

    def build_package_data(self):
        """Copy data files into build directory"""
        # (removed a dead 'lastdir' local that was assigned but never read)
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                self.copy_file(os.path.join(src_dir, filename), target,
                               preserve_mode=False)

    def get_package_dir(self, package):
        """Return the directory, relative to the top of the source
        distribution, where package 'package' should be found
        (at least according to the 'package_dir' option, if any)."""
        path = package.split('.')

        if not self.package_dir:
            if path:
                return os.path.join(*path)
            else:
                return ''
        else:
            tail = []
            while path:
                try:
                    pdir = self.package_dir['.'.join(path)]
                except KeyError:
                    # No entry for this (sub)package: remember its name
                    # and retry with the parent package.
                    tail.insert(0, path[-1])
                    del path[-1]
                else:
                    tail.insert(0, pdir)
                    return os.path.join(*tail)
            else:
                # Oops, got all the way through 'path' without finding a
                # match in package_dir.  If package_dir defines a directory
                # for the root (nameless) package, then fallback on it;
                # otherwise, we might as well have not consulted
                # package_dir at all, as we just use the directory implied
                # by 'tail' (which should be the same as the original value
                # of 'path' at this point).
                pdir = self.package_dir.get('')
                if pdir is not None:
                    tail.insert(0, pdir)

                if tail:
                    return os.path.join(*tail)
                else:
                    return ''

    def check_package(self, package, package_dir):
        """Validate that 'package_dir' exists and contains __init__.py;
        return the path to __init__.py, or None for the root package or
        when the init file is missing (a warning is logged)."""
        # Empty dir name means current directory, which we can probably
        # assume exists.  Also, os.path.exists and isdir don't know about
        # my "empty string means current dir" convention, so we have to
        # circumvent them.
        if package_dir != "":
            if not os.path.exists(package_dir):
                raise DistutilsFileError(
                      "package directory '%s' does not exist" % package_dir)
            if not os.path.isdir(package_dir):
                raise DistutilsFileError(
                       "supposed package directory '%s' exists, "
                       "but is not a directory" % package_dir)

        # Require __init__.py for all but the "root package"
        if package:
            init_py = os.path.join(package_dir, "__init__.py")
            if os.path.isfile(init_py):
                return init_py
            else:
                log.warn(("package init file '%s' not found " +
                          "(or not a regular file)"), init_py)

        # Either not in a package at all (__init__.py not expected), or
        # __init__.py doesn't exist -- so don't return the filename.
        return None

    def check_module(self, module, module_file):
        """Return True if 'module_file' exists; otherwise warn and
        return False."""
        if not os.path.isfile(module_file):
            log.warn("file %s (for module %s) not found", module_file, module)
            return False
        else:
            return True

    def find_package_modules(self, package, package_dir):
        """Return (package, module, filename) tuples for every .py file
        in 'package_dir', excluding the setup script itself."""
        self.check_package(package, package_dir)
        module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py"))
        modules = []
        setup_script = os.path.abspath(self.distribution.script_name)

        for f in module_files:
            abs_f = os.path.abspath(f)
            if abs_f != setup_script:
                module = os.path.splitext(os.path.basename(f))[0]
                modules.append((package, module, f))
            else:
                self.debug_print("excluding %s" % setup_script)
        return modules

    def find_modules(self):
        """Finds individually-specified Python modules, ie. those listed by
        module name in 'self.py_modules'.  Returns a list of tuples (package,
        module_base, filename): 'package' is a tuple of the path through
        package-space to the module; 'module_base' is the bare (no
        packages, no dots) module name, and 'filename' is the path to the
        ".py" file (relative to the distribution root) that implements the
        module.
        """
        # Map package names to tuples of useful info about the package:
        #    (package_dir, checked)
        # package_dir - the directory where we'll find source files for
        #   this package
        # checked - true if we have checked that the package directory
        #   is valid (exists, contains __init__.py, ... ?)
        packages = {}

        # List of (package, module, filename) tuples to return
        modules = []

        # We treat modules-in-packages almost the same as toplevel modules,
        # just the "package" for a toplevel is empty (either an empty
        # string or empty list, depending on context).  Differences:
        #   - don't check for __init__.py in directory for empty package
        for module in self.py_modules:
            path = module.split('.')
            package = '.'.join(path[0:-1])
            module_base = path[-1]

            try:
                (package_dir, checked) = packages[package]
            except KeyError:
                package_dir = self.get_package_dir(package)
                checked = 0

            if not checked:
                init_py = self.check_package(package, package_dir)
                packages[package] = (package_dir, 1)
                if init_py:
                    modules.append((package, "__init__", init_py))

            # XXX perhaps we should also check for just .pyc files
            # (so greedy closed-source bastards can distribute Python
            # modules too)
            module_file = os.path.join(package_dir, module_base + ".py")
            if not self.check_module(module, module_file):
                continue

            modules.append((package, module_base, module_file))

        return modules

    def find_all_modules(self):
        """Compute the list of all modules that will be built, whether
        they are specified one-module-at-a-time ('self.py_modules') or
        by whole packages ('self.packages').  Return a list of tuples
        (package, module, module_file), just like 'find_modules()' and
        'find_package_modules()' do."""
        modules = []
        if self.py_modules:
            modules.extend(self.find_modules())
        if self.packages:
            for package in self.packages:
                package_dir = self.get_package_dir(package)
                m = self.find_package_modules(package, package_dir)
                modules.extend(m)
        return modules

    def get_source_files(self):
        """Return the list of source filenames for all modules."""
        return [module[-1] for module in self.find_all_modules()]

    def get_module_outfile(self, build_dir, package, module):
        """Return the path under 'build_dir' where 'module' (in
        'package', a sequence of path components) will be copied."""
        outfile_path = [build_dir] + list(package) + [module + ".py"]
        return os.path.join(*outfile_path)

    def get_outputs(self, include_bytecode=1):
        """Return the list of files this command produces: copied .py
        files, (optionally) their bytecode caches, and package data."""
        modules = self.find_all_modules()
        outputs = []
        for (package, module, module_file) in modules:
            package = package.split('.')
            filename = self.get_module_outfile(self.build_lib, package, module)
            outputs.append(filename)
            if include_bytecode:
                if self.compile:
                    outputs.append(importlib.util.cache_from_source(
                        filename, optimization=''))
                if self.optimize > 0:
                    outputs.append(importlib.util.cache_from_source(
                        filename, optimization=self.optimize))

        outputs += [
            os.path.join(build_dir, filename)
            for package, src_dir, build_dir, filenames in self.data_files
            for filename in filenames
            ]

        return outputs

    def build_module(self, module, module_file, package):
        """Copy one module source file into the build tree; return the
        (outfile, copied) result of copy_file()."""
        if isinstance(package, str):
            package = package.split('.')
        elif not isinstance(package, (list, tuple)):
            raise TypeError(
                  "'package' must be a string (dot-separated), list, or tuple")

        # Now put the module source file into the "build" area -- this is
        # easy, we just copy it somewhere under self.build_lib (the build
        # directory for Python source).
        outfile = self.get_module_outfile(self.build_lib, package, module)
        dir = os.path.dirname(outfile)
        self.mkpath(dir)
        return self.copy_file(module_file, outfile, preserve_mode=0)

    def build_modules(self):
        """Copy each individually-listed module into the build tree."""
        modules = self.find_modules()
        for (package, module, module_file) in modules:
            # Now "build" the module -- ie. copy the source file to
            # self.build_lib (the build directory for Python source).
            # (Actually, it gets copied to the directory for this package
            # under self.build_lib.)
            self.build_module(module, module_file, package)

    def build_packages(self):
        """Copy every module of every listed package into the build
        tree."""
        for package in self.packages:
            # Get list of (package, module, module_file) tuples based on
            # scanning the package directory.  'package' is only included
            # in the tuple so that 'find_modules()' and
            # 'find_package_tuples()' have a consistent interface; it's
            # ignored here (apart from a sanity check).  Also, 'module' is
            # the *unqualified* module name (ie. no dots, no package -- we
            # already know its package!), and 'module_file' is the path to
            # the .py file, relative to the current directory
            # (ie. including 'package_dir').
            package_dir = self.get_package_dir(package)
            modules = self.find_package_modules(package, package_dir)

            # Now loop over the modules we found, "building" each one (just
            # copy it to self.build_lib).
            for (package_, module, module_file) in modules:
                assert package == package_
                self.build_module(module, module_file, package)

    def byte_compile(self, files):
        """Byte-compile the copied .py files according to the 'compile'
        and 'optimize' options (no-op when bytecode writing is
        disabled)."""
        if sys.dont_write_bytecode:
            self.warn('byte-compiling is disabled, skipping.')
            return

        from distutils.util import byte_compile
        prefix = self.build_lib
        if prefix[-1] != os.sep:
            prefix = prefix + os.sep

        # XXX this code is essentially the same as the 'byte_compile()
        # method of the "install_lib" command, except for the determination
        # of the 'prefix' string.  Hmmm.
        if self.compile:
            byte_compile(files, optimize=0,
                         force=self.force, prefix=prefix, dry_run=self.dry_run)
        if self.optimize > 0:
            byte_compile(files, optimize=self.optimize,
                         force=self.force, prefix=prefix, dry_run=self.dry_run)
|
| 393 |
+
|
| 394 |
+
class build_py_2to3(build_py, Mixin2to3):
|
| 395 |
+
def run(self):
|
| 396 |
+
self.updated_files = []
|
| 397 |
+
|
| 398 |
+
# Base class code
|
| 399 |
+
if self.py_modules:
|
| 400 |
+
self.build_modules()
|
| 401 |
+
if self.packages:
|
| 402 |
+
self.build_packages()
|
| 403 |
+
self.build_package_data()
|
| 404 |
+
|
| 405 |
+
# 2to3
|
| 406 |
+
self.run_2to3(self.updated_files)
|
| 407 |
+
|
| 408 |
+
# Remaining base class code
|
| 409 |
+
self.byte_compile(self.get_outputs(include_bytecode=0))
|
| 410 |
+
|
| 411 |
+
def build_module(self, module, module_file, package):
|
| 412 |
+
res = build_py.build_module(self, module, module_file, package)
|
| 413 |
+
if res[1]:
|
| 414 |
+
# file was copied
|
| 415 |
+
self.updated_files.append(res[0])
|
| 416 |
+
return res
|
llava/lib/python3.10/distutils/command/clean.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.clean
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'clean' command."""
|
| 4 |
+
|
| 5 |
+
# contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>, added 2000-03-18
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
from distutils.core import Command
|
| 9 |
+
from distutils.dir_util import remove_tree
|
| 10 |
+
from distutils import log
|
| 11 |
+
|
| 12 |
+
class clean(Command):
|
| 13 |
+
|
| 14 |
+
description = "clean up temporary files from 'build' command"
|
| 15 |
+
user_options = [
|
| 16 |
+
('build-base=', 'b',
|
| 17 |
+
"base build directory (default: 'build.build-base')"),
|
| 18 |
+
('build-lib=', None,
|
| 19 |
+
"build directory for all modules (default: 'build.build-lib')"),
|
| 20 |
+
('build-temp=', 't',
|
| 21 |
+
"temporary build directory (default: 'build.build-temp')"),
|
| 22 |
+
('build-scripts=', None,
|
| 23 |
+
"build directory for scripts (default: 'build.build-scripts')"),
|
| 24 |
+
('bdist-base=', None,
|
| 25 |
+
"temporary directory for built distributions"),
|
| 26 |
+
('all', 'a',
|
| 27 |
+
"remove all build output, not just temporary by-products")
|
| 28 |
+
]
|
| 29 |
+
|
| 30 |
+
boolean_options = ['all']
|
| 31 |
+
|
| 32 |
+
def initialize_options(self):
|
| 33 |
+
self.build_base = None
|
| 34 |
+
self.build_lib = None
|
| 35 |
+
self.build_temp = None
|
| 36 |
+
self.build_scripts = None
|
| 37 |
+
self.bdist_base = None
|
| 38 |
+
self.all = None
|
| 39 |
+
|
| 40 |
+
def finalize_options(self):
|
| 41 |
+
self.set_undefined_options('build',
|
| 42 |
+
('build_base', 'build_base'),
|
| 43 |
+
('build_lib', 'build_lib'),
|
| 44 |
+
('build_scripts', 'build_scripts'),
|
| 45 |
+
('build_temp', 'build_temp'))
|
| 46 |
+
self.set_undefined_options('bdist',
|
| 47 |
+
('bdist_base', 'bdist_base'))
|
| 48 |
+
|
| 49 |
+
def run(self):
|
| 50 |
+
# remove the build/temp.<plat> directory (unless it's already
|
| 51 |
+
# gone)
|
| 52 |
+
if os.path.exists(self.build_temp):
|
| 53 |
+
remove_tree(self.build_temp, dry_run=self.dry_run)
|
| 54 |
+
else:
|
| 55 |
+
log.debug("'%s' does not exist -- can't clean it",
|
| 56 |
+
self.build_temp)
|
| 57 |
+
|
| 58 |
+
if self.all:
|
| 59 |
+
# remove build directories
|
| 60 |
+
for directory in (self.build_lib,
|
| 61 |
+
self.bdist_base,
|
| 62 |
+
self.build_scripts):
|
| 63 |
+
if os.path.exists(directory):
|
| 64 |
+
remove_tree(directory, dry_run=self.dry_run)
|
| 65 |
+
else:
|
| 66 |
+
log.warn("'%s' does not exist -- can't clean it",
|
| 67 |
+
directory)
|
| 68 |
+
|
| 69 |
+
# just for the heck of it, try to remove the base build directory:
|
| 70 |
+
# we might have emptied it right now, but if not we don't care
|
| 71 |
+
if not self.dry_run:
|
| 72 |
+
try:
|
| 73 |
+
os.rmdir(self.build_base)
|
| 74 |
+
log.info("removing '%s'", self.build_base)
|
| 75 |
+
except OSError:
|
| 76 |
+
pass
|
llava/lib/python3.10/distutils/command/command_template
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.x
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'x' command.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
# created 2000/mm/dd, John Doe
|
| 7 |
+
|
| 8 |
+
__revision__ = "$Id$"
|
| 9 |
+
|
| 10 |
+
from distutils.core import Command
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class x(Command):
|
| 14 |
+
|
| 15 |
+
# Brief (40-50 characters) description of the command
|
| 16 |
+
description = ""
|
| 17 |
+
|
| 18 |
+
# List of option tuples: long name, short name (None if no short
|
| 19 |
+
# name), and help string.
|
| 20 |
+
user_options = [('', '',
|
| 21 |
+
""),
|
| 22 |
+
]
|
| 23 |
+
|
| 24 |
+
def initialize_options(self):
|
| 25 |
+
self. = None
|
| 26 |
+
self. = None
|
| 27 |
+
self. = None
|
| 28 |
+
|
| 29 |
+
def finalize_options(self):
|
| 30 |
+
if self.x is None:
|
| 31 |
+
self.x =
|
| 32 |
+
|
| 33 |
+
def run(self):
|
llava/lib/python3.10/distutils/command/config.py
ADDED
|
@@ -0,0 +1,344 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.config
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'config' command, a (mostly) empty command class
|
| 4 |
+
that exists mainly to be sub-classed by specific module distributions and
|
| 5 |
+
applications. The idea is that while every "config" command is different,
|
| 6 |
+
at least they're all named the same, and users always see "config" in the
|
| 7 |
+
list of standard commands. Also, this is a good place to put common
|
| 8 |
+
configure-like tasks: "try to compile this C code", or "figure out where
|
| 9 |
+
this header file lives".
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
import os, re
|
| 13 |
+
|
| 14 |
+
from distutils.core import Command
|
| 15 |
+
from distutils.errors import DistutilsExecError
|
| 16 |
+
from distutils.sysconfig import customize_compiler
|
| 17 |
+
from distutils import log
|
| 18 |
+
|
| 19 |
+
LANG_EXT = {"c": ".c", "c++": ".cxx"}
|
| 20 |
+
|
| 21 |
+
class config(Command):
|
| 22 |
+
|
| 23 |
+
description = "prepare to build"
|
| 24 |
+
|
| 25 |
+
user_options = [
|
| 26 |
+
('compiler=', None,
|
| 27 |
+
"specify the compiler type"),
|
| 28 |
+
('cc=', None,
|
| 29 |
+
"specify the compiler executable"),
|
| 30 |
+
('include-dirs=', 'I',
|
| 31 |
+
"list of directories to search for header files"),
|
| 32 |
+
('define=', 'D',
|
| 33 |
+
"C preprocessor macros to define"),
|
| 34 |
+
('undef=', 'U',
|
| 35 |
+
"C preprocessor macros to undefine"),
|
| 36 |
+
('libraries=', 'l',
|
| 37 |
+
"external C libraries to link with"),
|
| 38 |
+
('library-dirs=', 'L',
|
| 39 |
+
"directories to search for external C libraries"),
|
| 40 |
+
|
| 41 |
+
('noisy', None,
|
| 42 |
+
"show every action (compile, link, run, ...) taken"),
|
| 43 |
+
('dump-source', None,
|
| 44 |
+
"dump generated source files before attempting to compile them"),
|
| 45 |
+
]
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
# The three standard command methods: since the "config" command
|
| 49 |
+
# does nothing by default, these are empty.
|
| 50 |
+
|
| 51 |
+
def initialize_options(self):
|
| 52 |
+
self.compiler = None
|
| 53 |
+
self.cc = None
|
| 54 |
+
self.include_dirs = None
|
| 55 |
+
self.libraries = None
|
| 56 |
+
self.library_dirs = None
|
| 57 |
+
|
| 58 |
+
# maximal output for now
|
| 59 |
+
self.noisy = 1
|
| 60 |
+
self.dump_source = 1
|
| 61 |
+
|
| 62 |
+
# list of temporary files generated along-the-way that we have
|
| 63 |
+
# to clean at some point
|
| 64 |
+
self.temp_files = []
|
| 65 |
+
|
| 66 |
+
def finalize_options(self):
|
| 67 |
+
if self.include_dirs is None:
|
| 68 |
+
self.include_dirs = self.distribution.include_dirs or []
|
| 69 |
+
elif isinstance(self.include_dirs, str):
|
| 70 |
+
self.include_dirs = self.include_dirs.split(os.pathsep)
|
| 71 |
+
|
| 72 |
+
if self.libraries is None:
|
| 73 |
+
self.libraries = []
|
| 74 |
+
elif isinstance(self.libraries, str):
|
| 75 |
+
self.libraries = [self.libraries]
|
| 76 |
+
|
| 77 |
+
if self.library_dirs is None:
|
| 78 |
+
self.library_dirs = []
|
| 79 |
+
elif isinstance(self.library_dirs, str):
|
| 80 |
+
self.library_dirs = self.library_dirs.split(os.pathsep)
|
| 81 |
+
|
| 82 |
+
def run(self):
|
| 83 |
+
pass
|
| 84 |
+
|
| 85 |
+
# Utility methods for actual "config" commands. The interfaces are
|
| 86 |
+
# loosely based on Autoconf macros of similar names. Sub-classes
|
| 87 |
+
# may use these freely.
|
| 88 |
+
|
| 89 |
+
def _check_compiler(self):
|
| 90 |
+
"""Check that 'self.compiler' really is a CCompiler object;
|
| 91 |
+
if not, make it one.
|
| 92 |
+
"""
|
| 93 |
+
# We do this late, and only on-demand, because this is an expensive
|
| 94 |
+
# import.
|
| 95 |
+
from distutils.ccompiler import CCompiler, new_compiler
|
| 96 |
+
if not isinstance(self.compiler, CCompiler):
|
| 97 |
+
self.compiler = new_compiler(compiler=self.compiler,
|
| 98 |
+
dry_run=self.dry_run, force=1)
|
| 99 |
+
customize_compiler(self.compiler)
|
| 100 |
+
if self.include_dirs:
|
| 101 |
+
self.compiler.set_include_dirs(self.include_dirs)
|
| 102 |
+
if self.libraries:
|
| 103 |
+
self.compiler.set_libraries(self.libraries)
|
| 104 |
+
if self.library_dirs:
|
| 105 |
+
self.compiler.set_library_dirs(self.library_dirs)
|
| 106 |
+
|
| 107 |
+
def _gen_temp_sourcefile(self, body, headers, lang):
|
| 108 |
+
filename = "_configtest" + LANG_EXT[lang]
|
| 109 |
+
with open(filename, "w") as file:
|
| 110 |
+
if headers:
|
| 111 |
+
for header in headers:
|
| 112 |
+
file.write("#include <%s>\n" % header)
|
| 113 |
+
file.write("\n")
|
| 114 |
+
file.write(body)
|
| 115 |
+
if body[-1] != "\n":
|
| 116 |
+
file.write("\n")
|
| 117 |
+
return filename
|
| 118 |
+
|
| 119 |
+
def _preprocess(self, body, headers, include_dirs, lang):
|
| 120 |
+
src = self._gen_temp_sourcefile(body, headers, lang)
|
| 121 |
+
out = "_configtest.i"
|
| 122 |
+
self.temp_files.extend([src, out])
|
| 123 |
+
self.compiler.preprocess(src, out, include_dirs=include_dirs)
|
| 124 |
+
return (src, out)
|
| 125 |
+
|
| 126 |
+
def _compile(self, body, headers, include_dirs, lang):
|
| 127 |
+
src = self._gen_temp_sourcefile(body, headers, lang)
|
| 128 |
+
if self.dump_source:
|
| 129 |
+
dump_file(src, "compiling '%s':" % src)
|
| 130 |
+
(obj,) = self.compiler.object_filenames([src])
|
| 131 |
+
self.temp_files.extend([src, obj])
|
| 132 |
+
self.compiler.compile([src], include_dirs=include_dirs)
|
| 133 |
+
return (src, obj)
|
| 134 |
+
|
| 135 |
+
def _link(self, body, headers, include_dirs, libraries, library_dirs,
|
| 136 |
+
lang):
|
| 137 |
+
(src, obj) = self._compile(body, headers, include_dirs, lang)
|
| 138 |
+
prog = os.path.splitext(os.path.basename(src))[0]
|
| 139 |
+
self.compiler.link_executable([obj], prog,
|
| 140 |
+
libraries=libraries,
|
| 141 |
+
library_dirs=library_dirs,
|
| 142 |
+
target_lang=lang)
|
| 143 |
+
|
| 144 |
+
if self.compiler.exe_extension is not None:
|
| 145 |
+
prog = prog + self.compiler.exe_extension
|
| 146 |
+
self.temp_files.append(prog)
|
| 147 |
+
|
| 148 |
+
return (src, obj, prog)
|
| 149 |
+
|
| 150 |
+
def _clean(self, *filenames):
|
| 151 |
+
if not filenames:
|
| 152 |
+
filenames = self.temp_files
|
| 153 |
+
self.temp_files = []
|
| 154 |
+
log.info("removing: %s", ' '.join(filenames))
|
| 155 |
+
for filename in filenames:
|
| 156 |
+
try:
|
| 157 |
+
os.remove(filename)
|
| 158 |
+
except OSError:
|
| 159 |
+
pass
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
# XXX these ignore the dry-run flag: what to do, what to do? even if
|
| 163 |
+
# you want a dry-run build, you still need some sort of configuration
|
| 164 |
+
# info. My inclination is to make it up to the real config command to
|
| 165 |
+
# consult 'dry_run', and assume a default (minimal) configuration if
|
| 166 |
+
# true. The problem with trying to do it here is that you'd have to
|
| 167 |
+
# return either true or false from all the 'try' methods, neither of
|
| 168 |
+
# which is correct.
|
| 169 |
+
|
| 170 |
+
# XXX need access to the header search path and maybe default macros.
|
| 171 |
+
|
| 172 |
+
def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
|
| 173 |
+
"""Construct a source file from 'body' (a string containing lines
|
| 174 |
+
of C/C++ code) and 'headers' (a list of header files to include)
|
| 175 |
+
and run it through the preprocessor. Return true if the
|
| 176 |
+
preprocessor succeeded, false if there were any errors.
|
| 177 |
+
('body' probably isn't of much use, but what the heck.)
|
| 178 |
+
"""
|
| 179 |
+
from distutils.ccompiler import CompileError
|
| 180 |
+
self._check_compiler()
|
| 181 |
+
ok = True
|
| 182 |
+
try:
|
| 183 |
+
self._preprocess(body, headers, include_dirs, lang)
|
| 184 |
+
except CompileError:
|
| 185 |
+
ok = False
|
| 186 |
+
|
| 187 |
+
self._clean()
|
| 188 |
+
return ok
|
| 189 |
+
|
| 190 |
+
def search_cpp(self, pattern, body=None, headers=None, include_dirs=None,
|
| 191 |
+
lang="c"):
|
| 192 |
+
"""Construct a source file (just like 'try_cpp()'), run it through
|
| 193 |
+
the preprocessor, and return true if any line of the output matches
|
| 194 |
+
'pattern'. 'pattern' should either be a compiled regex object or a
|
| 195 |
+
string containing a regex. If both 'body' and 'headers' are None,
|
| 196 |
+
preprocesses an empty file -- which can be useful to determine the
|
| 197 |
+
symbols the preprocessor and compiler set by default.
|
| 198 |
+
"""
|
| 199 |
+
self._check_compiler()
|
| 200 |
+
src, out = self._preprocess(body, headers, include_dirs, lang)
|
| 201 |
+
|
| 202 |
+
if isinstance(pattern, str):
|
| 203 |
+
pattern = re.compile(pattern)
|
| 204 |
+
|
| 205 |
+
with open(out) as file:
|
| 206 |
+
match = False
|
| 207 |
+
while True:
|
| 208 |
+
line = file.readline()
|
| 209 |
+
if line == '':
|
| 210 |
+
break
|
| 211 |
+
if pattern.search(line):
|
| 212 |
+
match = True
|
| 213 |
+
break
|
| 214 |
+
|
| 215 |
+
self._clean()
|
| 216 |
+
return match
|
| 217 |
+
|
| 218 |
+
def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
|
| 219 |
+
"""Try to compile a source file built from 'body' and 'headers'.
|
| 220 |
+
Return true on success, false otherwise.
|
| 221 |
+
"""
|
| 222 |
+
from distutils.ccompiler import CompileError
|
| 223 |
+
self._check_compiler()
|
| 224 |
+
try:
|
| 225 |
+
self._compile(body, headers, include_dirs, lang)
|
| 226 |
+
ok = True
|
| 227 |
+
except CompileError:
|
| 228 |
+
ok = False
|
| 229 |
+
|
| 230 |
+
log.info(ok and "success!" or "failure.")
|
| 231 |
+
self._clean()
|
| 232 |
+
return ok
|
| 233 |
+
|
| 234 |
+
def try_link(self, body, headers=None, include_dirs=None, libraries=None,
|
| 235 |
+
library_dirs=None, lang="c"):
|
| 236 |
+
"""Try to compile and link a source file, built from 'body' and
|
| 237 |
+
'headers', to executable form. Return true on success, false
|
| 238 |
+
otherwise.
|
| 239 |
+
"""
|
| 240 |
+
from distutils.ccompiler import CompileError, LinkError
|
| 241 |
+
self._check_compiler()
|
| 242 |
+
try:
|
| 243 |
+
self._link(body, headers, include_dirs,
|
| 244 |
+
libraries, library_dirs, lang)
|
| 245 |
+
ok = True
|
| 246 |
+
except (CompileError, LinkError):
|
| 247 |
+
ok = False
|
| 248 |
+
|
| 249 |
+
log.info(ok and "success!" or "failure.")
|
| 250 |
+
self._clean()
|
| 251 |
+
return ok
|
| 252 |
+
|
| 253 |
+
def try_run(self, body, headers=None, include_dirs=None, libraries=None,
|
| 254 |
+
library_dirs=None, lang="c"):
|
| 255 |
+
"""Try to compile, link to an executable, and run a program
|
| 256 |
+
built from 'body' and 'headers'. Return true on success, false
|
| 257 |
+
otherwise.
|
| 258 |
+
"""
|
| 259 |
+
from distutils.ccompiler import CompileError, LinkError
|
| 260 |
+
self._check_compiler()
|
| 261 |
+
try:
|
| 262 |
+
src, obj, exe = self._link(body, headers, include_dirs,
|
| 263 |
+
libraries, library_dirs, lang)
|
| 264 |
+
self.spawn([exe])
|
| 265 |
+
ok = True
|
| 266 |
+
except (CompileError, LinkError, DistutilsExecError):
|
| 267 |
+
ok = False
|
| 268 |
+
|
| 269 |
+
log.info(ok and "success!" or "failure.")
|
| 270 |
+
self._clean()
|
| 271 |
+
return ok
|
| 272 |
+
|
| 273 |
+
|
| 274 |
+
# -- High-level methods --------------------------------------------
|
| 275 |
+
# (these are the ones that are actually likely to be useful
|
| 276 |
+
# when implementing a real-world config command!)
|
| 277 |
+
|
| 278 |
+
def check_func(self, func, headers=None, include_dirs=None,
|
| 279 |
+
libraries=None, library_dirs=None, decl=0, call=0):
|
| 280 |
+
"""Determine if function 'func' is available by constructing a
|
| 281 |
+
source file that refers to 'func', and compiles and links it.
|
| 282 |
+
If everything succeeds, returns true; otherwise returns false.
|
| 283 |
+
|
| 284 |
+
The constructed source file starts out by including the header
|
| 285 |
+
files listed in 'headers'. If 'decl' is true, it then declares
|
| 286 |
+
'func' (as "int func()"); you probably shouldn't supply 'headers'
|
| 287 |
+
and set 'decl' true in the same call, or you might get errors about
|
| 288 |
+
a conflicting declarations for 'func'. Finally, the constructed
|
| 289 |
+
'main()' function either references 'func' or (if 'call' is true)
|
| 290 |
+
calls it. 'libraries' and 'library_dirs' are used when
|
| 291 |
+
linking.
|
| 292 |
+
"""
|
| 293 |
+
self._check_compiler()
|
| 294 |
+
body = []
|
| 295 |
+
if decl:
|
| 296 |
+
body.append("int %s ();" % func)
|
| 297 |
+
body.append("int main () {")
|
| 298 |
+
if call:
|
| 299 |
+
body.append(" %s();" % func)
|
| 300 |
+
else:
|
| 301 |
+
body.append(" %s;" % func)
|
| 302 |
+
body.append("}")
|
| 303 |
+
body = "\n".join(body) + "\n"
|
| 304 |
+
|
| 305 |
+
return self.try_link(body, headers, include_dirs,
|
| 306 |
+
libraries, library_dirs)
|
| 307 |
+
|
| 308 |
+
def check_lib(self, library, library_dirs=None, headers=None,
|
| 309 |
+
include_dirs=None, other_libraries=[]):
|
| 310 |
+
"""Determine if 'library' is available to be linked against,
|
| 311 |
+
without actually checking that any particular symbols are provided
|
| 312 |
+
by it. 'headers' will be used in constructing the source file to
|
| 313 |
+
be compiled, but the only effect of this is to check if all the
|
| 314 |
+
header files listed are available. Any libraries listed in
|
| 315 |
+
'other_libraries' will be included in the link, in case 'library'
|
| 316 |
+
has symbols that depend on other libraries.
|
| 317 |
+
"""
|
| 318 |
+
self._check_compiler()
|
| 319 |
+
return self.try_link("int main (void) { }", headers, include_dirs,
|
| 320 |
+
[library] + other_libraries, library_dirs)
|
| 321 |
+
|
| 322 |
+
def check_header(self, header, include_dirs=None, library_dirs=None,
|
| 323 |
+
lang="c"):
|
| 324 |
+
"""Determine if the system header file named by 'header_file'
|
| 325 |
+
exists and can be found by the preprocessor; return true if so,
|
| 326 |
+
false otherwise.
|
| 327 |
+
"""
|
| 328 |
+
return self.try_cpp(body="/* No body */", headers=[header],
|
| 329 |
+
include_dirs=include_dirs)
|
| 330 |
+
|
| 331 |
+
def dump_file(filename, head=None):
|
| 332 |
+
"""Dumps a file content into log.info.
|
| 333 |
+
|
| 334 |
+
If head is not None, will be dumped before the file content.
|
| 335 |
+
"""
|
| 336 |
+
if head is None:
|
| 337 |
+
log.info('%s', filename)
|
| 338 |
+
else:
|
| 339 |
+
log.info(head)
|
| 340 |
+
file = open(filename)
|
| 341 |
+
try:
|
| 342 |
+
log.info(file.read())
|
| 343 |
+
finally:
|
| 344 |
+
file.close()
|
llava/lib/python3.10/distutils/command/install_headers.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.install_headers
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'install_headers' command, to install C/C++ header
|
| 4 |
+
files to the Python include directory."""
|
| 5 |
+
|
| 6 |
+
from distutils.core import Command
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
# XXX force is never used
|
| 10 |
+
class install_headers(Command):
|
| 11 |
+
|
| 12 |
+
description = "install C/C++ header files"
|
| 13 |
+
|
| 14 |
+
user_options = [('install-dir=', 'd',
|
| 15 |
+
"directory to install header files to"),
|
| 16 |
+
('force', 'f',
|
| 17 |
+
"force installation (overwrite existing files)"),
|
| 18 |
+
]
|
| 19 |
+
|
| 20 |
+
boolean_options = ['force']
|
| 21 |
+
|
| 22 |
+
def initialize_options(self):
|
| 23 |
+
self.install_dir = None
|
| 24 |
+
self.force = 0
|
| 25 |
+
self.outfiles = []
|
| 26 |
+
|
| 27 |
+
def finalize_options(self):
|
| 28 |
+
self.set_undefined_options('install',
|
| 29 |
+
('install_headers', 'install_dir'),
|
| 30 |
+
('force', 'force'))
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def run(self):
|
| 34 |
+
headers = self.distribution.headers
|
| 35 |
+
if not headers:
|
| 36 |
+
return
|
| 37 |
+
|
| 38 |
+
self.mkpath(self.install_dir)
|
| 39 |
+
for header in headers:
|
| 40 |
+
(out, _) = self.copy_file(header, self.install_dir)
|
| 41 |
+
self.outfiles.append(out)
|
| 42 |
+
|
| 43 |
+
def get_inputs(self):
|
| 44 |
+
return self.distribution.headers or []
|
| 45 |
+
|
| 46 |
+
def get_outputs(self):
|
| 47 |
+
return self.outfiles
|
llava/lib/python3.10/distutils/command/install_lib.py
ADDED
|
@@ -0,0 +1,217 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.install_lib
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'install_lib' command
|
| 4 |
+
(install all Python modules)."""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import importlib.util
|
| 8 |
+
import sys
|
| 9 |
+
|
| 10 |
+
from distutils.core import Command
|
| 11 |
+
from distutils.errors import DistutilsOptionError
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# Extension for Python source files.
|
| 15 |
+
PYTHON_SOURCE_EXTENSION = ".py"
|
| 16 |
+
|
| 17 |
+
class install_lib(Command):
|
| 18 |
+
|
| 19 |
+
description = "install all Python modules (extensions and pure Python)"
|
| 20 |
+
|
| 21 |
+
# The byte-compilation options are a tad confusing. Here are the
|
| 22 |
+
# possible scenarios:
|
| 23 |
+
# 1) no compilation at all (--no-compile --no-optimize)
|
| 24 |
+
# 2) compile .pyc only (--compile --no-optimize; default)
|
| 25 |
+
# 3) compile .pyc and "opt-1" .pyc (--compile --optimize)
|
| 26 |
+
# 4) compile "opt-1" .pyc only (--no-compile --optimize)
|
| 27 |
+
# 5) compile .pyc and "opt-2" .pyc (--compile --optimize-more)
|
| 28 |
+
# 6) compile "opt-2" .pyc only (--no-compile --optimize-more)
|
| 29 |
+
#
|
| 30 |
+
# The UI for this is two options, 'compile' and 'optimize'.
|
| 31 |
+
# 'compile' is strictly boolean, and only decides whether to
|
| 32 |
+
# generate .pyc files. 'optimize' is three-way (0, 1, or 2), and
|
| 33 |
+
# decides both whether to generate .pyc files and what level of
|
| 34 |
+
# optimization to use.
|
| 35 |
+
|
| 36 |
+
user_options = [
|
| 37 |
+
('install-dir=', 'd', "directory to install to"),
|
| 38 |
+
('build-dir=','b', "build directory (where to install from)"),
|
| 39 |
+
('force', 'f', "force installation (overwrite existing files)"),
|
| 40 |
+
('compile', 'c', "compile .py to .pyc [default]"),
|
| 41 |
+
('no-compile', None, "don't compile .py files"),
|
| 42 |
+
('optimize=', 'O',
|
| 43 |
+
"also compile with optimization: -O1 for \"python -O\", "
|
| 44 |
+
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
|
| 45 |
+
('skip-build', None, "skip the build steps"),
|
| 46 |
+
]
|
| 47 |
+
|
| 48 |
+
boolean_options = ['force', 'compile', 'skip-build']
|
| 49 |
+
negative_opt = {'no-compile' : 'compile'}
|
| 50 |
+
|
| 51 |
+
def initialize_options(self):
|
| 52 |
+
# let the 'install' command dictate our installation directory
|
| 53 |
+
self.install_dir = None
|
| 54 |
+
self.build_dir = None
|
| 55 |
+
self.force = 0
|
| 56 |
+
self.compile = None
|
| 57 |
+
self.optimize = None
|
| 58 |
+
self.skip_build = None
|
| 59 |
+
|
| 60 |
+
def finalize_options(self):
|
| 61 |
+
# Get all the information we need to install pure Python modules
|
| 62 |
+
# from the umbrella 'install' command -- build (source) directory,
|
| 63 |
+
# install (target) directory, and whether to compile .py files.
|
| 64 |
+
self.set_undefined_options('install',
|
| 65 |
+
('build_lib', 'build_dir'),
|
| 66 |
+
('install_lib', 'install_dir'),
|
| 67 |
+
('force', 'force'),
|
| 68 |
+
('compile', 'compile'),
|
| 69 |
+
('optimize', 'optimize'),
|
| 70 |
+
('skip_build', 'skip_build'),
|
| 71 |
+
)
|
| 72 |
+
|
| 73 |
+
if self.compile is None:
|
| 74 |
+
self.compile = True
|
| 75 |
+
if self.optimize is None:
|
| 76 |
+
self.optimize = False
|
| 77 |
+
|
| 78 |
+
if not isinstance(self.optimize, int):
|
| 79 |
+
try:
|
| 80 |
+
self.optimize = int(self.optimize)
|
| 81 |
+
if self.optimize not in (0, 1, 2):
|
| 82 |
+
raise AssertionError
|
| 83 |
+
except (ValueError, AssertionError):
|
| 84 |
+
raise DistutilsOptionError("optimize must be 0, 1, or 2")
|
| 85 |
+
|
| 86 |
+
def run(self):
|
| 87 |
+
# Make sure we have built everything we need first
|
| 88 |
+
self.build()
|
| 89 |
+
|
| 90 |
+
# Install everything: simply dump the entire contents of the build
|
| 91 |
+
# directory to the installation directory (that's the beauty of
|
| 92 |
+
# having a build directory!)
|
| 93 |
+
outfiles = self.install()
|
| 94 |
+
|
| 95 |
+
# (Optionally) compile .py to .pyc
|
| 96 |
+
if outfiles is not None and self.distribution.has_pure_modules():
|
| 97 |
+
self.byte_compile(outfiles)
|
| 98 |
+
|
| 99 |
+
# -- Top-level worker functions ------------------------------------
|
| 100 |
+
# (called from 'run()')
|
| 101 |
+
|
| 102 |
+
def build(self):
|
| 103 |
+
if not self.skip_build:
|
| 104 |
+
if self.distribution.has_pure_modules():
|
| 105 |
+
self.run_command('build_py')
|
| 106 |
+
if self.distribution.has_ext_modules():
|
| 107 |
+
self.run_command('build_ext')
|
| 108 |
+
|
| 109 |
+
def install(self):
|
| 110 |
+
if os.path.isdir(self.build_dir):
|
| 111 |
+
outfiles = self.copy_tree(self.build_dir, self.install_dir)
|
| 112 |
+
else:
|
| 113 |
+
self.warn("'%s' does not exist -- no Python modules to install" %
|
| 114 |
+
self.build_dir)
|
| 115 |
+
return
|
| 116 |
+
return outfiles
|
| 117 |
+
|
| 118 |
+
def byte_compile(self, files):
|
| 119 |
+
if sys.dont_write_bytecode:
|
| 120 |
+
self.warn('byte-compiling is disabled, skipping.')
|
| 121 |
+
return
|
| 122 |
+
|
| 123 |
+
from distutils.util import byte_compile
|
| 124 |
+
|
| 125 |
+
# Get the "--root" directory supplied to the "install" command,
|
| 126 |
+
# and use it as a prefix to strip off the purported filename
|
| 127 |
+
# encoded in bytecode files. This is far from complete, but it
|
| 128 |
+
# should at least generate usable bytecode in RPM distributions.
|
| 129 |
+
install_root = self.get_finalized_command('install').root
|
| 130 |
+
|
| 131 |
+
if self.compile:
|
| 132 |
+
byte_compile(files, optimize=0,
|
| 133 |
+
force=self.force, prefix=install_root,
|
| 134 |
+
dry_run=self.dry_run)
|
| 135 |
+
if self.optimize > 0:
|
| 136 |
+
byte_compile(files, optimize=self.optimize,
|
| 137 |
+
force=self.force, prefix=install_root,
|
| 138 |
+
verbose=self.verbose, dry_run=self.dry_run)
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
# -- Utility methods -----------------------------------------------
|
| 142 |
+
|
| 143 |
+
def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
|
| 144 |
+
if not has_any:
|
| 145 |
+
return []
|
| 146 |
+
|
| 147 |
+
build_cmd = self.get_finalized_command(build_cmd)
|
| 148 |
+
build_files = build_cmd.get_outputs()
|
| 149 |
+
build_dir = getattr(build_cmd, cmd_option)
|
| 150 |
+
|
| 151 |
+
prefix_len = len(build_dir) + len(os.sep)
|
| 152 |
+
outputs = []
|
| 153 |
+
for file in build_files:
|
| 154 |
+
outputs.append(os.path.join(output_dir, file[prefix_len:]))
|
| 155 |
+
|
| 156 |
+
return outputs
|
| 157 |
+
|
| 158 |
+
def _bytecode_filenames(self, py_filenames):
    """Return the bytecode cache paths corresponding to 'py_filenames'.

    Entries that are not .py sources (e.g. package data copied by
    build_py) are skipped.
    """
    bytecode_files = []
    for py_file in py_filenames:
        # Since build_py handles package data installation, the
        # list of outputs can contain more than just .py files.
        # Make sure we only report bytecode for the .py files.
        if os.path.splitext(os.path.normcase(py_file))[1] != PYTHON_SOURCE_EXTENSION:
            continue
        if self.compile:
            # Regular .pyc in the __pycache__ directory.
            bytecode_files.append(
                importlib.util.cache_from_source(py_file, optimization=''))
        if self.optimize > 0:
            # Optimized variant (.opt-N suffix).
            bytecode_files.append(
                importlib.util.cache_from_source(
                    py_file, optimization=self.optimize))
    return bytecode_files
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
# -- External interface --------------------------------------------
|
| 178 |
+
# (called by outsiders)
|
| 179 |
+
|
| 180 |
+
def get_outputs(self):
    """Return the list of files that would be installed if this command
    were actually run.  Not affected by the "dry-run" flag or whether
    modules have actually been built yet.
    """
    dist = self.distribution

    pure_outputs = self._mutate_outputs(dist.has_pure_modules(),
                                        'build_py', 'build_lib',
                                        self.install_dir)
    # Bytecode files are reported only when compilation is enabled.
    if self.compile:
        bytecode_outputs = self._bytecode_filenames(pure_outputs)
    else:
        bytecode_outputs = []

    ext_outputs = self._mutate_outputs(dist.has_ext_modules(),
                                       'build_ext', 'build_lib',
                                       self.install_dir)

    return pure_outputs + bytecode_outputs + ext_outputs
|
| 200 |
+
|
| 201 |
+
def get_inputs(self):
    """Get the list of files that are input to this command, ie. the
    files that get installed as they are named in the build tree.
    The files in this list correspond one-to-one to the output
    filenames returned by 'get_outputs()'.
    """
    inputs = []
    dist = self.distribution

    # Pure-Python modules staged by build_py.
    if dist.has_pure_modules():
        inputs.extend(self.get_finalized_command('build_py').get_outputs())

    # Compiled extension modules staged by build_ext.
    if dist.has_ext_modules():
        inputs.extend(self.get_finalized_command('build_ext').get_outputs())

    return inputs
|
llava/lib/python3.10/distutils/command/install_scripts.py
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.install_scripts
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'install_scripts' command, for installing
|
| 4 |
+
Python scripts."""
|
| 5 |
+
|
| 6 |
+
# contributed by Bastian Kleineidam
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
from distutils.core import Command
|
| 10 |
+
from distutils import log
|
| 11 |
+
from stat import ST_MODE
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class install_scripts(Command):
    """Install already-built scripts into the target directory and, on
    POSIX systems, mark every installed script as executable.
    """

    description = "install scripts (Python or otherwise)"

    user_options = [
        ('install-dir=', 'd', "directory to install scripts to"),
        ('build-dir=','b', "build directory (where to install from)"),
        ('force', 'f', "force installation (overwrite existing files)"),
        ('skip-build', None, "skip the build steps"),
    ]

    boolean_options = ['force', 'skip-build']

    def initialize_options(self):
        # Everything starts unset; finalize_options() pulls the real
        # values from the 'build' and 'install' commands.
        self.install_dir = None
        self.build_dir = None
        self.skip_build = None
        self.force = 0

    def finalize_options(self):
        self.set_undefined_options('build', ('build_scripts', 'build_dir'))
        self.set_undefined_options('install',
                                   ('install_scripts', 'install_dir'),
                                   ('force', 'force'),
                                   ('skip_build', 'skip_build'),
                                  )

    def run(self):
        if not self.skip_build:
            self.run_command('build_scripts')

        self.outfiles = self.copy_tree(self.build_dir, self.install_dir)

        if os.name != 'posix':
            return

        # Set the executable bits (owner, group, and world) on
        # all the scripts we just installed.
        for script in self.get_outputs():
            if self.dry_run:
                log.info("changing mode of %s", script)
                continue
            mode = ((os.stat(script)[ST_MODE]) | 0o555) & 0o7777
            log.info("changing mode of %s to %o", script, mode)
            os.chmod(script, mode)

    def get_inputs(self):
        # The scripts named in the setup script are the inputs.
        return self.distribution.scripts or []

    def get_outputs(self):
        # Populated by run(); falls back to [] when 'outfiles' is falsy.
        return self.outfiles or []
|
llava/lib/python3.10/distutils/command/register.py
ADDED
|
@@ -0,0 +1,304 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.register
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'register' command (register with the repository).
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
# created 2002/10/21, Richard Jones
|
| 7 |
+
|
| 8 |
+
import getpass
|
| 9 |
+
import io
|
| 10 |
+
import urllib.parse, urllib.request
|
| 11 |
+
from warnings import warn
|
| 12 |
+
|
| 13 |
+
from distutils.core import PyPIRCCommand
|
| 14 |
+
from distutils.errors import *
|
| 15 |
+
from distutils import log
|
| 16 |
+
|
| 17 |
+
class register(PyPIRCCommand):
    # Register the distribution's metadata with a package index server
    # (PyPI by default), reading credentials from .pypirc when present.

    description = ("register the distribution with the Python package index")
    user_options = PyPIRCCommand.user_options + [
        ('list-classifiers', None,
         'list the valid Trove classifiers'),
        ('strict', None ,
         'Will stop the registering if the meta-data are not fully compliant')
        ]
    # NOTE(review): 'verify' is listed here but no such option is declared
    # above -- presumably a leftover; confirm against PyPIRCCommand.
    boolean_options = PyPIRCCommand.boolean_options + [
        'verify', 'list-classifiers', 'strict']

    # The 'check' command always runs first to validate the metadata.
    sub_commands = [('check', lambda self: True)]

    def initialize_options(self):
        PyPIRCCommand.initialize_options(self)
        self.list_classifiers = 0
        self.strict = 0

    def finalize_options(self):
        PyPIRCCommand.finalize_options(self)
        # setting options for the `check` subcommand
        check_options = {'strict': ('register', self.strict),
                         'restructuredtext': ('register', 1)}
        self.distribution.command_options['check'] = check_options

    def run(self):
        # NOTE(review): finalize_options() is invoked again here even
        # though the framework normally finalizes before run().
        self.finalize_options()
        self._set_config()

        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        # Dry-run only verifies; --list-classifiers only lists; otherwise
        # actually register the metadata with the server.
        if self.dry_run:
            self.verify_metadata()
        elif self.list_classifiers:
            self.classifiers()
        else:
            self.send_metadata()

    def check_metadata(self):
        """Deprecated API."""
        warn("distutils.command.register.check_metadata is deprecated, \
              use the check command instead", PendingDeprecationWarning)
        check = self.distribution.get_command_obj('check')
        check.ensure_finalized()
        check.strict = self.strict
        check.restructuredtext = 1
        check.run()

    def _set_config(self):
        ''' Reads the configuration file and set attributes.
        '''
        config = self._read_pypirc()
        if config != {}:
            self.username = config['username']
            self.password = config['password']
            self.repository = config['repository']
            self.realm = config['realm']
            self.has_config = True
        else:
            # No .pypirc: only the default repository (or its 'pypi'
            # alias) is acceptable.
            if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
                raise ValueError('%s not found in .pypirc' % self.repository)
            if self.repository == 'pypi':
                self.repository = self.DEFAULT_REPOSITORY
            self.has_config = False

    def classifiers(self):
        ''' Fetch the list of classifiers from the server.
        '''
        url = self.repository+'?:action=list_classifiers'
        response = urllib.request.urlopen(url)
        log.info(self._read_pypi_response(response))

    def verify_metadata(self):
        ''' Send the metadata to the package index server to be checked.
        '''
        # send the info to the server and report the result
        (code, result) = self.post_to_server(self.build_post_data('verify'))
        log.info('Server response (%s): %s', code, result)

    def send_metadata(self):
        ''' Send the metadata to the package index server.

            Well, do the following:
            1. figure who the user is, and then
            2. send the data as a Basic auth'ed POST.

            First we try to read the username/password from $HOME/.pypirc,
            which is a ConfigParser-formatted file with a section
            [distutils] containing username and password entries (both
            in clear text). Eg:

                [distutils]
                index-servers =
                    pypi

                [pypi]
                username: fred
                password: sekrit

            Otherwise, to figure who the user is, we offer the user three
            choices:

             1. use existing login,
             2. register as a new user, or
             3. set the password to a random string and email the user.

        '''
        # see if we can short-cut and get the username/password from the
        # config
        if self.has_config:
            choice = '1'
            username = self.username
            password = self.password
        else:
            choice = 'x'
            username = password = ''

        # get the user's login info
        choices = '1 2 3 4'.split()
        while choice not in choices:
            self.announce('''\
We need to know who you are, so please choose either:
 1. use your existing login,
 2. register as a new user,
 3. have the server generate a new password for you (and email it to you), or
 4. quit
Your selection [default 1]: ''', log.INFO)
            choice = input()
            if not choice:
                choice = '1'
            elif choice not in choices:
                print('Please choose one of the four options!')

        if choice == '1':
            # get the username and password
            while not username:
                username = input('Username: ')
            while not password:
                password = getpass.getpass('Password: ')

            # set up the authentication
            auth = urllib.request.HTTPPasswordMgr()
            host = urllib.parse.urlparse(self.repository)[1]
            auth.add_password(self.realm, host, username, password)
            # send the info to the server and report the result
            code, result = self.post_to_server(self.build_post_data('submit'),
                auth)
            self.announce('Server response (%s): %s' % (code, result),
                          log.INFO)

            # possibly save the login
            if code == 200:
                if self.has_config:
                    # sharing the password in the distribution instance
                    # so the upload command can reuse it
                    self.distribution.password = password
                else:
                    self.announce(('I can store your PyPI login so future '
                                   'submissions will be faster.'), log.INFO)
                    self.announce('(the login will be stored in %s)' % \
                                  self._get_rc_file(), log.INFO)
                    choice = 'X'
                    while choice.lower() not in 'yn':
                        choice = input('Save your login (y/N)?')
                        if not choice:
                            choice = 'n'
                    if choice.lower() == 'y':
                        self._store_pypirc(username, password)

        elif choice == '2':
            # Register a brand-new user: collect name, password (with
            # confirmation) and email, then POST the ':action: user' form.
            data = {':action': 'user'}
            data['name'] = data['password'] = data['email'] = ''
            data['confirm'] = None
            while not data['name']:
                data['name'] = input('Username: ')
            while data['password'] != data['confirm']:
                while not data['password']:
                    data['password'] = getpass.getpass('Password: ')
                while not data['confirm']:
                    data['confirm'] = getpass.getpass(' Confirm: ')
                if data['password'] != data['confirm']:
                    data['password'] = ''
                    data['confirm'] = None
                    print("Password and confirm don't match!")
            while not data['email']:
                data['email'] = input('   EMail: ')
            code, result = self.post_to_server(data)
            if code != 200:
                log.info('Server response (%s): %s', code, result)
            else:
                log.info('You will receive an email shortly.')
                log.info(('Follow the instructions in it to '
                          'complete registration.'))
        elif choice == '3':
            # Ask the server to reset the password for an email address.
            data = {':action': 'password_reset'}
            data['email'] = ''
            while not data['email']:
                data['email'] = input('Your email address: ')
            code, result = self.post_to_server(data)
            log.info('Server response (%s): %s', code, result)

    def build_post_data(self, action):
        # figure the data to send - the metadata plus some additional
        # information used by the package server
        meta = self.distribution.metadata
        data = {
            ':action': action,
            'metadata_version' : '1.0',
            'name': meta.get_name(),
            'version': meta.get_version(),
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
        }
        # Any PEP 314 field bumps the declared metadata version.
        if data['provides'] or data['requires'] or data['obsoletes']:
            data['metadata_version'] = '1.1'
        return data

    def post_to_server(self, data, auth=None):
        ''' Post a query to the server, and return a string response.
        '''
        if 'name' in data:
            self.announce('Registering %s to %s' % (data['name'],
                                                    self.repository),
                                                    log.INFO)
        # Build up the MIME payload for the urllib2 POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = io.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) not in (type([]), type( () )):
                value = [value]
            for value in value:
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue().encode("utf-8")

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary,
            'Content-length': str(len(body))
        }
        req = urllib.request.Request(self.repository, body, headers)

        # handle HTTP and include the Basic Auth handler
        opener = urllib.request.build_opener(
            urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
        )
        data = ''
        try:
            result = opener.open(req)
        except urllib.error.HTTPError as e:
            if self.show_response:
                data = e.fp.read()
            result = e.code, e.msg
        except urllib.error.URLError as e:
            result = 500, str(e)
        else:
            if self.show_response:
                data = self._read_pypi_response(result)
            result = 200, 'OK'
        if self.show_response:
            # Dump the raw server response between rule lines.
            msg = '\n'.join(('-' * 75, data, '-' * 75))
            self.announce(msg, log.INFO)
        return result
|
llava/lib/python3.10/distutils/command/sdist.py
ADDED
|
@@ -0,0 +1,494 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.sdist
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'sdist' command (create a source distribution)."""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import sys
|
| 7 |
+
from glob import glob
|
| 8 |
+
from warnings import warn
|
| 9 |
+
|
| 10 |
+
from distutils.core import Command
|
| 11 |
+
from distutils import dir_util
|
| 12 |
+
from distutils import file_util
|
| 13 |
+
from distutils import archive_util
|
| 14 |
+
from distutils.text_file import TextFile
|
| 15 |
+
from distutils.filelist import FileList
|
| 16 |
+
from distutils import log
|
| 17 |
+
from distutils.util import convert_path
|
| 18 |
+
from distutils.errors import DistutilsTemplateError, DistutilsOptionError
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def show_formats():
    """Print all possible values for the 'formats' option (used by
    the "--help-formats" command-line option).
    """
    from distutils.fancy_getopt import FancyGetopt
    from distutils.archive_util import ARCHIVE_FORMATS
    # Build one (long-option, short-option, help) entry per archive
    # format, sorted by name for stable output.
    entries = sorted(
        ("formats=" + name, None, spec[2])
        for name, spec in ARCHIVE_FORMATS.items())
    FancyGetopt(entries).print_help(
        "List of available source distribution formats:")
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class sdist(Command):
|
| 37 |
+
|
| 38 |
+
description = "create a source distribution (tarball, zip file, etc.)"
|
| 39 |
+
|
| 40 |
+
def checking_metadata(self):
    """Callable used for the check sub-command.

    Placed here so user_options can view it"""
    # Truthy when --metadata-check is enabled (the default).
    return self.metadata_check
|
| 45 |
+
|
| 46 |
+
# Command-line options understood by 'setup.py sdist'.
user_options = [
    ('template=', 't',
     "name of manifest template file [default: MANIFEST.in]"),
    ('manifest=', 'm',
     "name of manifest file [default: MANIFEST]"),
    ('use-defaults', None,
     "include the default file set in the manifest "
     "[default; disable with --no-defaults]"),
    ('no-defaults', None,
     "don't include the default file set"),
    ('prune', None,
     "specifically exclude files/directories that should not be "
     "distributed (build tree, RCS/CVS dirs, etc.) "
     "[default; disable with --no-prune]"),
    ('no-prune', None,
     "don't automatically exclude anything"),
    ('manifest-only', 'o',
     "just regenerate the manifest and then stop "
     "(implies --force-manifest)"),
    ('force-manifest', 'f',
     "forcibly regenerate the manifest and carry on as usual. "
     "Deprecated: now the manifest is always regenerated."),
    ('formats=', None,
     "formats for source distribution (comma-separated list)"),
    ('keep-temp', 'k',
     "keep the distribution tree around after creating " +
     "archive file(s)"),
    ('dist-dir=', 'd',
     "directory to put the source distribution archive(s) in "
     "[default: dist]"),
    ('metadata-check', None,
     "Ensure that all required elements of meta-data "
     "are supplied. Warn if any missing. [default]"),
    ('owner=', 'u',
     "Owner name used when creating a tar file [default: current user]"),
    ('group=', 'g',
     "Group name used when creating a tar file [default: current group]"),
    ]

# Flags (no argument) among the options above.
boolean_options = ['use-defaults', 'prune',
                   'manifest-only', 'force-manifest',
                   'keep-temp', 'metadata-check']

help_options = [
    ('help-formats', None,
     "list available distribution formats", show_formats),
    ]

# --no-X options that clear the corresponding positive flag.
negative_opt = {'no-defaults': 'use-defaults',
                'no-prune': 'prune' }

# Run 'check' first, gated on the --metadata-check flag.
sub_commands = [('check', checking_metadata)]

# Accepted README spellings for the default file set.
READMES = ('README', 'README.txt', 'README.rst')
|
| 100 |
+
|
| 101 |
+
def initialize_options(self):
    """Set every option to its pre-parse default."""
    # Names of the manifest template and the manifest itself;
    # resolved to MANIFEST.in / MANIFEST in finalize_options().
    self.template = None
    self.manifest = None

    # Behaviour toggles that default to "on".
    self.use_defaults = 1
    self.prune = 1
    self.metadata_check = 1

    # Behaviour toggles that default to "off".
    self.manifest_only = 0
    self.force_manifest = 0
    self.keep_temp = 0

    # Archive settings.
    self.formats = ['gztar']
    self.dist_dir = None
    self.archive_files = None
    self.owner = None
    self.group = None
|
| 123 |
+
|
| 124 |
+
def finalize_options(self):
    # Resolve unset options to their defaults and validate 'formats'.
    if self.manifest is None:
        self.manifest = "MANIFEST"
    if self.template is None:
        self.template = "MANIFEST.in"

    # Accept 'formats' as either a comma-separated string or a list.
    self.ensure_string_list('formats')

    bad_format = archive_util.check_archive_formats(self.formats)
    if bad_format:
        raise DistutilsOptionError(
              "unknown archive format '%s'" % bad_format)

    if self.dist_dir is None:
        self.dist_dir = "dist"
|
| 139 |
+
|
| 140 |
+
def run(self):
    # 'filelist' contains the list of files that will make up the
    # manifest
    self.filelist = FileList()

    # Run sub commands (normally just 'check', which validates the
    # distribution metadata).
    for cmd_name in self.get_sub_commands():
        self.run_command(cmd_name)

    # Do whatever it takes to get the list of files to process
    # (process the manifest template, read an existing manifest,
    # whatever).  File list is accumulated in 'self.filelist'.
    self.get_file_list()

    # If user just wanted us to regenerate the manifest, stop now.
    if self.manifest_only:
        return

    # Otherwise, go ahead and create the source distribution tarball,
    # or zipfile, or whatever.
    self.make_distribution()
|
| 161 |
+
|
| 162 |
+
def check_metadata(self):
    """Deprecated API."""
    warn("distutils.command.sdist.check_metadata is deprecated, \
          use the check command instead", PendingDeprecationWarning)
    # Delegate to the 'check' command, which replaced this method.
    check = self.distribution.get_command_obj('check')
    check.ensure_finalized()
    check.run()
|
| 169 |
+
|
| 170 |
+
def get_file_list(self):
    """Figure out the list of files to include in the source
    distribution, and put it in 'self.filelist'.  This might involve
    reading the manifest template (and writing the manifest), or just
    reading the manifest, or just using the default file set -- it all
    depends on the user's options.
    """
    # new behavior when using a template:
    # the file list is recalculated every time because
    # even if MANIFEST.in or setup.py are not changed
    # the user might have added some files in the tree that
    # need to be included.
    #
    # This makes --force the default and only behavior with templates.
    template_exists = os.path.isfile(self.template)
    if not template_exists and self._manifest_is_not_generated():
        # No template and a hand-written MANIFEST: trust it as-is.
        self.read_manifest()
        self.filelist.sort()
        self.filelist.remove_duplicates()
        return

    if not template_exists:
        self.warn(("manifest template '%s' does not exist " +
                   "(using default file list)") %
                  self.template)
    # Scan the source tree so include/exclude patterns have files
    # to match against.
    self.filelist.findall()

    if self.use_defaults:
        self.add_defaults()

    if template_exists:
        self.read_template()

    if self.prune:
        self.prune_file_list()

    self.filelist.sort()
    self.filelist.remove_duplicates()
    self.write_manifest()
|
| 209 |
+
|
| 210 |
+
def add_defaults(self):
    """Add all the default files to self.filelist:
      - README or README.txt
      - setup.py
      - test/test*.py
      - all pure Python modules mentioned in setup script
      - all files pointed by package_data (build_py)
      - all files defined in data_files.
      - all files defined as scripts.
      - all C sources listed as part of extensions or C libraries
        in the setup script (doesn't catch C headers!)
    Warns if (README or README.txt) or setup.py are missing; everything
    else is optional.
    """
    # Each helper appends one category of default files.
    self._add_defaults_standards()
    self._add_defaults_optional()
    self._add_defaults_python()
    self._add_defaults_data_files()
    self._add_defaults_ext()
    self._add_defaults_c_libs()
    self._add_defaults_scripts()
|
| 231 |
+
|
| 232 |
+
@staticmethod
|
| 233 |
+
def _cs_path_exists(fspath):
|
| 234 |
+
"""
|
| 235 |
+
Case-sensitive path existence check
|
| 236 |
+
|
| 237 |
+
>>> sdist._cs_path_exists(__file__)
|
| 238 |
+
True
|
| 239 |
+
>>> sdist._cs_path_exists(__file__.upper())
|
| 240 |
+
False
|
| 241 |
+
"""
|
| 242 |
+
if not os.path.exists(fspath):
|
| 243 |
+
return False
|
| 244 |
+
# make absolute so we always have a directory
|
| 245 |
+
abspath = os.path.abspath(fspath)
|
| 246 |
+
directory, filename = os.path.split(abspath)
|
| 247 |
+
return filename in os.listdir(directory)
|
| 248 |
+
|
| 249 |
+
def _add_defaults_standards(self):
|
| 250 |
+
standards = [self.READMES, self.distribution.script_name]
|
| 251 |
+
for fn in standards:
|
| 252 |
+
if isinstance(fn, tuple):
|
| 253 |
+
alts = fn
|
| 254 |
+
got_it = False
|
| 255 |
+
for fn in alts:
|
| 256 |
+
if self._cs_path_exists(fn):
|
| 257 |
+
got_it = True
|
| 258 |
+
self.filelist.append(fn)
|
| 259 |
+
break
|
| 260 |
+
|
| 261 |
+
if not got_it:
|
| 262 |
+
self.warn("standard file not found: should have one of " +
|
| 263 |
+
', '.join(alts))
|
| 264 |
+
else:
|
| 265 |
+
if self._cs_path_exists(fn):
|
| 266 |
+
self.filelist.append(fn)
|
| 267 |
+
else:
|
| 268 |
+
self.warn("standard file '%s' not found" % fn)
|
| 269 |
+
|
| 270 |
+
def _add_defaults_optional(self):
|
| 271 |
+
optional = ['test/test*.py', 'setup.cfg']
|
| 272 |
+
for pattern in optional:
|
| 273 |
+
files = filter(os.path.isfile, glob(pattern))
|
| 274 |
+
self.filelist.extend(files)
|
| 275 |
+
|
| 276 |
+
def _add_defaults_python(self):
|
| 277 |
+
# build_py is used to get:
|
| 278 |
+
# - python modules
|
| 279 |
+
# - files defined in package_data
|
| 280 |
+
build_py = self.get_finalized_command('build_py')
|
| 281 |
+
|
| 282 |
+
# getting python files
|
| 283 |
+
if self.distribution.has_pure_modules():
|
| 284 |
+
self.filelist.extend(build_py.get_source_files())
|
| 285 |
+
|
| 286 |
+
# getting package_data files
|
| 287 |
+
# (computed in build_py.data_files by build_py.finalize_options)
|
| 288 |
+
for pkg, src_dir, build_dir, filenames in build_py.data_files:
|
| 289 |
+
for filename in filenames:
|
| 290 |
+
self.filelist.append(os.path.join(src_dir, filename))
|
| 291 |
+
|
| 292 |
+
def _add_defaults_data_files(self):
|
| 293 |
+
# getting distribution.data_files
|
| 294 |
+
if self.distribution.has_data_files():
|
| 295 |
+
for item in self.distribution.data_files:
|
| 296 |
+
if isinstance(item, str):
|
| 297 |
+
# plain file
|
| 298 |
+
item = convert_path(item)
|
| 299 |
+
if os.path.isfile(item):
|
| 300 |
+
self.filelist.append(item)
|
| 301 |
+
else:
|
| 302 |
+
# a (dirname, filenames) tuple
|
| 303 |
+
dirname, filenames = item
|
| 304 |
+
for f in filenames:
|
| 305 |
+
f = convert_path(f)
|
| 306 |
+
if os.path.isfile(f):
|
| 307 |
+
self.filelist.append(f)
|
| 308 |
+
|
| 309 |
+
def _add_defaults_ext(self):
|
| 310 |
+
if self.distribution.has_ext_modules():
|
| 311 |
+
build_ext = self.get_finalized_command('build_ext')
|
| 312 |
+
self.filelist.extend(build_ext.get_source_files())
|
| 313 |
+
|
| 314 |
+
def _add_defaults_c_libs(self):
|
| 315 |
+
if self.distribution.has_c_libraries():
|
| 316 |
+
build_clib = self.get_finalized_command('build_clib')
|
| 317 |
+
self.filelist.extend(build_clib.get_source_files())
|
| 318 |
+
|
| 319 |
+
def _add_defaults_scripts(self):
|
| 320 |
+
if self.distribution.has_scripts():
|
| 321 |
+
build_scripts = self.get_finalized_command('build_scripts')
|
| 322 |
+
self.filelist.extend(build_scripts.get_source_files())
|
| 323 |
+
|
| 324 |
+
def read_template(self):
|
| 325 |
+
"""Read and parse manifest template file named by self.template.
|
| 326 |
+
|
| 327 |
+
(usually "MANIFEST.in") The parsing and processing is done by
|
| 328 |
+
'self.filelist', which updates itself accordingly.
|
| 329 |
+
"""
|
| 330 |
+
log.info("reading manifest template '%s'", self.template)
|
| 331 |
+
template = TextFile(self.template, strip_comments=1, skip_blanks=1,
|
| 332 |
+
join_lines=1, lstrip_ws=1, rstrip_ws=1,
|
| 333 |
+
collapse_join=1)
|
| 334 |
+
|
| 335 |
+
try:
|
| 336 |
+
while True:
|
| 337 |
+
line = template.readline()
|
| 338 |
+
if line is None: # end of file
|
| 339 |
+
break
|
| 340 |
+
|
| 341 |
+
try:
|
| 342 |
+
self.filelist.process_template_line(line)
|
| 343 |
+
# the call above can raise a DistutilsTemplateError for
|
| 344 |
+
# malformed lines, or a ValueError from the lower-level
|
| 345 |
+
# convert_path function
|
| 346 |
+
except (DistutilsTemplateError, ValueError) as msg:
|
| 347 |
+
self.warn("%s, line %d: %s" % (template.filename,
|
| 348 |
+
template.current_line,
|
| 349 |
+
msg))
|
| 350 |
+
finally:
|
| 351 |
+
template.close()
|
| 352 |
+
|
| 353 |
+
def prune_file_list(self):
|
| 354 |
+
"""Prune off branches that might slip into the file list as created
|
| 355 |
+
by 'read_template()', but really don't belong there:
|
| 356 |
+
* the build tree (typically "build")
|
| 357 |
+
* the release tree itself (only an issue if we ran "sdist"
|
| 358 |
+
previously with --keep-temp, or it aborted)
|
| 359 |
+
* any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
|
| 360 |
+
"""
|
| 361 |
+
build = self.get_finalized_command('build')
|
| 362 |
+
base_dir = self.distribution.get_fullname()
|
| 363 |
+
|
| 364 |
+
self.filelist.exclude_pattern(None, prefix=build.build_base)
|
| 365 |
+
self.filelist.exclude_pattern(None, prefix=base_dir)
|
| 366 |
+
|
| 367 |
+
if sys.platform == 'win32':
|
| 368 |
+
seps = r'/|\\'
|
| 369 |
+
else:
|
| 370 |
+
seps = '/'
|
| 371 |
+
|
| 372 |
+
vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
|
| 373 |
+
'_darcs']
|
| 374 |
+
vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
|
| 375 |
+
self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
|
| 376 |
+
|
| 377 |
+
def write_manifest(self):
|
| 378 |
+
"""Write the file list in 'self.filelist' (presumably as filled in
|
| 379 |
+
by 'add_defaults()' and 'read_template()') to the manifest file
|
| 380 |
+
named by 'self.manifest'.
|
| 381 |
+
"""
|
| 382 |
+
if self._manifest_is_not_generated():
|
| 383 |
+
log.info("not writing to manually maintained "
|
| 384 |
+
"manifest file '%s'" % self.manifest)
|
| 385 |
+
return
|
| 386 |
+
|
| 387 |
+
content = self.filelist.files[:]
|
| 388 |
+
content.insert(0, '# file GENERATED by distutils, do NOT edit')
|
| 389 |
+
self.execute(file_util.write_file, (self.manifest, content),
|
| 390 |
+
"writing manifest file '%s'" % self.manifest)
|
| 391 |
+
|
| 392 |
+
def _manifest_is_not_generated(self):
|
| 393 |
+
# check for special comment used in 3.1.3 and higher
|
| 394 |
+
if not os.path.isfile(self.manifest):
|
| 395 |
+
return False
|
| 396 |
+
|
| 397 |
+
fp = open(self.manifest)
|
| 398 |
+
try:
|
| 399 |
+
first_line = fp.readline()
|
| 400 |
+
finally:
|
| 401 |
+
fp.close()
|
| 402 |
+
return first_line != '# file GENERATED by distutils, do NOT edit\n'
|
| 403 |
+
|
| 404 |
+
def read_manifest(self):
|
| 405 |
+
"""Read the manifest file (named by 'self.manifest') and use it to
|
| 406 |
+
fill in 'self.filelist', the list of files to include in the source
|
| 407 |
+
distribution.
|
| 408 |
+
"""
|
| 409 |
+
log.info("reading manifest file '%s'", self.manifest)
|
| 410 |
+
with open(self.manifest) as manifest:
|
| 411 |
+
for line in manifest:
|
| 412 |
+
# ignore comments and blank lines
|
| 413 |
+
line = line.strip()
|
| 414 |
+
if line.startswith('#') or not line:
|
| 415 |
+
continue
|
| 416 |
+
self.filelist.append(line)
|
| 417 |
+
|
| 418 |
+
def make_release_tree(self, base_dir, files):
|
| 419 |
+
"""Create the directory tree that will become the source
|
| 420 |
+
distribution archive. All directories implied by the filenames in
|
| 421 |
+
'files' are created under 'base_dir', and then we hard link or copy
|
| 422 |
+
(if hard linking is unavailable) those files into place.
|
| 423 |
+
Essentially, this duplicates the developer's source tree, but in a
|
| 424 |
+
directory named after the distribution, containing only the files
|
| 425 |
+
to be distributed.
|
| 426 |
+
"""
|
| 427 |
+
# Create all the directories under 'base_dir' necessary to
|
| 428 |
+
# put 'files' there; the 'mkpath()' is just so we don't die
|
| 429 |
+
# if the manifest happens to be empty.
|
| 430 |
+
self.mkpath(base_dir)
|
| 431 |
+
dir_util.create_tree(base_dir, files, dry_run=self.dry_run)
|
| 432 |
+
|
| 433 |
+
# And walk over the list of files, either making a hard link (if
|
| 434 |
+
# os.link exists) to each one that doesn't already exist in its
|
| 435 |
+
# corresponding location under 'base_dir', or copying each file
|
| 436 |
+
# that's out-of-date in 'base_dir'. (Usually, all files will be
|
| 437 |
+
# out-of-date, because by default we blow away 'base_dir' when
|
| 438 |
+
# we're done making the distribution archives.)
|
| 439 |
+
|
| 440 |
+
if hasattr(os, 'link'): # can make hard links on this system
|
| 441 |
+
link = 'hard'
|
| 442 |
+
msg = "making hard links in %s..." % base_dir
|
| 443 |
+
else: # nope, have to copy
|
| 444 |
+
link = None
|
| 445 |
+
msg = "copying files to %s..." % base_dir
|
| 446 |
+
|
| 447 |
+
if not files:
|
| 448 |
+
log.warn("no files to distribute -- empty manifest?")
|
| 449 |
+
else:
|
| 450 |
+
log.info(msg)
|
| 451 |
+
for file in files:
|
| 452 |
+
if not os.path.isfile(file):
|
| 453 |
+
log.warn("'%s' not a regular file -- skipping", file)
|
| 454 |
+
else:
|
| 455 |
+
dest = os.path.join(base_dir, file)
|
| 456 |
+
self.copy_file(file, dest, link=link)
|
| 457 |
+
|
| 458 |
+
self.distribution.metadata.write_pkg_info(base_dir)
|
| 459 |
+
|
| 460 |
+
def make_distribution(self):
|
| 461 |
+
"""Create the source distribution(s). First, we create the release
|
| 462 |
+
tree with 'make_release_tree()'; then, we create all required
|
| 463 |
+
archive files (according to 'self.formats') from the release tree.
|
| 464 |
+
Finally, we clean up by blowing away the release tree (unless
|
| 465 |
+
'self.keep_temp' is true). The list of archive files created is
|
| 466 |
+
stored so it can be retrieved later by 'get_archive_files()'.
|
| 467 |
+
"""
|
| 468 |
+
# Don't warn about missing meta-data here -- should be (and is!)
|
| 469 |
+
# done elsewhere.
|
| 470 |
+
base_dir = self.distribution.get_fullname()
|
| 471 |
+
base_name = os.path.join(self.dist_dir, base_dir)
|
| 472 |
+
|
| 473 |
+
self.make_release_tree(base_dir, self.filelist.files)
|
| 474 |
+
archive_files = [] # remember names of files we create
|
| 475 |
+
# tar archive must be created last to avoid overwrite and remove
|
| 476 |
+
if 'tar' in self.formats:
|
| 477 |
+
self.formats.append(self.formats.pop(self.formats.index('tar')))
|
| 478 |
+
|
| 479 |
+
for fmt in self.formats:
|
| 480 |
+
file = self.make_archive(base_name, fmt, base_dir=base_dir,
|
| 481 |
+
owner=self.owner, group=self.group)
|
| 482 |
+
archive_files.append(file)
|
| 483 |
+
self.distribution.dist_files.append(('sdist', '', file))
|
| 484 |
+
|
| 485 |
+
self.archive_files = archive_files
|
| 486 |
+
|
| 487 |
+
if not self.keep_temp:
|
| 488 |
+
dir_util.remove_tree(base_dir, dry_run=self.dry_run)
|
| 489 |
+
|
| 490 |
+
def get_archive_files(self):
|
| 491 |
+
"""Return the list of archive files created when the command
|
| 492 |
+
was run, or None if the command hasn't run yet.
|
| 493 |
+
"""
|
| 494 |
+
return self.archive_files
|
llava/lib/python3.10/distutils/tests/__pycache__/test_bdist_msi.cpython-310.pyc
ADDED
|
Binary file (1.49 kB). View file
|
|
|
llava/lib/python3.10/distutils/tests/__pycache__/test_bdist_rpm.cpython-310.pyc
ADDED
|
Binary file (3.62 kB). View file
|
|
|
llava/lib/python3.10/distutils/tests/__pycache__/test_cygwinccompiler.cpython-310.pyc
ADDED
|
Binary file (4.74 kB). View file
|
|
|
llava/lib/python3.10/distutils/tests/__pycache__/test_extension.cpython-310.pyc
ADDED
|
Binary file (2.73 kB). View file
|
|
|
llava/lib/python3.10/distutils/tests/__pycache__/test_file_util.cpython-310.pyc
ADDED
|
Binary file (4.93 kB). View file
|
|
|
llava/lib/python3.10/distutils/tests/__pycache__/test_filelist.cpython-310.pyc
ADDED
|
Binary file (8.68 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_cast_Short_compositeimplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeimplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor _cast_Short(const at::Tensor & self, bool non_blocking=false);
|
| 21 |
+
|
| 22 |
+
} // namespace compositeimplicitautograd
|
| 23 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_cdist_backward_cuda_dispatch.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cuda {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor _cdist_backward(const at::Tensor & grad, const at::Tensor & x1, const at::Tensor & x2, double p, const at::Tensor & cdist);
|
| 21 |
+
|
| 22 |
+
} // namespace cuda
|
| 23 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_copy_from_and_resize_native.h
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor & _copy_from_and_resize_out(const at::Tensor & self, const at::Tensor & dst, at::Tensor & out);
|
| 20 |
+
} // namespace native
|
| 21 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_cslt_sparse_mm_search_cuda_dispatch.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cuda {
|
| 19 |
+
|
| 20 |
+
TORCH_API int64_t _cslt_sparse_mm_search(const at::Tensor & compressed_A, const at::Tensor & dense_B, const ::std::optional<at::Tensor> & bias={}, const ::std::optional<at::Tensor> & alpha={}, ::std::optional<at::ScalarType> out_dtype=::std::nullopt, bool transpose_result=false);
|
| 21 |
+
|
| 22 |
+
} // namespace cuda
|
| 23 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_fft_c2r_ops.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _fft_c2r {
|
| 18 |
+
using schema = at::Tensor (const at::Tensor &, at::IntArrayRef, int64_t, c10::SymInt);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_fft_c2r")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_fft_c2r(Tensor self, int[] dim, int normalization, SymInt last_dim_size) -> Tensor")
|
| 24 |
+
static at::Tensor call(const at::Tensor & self, at::IntArrayRef dim, int64_t normalization, c10::SymInt last_dim_size);
|
| 25 |
+
static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, at::IntArrayRef dim, int64_t normalization, c10::SymInt last_dim_size);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
struct TORCH_API _fft_c2r_out {
|
| 29 |
+
using schema = at::Tensor & (const at::Tensor &, at::IntArrayRef, int64_t, c10::SymInt, at::Tensor &);
|
| 30 |
+
using ptr_schema = schema*;
|
| 31 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 32 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_fft_c2r")
|
| 33 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out")
|
| 34 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_fft_c2r.out(Tensor self, int[] dim, int normalization, SymInt last_dim_size, *, Tensor(a!) out) -> Tensor(a!)")
|
| 35 |
+
static at::Tensor & call(const at::Tensor & self, at::IntArrayRef dim, int64_t normalization, c10::SymInt last_dim_size, at::Tensor & out);
|
| 36 |
+
static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, at::IntArrayRef dim, int64_t normalization, c10::SymInt last_dim_size, at::Tensor & out);
|
| 37 |
+
};
|
| 38 |
+
|
| 39 |
+
}} // namespace at::_ops
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_log2_native.h
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API ::std::vector<at::Tensor> foreach_tensor_log2_slow(at::TensorList self);
|
| 20 |
+
TORCH_API void _foreach_log2_out(at::TensorList self, at::TensorList out);
|
| 21 |
+
TORCH_API void foreach_tensor_log2_slow_(at::TensorList self);
|
| 22 |
+
TORCH_API ::std::vector<at::Tensor> foreach_tensor_log2_cuda(at::TensorList self);
|
| 23 |
+
TORCH_API void foreach_tensor_log2_cuda_(at::TensorList self);
|
| 24 |
+
} // namespace native
|
| 25 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_pow_ops.h
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _foreach_pow_List {
|
| 18 |
+
using schema = ::std::vector<at::Tensor> (at::TensorList, at::TensorList);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_pow")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "List")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_pow.List(Tensor[] self, Tensor[] exponent) -> Tensor[]")
|
| 24 |
+
static ::std::vector<at::Tensor> call(at::TensorList self, at::TensorList exponent);
|
| 25 |
+
static ::std::vector<at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self, at::TensorList exponent);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
struct TORCH_API _foreach_pow_Scalar {
|
| 29 |
+
using schema = ::std::vector<at::Tensor> (at::TensorList, const at::Scalar &);
|
| 30 |
+
using ptr_schema = schema*;
|
| 31 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 32 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_pow")
|
| 33 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar")
|
| 34 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_pow.Scalar(Tensor[] self, Scalar exponent) -> Tensor[]")
|
| 35 |
+
static ::std::vector<at::Tensor> call(at::TensorList self, const at::Scalar & exponent);
|
| 36 |
+
static ::std::vector<at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self, const at::Scalar & exponent);
|
| 37 |
+
};
|
| 38 |
+
|
| 39 |
+
struct TORCH_API _foreach_pow_ScalarList {
|
| 40 |
+
using schema = ::std::vector<at::Tensor> (at::TensorList, at::ArrayRef<at::Scalar>);
|
| 41 |
+
using ptr_schema = schema*;
|
| 42 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 43 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_pow")
|
| 44 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "ScalarList")
|
| 45 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_pow.ScalarList(Tensor[] self, Scalar[] exponent) -> Tensor[]")
|
| 46 |
+
static ::std::vector<at::Tensor> call(at::TensorList self, at::ArrayRef<at::Scalar> exponent);
|
| 47 |
+
static ::std::vector<at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self, at::ArrayRef<at::Scalar> exponent);
|
| 48 |
+
};
|
| 49 |
+
|
| 50 |
+
struct TORCH_API _foreach_pow_ScalarAndTensor {
|
| 51 |
+
using schema = ::std::vector<at::Tensor> (const at::Scalar &, at::TensorList);
|
| 52 |
+
using ptr_schema = schema*;
|
| 53 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 54 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_pow")
|
| 55 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "ScalarAndTensor")
|
| 56 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_pow.ScalarAndTensor(Scalar self, Tensor[] exponent) -> Tensor[]")
|
| 57 |
+
static ::std::vector<at::Tensor> call(const at::Scalar & self, at::TensorList exponent);
|
| 58 |
+
static ::std::vector<at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, const at::Scalar & self, at::TensorList exponent);
|
| 59 |
+
};
|
| 60 |
+
|
| 61 |
+
struct TORCH_API _foreach_pow__List {
|
| 62 |
+
using schema = void (at::TensorList, at::TensorList);
|
| 63 |
+
using ptr_schema = schema*;
|
| 64 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 65 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_pow_")
|
| 66 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "List")
|
| 67 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_pow_.List(Tensor(a!)[] self, Tensor[] exponent) -> ()")
|
| 68 |
+
static void call(at::TensorList self, at::TensorList exponent);
|
| 69 |
+
static void redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self, at::TensorList exponent);
|
| 70 |
+
};
|
| 71 |
+
|
| 72 |
+
struct TORCH_API _foreach_pow__Scalar {
|
| 73 |
+
using schema = void (at::TensorList, const at::Scalar &);
|
| 74 |
+
using ptr_schema = schema*;
|
| 75 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 76 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_pow_")
|
| 77 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar")
|
| 78 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_pow_.Scalar(Tensor(a!)[] self, Scalar exponent) -> ()")
|
| 79 |
+
static void call(at::TensorList self, const at::Scalar & exponent);
|
| 80 |
+
static void redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self, const at::Scalar & exponent);
|
| 81 |
+
};
|
| 82 |
+
|
| 83 |
+
struct TORCH_API _foreach_pow__ScalarList {
|
| 84 |
+
using schema = void (at::TensorList, at::ArrayRef<at::Scalar>);
|
| 85 |
+
using ptr_schema = schema*;
|
| 86 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 87 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_pow_")
|
| 88 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "ScalarList")
|
| 89 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_pow_.ScalarList(Tensor(a!)[] self, Scalar[] exponent) -> ()")
|
| 90 |
+
static void call(at::TensorList self, at::ArrayRef<at::Scalar> exponent);
|
| 91 |
+
static void redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self, at::ArrayRef<at::Scalar> exponent);
|
| 92 |
+
};
|
| 93 |
+
|
| 94 |
+
struct TORCH_API _foreach_pow_List_out {
|
| 95 |
+
using schema = void (at::TensorList, at::TensorList, at::TensorList);
|
| 96 |
+
using ptr_schema = schema*;
|
| 97 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 98 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_pow")
|
| 99 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "List_out")
|
| 100 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_pow.List_out(Tensor[] self, Tensor[] exponent, *, Tensor(a!)[] out) -> ()")
|
| 101 |
+
static void call(at::TensorList self, at::TensorList exponent, at::TensorList out);
|
| 102 |
+
static void redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self, at::TensorList exponent, at::TensorList out);
|
| 103 |
+
};
|
| 104 |
+
|
| 105 |
+
struct TORCH_API _foreach_pow_Scalar_out {
|
| 106 |
+
using schema = void (at::TensorList, const at::Scalar &, at::TensorList);
|
| 107 |
+
using ptr_schema = schema*;
|
| 108 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 109 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_pow")
|
| 110 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar_out")
|
| 111 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_pow.Scalar_out(Tensor[] self, Scalar exponent, *, Tensor(a!)[] out) -> ()")
|
| 112 |
+
static void call(at::TensorList self, const at::Scalar & exponent, at::TensorList out);
|
| 113 |
+
static void redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self, const at::Scalar & exponent, at::TensorList out);
|
| 114 |
+
};
|
| 115 |
+
|
| 116 |
+
struct TORCH_API _foreach_pow_ScalarList_out {
|
| 117 |
+
using schema = void (at::TensorList, at::ArrayRef<at::Scalar>, at::TensorList);
|
| 118 |
+
using ptr_schema = schema*;
|
| 119 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 120 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_pow")
|
| 121 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "ScalarList_out")
|
| 122 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_pow.ScalarList_out(Tensor[] self, Scalar[] exponent, *, Tensor(a!)[] out) -> ()")
|
| 123 |
+
static void call(at::TensorList self, at::ArrayRef<at::Scalar> exponent, at::TensorList out);
|
| 124 |
+
static void redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self, at::ArrayRef<at::Scalar> exponent, at::TensorList out);
|
| 125 |
+
};
|
| 126 |
+
|
| 127 |
+
}} // namespace at::_ops
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_sqrt_native.h
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API ::std::vector<at::Tensor> foreach_tensor_sqrt_slow(at::TensorList self);
|
| 20 |
+
TORCH_API void _foreach_sqrt_out(at::TensorList self, at::TensorList out);
|
| 21 |
+
TORCH_API void foreach_tensor_sqrt_slow_(at::TensorList self);
|
| 22 |
+
TORCH_API ::std::vector<at::Tensor> foreach_tensor_sqrt_cuda(at::TensorList self);
|
| 23 |
+
TORCH_API void foreach_tensor_sqrt_cuda_(at::TensorList self);
|
| 24 |
+
} // namespace native
|
| 25 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_lstm_mps.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_lstm_mps_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_lstm_mps(Tensor input, Tensor[] hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first) -> (Tensor, Tensor, Tensor, Tensor, Tensor, Tensor)
|
| 26 |
+
inline ::std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor,at::Tensor,at::Tensor> _lstm_mps(const at::Tensor & input, at::TensorList hx, at::TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first) {
|
| 27 |
+
return at::_ops::_lstm_mps::call(input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::_lstm_mps.out(Tensor input, Tensor[] hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first, *, Tensor(a!) out0, Tensor(b!) out1, Tensor(c!) out2, Tensor(d!) out3, Tensor(e!) out4, Tensor(f!) out5) -> (Tensor(a!), Tensor(b!), Tensor(c!), Tensor(d!), Tensor(e!), Tensor(f!))
|
| 31 |
+
inline ::std::tuple<at::Tensor &,at::Tensor &,at::Tensor &,at::Tensor &,at::Tensor &,at::Tensor &> _lstm_mps_out(at::Tensor & out0, at::Tensor & out1, at::Tensor & out2, at::Tensor & out3, at::Tensor & out4, at::Tensor & out5, const at::Tensor & input, at::TensorList hx, at::TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first) {
|
| 32 |
+
return at::_ops::_lstm_mps_out::call(input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first, out0, out1, out2, out3, out4, out5);
|
| 33 |
+
}
|
| 34 |
+
// aten::_lstm_mps.out(Tensor input, Tensor[] hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first, *, Tensor(a!) out0, Tensor(b!) out1, Tensor(c!) out2, Tensor(d!) out3, Tensor(e!) out4, Tensor(f!) out5) -> (Tensor(a!), Tensor(b!), Tensor(c!), Tensor(d!), Tensor(e!), Tensor(f!))
|
| 35 |
+
inline ::std::tuple<at::Tensor &,at::Tensor &,at::Tensor &,at::Tensor &,at::Tensor &,at::Tensor &> _lstm_mps_outf(const at::Tensor & input, at::TensorList hx, at::TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first, at::Tensor & out0, at::Tensor & out1, at::Tensor & out2, at::Tensor & out3, at::Tensor & out4, at::Tensor & out5) {
|
| 36 |
+
return at::_ops::_lstm_mps_out::call(input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first, out0, out1, out2, out3, out4, out5);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_native_batch_norm_legit_cuda_dispatch.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cuda {
|
| 19 |
+
|
| 20 |
+
TORCH_API ::std::tuple<at::Tensor &,at::Tensor &,at::Tensor &> _native_batch_norm_legit_out(at::Tensor & out, at::Tensor & save_mean, at::Tensor & save_invstd, const at::Tensor & input, const ::std::optional<at::Tensor> & weight, const ::std::optional<at::Tensor> & bias, at::Tensor & running_mean, at::Tensor & running_var, bool training, double momentum, double eps);
|
| 21 |
+
TORCH_API ::std::tuple<at::Tensor &,at::Tensor &,at::Tensor &> _native_batch_norm_legit_outf(const at::Tensor & input, const ::std::optional<at::Tensor> & weight, const ::std::optional<at::Tensor> & bias, at::Tensor & running_mean, at::Tensor & running_var, bool training, double momentum, double eps, at::Tensor & out, at::Tensor & save_mean, at::Tensor & save_invstd);
|
| 22 |
+
TORCH_API ::std::tuple<at::Tensor,at::Tensor,at::Tensor> _native_batch_norm_legit(const at::Tensor & input, const ::std::optional<at::Tensor> & weight, const ::std::optional<at::Tensor> & bias, at::Tensor & running_mean, at::Tensor & running_var, bool training, double momentum, double eps);
|
| 23 |
+
TORCH_API ::std::tuple<at::Tensor,at::Tensor,at::Tensor> _native_batch_norm_legit(const at::Tensor & input, const ::std::optional<at::Tensor> & weight, const ::std::optional<at::Tensor> & bias, bool training, double momentum, double eps);
|
| 24 |
+
TORCH_API ::std::tuple<at::Tensor &,at::Tensor &,at::Tensor &> _native_batch_norm_legit_out(at::Tensor & out, at::Tensor & save_mean, at::Tensor & save_invstd, const at::Tensor & input, const ::std::optional<at::Tensor> & weight, const ::std::optional<at::Tensor> & bias, bool training, double momentum, double eps);
|
| 25 |
+
TORCH_API ::std::tuple<at::Tensor &,at::Tensor &,at::Tensor &> _native_batch_norm_legit_outf(const at::Tensor & input, const ::std::optional<at::Tensor> & weight, const ::std::optional<at::Tensor> & bias, bool training, double momentum, double eps, at::Tensor & out, at::Tensor & save_mean, at::Tensor & save_invstd);
|
| 26 |
+
|
| 27 |
+
} // namespace cuda
|
| 28 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_nested_get_values_ops.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _nested_get_values {
|
| 18 |
+
using schema = at::Tensor (const at::Tensor &);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_nested_get_values")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_nested_get_values(Tensor(a) self) -> Tensor(a)")
|
| 24 |
+
static at::Tensor call(const at::Tensor & self);
|
| 25 |
+
static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
}} // namespace at::_ops
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_efficient_attention_native.h
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API ::std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor> _scaled_dot_product_efficient_attention_cuda(const at::Tensor & query, const at::Tensor & key, const at::Tensor & value, const ::std::optional<at::Tensor> & attn_bias, bool compute_log_sumexp, double dropout_p=0.0, bool is_causal=false, ::std::optional<double> scale=::std::nullopt);
|
| 20 |
+
TORCH_API ::std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor> _scaled_dot_product_efficient_attention_nestedtensor_cuda(const at::Tensor & query, const at::Tensor & key, const at::Tensor & value, const ::std::optional<at::Tensor> & attn_bias, bool compute_log_sumexp, double dropout_p=0.0, bool is_causal=false, ::std::optional<double> scale=::std::nullopt);
|
| 21 |
+
} // namespace native
|
| 22 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/acosh_meta_dispatch.h
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace meta {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor acosh(const at::Tensor & self);
|
| 21 |
+
TORCH_API at::Tensor & acosh_out(at::Tensor & out, const at::Tensor & self);
|
| 22 |
+
TORCH_API at::Tensor & acosh_outf(const at::Tensor & self, at::Tensor & out);
|
| 23 |
+
TORCH_API at::Tensor & acosh_(at::Tensor & self);
|
| 24 |
+
|
| 25 |
+
} // namespace meta
|
| 26 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/addbmm.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/addbmm_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::addbmm.out(Tensor self, Tensor batch1, Tensor batch2, *, Scalar beta=1, Scalar alpha=1, Tensor(a!) out) -> Tensor(a!)
|
| 26 |
+
inline at::Tensor & addbmm_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & batch1, const at::Tensor & batch2, const at::Scalar & beta=1, const at::Scalar & alpha=1) {
|
| 27 |
+
return at::_ops::addbmm_out::call(self, batch1, batch2, beta, alpha, out);
|
| 28 |
+
}
|
| 29 |
+
// aten::addbmm.out(Tensor self, Tensor batch1, Tensor batch2, *, Scalar beta=1, Scalar alpha=1, Tensor(a!) out) -> Tensor(a!)
|
| 30 |
+
inline at::Tensor & addbmm_outf(const at::Tensor & self, const at::Tensor & batch1, const at::Tensor & batch2, const at::Scalar & beta, const at::Scalar & alpha, at::Tensor & out) {
|
| 31 |
+
return at::_ops::addbmm_out::call(self, batch1, batch2, beta, alpha, out);
|
| 32 |
+
}
|
| 33 |
+
|
| 34 |
+
// aten::addbmm(Tensor self, Tensor batch1, Tensor batch2, *, Scalar beta=1, Scalar alpha=1) -> Tensor
|
| 35 |
+
inline at::Tensor addbmm(const at::Tensor & self, const at::Tensor & batch1, const at::Tensor & batch2, const at::Scalar & beta=1, const at::Scalar & alpha=1) {
|
| 36 |
+
return at::_ops::addbmm::call(self, batch1, batch2, beta, alpha);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/argsort.h
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/argsort_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::argsort(Tensor self, int dim=-1, bool descending=False) -> Tensor
|
| 26 |
+
inline at::Tensor argsort(const at::Tensor & self, int64_t dim=-1, bool descending=false) {
|
| 27 |
+
return at::_ops::argsort::call(self, dim, descending);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::argsort.stable(Tensor self, *, bool stable, int dim=-1, bool descending=False) -> Tensor
|
| 31 |
+
inline at::Tensor argsort(const at::Tensor & self, bool stable, int64_t dim=-1, bool descending=false) {
|
| 32 |
+
return at::_ops::argsort_stable::call(self, stable, dim, descending);
|
| 33 |
+
}
|
| 34 |
+
|
| 35 |
+
// aten::argsort.dimname(Tensor self, Dimname dim, bool descending=False) -> Tensor
|
| 36 |
+
inline at::Tensor argsort(const at::Tensor & self, at::Dimname dim, bool descending=false) {
|
| 37 |
+
return at::_ops::argsort_dimname::call(self, dim, descending);
|
| 38 |
+
}
|
| 39 |
+
|
| 40 |
+
// aten::argsort.stable_out(Tensor self, *, bool stable, int dim=-1, bool descending=False, Tensor(a!) out) -> Tensor(a!)
|
| 41 |
+
inline at::Tensor & argsort_out(at::Tensor & out, const at::Tensor & self, bool stable, int64_t dim=-1, bool descending=false) {
|
| 42 |
+
return at::_ops::argsort_stable_out::call(self, stable, dim, descending, out);
|
| 43 |
+
}
|
| 44 |
+
// aten::argsort.stable_out(Tensor self, *, bool stable, int dim=-1, bool descending=False, Tensor(a!) out) -> Tensor(a!)
|
| 45 |
+
inline at::Tensor & argsort_outf(const at::Tensor & self, bool stable, int64_t dim, bool descending, at::Tensor & out) {
|
| 46 |
+
return at::_ops::argsort_stable_out::call(self, stable, dim, descending, out);
|
| 47 |
+
}
|
| 48 |
+
|
| 49 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/avg_pool2d_backward_compositeexplicitautogradnonfunctional_dispatch.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautogradnonfunctional {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor avg_pool2d_backward(const at::Tensor & grad_output, const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef stride, at::IntArrayRef padding, bool ceil_mode, bool count_include_pad, ::std::optional<int64_t> divisor_override);
|
| 21 |
+
|
| 22 |
+
} // namespace compositeexplicitautogradnonfunctional
|
| 23 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/concat.h
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/concat_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::concat(Tensor[] tensors, int dim=0) -> Tensor
|
| 26 |
+
inline at::Tensor concat(at::TensorList tensors, int64_t dim=0) {
|
| 27 |
+
return at::_ops::concat::call(tensors, dim);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::concat.out(Tensor[] tensors, int dim=0, *, Tensor(a!) out) -> Tensor(a!)
|
| 31 |
+
inline at::Tensor & concat_out(at::Tensor & out, at::TensorList tensors, int64_t dim=0) {
|
| 32 |
+
return at::_ops::concat_out::call(tensors, dim, out);
|
| 33 |
+
}
|
| 34 |
+
// aten::concat.out(Tensor[] tensors, int dim=0, *, Tensor(a!) out) -> Tensor(a!)
|
| 35 |
+
inline at::Tensor & concat_outf(at::TensorList tensors, int64_t dim, at::Tensor & out) {
|
| 36 |
+
return at::_ops::concat_out::call(tensors, dim, out);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
// aten::concat.names(Tensor[] tensors, Dimname dim) -> Tensor
|
| 40 |
+
inline at::Tensor concat(at::TensorList tensors, at::Dimname dim) {
|
| 41 |
+
return at::_ops::concat_names::call(tensors, dim);
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
// aten::concat.names_out(Tensor[] tensors, Dimname dim, *, Tensor(a!) out) -> Tensor(a!)
|
| 45 |
+
inline at::Tensor & concat_out(at::Tensor & out, at::TensorList tensors, at::Dimname dim) {
|
| 46 |
+
return at::_ops::concat_names_out::call(tensors, dim, out);
|
| 47 |
+
}
|
| 48 |
+
// aten::concat.names_out(Tensor[] tensors, Dimname dim, *, Tensor(a!) out) -> Tensor(a!)
|
| 49 |
+
inline at::Tensor & concat_outf(at::TensorList tensors, at::Dimname dim, at::Tensor & out) {
|
| 50 |
+
return at::_ops::concat_names_out::call(tensors, dim, out);
|
| 51 |
+
}
|
| 52 |
+
|
| 53 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/conj_physical_compositeimplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeimplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor conj_physical(const at::Tensor & self);
|
| 21 |
+
|
| 22 |
+
} // namespace compositeimplicitautograd
|
| 23 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/conv1d_compositeimplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeimplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor conv1d(const at::Tensor & input, const at::Tensor & weight, const ::std::optional<at::Tensor> & bias={}, at::IntArrayRef stride=1, at::IntArrayRef padding=0, at::IntArrayRef dilation=1, int64_t groups=1);
|
| 21 |
+
TORCH_API at::Tensor conv1d_symint(const at::Tensor & input, const at::Tensor & weight, const ::std::optional<at::Tensor> & bias={}, c10::SymIntArrayRef stride=c10::SymInt(1), c10::SymIntArrayRef padding=c10::SymInt(0), c10::SymIntArrayRef dilation=c10::SymInt(1), c10::SymInt groups=1);
|
| 22 |
+
TORCH_API at::Tensor conv1d(const at::Tensor & input, const at::Tensor & weight, const ::std::optional<at::Tensor> & bias, at::IntArrayRef stride, c10::string_view padding, at::IntArrayRef dilation=1, int64_t groups=1);
|
| 23 |
+
TORCH_API at::Tensor conv1d_symint(const at::Tensor & input, const at::Tensor & weight, const ::std::optional<at::Tensor> & bias, c10::SymIntArrayRef stride, c10::string_view padding, c10::SymIntArrayRef dilation=c10::SymInt(1), c10::SymInt groups=1);
|
| 24 |
+
|
| 25 |
+
} // namespace compositeimplicitautograd
|
| 26 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/empty_like_compositeexplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor empty_like(const at::Tensor & self, at::TensorOptions options={}, ::std::optional<at::MemoryFormat> memory_format=::std::nullopt);
|
| 21 |
+
TORCH_API at::Tensor empty_like(const at::Tensor & self, ::std::optional<at::ScalarType> dtype, ::std::optional<at::Layout> layout, ::std::optional<at::Device> device, ::std::optional<bool> pin_memory, ::std::optional<at::MemoryFormat> memory_format);
|
| 22 |
+
TORCH_API at::Tensor & empty_like_out(at::Tensor & out, const at::Tensor & self, ::std::optional<at::MemoryFormat> memory_format=::std::nullopt);
|
| 23 |
+
TORCH_API at::Tensor & empty_like_outf(const at::Tensor & self, ::std::optional<at::MemoryFormat> memory_format, at::Tensor & out);
|
| 24 |
+
|
| 25 |
+
} // namespace compositeexplicitautograd
|
| 26 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/empty_quantized_compositeexplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor & empty_quantized_out(at::Tensor & out, at::IntArrayRef size, const at::Tensor & qtensor, ::std::optional<at::MemoryFormat> memory_format=::std::nullopt);
|
| 21 |
+
TORCH_API at::Tensor & empty_quantized_outf(at::IntArrayRef size, const at::Tensor & qtensor, ::std::optional<at::MemoryFormat> memory_format, at::Tensor & out);
|
| 22 |
+
|
| 23 |
+
} // namespace compositeexplicitautograd
|
| 24 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/empty_quantized_ops.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API empty_quantized {
|
| 18 |
+
using schema = at::Tensor (at::IntArrayRef, const at::Tensor &, ::std::optional<at::ScalarType>, ::std::optional<at::Layout>, ::std::optional<at::Device>, ::std::optional<bool>, ::std::optional<at::MemoryFormat>);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::empty_quantized")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "empty_quantized(int[] size, Tensor qtensor, *, ScalarType? dtype=None, Layout? layout=None, Device? device=None, bool? pin_memory=None, MemoryFormat? memory_format=None) -> Tensor")
|
| 24 |
+
static at::Tensor call(at::IntArrayRef size, const at::Tensor & qtensor, ::std::optional<at::ScalarType> dtype, ::std::optional<at::Layout> layout, ::std::optional<at::Device> device, ::std::optional<bool> pin_memory, ::std::optional<at::MemoryFormat> memory_format);
|
| 25 |
+
static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, at::IntArrayRef size, const at::Tensor & qtensor, ::std::optional<at::ScalarType> dtype, ::std::optional<at::Layout> layout, ::std::optional<at::Device> device, ::std::optional<bool> pin_memory, ::std::optional<at::MemoryFormat> memory_format);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
struct TORCH_API empty_quantized_out {
|
| 29 |
+
using schema = at::Tensor & (at::IntArrayRef, const at::Tensor &, ::std::optional<at::MemoryFormat>, at::Tensor &);
|
| 30 |
+
using ptr_schema = schema*;
|
| 31 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 32 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::empty_quantized")
|
| 33 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out")
|
| 34 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "empty_quantized.out(int[] size, Tensor qtensor, *, MemoryFormat? memory_format=None, Tensor(a!) out) -> Tensor(a!)")
|
| 35 |
+
static at::Tensor & call(at::IntArrayRef size, const at::Tensor & qtensor, ::std::optional<at::MemoryFormat> memory_format, at::Tensor & out);
|
| 36 |
+
static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, at::IntArrayRef size, const at::Tensor & qtensor, ::std::optional<at::MemoryFormat> memory_format, at::Tensor & out);
|
| 37 |
+
};
|
| 38 |
+
|
| 39 |
+
}} // namespace at::_ops
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/hann_window_compositeexplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor hann_window(int64_t window_length, at::TensorOptions options={});
|
| 21 |
+
TORCH_API at::Tensor hann_window(int64_t window_length, ::std::optional<at::ScalarType> dtype, ::std::optional<at::Layout> layout, ::std::optional<at::Device> device, ::std::optional<bool> pin_memory);
|
| 22 |
+
TORCH_API at::Tensor & hann_window_out(at::Tensor & out, int64_t window_length);
|
| 23 |
+
TORCH_API at::Tensor & hann_window_outf(int64_t window_length, at::Tensor & out);
|
| 24 |
+
TORCH_API at::Tensor hann_window(int64_t window_length, bool periodic, at::TensorOptions options={});
|
| 25 |
+
TORCH_API at::Tensor hann_window(int64_t window_length, bool periodic, ::std::optional<at::ScalarType> dtype, ::std::optional<at::Layout> layout, ::std::optional<at::Device> device, ::std::optional<bool> pin_memory);
|
| 26 |
+
TORCH_API at::Tensor & hann_window_out(at::Tensor & out, int64_t window_length, bool periodic);
|
| 27 |
+
TORCH_API at::Tensor & hann_window_outf(int64_t window_length, bool periodic, at::Tensor & out);
|
| 28 |
+
|
| 29 |
+
} // namespace compositeexplicitautograd
|
| 30 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/hinge_embedding_loss_native.h
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor hinge_embedding_loss(const at::Tensor & self, const at::Tensor & target, double margin=1.0, int64_t reduction=at::Reduction::Mean);
|
| 20 |
+
} // namespace native
|
| 21 |
+
} // namespace at
|