Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .gitattributes +1 -0
- deepseek/lib/python3.10/collections/__pycache__/__init__.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/collections/__pycache__/abc.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/collections/abc.py +3 -0
- deepseek/lib/python3.10/distutils/__pycache__/cmd.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/__pycache__/extension.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/__pycache__/file_util.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/__pycache__/spawn.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/__pycache__/unixccompiler.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/command/__pycache__/build.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/command/__pycache__/build_ext.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/command/__pycache__/build_py.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/command/__pycache__/install_egg_info.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/command/bdist.py +141 -0
- deepseek/lib/python3.10/distutils/command/bdist_msi.py +747 -0
- deepseek/lib/python3.10/distutils/command/bdist_rpm.py +579 -0
- deepseek/lib/python3.10/distutils/command/build_ext.py +754 -0
- deepseek/lib/python3.10/distutils/command/clean.py +76 -0
- deepseek/lib/python3.10/distutils/command/command_template +33 -0
- deepseek/lib/python3.10/distutils/command/install_lib.py +217 -0
- deepseek/lib/python3.10/distutils/command/register.py +304 -0
- deepseek/lib/python3.10/distutils/command/sdist.py +494 -0
- deepseek/lib/python3.10/distutils/tests/__pycache__/__init__.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/tests/__pycache__/support.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/tests/__pycache__/test_build_clib.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/tests/__pycache__/test_filelist.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/tests/__pycache__/test_install.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/tests/__pycache__/test_install_data.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/tests/__pycache__/test_util.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/tests/includetest.rst +1 -0
- deepseek/lib/python3.10/distutils/tests/test_archive_util.py +396 -0
- deepseek/lib/python3.10/ensurepip/__main__.py +5 -0
- deepseek/lib/python3.10/ensurepip/__pycache__/__init__.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/ensurepip/__pycache__/__main__.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/ensurepip/__pycache__/_uninstall.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/ensurepip/_bundled/__init__.py +0 -0
- deepseek/lib/python3.10/ensurepip/_bundled/__pycache__/__init__.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/ensurepip/_uninstall.py +31 -0
- deepseek/lib/python3.10/json/__init__.py +359 -0
- deepseek/lib/python3.10/json/__pycache__/encoder.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/json/__pycache__/scanner.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/json/__pycache__/tool.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/json/decoder.py +356 -0
- deepseek/lib/python3.10/json/encoder.py +442 -0
- deepseek/lib/python3.10/json/scanner.py +73 -0
- deepseek/lib/python3.10/multiprocessing/forkserver.py +348 -0
- deepseek/lib/python3.10/multiprocessing/managers.py +1378 -0
- deepseek/lib/python3.10/multiprocessing/popen_forkserver.py +74 -0
- deepseek/lib/python3.10/multiprocessing/popen_spawn_posix.py +72 -0
- deepseek/lib/python3.10/multiprocessing/util.py +489 -0
.gitattributes
CHANGED
|
@@ -423,3 +423,4 @@ deepseek/lib/libstdc++.so.6.0.29 filter=lfs diff=lfs merge=lfs -text
|
|
| 423 |
deepseek/lib/libitm.so filter=lfs diff=lfs merge=lfs -text
|
| 424 |
evalkit_tf437/lib/python3.10/site-packages/scipy/spatial/_voronoi.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 425 |
deepseek/lib/libgomp.so.1.0.0 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 423 |
deepseek/lib/libitm.so filter=lfs diff=lfs merge=lfs -text
|
| 424 |
evalkit_tf437/lib/python3.10/site-packages/scipy/spatial/_voronoi.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 425 |
deepseek/lib/libgomp.so.1.0.0 filter=lfs diff=lfs merge=lfs -text
|
| 426 |
+
evalkit_tf437/lib/python3.10/site-packages/matplotlib/_tri.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
deepseek/lib/python3.10/collections/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (48.4 kB). View file
|
|
|
deepseek/lib/python3.10/collections/__pycache__/abc.cpython-310.pyc
ADDED
|
Binary file (492 Bytes). View file
|
|
|
deepseek/lib/python3.10/collections/abc.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from _collections_abc import *
|
| 2 |
+
from _collections_abc import __all__
|
| 3 |
+
from _collections_abc import _CallableGenericAlias
|
deepseek/lib/python3.10/distutils/__pycache__/cmd.cpython-310.pyc
ADDED
|
Binary file (13.9 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/__pycache__/extension.cpython-310.pyc
ADDED
|
Binary file (7.23 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/__pycache__/file_util.cpython-310.pyc
ADDED
|
Binary file (5.92 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/__pycache__/spawn.cpython-310.pyc
ADDED
|
Binary file (3.67 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/__pycache__/unixccompiler.cpython-310.pyc
ADDED
|
Binary file (7.07 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/command/__pycache__/build.cpython-310.pyc
ADDED
|
Binary file (4.1 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/command/__pycache__/build_ext.cpython-310.pyc
ADDED
|
Binary file (16.4 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/command/__pycache__/build_py.cpython-310.pyc
ADDED
|
Binary file (10.7 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/command/__pycache__/install_egg_info.cpython-310.pyc
ADDED
|
Binary file (3.03 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/command/bdist.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.bdist
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'bdist' command (create a built [binary]
|
| 4 |
+
distribution)."""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
from distutils.core import Command
|
| 8 |
+
from distutils.errors import *
|
| 9 |
+
from distutils.util import get_platform
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def show_formats():
|
| 13 |
+
"""Print list of available formats (arguments to "--format" option).
|
| 14 |
+
"""
|
| 15 |
+
from distutils.fancy_getopt import FancyGetopt
|
| 16 |
+
formats = []
|
| 17 |
+
for format in bdist.format_commands:
|
| 18 |
+
formats.append(("formats=" + format, None,
|
| 19 |
+
bdist.format_command[format][1]))
|
| 20 |
+
pretty_printer = FancyGetopt(formats)
|
| 21 |
+
pretty_printer.print_help("List of available distribution formats:")
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class bdist(Command):
|
| 25 |
+
|
| 26 |
+
description = "create a built (binary) distribution"
|
| 27 |
+
|
| 28 |
+
user_options = [('bdist-base=', 'b',
|
| 29 |
+
"temporary directory for creating built distributions"),
|
| 30 |
+
('plat-name=', 'p',
|
| 31 |
+
"platform name to embed in generated filenames "
|
| 32 |
+
"(default: %s)" % get_platform()),
|
| 33 |
+
('formats=', None,
|
| 34 |
+
"formats for distribution (comma-separated list)"),
|
| 35 |
+
('dist-dir=', 'd',
|
| 36 |
+
"directory to put final built distributions in "
|
| 37 |
+
"[default: dist]"),
|
| 38 |
+
('skip-build', None,
|
| 39 |
+
"skip rebuilding everything (for testing/debugging)"),
|
| 40 |
+
('owner=', 'u',
|
| 41 |
+
"Owner name used when creating a tar file"
|
| 42 |
+
" [default: current user]"),
|
| 43 |
+
('group=', 'g',
|
| 44 |
+
"Group name used when creating a tar file"
|
| 45 |
+
" [default: current group]"),
|
| 46 |
+
]
|
| 47 |
+
|
| 48 |
+
boolean_options = ['skip-build']
|
| 49 |
+
|
| 50 |
+
help_options = [
|
| 51 |
+
('help-formats', None,
|
| 52 |
+
"lists available distribution formats", show_formats),
|
| 53 |
+
]
|
| 54 |
+
|
| 55 |
+
# The following commands do not take a format option from bdist
|
| 56 |
+
no_format_option = ('bdist_rpm',)
|
| 57 |
+
|
| 58 |
+
# This won't do in reality: will need to distinguish RPM-ish Linux,
|
| 59 |
+
# Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
|
| 60 |
+
default_format = {'posix': 'gztar',
|
| 61 |
+
'nt': 'zip'}
|
| 62 |
+
|
| 63 |
+
# Establish the preferred order (for the --help-formats option).
|
| 64 |
+
format_commands = ['rpm', 'gztar', 'bztar', 'xztar', 'ztar', 'tar',
|
| 65 |
+
'zip', 'msi']
|
| 66 |
+
|
| 67 |
+
# And the real information.
|
| 68 |
+
format_command = {'rpm': ('bdist_rpm', "RPM distribution"),
|
| 69 |
+
'gztar': ('bdist_dumb', "gzip'ed tar file"),
|
| 70 |
+
'bztar': ('bdist_dumb', "bzip2'ed tar file"),
|
| 71 |
+
'xztar': ('bdist_dumb', "xz'ed tar file"),
|
| 72 |
+
'ztar': ('bdist_dumb', "compressed tar file"),
|
| 73 |
+
'tar': ('bdist_dumb', "tar file"),
|
| 74 |
+
'zip': ('bdist_dumb', "ZIP file"),
|
| 75 |
+
'msi': ('bdist_msi', "Microsoft Installer")
|
| 76 |
+
}
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def initialize_options(self):
|
| 80 |
+
self.bdist_base = None
|
| 81 |
+
self.plat_name = None
|
| 82 |
+
self.formats = None
|
| 83 |
+
self.dist_dir = None
|
| 84 |
+
self.skip_build = 0
|
| 85 |
+
self.group = None
|
| 86 |
+
self.owner = None
|
| 87 |
+
|
| 88 |
+
def finalize_options(self):
|
| 89 |
+
# have to finalize 'plat_name' before 'bdist_base'
|
| 90 |
+
if self.plat_name is None:
|
| 91 |
+
if self.skip_build:
|
| 92 |
+
self.plat_name = get_platform()
|
| 93 |
+
else:
|
| 94 |
+
self.plat_name = self.get_finalized_command('build').plat_name
|
| 95 |
+
|
| 96 |
+
# 'bdist_base' -- parent of per-built-distribution-format
|
| 97 |
+
# temporary directories (eg. we'll probably have
|
| 98 |
+
# "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
|
| 99 |
+
if self.bdist_base is None:
|
| 100 |
+
build_base = self.get_finalized_command('build').build_base
|
| 101 |
+
self.bdist_base = os.path.join(build_base,
|
| 102 |
+
'bdist.' + self.plat_name)
|
| 103 |
+
|
| 104 |
+
self.ensure_string_list('formats')
|
| 105 |
+
if self.formats is None:
|
| 106 |
+
try:
|
| 107 |
+
self.formats = [self.default_format[os.name]]
|
| 108 |
+
except KeyError:
|
| 109 |
+
raise DistutilsPlatformError(
|
| 110 |
+
"don't know how to create built distributions "
|
| 111 |
+
"on platform %s" % os.name)
|
| 112 |
+
|
| 113 |
+
if self.dist_dir is None:
|
| 114 |
+
self.dist_dir = "dist"
|
| 115 |
+
|
| 116 |
+
def run(self):
|
| 117 |
+
# Figure out which sub-commands we need to run.
|
| 118 |
+
commands = []
|
| 119 |
+
for format in self.formats:
|
| 120 |
+
try:
|
| 121 |
+
commands.append(self.format_command[format][0])
|
| 122 |
+
except KeyError:
|
| 123 |
+
raise DistutilsOptionError("invalid format '%s'" % format)
|
| 124 |
+
|
| 125 |
+
# Reinitialize and run each command.
|
| 126 |
+
for i in range(len(self.formats)):
|
| 127 |
+
cmd_name = commands[i]
|
| 128 |
+
sub_cmd = self.reinitialize_command(cmd_name)
|
| 129 |
+
if cmd_name not in self.no_format_option:
|
| 130 |
+
sub_cmd.format = self.formats[i]
|
| 131 |
+
|
| 132 |
+
# passing the owner and group names for tar archiving
|
| 133 |
+
if cmd_name == 'bdist_dumb':
|
| 134 |
+
sub_cmd.owner = self.owner
|
| 135 |
+
sub_cmd.group = self.group
|
| 136 |
+
|
| 137 |
+
# If we're going to need to run this command again, tell it to
|
| 138 |
+
# keep its temporary files around so subsequent runs go faster.
|
| 139 |
+
if cmd_name in commands[i+1:]:
|
| 140 |
+
sub_cmd.keep_temp = 1
|
| 141 |
+
self.run_command(cmd_name)
|
deepseek/lib/python3.10/distutils/command/bdist_msi.py
ADDED
|
@@ -0,0 +1,747 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2005, 2006 Martin von Löwis
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
"""
|
| 4 |
+
Implements the bdist_msi command.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
import sys
|
| 9 |
+
import warnings
|
| 10 |
+
from distutils.core import Command
|
| 11 |
+
from distutils.dir_util import remove_tree
|
| 12 |
+
from distutils.sysconfig import get_python_version
|
| 13 |
+
from distutils.version import StrictVersion
|
| 14 |
+
from distutils.errors import DistutilsOptionError
|
| 15 |
+
from distutils.util import get_platform
|
| 16 |
+
from distutils import log
|
| 17 |
+
import msilib
|
| 18 |
+
from msilib import schema, sequence, text
|
| 19 |
+
from msilib import Directory, Feature, Dialog, add_data
|
| 20 |
+
|
| 21 |
+
class PyDialog(Dialog):
|
| 22 |
+
"""Dialog class with a fixed layout: controls at the top, then a ruler,
|
| 23 |
+
then a list of buttons: back, next, cancel. Optionally a bitmap at the
|
| 24 |
+
left."""
|
| 25 |
+
def __init__(self, *args, **kw):
|
| 26 |
+
"""Dialog(database, name, x, y, w, h, attributes, title, first,
|
| 27 |
+
default, cancel, bitmap=true)"""
|
| 28 |
+
Dialog.__init__(self, *args)
|
| 29 |
+
ruler = self.h - 36
|
| 30 |
+
bmwidth = 152*ruler/328
|
| 31 |
+
#if kw.get("bitmap", True):
|
| 32 |
+
# self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
|
| 33 |
+
self.line("BottomLine", 0, ruler, self.w, 0)
|
| 34 |
+
|
| 35 |
+
def title(self, title):
|
| 36 |
+
"Set the title text of the dialog at the top."
|
| 37 |
+
# name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
|
| 38 |
+
# text, in VerdanaBold10
|
| 39 |
+
self.text("Title", 15, 10, 320, 60, 0x30003,
|
| 40 |
+
r"{\VerdanaBold10}%s" % title)
|
| 41 |
+
|
| 42 |
+
def back(self, title, next, name = "Back", active = 1):
|
| 43 |
+
"""Add a back button with a given title, the tab-next button,
|
| 44 |
+
its name in the Control table, possibly initially disabled.
|
| 45 |
+
|
| 46 |
+
Return the button, so that events can be associated"""
|
| 47 |
+
if active:
|
| 48 |
+
flags = 3 # Visible|Enabled
|
| 49 |
+
else:
|
| 50 |
+
flags = 1 # Visible
|
| 51 |
+
return self.pushbutton(name, 180, self.h-27 , 56, 17, flags, title, next)
|
| 52 |
+
|
| 53 |
+
def cancel(self, title, next, name = "Cancel", active = 1):
|
| 54 |
+
"""Add a cancel button with a given title, the tab-next button,
|
| 55 |
+
its name in the Control table, possibly initially disabled.
|
| 56 |
+
|
| 57 |
+
Return the button, so that events can be associated"""
|
| 58 |
+
if active:
|
| 59 |
+
flags = 3 # Visible|Enabled
|
| 60 |
+
else:
|
| 61 |
+
flags = 1 # Visible
|
| 62 |
+
return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next)
|
| 63 |
+
|
| 64 |
+
def next(self, title, next, name = "Next", active = 1):
|
| 65 |
+
"""Add a Next button with a given title, the tab-next button,
|
| 66 |
+
its name in the Control table, possibly initially disabled.
|
| 67 |
+
|
| 68 |
+
Return the button, so that events can be associated"""
|
| 69 |
+
if active:
|
| 70 |
+
flags = 3 # Visible|Enabled
|
| 71 |
+
else:
|
| 72 |
+
flags = 1 # Visible
|
| 73 |
+
return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next)
|
| 74 |
+
|
| 75 |
+
def xbutton(self, name, title, next, xpos):
|
| 76 |
+
"""Add a button with a given title, the tab-next button,
|
| 77 |
+
its name in the Control table, giving its x position; the
|
| 78 |
+
y-position is aligned with the other buttons.
|
| 79 |
+
|
| 80 |
+
Return the button, so that events can be associated"""
|
| 81 |
+
return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next)
|
| 82 |
+
|
| 83 |
+
class bdist_msi(Command):
|
| 84 |
+
|
| 85 |
+
description = "create a Microsoft Installer (.msi) binary distribution"
|
| 86 |
+
|
| 87 |
+
user_options = [('bdist-dir=', None,
|
| 88 |
+
"temporary directory for creating the distribution"),
|
| 89 |
+
('plat-name=', 'p',
|
| 90 |
+
"platform name to embed in generated filenames "
|
| 91 |
+
"(default: %s)" % get_platform()),
|
| 92 |
+
('keep-temp', 'k',
|
| 93 |
+
"keep the pseudo-installation tree around after " +
|
| 94 |
+
"creating the distribution archive"),
|
| 95 |
+
('target-version=', None,
|
| 96 |
+
"require a specific python version" +
|
| 97 |
+
" on the target system"),
|
| 98 |
+
('no-target-compile', 'c',
|
| 99 |
+
"do not compile .py to .pyc on the target system"),
|
| 100 |
+
('no-target-optimize', 'o',
|
| 101 |
+
"do not compile .py to .pyo (optimized) "
|
| 102 |
+
"on the target system"),
|
| 103 |
+
('dist-dir=', 'd',
|
| 104 |
+
"directory to put final built distributions in"),
|
| 105 |
+
('skip-build', None,
|
| 106 |
+
"skip rebuilding everything (for testing/debugging)"),
|
| 107 |
+
('install-script=', None,
|
| 108 |
+
"basename of installation script to be run after "
|
| 109 |
+
"installation or before deinstallation"),
|
| 110 |
+
('pre-install-script=', None,
|
| 111 |
+
"Fully qualified filename of a script to be run before "
|
| 112 |
+
"any files are installed. This script need not be in the "
|
| 113 |
+
"distribution"),
|
| 114 |
+
]
|
| 115 |
+
|
| 116 |
+
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
|
| 117 |
+
'skip-build']
|
| 118 |
+
|
| 119 |
+
all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
|
| 120 |
+
'2.5', '2.6', '2.7', '2.8', '2.9',
|
| 121 |
+
'3.0', '3.1', '3.2', '3.3', '3.4',
|
| 122 |
+
'3.5', '3.6', '3.7', '3.8', '3.9']
|
| 123 |
+
other_version = 'X'
|
| 124 |
+
|
| 125 |
+
def __init__(self, *args, **kw):
|
| 126 |
+
super().__init__(*args, **kw)
|
| 127 |
+
warnings.warn("bdist_msi command is deprecated since Python 3.9, "
|
| 128 |
+
"use bdist_wheel (wheel packages) instead",
|
| 129 |
+
DeprecationWarning, 2)
|
| 130 |
+
|
| 131 |
+
def initialize_options(self):
|
| 132 |
+
self.bdist_dir = None
|
| 133 |
+
self.plat_name = None
|
| 134 |
+
self.keep_temp = 0
|
| 135 |
+
self.no_target_compile = 0
|
| 136 |
+
self.no_target_optimize = 0
|
| 137 |
+
self.target_version = None
|
| 138 |
+
self.dist_dir = None
|
| 139 |
+
self.skip_build = None
|
| 140 |
+
self.install_script = None
|
| 141 |
+
self.pre_install_script = None
|
| 142 |
+
self.versions = None
|
| 143 |
+
|
| 144 |
+
def finalize_options(self):
|
| 145 |
+
self.set_undefined_options('bdist', ('skip_build', 'skip_build'))
|
| 146 |
+
|
| 147 |
+
if self.bdist_dir is None:
|
| 148 |
+
bdist_base = self.get_finalized_command('bdist').bdist_base
|
| 149 |
+
self.bdist_dir = os.path.join(bdist_base, 'msi')
|
| 150 |
+
|
| 151 |
+
short_version = get_python_version()
|
| 152 |
+
if (not self.target_version) and self.distribution.has_ext_modules():
|
| 153 |
+
self.target_version = short_version
|
| 154 |
+
|
| 155 |
+
if self.target_version:
|
| 156 |
+
self.versions = [self.target_version]
|
| 157 |
+
if not self.skip_build and self.distribution.has_ext_modules()\
|
| 158 |
+
and self.target_version != short_version:
|
| 159 |
+
raise DistutilsOptionError(
|
| 160 |
+
"target version can only be %s, or the '--skip-build'"
|
| 161 |
+
" option must be specified" % (short_version,))
|
| 162 |
+
else:
|
| 163 |
+
self.versions = list(self.all_versions)
|
| 164 |
+
|
| 165 |
+
self.set_undefined_options('bdist',
|
| 166 |
+
('dist_dir', 'dist_dir'),
|
| 167 |
+
('plat_name', 'plat_name'),
|
| 168 |
+
)
|
| 169 |
+
|
| 170 |
+
if self.pre_install_script:
|
| 171 |
+
raise DistutilsOptionError(
|
| 172 |
+
"the pre-install-script feature is not yet implemented")
|
| 173 |
+
|
| 174 |
+
if self.install_script:
|
| 175 |
+
for script in self.distribution.scripts:
|
| 176 |
+
if self.install_script == os.path.basename(script):
|
| 177 |
+
break
|
| 178 |
+
else:
|
| 179 |
+
raise DistutilsOptionError(
|
| 180 |
+
"install_script '%s' not found in scripts"
|
| 181 |
+
% self.install_script)
|
| 182 |
+
self.install_script_key = None
|
| 183 |
+
|
| 184 |
+
def run(self):
|
| 185 |
+
if not self.skip_build:
|
| 186 |
+
self.run_command('build')
|
| 187 |
+
|
| 188 |
+
install = self.reinitialize_command('install', reinit_subcommands=1)
|
| 189 |
+
install.prefix = self.bdist_dir
|
| 190 |
+
install.skip_build = self.skip_build
|
| 191 |
+
install.warn_dir = 0
|
| 192 |
+
|
| 193 |
+
install_lib = self.reinitialize_command('install_lib')
|
| 194 |
+
# we do not want to include pyc or pyo files
|
| 195 |
+
install_lib.compile = 0
|
| 196 |
+
install_lib.optimize = 0
|
| 197 |
+
|
| 198 |
+
if self.distribution.has_ext_modules():
|
| 199 |
+
# If we are building an installer for a Python version other
|
| 200 |
+
# than the one we are currently running, then we need to ensure
|
| 201 |
+
# our build_lib reflects the other Python version rather than ours.
|
| 202 |
+
# Note that for target_version!=sys.version, we must have skipped the
|
| 203 |
+
# build step, so there is no issue with enforcing the build of this
|
| 204 |
+
# version.
|
| 205 |
+
target_version = self.target_version
|
| 206 |
+
if not target_version:
|
| 207 |
+
assert self.skip_build, "Should have already checked this"
|
| 208 |
+
target_version = '%d.%d' % sys.version_info[:2]
|
| 209 |
+
plat_specifier = ".%s-%s" % (self.plat_name, target_version)
|
| 210 |
+
build = self.get_finalized_command('build')
|
| 211 |
+
build.build_lib = os.path.join(build.build_base,
|
| 212 |
+
'lib' + plat_specifier)
|
| 213 |
+
|
| 214 |
+
log.info("installing to %s", self.bdist_dir)
|
| 215 |
+
install.ensure_finalized()
|
| 216 |
+
|
| 217 |
+
# avoid warning of 'install_lib' about installing
|
| 218 |
+
# into a directory not in sys.path
|
| 219 |
+
sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
|
| 220 |
+
|
| 221 |
+
install.run()
|
| 222 |
+
|
| 223 |
+
del sys.path[0]
|
| 224 |
+
|
| 225 |
+
self.mkpath(self.dist_dir)
|
| 226 |
+
fullname = self.distribution.get_fullname()
|
| 227 |
+
installer_name = self.get_installer_filename(fullname)
|
| 228 |
+
installer_name = os.path.abspath(installer_name)
|
| 229 |
+
if os.path.exists(installer_name): os.unlink(installer_name)
|
| 230 |
+
|
| 231 |
+
metadata = self.distribution.metadata
|
| 232 |
+
author = metadata.author
|
| 233 |
+
if not author:
|
| 234 |
+
author = metadata.maintainer
|
| 235 |
+
if not author:
|
| 236 |
+
author = "UNKNOWN"
|
| 237 |
+
version = metadata.get_version()
|
| 238 |
+
# ProductVersion must be strictly numeric
|
| 239 |
+
# XXX need to deal with prerelease versions
|
| 240 |
+
sversion = "%d.%d.%d" % StrictVersion(version).version
|
| 241 |
+
# Prefix ProductName with Python x.y, so that
|
| 242 |
+
# it sorts together with the other Python packages
|
| 243 |
+
# in Add-Remove-Programs (APR)
|
| 244 |
+
fullname = self.distribution.get_fullname()
|
| 245 |
+
if self.target_version:
|
| 246 |
+
product_name = "Python %s %s" % (self.target_version, fullname)
|
| 247 |
+
else:
|
| 248 |
+
product_name = "Python %s" % (fullname)
|
| 249 |
+
self.db = msilib.init_database(installer_name, schema,
|
| 250 |
+
product_name, msilib.gen_uuid(),
|
| 251 |
+
sversion, author)
|
| 252 |
+
msilib.add_tables(self.db, sequence)
|
| 253 |
+
props = [('DistVersion', version)]
|
| 254 |
+
email = metadata.author_email or metadata.maintainer_email
|
| 255 |
+
if email:
|
| 256 |
+
props.append(("ARPCONTACT", email))
|
| 257 |
+
if metadata.url:
|
| 258 |
+
props.append(("ARPURLINFOABOUT", metadata.url))
|
| 259 |
+
if props:
|
| 260 |
+
add_data(self.db, 'Property', props)
|
| 261 |
+
|
| 262 |
+
self.add_find_python()
|
| 263 |
+
self.add_files()
|
| 264 |
+
self.add_scripts()
|
| 265 |
+
self.add_ui()
|
| 266 |
+
self.db.Commit()
|
| 267 |
+
|
| 268 |
+
if hasattr(self.distribution, 'dist_files'):
|
| 269 |
+
tup = 'bdist_msi', self.target_version or 'any', fullname
|
| 270 |
+
self.distribution.dist_files.append(tup)
|
| 271 |
+
|
| 272 |
+
if not self.keep_temp:
|
| 273 |
+
remove_tree(self.bdist_dir, dry_run=self.dry_run)
|
| 274 |
+
|
| 275 |
+
def add_files(self):
|
| 276 |
+
db = self.db
|
| 277 |
+
cab = msilib.CAB("distfiles")
|
| 278 |
+
rootdir = os.path.abspath(self.bdist_dir)
|
| 279 |
+
|
| 280 |
+
root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
|
| 281 |
+
f = Feature(db, "Python", "Python", "Everything",
|
| 282 |
+
0, 1, directory="TARGETDIR")
|
| 283 |
+
|
| 284 |
+
items = [(f, root, '')]
|
| 285 |
+
for version in self.versions + [self.other_version]:
|
| 286 |
+
target = "TARGETDIR" + version
|
| 287 |
+
name = default = "Python" + version
|
| 288 |
+
desc = "Everything"
|
| 289 |
+
if version is self.other_version:
|
| 290 |
+
title = "Python from another location"
|
| 291 |
+
level = 2
|
| 292 |
+
else:
|
| 293 |
+
title = "Python %s from registry" % version
|
| 294 |
+
level = 1
|
| 295 |
+
f = Feature(db, name, title, desc, 1, level, directory=target)
|
| 296 |
+
dir = Directory(db, cab, root, rootdir, target, default)
|
| 297 |
+
items.append((f, dir, version))
|
| 298 |
+
db.Commit()
|
| 299 |
+
|
| 300 |
+
seen = {}
|
| 301 |
+
for feature, dir, version in items:
|
| 302 |
+
todo = [dir]
|
| 303 |
+
while todo:
|
| 304 |
+
dir = todo.pop()
|
| 305 |
+
for file in os.listdir(dir.absolute):
|
| 306 |
+
afile = os.path.join(dir.absolute, file)
|
| 307 |
+
if os.path.isdir(afile):
|
| 308 |
+
short = "%s|%s" % (dir.make_short(file), file)
|
| 309 |
+
default = file + version
|
| 310 |
+
newdir = Directory(db, cab, dir, file, default, short)
|
| 311 |
+
todo.append(newdir)
|
| 312 |
+
else:
|
| 313 |
+
if not dir.component:
|
| 314 |
+
dir.start_component(dir.logical, feature, 0)
|
| 315 |
+
if afile not in seen:
|
| 316 |
+
key = seen[afile] = dir.add_file(file)
|
| 317 |
+
if file==self.install_script:
|
| 318 |
+
if self.install_script_key:
|
| 319 |
+
raise DistutilsOptionError(
|
| 320 |
+
"Multiple files with name %s" % file)
|
| 321 |
+
self.install_script_key = '[#%s]' % key
|
| 322 |
+
else:
|
| 323 |
+
key = seen[afile]
|
| 324 |
+
add_data(self.db, "DuplicateFile",
|
| 325 |
+
[(key + version, dir.component, key, None, dir.logical)])
|
| 326 |
+
db.Commit()
|
| 327 |
+
cab.commit(db)
|
| 328 |
+
|
| 329 |
+
def add_find_python(self):
    """Adds code to the installer to compute the location of Python.

    Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
    registry for each version of Python.

    Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
    else from PYTHON.MACHINE.X.Y.

    Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""

    # Sequence numbers for the generated actions; the assert at the bottom
    # keeps them below 500 (the region this method reserves).
    start = 402
    for ver in self.versions:
        install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
        machine_reg = "python.machine." + ver
        user_reg = "python.user." + ver
        machine_prop = "PYTHON.MACHINE." + ver
        user_prop = "PYTHON.USER." + ver
        machine_action = "PythonFromMachine" + ver
        user_action = "PythonFromUser" + ver
        exe_action = "PythonExe" + ver
        target_dir_prop = "TARGETDIR" + ver
        exe_prop = "PYTHON" + ver
        if msilib.Win64:
            # type: msidbLocatorTypeRawValue + msidbLocatorType64bit
            Type = 2+16
        else:
            Type = 2
        # RegLocator root field: 2 = HKLM (machine), 1 = HKCU (user)
        # -- NOTE(review): per MSI RegLocator docs, confirm.
        add_data(self.db, "RegLocator",
                [(machine_reg, 2, install_path, None, Type),
                 (user_reg, 1, install_path, None, Type)])
        # AppSearch copies each registry hit into the matching property.
        add_data(self.db, "AppSearch",
                [(machine_prop, machine_reg),
                 (user_prop, user_reg)])
        # 51 = "set property" custom action type; +256 presumably limits it
        # to run once per sequence -- TODO confirm against MSI docs.
        add_data(self.db, "CustomAction",
                [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
                 (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
                 (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
                ])
        # The user action is sequenced after the machine action, so a
        # per-user install location overrides the machine-wide one.
        add_data(self.db, "InstallExecuteSequence",
                [(machine_action, machine_prop, start),
                 (user_action, user_prop, start + 1),
                 (exe_action, None, start + 2),
                ])
        add_data(self.db, "InstallUISequence",
                [(machine_action, machine_prop, start),
                 (user_action, user_prop, start + 1),
                 (exe_action, None, start + 2),
                ])
        # Condition table: feature "PythonX.Y" drops to level 0 (hidden)
        # when no TARGETDIRX.Y was computed for that version.
        add_data(self.db, "Condition",
                [("Python" + ver, 0, "NOT TARGETDIR" + ver)])
        start += 4
        assert start < 500
|
| 382 |
+
|
| 383 |
+
def add_scripts(self):
    """Register the install script (per Python version) and the optional
    pre-install batch script as MSI custom actions."""
    if self.install_script:
        # Sequence numbers 6800+ place the script actions late in the
        # install; one action per selectable Python version.
        start = 6800
        for ver in self.versions + [self.other_version]:
            install_action = "install_script." + ver
            exe_prop = "PYTHON" + ver
            # Type 50: run the executable named by property exe_prop with
            # the script (referenced via its File key) as argument
            # -- NOTE(review): per MSI CustomAction type 50 docs, confirm.
            add_data(self.db, "CustomAction",
                    [(install_action, 50, exe_prop, self.install_script_key)])
            # "&PythonX.Y=3": only when that feature is being installed
            # locally (action state 3).
            add_data(self.db, "InstallExecuteSequence",
                    [(install_action, "&Python%s=3" % ver, start)])
            start += 1
    # XXX pre-install scripts are currently refused in finalize_options()
    # but if this feature is completed, it will also need to add
    # entries for each version as the above code does
    if self.pre_install_script:
        scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
        with open(scriptfn, "w") as f:
            # The batch file will be executed with [PYTHON], so that %1
            # is the path to the Python interpreter; %0 will be the path
            # of the batch file.
            # rem ="""
            # %1 %0
            # exit
            # """
            # <actual script>
            f.write('rem ="""\n%1 %0\nexit\n"""\n')
            with open(self.pre_install_script) as fin:
                f.write(fin.read())
        # Embed the generated batch file in the MSI's Binary table.
        add_data(self.db, "Binary",
            [("PreInstall", msilib.Binary(scriptfn))
             ])
        # Type 2: run an executable stored in the Binary table.
        add_data(self.db, "CustomAction",
            [("PreInstall", 2, "PreInstall", None)
             ])
        # 450: early in the sequence, only on first install.
        add_data(self.db, "InstallExecuteSequence",
                [("PreInstall", "NOT Installed", 450)])
|
| 419 |
+
|
| 420 |
+
|
| 421 |
+
def add_ui(self):
    """Build the complete installer UI: every dialog, its controls, and
    the control events that drive the wizard flow.

    Dialog/control geometry, style bits, event orderings and all
    user-visible strings are deliberately literal; see the MSI "Dialog",
    "Control" and "ControlEvent" table documentation for field meanings.
    """
    db = self.db
    # Common geometry shared by the wizard-style dialogs.
    x = y = 50
    w = 370
    h = 300
    title = "[ProductName] Setup"

    # see "Dialog Style Bits"
    modal = 3      # visible | modal
    modeless = 1   # visible
    track_disk_space = 32

    # UI customization properties
    add_data(db, "Property",
             # See "DefaultUIFont Property"
             [("DefaultUIFont", "DlgFont8"),
              # See "ErrorDialog Style Bit"
              ("ErrorDialog", "ErrorDlg"),
              ("Progress1", "Install"),   # modified in maintenance type dlg
              ("Progress2", "installs"),
              ("MaintenanceForm_Action", "Repair"),
              # possible values: ALL, JUSTME
              ("WhichUsers", "ALL")
              ])

    # Fonts, see "TextStyle Table"
    add_data(db, "TextStyle",
             [("DlgFont8", "Tahoma", 9, None, 0),
              ("DlgFontBold8", "Tahoma", 8, None, 1), #bold
              ("VerdanaBold10", "Verdana", 10, None, 1),
              ("VerdanaRed9", "Verdana", 9, 255, 0),
             ])

    # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
    # Numbers indicate sequence; see sequence.py for how these action integrate
    add_data(db, "InstallUISequence",
             [("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
              ("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
              # In the user interface, assume all-users installation if privileged.
              ("SelectFeaturesDlg", "Not Installed", 1230),
              # XXX no support for resume installations yet
              #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
              ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
              ("ProgressDlg", None, 1280)])

    add_data(db, 'ActionText', text.ActionText)
    add_data(db, 'UIText', text.UIText)
    #####################################################################
    # Standard dialogs: FatalError, UserExit, ExitDialog
    fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title,
                 "Finish", "Finish", "Finish")
    fatal.title("[ProductName] Installer ended prematurely")
    fatal.back("< Back", "Finish", active = 0)
    fatal.cancel("Cancel", "Back", active = 0)
    fatal.text("Description1", 15, 70, 320, 80, 0x30003,
               "[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.")
    fatal.text("Description2", 15, 155, 320, 20, 0x30003,
               "Click the Finish button to exit the Installer.")
    c=fatal.next("Finish", "Cancel", name="Finish")
    c.event("EndDialog", "Exit")

    user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title,
                 "Finish", "Finish", "Finish")
    user_exit.title("[ProductName] Installer was interrupted")
    user_exit.back("< Back", "Finish", active = 0)
    user_exit.cancel("Cancel", "Back", active = 0)
    user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
               "[ProductName] setup was interrupted. Your system has not been modified. "
               "To install this program at a later time, please run the installation again.")
    user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
               "Click the Finish button to exit the Installer.")
    c = user_exit.next("Finish", "Cancel", name="Finish")
    c.event("EndDialog", "Exit")

    exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
                         "Finish", "Finish", "Finish")
    exit_dialog.title("Completing the [ProductName] Installer")
    exit_dialog.back("< Back", "Finish", active = 0)
    exit_dialog.cancel("Cancel", "Back", active = 0)
    exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
               "Click the Finish button to exit the Installer.")
    c = exit_dialog.next("Finish", "Cancel", name="Finish")
    c.event("EndDialog", "Return")

    #####################################################################
    # Required dialog: FilesInUse, ErrorDlg
    inuse = PyDialog(db, "FilesInUse",
                     x, y, w, h,
                     19,                # KeepModeless|Modal|Visible
                     title,
                     "Retry", "Retry", "Retry", bitmap=False)
    inuse.text("Title", 15, 6, 200, 15, 0x30003,
               r"{\DlgFontBold8}Files in Use")
    inuse.text("Description", 20, 23, 280, 20, 0x30003,
               "Some files that need to be updated are currently in use.")
    inuse.text("Text", 20, 55, 330, 50, 3,
               "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
    inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
                  None, None, None)
    c=inuse.back("Exit", "Ignore", name="Exit")
    c.event("EndDialog", "Exit")
    c=inuse.next("Ignore", "Retry", name="Ignore")
    c.event("EndDialog", "Ignore")
    c=inuse.cancel("Retry", "Exit", name="Retry")
    c.event("EndDialog","Retry")

    # See "Error Dialog". See "ICE20" for the required names of the controls.
    error = Dialog(db, "ErrorDlg",
                   50, 10, 330, 101,
                   65543,       # Error|Minimize|Modal|Visible
                   title,
                   "ErrorText", None, None)
    error.text("ErrorText", 50,9,280,48,3, "")
    #error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
    error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo")
    error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes")
    error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort")
    error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel")
    error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore")
    error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk")
    error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry")

    #####################################################################
    # Global "Query Cancel" dialog
    cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
                    "No", "No", "No")
    cancel.text("Text", 48, 15, 194, 30, 3,
                "Are you sure you want to cancel [ProductName] installation?")
    #cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
    #               "py.ico", None, None)
    c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
    c.event("EndDialog", "Exit")

    c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
    c.event("EndDialog", "Return")

    #####################################################################
    # Global "Wait for costing" dialog
    costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
                     "Return", "Return", "Return")
    costing.text("Text", 48, 15, 194, 30, 3,
                 "Please wait while the installer finishes determining your disk space requirements.")
    c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
    c.event("EndDialog", "Exit")

    #####################################################################
    # Preparation dialog: no user input except cancellation
    prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
                    "Cancel", "Cancel", "Cancel")
    prep.text("Description", 15, 70, 320, 40, 0x30003,
              "Please wait while the Installer prepares to guide you through the installation.")
    prep.title("Welcome to the [ProductName] Installer")
    c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
    c.mapping("ActionText", "Text")
    c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
    c.mapping("ActionData", "Text")
    prep.back("Back", None, active=0)
    prep.next("Next", None, active=0)
    c=prep.cancel("Cancel", None)
    c.event("SpawnDialog", "CancelDlg")

    #####################################################################
    # Feature (Python directory) selection
    seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
                      "Next", "Next", "Cancel")
    seldlg.title("Select Python Installations")

    seldlg.text("Hint", 15, 30, 300, 20, 3,
                "Select the Python locations where %s should be installed."
                % self.distribution.get_fullname())

    seldlg.back("< Back", None, active=0)
    c = seldlg.next("Next >", "Cancel")
    order = 1
    # Default TARGETDIR to the source directory; later events override it
    # with the TARGETDIR of whichever version's feature is selected.
    c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
    for version in self.versions + [self.other_version]:
        order += 1
        c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
                "FEATURE_SELECTED AND &Python%s=3" % version,
                ordering=order)
    c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
    c.event("EndDialog", "Return", ordering=order + 2)
    c = seldlg.cancel("Cancel", "Features")
    c.event("SpawnDialog", "CancelDlg")

    c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
                       "FEATURE", None, "PathEdit", None)
    c.event("[FEATURE_SELECTED]", "1")
    ver = self.other_version
    install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
    dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver

    # "Other location" text and path edit are shown only when the
    # "Python from another location" feature is selected.
    c = seldlg.text("Other", 15, 200, 300, 15, 3,
                    "Provide an alternate Python location")
    c.condition("Enable", install_other_cond)
    c.condition("Show", install_other_cond)
    c.condition("Disable", dont_install_other_cond)
    c.condition("Hide", dont_install_other_cond)

    c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
                       "TARGETDIR" + ver, None, "Next", None)
    c.condition("Enable", install_other_cond)
    c.condition("Show", install_other_cond)
    c.condition("Disable", dont_install_other_cond)
    c.condition("Hide", dont_install_other_cond)

    #####################################################################
    # Disk cost
    cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
                    "OK", "OK", "OK", bitmap=False)
    cost.text("Title", 15, 6, 200, 15, 0x30003,
              r"{\DlgFontBold8}Disk Space Requirements")
    cost.text("Description", 20, 20, 280, 20, 0x30003,
              "The disk space required for the installation of the selected features.")
    cost.text("Text", 20, 53, 330, 60, 3,
              "The highlighted volumes (if any) do not have enough disk space "
              "available for the currently selected features. You can either "
              "remove some files from the highlighted volumes, or choose to "
              "install less features onto local drive(s), or select different "
              "destination drive(s).")
    cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
                 None, "{120}{70}{70}{70}{70}", None, None)
    cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")

    #####################################################################
    # WhichUsers Dialog. Only available on NT, and for privileged users.
    # This must be run before FindRelatedProducts, because that will
    # take into account whether the previous installation was per-user
    # or per-machine. We currently don't support going back to this
    # dialog after "Next" was selected; to support this, we would need to
    # find how to reset the ALLUSERS property, and how to re-run
    # FindRelatedProducts.
    # On Windows9x, the ALLUSERS property is ignored on the command line
    # and in the Property table, but installer fails according to the documentation
    # if a dialog attempts to set ALLUSERS.
    whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
                          "AdminInstall", "Next", "Cancel")
    whichusers.title("Select whether to install [ProductName] for all users of this computer.")
    # A radio group with two options: allusers, justme
    g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
                              "WhichUsers", "", "Next")
    g.add("ALL", 0, 5, 150, 20, "Install for all users")
    g.add("JUSTME", 0, 25, 150, 20, "Install just for me")

    whichusers.back("Back", None, active=0)

    c = whichusers.next("Next >", "Cancel")
    c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
    c.event("EndDialog", "Return", ordering = 2)

    c = whichusers.cancel("Cancel", "AdminInstall")
    c.event("SpawnDialog", "CancelDlg")

    #####################################################################
    # Installation Progress dialog (modeless)
    progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
                        "Cancel", "Cancel", "Cancel", bitmap=False)
    progress.text("Title", 20, 15, 200, 15, 0x30003,
                  r"{\DlgFontBold8}[Progress1] [ProductName]")
    progress.text("Text", 35, 65, 300, 30, 3,
                  "Please wait while the Installer [Progress2] [ProductName]. "
                  "This may take several minutes.")
    progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")

    c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
    c.mapping("ActionText", "Text")

    #c=progress.text("ActionData", 35, 140, 300, 20, 3, None)
    #c.mapping("ActionData", "Text")

    c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
                       None, "Progress done", None, None)
    c.mapping("SetProgress", "Progress")

    progress.back("< Back", "Next", active=False)
    progress.next("Next >", "Cancel", active=False)
    progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")

    ###################################################################
    # Maintenance type: repair/uninstall
    maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
                     "Next", "Next", "Cancel")
    maint.title("Welcome to the [ProductName] Setup Wizard")
    maint.text("BodyText", 15, 63, 330, 42, 3,
               "Select whether you want to repair or remove [ProductName].")
    g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
                        "MaintenanceForm_Action", "", "Next")
    #g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
    g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
    g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")

    maint.back("< Back", None, active=False)
    c=maint.next("Finish", "Cancel")
    # Change installation: Change progress dialog to "Change", then ask
    # for feature selection
    #c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
    #c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)

    # Reinstall: Change progress dialog to "Repair", then invoke reinstall
    # Also set list of reinstalled features to "ALL"
    c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
    c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
    c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
    c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)

    # Uninstall: Change progress to "Remove", then invoke uninstall
    # Also set list of removed features to "ALL"
    c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
    c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
    c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
    c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)

    # Close dialog when maintenance action scheduled
    c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
    #c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)

    maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")
|
| 738 |
+
|
| 739 |
+
def get_installer_filename(self, fullname):
    """Return the full path of the .msi file to generate.

    Factored out to allow overriding in subclasses.  When a target
    version is configured the filename carries a "-pyX.Y" marker.
    """
    if self.target_version:
        version_tag = "-py%s" % self.target_version
    else:
        version_tag = ""
    base_name = "%s.%s%s.msi" % (fullname, self.plat_name, version_tag)
    return os.path.join(self.dist_dir, base_name)
|
deepseek/lib/python3.10/distutils/command/bdist_rpm.py
ADDED
|
@@ -0,0 +1,579 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.bdist_rpm
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'bdist_rpm' command (create RPM source and binary
|
| 4 |
+
distributions)."""
|
| 5 |
+
|
| 6 |
+
import subprocess, sys, os
|
| 7 |
+
from distutils.core import Command
|
| 8 |
+
from distutils.debug import DEBUG
|
| 9 |
+
from distutils.file_util import write_file
|
| 10 |
+
from distutils.errors import *
|
| 11 |
+
from distutils.sysconfig import get_python_version
|
| 12 |
+
from distutils import log
|
| 13 |
+
|
| 14 |
+
class bdist_rpm(Command):
|
| 15 |
+
|
| 16 |
+
description = "create an RPM distribution"
|
| 17 |
+
|
| 18 |
+
user_options = [
|
| 19 |
+
('bdist-base=', None,
|
| 20 |
+
"base directory for creating built distributions"),
|
| 21 |
+
('rpm-base=', None,
|
| 22 |
+
"base directory for creating RPMs (defaults to \"rpm\" under "
|
| 23 |
+
"--bdist-base; must be specified for RPM 2)"),
|
| 24 |
+
('dist-dir=', 'd',
|
| 25 |
+
"directory to put final RPM files in "
|
| 26 |
+
"(and .spec files if --spec-only)"),
|
| 27 |
+
('python=', None,
|
| 28 |
+
"path to Python interpreter to hard-code in the .spec file "
|
| 29 |
+
"(default: \"python\")"),
|
| 30 |
+
('fix-python', None,
|
| 31 |
+
"hard-code the exact path to the current Python interpreter in "
|
| 32 |
+
"the .spec file"),
|
| 33 |
+
('spec-only', None,
|
| 34 |
+
"only regenerate spec file"),
|
| 35 |
+
('source-only', None,
|
| 36 |
+
"only generate source RPM"),
|
| 37 |
+
('binary-only', None,
|
| 38 |
+
"only generate binary RPM"),
|
| 39 |
+
('use-bzip2', None,
|
| 40 |
+
"use bzip2 instead of gzip to create source distribution"),
|
| 41 |
+
|
| 42 |
+
# More meta-data: too RPM-specific to put in the setup script,
|
| 43 |
+
# but needs to go in the .spec file -- so we make these options
|
| 44 |
+
# to "bdist_rpm". The idea is that packagers would put this
|
| 45 |
+
# info in setup.cfg, although they are of course free to
|
| 46 |
+
# supply it on the command line.
|
| 47 |
+
('distribution-name=', None,
|
| 48 |
+
"name of the (Linux) distribution to which this "
|
| 49 |
+
"RPM applies (*not* the name of the module distribution!)"),
|
| 50 |
+
('group=', None,
|
| 51 |
+
"package classification [default: \"Development/Libraries\"]"),
|
| 52 |
+
('release=', None,
|
| 53 |
+
"RPM release number"),
|
| 54 |
+
('serial=', None,
|
| 55 |
+
"RPM serial number"),
|
| 56 |
+
('vendor=', None,
|
| 57 |
+
"RPM \"vendor\" (eg. \"Joe Blow <joe@example.com>\") "
|
| 58 |
+
"[default: maintainer or author from setup script]"),
|
| 59 |
+
('packager=', None,
|
| 60 |
+
"RPM packager (eg. \"Jane Doe <jane@example.net>\") "
|
| 61 |
+
"[default: vendor]"),
|
| 62 |
+
('doc-files=', None,
|
| 63 |
+
"list of documentation files (space or comma-separated)"),
|
| 64 |
+
('changelog=', None,
|
| 65 |
+
"RPM changelog"),
|
| 66 |
+
('icon=', None,
|
| 67 |
+
"name of icon file"),
|
| 68 |
+
('provides=', None,
|
| 69 |
+
"capabilities provided by this package"),
|
| 70 |
+
('requires=', None,
|
| 71 |
+
"capabilities required by this package"),
|
| 72 |
+
('conflicts=', None,
|
| 73 |
+
"capabilities which conflict with this package"),
|
| 74 |
+
('build-requires=', None,
|
| 75 |
+
"capabilities required to build this package"),
|
| 76 |
+
('obsoletes=', None,
|
| 77 |
+
"capabilities made obsolete by this package"),
|
| 78 |
+
('no-autoreq', None,
|
| 79 |
+
"do not automatically calculate dependencies"),
|
| 80 |
+
|
| 81 |
+
# Actions to take when building RPM
|
| 82 |
+
('keep-temp', 'k',
|
| 83 |
+
"don't clean up RPM build directory"),
|
| 84 |
+
('no-keep-temp', None,
|
| 85 |
+
"clean up RPM build directory [default]"),
|
| 86 |
+
('use-rpm-opt-flags', None,
|
| 87 |
+
"compile with RPM_OPT_FLAGS when building from source RPM"),
|
| 88 |
+
('no-rpm-opt-flags', None,
|
| 89 |
+
"do not pass any RPM CFLAGS to compiler"),
|
| 90 |
+
('rpm3-mode', None,
|
| 91 |
+
"RPM 3 compatibility mode (default)"),
|
| 92 |
+
('rpm2-mode', None,
|
| 93 |
+
"RPM 2 compatibility mode"),
|
| 94 |
+
|
| 95 |
+
# Add the hooks necessary for specifying custom scripts
|
| 96 |
+
('prep-script=', None,
|
| 97 |
+
"Specify a script for the PREP phase of RPM building"),
|
| 98 |
+
('build-script=', None,
|
| 99 |
+
"Specify a script for the BUILD phase of RPM building"),
|
| 100 |
+
|
| 101 |
+
('pre-install=', None,
|
| 102 |
+
"Specify a script for the pre-INSTALL phase of RPM building"),
|
| 103 |
+
('install-script=', None,
|
| 104 |
+
"Specify a script for the INSTALL phase of RPM building"),
|
| 105 |
+
('post-install=', None,
|
| 106 |
+
"Specify a script for the post-INSTALL phase of RPM building"),
|
| 107 |
+
|
| 108 |
+
('pre-uninstall=', None,
|
| 109 |
+
"Specify a script for the pre-UNINSTALL phase of RPM building"),
|
| 110 |
+
('post-uninstall=', None,
|
| 111 |
+
"Specify a script for the post-UNINSTALL phase of RPM building"),
|
| 112 |
+
|
| 113 |
+
('clean-script=', None,
|
| 114 |
+
"Specify a script for the CLEAN phase of RPM building"),
|
| 115 |
+
|
| 116 |
+
('verify-script=', None,
|
| 117 |
+
"Specify a script for the VERIFY phase of the RPM build"),
|
| 118 |
+
|
| 119 |
+
# Allow a packager to explicitly force an architecture
|
| 120 |
+
('force-arch=', None,
|
| 121 |
+
"Force an architecture onto the RPM build process"),
|
| 122 |
+
|
| 123 |
+
('quiet', 'q',
|
| 124 |
+
"Run the INSTALL phase of RPM building in quiet mode"),
|
| 125 |
+
]
|
| 126 |
+
|
| 127 |
+
boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode',
|
| 128 |
+
'no-autoreq', 'quiet']
|
| 129 |
+
|
| 130 |
+
negative_opt = {'no-keep-temp': 'keep-temp',
|
| 131 |
+
'no-rpm-opt-flags': 'use-rpm-opt-flags',
|
| 132 |
+
'rpm2-mode': 'rpm3-mode'}
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def initialize_options(self):
|
| 136 |
+
self.bdist_base = None
|
| 137 |
+
self.rpm_base = None
|
| 138 |
+
self.dist_dir = None
|
| 139 |
+
self.python = None
|
| 140 |
+
self.fix_python = None
|
| 141 |
+
self.spec_only = None
|
| 142 |
+
self.binary_only = None
|
| 143 |
+
self.source_only = None
|
| 144 |
+
self.use_bzip2 = None
|
| 145 |
+
|
| 146 |
+
self.distribution_name = None
|
| 147 |
+
self.group = None
|
| 148 |
+
self.release = None
|
| 149 |
+
self.serial = None
|
| 150 |
+
self.vendor = None
|
| 151 |
+
self.packager = None
|
| 152 |
+
self.doc_files = None
|
| 153 |
+
self.changelog = None
|
| 154 |
+
self.icon = None
|
| 155 |
+
|
| 156 |
+
self.prep_script = None
|
| 157 |
+
self.build_script = None
|
| 158 |
+
self.install_script = None
|
| 159 |
+
self.clean_script = None
|
| 160 |
+
self.verify_script = None
|
| 161 |
+
self.pre_install = None
|
| 162 |
+
self.post_install = None
|
| 163 |
+
self.pre_uninstall = None
|
| 164 |
+
self.post_uninstall = None
|
| 165 |
+
self.prep = None
|
| 166 |
+
self.provides = None
|
| 167 |
+
self.requires = None
|
| 168 |
+
self.conflicts = None
|
| 169 |
+
self.build_requires = None
|
| 170 |
+
self.obsoletes = None
|
| 171 |
+
|
| 172 |
+
self.keep_temp = 0
|
| 173 |
+
self.use_rpm_opt_flags = 1
|
| 174 |
+
self.rpm3_mode = 1
|
| 175 |
+
self.no_autoreq = 0
|
| 176 |
+
|
| 177 |
+
self.force_arch = None
|
| 178 |
+
self.quiet = 0
|
| 179 |
+
|
| 180 |
+
def finalize_options(self):
|
| 181 |
+
self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
|
| 182 |
+
if self.rpm_base is None:
|
| 183 |
+
if not self.rpm3_mode:
|
| 184 |
+
raise DistutilsOptionError(
|
| 185 |
+
"you must specify --rpm-base in RPM 2 mode")
|
| 186 |
+
self.rpm_base = os.path.join(self.bdist_base, "rpm")
|
| 187 |
+
|
| 188 |
+
if self.python is None:
|
| 189 |
+
if self.fix_python:
|
| 190 |
+
self.python = sys.executable
|
| 191 |
+
else:
|
| 192 |
+
self.python = "python3"
|
| 193 |
+
elif self.fix_python:
|
| 194 |
+
raise DistutilsOptionError(
|
| 195 |
+
"--python and --fix-python are mutually exclusive options")
|
| 196 |
+
|
| 197 |
+
if os.name != 'posix':
|
| 198 |
+
raise DistutilsPlatformError("don't know how to create RPM "
|
| 199 |
+
"distributions on platform %s" % os.name)
|
| 200 |
+
if self.binary_only and self.source_only:
|
| 201 |
+
raise DistutilsOptionError(
|
| 202 |
+
"cannot supply both '--source-only' and '--binary-only'")
|
| 203 |
+
|
| 204 |
+
# don't pass CFLAGS to pure python distributions
|
| 205 |
+
if not self.distribution.has_ext_modules():
|
| 206 |
+
self.use_rpm_opt_flags = 0
|
| 207 |
+
|
| 208 |
+
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
|
| 209 |
+
self.finalize_package_data()
|
| 210 |
+
|
| 211 |
+
def finalize_package_data(self):
|
| 212 |
+
self.ensure_string('group', "Development/Libraries")
|
| 213 |
+
self.ensure_string('vendor',
|
| 214 |
+
"%s <%s>" % (self.distribution.get_contact(),
|
| 215 |
+
self.distribution.get_contact_email()))
|
| 216 |
+
self.ensure_string('packager')
|
| 217 |
+
self.ensure_string_list('doc_files')
|
| 218 |
+
if isinstance(self.doc_files, list):
|
| 219 |
+
for readme in ('README', 'README.txt'):
|
| 220 |
+
if os.path.exists(readme) and readme not in self.doc_files:
|
| 221 |
+
self.doc_files.append(readme)
|
| 222 |
+
|
| 223 |
+
self.ensure_string('release', "1")
|
| 224 |
+
self.ensure_string('serial') # should it be an int?
|
| 225 |
+
|
| 226 |
+
self.ensure_string('distribution_name')
|
| 227 |
+
|
| 228 |
+
self.ensure_string('changelog')
|
| 229 |
+
# Format changelog correctly
|
| 230 |
+
self.changelog = self._format_changelog(self.changelog)
|
| 231 |
+
|
| 232 |
+
self.ensure_filename('icon')
|
| 233 |
+
|
| 234 |
+
self.ensure_filename('prep_script')
|
| 235 |
+
self.ensure_filename('build_script')
|
| 236 |
+
self.ensure_filename('install_script')
|
| 237 |
+
self.ensure_filename('clean_script')
|
| 238 |
+
self.ensure_filename('verify_script')
|
| 239 |
+
self.ensure_filename('pre_install')
|
| 240 |
+
self.ensure_filename('post_install')
|
| 241 |
+
self.ensure_filename('pre_uninstall')
|
| 242 |
+
self.ensure_filename('post_uninstall')
|
| 243 |
+
|
| 244 |
+
# XXX don't forget we punted on summaries and descriptions -- they
|
| 245 |
+
# should be handled here eventually!
|
| 246 |
+
|
| 247 |
+
# Now *this* is some meta-data that belongs in the setup script...
|
| 248 |
+
self.ensure_string_list('provides')
|
| 249 |
+
self.ensure_string_list('requires')
|
| 250 |
+
self.ensure_string_list('conflicts')
|
| 251 |
+
self.ensure_string_list('build_requires')
|
| 252 |
+
self.ensure_string_list('obsoletes')
|
| 253 |
+
|
| 254 |
+
self.ensure_string('force_arch')
|
| 255 |
+
|
| 256 |
+
def run(self):
|
| 257 |
+
if DEBUG:
|
| 258 |
+
print("before _get_package_data():")
|
| 259 |
+
print("vendor =", self.vendor)
|
| 260 |
+
print("packager =", self.packager)
|
| 261 |
+
print("doc_files =", self.doc_files)
|
| 262 |
+
print("changelog =", self.changelog)
|
| 263 |
+
|
| 264 |
+
# make directories
|
| 265 |
+
if self.spec_only:
|
| 266 |
+
spec_dir = self.dist_dir
|
| 267 |
+
self.mkpath(spec_dir)
|
| 268 |
+
else:
|
| 269 |
+
rpm_dir = {}
|
| 270 |
+
for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'):
|
| 271 |
+
rpm_dir[d] = os.path.join(self.rpm_base, d)
|
| 272 |
+
self.mkpath(rpm_dir[d])
|
| 273 |
+
spec_dir = rpm_dir['SPECS']
|
| 274 |
+
|
| 275 |
+
# Spec file goes into 'dist_dir' if '--spec-only specified',
|
| 276 |
+
# build/rpm.<plat> otherwise.
|
| 277 |
+
spec_path = os.path.join(spec_dir,
|
| 278 |
+
"%s.spec" % self.distribution.get_name())
|
| 279 |
+
self.execute(write_file,
|
| 280 |
+
(spec_path,
|
| 281 |
+
self._make_spec_file()),
|
| 282 |
+
"writing '%s'" % spec_path)
|
| 283 |
+
|
| 284 |
+
if self.spec_only: # stop if requested
|
| 285 |
+
return
|
| 286 |
+
|
| 287 |
+
# Make a source distribution and copy to SOURCES directory with
|
| 288 |
+
# optional icon.
|
| 289 |
+
saved_dist_files = self.distribution.dist_files[:]
|
| 290 |
+
sdist = self.reinitialize_command('sdist')
|
| 291 |
+
if self.use_bzip2:
|
| 292 |
+
sdist.formats = ['bztar']
|
| 293 |
+
else:
|
| 294 |
+
sdist.formats = ['gztar']
|
| 295 |
+
self.run_command('sdist')
|
| 296 |
+
self.distribution.dist_files = saved_dist_files
|
| 297 |
+
|
| 298 |
+
source = sdist.get_archive_files()[0]
|
| 299 |
+
source_dir = rpm_dir['SOURCES']
|
| 300 |
+
self.copy_file(source, source_dir)
|
| 301 |
+
|
| 302 |
+
if self.icon:
|
| 303 |
+
if os.path.exists(self.icon):
|
| 304 |
+
self.copy_file(self.icon, source_dir)
|
| 305 |
+
else:
|
| 306 |
+
raise DistutilsFileError(
|
| 307 |
+
"icon file '%s' does not exist" % self.icon)
|
| 308 |
+
|
| 309 |
+
# build package
|
| 310 |
+
log.info("building RPMs")
|
| 311 |
+
rpm_cmd = ['rpmbuild']
|
| 312 |
+
|
| 313 |
+
if self.source_only: # what kind of RPMs?
|
| 314 |
+
rpm_cmd.append('-bs')
|
| 315 |
+
elif self.binary_only:
|
| 316 |
+
rpm_cmd.append('-bb')
|
| 317 |
+
else:
|
| 318 |
+
rpm_cmd.append('-ba')
|
| 319 |
+
rpm_cmd.extend(['--define', '__python %s' % self.python])
|
| 320 |
+
if self.rpm3_mode:
|
| 321 |
+
rpm_cmd.extend(['--define',
|
| 322 |
+
'_topdir %s' % os.path.abspath(self.rpm_base)])
|
| 323 |
+
if not self.keep_temp:
|
| 324 |
+
rpm_cmd.append('--clean')
|
| 325 |
+
|
| 326 |
+
if self.quiet:
|
| 327 |
+
rpm_cmd.append('--quiet')
|
| 328 |
+
|
| 329 |
+
rpm_cmd.append(spec_path)
|
| 330 |
+
# Determine the binary rpm names that should be built out of this spec
|
| 331 |
+
# file
|
| 332 |
+
# Note that some of these may not be really built (if the file
|
| 333 |
+
# list is empty)
|
| 334 |
+
nvr_string = "%{name}-%{version}-%{release}"
|
| 335 |
+
src_rpm = nvr_string + ".src.rpm"
|
| 336 |
+
non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
|
| 337 |
+
q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % (
|
| 338 |
+
src_rpm, non_src_rpm, spec_path)
|
| 339 |
+
|
| 340 |
+
out = os.popen(q_cmd)
|
| 341 |
+
try:
|
| 342 |
+
binary_rpms = []
|
| 343 |
+
source_rpm = None
|
| 344 |
+
while True:
|
| 345 |
+
line = out.readline()
|
| 346 |
+
if not line:
|
| 347 |
+
break
|
| 348 |
+
l = line.strip().split()
|
| 349 |
+
assert(len(l) == 2)
|
| 350 |
+
binary_rpms.append(l[1])
|
| 351 |
+
# The source rpm is named after the first entry in the spec file
|
| 352 |
+
if source_rpm is None:
|
| 353 |
+
source_rpm = l[0]
|
| 354 |
+
|
| 355 |
+
status = out.close()
|
| 356 |
+
if status:
|
| 357 |
+
raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))
|
| 358 |
+
|
| 359 |
+
finally:
|
| 360 |
+
out.close()
|
| 361 |
+
|
| 362 |
+
self.spawn(rpm_cmd)
|
| 363 |
+
|
| 364 |
+
if not self.dry_run:
|
| 365 |
+
if self.distribution.has_ext_modules():
|
| 366 |
+
pyversion = get_python_version()
|
| 367 |
+
else:
|
| 368 |
+
pyversion = 'any'
|
| 369 |
+
|
| 370 |
+
if not self.binary_only:
|
| 371 |
+
srpm = os.path.join(rpm_dir['SRPMS'], source_rpm)
|
| 372 |
+
assert(os.path.exists(srpm))
|
| 373 |
+
self.move_file(srpm, self.dist_dir)
|
| 374 |
+
filename = os.path.join(self.dist_dir, source_rpm)
|
| 375 |
+
self.distribution.dist_files.append(
|
| 376 |
+
('bdist_rpm', pyversion, filename))
|
| 377 |
+
|
| 378 |
+
if not self.source_only:
|
| 379 |
+
for rpm in binary_rpms:
|
| 380 |
+
rpm = os.path.join(rpm_dir['RPMS'], rpm)
|
| 381 |
+
if os.path.exists(rpm):
|
| 382 |
+
self.move_file(rpm, self.dist_dir)
|
| 383 |
+
filename = os.path.join(self.dist_dir,
|
| 384 |
+
os.path.basename(rpm))
|
| 385 |
+
self.distribution.dist_files.append(
|
| 386 |
+
('bdist_rpm', pyversion, filename))
|
| 387 |
+
|
| 388 |
+
def _dist_path(self, path):
|
| 389 |
+
return os.path.join(self.dist_dir, os.path.basename(path))
|
| 390 |
+
|
| 391 |
+
def _make_spec_file(self):
|
| 392 |
+
"""Generate the text of an RPM spec file and return it as a
|
| 393 |
+
list of strings (one per line).
|
| 394 |
+
"""
|
| 395 |
+
# definitions and headers
|
| 396 |
+
spec_file = [
|
| 397 |
+
'%define name ' + self.distribution.get_name(),
|
| 398 |
+
'%define version ' + self.distribution.get_version().replace('-','_'),
|
| 399 |
+
'%define unmangled_version ' + self.distribution.get_version(),
|
| 400 |
+
'%define release ' + self.release.replace('-','_'),
|
| 401 |
+
'',
|
| 402 |
+
'Summary: ' + self.distribution.get_description(),
|
| 403 |
+
]
|
| 404 |
+
|
| 405 |
+
# Workaround for #14443 which affects some RPM based systems such as
|
| 406 |
+
# RHEL6 (and probably derivatives)
|
| 407 |
+
vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}')
|
| 408 |
+
# Generate a potential replacement value for __os_install_post (whilst
|
| 409 |
+
# normalizing the whitespace to simplify the test for whether the
|
| 410 |
+
# invocation of brp-python-bytecompile passes in __python):
|
| 411 |
+
vendor_hook = '\n'.join([' %s \\' % line.strip()
|
| 412 |
+
for line in vendor_hook.splitlines()])
|
| 413 |
+
problem = "brp-python-bytecompile \\\n"
|
| 414 |
+
fixed = "brp-python-bytecompile %{__python} \\\n"
|
| 415 |
+
fixed_hook = vendor_hook.replace(problem, fixed)
|
| 416 |
+
if fixed_hook != vendor_hook:
|
| 417 |
+
spec_file.append('# Workaround for http://bugs.python.org/issue14443')
|
| 418 |
+
spec_file.append('%define __os_install_post ' + fixed_hook + '\n')
|
| 419 |
+
|
| 420 |
+
# put locale summaries into spec file
|
| 421 |
+
# XXX not supported for now (hard to put a dictionary
|
| 422 |
+
# in a config file -- arg!)
|
| 423 |
+
#for locale in self.summaries.keys():
|
| 424 |
+
# spec_file.append('Summary(%s): %s' % (locale,
|
| 425 |
+
# self.summaries[locale]))
|
| 426 |
+
|
| 427 |
+
spec_file.extend([
|
| 428 |
+
'Name: %{name}',
|
| 429 |
+
'Version: %{version}',
|
| 430 |
+
'Release: %{release}',])
|
| 431 |
+
|
| 432 |
+
# XXX yuck! this filename is available from the "sdist" command,
|
| 433 |
+
# but only after it has run: and we create the spec file before
|
| 434 |
+
# running "sdist", in case of --spec-only.
|
| 435 |
+
if self.use_bzip2:
|
| 436 |
+
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
|
| 437 |
+
else:
|
| 438 |
+
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')
|
| 439 |
+
|
| 440 |
+
spec_file.extend([
|
| 441 |
+
'License: ' + self.distribution.get_license(),
|
| 442 |
+
'Group: ' + self.group,
|
| 443 |
+
'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
|
| 444 |
+
'Prefix: %{_prefix}', ])
|
| 445 |
+
|
| 446 |
+
if not self.force_arch:
|
| 447 |
+
# noarch if no extension modules
|
| 448 |
+
if not self.distribution.has_ext_modules():
|
| 449 |
+
spec_file.append('BuildArch: noarch')
|
| 450 |
+
else:
|
| 451 |
+
spec_file.append( 'BuildArch: %s' % self.force_arch )
|
| 452 |
+
|
| 453 |
+
for field in ('Vendor',
|
| 454 |
+
'Packager',
|
| 455 |
+
'Provides',
|
| 456 |
+
'Requires',
|
| 457 |
+
'Conflicts',
|
| 458 |
+
'Obsoletes',
|
| 459 |
+
):
|
| 460 |
+
val = getattr(self, field.lower())
|
| 461 |
+
if isinstance(val, list):
|
| 462 |
+
spec_file.append('%s: %s' % (field, ' '.join(val)))
|
| 463 |
+
elif val is not None:
|
| 464 |
+
spec_file.append('%s: %s' % (field, val))
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
if self.distribution.get_url() != 'UNKNOWN':
|
| 468 |
+
spec_file.append('Url: ' + self.distribution.get_url())
|
| 469 |
+
|
| 470 |
+
if self.distribution_name:
|
| 471 |
+
spec_file.append('Distribution: ' + self.distribution_name)
|
| 472 |
+
|
| 473 |
+
if self.build_requires:
|
| 474 |
+
spec_file.append('BuildRequires: ' +
|
| 475 |
+
' '.join(self.build_requires))
|
| 476 |
+
|
| 477 |
+
if self.icon:
|
| 478 |
+
spec_file.append('Icon: ' + os.path.basename(self.icon))
|
| 479 |
+
|
| 480 |
+
if self.no_autoreq:
|
| 481 |
+
spec_file.append('AutoReq: 0')
|
| 482 |
+
|
| 483 |
+
spec_file.extend([
|
| 484 |
+
'',
|
| 485 |
+
'%description',
|
| 486 |
+
self.distribution.get_long_description()
|
| 487 |
+
])
|
| 488 |
+
|
| 489 |
+
# put locale descriptions into spec file
|
| 490 |
+
# XXX again, suppressed because config file syntax doesn't
|
| 491 |
+
# easily support this ;-(
|
| 492 |
+
#for locale in self.descriptions.keys():
|
| 493 |
+
# spec_file.extend([
|
| 494 |
+
# '',
|
| 495 |
+
# '%description -l ' + locale,
|
| 496 |
+
# self.descriptions[locale],
|
| 497 |
+
# ])
|
| 498 |
+
|
| 499 |
+
# rpm scripts
|
| 500 |
+
# figure out default build script
|
| 501 |
+
def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0]))
|
| 502 |
+
def_build = "%s build" % def_setup_call
|
| 503 |
+
if self.use_rpm_opt_flags:
|
| 504 |
+
def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
|
| 505 |
+
|
| 506 |
+
# insert contents of files
|
| 507 |
+
|
| 508 |
+
# XXX this is kind of misleading: user-supplied options are files
|
| 509 |
+
# that we open and interpolate into the spec file, but the defaults
|
| 510 |
+
# are just text that we drop in as-is. Hmmm.
|
| 511 |
+
|
| 512 |
+
install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT '
|
| 513 |
+
'--record=INSTALLED_FILES') % def_setup_call
|
| 514 |
+
|
| 515 |
+
script_options = [
|
| 516 |
+
('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
|
| 517 |
+
('build', 'build_script', def_build),
|
| 518 |
+
('install', 'install_script', install_cmd),
|
| 519 |
+
('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
|
| 520 |
+
('verifyscript', 'verify_script', None),
|
| 521 |
+
('pre', 'pre_install', None),
|
| 522 |
+
('post', 'post_install', None),
|
| 523 |
+
('preun', 'pre_uninstall', None),
|
| 524 |
+
('postun', 'post_uninstall', None),
|
| 525 |
+
]
|
| 526 |
+
|
| 527 |
+
for (rpm_opt, attr, default) in script_options:
|
| 528 |
+
# Insert contents of file referred to, if no file is referred to
|
| 529 |
+
# use 'default' as contents of script
|
| 530 |
+
val = getattr(self, attr)
|
| 531 |
+
if val or default:
|
| 532 |
+
spec_file.extend([
|
| 533 |
+
'',
|
| 534 |
+
'%' + rpm_opt,])
|
| 535 |
+
if val:
|
| 536 |
+
with open(val) as f:
|
| 537 |
+
spec_file.extend(f.read().split('\n'))
|
| 538 |
+
else:
|
| 539 |
+
spec_file.append(default)
|
| 540 |
+
|
| 541 |
+
|
| 542 |
+
# files section
|
| 543 |
+
spec_file.extend([
|
| 544 |
+
'',
|
| 545 |
+
'%files -f INSTALLED_FILES',
|
| 546 |
+
'%defattr(-,root,root)',
|
| 547 |
+
])
|
| 548 |
+
|
| 549 |
+
if self.doc_files:
|
| 550 |
+
spec_file.append('%doc ' + ' '.join(self.doc_files))
|
| 551 |
+
|
| 552 |
+
if self.changelog:
|
| 553 |
+
spec_file.extend([
|
| 554 |
+
'',
|
| 555 |
+
'%changelog',])
|
| 556 |
+
spec_file.extend(self.changelog)
|
| 557 |
+
|
| 558 |
+
return spec_file
|
| 559 |
+
|
| 560 |
+
def _format_changelog(self, changelog):
|
| 561 |
+
"""Format the changelog correctly and convert it to a list of strings
|
| 562 |
+
"""
|
| 563 |
+
if not changelog:
|
| 564 |
+
return changelog
|
| 565 |
+
new_changelog = []
|
| 566 |
+
for line in changelog.strip().split('\n'):
|
| 567 |
+
line = line.strip()
|
| 568 |
+
if line[0] == '*':
|
| 569 |
+
new_changelog.extend(['', line])
|
| 570 |
+
elif line[0] == '-':
|
| 571 |
+
new_changelog.append(line)
|
| 572 |
+
else:
|
| 573 |
+
new_changelog.append(' ' + line)
|
| 574 |
+
|
| 575 |
+
# strip trailing newline inserted by first changelog entry
|
| 576 |
+
if not new_changelog[0]:
|
| 577 |
+
del new_changelog[0]
|
| 578 |
+
|
| 579 |
+
return new_changelog
|
deepseek/lib/python3.10/distutils/command/build_ext.py
ADDED
|
@@ -0,0 +1,754 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.build_ext
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'build_ext' command, for building extension
|
| 4 |
+
modules (currently limited to C extensions, should accommodate C++
|
| 5 |
+
extensions ASAP)."""
|
| 6 |
+
|
| 7 |
+
import contextlib
|
| 8 |
+
import os
|
| 9 |
+
import re
|
| 10 |
+
import sys
|
| 11 |
+
from distutils.core import Command
|
| 12 |
+
from distutils.errors import *
|
| 13 |
+
from distutils.sysconfig import customize_compiler, get_python_version
|
| 14 |
+
from distutils.sysconfig import get_config_h_filename
|
| 15 |
+
from distutils.dep_util import newer_group
|
| 16 |
+
from distutils.extension import Extension
|
| 17 |
+
from distutils.util import get_platform
|
| 18 |
+
from distutils import log
|
| 19 |
+
|
| 20 |
+
from site import USER_BASE
|
| 21 |
+
|
| 22 |
+
# An extension name is just a dot-separated list of Python NAMEs (ie.
|
| 23 |
+
# the same as a fully-qualified module name).
|
| 24 |
+
extension_name_re = re.compile \
|
| 25 |
+
(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def show_compilers ():
|
| 29 |
+
from distutils.ccompiler import show_compilers
|
| 30 |
+
show_compilers()
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class build_ext(Command):
|
| 34 |
+
|
| 35 |
+
description = "build C/C++ extensions (compile/link to build directory)"
|
| 36 |
+
|
| 37 |
+
# XXX thoughts on how to deal with complex command-line options like
|
| 38 |
+
# these, i.e. how to make it so fancy_getopt can suck them off the
|
| 39 |
+
# command line and make it look like setup.py defined the appropriate
|
| 40 |
+
# lists of tuples of what-have-you.
|
| 41 |
+
# - each command needs a callback to process its command-line options
|
| 42 |
+
# - Command.__init__() needs access to its share of the whole
|
| 43 |
+
# command line (must ultimately come from
|
| 44 |
+
# Distribution.parse_command_line())
|
| 45 |
+
# - it then calls the current command class' option-parsing
|
| 46 |
+
# callback to deal with weird options like -D, which have to
|
| 47 |
+
# parse the option text and churn out some custom data
|
| 48 |
+
# structure
|
| 49 |
+
# - that data structure (in this case, a list of 2-tuples)
|
| 50 |
+
# will then be present in the command object by the time
|
| 51 |
+
# we get to finalize_options() (i.e. the constructor
|
| 52 |
+
# takes care of both command-line and client options
|
| 53 |
+
# in between initialize_options() and finalize_options())
|
| 54 |
+
|
| 55 |
+
sep_by = " (separated by '%s')" % os.pathsep
|
| 56 |
+
user_options = [
|
| 57 |
+
('build-lib=', 'b',
|
| 58 |
+
"directory for compiled extension modules"),
|
| 59 |
+
('build-temp=', 't',
|
| 60 |
+
"directory for temporary files (build by-products)"),
|
| 61 |
+
('plat-name=', 'p',
|
| 62 |
+
"platform name to cross-compile for, if supported "
|
| 63 |
+
"(default: %s)" % get_platform()),
|
| 64 |
+
('inplace', 'i',
|
| 65 |
+
"ignore build-lib and put compiled extensions into the source " +
|
| 66 |
+
"directory alongside your pure Python modules"),
|
| 67 |
+
('include-dirs=', 'I',
|
| 68 |
+
"list of directories to search for header files" + sep_by),
|
| 69 |
+
('define=', 'D',
|
| 70 |
+
"C preprocessor macros to define"),
|
| 71 |
+
('undef=', 'U',
|
| 72 |
+
"C preprocessor macros to undefine"),
|
| 73 |
+
('libraries=', 'l',
|
| 74 |
+
"external C libraries to link with"),
|
| 75 |
+
('library-dirs=', 'L',
|
| 76 |
+
"directories to search for external C libraries" + sep_by),
|
| 77 |
+
('rpath=', 'R',
|
| 78 |
+
"directories to search for shared C libraries at runtime"),
|
| 79 |
+
('link-objects=', 'O',
|
| 80 |
+
"extra explicit link objects to include in the link"),
|
| 81 |
+
('debug', 'g',
|
| 82 |
+
"compile/link with debugging information"),
|
| 83 |
+
('force', 'f',
|
| 84 |
+
"forcibly build everything (ignore file timestamps)"),
|
| 85 |
+
('compiler=', 'c',
|
| 86 |
+
"specify the compiler type"),
|
| 87 |
+
('parallel=', 'j',
|
| 88 |
+
"number of parallel build jobs"),
|
| 89 |
+
('swig-cpp', None,
|
| 90 |
+
"make SWIG create C++ files (default is C)"),
|
| 91 |
+
('swig-opts=', None,
|
| 92 |
+
"list of SWIG command line options"),
|
| 93 |
+
('swig=', None,
|
| 94 |
+
"path to the SWIG executable"),
|
| 95 |
+
('user', None,
|
| 96 |
+
"add user include, library and rpath")
|
| 97 |
+
]
|
| 98 |
+
|
| 99 |
+
boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user']
|
| 100 |
+
|
| 101 |
+
help_options = [
|
| 102 |
+
('help-compiler', None,
|
| 103 |
+
"list available compilers", show_compilers),
|
| 104 |
+
]
|
| 105 |
+
|
| 106 |
+
def initialize_options(self):
|
| 107 |
+
self.extensions = None
|
| 108 |
+
self.build_lib = None
|
| 109 |
+
self.plat_name = None
|
| 110 |
+
self.build_temp = None
|
| 111 |
+
self.inplace = 0
|
| 112 |
+
self.package = None
|
| 113 |
+
|
| 114 |
+
self.include_dirs = None
|
| 115 |
+
self.define = None
|
| 116 |
+
self.undef = None
|
| 117 |
+
self.libraries = None
|
| 118 |
+
self.library_dirs = None
|
| 119 |
+
self.rpath = None
|
| 120 |
+
self.link_objects = None
|
| 121 |
+
self.debug = None
|
| 122 |
+
self.force = None
|
| 123 |
+
self.compiler = None
|
| 124 |
+
self.swig = None
|
| 125 |
+
self.swig_cpp = None
|
| 126 |
+
self.swig_opts = None
|
| 127 |
+
self.user = None
|
| 128 |
+
self.parallel = None
|
| 129 |
+
|
| 130 |
+
def finalize_options(self):
|
| 131 |
+
from distutils import sysconfig
|
| 132 |
+
|
| 133 |
+
self.set_undefined_options('build',
|
| 134 |
+
('build_lib', 'build_lib'),
|
| 135 |
+
('build_temp', 'build_temp'),
|
| 136 |
+
('compiler', 'compiler'),
|
| 137 |
+
('debug', 'debug'),
|
| 138 |
+
('force', 'force'),
|
| 139 |
+
('parallel', 'parallel'),
|
| 140 |
+
('plat_name', 'plat_name'),
|
| 141 |
+
)
|
| 142 |
+
|
| 143 |
+
if self.package is None:
|
| 144 |
+
self.package = self.distribution.ext_package
|
| 145 |
+
|
| 146 |
+
self.extensions = self.distribution.ext_modules
|
| 147 |
+
|
| 148 |
+
# Make sure Python's include directories (for Python.h, pyconfig.h,
|
| 149 |
+
# etc.) are in the include search path.
|
| 150 |
+
py_include = sysconfig.get_python_inc()
|
| 151 |
+
plat_py_include = sysconfig.get_python_inc(plat_specific=1)
|
| 152 |
+
if self.include_dirs is None:
|
| 153 |
+
self.include_dirs = self.distribution.include_dirs or []
|
| 154 |
+
if isinstance(self.include_dirs, str):
|
| 155 |
+
self.include_dirs = self.include_dirs.split(os.pathsep)
|
| 156 |
+
|
| 157 |
+
# If in a virtualenv, add its include directory
|
| 158 |
+
# Issue 16116
|
| 159 |
+
if sys.exec_prefix != sys.base_exec_prefix:
|
| 160 |
+
self.include_dirs.append(os.path.join(sys.exec_prefix, 'include'))
|
| 161 |
+
|
| 162 |
+
# Put the Python "system" include dir at the end, so that
|
| 163 |
+
# any local include dirs take precedence.
|
| 164 |
+
self.include_dirs.extend(py_include.split(os.path.pathsep))
|
| 165 |
+
if plat_py_include != py_include:
|
| 166 |
+
self.include_dirs.extend(
|
| 167 |
+
plat_py_include.split(os.path.pathsep))
|
| 168 |
+
|
| 169 |
+
self.ensure_string_list('libraries')
|
| 170 |
+
self.ensure_string_list('link_objects')
|
| 171 |
+
|
| 172 |
+
# Life is easier if we're not forever checking for None, so
|
| 173 |
+
# simplify these options to empty lists if unset
|
| 174 |
+
if self.libraries is None:
|
| 175 |
+
self.libraries = []
|
| 176 |
+
if self.library_dirs is None:
|
| 177 |
+
self.library_dirs = []
|
| 178 |
+
elif isinstance(self.library_dirs, str):
|
| 179 |
+
self.library_dirs = self.library_dirs.split(os.pathsep)
|
| 180 |
+
|
| 181 |
+
if self.rpath is None:
|
| 182 |
+
self.rpath = []
|
| 183 |
+
elif isinstance(self.rpath, str):
|
| 184 |
+
self.rpath = self.rpath.split(os.pathsep)
|
| 185 |
+
|
| 186 |
+
# for extensions under windows use different directories
|
| 187 |
+
# for Release and Debug builds.
|
| 188 |
+
# also Python's library directory must be appended to library_dirs
|
| 189 |
+
if os.name == 'nt':
|
| 190 |
+
# the 'libs' directory is for binary installs - we assume that
|
| 191 |
+
# must be the *native* platform. But we don't really support
|
| 192 |
+
# cross-compiling via a binary install anyway, so we let it go.
|
| 193 |
+
self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
|
| 194 |
+
if sys.base_exec_prefix != sys.prefix: # Issue 16116
|
| 195 |
+
self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs'))
|
| 196 |
+
if self.debug:
|
| 197 |
+
self.build_temp = os.path.join(self.build_temp, "Debug")
|
| 198 |
+
else:
|
| 199 |
+
self.build_temp = os.path.join(self.build_temp, "Release")
|
| 200 |
+
|
| 201 |
+
# Append the source distribution include and library directories,
|
| 202 |
+
# this allows distutils on windows to work in the source tree
|
| 203 |
+
self.include_dirs.append(os.path.dirname(get_config_h_filename()))
|
| 204 |
+
_sys_home = getattr(sys, '_home', None)
|
| 205 |
+
if _sys_home:
|
| 206 |
+
self.library_dirs.append(_sys_home)
|
| 207 |
+
|
| 208 |
+
# Use the .lib files for the correct architecture
|
| 209 |
+
if self.plat_name == 'win32':
|
| 210 |
+
suffix = 'win32'
|
| 211 |
+
else:
|
| 212 |
+
# win-amd64
|
| 213 |
+
suffix = self.plat_name[4:]
|
| 214 |
+
new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
|
| 215 |
+
if suffix:
|
| 216 |
+
new_lib = os.path.join(new_lib, suffix)
|
| 217 |
+
self.library_dirs.append(new_lib)
|
| 218 |
+
|
| 219 |
+
# For extensions under Cygwin, Python's library directory must be
|
| 220 |
+
# appended to library_dirs
|
| 221 |
+
if sys.platform[:6] == 'cygwin':
|
| 222 |
+
if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
|
| 223 |
+
# building third party extensions
|
| 224 |
+
self.library_dirs.append(os.path.join(sys.prefix, "lib",
|
| 225 |
+
"python" + get_python_version(),
|
| 226 |
+
"config"))
|
| 227 |
+
else:
|
| 228 |
+
# building python standard extensions
|
| 229 |
+
self.library_dirs.append('.')
|
| 230 |
+
|
| 231 |
+
# For building extensions with a shared Python library,
|
| 232 |
+
# Python's library directory must be appended to library_dirs
|
| 233 |
+
# See Issues: #1600860, #4366
|
| 234 |
+
if (sysconfig.get_config_var('Py_ENABLE_SHARED')):
|
| 235 |
+
if not sysconfig.python_build:
|
| 236 |
+
# building third party extensions
|
| 237 |
+
self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
|
| 238 |
+
else:
|
| 239 |
+
# building python standard extensions
|
| 240 |
+
self.library_dirs.append('.')
|
| 241 |
+
|
| 242 |
+
# The argument parsing will result in self.define being a string, but
|
| 243 |
+
# it has to be a list of 2-tuples. All the preprocessor symbols
|
| 244 |
+
# specified by the 'define' option will be set to '1'. Multiple
|
| 245 |
+
# symbols can be separated with commas.
|
| 246 |
+
|
| 247 |
+
if self.define:
|
| 248 |
+
defines = self.define.split(',')
|
| 249 |
+
self.define = [(symbol, '1') for symbol in defines]
|
| 250 |
+
|
| 251 |
+
# The option for macros to undefine is also a string from the
|
| 252 |
+
# option parsing, but has to be a list. Multiple symbols can also
|
| 253 |
+
# be separated with commas here.
|
| 254 |
+
if self.undef:
|
| 255 |
+
self.undef = self.undef.split(',')
|
| 256 |
+
|
| 257 |
+
if self.swig_opts is None:
|
| 258 |
+
self.swig_opts = []
|
| 259 |
+
else:
|
| 260 |
+
self.swig_opts = self.swig_opts.split(' ')
|
| 261 |
+
|
| 262 |
+
# Finally add the user include and library directories if requested
|
| 263 |
+
if self.user:
|
| 264 |
+
user_include = os.path.join(USER_BASE, "include")
|
| 265 |
+
user_lib = os.path.join(USER_BASE, "lib")
|
| 266 |
+
if os.path.isdir(user_include):
|
| 267 |
+
self.include_dirs.append(user_include)
|
| 268 |
+
if os.path.isdir(user_lib):
|
| 269 |
+
self.library_dirs.append(user_lib)
|
| 270 |
+
self.rpath.append(user_lib)
|
| 271 |
+
|
| 272 |
+
if isinstance(self.parallel, str):
|
| 273 |
+
try:
|
| 274 |
+
self.parallel = int(self.parallel)
|
| 275 |
+
except ValueError:
|
| 276 |
+
raise DistutilsOptionError("parallel should be an integer")
|
| 277 |
+
|
| 278 |
+
def run(self):
    """Set up the CCompiler from this command's options and build every
    configured extension module via build_extensions()."""
    from distutils.ccompiler import new_compiler

    # 'self.extensions', as supplied by setup.py, is a list of
    # Extension instances.  See the documentation for Extension (in
    # distutils.extension) for details.
    #
    # For backwards compatibility with Distutils 0.8.2 and earlier, we
    # also allow the 'extensions' list to be a list of tuples:
    #    (ext_name, build_info)
    # where build_info is a dictionary containing everything that
    # Extension instances do except the name, with a few things being
    # differently named.  We convert these 2-tuples to Extension
    # instances as needed.

    if not self.extensions:
        return

    # If we were asked to build any C/C++ libraries, make sure that the
    # directory where we put them is in the library search path for
    # linking extensions.
    if self.distribution.has_c_libraries():
        build_clib = self.get_finalized_command('build_clib')
        self.libraries.extend(build_clib.get_library_names() or [])
        self.library_dirs.append(build_clib.build_clib)

    # Setup the CCompiler object that we'll use to do all the
    # compiling and linking
    self.compiler = new_compiler(compiler=self.compiler,
                                 verbose=self.verbose,
                                 dry_run=self.dry_run,
                                 force=self.force)
    customize_compiler(self.compiler)
    # If we are cross-compiling, init the compiler now (if we are not
    # cross-compiling, init would not hurt, but people may rely on
    # late initialization of compiler even if they shouldn't...)
    if os.name == 'nt' and self.plat_name != get_platform():
        self.compiler.initialize(self.plat_name)

    # And make sure that any compile/link-related options (which might
    # come from the command-line or from the setup script) are set in
    # that CCompiler object -- that way, they automatically apply to
    # all compiling and linking done here.
    if self.include_dirs is not None:
        self.compiler.set_include_dirs(self.include_dirs)
    if self.define is not None:
        # 'define' option is a list of (name,value) tuples
        for (name, value) in self.define:
            self.compiler.define_macro(name, value)
    if self.undef is not None:
        for macro in self.undef:
            self.compiler.undefine_macro(macro)
    if self.libraries is not None:
        self.compiler.set_libraries(self.libraries)
    if self.library_dirs is not None:
        self.compiler.set_library_dirs(self.library_dirs)
    if self.rpath is not None:
        self.compiler.set_runtime_library_dirs(self.rpath)
    if self.link_objects is not None:
        self.compiler.set_link_objects(self.link_objects)

    # Now actually compile and link everything.
    self.build_extensions()
|
| 341 |
+
|
| 342 |
+
def check_extensions_list(self, extensions):
    """Ensure that the list of extensions (presumably provided as a
    command option 'extensions') is valid, i.e. it is a list of
    Extension objects.  We also support the old-style list of 2-tuples,
    where the tuples are (ext_name, build_info), which are converted to
    Extension instances here.  The list is modified in place.

    Raise DistutilsSetupError if the structure is invalid anywhere;
    just returns otherwise.
    """
    if not isinstance(extensions, list):
        raise DistutilsSetupError(
            "'ext_modules' option must be a list of Extension instances")

    for i, ext in enumerate(extensions):
        if isinstance(ext, Extension):
            continue                # OK! (assume type-checking done
                                    # by Extension constructor)

        if not isinstance(ext, tuple) or len(ext) != 2:
            raise DistutilsSetupError(
                   "each element of 'ext_modules' option must be an "
                   "Extension instance or 2-tuple")

        ext_name, build_info = ext

        log.warn("old-style (ext_name, build_info) tuple found in "
                 "ext_modules for extension '%s' "
                 "-- please convert to Extension instance", ext_name)

        if not (isinstance(ext_name, str) and
                extension_name_re.match(ext_name)):
            raise DistutilsSetupError(
                   "first element of each tuple in 'ext_modules' "
                   "must be the extension name (a string)")

        if not isinstance(build_info, dict):
            raise DistutilsSetupError(
                   "second element of each tuple in 'ext_modules' "
                   "must be a dictionary (build info)")

        # OK, the (ext_name, build_info) dict is type-safe: convert it
        # to an Extension instance.
        ext = Extension(ext_name, build_info['sources'])

        # Easy stuff: one-to-one mapping from dict elements to
        # instance attributes.
        for key in ('include_dirs', 'library_dirs', 'libraries',
                    'extra_objects', 'extra_compile_args',
                    'extra_link_args'):
            val = build_info.get(key)
            if val is not None:
                setattr(ext, key, val)

        # Medium-easy stuff: same syntax/semantics, different names.
        ext.runtime_library_dirs = build_info.get('rpath')
        if 'def_file' in build_info:
            log.warn("'def_file' element of build info dict "
                     "no longer supported")

        # Non-trivial stuff: 'macros' split into 'define_macros'
        # and 'undef_macros'.  A 1-tuple means "undefine", a 2-tuple
        # means "define to this value".
        macros = build_info.get('macros')
        if macros:
            ext.define_macros = []
            ext.undef_macros = []
            for macro in macros:
                if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
                    raise DistutilsSetupError(
                           "'macros' element of build info dict "
                           "must be 1- or 2-tuple")
                if len(macro) == 1:
                    ext.undef_macros.append(macro[0])
                elif len(macro) == 2:
                    ext.define_macros.append(macro)

        # Replace the tuple in place so callers see an Extension.
        extensions[i] = ext
|
| 419 |
+
|
| 420 |
+
def get_source_files(self):
    """Return every source filename declared across all extensions."""
    self.check_extensions_list(self.extensions)
    # Wouldn't it be neat if we knew the names of header files too...
    return [src for ext in self.extensions for src in ext.sources]
|
| 428 |
+
|
| 429 |
+
def get_outputs(self):
    """Return the full build-tree path of every extension to be built.

    The 'inplace' flag is deliberately ignored: everything is reported
    as going into the "build" tree.
    """
    # Sanity check here as well -- this may not run in the same
    # invocation as build_extensions() (in fact, assume it doesn't!).
    self.check_extensions_list(self.extensions)
    return [self.get_ext_fullpath(ext.name) for ext in self.extensions]
|
| 442 |
+
|
| 443 |
+
def build_extensions(self):
    """Validate the extension list, then build serially or in parallel
    depending on the 'parallel' option."""
    self.check_extensions_list(self.extensions)
    if not self.parallel:
        self._build_extensions_serial()
    else:
        self._build_extensions_parallel()
|
| 450 |
+
|
| 451 |
+
def _build_extensions_parallel(self):
    """Build extensions concurrently with a thread pool.

    Falls back to the serial path when the worker count cannot be
    determined (os.cpu_count() returned None) or concurrent.futures
    is unavailable.
    """
    workers = self.parallel
    if self.parallel is True:
        workers = os.cpu_count()  # may return None
    try:
        from concurrent.futures import ThreadPoolExecutor
    except ImportError:
        workers = None

    if workers is None:
        self._build_extensions_serial()
        return

    with ThreadPoolExecutor(max_workers=workers) as executor:
        futures = [executor.submit(self.build_extension, ext)
                   for ext in self.extensions]
        # Collect results in submission order so failures are reported
        # against the right extension; _filter_build_errors() downgrades
        # failures of optional extensions to warnings.
        for ext, fut in zip(self.extensions, futures):
            with self._filter_build_errors(ext):
                fut.result()
|
| 470 |
+
|
| 471 |
+
def _build_extensions_serial(self):
|
| 472 |
+
for ext in self.extensions:
|
| 473 |
+
with self._filter_build_errors(ext):
|
| 474 |
+
self.build_extension(ext)
|
| 475 |
+
|
| 476 |
+
@contextlib.contextmanager
|
| 477 |
+
def _filter_build_errors(self, ext):
|
| 478 |
+
try:
|
| 479 |
+
yield
|
| 480 |
+
except (CCompilerError, DistutilsError, CompileError) as e:
|
| 481 |
+
if not ext.optional:
|
| 482 |
+
raise
|
| 483 |
+
self.warn('building extension "%s" failed: %s' %
|
| 484 |
+
(ext.name, e))
|
| 485 |
+
|
| 486 |
+
def build_extension(self, ext):
    """Compile and link one Extension into its final shared object.

    Raises DistutilsSetupError if ext.sources is missing or not a
    list/tuple.  The build is skipped when the target is newer than all
    sources and declared dependencies (unless --force).
    """
    sources = ext.sources
    if sources is None or not isinstance(sources, (list, tuple)):
        raise DistutilsSetupError(
              "in 'ext_modules' option (extension '%s'), "
              "'sources' must be present and must be "
              "a list of source filenames" % ext.name)
    # sort to make the resulting .so file build reproducible
    sources = sorted(sources)

    ext_path = self.get_ext_fullpath(ext.name)
    depends = sources + ext.depends
    if not (self.force or newer_group(depends, ext_path, 'newer')):
        log.debug("skipping '%s' extension (up-to-date)", ext.name)
        return
    else:
        log.info("building '%s' extension", ext.name)

    # First, scan the sources for SWIG definition files (.i), run
    # SWIG on 'em to create .c files, and modify the sources list
    # accordingly.
    sources = self.swig_sources(sources, ext)

    # Next, compile the source code to object files.

    # XXX not honouring 'define_macros' or 'undef_macros' -- the
    # CCompiler API needs to change to accommodate this, and I
    # want to do one thing at a time!

    # Two possible sources for extra compiler arguments:
    #   - 'extra_compile_args' in Extension object
    #   - CFLAGS environment variable (not particularly
    #     elegant, but people seem to expect it and I
    #     guess it's useful)
    # The environment variable should take precedence, and
    # any sensible compiler will give precedence to later
    # command line args.  Hence we combine them in order:
    extra_args = ext.extra_compile_args or []

    macros = ext.define_macros[:]
    for undef in ext.undef_macros:
        # a 1-tuple means "undefine this macro" to CCompiler.compile()
        macros.append((undef,))

    objects = self.compiler.compile(sources,
                                    output_dir=self.build_temp,
                                    macros=macros,
                                    include_dirs=ext.include_dirs,
                                    debug=self.debug,
                                    extra_postargs=extra_args,
                                    depends=ext.depends)

    # XXX outdated variable, kept here in case third-party code
    # needs it.
    self._built_objects = objects[:]

    # Now link the object files together into a "shared object" --
    # of course, first we have to figure out all the other things
    # that go into the mix.
    if ext.extra_objects:
        objects.extend(ext.extra_objects)
    extra_args = ext.extra_link_args or []

    # Detect target language, if not provided
    language = ext.language or self.compiler.detect_language(sources)

    self.compiler.link_shared_object(
        objects, ext_path,
        libraries=self.get_libraries(ext),
        library_dirs=ext.library_dirs,
        runtime_library_dirs=ext.runtime_library_dirs,
        extra_postargs=extra_args,
        export_symbols=self.get_export_symbols(ext),
        debug=self.debug,
        build_temp=self.build_temp,
        target_lang=language)
|
| 561 |
+
|
| 562 |
+
def swig_sources(self, sources, extension):
    """Walk the list of source files in 'sources', looking for SWIG
    interface (.i) files.  Run SWIG on all that are found, and
    return a modified 'sources' list with SWIG source files replaced
    by the generated C (or C++) files.
    """
    new_sources = []
    swig_sources = []
    swig_targets = {}       # maps .i source -> generated wrapper file

    # XXX this drops generated C/C++ files into the source tree, which
    # is fine for developers who want to distribute the generated
    # source -- but there should be an option to put SWIG output in
    # the temp dir.

    if self.swig_cpp:
        log.warn("--swig-cpp is deprecated - use --swig-opts=-c++")

    # C++ output if requested via the deprecated flag, the command-wide
    # swig options, or the per-extension swig options.
    if self.swig_cpp or ('-c++' in self.swig_opts) or \
       ('-c++' in extension.swig_opts):
        target_ext = '.cpp'
    else:
        target_ext = '.c'

    for source in sources:
        (base, ext) = os.path.splitext(source)
        if ext == ".i":             # SWIG interface file
            new_sources.append(base + '_wrap' + target_ext)
            swig_sources.append(source)
            swig_targets[source] = new_sources[-1]
        else:
            new_sources.append(source)

    if not swig_sources:
        return new_sources

    swig = self.swig or self.find_swig()
    swig_cmd = [swig, "-python"]
    swig_cmd.extend(self.swig_opts)
    if self.swig_cpp:
        swig_cmd.append("-c++")

    # Do not override commandline arguments
    if not self.swig_opts:
        for o in extension.swig_opts:
            swig_cmd.append(o)

    for source in swig_sources:
        target = swig_targets[source]
        log.info("swigging %s to %s", source, target)
        self.spawn(swig_cmd + ["-o", target, source])

    return new_sources
|
| 615 |
+
|
| 616 |
+
def find_swig(self):
    """Return the name of the SWIG executable.  On Unix, this is
    just "swig" -- it should be in the PATH.  Tries a bit harder on
    Windows by probing the standard c:\\swigX.Y install directories
    before falling back to the PATH.
    """
    if os.name == "posix":
        return "swig"
    if os.name == "nt":
        # Look for SWIG in its standard installation directory on
        # Windows (or so I presume!).  If we find it there, great;
        # if not, act like Unix and assume it's in the PATH.
        for version in ("1.3", "1.2", "1.1"):
            candidate = os.path.join("c:\\swig%s" % version, "swig.exe")
            if os.path.isfile(candidate):
                return candidate
        return "swig.exe"
    raise DistutilsPlatformError(
          "I don't know how to find (much less run) SWIG "
          "on platform '%s'" % os.name)
|
| 637 |
+
|
| 638 |
+
# -- Name generators -----------------------------------------------
|
| 639 |
+
# (extension names, filenames, whatever)
|
| 640 |
+
def get_ext_fullpath(self, ext_name):
    """Returns the path of the filename for a given extension.

    The file is located in `build_lib` or directly in the package
    (inplace option).
    """
    fullname = self.get_ext_fullname(ext_name)
    modpath = fullname.split('.')
    filename = self.get_ext_filename(modpath[-1])

    if not self.inplace:
        # no further work needed
        # returning :
        #   build_dir/package/path/filename
        filename = os.path.join(*modpath[:-1]+[filename])
        return os.path.join(self.build_lib, filename)

    # the inplace option requires to find the package directory
    # using the build_py command for that
    package = '.'.join(modpath[0:-1])
    build_py = self.get_finalized_command('build_py')
    package_dir = os.path.abspath(build_py.get_package_dir(package))

    # returning
    #   package_dir/filename
    return os.path.join(package_dir, filename)
|
| 666 |
+
|
| 667 |
+
def get_ext_fullname(self, ext_name):
    """Returns the fullname of a given extension name.

    Adds the `package.` prefix when the command has a package set."""
    prefix = self.package
    return ext_name if prefix is None else prefix + '.' + ext_name
|
| 675 |
+
|
| 676 |
+
def get_ext_filename(self, ext_name):
    r"""Convert the name of an extension (eg. "foo.bar") into the name
    of the file from which it will be loaded (eg. "foo/bar.so", or
    "foo\bar.pyd").
    """
    from distutils.sysconfig import get_config_var
    parts = ext_name.split('.')
    suffix = get_config_var('EXT_SUFFIX')
    return os.path.join(*parts) + suffix
|
| 685 |
+
|
| 686 |
+
def get_export_symbols(self, ext):
    """Return the list of symbols that a shared extension has to
    export.  This either uses 'ext.export_symbols' or, if it's not
    provided, "PyInit_" + module_name.  Only relevant on Windows, where
    the .pyd file (DLL) must export the module "PyInit_" function.
    """
    module_tail = ext.name.split('.')[-1]
    suffix = '_' + module_tail
    try:
        # Unicode module name support as defined in PEP-489
        # https://www.python.org/dev/peps/pep-0489/#export-hook-name
        suffix.encode('ascii')
    except UnicodeEncodeError:
        encoded = suffix.encode('punycode').replace(b'-', b'_')
        suffix = 'U' + encoded.decode('ascii')

    initfunc_name = "PyInit" + suffix
    if initfunc_name not in ext.export_symbols:
        ext.export_symbols.append(initfunc_name)
    return ext.export_symbols
|
| 704 |
+
|
| 705 |
+
def get_libraries(self, ext):
    """Return the list of libraries to link against when building a
    shared extension.  On most platforms, this is just 'ext.libraries';
    on Windows, we add the Python library (eg. python20.dll).
    """
    # The python library is always needed on Windows.  For MSVC, this
    # is redundant, since the library is mentioned in a pragma in
    # pyconfig.h that MSVC groks.  The other Windows compilers all seem
    # to need it mentioned explicitly, though, so that's what we do.
    # Append '_d' to the python import library on debug builds.
    if sys.platform == "win32":
        from distutils._msvccompiler import MSVCCompiler
        if not isinstance(self.compiler, MSVCCompiler):
            template = "python%d%d"
            if self.debug:
                template = template + '_d'
            # major/minor version extracted from sys.hexversion
            pythonlib = (template %
                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
            # don't extend ext.libraries, it may be shared with other
            # extensions, it is a reference to the original list
            return ext.libraries + [pythonlib]
    else:
        # On Android only the main executable and LD_PRELOADs are considered
        # to be RTLD_GLOBAL, all the dependencies of the main executable
        # remain RTLD_LOCAL and so the shared libraries must be linked with
        # libpython when python is built with a shared python library (issue
        # bpo-21536).
        # On Cygwin (and if required, other POSIX-like platforms based on
        # Windows like MinGW) it is simply necessary that all symbols in
        # shared libraries are resolved at link time.
        from distutils.sysconfig import get_config_var
        link_libpython = False
        if get_config_var('Py_ENABLE_SHARED'):
            # A native build on an Android device or on Cygwin
            if hasattr(sys, 'getandroidapilevel'):
                link_libpython = True
            elif sys.platform == 'cygwin':
                link_libpython = True
            elif '_PYTHON_HOST_PLATFORM' in os.environ:
                # We are cross-compiling for one of the relevant platforms
                if get_config_var('ANDROID_API_LEVEL') != 0:
                    link_libpython = True
                elif get_config_var('MACHDEP') == 'cygwin':
                    link_libpython = True

        if link_libpython:
            ldversion = get_config_var('LDVERSION')
            return ext.libraries + ['python' + ldversion]

    # Default: no implicit libpython; NOTE the win32/MSVC case above
    # also falls through to here.
    return ext.libraries
|
deepseek/lib/python3.10/distutils/command/clean.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.clean
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'clean' command."""
|
| 4 |
+
|
| 5 |
+
# contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>, added 2000-03-18
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
from distutils.core import Command
|
| 9 |
+
from distutils.dir_util import remove_tree
|
| 10 |
+
from distutils import log
|
| 11 |
+
|
| 12 |
+
class clean(Command):
    """Remove the byproducts of the 'build' command family."""

    description = "clean up temporary files from 'build' command"
    user_options = [
        ('build-base=', 'b',
         "base build directory (default: 'build.build-base')"),
        ('build-lib=', None,
         "build directory for all modules (default: 'build.build-lib')"),
        ('build-temp=', 't',
         "temporary build directory (default: 'build.build-temp')"),
        ('build-scripts=', None,
         "build directory for scripts (default: 'build.build-scripts')"),
        ('bdist-base=', None,
         "temporary directory for built distributions"),
        ('all', 'a',
         "remove all build output, not just temporary by-products")
    ]

    boolean_options = ['all']

    def initialize_options(self):
        # All directories default to the values of the 'build' and
        # 'bdist' commands (resolved in finalize_options).
        self.build_base = None
        self.build_lib = None
        self.build_temp = None
        self.build_scripts = None
        self.bdist_base = None
        self.all = None

    def finalize_options(self):
        self.set_undefined_options('build',
                                   ('build_base', 'build_base'),
                                   ('build_lib', 'build_lib'),
                                   ('build_scripts', 'build_scripts'),
                                   ('build_temp', 'build_temp'))
        self.set_undefined_options('bdist',
                                   ('bdist_base', 'bdist_base'))

    def run(self):
        # remove the build/temp.<plat> directory (unless it's already
        # gone)
        if os.path.exists(self.build_temp):
            remove_tree(self.build_temp, dry_run=self.dry_run)
        else:
            log.debug("'%s' does not exist -- can't clean it",
                      self.build_temp)

        if self.all:
            # remove build directories
            for directory in (self.build_lib,
                              self.bdist_base,
                              self.build_scripts):
                if os.path.exists(directory):
                    remove_tree(directory, dry_run=self.dry_run)
                else:
                    log.warn("'%s' does not exist -- can't clean it",
                             directory)

        # just for the heck of it, try to remove the base build directory:
        # we might have emptied it right now, but if not we don't care
        if not self.dry_run:
            try:
                os.rmdir(self.build_base)
                log.info("removing '%s'", self.build_base)
            except OSError:
                pass
|
deepseek/lib/python3.10/distutils/command/command_template
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.x
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'x' command.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
# created 2000/mm/dd, John Doe
|
| 7 |
+
|
| 8 |
+
__revision__ = "$Id$"
|
| 9 |
+
|
| 10 |
+
from distutils.core import Command
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class x(Command):
|
| 14 |
+
|
| 15 |
+
# Brief (40-50 characters) description of the command
|
| 16 |
+
description = ""
|
| 17 |
+
|
| 18 |
+
# List of option tuples: long name, short name (None if no short
|
| 19 |
+
# name), and help string.
|
| 20 |
+
user_options = [('', '',
|
| 21 |
+
""),
|
| 22 |
+
]
|
| 23 |
+
|
| 24 |
+
def initialize_options(self):
|
| 25 |
+
self. = None
|
| 26 |
+
self. = None
|
| 27 |
+
self. = None
|
| 28 |
+
|
| 29 |
+
def finalize_options(self):
|
| 30 |
+
if self.x is None:
|
| 31 |
+
self.x =
|
| 32 |
+
|
| 33 |
+
def run(self):
|
deepseek/lib/python3.10/distutils/command/install_lib.py
ADDED
|
@@ -0,0 +1,217 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.install_lib
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'install_lib' command
|
| 4 |
+
(install all Python modules)."""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import importlib.util
|
| 8 |
+
import sys
|
| 9 |
+
|
| 10 |
+
from distutils.core import Command
|
| 11 |
+
from distutils.errors import DistutilsOptionError
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# Extension for Python source files.
|
| 15 |
+
PYTHON_SOURCE_EXTENSION = ".py"
|
| 16 |
+
|
| 17 |
+
class install_lib(Command):
|
| 18 |
+
|
| 19 |
+
description = "install all Python modules (extensions and pure Python)"
|
| 20 |
+
|
| 21 |
+
# The byte-compilation options are a tad confusing. Here are the
|
| 22 |
+
# possible scenarios:
|
| 23 |
+
# 1) no compilation at all (--no-compile --no-optimize)
|
| 24 |
+
# 2) compile .pyc only (--compile --no-optimize; default)
|
| 25 |
+
# 3) compile .pyc and "opt-1" .pyc (--compile --optimize)
|
| 26 |
+
# 4) compile "opt-1" .pyc only (--no-compile --optimize)
|
| 27 |
+
# 5) compile .pyc and "opt-2" .pyc (--compile --optimize-more)
|
| 28 |
+
# 6) compile "opt-2" .pyc only (--no-compile --optimize-more)
|
| 29 |
+
#
|
| 30 |
+
# The UI for this is two options, 'compile' and 'optimize'.
|
| 31 |
+
# 'compile' is strictly boolean, and only decides whether to
|
| 32 |
+
# generate .pyc files. 'optimize' is three-way (0, 1, or 2), and
|
| 33 |
+
# decides both whether to generate .pyc files and what level of
|
| 34 |
+
# optimization to use.
|
| 35 |
+
|
| 36 |
+
user_options = [
|
| 37 |
+
('install-dir=', 'd', "directory to install to"),
|
| 38 |
+
('build-dir=','b', "build directory (where to install from)"),
|
| 39 |
+
('force', 'f', "force installation (overwrite existing files)"),
|
| 40 |
+
('compile', 'c', "compile .py to .pyc [default]"),
|
| 41 |
+
('no-compile', None, "don't compile .py files"),
|
| 42 |
+
('optimize=', 'O',
|
| 43 |
+
"also compile with optimization: -O1 for \"python -O\", "
|
| 44 |
+
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
|
| 45 |
+
('skip-build', None, "skip the build steps"),
|
| 46 |
+
]
|
| 47 |
+
|
| 48 |
+
boolean_options = ['force', 'compile', 'skip-build']
|
| 49 |
+
negative_opt = {'no-compile' : 'compile'}
|
| 50 |
+
|
| 51 |
+
def initialize_options(self):
    # let the 'install' command dictate our installation directory;
    # every option starts undefined and is resolved in
    # finalize_options() via set_undefined_options('install', ...).
    self.install_dir = None
    self.build_dir = None
    self.force = 0
    self.compile = None
    self.optimize = None
    self.skip_build = None
|
| 59 |
+
|
| 60 |
+
def finalize_options(self):
    """Pull our option values from the umbrella 'install' command and
    normalize the byte-compilation settings.

    Raises DistutilsOptionError if 'optimize' is not 0, 1 or 2.
    """
    # Get all the information we need to install pure Python modules
    # from the umbrella 'install' command -- build (source) directory,
    # install (target) directory, and whether to compile .py files.
    self.set_undefined_options('install',
                               ('build_lib', 'build_dir'),
                               ('install_lib', 'install_dir'),
                               ('force', 'force'),
                               ('compile', 'compile'),
                               ('optimize', 'optimize'),
                               ('skip_build', 'skip_build'),
                              )

    if self.compile is None:
        self.compile = True
    if self.optimize is None:
        self.optimize = False

    if not isinstance(self.optimize, int):
        # Command-line values arrive as strings; convert and validate
        # directly instead of abusing AssertionError for control flow.
        try:
            self.optimize = int(self.optimize)
        except ValueError:
            raise DistutilsOptionError("optimize must be 0, 1, or 2")
        if self.optimize not in (0, 1, 2):
            raise DistutilsOptionError("optimize must be 0, 1, or 2")
|
| 85 |
+
|
| 86 |
+
def run(self):
    """Build, copy the build tree into the install dir, then
    byte-compile the installed pure-Python modules."""
    # Make sure we have built everything we need first.
    self.build()

    # Install everything: simply dump the entire contents of the build
    # directory to the installation directory (that's the beauty of
    # having a build directory!)
    installed = self.install()

    # (Optionally) compile .py to .pyc
    if installed is not None and self.distribution.has_pure_modules():
        self.byte_compile(installed)
|
| 98 |
+
|
| 99 |
+
# -- Top-level worker functions ------------------------------------
|
| 100 |
+
# (called from 'run()')
|
| 101 |
+
|
| 102 |
+
def build(self):
    """Run the build commands we depend on, unless told to skip."""
    if self.skip_build:
        return
    if self.distribution.has_pure_modules():
        self.run_command('build_py')
    if self.distribution.has_ext_modules():
        self.run_command('build_ext')
|
| 108 |
+
|
| 109 |
+
def install(self):
    """Copy the build tree into the installation directory.

    Returns the list of installed files, or None when there is nothing
    to install (the build directory does not exist).
    """
    if not os.path.isdir(self.build_dir):
        self.warn("'%s' does not exist -- no Python modules to install" %
                  self.build_dir)
        return None
    return self.copy_tree(self.build_dir, self.install_dir)
|
| 117 |
+
|
| 118 |
+
def byte_compile(self, files):
|
| 119 |
+
if sys.dont_write_bytecode:
|
| 120 |
+
self.warn('byte-compiling is disabled, skipping.')
|
| 121 |
+
return
|
| 122 |
+
|
| 123 |
+
from distutils.util import byte_compile
|
| 124 |
+
|
| 125 |
+
# Get the "--root" directory supplied to the "install" command,
|
| 126 |
+
# and use it as a prefix to strip off the purported filename
|
| 127 |
+
# encoded in bytecode files. This is far from complete, but it
|
| 128 |
+
# should at least generate usable bytecode in RPM distributions.
|
| 129 |
+
install_root = self.get_finalized_command('install').root
|
| 130 |
+
|
| 131 |
+
if self.compile:
|
| 132 |
+
byte_compile(files, optimize=0,
|
| 133 |
+
force=self.force, prefix=install_root,
|
| 134 |
+
dry_run=self.dry_run)
|
| 135 |
+
if self.optimize > 0:
|
| 136 |
+
byte_compile(files, optimize=self.optimize,
|
| 137 |
+
force=self.force, prefix=install_root,
|
| 138 |
+
verbose=self.verbose, dry_run=self.dry_run)
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
# -- Utility methods -----------------------------------------------
|
| 142 |
+
|
| 143 |
+
def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
|
| 144 |
+
if not has_any:
|
| 145 |
+
return []
|
| 146 |
+
|
| 147 |
+
build_cmd = self.get_finalized_command(build_cmd)
|
| 148 |
+
build_files = build_cmd.get_outputs()
|
| 149 |
+
build_dir = getattr(build_cmd, cmd_option)
|
| 150 |
+
|
| 151 |
+
prefix_len = len(build_dir) + len(os.sep)
|
| 152 |
+
outputs = []
|
| 153 |
+
for file in build_files:
|
| 154 |
+
outputs.append(os.path.join(output_dir, file[prefix_len:]))
|
| 155 |
+
|
| 156 |
+
return outputs
|
| 157 |
+
|
| 158 |
+
def _bytecode_filenames(self, py_filenames):
|
| 159 |
+
bytecode_files = []
|
| 160 |
+
for py_file in py_filenames:
|
| 161 |
+
# Since build_py handles package data installation, the
|
| 162 |
+
# list of outputs can contain more than just .py files.
|
| 163 |
+
# Make sure we only report bytecode for the .py files.
|
| 164 |
+
ext = os.path.splitext(os.path.normcase(py_file))[1]
|
| 165 |
+
if ext != PYTHON_SOURCE_EXTENSION:
|
| 166 |
+
continue
|
| 167 |
+
if self.compile:
|
| 168 |
+
bytecode_files.append(importlib.util.cache_from_source(
|
| 169 |
+
py_file, optimization=''))
|
| 170 |
+
if self.optimize > 0:
|
| 171 |
+
bytecode_files.append(importlib.util.cache_from_source(
|
| 172 |
+
py_file, optimization=self.optimize))
|
| 173 |
+
|
| 174 |
+
return bytecode_files
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
# -- External interface --------------------------------------------
|
| 178 |
+
# (called by outsiders)
|
| 179 |
+
|
| 180 |
+
def get_outputs(self):
|
| 181 |
+
"""Return the list of files that would be installed if this command
|
| 182 |
+
were actually run. Not affected by the "dry-run" flag or whether
|
| 183 |
+
modules have actually been built yet.
|
| 184 |
+
"""
|
| 185 |
+
pure_outputs = \
|
| 186 |
+
self._mutate_outputs(self.distribution.has_pure_modules(),
|
| 187 |
+
'build_py', 'build_lib',
|
| 188 |
+
self.install_dir)
|
| 189 |
+
if self.compile:
|
| 190 |
+
bytecode_outputs = self._bytecode_filenames(pure_outputs)
|
| 191 |
+
else:
|
| 192 |
+
bytecode_outputs = []
|
| 193 |
+
|
| 194 |
+
ext_outputs = \
|
| 195 |
+
self._mutate_outputs(self.distribution.has_ext_modules(),
|
| 196 |
+
'build_ext', 'build_lib',
|
| 197 |
+
self.install_dir)
|
| 198 |
+
|
| 199 |
+
return pure_outputs + bytecode_outputs + ext_outputs
|
| 200 |
+
|
| 201 |
+
def get_inputs(self):
|
| 202 |
+
"""Get the list of files that are input to this command, ie. the
|
| 203 |
+
files that get installed as they are named in the build tree.
|
| 204 |
+
The files in this list correspond one-to-one to the output
|
| 205 |
+
filenames returned by 'get_outputs()'.
|
| 206 |
+
"""
|
| 207 |
+
inputs = []
|
| 208 |
+
|
| 209 |
+
if self.distribution.has_pure_modules():
|
| 210 |
+
build_py = self.get_finalized_command('build_py')
|
| 211 |
+
inputs.extend(build_py.get_outputs())
|
| 212 |
+
|
| 213 |
+
if self.distribution.has_ext_modules():
|
| 214 |
+
build_ext = self.get_finalized_command('build_ext')
|
| 215 |
+
inputs.extend(build_ext.get_outputs())
|
| 216 |
+
|
| 217 |
+
return inputs
|
deepseek/lib/python3.10/distutils/command/register.py
ADDED
|
@@ -0,0 +1,304 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.register
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'register' command (register with the repository).
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
# created 2002/10/21, Richard Jones
|
| 7 |
+
|
| 8 |
+
import getpass
|
| 9 |
+
import io
|
| 10 |
+
import urllib.parse, urllib.request
|
| 11 |
+
from warnings import warn
|
| 12 |
+
|
| 13 |
+
from distutils.core import PyPIRCCommand
|
| 14 |
+
from distutils.errors import *
|
| 15 |
+
from distutils import log
|
| 16 |
+
|
| 17 |
+
class register(PyPIRCCommand):
    """Post the distribution's meta-data to a package index server.

    Credentials are read from .pypirc when available; otherwise the
    user is prompted interactively (login, new-user registration, or
    password reset).
    """

    description = ("register the distribution with the Python package index")
    user_options = PyPIRCCommand.user_options + [
        ('list-classifiers', None,
         'list the valid Trove classifiers'),
        ('strict', None ,
         'Will stop the registering if the meta-data are not fully compliant')
        ]
    # NOTE(review): 'verify' has no matching entry in user_options above --
    # presumably kept for backward compatibility; confirm before removing.
    boolean_options = PyPIRCCommand.boolean_options + [
        'verify', 'list-classifiers', 'strict']

    # The 'check' sub-command always runs first to validate meta-data.
    sub_commands = [('check', lambda self: True)]

    def initialize_options(self):
        """Set option defaults prior to command-line parsing."""
        PyPIRCCommand.initialize_options(self)
        self.list_classifiers = 0
        self.strict = 0

    def finalize_options(self):
        """Finalize options and forward 'strict' to the check sub-command."""
        PyPIRCCommand.finalize_options(self)
        # setting options for the `check` subcommand
        check_options = {'strict': ('register', self.strict),
                         'restructuredtext': ('register', 1)}
        self.distribution.command_options['check'] = check_options

    def run(self):
        """Dispatch to verify/list-classifiers/register per the options."""
        self.finalize_options()
        self._set_config()

        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        if self.dry_run:
            # Dry run: only ask the server to validate the meta-data.
            self.verify_metadata()
        elif self.list_classifiers:
            self.classifiers()
        else:
            self.send_metadata()

    def check_metadata(self):
        """Deprecated API."""
        warn("distutils.command.register.check_metadata is deprecated, \
              use the check command instead", PendingDeprecationWarning)
        check = self.distribution.get_command_obj('check')
        check.ensure_finalized()
        check.strict = self.strict
        check.restructuredtext = 1
        check.run()

    def _set_config(self):
        ''' Reads the configuration file and set attributes.
        '''
        config = self._read_pypirc()
        if config != {}:
            self.username = config['username']
            self.password = config['password']
            self.repository = config['repository']
            self.realm = config['realm']
            self.has_config = True
        else:
            # No .pypirc: only the default repository (or its 'pypi'
            # alias) is usable without stored credentials.
            if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
                raise ValueError('%s not found in .pypirc' % self.repository)
            if self.repository == 'pypi':
                self.repository = self.DEFAULT_REPOSITORY
            self.has_config = False

    def classifiers(self):
        ''' Fetch the list of classifiers from the server.
        '''
        url = self.repository+'?:action=list_classifiers'
        response = urllib.request.urlopen(url)
        log.info(self._read_pypi_response(response))

    def verify_metadata(self):
        ''' Send the metadata to the package index server to be checked.
        '''
        # send the info to the server and report the result
        (code, result) = self.post_to_server(self.build_post_data('verify'))
        log.info('Server response (%s): %s', code, result)

    def send_metadata(self):
        ''' Send the metadata to the package index server.

            Well, do the following:
            1. figure who the user is, and then
            2. send the data as a Basic auth'ed POST.

            First we try to read the username/password from $HOME/.pypirc,
            which is a ConfigParser-formatted file with a section
            [distutils] containing username and password entries (both
            in clear text). Eg:

                [distutils]
                index-servers =
                    pypi

                [pypi]
                username: fred
                password: sekrit

            Otherwise, to figure who the user is, we offer the user three
            choices:

             1. use existing login,
             2. register as a new user, or
             3. set the password to a random string and email the user.

        '''
        # see if we can short-cut and get the username/password from the
        # config
        if self.has_config:
            choice = '1'
            username = self.username
            password = self.password
        else:
            choice = 'x'
            username = password = ''

        # get the user's login info
        choices = '1 2 3 4'.split()
        while choice not in choices:
            self.announce('''\
We need to know who you are, so please choose either:
 1. use your existing login,
 2. register as a new user,
 3. have the server generate a new password for you (and email it to you), or
 4. quit
Your selection [default 1]: ''', log.INFO)
            choice = input()
            if not choice:
                choice = '1'
            elif choice not in choices:
                print('Please choose one of the four options!')

        if choice == '1':
            # get the username and password
            while not username:
                username = input('Username: ')
            while not password:
                password = getpass.getpass('Password: ')

            # set up the authentication
            auth = urllib.request.HTTPPasswordMgr()
            host = urllib.parse.urlparse(self.repository)[1]
            auth.add_password(self.realm, host, username, password)
            # send the info to the server and report the result
            code, result = self.post_to_server(self.build_post_data('submit'),
                auth)
            self.announce('Server response (%s): %s' % (code, result),
                          log.INFO)

            # possibly save the login
            if code == 200:
                if self.has_config:
                    # sharing the password in the distribution instance
                    # so the upload command can reuse it
                    self.distribution.password = password
                else:
                    self.announce(('I can store your PyPI login so future '
                                   'submissions will be faster.'), log.INFO)
                    self.announce('(the login will be stored in %s)' % \
                                  self._get_rc_file(), log.INFO)
                    choice = 'X'
                    while choice.lower() not in 'yn':
                        choice = input('Save your login (y/N)?')
                        if not choice:
                            choice = 'n'
                    if choice.lower() == 'y':
                        self._store_pypirc(username, password)

        elif choice == '2':
            data = {':action': 'user'}
            data['name'] = data['password'] = data['email'] = ''
            data['confirm'] = None
            while not data['name']:
                data['name'] = input('Username: ')
            while data['password'] != data['confirm']:
                while not data['password']:
                    data['password'] = getpass.getpass('Password: ')
                while not data['confirm']:
                    data['confirm'] = getpass.getpass(' Confirm: ')
                if data['password'] != data['confirm']:
                    data['password'] = ''
                    data['confirm'] = None
                    print("Password and confirm don't match!")
            while not data['email']:
                data['email'] = input('   EMail: ')
            code, result = self.post_to_server(data)
            if code != 200:
                log.info('Server response (%s): %s', code, result)
            else:
                log.info('You will receive an email shortly.')
                log.info(('Follow the instructions in it to '
                          'complete registration.'))
        elif choice == '3':
            data = {':action': 'password_reset'}
            data['email'] = ''
            while not data['email']:
                data['email'] = input('Your email address: ')
            code, result = self.post_to_server(data)
            log.info('Server response (%s): %s', code, result)

    def build_post_data(self, action):
        """Assemble the meta-data dict POSTed to the index server."""
        # figure the data to send - the metadata plus some additional
        # information used by the package server
        meta = self.distribution.metadata
        data = {
            ':action': action,
            'metadata_version' : '1.0',
            'name': meta.get_name(),
            'version': meta.get_version(),
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
        }
        # PEP 314 fields bump the declared metadata version.
        if data['provides'] or data['requires'] or data['obsoletes']:
            data['metadata_version'] = '1.1'
        return data

    def post_to_server(self, data, auth=None):
        ''' Post a query to the server, and return a string response.
        '''
        if 'name' in data:
            self.announce('Registering %s to %s' % (data['name'],
                                                    self.repository),
                          log.INFO)
        # Build up the MIME payload for the urllib2 POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = io.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) not in (type([]), type( () )):
                value = [value]
            for value in value:
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue().encode("utf-8")

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary,
            'Content-length': str(len(body))
        }
        req = urllib.request.Request(self.repository, body, headers)

        # handle HTTP and include the Basic Auth handler
        opener = urllib.request.build_opener(
            urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
        )
        data = ''
        try:
            result = opener.open(req)
        except urllib.error.HTTPError as e:
            if self.show_response:
                data = e.fp.read()
            result = e.code, e.msg
        except urllib.error.URLError as e:
            result = 500, str(e)
        else:
            if self.show_response:
                data = self._read_pypi_response(result)
            result = 200, 'OK'
        if self.show_response:
            msg = '\n'.join(('-' * 75, data, '-' * 75))
            self.announce(msg, log.INFO)
        return result
|
deepseek/lib/python3.10/distutils/command/sdist.py
ADDED
|
@@ -0,0 +1,494 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.sdist
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'sdist' command (create a source distribution)."""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import sys
|
| 7 |
+
from glob import glob
|
| 8 |
+
from warnings import warn
|
| 9 |
+
|
| 10 |
+
from distutils.core import Command
|
| 11 |
+
from distutils import dir_util
|
| 12 |
+
from distutils import file_util
|
| 13 |
+
from distutils import archive_util
|
| 14 |
+
from distutils.text_file import TextFile
|
| 15 |
+
from distutils.filelist import FileList
|
| 16 |
+
from distutils import log
|
| 17 |
+
from distutils.util import convert_path
|
| 18 |
+
from distutils.errors import DistutilsTemplateError, DistutilsOptionError
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def show_formats():
    """Print all possible values for the 'formats' option (used by
    the "--help-formats" command-line option).
    """
    from distutils.fancy_getopt import FancyGetopt
    from distutils.archive_util import ARCHIVE_FORMATS

    # One (long-option, short-option, help) triple per archive format,
    # sorted alphabetically for stable help output.
    entries = sorted(("formats=" + name, None, spec[2])
                     for name, spec in ARCHIVE_FORMATS.items())
    FancyGetopt(entries).print_help(
        "List of available source distribution formats:")
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class sdist(Command):
|
| 37 |
+
|
| 38 |
+
description = "create a source distribution (tarball, zip file, etc.)"
|
| 39 |
+
|
| 40 |
+
    def checking_metadata(self):
        """Callable used for the check sub-command.

        Placed here so user_options can view it; returns whether the
        'metadata-check' option is enabled."""
        return self.metadata_check
|
| 45 |
+
|
| 46 |
+
    # Command-line interface: (long-option, short-option, help) triples.
    user_options = [
        ('template=', 't',
         "name of manifest template file [default: MANIFEST.in]"),
        ('manifest=', 'm',
         "name of manifest file [default: MANIFEST]"),
        ('use-defaults', None,
         "include the default file set in the manifest "
         "[default; disable with --no-defaults]"),
        ('no-defaults', None,
         "don't include the default file set"),
        ('prune', None,
         "specifically exclude files/directories that should not be "
         "distributed (build tree, RCS/CVS dirs, etc.) "
         "[default; disable with --no-prune]"),
        ('no-prune', None,
         "don't automatically exclude anything"),
        ('manifest-only', 'o',
         "just regenerate the manifest and then stop "
         "(implies --force-manifest)"),
        ('force-manifest', 'f',
         "forcibly regenerate the manifest and carry on as usual. "
         "Deprecated: now the manifest is always regenerated."),
        ('formats=', None,
         "formats for source distribution (comma-separated list)"),
        ('keep-temp', 'k',
         "keep the distribution tree around after creating " +
         "archive file(s)"),
        ('dist-dir=', 'd',
         "directory to put the source distribution archive(s) in "
         "[default: dist]"),
        ('metadata-check', None,
         "Ensure that all required elements of meta-data "
         "are supplied. Warn if any missing. [default]"),
        ('owner=', 'u',
         "Owner name used when creating a tar file [default: current user]"),
        ('group=', 'g',
         "Group name used when creating a tar file [default: current group]"),
        ]

    # Options that take no argument (true/false flags).
    boolean_options = ['use-defaults', 'prune',
                       'manifest-only', 'force-manifest',
                       'keep-temp', 'metadata-check']

    help_options = [
        ('help-formats', None,
         "list available distribution formats", show_formats),
        ]

    # Each --no-X flag negates its corresponding --X flag.
    negative_opt = {'no-defaults': 'use-defaults',
                    'no-prune': 'prune' }

    # The 'check' sub-command runs first when metadata-check is enabled.
    sub_commands = [('check', checking_metadata)]

    # Accepted spellings of the README file, in preference order.
    READMES = ('README', 'README.txt', 'README.rst')
|
| 100 |
+
|
| 101 |
+
    def initialize_options(self):
        """Set all options to their pre-parse defaults."""
        # 'template' and 'manifest' are, respectively, the names of
        # the manifest template and manifest file.
        self.template = None
        self.manifest = None

        # 'use_defaults': if true, we will include the default file set
        # in the manifest
        self.use_defaults = 1
        self.prune = 1

        self.manifest_only = 0
        self.force_manifest = 0

        self.formats = ['gztar']
        self.keep_temp = 0
        self.dist_dir = None

        # Filled in later by make_distribution().
        self.archive_files = None
        self.metadata_check = 1
        self.owner = None
        self.group = None
|
| 123 |
+
|
| 124 |
+
def finalize_options(self):
|
| 125 |
+
if self.manifest is None:
|
| 126 |
+
self.manifest = "MANIFEST"
|
| 127 |
+
if self.template is None:
|
| 128 |
+
self.template = "MANIFEST.in"
|
| 129 |
+
|
| 130 |
+
self.ensure_string_list('formats')
|
| 131 |
+
|
| 132 |
+
bad_format = archive_util.check_archive_formats(self.formats)
|
| 133 |
+
if bad_format:
|
| 134 |
+
raise DistutilsOptionError(
|
| 135 |
+
"unknown archive format '%s'" % bad_format)
|
| 136 |
+
|
| 137 |
+
if self.dist_dir is None:
|
| 138 |
+
self.dist_dir = "dist"
|
| 139 |
+
|
| 140 |
+
def run(self):
|
| 141 |
+
# 'filelist' contains the list of files that will make up the
|
| 142 |
+
# manifest
|
| 143 |
+
self.filelist = FileList()
|
| 144 |
+
|
| 145 |
+
# Run sub commands
|
| 146 |
+
for cmd_name in self.get_sub_commands():
|
| 147 |
+
self.run_command(cmd_name)
|
| 148 |
+
|
| 149 |
+
# Do whatever it takes to get the list of files to process
|
| 150 |
+
# (process the manifest template, read an existing manifest,
|
| 151 |
+
# whatever). File list is accumulated in 'self.filelist'.
|
| 152 |
+
self.get_file_list()
|
| 153 |
+
|
| 154 |
+
# If user just wanted us to regenerate the manifest, stop now.
|
| 155 |
+
if self.manifest_only:
|
| 156 |
+
return
|
| 157 |
+
|
| 158 |
+
# Otherwise, go ahead and create the source distribution tarball,
|
| 159 |
+
# or zipfile, or whatever.
|
| 160 |
+
self.make_distribution()
|
| 161 |
+
|
| 162 |
+
    def check_metadata(self):
        """Deprecated API."""
        warn("distutils.command.sdist.check_metadata is deprecated, \
              use the check command instead", PendingDeprecationWarning)
        # Delegate to the 'check' command, which replaced this API.
        check = self.distribution.get_command_obj('check')
        check.ensure_finalized()
        check.run()
|
| 169 |
+
|
| 170 |
+
def get_file_list(self):
|
| 171 |
+
"""Figure out the list of files to include in the source
|
| 172 |
+
distribution, and put it in 'self.filelist'. This might involve
|
| 173 |
+
reading the manifest template (and writing the manifest), or just
|
| 174 |
+
reading the manifest, or just using the default file set -- it all
|
| 175 |
+
depends on the user's options.
|
| 176 |
+
"""
|
| 177 |
+
# new behavior when using a template:
|
| 178 |
+
# the file list is recalculated every time because
|
| 179 |
+
# even if MANIFEST.in or setup.py are not changed
|
| 180 |
+
# the user might have added some files in the tree that
|
| 181 |
+
# need to be included.
|
| 182 |
+
#
|
| 183 |
+
# This makes --force the default and only behavior with templates.
|
| 184 |
+
template_exists = os.path.isfile(self.template)
|
| 185 |
+
if not template_exists and self._manifest_is_not_generated():
|
| 186 |
+
self.read_manifest()
|
| 187 |
+
self.filelist.sort()
|
| 188 |
+
self.filelist.remove_duplicates()
|
| 189 |
+
return
|
| 190 |
+
|
| 191 |
+
if not template_exists:
|
| 192 |
+
self.warn(("manifest template '%s' does not exist " +
|
| 193 |
+
"(using default file list)") %
|
| 194 |
+
self.template)
|
| 195 |
+
self.filelist.findall()
|
| 196 |
+
|
| 197 |
+
if self.use_defaults:
|
| 198 |
+
self.add_defaults()
|
| 199 |
+
|
| 200 |
+
if template_exists:
|
| 201 |
+
self.read_template()
|
| 202 |
+
|
| 203 |
+
if self.prune:
|
| 204 |
+
self.prune_file_list()
|
| 205 |
+
|
| 206 |
+
self.filelist.sort()
|
| 207 |
+
self.filelist.remove_duplicates()
|
| 208 |
+
self.write_manifest()
|
| 209 |
+
|
| 210 |
+
def add_defaults(self):
|
| 211 |
+
"""Add all the default files to self.filelist:
|
| 212 |
+
- README or README.txt
|
| 213 |
+
- setup.py
|
| 214 |
+
- test/test*.py
|
| 215 |
+
- all pure Python modules mentioned in setup script
|
| 216 |
+
- all files pointed by package_data (build_py)
|
| 217 |
+
- all files defined in data_files.
|
| 218 |
+
- all files defined as scripts.
|
| 219 |
+
- all C sources listed as part of extensions or C libraries
|
| 220 |
+
in the setup script (doesn't catch C headers!)
|
| 221 |
+
Warns if (README or README.txt) or setup.py are missing; everything
|
| 222 |
+
else is optional.
|
| 223 |
+
"""
|
| 224 |
+
self._add_defaults_standards()
|
| 225 |
+
self._add_defaults_optional()
|
| 226 |
+
self._add_defaults_python()
|
| 227 |
+
self._add_defaults_data_files()
|
| 228 |
+
self._add_defaults_ext()
|
| 229 |
+
self._add_defaults_c_libs()
|
| 230 |
+
self._add_defaults_scripts()
|
| 231 |
+
|
| 232 |
+
@staticmethod
|
| 233 |
+
def _cs_path_exists(fspath):
|
| 234 |
+
"""
|
| 235 |
+
Case-sensitive path existence check
|
| 236 |
+
|
| 237 |
+
>>> sdist._cs_path_exists(__file__)
|
| 238 |
+
True
|
| 239 |
+
>>> sdist._cs_path_exists(__file__.upper())
|
| 240 |
+
False
|
| 241 |
+
"""
|
| 242 |
+
if not os.path.exists(fspath):
|
| 243 |
+
return False
|
| 244 |
+
# make absolute so we always have a directory
|
| 245 |
+
abspath = os.path.abspath(fspath)
|
| 246 |
+
directory, filename = os.path.split(abspath)
|
| 247 |
+
return filename in os.listdir(directory)
|
| 248 |
+
|
| 249 |
+
def _add_defaults_standards(self):
|
| 250 |
+
standards = [self.READMES, self.distribution.script_name]
|
| 251 |
+
for fn in standards:
|
| 252 |
+
if isinstance(fn, tuple):
|
| 253 |
+
alts = fn
|
| 254 |
+
got_it = False
|
| 255 |
+
for fn in alts:
|
| 256 |
+
if self._cs_path_exists(fn):
|
| 257 |
+
got_it = True
|
| 258 |
+
self.filelist.append(fn)
|
| 259 |
+
break
|
| 260 |
+
|
| 261 |
+
if not got_it:
|
| 262 |
+
self.warn("standard file not found: should have one of " +
|
| 263 |
+
', '.join(alts))
|
| 264 |
+
else:
|
| 265 |
+
if self._cs_path_exists(fn):
|
| 266 |
+
self.filelist.append(fn)
|
| 267 |
+
else:
|
| 268 |
+
self.warn("standard file '%s' not found" % fn)
|
| 269 |
+
|
| 270 |
+
def _add_defaults_optional(self):
|
| 271 |
+
optional = ['test/test*.py', 'setup.cfg']
|
| 272 |
+
for pattern in optional:
|
| 273 |
+
files = filter(os.path.isfile, glob(pattern))
|
| 274 |
+
self.filelist.extend(files)
|
| 275 |
+
|
| 276 |
+
def _add_defaults_python(self):
|
| 277 |
+
# build_py is used to get:
|
| 278 |
+
# - python modules
|
| 279 |
+
# - files defined in package_data
|
| 280 |
+
build_py = self.get_finalized_command('build_py')
|
| 281 |
+
|
| 282 |
+
# getting python files
|
| 283 |
+
if self.distribution.has_pure_modules():
|
| 284 |
+
self.filelist.extend(build_py.get_source_files())
|
| 285 |
+
|
| 286 |
+
# getting package_data files
|
| 287 |
+
# (computed in build_py.data_files by build_py.finalize_options)
|
| 288 |
+
for pkg, src_dir, build_dir, filenames in build_py.data_files:
|
| 289 |
+
for filename in filenames:
|
| 290 |
+
self.filelist.append(os.path.join(src_dir, filename))
|
| 291 |
+
|
| 292 |
+
def _add_defaults_data_files(self):
|
| 293 |
+
# getting distribution.data_files
|
| 294 |
+
if self.distribution.has_data_files():
|
| 295 |
+
for item in self.distribution.data_files:
|
| 296 |
+
if isinstance(item, str):
|
| 297 |
+
# plain file
|
| 298 |
+
item = convert_path(item)
|
| 299 |
+
if os.path.isfile(item):
|
| 300 |
+
self.filelist.append(item)
|
| 301 |
+
else:
|
| 302 |
+
# a (dirname, filenames) tuple
|
| 303 |
+
dirname, filenames = item
|
| 304 |
+
for f in filenames:
|
| 305 |
+
f = convert_path(f)
|
| 306 |
+
if os.path.isfile(f):
|
| 307 |
+
self.filelist.append(f)
|
| 308 |
+
|
| 309 |
+
def _add_defaults_ext(self):
|
| 310 |
+
if self.distribution.has_ext_modules():
|
| 311 |
+
build_ext = self.get_finalized_command('build_ext')
|
| 312 |
+
self.filelist.extend(build_ext.get_source_files())
|
| 313 |
+
|
| 314 |
+
def _add_defaults_c_libs(self):
|
| 315 |
+
if self.distribution.has_c_libraries():
|
| 316 |
+
build_clib = self.get_finalized_command('build_clib')
|
| 317 |
+
self.filelist.extend(build_clib.get_source_files())
|
| 318 |
+
|
| 319 |
+
def _add_defaults_scripts(self):
|
| 320 |
+
if self.distribution.has_scripts():
|
| 321 |
+
build_scripts = self.get_finalized_command('build_scripts')
|
| 322 |
+
self.filelist.extend(build_scripts.get_source_files())
|
| 323 |
+
|
| 324 |
+
def read_template(self):
|
| 325 |
+
"""Read and parse manifest template file named by self.template.
|
| 326 |
+
|
| 327 |
+
(usually "MANIFEST.in") The parsing and processing is done by
|
| 328 |
+
'self.filelist', which updates itself accordingly.
|
| 329 |
+
"""
|
| 330 |
+
log.info("reading manifest template '%s'", self.template)
|
| 331 |
+
template = TextFile(self.template, strip_comments=1, skip_blanks=1,
|
| 332 |
+
join_lines=1, lstrip_ws=1, rstrip_ws=1,
|
| 333 |
+
collapse_join=1)
|
| 334 |
+
|
| 335 |
+
try:
|
| 336 |
+
while True:
|
| 337 |
+
line = template.readline()
|
| 338 |
+
if line is None: # end of file
|
| 339 |
+
break
|
| 340 |
+
|
| 341 |
+
try:
|
| 342 |
+
self.filelist.process_template_line(line)
|
| 343 |
+
# the call above can raise a DistutilsTemplateError for
|
| 344 |
+
# malformed lines, or a ValueError from the lower-level
|
| 345 |
+
# convert_path function
|
| 346 |
+
except (DistutilsTemplateError, ValueError) as msg:
|
| 347 |
+
self.warn("%s, line %d: %s" % (template.filename,
|
| 348 |
+
template.current_line,
|
| 349 |
+
msg))
|
| 350 |
+
finally:
|
| 351 |
+
template.close()
|
| 352 |
+
|
| 353 |
+
def prune_file_list(self):
|
| 354 |
+
"""Prune off branches that might slip into the file list as created
|
| 355 |
+
by 'read_template()', but really don't belong there:
|
| 356 |
+
* the build tree (typically "build")
|
| 357 |
+
* the release tree itself (only an issue if we ran "sdist"
|
| 358 |
+
previously with --keep-temp, or it aborted)
|
| 359 |
+
* any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
|
| 360 |
+
"""
|
| 361 |
+
build = self.get_finalized_command('build')
|
| 362 |
+
base_dir = self.distribution.get_fullname()
|
| 363 |
+
|
| 364 |
+
self.filelist.exclude_pattern(None, prefix=build.build_base)
|
| 365 |
+
self.filelist.exclude_pattern(None, prefix=base_dir)
|
| 366 |
+
|
| 367 |
+
if sys.platform == 'win32':
|
| 368 |
+
seps = r'/|\\'
|
| 369 |
+
else:
|
| 370 |
+
seps = '/'
|
| 371 |
+
|
| 372 |
+
vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
|
| 373 |
+
'_darcs']
|
| 374 |
+
vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
|
| 375 |
+
self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
|
| 376 |
+
|
| 377 |
+
def write_manifest(self):
|
| 378 |
+
"""Write the file list in 'self.filelist' (presumably as filled in
|
| 379 |
+
by 'add_defaults()' and 'read_template()') to the manifest file
|
| 380 |
+
named by 'self.manifest'.
|
| 381 |
+
"""
|
| 382 |
+
if self._manifest_is_not_generated():
|
| 383 |
+
log.info("not writing to manually maintained "
|
| 384 |
+
"manifest file '%s'" % self.manifest)
|
| 385 |
+
return
|
| 386 |
+
|
| 387 |
+
content = self.filelist.files[:]
|
| 388 |
+
content.insert(0, '# file GENERATED by distutils, do NOT edit')
|
| 389 |
+
self.execute(file_util.write_file, (self.manifest, content),
|
| 390 |
+
"writing manifest file '%s'" % self.manifest)
|
| 391 |
+
|
| 392 |
+
def _manifest_is_not_generated(self):
|
| 393 |
+
# check for special comment used in 3.1.3 and higher
|
| 394 |
+
if not os.path.isfile(self.manifest):
|
| 395 |
+
return False
|
| 396 |
+
|
| 397 |
+
fp = open(self.manifest)
|
| 398 |
+
try:
|
| 399 |
+
first_line = fp.readline()
|
| 400 |
+
finally:
|
| 401 |
+
fp.close()
|
| 402 |
+
return first_line != '# file GENERATED by distutils, do NOT edit\n'
|
| 403 |
+
|
| 404 |
+
def read_manifest(self):
|
| 405 |
+
"""Read the manifest file (named by 'self.manifest') and use it to
|
| 406 |
+
fill in 'self.filelist', the list of files to include in the source
|
| 407 |
+
distribution.
|
| 408 |
+
"""
|
| 409 |
+
log.info("reading manifest file '%s'", self.manifest)
|
| 410 |
+
with open(self.manifest) as manifest:
|
| 411 |
+
for line in manifest:
|
| 412 |
+
# ignore comments and blank lines
|
| 413 |
+
line = line.strip()
|
| 414 |
+
if line.startswith('#') or not line:
|
| 415 |
+
continue
|
| 416 |
+
self.filelist.append(line)
|
| 417 |
+
|
| 418 |
+
def make_release_tree(self, base_dir, files):
|
| 419 |
+
"""Create the directory tree that will become the source
|
| 420 |
+
distribution archive. All directories implied by the filenames in
|
| 421 |
+
'files' are created under 'base_dir', and then we hard link or copy
|
| 422 |
+
(if hard linking is unavailable) those files into place.
|
| 423 |
+
Essentially, this duplicates the developer's source tree, but in a
|
| 424 |
+
directory named after the distribution, containing only the files
|
| 425 |
+
to be distributed.
|
| 426 |
+
"""
|
| 427 |
+
# Create all the directories under 'base_dir' necessary to
|
| 428 |
+
# put 'files' there; the 'mkpath()' is just so we don't die
|
| 429 |
+
# if the manifest happens to be empty.
|
| 430 |
+
self.mkpath(base_dir)
|
| 431 |
+
dir_util.create_tree(base_dir, files, dry_run=self.dry_run)
|
| 432 |
+
|
| 433 |
+
# And walk over the list of files, either making a hard link (if
|
| 434 |
+
# os.link exists) to each one that doesn't already exist in its
|
| 435 |
+
# corresponding location under 'base_dir', or copying each file
|
| 436 |
+
# that's out-of-date in 'base_dir'. (Usually, all files will be
|
| 437 |
+
# out-of-date, because by default we blow away 'base_dir' when
|
| 438 |
+
# we're done making the distribution archives.)
|
| 439 |
+
|
| 440 |
+
if hasattr(os, 'link'): # can make hard links on this system
|
| 441 |
+
link = 'hard'
|
| 442 |
+
msg = "making hard links in %s..." % base_dir
|
| 443 |
+
else: # nope, have to copy
|
| 444 |
+
link = None
|
| 445 |
+
msg = "copying files to %s..." % base_dir
|
| 446 |
+
|
| 447 |
+
if not files:
|
| 448 |
+
log.warn("no files to distribute -- empty manifest?")
|
| 449 |
+
else:
|
| 450 |
+
log.info(msg)
|
| 451 |
+
for file in files:
|
| 452 |
+
if not os.path.isfile(file):
|
| 453 |
+
log.warn("'%s' not a regular file -- skipping", file)
|
| 454 |
+
else:
|
| 455 |
+
dest = os.path.join(base_dir, file)
|
| 456 |
+
self.copy_file(file, dest, link=link)
|
| 457 |
+
|
| 458 |
+
self.distribution.metadata.write_pkg_info(base_dir)
|
| 459 |
+
|
| 460 |
+
def make_distribution(self):
|
| 461 |
+
"""Create the source distribution(s). First, we create the release
|
| 462 |
+
tree with 'make_release_tree()'; then, we create all required
|
| 463 |
+
archive files (according to 'self.formats') from the release tree.
|
| 464 |
+
Finally, we clean up by blowing away the release tree (unless
|
| 465 |
+
'self.keep_temp' is true). The list of archive files created is
|
| 466 |
+
stored so it can be retrieved later by 'get_archive_files()'.
|
| 467 |
+
"""
|
| 468 |
+
# Don't warn about missing meta-data here -- should be (and is!)
|
| 469 |
+
# done elsewhere.
|
| 470 |
+
base_dir = self.distribution.get_fullname()
|
| 471 |
+
base_name = os.path.join(self.dist_dir, base_dir)
|
| 472 |
+
|
| 473 |
+
self.make_release_tree(base_dir, self.filelist.files)
|
| 474 |
+
archive_files = [] # remember names of files we create
|
| 475 |
+
# tar archive must be created last to avoid overwrite and remove
|
| 476 |
+
if 'tar' in self.formats:
|
| 477 |
+
self.formats.append(self.formats.pop(self.formats.index('tar')))
|
| 478 |
+
|
| 479 |
+
for fmt in self.formats:
|
| 480 |
+
file = self.make_archive(base_name, fmt, base_dir=base_dir,
|
| 481 |
+
owner=self.owner, group=self.group)
|
| 482 |
+
archive_files.append(file)
|
| 483 |
+
self.distribution.dist_files.append(('sdist', '', file))
|
| 484 |
+
|
| 485 |
+
self.archive_files = archive_files
|
| 486 |
+
|
| 487 |
+
if not self.keep_temp:
|
| 488 |
+
dir_util.remove_tree(base_dir, dry_run=self.dry_run)
|
| 489 |
+
|
| 490 |
+
def get_archive_files(self):
|
| 491 |
+
"""Return the list of archive files created when the command
|
| 492 |
+
was run, or None if the command hasn't run yet.
|
| 493 |
+
"""
|
| 494 |
+
return self.archive_files
|
deepseek/lib/python3.10/distutils/tests/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.65 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/tests/__pycache__/support.cpython-310.pyc
ADDED
|
Binary file (7.73 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/tests/__pycache__/test_build_clib.cpython-310.pyc
ADDED
|
Binary file (4.43 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/tests/__pycache__/test_filelist.cpython-310.pyc
ADDED
|
Binary file (8.68 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/tests/__pycache__/test_install.cpython-310.pyc
ADDED
|
Binary file (7.63 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/tests/__pycache__/test_install_data.cpython-310.pyc
ADDED
|
Binary file (1.95 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/tests/__pycache__/test_util.cpython-310.pyc
ADDED
|
Binary file (9.05 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/tests/includetest.rst
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
This should be included.
|
deepseek/lib/python3.10/distutils/tests/test_archive_util.py
ADDED
|
@@ -0,0 +1,396 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""Tests for distutils.archive_util."""
|
| 3 |
+
import unittest
|
| 4 |
+
import os
|
| 5 |
+
import sys
|
| 6 |
+
import tarfile
|
| 7 |
+
from os.path import splitdrive
|
| 8 |
+
import warnings
|
| 9 |
+
|
| 10 |
+
from distutils import archive_util
|
| 11 |
+
from distutils.archive_util import (check_archive_formats, make_tarball,
|
| 12 |
+
make_zipfile, make_archive,
|
| 13 |
+
ARCHIVE_FORMATS)
|
| 14 |
+
from distutils.spawn import find_executable, spawn
|
| 15 |
+
from distutils.tests import support
|
| 16 |
+
from test.support import run_unittest, patch
|
| 17 |
+
from test.support.os_helper import change_cwd
|
| 18 |
+
from test.support.warnings_helper import check_warnings
|
| 19 |
+
|
| 20 |
+
try:
|
| 21 |
+
import grp
|
| 22 |
+
import pwd
|
| 23 |
+
UID_GID_SUPPORT = True
|
| 24 |
+
except ImportError:
|
| 25 |
+
UID_GID_SUPPORT = False
|
| 26 |
+
|
| 27 |
+
try:
|
| 28 |
+
import zipfile
|
| 29 |
+
ZIP_SUPPORT = True
|
| 30 |
+
except ImportError:
|
| 31 |
+
ZIP_SUPPORT = find_executable('zip')
|
| 32 |
+
|
| 33 |
+
try:
|
| 34 |
+
import zlib
|
| 35 |
+
ZLIB_SUPPORT = True
|
| 36 |
+
except ImportError:
|
| 37 |
+
ZLIB_SUPPORT = False
|
| 38 |
+
|
| 39 |
+
try:
|
| 40 |
+
import bz2
|
| 41 |
+
except ImportError:
|
| 42 |
+
bz2 = None
|
| 43 |
+
|
| 44 |
+
try:
|
| 45 |
+
import lzma
|
| 46 |
+
except ImportError:
|
| 47 |
+
lzma = None
|
| 48 |
+
|
| 49 |
+
def can_fs_encode(filename):
|
| 50 |
+
"""
|
| 51 |
+
Return True if the filename can be saved in the file system.
|
| 52 |
+
"""
|
| 53 |
+
if os.path.supports_unicode_filenames:
|
| 54 |
+
return True
|
| 55 |
+
try:
|
| 56 |
+
filename.encode(sys.getfilesystemencoding())
|
| 57 |
+
except UnicodeEncodeError:
|
| 58 |
+
return False
|
| 59 |
+
return True
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class ArchiveUtilTestCase(support.TempdirManager,
|
| 63 |
+
support.LoggingSilencer,
|
| 64 |
+
unittest.TestCase):
|
| 65 |
+
|
| 66 |
+
@unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
|
| 67 |
+
def test_make_tarball(self, name='archive'):
|
| 68 |
+
# creating something to tar
|
| 69 |
+
tmpdir = self._create_files()
|
| 70 |
+
self._make_tarball(tmpdir, name, '.tar.gz')
|
| 71 |
+
# trying an uncompressed one
|
| 72 |
+
self._make_tarball(tmpdir, name, '.tar', compress=None)
|
| 73 |
+
|
| 74 |
+
@unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
|
| 75 |
+
def test_make_tarball_gzip(self):
|
| 76 |
+
tmpdir = self._create_files()
|
| 77 |
+
self._make_tarball(tmpdir, 'archive', '.tar.gz', compress='gzip')
|
| 78 |
+
|
| 79 |
+
@unittest.skipUnless(bz2, 'Need bz2 support to run')
|
| 80 |
+
def test_make_tarball_bzip2(self):
|
| 81 |
+
tmpdir = self._create_files()
|
| 82 |
+
self._make_tarball(tmpdir, 'archive', '.tar.bz2', compress='bzip2')
|
| 83 |
+
|
| 84 |
+
@unittest.skipUnless(lzma, 'Need lzma support to run')
|
| 85 |
+
def test_make_tarball_xz(self):
|
| 86 |
+
tmpdir = self._create_files()
|
| 87 |
+
self._make_tarball(tmpdir, 'archive', '.tar.xz', compress='xz')
|
| 88 |
+
|
| 89 |
+
@unittest.skipUnless(can_fs_encode('årchiv'),
|
| 90 |
+
'File system cannot handle this filename')
|
| 91 |
+
def test_make_tarball_latin1(self):
|
| 92 |
+
"""
|
| 93 |
+
Mirror test_make_tarball, except filename contains latin characters.
|
| 94 |
+
"""
|
| 95 |
+
self.test_make_tarball('årchiv') # note this isn't a real word
|
| 96 |
+
|
| 97 |
+
@unittest.skipUnless(can_fs_encode('のアーカイブ'),
|
| 98 |
+
'File system cannot handle this filename')
|
| 99 |
+
def test_make_tarball_extended(self):
|
| 100 |
+
"""
|
| 101 |
+
Mirror test_make_tarball, except filename contains extended
|
| 102 |
+
characters outside the latin charset.
|
| 103 |
+
"""
|
| 104 |
+
self.test_make_tarball('のアーカイブ') # japanese for archive
|
| 105 |
+
|
| 106 |
+
def _make_tarball(self, tmpdir, target_name, suffix, **kwargs):
|
| 107 |
+
tmpdir2 = self.mkdtemp()
|
| 108 |
+
unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0],
|
| 109 |
+
"source and target should be on same drive")
|
| 110 |
+
|
| 111 |
+
base_name = os.path.join(tmpdir2, target_name)
|
| 112 |
+
|
| 113 |
+
# working with relative paths to avoid tar warnings
|
| 114 |
+
with change_cwd(tmpdir):
|
| 115 |
+
make_tarball(splitdrive(base_name)[1], 'dist', **kwargs)
|
| 116 |
+
|
| 117 |
+
# check if the compressed tarball was created
|
| 118 |
+
tarball = base_name + suffix
|
| 119 |
+
self.assertTrue(os.path.exists(tarball))
|
| 120 |
+
self.assertEqual(self._tarinfo(tarball), self._created_files)
|
| 121 |
+
|
| 122 |
+
def _tarinfo(self, path):
|
| 123 |
+
tar = tarfile.open(path)
|
| 124 |
+
try:
|
| 125 |
+
names = tar.getnames()
|
| 126 |
+
names.sort()
|
| 127 |
+
return names
|
| 128 |
+
finally:
|
| 129 |
+
tar.close()
|
| 130 |
+
|
| 131 |
+
_zip_created_files = ['dist/', 'dist/file1', 'dist/file2',
|
| 132 |
+
'dist/sub/', 'dist/sub/file3', 'dist/sub2/']
|
| 133 |
+
_created_files = [p.rstrip('/') for p in _zip_created_files]
|
| 134 |
+
|
| 135 |
+
def _create_files(self):
|
| 136 |
+
# creating something to tar
|
| 137 |
+
tmpdir = self.mkdtemp()
|
| 138 |
+
dist = os.path.join(tmpdir, 'dist')
|
| 139 |
+
os.mkdir(dist)
|
| 140 |
+
self.write_file([dist, 'file1'], 'xxx')
|
| 141 |
+
self.write_file([dist, 'file2'], 'xxx')
|
| 142 |
+
os.mkdir(os.path.join(dist, 'sub'))
|
| 143 |
+
self.write_file([dist, 'sub', 'file3'], 'xxx')
|
| 144 |
+
os.mkdir(os.path.join(dist, 'sub2'))
|
| 145 |
+
return tmpdir
|
| 146 |
+
|
| 147 |
+
@unittest.skipUnless(find_executable('tar') and find_executable('gzip')
|
| 148 |
+
and ZLIB_SUPPORT,
|
| 149 |
+
'Need the tar, gzip and zlib command to run')
|
| 150 |
+
def test_tarfile_vs_tar(self):
|
| 151 |
+
tmpdir = self._create_files()
|
| 152 |
+
tmpdir2 = self.mkdtemp()
|
| 153 |
+
base_name = os.path.join(tmpdir2, 'archive')
|
| 154 |
+
old_dir = os.getcwd()
|
| 155 |
+
os.chdir(tmpdir)
|
| 156 |
+
try:
|
| 157 |
+
make_tarball(base_name, 'dist')
|
| 158 |
+
finally:
|
| 159 |
+
os.chdir(old_dir)
|
| 160 |
+
|
| 161 |
+
# check if the compressed tarball was created
|
| 162 |
+
tarball = base_name + '.tar.gz'
|
| 163 |
+
self.assertTrue(os.path.exists(tarball))
|
| 164 |
+
|
| 165 |
+
# now create another tarball using `tar`
|
| 166 |
+
tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
|
| 167 |
+
tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
|
| 168 |
+
gzip_cmd = ['gzip', '-f', '-9', 'archive2.tar']
|
| 169 |
+
old_dir = os.getcwd()
|
| 170 |
+
os.chdir(tmpdir)
|
| 171 |
+
try:
|
| 172 |
+
spawn(tar_cmd)
|
| 173 |
+
spawn(gzip_cmd)
|
| 174 |
+
finally:
|
| 175 |
+
os.chdir(old_dir)
|
| 176 |
+
|
| 177 |
+
self.assertTrue(os.path.exists(tarball2))
|
| 178 |
+
# let's compare both tarballs
|
| 179 |
+
self.assertEqual(self._tarinfo(tarball), self._created_files)
|
| 180 |
+
self.assertEqual(self._tarinfo(tarball2), self._created_files)
|
| 181 |
+
|
| 182 |
+
# trying an uncompressed one
|
| 183 |
+
base_name = os.path.join(tmpdir2, 'archive')
|
| 184 |
+
old_dir = os.getcwd()
|
| 185 |
+
os.chdir(tmpdir)
|
| 186 |
+
try:
|
| 187 |
+
make_tarball(base_name, 'dist', compress=None)
|
| 188 |
+
finally:
|
| 189 |
+
os.chdir(old_dir)
|
| 190 |
+
tarball = base_name + '.tar'
|
| 191 |
+
self.assertTrue(os.path.exists(tarball))
|
| 192 |
+
|
| 193 |
+
# now for a dry_run
|
| 194 |
+
base_name = os.path.join(tmpdir2, 'archive')
|
| 195 |
+
old_dir = os.getcwd()
|
| 196 |
+
os.chdir(tmpdir)
|
| 197 |
+
try:
|
| 198 |
+
make_tarball(base_name, 'dist', compress=None, dry_run=True)
|
| 199 |
+
finally:
|
| 200 |
+
os.chdir(old_dir)
|
| 201 |
+
tarball = base_name + '.tar'
|
| 202 |
+
self.assertTrue(os.path.exists(tarball))
|
| 203 |
+
|
| 204 |
+
@unittest.skipUnless(find_executable('compress'),
|
| 205 |
+
'The compress program is required')
|
| 206 |
+
def test_compress_deprecated(self):
|
| 207 |
+
tmpdir = self._create_files()
|
| 208 |
+
base_name = os.path.join(self.mkdtemp(), 'archive')
|
| 209 |
+
|
| 210 |
+
# using compress and testing the PendingDeprecationWarning
|
| 211 |
+
old_dir = os.getcwd()
|
| 212 |
+
os.chdir(tmpdir)
|
| 213 |
+
try:
|
| 214 |
+
with check_warnings() as w:
|
| 215 |
+
warnings.simplefilter("always")
|
| 216 |
+
make_tarball(base_name, 'dist', compress='compress')
|
| 217 |
+
finally:
|
| 218 |
+
os.chdir(old_dir)
|
| 219 |
+
tarball = base_name + '.tar.Z'
|
| 220 |
+
self.assertTrue(os.path.exists(tarball))
|
| 221 |
+
self.assertEqual(len(w.warnings), 1)
|
| 222 |
+
|
| 223 |
+
# same test with dry_run
|
| 224 |
+
os.remove(tarball)
|
| 225 |
+
old_dir = os.getcwd()
|
| 226 |
+
os.chdir(tmpdir)
|
| 227 |
+
try:
|
| 228 |
+
with check_warnings() as w:
|
| 229 |
+
warnings.simplefilter("always")
|
| 230 |
+
make_tarball(base_name, 'dist', compress='compress',
|
| 231 |
+
dry_run=True)
|
| 232 |
+
finally:
|
| 233 |
+
os.chdir(old_dir)
|
| 234 |
+
self.assertFalse(os.path.exists(tarball))
|
| 235 |
+
self.assertEqual(len(w.warnings), 1)
|
| 236 |
+
|
| 237 |
+
@unittest.skipUnless(ZIP_SUPPORT and ZLIB_SUPPORT,
|
| 238 |
+
'Need zip and zlib support to run')
|
| 239 |
+
def test_make_zipfile(self):
|
| 240 |
+
# creating something to tar
|
| 241 |
+
tmpdir = self._create_files()
|
| 242 |
+
base_name = os.path.join(self.mkdtemp(), 'archive')
|
| 243 |
+
with change_cwd(tmpdir):
|
| 244 |
+
make_zipfile(base_name, 'dist')
|
| 245 |
+
|
| 246 |
+
# check if the compressed tarball was created
|
| 247 |
+
tarball = base_name + '.zip'
|
| 248 |
+
self.assertTrue(os.path.exists(tarball))
|
| 249 |
+
with zipfile.ZipFile(tarball) as zf:
|
| 250 |
+
self.assertEqual(sorted(zf.namelist()), self._zip_created_files)
|
| 251 |
+
|
| 252 |
+
@unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
|
| 253 |
+
def test_make_zipfile_no_zlib(self):
|
| 254 |
+
patch(self, archive_util.zipfile, 'zlib', None) # force zlib ImportError
|
| 255 |
+
|
| 256 |
+
called = []
|
| 257 |
+
zipfile_class = zipfile.ZipFile
|
| 258 |
+
def fake_zipfile(*a, **kw):
|
| 259 |
+
if kw.get('compression', None) == zipfile.ZIP_STORED:
|
| 260 |
+
called.append((a, kw))
|
| 261 |
+
return zipfile_class(*a, **kw)
|
| 262 |
+
|
| 263 |
+
patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile)
|
| 264 |
+
|
| 265 |
+
# create something to tar and compress
|
| 266 |
+
tmpdir = self._create_files()
|
| 267 |
+
base_name = os.path.join(self.mkdtemp(), 'archive')
|
| 268 |
+
with change_cwd(tmpdir):
|
| 269 |
+
make_zipfile(base_name, 'dist')
|
| 270 |
+
|
| 271 |
+
tarball = base_name + '.zip'
|
| 272 |
+
self.assertEqual(called,
|
| 273 |
+
[((tarball, "w"), {'compression': zipfile.ZIP_STORED})])
|
| 274 |
+
self.assertTrue(os.path.exists(tarball))
|
| 275 |
+
with zipfile.ZipFile(tarball) as zf:
|
| 276 |
+
self.assertEqual(sorted(zf.namelist()), self._zip_created_files)
|
| 277 |
+
|
| 278 |
+
def test_check_archive_formats(self):
|
| 279 |
+
self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']),
|
| 280 |
+
'xxx')
|
| 281 |
+
self.assertIsNone(check_archive_formats(['gztar', 'bztar', 'xztar',
|
| 282 |
+
'ztar', 'tar', 'zip']))
|
| 283 |
+
|
| 284 |
+
def test_make_archive(self):
|
| 285 |
+
tmpdir = self.mkdtemp()
|
| 286 |
+
base_name = os.path.join(tmpdir, 'archive')
|
| 287 |
+
self.assertRaises(ValueError, make_archive, base_name, 'xxx')
|
| 288 |
+
|
| 289 |
+
def test_make_archive_cwd(self):
|
| 290 |
+
current_dir = os.getcwd()
|
| 291 |
+
def _breaks(*args, **kw):
|
| 292 |
+
raise RuntimeError()
|
| 293 |
+
ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file')
|
| 294 |
+
try:
|
| 295 |
+
try:
|
| 296 |
+
make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
|
| 297 |
+
except:
|
| 298 |
+
pass
|
| 299 |
+
self.assertEqual(os.getcwd(), current_dir)
|
| 300 |
+
finally:
|
| 301 |
+
del ARCHIVE_FORMATS['xxx']
|
| 302 |
+
|
| 303 |
+
def test_make_archive_tar(self):
|
| 304 |
+
base_dir = self._create_files()
|
| 305 |
+
base_name = os.path.join(self.mkdtemp() , 'archive')
|
| 306 |
+
res = make_archive(base_name, 'tar', base_dir, 'dist')
|
| 307 |
+
self.assertTrue(os.path.exists(res))
|
| 308 |
+
self.assertEqual(os.path.basename(res), 'archive.tar')
|
| 309 |
+
self.assertEqual(self._tarinfo(res), self._created_files)
|
| 310 |
+
|
| 311 |
+
@unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
|
| 312 |
+
def test_make_archive_gztar(self):
|
| 313 |
+
base_dir = self._create_files()
|
| 314 |
+
base_name = os.path.join(self.mkdtemp() , 'archive')
|
| 315 |
+
res = make_archive(base_name, 'gztar', base_dir, 'dist')
|
| 316 |
+
self.assertTrue(os.path.exists(res))
|
| 317 |
+
self.assertEqual(os.path.basename(res), 'archive.tar.gz')
|
| 318 |
+
self.assertEqual(self._tarinfo(res), self._created_files)
|
| 319 |
+
|
| 320 |
+
@unittest.skipUnless(bz2, 'Need bz2 support to run')
|
| 321 |
+
def test_make_archive_bztar(self):
|
| 322 |
+
base_dir = self._create_files()
|
| 323 |
+
base_name = os.path.join(self.mkdtemp() , 'archive')
|
| 324 |
+
res = make_archive(base_name, 'bztar', base_dir, 'dist')
|
| 325 |
+
self.assertTrue(os.path.exists(res))
|
| 326 |
+
self.assertEqual(os.path.basename(res), 'archive.tar.bz2')
|
| 327 |
+
self.assertEqual(self._tarinfo(res), self._created_files)
|
| 328 |
+
|
| 329 |
+
@unittest.skipUnless(lzma, 'Need xz support to run')
|
| 330 |
+
def test_make_archive_xztar(self):
|
| 331 |
+
base_dir = self._create_files()
|
| 332 |
+
base_name = os.path.join(self.mkdtemp() , 'archive')
|
| 333 |
+
res = make_archive(base_name, 'xztar', base_dir, 'dist')
|
| 334 |
+
self.assertTrue(os.path.exists(res))
|
| 335 |
+
self.assertEqual(os.path.basename(res), 'archive.tar.xz')
|
| 336 |
+
self.assertEqual(self._tarinfo(res), self._created_files)
|
| 337 |
+
|
| 338 |
+
def test_make_archive_owner_group(self):
|
| 339 |
+
# testing make_archive with owner and group, with various combinations
|
| 340 |
+
# this works even if there's not gid/uid support
|
| 341 |
+
if UID_GID_SUPPORT:
|
| 342 |
+
group = grp.getgrgid(0)[0]
|
| 343 |
+
owner = pwd.getpwuid(0)[0]
|
| 344 |
+
else:
|
| 345 |
+
group = owner = 'root'
|
| 346 |
+
|
| 347 |
+
base_dir = self._create_files()
|
| 348 |
+
root_dir = self.mkdtemp()
|
| 349 |
+
base_name = os.path.join(self.mkdtemp() , 'archive')
|
| 350 |
+
res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner,
|
| 351 |
+
group=group)
|
| 352 |
+
self.assertTrue(os.path.exists(res))
|
| 353 |
+
|
| 354 |
+
res = make_archive(base_name, 'zip', root_dir, base_dir)
|
| 355 |
+
self.assertTrue(os.path.exists(res))
|
| 356 |
+
|
| 357 |
+
res = make_archive(base_name, 'tar', root_dir, base_dir,
|
| 358 |
+
owner=owner, group=group)
|
| 359 |
+
self.assertTrue(os.path.exists(res))
|
| 360 |
+
|
| 361 |
+
res = make_archive(base_name, 'tar', root_dir, base_dir,
|
| 362 |
+
owner='kjhkjhkjg', group='oihohoh')
|
| 363 |
+
self.assertTrue(os.path.exists(res))
|
| 364 |
+
|
| 365 |
+
@unittest.skipUnless(ZLIB_SUPPORT, "Requires zlib")
|
| 366 |
+
@unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
|
| 367 |
+
def test_tarfile_root_owner(self):
|
| 368 |
+
tmpdir = self._create_files()
|
| 369 |
+
base_name = os.path.join(self.mkdtemp(), 'archive')
|
| 370 |
+
old_dir = os.getcwd()
|
| 371 |
+
os.chdir(tmpdir)
|
| 372 |
+
group = grp.getgrgid(0)[0]
|
| 373 |
+
owner = pwd.getpwuid(0)[0]
|
| 374 |
+
try:
|
| 375 |
+
archive_name = make_tarball(base_name, 'dist', compress=None,
|
| 376 |
+
owner=owner, group=group)
|
| 377 |
+
finally:
|
| 378 |
+
os.chdir(old_dir)
|
| 379 |
+
|
| 380 |
+
# check if the compressed tarball was created
|
| 381 |
+
self.assertTrue(os.path.exists(archive_name))
|
| 382 |
+
|
| 383 |
+
# now checks the rights
|
| 384 |
+
archive = tarfile.open(archive_name)
|
| 385 |
+
try:
|
| 386 |
+
for member in archive.getmembers():
|
| 387 |
+
self.assertEqual(member.uid, 0)
|
| 388 |
+
self.assertEqual(member.gid, 0)
|
| 389 |
+
finally:
|
| 390 |
+
archive.close()
|
| 391 |
+
|
| 392 |
+
def test_suite():
|
| 393 |
+
return unittest.makeSuite(ArchiveUtilTestCase)
|
| 394 |
+
|
| 395 |
+
if __name__ == "__main__":
|
| 396 |
+
run_unittest(test_suite())
|
deepseek/lib/python3.10/ensurepip/__main__.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import ensurepip
|
| 2 |
+
import sys
|
| 3 |
+
|
| 4 |
+
if __name__ == "__main__":
|
| 5 |
+
sys.exit(ensurepip._main())
|
deepseek/lib/python3.10/ensurepip/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (6.57 kB). View file
|
|
|
deepseek/lib/python3.10/ensurepip/__pycache__/__main__.cpython-310.pyc
ADDED
|
Binary file (489 Bytes). View file
|
|
|
deepseek/lib/python3.10/ensurepip/__pycache__/_uninstall.cpython-310.pyc
ADDED
|
Binary file (1.2 kB). View file
|
|
|
deepseek/lib/python3.10/ensurepip/_bundled/__init__.py
ADDED
|
File without changes
|
deepseek/lib/python3.10/ensurepip/_bundled/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (397 Bytes). View file
|
|
|
deepseek/lib/python3.10/ensurepip/_uninstall.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Basic pip uninstallation support, helper for the Windows uninstaller"""
|
| 2 |
+
|
| 3 |
+
import argparse
|
| 4 |
+
import ensurepip
|
| 5 |
+
import sys
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def _main(argv=None):
|
| 9 |
+
parser = argparse.ArgumentParser(prog="python -m ensurepip._uninstall")
|
| 10 |
+
parser.add_argument(
|
| 11 |
+
"--version",
|
| 12 |
+
action="version",
|
| 13 |
+
version="pip {}".format(ensurepip.version()),
|
| 14 |
+
help="Show the version of pip this will attempt to uninstall.",
|
| 15 |
+
)
|
| 16 |
+
parser.add_argument(
|
| 17 |
+
"-v", "--verbose",
|
| 18 |
+
action="count",
|
| 19 |
+
default=0,
|
| 20 |
+
dest="verbosity",
|
| 21 |
+
help=("Give more output. Option is additive, and can be used up to 3 "
|
| 22 |
+
"times."),
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
args = parser.parse_args(argv)
|
| 26 |
+
|
| 27 |
+
return ensurepip._uninstall_helper(verbosity=args.verbosity)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
if __name__ == "__main__":
|
| 31 |
+
sys.exit(_main())
|
deepseek/lib/python3.10/json/__init__.py
ADDED
|
@@ -0,0 +1,359 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""JSON (JavaScript Object Notation) <https://json.org> is a subset of
|
| 2 |
+
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
|
| 3 |
+
interchange format.
|
| 4 |
+
|
| 5 |
+
:mod:`json` exposes an API familiar to users of the standard library
|
| 6 |
+
:mod:`marshal` and :mod:`pickle` modules. It is derived from a
|
| 7 |
+
version of the externally maintained simplejson library.
|
| 8 |
+
|
| 9 |
+
Encoding basic Python object hierarchies::
|
| 10 |
+
|
| 11 |
+
>>> import json
|
| 12 |
+
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
|
| 13 |
+
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
|
| 14 |
+
>>> print(json.dumps("\"foo\bar"))
|
| 15 |
+
"\"foo\bar"
|
| 16 |
+
>>> print(json.dumps('\u1234'))
|
| 17 |
+
"\u1234"
|
| 18 |
+
>>> print(json.dumps('\\'))
|
| 19 |
+
"\\"
|
| 20 |
+
>>> print(json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True))
|
| 21 |
+
{"a": 0, "b": 0, "c": 0}
|
| 22 |
+
>>> from io import StringIO
|
| 23 |
+
>>> io = StringIO()
|
| 24 |
+
>>> json.dump(['streaming API'], io)
|
| 25 |
+
>>> io.getvalue()
|
| 26 |
+
'["streaming API"]'
|
| 27 |
+
|
| 28 |
+
Compact encoding::
|
| 29 |
+
|
| 30 |
+
>>> import json
|
| 31 |
+
>>> mydict = {'4': 5, '6': 7}
|
| 32 |
+
>>> json.dumps([1,2,3,mydict], separators=(',', ':'))
|
| 33 |
+
'[1,2,3,{"4":5,"6":7}]'
|
| 34 |
+
|
| 35 |
+
Pretty printing::
|
| 36 |
+
|
| 37 |
+
>>> import json
|
| 38 |
+
>>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4))
|
| 39 |
+
{
|
| 40 |
+
"4": 5,
|
| 41 |
+
"6": 7
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
Decoding JSON::
|
| 45 |
+
|
| 46 |
+
>>> import json
|
| 47 |
+
>>> obj = ['foo', {'bar': ['baz', None, 1.0, 2]}]
|
| 48 |
+
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
|
| 49 |
+
True
|
| 50 |
+
>>> json.loads('"\\"foo\\bar"') == '"foo\x08ar'
|
| 51 |
+
True
|
| 52 |
+
>>> from io import StringIO
|
| 53 |
+
>>> io = StringIO('["streaming API"]')
|
| 54 |
+
>>> json.load(io)[0] == 'streaming API'
|
| 55 |
+
True
|
| 56 |
+
|
| 57 |
+
Specializing JSON object decoding::
|
| 58 |
+
|
| 59 |
+
>>> import json
|
| 60 |
+
>>> def as_complex(dct):
|
| 61 |
+
... if '__complex__' in dct:
|
| 62 |
+
... return complex(dct['real'], dct['imag'])
|
| 63 |
+
... return dct
|
| 64 |
+
...
|
| 65 |
+
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
|
| 66 |
+
... object_hook=as_complex)
|
| 67 |
+
(1+2j)
|
| 68 |
+
>>> from decimal import Decimal
|
| 69 |
+
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
|
| 70 |
+
True
|
| 71 |
+
|
| 72 |
+
Specializing JSON object encoding::
|
| 73 |
+
|
| 74 |
+
>>> import json
|
| 75 |
+
>>> def encode_complex(obj):
|
| 76 |
+
... if isinstance(obj, complex):
|
| 77 |
+
... return [obj.real, obj.imag]
|
| 78 |
+
... raise TypeError(f'Object of type {obj.__class__.__name__} '
|
| 79 |
+
... f'is not JSON serializable')
|
| 80 |
+
...
|
| 81 |
+
>>> json.dumps(2 + 1j, default=encode_complex)
|
| 82 |
+
'[2.0, 1.0]'
|
| 83 |
+
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
|
| 84 |
+
'[2.0, 1.0]'
|
| 85 |
+
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
|
| 86 |
+
'[2.0, 1.0]'
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
Using json.tool from the shell to validate and pretty-print::
|
| 90 |
+
|
| 91 |
+
$ echo '{"json":"obj"}' | python -m json.tool
|
| 92 |
+
{
|
| 93 |
+
"json": "obj"
|
| 94 |
+
}
|
| 95 |
+
$ echo '{ 1.2:3.4}' | python -m json.tool
|
| 96 |
+
Expecting property name enclosed in double quotes: line 1 column 3 (char 2)
|
| 97 |
+
"""
|
| 98 |
+
__version__ = '2.0.9'
|
| 99 |
+
__all__ = [
|
| 100 |
+
'dump', 'dumps', 'load', 'loads',
|
| 101 |
+
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
|
| 102 |
+
]
|
| 103 |
+
|
| 104 |
+
__author__ = 'Bob Ippolito <bob@redivi.com>'
|
| 105 |
+
|
| 106 |
+
from .decoder import JSONDecoder, JSONDecodeError
|
| 107 |
+
from .encoder import JSONEncoder
|
| 108 |
+
import codecs
|
| 109 |
+
|
| 110 |
+
_default_encoder = JSONEncoder(
|
| 111 |
+
skipkeys=False,
|
| 112 |
+
ensure_ascii=True,
|
| 113 |
+
check_circular=True,
|
| 114 |
+
allow_nan=True,
|
| 115 |
+
indent=None,
|
| 116 |
+
separators=None,
|
| 117 |
+
default=None,
|
| 118 |
+
)
|
| 119 |
+
|
| 120 |
+
def dump(obj, fp, *, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        default=None, sort_keys=False, **kw):
    """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).

    Keyword arguments:

    * ``skipkeys`` -- if true, dict keys that are not basic types (``str``,
      ``int``, ``float``, ``bool``, ``None``) are skipped instead of raising
      ``TypeError``.
    * ``ensure_ascii`` -- if false, non-ASCII characters are written as-is;
      otherwise they are escaped in the JSON output.
    * ``check_circular`` -- if false, the circular reference check for
      containers is skipped; a circular reference will then cause a
      ``RecursionError`` (or worse).
    * ``allow_nan`` -- if false, serializing out-of-range floats (``nan``,
      ``inf``, ``-inf``) raises ``ValueError`` instead of emitting the
      JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
    * ``indent`` -- a non-negative integer pretty-print indent level; 0
      inserts only newlines; ``None`` (default) is the most compact form.
    * ``separators`` -- an ``(item_separator, key_separator)`` tuple; the
      default is ``(', ', ': ')`` when *indent* is ``None`` and
      ``(',', ': ')`` otherwise. Pass ``(',', ':')`` for the most compact
      output.
    * ``default`` -- a function returning a serializable version of an
      otherwise unserializable object, or raising ``TypeError``.
    * ``sort_keys`` -- if true (default ``False``), dictionary output is
      sorted by key.
    * ``cls`` -- a ``JSONEncoder`` subclass to use in place of the default
      (e.g. one overriding ``.default()``).
    """
    all_defaults = (
        not skipkeys and ensure_ascii and check_circular and allow_nan
        and cls is None and indent is None and separators is None
        and default is None and not sort_keys and not kw
    )
    if all_defaults:
        # Fast path: reuse the module-level cached encoder.
        chunks = _default_encoder.iterencode(obj)
    else:
        encoder_cls = JSONEncoder if cls is None else cls
        chunks = encoder_cls(
            skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan,
            indent=indent, separators=separators, default=default,
            sort_keys=sort_keys, **kw).iterencode(obj)
    # Stream chunk-by-chunk; writelines could be faster in some Python
    # versions, at a debuggability cost.
    for chunk in chunks:
        fp.write(chunk)
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
def dumps(obj, *, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        default=None, sort_keys=False, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    Keyword arguments:

    * ``skipkeys`` -- if true, dict keys that are not basic types (``str``,
      ``int``, ``float``, ``bool``, ``None``) are skipped instead of raising
      ``TypeError``.
    * ``ensure_ascii`` -- if false, the result may contain non-ASCII
      characters; otherwise they are escaped in the JSON output.
    * ``check_circular`` -- if false, the circular reference check for
      containers is skipped; a circular reference will then cause a
      ``RecursionError`` (or worse).
    * ``allow_nan`` -- if false, serializing out-of-range floats (``nan``,
      ``inf``, ``-inf``) raises ``ValueError`` instead of emitting the
      JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
    * ``indent`` -- a non-negative integer pretty-print indent level; 0
      inserts only newlines; ``None`` (default) is the most compact form.
    * ``separators`` -- an ``(item_separator, key_separator)`` tuple; the
      default is ``(', ', ': ')`` when *indent* is ``None`` and
      ``(',', ': ')`` otherwise. Pass ``(',', ':')`` for the most compact
      output.
    * ``default`` -- a function returning a serializable version of an
      otherwise unserializable object, or raising ``TypeError``.
    * ``sort_keys`` -- if true (default ``False``), dictionary output is
      sorted by key.
    * ``cls`` -- a ``JSONEncoder`` subclass to use in place of the default
      (e.g. one overriding ``.default()``).
    """
    all_defaults = (
        not skipkeys and ensure_ascii and check_circular and allow_nan
        and cls is None and indent is None and separators is None
        and default is None and not sort_keys and not kw
    )
    if all_defaults:
        # Fast path: reuse the module-level cached encoder.
        return _default_encoder.encode(obj)
    encoder_cls = JSONEncoder if cls is None else cls
    return encoder_cls(
        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
        separators=separators, default=default, sort_keys=sort_keys,
        **kw).encode(obj)
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
_default_decoder = JSONDecoder(object_hook=None, object_pairs_hook=None)
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
def detect_encoding(b):
    """Sniff the Unicode encoding of a JSON byte string.

    First checks for explicit byte-order marks (UTF-32 before UTF-16,
    because ``BOM_UTF32_LE`` begins with ``BOM_UTF16_LE``), then applies
    the null-byte-pattern heuristics for BOM-less input. Falls back to
    ``'utf-8'``.
    """
    if b.startswith((codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE)):
        return 'utf-32'
    if b.startswith((codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE)):
        return 'utf-16'
    if b.startswith(codecs.BOM_UTF8):
        return 'utf-8-sig'

    size = len(b)
    if size >= 4:
        if not b[0]:
            # 00 00 -- --  ->  utf-32-be
            # 00 XX -- --  ->  utf-16-be
            return 'utf-16-be' if b[1] else 'utf-32-be'
        if not b[1]:
            # XX 00 00 00  ->  utf-32-le
            # XX 00 00 XX  ->  utf-16-le
            # XX 00 XX --  ->  utf-16-le
            return 'utf-16-le' if b[2] or b[3] else 'utf-32-le'
    elif size == 2:
        if not b[0]:
            # 00 XX  ->  utf-16-be
            return 'utf-16-be'
        if not b[1]:
            # XX 00  ->  utf-16-le
            return 'utf-16-le'
    # default
    return 'utf-8'
|
| 272 |
+
|
| 273 |
+
|
| 274 |
+
def load(fp, *, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object
    containing a JSON document) to a Python object.

    This is a thin wrapper: it reads the whole document and forwards every
    keyword to ``loads``.

    ``object_hook`` is called with the result of every decoded object
    literal (a ``dict``); its return value replaces the ``dict``. Useful for
    custom decoders (e.g. JSON-RPC class hinting).

    ``object_pairs_hook`` is called with an ordered list of pairs for every
    decoded object literal; its return value replaces the ``dict``. When
    both hooks are given, ``object_pairs_hook`` wins.

    ``cls`` names a ``JSONDecoder`` subclass to use instead of the default.
    """
    document = fp.read()
    return loads(
        document,
        cls=cls,
        object_hook=object_hook,
        parse_float=parse_float,
        parse_int=parse_int,
        parse_constant=parse_constant,
        object_pairs_hook=object_pairs_hook,
        **kw,
    )
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
def loads(s, *, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    """Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance
    containing a JSON document) to a Python object.

    ``object_hook`` is called with the result of every decoded object
    literal (a ``dict``); its return value replaces the ``dict``. Useful for
    custom decoders (e.g. JSON-RPC class hinting).

    ``object_pairs_hook`` is called with an ordered list of pairs for every
    decoded object literal; its return value replaces the ``dict``. When
    both hooks are given, ``object_pairs_hook`` wins.

    ``parse_float`` / ``parse_int`` are called with the string of every JSON
    float / int to be decoded (defaults are equivalent to ``float(num_str)``
    and ``int(num_str)``); use them to substitute another datatype or parser
    (e.g. ``decimal.Decimal``).

    ``parse_constant`` is called with one of ``-Infinity``, ``Infinity`` or
    ``NaN``; use it to reject such non-standard JSON numbers.

    ``cls`` names a ``JSONDecoder`` subclass to use instead of the default.
    """
    if isinstance(s, str):
        # A BOM in a str means the caller decoded with the wrong codec.
        if s.startswith('\ufeff'):
            raise JSONDecodeError("Unexpected UTF-8 BOM (decode using utf-8-sig)",
                                  s, 0)
    elif isinstance(s, (bytes, bytearray)):
        s = s.decode(detect_encoding(s), 'surrogatepass')
    else:
        raise TypeError(f'the JSON object must be str, bytes or bytearray, '
                        f'not {s.__class__.__name__}')

    no_overrides = (
        cls is None and object_hook is None
        and parse_int is None and parse_float is None
        and parse_constant is None and object_pairs_hook is None
        and not kw
    )
    if no_overrides:
        # Fast path: reuse the module-level cached decoder.
        return _default_decoder.decode(s)

    decoder_cls = JSONDecoder if cls is None else cls
    # Only pass hooks that the caller actually supplied, so subclass
    # constructors keep their own defaults.
    for name, value in (('object_hook', object_hook),
                        ('object_pairs_hook', object_pairs_hook),
                        ('parse_float', parse_float),
                        ('parse_int', parse_int),
                        ('parse_constant', parse_constant)):
        if value is not None:
            kw[name] = value
    return decoder_cls(**kw).decode(s)
|
deepseek/lib/python3.10/json/__pycache__/encoder.cpython-310.pyc
ADDED
|
Binary file (11.4 kB). View file
|
|
|
deepseek/lib/python3.10/json/__pycache__/scanner.cpython-310.pyc
ADDED
|
Binary file (1.91 kB). View file
|
|
|
deepseek/lib/python3.10/json/__pycache__/tool.cpython-310.pyc
ADDED
|
Binary file (3.07 kB). View file
|
|
|
deepseek/lib/python3.10/json/decoder.py
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Implementation of JSONDecoder
|
| 2 |
+
"""
|
| 3 |
+
import re
|
| 4 |
+
|
| 5 |
+
from json import scanner
|
| 6 |
+
try:
|
| 7 |
+
from _json import scanstring as c_scanstring
|
| 8 |
+
except ImportError:
|
| 9 |
+
c_scanstring = None
|
| 10 |
+
|
| 11 |
+
__all__ = ['JSONDecoder', 'JSONDecodeError']
|
| 12 |
+
|
| 13 |
+
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
|
| 14 |
+
|
| 15 |
+
NaN = float('nan')
|
| 16 |
+
PosInf = float('inf')
|
| 17 |
+
NegInf = float('-inf')
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class JSONDecodeError(ValueError):
    """JSON parse failure: a ``ValueError`` enriched with location data.

    Additional properties:

        msg: The unformatted error message
        doc: The JSON document being parsed
        pos: The start index of doc where parsing failed
        lineno: The line corresponding to pos
        colno: The column corresponding to pos
    """
    # Note that this exception is also raised directly by the _json
    # C accelerator.
    def __init__(self, msg, doc, pos):
        line = doc.count('\n', 0, pos) + 1
        col = pos - doc.rfind('\n', 0, pos)
        super().__init__(f'{msg}: line {line} column {col} (char {pos})')
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.lineno = line
        self.colno = col

    def __reduce__(self):
        # Keep instances picklable: rebuild from the raw (msg, doc, pos)
        # triple rather than the formatted message.
        return self.__class__, (self.msg, self.doc, self.pos)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
_CONSTANTS = {
|
| 47 |
+
'-Infinity': NegInf,
|
| 48 |
+
'Infinity': PosInf,
|
| 49 |
+
'NaN': NaN,
|
| 50 |
+
}
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
|
| 54 |
+
BACKSLASH = {
|
| 55 |
+
'"': '"', '\\': '\\', '/': '/',
|
| 56 |
+
'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t',
|
| 57 |
+
}
|
| 58 |
+
|
| 59 |
+
def _decode_uXXXX(s, pos):
|
| 60 |
+
esc = s[pos + 1:pos + 5]
|
| 61 |
+
if len(esc) == 4 and esc[1] not in 'xX':
|
| 62 |
+
try:
|
| 63 |
+
return int(esc, 16)
|
| 64 |
+
except ValueError:
|
| 65 |
+
pass
|
| 66 |
+
msg = "Invalid \\uXXXX escape"
|
| 67 |
+
raise JSONDecodeError(msg, s, pos)
|
| 68 |
+
|
| 69 |
+
def py_scanstring(s, end, strict=True,
        _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote."""
    # _b and _m are bound as defaults purely to make them local-variable
    # lookups inside the hot loop.
    chunks = []
    _append = chunks.append
    begin = end - 1  # index of the opening quote, for error reporting
    while 1:
        # Grab a run of plain characters plus the single character that
        # ended the run (quote, backslash, or a raw control character).
        chunk = _m(s, end)
        if chunk is None:
            raise JSONDecodeError("Unterminated string starting at", s, begin)
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
        if content:
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                #msg = "Invalid control character %r at" % (terminator,)
                msg = "Invalid control character {0!r} at".format(terminator)
                raise JSONDecodeError(msg, s, end)
            else:
                # Non-strict mode: keep the raw control character.
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            # Document ends right after a backslash.
            raise JSONDecodeError("Unterminated string starting at",
                                  s, begin) from None
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: {0!r}".format(esc)
                raise JSONDecodeError(msg, s, end)
            end += 1
        else:
            # \uXXXX escape; a high surrogate followed by another \uXXXX
            # low surrogate is combined into one astral code point.
            uni = _decode_uXXXX(s, end)
            end += 5
            if 0xd800 <= uni <= 0xdbff and s[end:end + 2] == '\\u':
                uni2 = _decode_uXXXX(s, end + 1)
                if 0xdc00 <= uni2 <= 0xdfff:
                    uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                    end += 6
            char = chr(uni)
        _append(char)
    return ''.join(chunks), end
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
# Use speedup if available
|
| 130 |
+
scanstring = c_scanstring or py_scanstring
|
| 131 |
+
|
| 132 |
+
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
|
| 133 |
+
WHITESPACE_STR = ' \t\n\r'
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def JSONObject(s_and_end, strict, scan_once, object_hook, object_pairs_hook,
               memo=None, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON object starting just after its '{'.

    s_and_end is (document, index-after-'{'). Returns (obj, end) where end
    is the index after the closing '}'. memo deduplicates repeated key
    strings across the document; _w/_ws are bound as defaults for fast
    local lookup.
    """
    s, end = s_and_end
    pairs = []
    pairs_append = pairs.append
    # Backwards compatibility
    if memo is None:
        memo = {}
    memo_get = memo.setdefault
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            if object_pairs_hook is not None:
                result = object_pairs_hook(pairs)
                return result, end + 1
            pairs = {}
            if object_hook is not None:
                pairs = object_hook(pairs)
            return pairs, end + 1
        elif nextchar != '"':
            raise JSONDecodeError(
                "Expecting property name enclosed in double quotes", s, end)
    end += 1
    while True:
        # Parse one "key": value pair per iteration.
        key, end = scanstring(s, end, strict)
        # Reuse a single str object for keys seen before in this document.
        key = memo_get(key, key)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise JSONDecodeError("Expecting ':' delimiter", s, end)
        end += 1

        try:
            # Fast-path skip of zero or one whitespace characters before the
            # value; fall back to the regex for longer runs.
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass

        try:
            value, end = scan_once(s, end)
        except StopIteration as err:
            # scan_once signals "no value here" via StopIteration whose
            # value is the failing index.
            raise JSONDecodeError("Expecting value", s, err.value) from None
        pairs_append((key, value))
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1

        if nextchar == '}':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting ',' delimiter", s, end - 1)
        # After the comma, the next property name must start with '"'.
        end = _w(s, end).end()
        nextchar = s[end:end + 1]
        end += 1
        if nextchar != '"':
            raise JSONDecodeError(
                "Expecting property name enclosed in double quotes", s, end - 1)
    if object_pairs_hook is not None:
        result = object_pairs_hook(pairs)
        return result, end
    pairs = dict(pairs)
    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
|
| 216 |
+
|
| 217 |
+
def JSONArray(s_and_end, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON array starting just after its '['.

    s_and_end is (document, index-after-'['). Returns (values, end) where
    end is the index after the closing ']'. _w/_ws are bound as defaults
    for fast local lookup.
    """
    s, end = s_and_end
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        # Parse one element per iteration.
        try:
            value, end = scan_once(s, end)
        except StopIteration as err:
            # scan_once signals "no value here" via StopIteration whose
            # value is the failing index.
            raise JSONDecodeError("Expecting value", s, err.value) from None
        _append(value)
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting ',' delimiter", s, end - 1)
        try:
            # Fast-path skip of zero or one whitespace characters after the
            # comma; fall back to the regex for longer runs.
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass

    return values, end
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
class JSONDecoder(object):
    """Simple JSON <https://json.org> decoder.

    Applies the standard translations: JSON object -> dict,
    array -> list, string -> str, number -> int or float,
    true/false -> True/False, null -> None.  The non-standard constants
    ``NaN``, ``Infinity`` and ``-Infinity`` are also accepted and mapped
    to the corresponding ``float`` values.
    """

    def __init__(self, *, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True,
            object_pairs_hook=None):
        """Configure the decoder.

        object_hook: called with every decoded JSON object (a dict); its
            return value is used in place of the dict.  Useful for
            JSON-RPC style class hinting.
        object_pairs_hook: called with every decoded JSON object as an
            ordered list of (key, value) pairs; its return value replaces
            the dict.  Takes priority over object_hook when both are set.
        parse_float / parse_int: called with the literal text of every
            JSON float / int; default to float() and int().  Can be used
            to substitute another datatype (e.g. decimal.Decimal).
        parse_constant: called with one of '-Infinity', 'Infinity',
            'NaN'; can be used to reject these non-standard values.
        strict: when false, control characters (codes 0-31, including
            tab, newline, carriage return and NUL) are allowed inside
            strings.
        """
        self.object_hook = object_hook
        self.object_pairs_hook = object_pairs_hook
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        # Wire up the concrete parser callbacks, then build the scanner.
        # make_scanner() reads the attributes above, so it must come last.
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        self.memo = {}
        self.scan_once = scanner.make_scanner(self)


    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s``, a ``str``
        containing exactly one JSON document (plus optional whitespace).
        """
        result, pos = self.raw_decode(s, idx=_w(s, 0).end())
        pos = _w(s, pos).end()
        if pos != len(s):
            # Anything besides trailing whitespace after the document
            # is an error.
            raise JSONDecodeError("Extra data", s, pos)
        return result

    def raw_decode(self, s, idx=0):
        """Decode the JSON document beginning at ``s[idx]``.

        Returns a 2-tuple of the decoded Python object and the index in
        ``s`` just past the document's end, so callers can deal with
        trailing data themselves.
        """
        try:
            value, end = self.scan_once(s, idx)
        except StopIteration as exc:
            raise JSONDecodeError("Expecting value", s, exc.value) from None
        return value, end
|
deepseek/lib/python3.10/json/encoder.py
ADDED
|
@@ -0,0 +1,442 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Implementation of JSONEncoder
|
| 2 |
+
"""
|
| 3 |
+
import re
|
| 4 |
+
|
| 5 |
+
try:
|
| 6 |
+
from _json import encode_basestring_ascii as c_encode_basestring_ascii
|
| 7 |
+
except ImportError:
|
| 8 |
+
c_encode_basestring_ascii = None
|
| 9 |
+
try:
|
| 10 |
+
from _json import encode_basestring as c_encode_basestring
|
| 11 |
+
except ImportError:
|
| 12 |
+
c_encode_basestring = None
|
| 13 |
+
try:
|
| 14 |
+
from _json import make_encoder as c_make_encoder
|
| 15 |
+
except ImportError:
|
| 16 |
+
c_make_encoder = None
|
| 17 |
+
|
| 18 |
+
# Characters that must be escaped inside a JSON string literal:
# all control characters, the backslash and the double quote.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# For ASCII-only output: additionally escape everything outside space..'~'.
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Detects non-ASCII bytes in a bytes object.
HAS_UTF8 = re.compile(b'[\x80-\xff]')
# Maps each character with a short escape form to its JSON escape sequence.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
for i in range(0x20):
    # The remaining control characters get generic \uXXXX escapes.
    ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    #ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))

INFINITY = float('inf')
|
| 35 |
+
|
| 36 |
+
def py_encode_basestring(s):
    """Return *s* rendered as a quoted JSON string literal.

    Characters with a dedicated escape (quote, backslash, control
    characters) are replaced via ESCAPE_DCT; everything else is copied
    through unchanged.
    """
    escaped = ESCAPE.sub(lambda m: ESCAPE_DCT[m.group(0)], s)
    return '"{}"'.format(escaped)


# Prefer the C implementation when the _json accelerator is available.
encode_basestring = c_encode_basestring or py_encode_basestring
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def py_encode_basestring_ascii(s):
    """Return *s* as a quoted, pure-ASCII JSON string literal.

    Non-ASCII characters are written as \\uXXXX escapes; code points
    outside the Basic Multilingual Plane become a UTF-16 surrogate pair
    of two escapes.
    """
    def replace(match):
        ch = match.group(0)
        mapped = ESCAPE_DCT.get(ch)
        if mapped is not None:
            return mapped
        n = ord(ch)
        if n < 0x10000:
            return '\\u{0:04x}'.format(n)
        # Split a non-BMP code point into a UTF-16 surrogate pair.
        n -= 0x10000
        high = 0xd800 | ((n >> 10) & 0x3ff)
        low = 0xdc00 | (n & 0x3ff)
        return '\\u{0:04x}\\u{1:04x}'.format(high, low)
    return '"' + ESCAPE_ASCII.sub(replace, s) + '"'


# Prefer the C implementation when the _json accelerator is available.
encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
|
| 72 |
+
|
| 73 |
+
class JSONEncoder(object):
    """Extensible JSON <https://json.org> encoder for Python data structures.

    Handles dict, list/tuple, str, int/float, True, False and None out
    of the box (mapping to object, array, string, number, true, false
    and null respectively).  Anything else is routed through
    :meth:`default`, which subclasses override to return a serializable
    substitute; the base implementation raises ``TypeError``.
    """
    item_separator = ', '
    key_separator = ': '

    def __init__(self, *, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, default=None):
        """Constructor for JSONEncoder, with sensible defaults.

        skipkeys: when true, dict keys that are not str, int, float or
            None are silently skipped instead of raising TypeError.
        ensure_ascii: when true (the default), all non-ASCII characters
            in the output are escaped; when false, they are emitted as-is.
        check_circular: when true, containers are checked for circular
            references during encoding (otherwise such input recurses
            until RecursionError).
        allow_nan: when true, NaN/Infinity/-Infinity are encoded using
            their JavaScript names (not strict JSON); when false, such
            floats raise ValueError.
        sort_keys: when true, dict output is sorted by key -- handy for
            reproducible output in tests.
        indent: a non-negative integer (or string) enabling
            pretty-printing; 0 inserts only newlines; None (default) is
            the most compact form.
        separators: an (item_separator, key_separator) tuple; defaults
            to (', ', ': '), or (',', ': ') when *indent* is given.  Use
            (',', ':') for the most compact representation.
        default: a function called for otherwise unserializable objects;
            it must return an encodable value or raise TypeError.
        """

        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        elif indent is not None:
            # Pretty-printed output gets its whitespace from the indent,
            # so drop the space after the item separator.
            self.item_separator = ','
        if default is not None:
            self.default = default

    def default(self, o):
        """Hook for subclasses: return a serializable object for ``o``,
        or call the base implementation to raise ``TypeError``.

        For example, to support arbitrary iterators::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                # Let the base class default method raise the TypeError
                return JSONEncoder.default(self, o)

        """
        raise TypeError(f'Object of type {o.__class__.__name__} '
                        f'is not JSON serializable')

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> from json.encoder import JSONEncoder
        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'

        """
        if isinstance(o, str):
            # Fast path: a bare string needs none of the recursive
            # machinery below.
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            return encode_basestring(o)
        # Materialize the chunks before joining so that exceptions carry
        # better context than they would from inside ''.join(iterator).
        pieces = self.iterencode(o, _one_shot=True)
        if not isinstance(pieces, (list, tuple)):
            pieces = list(pieces)
        return ''.join(pieces)

    def iterencode(self, o, _one_shot=False):
        """Lazily yield string fragments of the JSON encoding of ``o``::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)

        """
        markers = {} if self.check_circular else None
        _encoder = encode_basestring_ascii if self.ensure_ascii \
            else encode_basestring

        def floatstr(o, allow_nan=self.allow_nan,
                _repr=float.__repr__, _inf=INFINITY, _neginf=-INFINITY):
            # Detect the special values by comparison rather than by
            # repr, which is processor/platform specific.
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)

            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))

            return text

        # Use the C accelerator only in the one-shot, no-indent case it
        # supports; otherwise fall back to the pure-Python generator.
        if (_one_shot and c_make_encoder is not None
                and self.indent is None):
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot)
        return _iterencode(o, 0)
|
| 258 |
+
|
| 259 |
+
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
        _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        ## HACK: hand-optimized bytecode; turn globals into locals
        ValueError=ValueError,
        dict=dict,
        float=float,
        id=id,
        int=int,
        isinstance=isinstance,
        list=list,
        str=str,
        tuple=tuple,
        _intstr=int.__repr__,
    ):
    # Build and return a pure-Python _iterencode(o, indent_level)
    # generator configured by the arguments above.  `markers` (a dict, or
    # None to disable the check) tracks container ids for
    # circular-reference detection; `_default` handles otherwise
    # unserializable objects; `_encoder` turns a str into a JSON string
    # literal; `_floatstr` formats floats (including the NaN/Infinity
    # policy).  The keyword defaults above merely pin builtins as locals
    # for faster lookup in CPython.

    # An integer indent means "that many spaces per level".
    if _indent is not None and not isinstance(_indent, str):
        _indent = ' ' * _indent

    def _iterencode_list(lst, _current_indent_level):
        # Yield the encoding of a list or tuple: '[' items ']'.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            # Remember this container by id while it is being encoded.
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + _indent * _current_indent_level
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                # After the first item, the pending prefix becomes the
                # separator; `buf` is always emitted before the item.
                buf = separator
            if isinstance(value, str):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, int):
                # Subclasses of int/float may override __repr__, but we still
                # want to encode them as integers/floats in JSON. One example
                # within the standard library is IntEnum.
                yield buf + _intstr(value)
            elif isinstance(value, float):
                # see comment above for int
                yield buf + _floatstr(value)
            else:
                yield buf
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                yield from chunks
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + _indent * _current_indent_level
        yield ']'
        if markers is not None:
            del markers[markerid]

    def _iterencode_dict(dct, _current_indent_level):
        # Yield the encoding of a dict: '{' key: value pairs '}'.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + _indent * _current_indent_level
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            items = sorted(dct.items())
        else:
            items = dct.items()
        for key, value in items:
            # Coerce non-str keys to their JSON string form first.
            if isinstance(key, str):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them. Many encoders seem to do something like this.
            elif isinstance(key, float):
                # see comment for int/float in _make_iterencode
                key = _floatstr(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif isinstance(key, int):
                # see comment for int/float in _make_iterencode
                key = _intstr(key)
            elif _skipkeys:
                continue
            else:
                raise TypeError(f'keys must be str, int, float, bool or None, '
                                f'not {key.__class__.__name__}')
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, str):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, int):
                # see comment for int/float in _make_iterencode
                yield _intstr(value)
            elif isinstance(value, float):
                # see comment for int/float in _make_iterencode
                yield _floatstr(value)
            else:
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                yield from chunks
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + _indent * _current_indent_level
        yield '}'
        if markers is not None:
            del markers[markerid]

    def _iterencode(o, _current_indent_level):
        # Top-level dispatch over every supported type.
        if isinstance(o, str):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, int):
            # see comment for int/float in _make_iterencode
            yield _intstr(o)
        elif isinstance(o, float):
            # see comment for int/float in _make_iterencode
            yield _floatstr(o)
        elif isinstance(o, (list, tuple)):
            yield from _iterencode_list(o, _current_indent_level)
        elif isinstance(o, dict):
            yield from _iterencode_dict(o, _current_indent_level)
        else:
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            # Fall back to the user-supplied default() and re-encode
            # whatever it returns.
            o = _default(o)
            yield from _iterencode(o, _current_indent_level)
            if markers is not None:
                del markers[markerid]
    return _iterencode
|
deepseek/lib/python3.10/json/scanner.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""JSON token scanner
|
| 2 |
+
"""
|
| 3 |
+
import re
|
| 4 |
+
try:
|
| 5 |
+
from _json import make_scanner as c_make_scanner
|
| 6 |
+
except ImportError:
|
| 7 |
+
c_make_scanner = None
|
| 8 |
+
|
| 9 |
+
# Public API of this helper module.
__all__ = ['make_scanner']

# Matches a JSON number at the current position: an integer part with no
# leading zeros, an optional fraction and an optional exponent.
NUMBER_RE = re.compile(
    r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
    (re.VERBOSE | re.MULTILINE | re.DOTALL))
|
| 14 |
+
|
| 15 |
+
def py_make_scanner(context):
    """Return a pure-Python ``scan_once(string, idx)`` callable.

    *context* supplies the per-decoder configuration (parse hooks,
    strictness flag, memo dict) and is normally a JSONDecoder instance.
    """
    # Bind the configuration to locals once; _scan_once is the hot path.
    object_parser = context.parse_object
    array_parser = context.parse_array
    string_parser = context.parse_string
    number_match = NUMBER_RE.match
    strict = context.strict
    float_parser = context.parse_float
    int_parser = context.parse_int
    constant_parser = context.parse_constant
    object_hook = context.object_hook
    object_pairs_hook = context.object_pairs_hook
    memo = context.memo

    def _scan_once(s, idx):
        # Decode the single JSON value starting at s[idx]; return
        # (value, end_index).  StopIteration(idx) signals "no value here".
        try:
            ch = s[idx]
        except IndexError:
            raise StopIteration(idx) from None

        if ch == '"':
            return string_parser(s, idx + 1, strict)
        if ch == '{':
            return object_parser((s, idx + 1), strict,
                                 _scan_once, object_hook,
                                 object_pairs_hook, memo)
        if ch == '[':
            return array_parser((s, idx + 1), _scan_once)
        if ch == 'n' and s[idx:idx + 4] == 'null':
            return None, idx + 4
        if ch == 't' and s[idx:idx + 4] == 'true':
            return True, idx + 4
        if ch == 'f' and s[idx:idx + 5] == 'false':
            return False, idx + 5

        number = number_match(s, idx)
        if number is not None:
            integer, frac, exp = number.groups()
            if frac or exp:
                value = float_parser(integer + (frac or '') + (exp or ''))
            else:
                value = int_parser(integer)
            return value, number.end()
        # Non-standard constants accepted by this scanner.
        if ch == 'N' and s[idx:idx + 3] == 'NaN':
            return constant_parser('NaN'), idx + 3
        if ch == 'I' and s[idx:idx + 8] == 'Infinity':
            return constant_parser('Infinity'), idx + 8
        if ch == '-' and s[idx:idx + 9] == '-Infinity':
            return constant_parser('-Infinity'), idx + 9
        raise StopIteration(idx)

    def scan_once(s, idx):
        # Public entry point: clear the per-document string memo after
        # every top-level scan, success or failure.
        try:
            return _scan_once(s, idx)
        finally:
            memo.clear()

    return scan_once

# Prefer the C implementation when the _json accelerator is available.
make_scanner = c_make_scanner or py_make_scanner
|
deepseek/lib/python3.10/multiprocessing/forkserver.py
ADDED
|
@@ -0,0 +1,348 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import errno
|
| 2 |
+
import os
|
| 3 |
+
import selectors
|
| 4 |
+
import signal
|
| 5 |
+
import socket
|
| 6 |
+
import struct
|
| 7 |
+
import sys
|
| 8 |
+
import threading
|
| 9 |
+
import warnings
|
| 10 |
+
|
| 11 |
+
from . import connection
|
| 12 |
+
from . import process
|
| 13 |
+
from .context import reduction
|
| 14 |
+
from . import resource_tracker
|
| 15 |
+
from . import spawn
|
| 16 |
+
from . import util
|
| 17 |
+
|
| 18 |
+
__all__ = ['ensure_running', 'get_inherited_fds', 'connect_to_new_process',
|
| 19 |
+
'set_forkserver_preload']
|
| 20 |
+
|
| 21 |
+
#
|
| 22 |
+
#
|
| 23 |
+
#
|
| 24 |
+
|
| 25 |
+
# Upper bound on the number of fds sent with a single forkserver request.
MAXFDS_TO_SEND = 256
SIGNED_STRUCT = struct.Struct('q')     # large enough for pid_t
|
| 27 |
+
|
| 28 |
+
#
|
| 29 |
+
# Forkserver class
|
| 30 |
+
#
|
| 31 |
+
|
| 32 |
+
class ForkServer(object):
|
| 33 |
+
|
| 34 |
+
def __init__(self):
|
| 35 |
+
self._forkserver_address = None
|
| 36 |
+
self._forkserver_alive_fd = None
|
| 37 |
+
self._forkserver_pid = None
|
| 38 |
+
self._inherited_fds = None
|
| 39 |
+
self._lock = threading.Lock()
|
| 40 |
+
self._preload_modules = ['__main__']
|
| 41 |
+
|
| 42 |
+
    def _stop(self):
        """Stop the forkserver process, taking the instance lock first."""
        # Method used by unit tests to stop the server
        with self._lock:
            self._stop_unlocked()
|
| 46 |
+
|
| 47 |
+
    def _stop_unlocked(self):
        """Shut down the forkserver; the caller must hold self._lock."""
        if self._forkserver_pid is None:
            # Never started (or already stopped): nothing to do.
            return

        # Closing the "alive" file descriptor asks the server to stop.
        os.close(self._forkserver_alive_fd)
        self._forkserver_alive_fd = None

        # Reap the server process so it does not linger as a zombie.
        os.waitpid(self._forkserver_pid, 0)
        self._forkserver_pid = None

        # Abstract-namespace sockets vanish on close; a filesystem socket
        # leaves a file behind that must be removed explicitly.
        if not util.is_abstract_socket_namespace(self._forkserver_address):
            os.unlink(self._forkserver_address)
        self._forkserver_address = None
|
| 61 |
+
|
| 62 |
+
def set_forkserver_preload(self, modules_names):
|
| 63 |
+
'''Set list of module names to try to load in forkserver process.'''
|
| 64 |
+
if not all(type(mod) is str for mod in self._preload_modules):
|
| 65 |
+
raise TypeError('module_names must be a list of strings')
|
| 66 |
+
self._preload_modules = modules_names
|
| 67 |
+
|
| 68 |
+
def get_inherited_fds(self):
|
| 69 |
+
'''Return list of fds inherited from parent process.
|
| 70 |
+
|
| 71 |
+
This returns None if the current process was not started by fork
|
| 72 |
+
server.
|
| 73 |
+
'''
|
| 74 |
+
return self._inherited_fds
|
| 75 |
+
|
| 76 |
+
def connect_to_new_process(self, fds):
|
| 77 |
+
'''Request forkserver to create a child process.
|
| 78 |
+
|
| 79 |
+
Returns a pair of fds (status_r, data_w). The calling process can read
|
| 80 |
+
the child process's pid and (eventually) its returncode from status_r.
|
| 81 |
+
The calling process should write to data_w the pickled preparation and
|
| 82 |
+
process data.
|
| 83 |
+
'''
|
| 84 |
+
self.ensure_running()
|
| 85 |
+
if len(fds) + 4 >= MAXFDS_TO_SEND:
|
| 86 |
+
raise ValueError('too many fds')
|
| 87 |
+
with socket.socket(socket.AF_UNIX) as client:
|
| 88 |
+
client.connect(self._forkserver_address)
|
| 89 |
+
parent_r, child_w = os.pipe()
|
| 90 |
+
child_r, parent_w = os.pipe()
|
| 91 |
+
allfds = [child_r, child_w, self._forkserver_alive_fd,
|
| 92 |
+
resource_tracker.getfd()]
|
| 93 |
+
allfds += fds
|
| 94 |
+
try:
|
| 95 |
+
reduction.sendfds(client, allfds)
|
| 96 |
+
return parent_r, parent_w
|
| 97 |
+
except:
|
| 98 |
+
os.close(parent_r)
|
| 99 |
+
os.close(parent_w)
|
| 100 |
+
raise
|
| 101 |
+
finally:
|
| 102 |
+
os.close(child_r)
|
| 103 |
+
os.close(child_w)
|
| 104 |
+
|
| 105 |
+
def ensure_running(self):
|
| 106 |
+
'''Make sure that a fork server is running.
|
| 107 |
+
|
| 108 |
+
This can be called from any process. Note that usually a child
|
| 109 |
+
process will just reuse the forkserver started by its parent, so
|
| 110 |
+
ensure_running() will do nothing.
|
| 111 |
+
'''
|
| 112 |
+
with self._lock:
|
| 113 |
+
resource_tracker.ensure_running()
|
| 114 |
+
if self._forkserver_pid is not None:
|
| 115 |
+
# forkserver was launched before, is it still running?
|
| 116 |
+
pid, status = os.waitpid(self._forkserver_pid, os.WNOHANG)
|
| 117 |
+
if not pid:
|
| 118 |
+
# still alive
|
| 119 |
+
return
|
| 120 |
+
# dead, launch it again
|
| 121 |
+
os.close(self._forkserver_alive_fd)
|
| 122 |
+
self._forkserver_address = None
|
| 123 |
+
self._forkserver_alive_fd = None
|
| 124 |
+
self._forkserver_pid = None
|
| 125 |
+
|
| 126 |
+
cmd = ('from multiprocessing.forkserver import main; ' +
|
| 127 |
+
'main(%d, %d, %r, **%r)')
|
| 128 |
+
|
| 129 |
+
if self._preload_modules:
|
| 130 |
+
desired_keys = {'main_path', 'sys_path'}
|
| 131 |
+
data = spawn.get_preparation_data('ignore')
|
| 132 |
+
data = {x: y for x, y in data.items() if x in desired_keys}
|
| 133 |
+
else:
|
| 134 |
+
data = {}
|
| 135 |
+
|
| 136 |
+
with socket.socket(socket.AF_UNIX) as listener:
|
| 137 |
+
address = connection.arbitrary_address('AF_UNIX')
|
| 138 |
+
listener.bind(address)
|
| 139 |
+
if not util.is_abstract_socket_namespace(address):
|
| 140 |
+
os.chmod(address, 0o600)
|
| 141 |
+
listener.listen()
|
| 142 |
+
|
| 143 |
+
# all client processes own the write end of the "alive" pipe;
|
| 144 |
+
# when they all terminate the read end becomes ready.
|
| 145 |
+
alive_r, alive_w = os.pipe()
|
| 146 |
+
try:
|
| 147 |
+
fds_to_pass = [listener.fileno(), alive_r]
|
| 148 |
+
cmd %= (listener.fileno(), alive_r, self._preload_modules,
|
| 149 |
+
data)
|
| 150 |
+
exe = spawn.get_executable()
|
| 151 |
+
args = [exe] + util._args_from_interpreter_flags()
|
| 152 |
+
args += ['-c', cmd]
|
| 153 |
+
pid = util.spawnv_passfds(exe, args, fds_to_pass)
|
| 154 |
+
except:
|
| 155 |
+
os.close(alive_w)
|
| 156 |
+
raise
|
| 157 |
+
finally:
|
| 158 |
+
os.close(alive_r)
|
| 159 |
+
self._forkserver_address = address
|
| 160 |
+
self._forkserver_alive_fd = alive_w
|
| 161 |
+
self._forkserver_pid = pid
|
| 162 |
+
|
| 163 |
+
#
|
| 164 |
+
#
|
| 165 |
+
#
|
| 166 |
+
|
| 167 |
+
def main(listener_fd, alive_r, preload, main_path=None, sys_path=None):
    '''Run forkserver.

    listener_fd -- fd of the bound, listening AF_UNIX socket inherited
        from the launching process.
    alive_r -- read end of the "alive" pipe; EOF here means every client
        has exited, so the server shuts down.
    preload -- module names to import once so forked children share them.
    main_path -- optional path of the client's __main__ module.
    sys_path -- accepted for keyword compatibility; not used in this body.
    '''
    if preload:
        if '__main__' in preload and main_path is not None:
            # Import the caller's __main__ under the usual spawn guard.
            process.current_process()._inheriting = True
            try:
                spawn.import_main_path(main_path)
            finally:
                del process.current_process()._inheriting
        for modname in preload:
            try:
                __import__(modname)
            except ImportError:
                # Preloading is best-effort; failures are ignored.
                pass

    util._close_stdin()

    # Self-pipe used as the signal wakeup fd; must be non-blocking.
    sig_r, sig_w = os.pipe()
    os.set_blocking(sig_r, False)
    os.set_blocking(sig_w, False)

    def sigchld_handler(*_unused):
        # Dummy signal handler, doesn't do anything
        pass

    handlers = {
        # unblocking SIGCHLD allows the wakeup fd to notify our event loop
        signal.SIGCHLD: sigchld_handler,
        # protect the process from ^C
        signal.SIGINT: signal.SIG_IGN,
    }
    # Remember the previous handlers so forked children can restore them.
    old_handlers = {sig: signal.signal(sig, val)
                    for (sig, val) in handlers.items()}

    # calling os.write() in the Python signal handler is racy
    signal.set_wakeup_fd(sig_w)

    # map child pids to client fds
    pid_to_fd = {}

    with socket.socket(socket.AF_UNIX, fileno=listener_fd) as listener, \
            selectors.DefaultSelector() as selector:
        _forkserver._forkserver_address = listener.getsockname()

        selector.register(listener, selectors.EVENT_READ)
        selector.register(alive_r, selectors.EVENT_READ)
        selector.register(sig_r, selectors.EVENT_READ)

        while True:
            try:
                # Block until at least one registered fd is readable.
                while True:
                    rfds = [key.fileobj for (key, events) in selector.select()]
                    if rfds:
                        break

                if alive_r in rfds:
                    # EOF because no more client processes left
                    assert os.read(alive_r, 1) == b'', "Not at EOF?"
                    raise SystemExit

                if sig_r in rfds:
                    # Got SIGCHLD
                    os.read(sig_r, 65536)  # exhaust
                    while True:
                        # Scan for child processes
                        try:
                            pid, sts = os.waitpid(-1, os.WNOHANG)
                        except ChildProcessError:
                            break
                        if pid == 0:
                            break
                        child_w = pid_to_fd.pop(pid, None)
                        if child_w is not None:
                            returncode = os.waitstatus_to_exitcode(sts)

                            # Send exit code to client process
                            try:
                                write_signed(child_w, returncode)
                            except BrokenPipeError:
                                # client vanished
                                pass
                            os.close(child_w)
                        else:
                            # This shouldn't happen really
                            warnings.warn('forkserver: waitpid returned '
                                          'unexpected pid %d' % pid)

                if listener in rfds:
                    # Incoming fork request
                    with listener.accept()[0] as s:
                        # Receive fds from client
                        fds = reduction.recvfds(s, MAXFDS_TO_SEND + 1)
                        if len(fds) > MAXFDS_TO_SEND:
                            raise RuntimeError(
                                "Too many ({0:n}) fds to send".format(
                                    len(fds)))
                        # First two fds are the child's pipe ends; the rest
                        # (alive fd, tracker fd, user fds) go to _serve_one.
                        child_r, child_w, *fds = fds
                        s.close()
                        pid = os.fork()
                        if pid == 0:
                            # Child
                            code = 1
                            try:
                                listener.close()
                                selector.close()
                                unused_fds = [alive_r, child_w, sig_r, sig_w]
                                unused_fds.extend(pid_to_fd.values())
                                code = _serve_one(child_r, fds,
                                                  unused_fds,
                                                  old_handlers)
                            except Exception:
                                sys.excepthook(*sys.exc_info())
                                sys.stderr.flush()
                            finally:
                                # Never return into the server loop from
                                # the forked child.
                                os._exit(code)
                        else:
                            # Send pid to client process
                            try:
                                write_signed(child_w, pid)
                            except BrokenPipeError:
                                # client vanished
                                pass
                            pid_to_fd[pid] = child_w
                            os.close(child_r)
                            for fd in fds:
                                os.close(fd)

            except OSError as e:
                # A client aborting its connection mid-accept is harmless.
                if e.errno != errno.ECONNABORTED:
                    raise
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
def _serve_one(child_r, fds, unused_fds, handlers):
    '''Run one client job in the freshly forked child; return its exit code.

    child_r -- read end of the pipe carrying the pickled process data.
    fds -- remaining fds from the request: alive fd, resource-tracker fd,
        then any fds the client asked to inherit (see main()'s unpacking).
    unused_fds -- server-side fds the child must not keep open.
    handlers -- original signal handlers to restore (saved in main()).
    '''
    # close unnecessary stuff and reset signal handlers
    signal.set_wakeup_fd(-1)  # undo the server's wakeup-fd registration
    for sig, val in handlers.items():
        signal.signal(sig, val)
    for fd in unused_fds:
        os.close(fd)

    # Publish the per-child fds on the module singleton so this process
    # behaves like a regular forkserver-started child.
    (_forkserver._forkserver_alive_fd,
     resource_tracker._resource_tracker._fd,
     *_forkserver._inherited_fds) = fds

    # Run process object received over pipe
    parent_sentinel = os.dup(child_r)
    code = spawn._main(child_r, parent_sentinel)

    return code
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
#
|
| 319 |
+
# Read and write signed numbers
|
| 320 |
+
#
|
| 321 |
+
|
| 322 |
+
def read_signed(fd):
    """Read one SIGNED_STRUCT-encoded signed integer from *fd*.

    Loops until the full struct has been read; raises EOFError if the
    stream ends early.
    """
    size = SIGNED_STRUCT.size
    chunks = []
    received = 0
    while received < size:
        chunk = os.read(fd, size - received)
        if not chunk:
            raise EOFError('unexpected EOF')
        chunks.append(chunk)
        received += len(chunk)
    return SIGNED_STRUCT.unpack(b''.join(chunks))[0]
|
| 331 |
+
|
| 332 |
+
def write_signed(fd, n):
    """Write *n* to *fd* as a SIGNED_STRUCT-encoded signed integer.

    Retries partial writes until the whole struct is out.
    """
    pending = SIGNED_STRUCT.pack(n)
    while pending:
        written = os.write(fd, pending)
        if written == 0:
            raise RuntimeError('should not get here')
        pending = pending[written:]
|
| 339 |
+
|
| 340 |
+
#
# Module-level singleton and its public, bound-method API
#

_forkserver = ForkServer()
ensure_running = _forkserver.ensure_running
get_inherited_fds = _forkserver.get_inherited_fds
connect_to_new_process = _forkserver.connect_to_new_process
set_forkserver_preload = _forkserver.set_forkserver_preload
|
deepseek/lib/python3.10/multiprocessing/managers.py
ADDED
|
@@ -0,0 +1,1378 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Module providing manager classes for dealing
|
| 3 |
+
# with shared objects
|
| 4 |
+
#
|
| 5 |
+
# multiprocessing/managers.py
|
| 6 |
+
#
|
| 7 |
+
# Copyright (c) 2006-2008, R Oudkerk
|
| 8 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 9 |
+
#
|
| 10 |
+
|
| 11 |
+
# Public API; 'SharedMemoryManager' is appended below when shared_memory imports.
__all__ = [ 'BaseManager', 'SyncManager', 'BaseProxy', 'Token' ]
|
| 12 |
+
|
| 13 |
+
#
|
| 14 |
+
# Imports
|
| 15 |
+
#
|
| 16 |
+
|
| 17 |
+
import sys
|
| 18 |
+
import threading
|
| 19 |
+
import signal
|
| 20 |
+
import array
|
| 21 |
+
import queue
|
| 22 |
+
import time
|
| 23 |
+
import types
|
| 24 |
+
import os
|
| 25 |
+
from os import getpid
|
| 26 |
+
|
| 27 |
+
from traceback import format_exc
|
| 28 |
+
|
| 29 |
+
from . import connection
|
| 30 |
+
from .context import reduction, get_spawning_popen, ProcessError
|
| 31 |
+
from . import pool
|
| 32 |
+
from . import process
|
| 33 |
+
from . import util
|
| 34 |
+
from . import get_context
|
| 35 |
+
try:
|
| 36 |
+
from . import shared_memory
|
| 37 |
+
except ImportError:
|
| 38 |
+
HAS_SHMEM = False
|
| 39 |
+
else:
|
| 40 |
+
HAS_SHMEM = True
|
| 41 |
+
__all__.append('SharedMemoryManager')
|
| 42 |
+
|
| 43 |
+
#
|
| 44 |
+
# Register some things for pickling
|
| 45 |
+
#
|
| 46 |
+
|
| 47 |
+
def reduce_array(a):
    # Pickle array.array as (typecode, raw bytes) so it survives transport.
    return array.array, (a.typecode, a.tobytes())
reduction.register(array.array, reduce_array)
|
| 50 |
+
|
| 51 |
+
# Dict view objects (items/keys/values) aren't picklable; register reducers
# that serialize them as plain lists.
view_types = [type(getattr({}, name)()) for name in ('items','keys','values')]
if view_types[0] is not list:       # only needed in Py3.0
    def rebuild_as_list(obj):
        return list, (list(obj),)
    for view_type in view_types:
        reduction.register(view_type, rebuild_as_list)
|
| 57 |
+
|
| 58 |
+
#
|
| 59 |
+
# Type for identifying shared objects
|
| 60 |
+
#
|
| 61 |
+
|
| 62 |
+
class Token(object):
    '''
    Type to uniquely identify a shared object

    A token is the (typeid, server address, object id) triple that proxies
    use to refer to an object living in a manager's server process.
    '''
    __slots__ = ('typeid', 'address', 'id')

    def __init__(self, typeid, address, id):
        self.typeid = typeid
        self.address = address
        self.id = id

    def __getstate__(self):
        # __slots__ means no __dict__, so pickle via an explicit tuple.
        return (self.typeid, self.address, self.id)

    def __setstate__(self, state):
        self.typeid, self.address, self.id = state

    def __repr__(self):
        name = self.__class__.__name__
        return (f'{name}(typeid={self.typeid!r}, '
                f'address={self.address!r}, id={self.id!r})')
|
| 80 |
+
|
| 81 |
+
#
|
| 82 |
+
# Function for communication with a manager's server process
|
| 83 |
+
#
|
| 84 |
+
|
| 85 |
+
def dispatch(c, id, methodname, args=(), kwds=None):
    '''
    Send a message to manager using connection `c` and return response

    Sends (id, methodname, args, kwds), then waits for a (kind, result)
    reply.  '#RETURN' replies yield `result`; anything else is converted
    to an exception and raised.
    '''
    # Fixed: the original used a mutable default (kwds={}), which is shared
    # across calls and a latent aliasing hazard; use a None sentinel instead.
    if kwds is None:
        kwds = {}
    c.send((id, methodname, args, kwds))
    kind, result = c.recv()
    if kind == '#RETURN':
        return result
    raise convert_to_error(kind, result)
|
| 94 |
+
|
| 95 |
+
def convert_to_error(kind, result):
    """Turn a non-'#RETURN' (kind, result) server reply into an exception.

    '#ERROR' replies carry a ready-made exception instance; traceback-style
    replies are wrapped in RemoteError; unknown kinds become a ValueError.
    """
    if kind == '#ERROR':
        return result
    if kind in ('#TRACEBACK', '#UNSERIALIZABLE'):
        if not isinstance(result, str):
            raise TypeError(
                "Result {0!r} (kind '{1}') type is {2}, not str".format(
                    result, kind, type(result)))
        if kind == '#UNSERIALIZABLE':
            return RemoteError('Unserializable message: %s\n' % result)
        return RemoteError(result)
    return ValueError('Unrecognized message type {!r}'.format(kind))
|
| 109 |
+
|
| 110 |
+
class RemoteError(Exception):
    """Exception representing a traceback that occurred in the manager process."""

    def __str__(self):
        # Frame the remote traceback text between two 75-dash rules.
        bar = '-' * 75
        return '\n{0}\n{1}{0}'.format(bar, str(self.args[0]))
|
| 113 |
+
|
| 114 |
+
#
|
| 115 |
+
# Functions for finding the method names of an object
|
| 116 |
+
#
|
| 117 |
+
|
| 118 |
+
def all_methods(obj):
    '''
    Return a list of names of methods of `obj`

    Order follows dir(); every callable attribute counts as a "method".
    '''
    return [name for name in dir(obj) if callable(getattr(obj, name))]
|
| 128 |
+
|
| 129 |
+
def public_methods(obj):
    '''
    Return a list of names of methods of `obj` which do not start with '_'
    '''
    return [name for name in all_methods(obj) if not name.startswith('_')]
|
| 134 |
+
|
| 135 |
+
#
|
| 136 |
+
# Server which is run in a process controlled by a manager
|
| 137 |
+
#
|
| 138 |
+
|
| 139 |
+
class Server(object):
|
| 140 |
+
'''
|
| 141 |
+
Server class which runs in a process controlled by a manager object
|
| 142 |
+
'''
|
| 143 |
+
public = ['shutdown', 'create', 'accept_connection', 'get_methods',
|
| 144 |
+
'debug_info', 'number_of_objects', 'dummy', 'incref', 'decref']
|
| 145 |
+
|
| 146 |
+
def __init__(self, registry, address, authkey, serializer):
|
| 147 |
+
if not isinstance(authkey, bytes):
|
| 148 |
+
raise TypeError(
|
| 149 |
+
"Authkey {0!r} is type {1!s}, not bytes".format(
|
| 150 |
+
authkey, type(authkey)))
|
| 151 |
+
self.registry = registry
|
| 152 |
+
self.authkey = process.AuthenticationString(authkey)
|
| 153 |
+
Listener, Client = listener_client[serializer]
|
| 154 |
+
|
| 155 |
+
# do authentication later
|
| 156 |
+
self.listener = Listener(address=address, backlog=16)
|
| 157 |
+
self.address = self.listener.address
|
| 158 |
+
|
| 159 |
+
self.id_to_obj = {'0': (None, ())}
|
| 160 |
+
self.id_to_refcount = {}
|
| 161 |
+
self.id_to_local_proxy_obj = {}
|
| 162 |
+
self.mutex = threading.Lock()
|
| 163 |
+
|
| 164 |
+
def serve_forever(self):
|
| 165 |
+
'''
|
| 166 |
+
Run the server forever
|
| 167 |
+
'''
|
| 168 |
+
self.stop_event = threading.Event()
|
| 169 |
+
process.current_process()._manager_server = self
|
| 170 |
+
try:
|
| 171 |
+
accepter = threading.Thread(target=self.accepter)
|
| 172 |
+
accepter.daemon = True
|
| 173 |
+
accepter.start()
|
| 174 |
+
try:
|
| 175 |
+
while not self.stop_event.is_set():
|
| 176 |
+
self.stop_event.wait(1)
|
| 177 |
+
except (KeyboardInterrupt, SystemExit):
|
| 178 |
+
pass
|
| 179 |
+
finally:
|
| 180 |
+
if sys.stdout != sys.__stdout__: # what about stderr?
|
| 181 |
+
util.debug('resetting stdout, stderr')
|
| 182 |
+
sys.stdout = sys.__stdout__
|
| 183 |
+
sys.stderr = sys.__stderr__
|
| 184 |
+
sys.exit(0)
|
| 185 |
+
|
| 186 |
+
def accepter(self):
|
| 187 |
+
while True:
|
| 188 |
+
try:
|
| 189 |
+
c = self.listener.accept()
|
| 190 |
+
except OSError:
|
| 191 |
+
continue
|
| 192 |
+
t = threading.Thread(target=self.handle_request, args=(c,))
|
| 193 |
+
t.daemon = True
|
| 194 |
+
t.start()
|
| 195 |
+
|
| 196 |
+
def _handle_request(self, c):
|
| 197 |
+
request = None
|
| 198 |
+
try:
|
| 199 |
+
connection.deliver_challenge(c, self.authkey)
|
| 200 |
+
connection.answer_challenge(c, self.authkey)
|
| 201 |
+
request = c.recv()
|
| 202 |
+
ignore, funcname, args, kwds = request
|
| 203 |
+
assert funcname in self.public, '%r unrecognized' % funcname
|
| 204 |
+
func = getattr(self, funcname)
|
| 205 |
+
except Exception:
|
| 206 |
+
msg = ('#TRACEBACK', format_exc())
|
| 207 |
+
else:
|
| 208 |
+
try:
|
| 209 |
+
result = func(c, *args, **kwds)
|
| 210 |
+
except Exception:
|
| 211 |
+
msg = ('#TRACEBACK', format_exc())
|
| 212 |
+
else:
|
| 213 |
+
msg = ('#RETURN', result)
|
| 214 |
+
|
| 215 |
+
try:
|
| 216 |
+
c.send(msg)
|
| 217 |
+
except Exception as e:
|
| 218 |
+
try:
|
| 219 |
+
c.send(('#TRACEBACK', format_exc()))
|
| 220 |
+
except Exception:
|
| 221 |
+
pass
|
| 222 |
+
util.info('Failure to send message: %r', msg)
|
| 223 |
+
util.info(' ... request was %r', request)
|
| 224 |
+
util.info(' ... exception was %r', e)
|
| 225 |
+
|
| 226 |
+
def handle_request(self, conn):
|
| 227 |
+
'''
|
| 228 |
+
Handle a new connection
|
| 229 |
+
'''
|
| 230 |
+
try:
|
| 231 |
+
self._handle_request(conn)
|
| 232 |
+
except SystemExit:
|
| 233 |
+
# Server.serve_client() calls sys.exit(0) on EOF
|
| 234 |
+
pass
|
| 235 |
+
finally:
|
| 236 |
+
conn.close()
|
| 237 |
+
|
| 238 |
+
def serve_client(self, conn):
|
| 239 |
+
'''
|
| 240 |
+
Handle requests from the proxies in a particular process/thread
|
| 241 |
+
'''
|
| 242 |
+
util.debug('starting server thread to service %r',
|
| 243 |
+
threading.current_thread().name)
|
| 244 |
+
|
| 245 |
+
recv = conn.recv
|
| 246 |
+
send = conn.send
|
| 247 |
+
id_to_obj = self.id_to_obj
|
| 248 |
+
|
| 249 |
+
while not self.stop_event.is_set():
|
| 250 |
+
|
| 251 |
+
try:
|
| 252 |
+
methodname = obj = None
|
| 253 |
+
request = recv()
|
| 254 |
+
ident, methodname, args, kwds = request
|
| 255 |
+
try:
|
| 256 |
+
obj, exposed, gettypeid = id_to_obj[ident]
|
| 257 |
+
except KeyError as ke:
|
| 258 |
+
try:
|
| 259 |
+
obj, exposed, gettypeid = \
|
| 260 |
+
self.id_to_local_proxy_obj[ident]
|
| 261 |
+
except KeyError:
|
| 262 |
+
raise ke
|
| 263 |
+
|
| 264 |
+
if methodname not in exposed:
|
| 265 |
+
raise AttributeError(
|
| 266 |
+
'method %r of %r object is not in exposed=%r' %
|
| 267 |
+
(methodname, type(obj), exposed)
|
| 268 |
+
)
|
| 269 |
+
|
| 270 |
+
function = getattr(obj, methodname)
|
| 271 |
+
|
| 272 |
+
try:
|
| 273 |
+
res = function(*args, **kwds)
|
| 274 |
+
except Exception as e:
|
| 275 |
+
msg = ('#ERROR', e)
|
| 276 |
+
else:
|
| 277 |
+
typeid = gettypeid and gettypeid.get(methodname, None)
|
| 278 |
+
if typeid:
|
| 279 |
+
rident, rexposed = self.create(conn, typeid, res)
|
| 280 |
+
token = Token(typeid, self.address, rident)
|
| 281 |
+
msg = ('#PROXY', (rexposed, token))
|
| 282 |
+
else:
|
| 283 |
+
msg = ('#RETURN', res)
|
| 284 |
+
|
| 285 |
+
except AttributeError:
|
| 286 |
+
if methodname is None:
|
| 287 |
+
msg = ('#TRACEBACK', format_exc())
|
| 288 |
+
else:
|
| 289 |
+
try:
|
| 290 |
+
fallback_func = self.fallback_mapping[methodname]
|
| 291 |
+
result = fallback_func(
|
| 292 |
+
self, conn, ident, obj, *args, **kwds
|
| 293 |
+
)
|
| 294 |
+
msg = ('#RETURN', result)
|
| 295 |
+
except Exception:
|
| 296 |
+
msg = ('#TRACEBACK', format_exc())
|
| 297 |
+
|
| 298 |
+
except EOFError:
|
| 299 |
+
util.debug('got EOF -- exiting thread serving %r',
|
| 300 |
+
threading.current_thread().name)
|
| 301 |
+
sys.exit(0)
|
| 302 |
+
|
| 303 |
+
except Exception:
|
| 304 |
+
msg = ('#TRACEBACK', format_exc())
|
| 305 |
+
|
| 306 |
+
try:
|
| 307 |
+
try:
|
| 308 |
+
send(msg)
|
| 309 |
+
except Exception:
|
| 310 |
+
send(('#UNSERIALIZABLE', format_exc()))
|
| 311 |
+
except Exception as e:
|
| 312 |
+
util.info('exception in thread serving %r',
|
| 313 |
+
threading.current_thread().name)
|
| 314 |
+
util.info(' ... message was %r', msg)
|
| 315 |
+
util.info(' ... exception was %r', e)
|
| 316 |
+
conn.close()
|
| 317 |
+
sys.exit(1)
|
| 318 |
+
|
| 319 |
+
def fallback_getvalue(self, conn, ident, obj):
|
| 320 |
+
return obj
|
| 321 |
+
|
| 322 |
+
def fallback_str(self, conn, ident, obj):
|
| 323 |
+
return str(obj)
|
| 324 |
+
|
| 325 |
+
def fallback_repr(self, conn, ident, obj):
|
| 326 |
+
return repr(obj)
|
| 327 |
+
|
| 328 |
+
fallback_mapping = {
|
| 329 |
+
'__str__':fallback_str,
|
| 330 |
+
'__repr__':fallback_repr,
|
| 331 |
+
'#GETVALUE':fallback_getvalue
|
| 332 |
+
}
|
| 333 |
+
|
| 334 |
+
def dummy(self, c):
|
| 335 |
+
pass
|
| 336 |
+
|
| 337 |
+
def debug_info(self, c):
|
| 338 |
+
'''
|
| 339 |
+
Return some info --- useful to spot problems with refcounting
|
| 340 |
+
'''
|
| 341 |
+
# Perhaps include debug info about 'c'?
|
| 342 |
+
with self.mutex:
|
| 343 |
+
result = []
|
| 344 |
+
keys = list(self.id_to_refcount.keys())
|
| 345 |
+
keys.sort()
|
| 346 |
+
for ident in keys:
|
| 347 |
+
if ident != '0':
|
| 348 |
+
result.append(' %s: refcount=%s\n %s' %
|
| 349 |
+
(ident, self.id_to_refcount[ident],
|
| 350 |
+
str(self.id_to_obj[ident][0])[:75]))
|
| 351 |
+
return '\n'.join(result)
|
| 352 |
+
|
| 353 |
+
def number_of_objects(self, c):
|
| 354 |
+
'''
|
| 355 |
+
Number of shared objects
|
| 356 |
+
'''
|
| 357 |
+
# Doesn't use (len(self.id_to_obj) - 1) as we shouldn't count ident='0'
|
| 358 |
+
return len(self.id_to_refcount)
|
| 359 |
+
|
| 360 |
+
def shutdown(self, c):
|
| 361 |
+
'''
|
| 362 |
+
Shutdown this process
|
| 363 |
+
'''
|
| 364 |
+
try:
|
| 365 |
+
util.debug('manager received shutdown message')
|
| 366 |
+
c.send(('#RETURN', None))
|
| 367 |
+
except:
|
| 368 |
+
import traceback
|
| 369 |
+
traceback.print_exc()
|
| 370 |
+
finally:
|
| 371 |
+
self.stop_event.set()
|
| 372 |
+
|
| 373 |
+
def create(self, c, typeid, /, *args, **kwds):
|
| 374 |
+
'''
|
| 375 |
+
Create a new shared object and return its id
|
| 376 |
+
'''
|
| 377 |
+
with self.mutex:
|
| 378 |
+
callable, exposed, method_to_typeid, proxytype = \
|
| 379 |
+
self.registry[typeid]
|
| 380 |
+
|
| 381 |
+
if callable is None:
|
| 382 |
+
if kwds or (len(args) != 1):
|
| 383 |
+
raise ValueError(
|
| 384 |
+
"Without callable, must have one non-keyword argument")
|
| 385 |
+
obj = args[0]
|
| 386 |
+
else:
|
| 387 |
+
obj = callable(*args, **kwds)
|
| 388 |
+
|
| 389 |
+
if exposed is None:
|
| 390 |
+
exposed = public_methods(obj)
|
| 391 |
+
if method_to_typeid is not None:
|
| 392 |
+
if not isinstance(method_to_typeid, dict):
|
| 393 |
+
raise TypeError(
|
| 394 |
+
"Method_to_typeid {0!r}: type {1!s}, not dict".format(
|
| 395 |
+
method_to_typeid, type(method_to_typeid)))
|
| 396 |
+
exposed = list(exposed) + list(method_to_typeid)
|
| 397 |
+
|
| 398 |
+
ident = '%x' % id(obj) # convert to string because xmlrpclib
|
| 399 |
+
# only has 32 bit signed integers
|
| 400 |
+
util.debug('%r callable returned object with id %r', typeid, ident)
|
| 401 |
+
|
| 402 |
+
self.id_to_obj[ident] = (obj, set(exposed), method_to_typeid)
|
| 403 |
+
if ident not in self.id_to_refcount:
|
| 404 |
+
self.id_to_refcount[ident] = 0
|
| 405 |
+
|
| 406 |
+
self.incref(c, ident)
|
| 407 |
+
return ident, tuple(exposed)
|
| 408 |
+
|
| 409 |
+
def get_methods(self, c, token):
|
| 410 |
+
'''
|
| 411 |
+
Return the methods of the shared object indicated by token
|
| 412 |
+
'''
|
| 413 |
+
return tuple(self.id_to_obj[token.id][1])
|
| 414 |
+
|
| 415 |
+
def accept_connection(self, c, name):
|
| 416 |
+
'''
|
| 417 |
+
Spawn a new thread to serve this connection
|
| 418 |
+
'''
|
| 419 |
+
threading.current_thread().name = name
|
| 420 |
+
c.send(('#RETURN', None))
|
| 421 |
+
self.serve_client(c)
|
| 422 |
+
|
| 423 |
+
def incref(self, c, ident):
|
| 424 |
+
with self.mutex:
|
| 425 |
+
try:
|
| 426 |
+
self.id_to_refcount[ident] += 1
|
| 427 |
+
except KeyError as ke:
|
| 428 |
+
# If no external references exist but an internal (to the
|
| 429 |
+
# manager) still does and a new external reference is created
|
| 430 |
+
# from it, restore the manager's tracking of it from the
|
| 431 |
+
# previously stashed internal ref.
|
| 432 |
+
if ident in self.id_to_local_proxy_obj:
|
| 433 |
+
self.id_to_refcount[ident] = 1
|
| 434 |
+
self.id_to_obj[ident] = \
|
| 435 |
+
self.id_to_local_proxy_obj[ident]
|
| 436 |
+
obj, exposed, gettypeid = self.id_to_obj[ident]
|
| 437 |
+
util.debug('Server re-enabled tracking & INCREF %r', ident)
|
| 438 |
+
else:
|
| 439 |
+
raise ke
|
| 440 |
+
|
| 441 |
+
def decref(self, c, ident):
|
| 442 |
+
if ident not in self.id_to_refcount and \
|
| 443 |
+
ident in self.id_to_local_proxy_obj:
|
| 444 |
+
util.debug('Server DECREF skipping %r', ident)
|
| 445 |
+
return
|
| 446 |
+
|
| 447 |
+
with self.mutex:
|
| 448 |
+
if self.id_to_refcount[ident] <= 0:
|
| 449 |
+
raise AssertionError(
|
| 450 |
+
"Id {0!s} ({1!r}) has refcount {2:n}, not 1+".format(
|
| 451 |
+
ident, self.id_to_obj[ident],
|
| 452 |
+
self.id_to_refcount[ident]))
|
| 453 |
+
self.id_to_refcount[ident] -= 1
|
| 454 |
+
if self.id_to_refcount[ident] == 0:
|
| 455 |
+
del self.id_to_refcount[ident]
|
| 456 |
+
|
| 457 |
+
if ident not in self.id_to_refcount:
|
| 458 |
+
# Two-step process in case the object turns out to contain other
|
| 459 |
+
# proxy objects (e.g. a managed list of managed lists).
|
| 460 |
+
# Otherwise, deleting self.id_to_obj[ident] would trigger the
|
| 461 |
+
# deleting of the stored value (another managed object) which would
|
| 462 |
+
# in turn attempt to acquire the mutex that is already held here.
|
| 463 |
+
self.id_to_obj[ident] = (None, (), None) # thread-safe
|
| 464 |
+
util.debug('disposing of obj with id %r', ident)
|
| 465 |
+
with self.mutex:
|
| 466 |
+
del self.id_to_obj[ident]
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
#
|
| 470 |
+
# Class to represent state of a manager
|
| 471 |
+
#
|
| 472 |
+
|
| 473 |
+
class State(object):
    '''Mutable holder for a manager's lifecycle stage.

    `value` is one of the INITIAL / STARTED / SHUTDOWN constants below.
    '''
    __slots__ = ['value']
    INITIAL = 0
    STARTED = 1
    SHUTDOWN = 2
|
| 478 |
+
|
| 479 |
+
#
|
| 480 |
+
# Mapping from serializer name to Listener and Client types
|
| 481 |
+
#
|
| 482 |
+
|
| 483 |
+
# Serializer name -> (Listener, Client) connection factory pair.
listener_client = {
    'pickle': (connection.Listener, connection.Client),
    'xmlrpclib': (connection.XmlListener, connection.XmlClient),
}
|
| 487 |
+
|
| 488 |
+
#
|
| 489 |
+
# Definition of BaseManager
|
| 490 |
+
#
|
| 491 |
+
|
| 492 |
+
class BaseManager(object):
    '''
    Base class for managers.

    A manager controls a server process that holds shared objects; other
    processes manipulate them through proxies obtained from the manager.
    '''
    # typeid -> (callable, exposed, method_to_typeid, proxytype)
    _registry = {}
    # Server class used by _run_server(); subclasses may override.
    _Server = Server

    def __init__(self, address=None, authkey=None, serializer='pickle',
                 ctx=None):
        if authkey is None:
            authkey = process.current_process().authkey
        self._address = address     # XXX not final address if eg ('', 0)
        self._authkey = process.AuthenticationString(authkey)
        self._state = State()
        self._state.value = State.INITIAL
        self._serializer = serializer
        self._Listener, self._Client = listener_client[serializer]
        self._ctx = ctx or get_context()

    def _raise_if_not_initial(self):
        # Shared guard for get_server()/start(): both require a manager
        # that has been neither started nor shut down.
        if self._state.value != State.INITIAL:
            if self._state.value == State.STARTED:
                raise ProcessError("Already started server")
            elif self._state.value == State.SHUTDOWN:
                raise ProcessError("Manager has shut down")
            else:
                raise ProcessError(
                    "Unknown state {!r}".format(self._state.value))

    def get_server(self):
        '''
        Return server object with serve_forever() method and address attribute
        '''
        self._raise_if_not_initial()
        return Server(self._registry, self._address,
                      self._authkey, self._serializer)

    def connect(self):
        '''
        Connect manager object to the server process
        '''
        Listener, Client = listener_client[self._serializer]
        conn = Client(self._address, authkey=self._authkey)
        # 'dummy' round-trip verifies the server is reachable/authenticated.
        dispatch(conn, None, 'dummy')
        self._state.value = State.STARTED

    def start(self, initializer=None, initargs=()):
        '''
        Spawn a server process for this manager object
        '''
        self._raise_if_not_initial()

        if initializer is not None and not callable(initializer):
            raise TypeError('initializer must be a callable')

        # pipe over which we will retrieve address of server
        reader, writer = connection.Pipe(duplex=False)

        # spawn process which runs a server
        self._process = self._ctx.Process(
            target=type(self)._run_server,
            args=(self._registry, self._address, self._authkey,
                  self._serializer, writer, initializer, initargs),
            )
        ident = ':'.join(str(i) for i in self._process._identity)
        self._process.name = type(self).__name__ + '-' + ident
        self._process.start()

        # get address of server (final address may differ, e.g. ('', 0))
        writer.close()
        self._address = reader.recv()
        reader.close()

        # register a finalizer so interpreter exit shuts the server down
        self._state.value = State.STARTED
        self.shutdown = util.Finalize(
            self, type(self)._finalize_manager,
            args=(self._process, self._address, self._authkey,
                  self._state, self._Client),
            exitpriority=0
            )

    @classmethod
    def _run_server(cls, registry, address, authkey, serializer, writer,
                    initializer=None, initargs=()):
        '''
        Create a server, report its address and run it
        '''
        # bpo-36368: protect server process from KeyboardInterrupt signals
        signal.signal(signal.SIGINT, signal.SIG_IGN)

        if initializer is not None:
            initializer(*initargs)

        # create server
        server = cls._Server(registry, address, authkey, serializer)

        # inform parent process of the server's address
        writer.send(server.address)
        writer.close()

        # run the manager
        util.info('manager serving at %r', server.address)
        server.serve_forever()

    def _create(self, typeid, /, *args, **kwds):
        '''
        Create a new shared object; return the token and exposed tuple
        '''
        assert self._state.value == State.STARTED, 'server not yet started'
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            id, exposed = dispatch(conn, None, 'create', (typeid,)+args, kwds)
        finally:
            conn.close()
        return Token(typeid, self._address, id), exposed

    def join(self, timeout=None):
        '''
        Join the manager process (if it has been spawned)
        '''
        if self._process is not None:
            self._process.join(timeout)
            if not self._process.is_alive():
                self._process = None

    def _debug_info(self):
        '''
        Return some info about the servers shared objects and connections
        '''
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            return dispatch(conn, None, 'debug_info')
        finally:
            conn.close()

    def _number_of_objects(self):
        '''
        Return the number of shared objects
        '''
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            return dispatch(conn, None, 'number_of_objects')
        finally:
            conn.close()

    def __enter__(self):
        # Lazily start the server so `with Manager() as m:` works without
        # an explicit start() call.
        if self._state.value == State.INITIAL:
            self.start()
        if self._state.value != State.STARTED:
            if self._state.value == State.INITIAL:
                raise ProcessError("Unable to start server")
            elif self._state.value == State.SHUTDOWN:
                raise ProcessError("Manager has shut down")
            else:
                raise ProcessError(
                    "Unknown state {!r}".format(self._state.value))
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.shutdown()

    @staticmethod
    def _finalize_manager(process, address, authkey, state, _Client):
        '''
        Shutdown the manager process; will be registered as a finalizer
        '''
        if process.is_alive():
            util.info('sending shutdown message to manager')
            try:
                conn = _Client(address, authkey=authkey)
                try:
                    dispatch(conn, None, 'shutdown')
                finally:
                    conn.close()
            except Exception:
                pass

            # Escalate: polite join, then terminate() if still alive.
            process.join(timeout=1.0)
            if process.is_alive():
                util.info('manager still alive')
                if hasattr(process, 'terminate'):
                    util.info('trying to `terminate()` manager process')
                    process.terminate()
                    process.join(timeout=1.0)
                    if process.is_alive():
                        util.info('manager still alive after terminate')

        state.value = State.SHUTDOWN
        try:
            del BaseProxy._address_to_local[address]
        except KeyError:
            pass

    @property
    def address(self):
        return self._address

    @classmethod
    def register(cls, typeid, callable=None, proxytype=None, exposed=None,
                 method_to_typeid=None, create_method=True):
        '''
        Register a typeid with the manager type
        '''
        if '_registry' not in cls.__dict__:
            # Give this subclass its own registry instead of mutating the
            # one inherited from its parent.
            cls._registry = cls._registry.copy()

        if proxytype is None:
            proxytype = AutoProxy

        exposed = exposed or getattr(proxytype, '_exposed_', None)

        method_to_typeid = method_to_typeid or \
                           getattr(proxytype, '_method_to_typeid_', None)

        if method_to_typeid:
            for key, value in list(method_to_typeid.items()): # isinstance?
                assert type(key) is str, '%r is not a string' % key
                assert type(value) is str, '%r is not a string' % value

        cls._registry[typeid] = (
            callable, exposed, method_to_typeid, proxytype
            )

        if create_method:
            def temp(self, /, *args, **kwds):
                util.debug('requesting creation of a shared %r object', typeid)
                token, exp = self._create(typeid, *args, **kwds)
                proxy = proxytype(
                    token, self._serializer, manager=self,
                    authkey=self._authkey, exposed=exp
                    )
                # The proxy took its own reference; drop the creation ref.
                conn = self._Client(token.address, authkey=self._authkey)
                dispatch(conn, None, 'decref', (token.id,))
                return proxy
            temp.__name__ = typeid
            setattr(cls, typeid, temp)
|
| 733 |
+
|
| 734 |
+
#
|
| 735 |
+
# Subclass of set which get cleared after a fork
|
| 736 |
+
#
|
| 737 |
+
|
| 738 |
+
class ProcessLocalSet(set):
    '''A set that empties itself in the child after a fork and that
    always pickles as a fresh, empty instance.'''
    def __init__(self):
        util.register_after_fork(self, lambda obj: obj.clear())
    def __reduce__(self):
        return type(self), ()
|
| 743 |
+
|
| 744 |
+
#
|
| 745 |
+
# Definition of BaseProxy
|
| 746 |
+
#
|
| 747 |
+
|
| 748 |
+
class BaseProxy(object):
    '''
    A base for proxies of shared objects.

    A proxy forwards method calls over a connection to the manager server
    holding the referent, and participates in the server's reference
    counting so the referent stays alive while proxies exist.
    '''
    # token.address -> (thread-local connection holder, ProcessLocalSet of ids)
    _address_to_local = {}
    _mutex = util.ForkAwareThreadLock()

    def __init__(self, token, serializer, manager=None,
                 authkey=None, exposed=None, incref=True, manager_owned=False):
        with BaseProxy._mutex:
            tls_idset = BaseProxy._address_to_local.get(token.address, None)
            if tls_idset is None:
                tls_idset = util.ForkAwareLocal(), ProcessLocalSet()
                BaseProxy._address_to_local[token.address] = tls_idset

        # self._tls is used to record the connection used by this
        # thread to communicate with the manager at token.address
        self._tls = tls_idset[0]

        # self._idset is used to record the identities of all shared
        # objects for which the current process owns references and
        # which are in the manager at token.address
        self._idset = tls_idset[1]

        self._token = token
        self._id = self._token.id
        self._manager = manager
        self._serializer = serializer
        self._Client = listener_client[serializer][1]

        # Should be set to True only when a proxy object is being created
        # on the manager server; primary use case: nested proxy objects.
        # RebuildProxy detects when a proxy is being created on the manager
        # and sets this value appropriately.
        self._owned_by_manager = manager_owned

        # Auth key resolution order: explicit arg, then manager, then the
        # current process's key.
        if authkey is not None:
            self._authkey = process.AuthenticationString(authkey)
        elif self._manager is not None:
            self._authkey = self._manager._authkey
        else:
            self._authkey = process.current_process().authkey

        if incref:
            self._incref()

        util.register_after_fork(self, BaseProxy._after_fork)

    def _connect(self):
        '''Open (and register with the server) this thread's connection.'''
        util.debug('making connection to manager')
        name = process.current_process().name
        if threading.current_thread().name != 'MainThread':
            name += '|' + threading.current_thread().name
        conn = self._Client(self._token.address, authkey=self._authkey)
        dispatch(conn, None, 'accept_connection', (name,))
        self._tls.connection = conn

    def _callmethod(self, methodname, args=(), kwds={}):
        '''
        Try to call a method of the referent and return a copy of the result
        '''
        try:
            conn = self._tls.connection
        except AttributeError:
            # First call from this thread: lazily establish a connection.
            util.debug('thread %r does not own a connection',
                       threading.current_thread().name)
            self._connect()
            conn = self._tls.connection

        conn.send((self._id, methodname, args, kwds))
        kind, result = conn.recv()

        if kind == '#RETURN':
            return result
        elif kind == '#PROXY':
            # The server created a shared object; wrap it in a new proxy
            # and immediately drop the creation reference held for us.
            exposed, token = result
            proxytype = self._manager._registry[token.typeid][-1]
            token.address = self._token.address
            proxy = proxytype(
                token, self._serializer, manager=self._manager,
                authkey=self._authkey, exposed=exposed
                )
            conn = self._Client(token.address, authkey=self._authkey)
            dispatch(conn, None, 'decref', (token.id,))
            return proxy
        raise convert_to_error(kind, result)

    def _getvalue(self):
        '''
        Get a copy of the value of the referent
        '''
        return self._callmethod('#GETVALUE')

    def _incref(self):
        if self._owned_by_manager:
            # Server-side proxies do not take external references.
            util.debug('owned_by_manager skipped INCREF of %r', self._token.id)
            return

        conn = self._Client(self._token.address, authkey=self._authkey)
        dispatch(conn, None, 'incref', (self._id,))
        util.debug('INCREF %r', self._token.id)

        self._idset.add(self._id)

        state = self._manager and self._manager._state

        # Finalizer sends the matching DECREF when the proxy dies.
        self._close = util.Finalize(
            self, BaseProxy._decref,
            args=(self._token, self._authkey, state,
                  self._tls, self._idset, self._Client),
            exitpriority=10
            )

    @staticmethod
    def _decref(token, authkey, state, tls, idset, _Client):
        idset.discard(token.id)

        # check whether manager is still alive
        if state is None or state.value == State.STARTED:
            # tell manager this process no longer cares about referent
            try:
                util.debug('DECREF %r', token.id)
                conn = _Client(token.address, authkey=authkey)
                dispatch(conn, None, 'decref', (token.id,))
            except Exception as e:
                util.debug('... decref failed %s', e)

        else:
            util.debug('DECREF %r -- manager already shutdown', token.id)

        # check whether we can close this thread's connection because
        # the process owns no more references to objects for this manager
        if not idset and hasattr(tls, 'connection'):
            util.debug('thread %r has no more proxies so closing conn',
                       threading.current_thread().name)
            tls.connection.close()
            del tls.connection

    def _after_fork(self):
        self._manager = None
        try:
            self._incref()
        except Exception as e:
            # the proxy may just be for a manager which has shutdown
            util.info('incref failed: %s' % e)

    def __reduce__(self):
        kwds = {}
        if get_spawning_popen() is not None:
            kwds['authkey'] = self._authkey

        if getattr(self, '_isauto', False):
            kwds['exposed'] = self._exposed_
            return (RebuildProxy,
                    (AutoProxy, self._token, self._serializer, kwds))
        else:
            return (RebuildProxy,
                    (type(self), self._token, self._serializer, kwds))

    def __deepcopy__(self, memo):
        return self._getvalue()

    def __repr__(self):
        return '<%s object, typeid %r at %#x>' % \
               (type(self).__name__, self._token.typeid, id(self))

    def __str__(self):
        '''
        Return representation of the referent (or a fall-back if that fails)
        '''
        try:
            return self._callmethod('__repr__')
        except Exception:
            return repr(self)[:-1] + "; '__str__()' failed>"
|
| 922 |
+
|
| 923 |
+
#
|
| 924 |
+
# Function used for unpickling
|
| 925 |
+
#
|
| 926 |
+
|
| 927 |
+
def RebuildProxy(func, token, serializer, kwds):
    '''
    Function used for unpickling proxy objects.
    '''
    server = getattr(process.current_process(), '_manager_server', None)
    if server and server.address == token.address:
        # Unpickling inside the manager process itself: build a
        # manager-owned proxy and stash the referent so that incref()
        # can later restore tracking from this internal reference.
        util.debug('Rebuild a proxy owned by manager, token=%r', token)
        kwds['manager_owned'] = True
        if token.id not in server.id_to_local_proxy_obj:
            server.id_to_local_proxy_obj[token.id] = \
                server.id_to_obj[token.id]
    # Skip the incref while inheriting state during child bootstrap.
    incref = (
        kwds.pop('incref', True) and
        not getattr(process.current_process(), '_inheriting', False)
    )
    return func(token, serializer, incref=incref, **kwds)
|
| 943 |
+
|
| 944 |
+
#
|
| 945 |
+
# Functions to create proxies and proxy types
|
| 946 |
+
#
|
| 947 |
+
|
| 948 |
+
def MakeProxyType(name, exposed, _cache={}):
    '''
    Return a proxy type whose methods are given by `exposed`
    '''
    # _cache is an intentional mutable default: it memoizes generated
    # proxy types across calls, keyed by (name, exposed).
    exposed = tuple(exposed)
    try:
        return _cache[(name, exposed)]
    except KeyError:
        pass

    namespace = {}
    for meth in exposed:
        # Generate a forwarding method per exposed name.
        exec('''def %s(self, /, *args, **kwds):
        return self._callmethod(%r, args, kwds)''' % (meth, meth), namespace)

    ProxyType = type(name, (BaseProxy,), namespace)
    ProxyType._exposed_ = exposed
    _cache[(name, exposed)] = ProxyType
    return ProxyType
|
| 968 |
+
|
| 969 |
+
|
| 970 |
+
def AutoProxy(token, serializer, manager=None, authkey=None,
              exposed=None, incref=True, manager_owned=False):
    '''
    Return an auto-proxy for `token`
    '''
    _Client = listener_client[serializer][1]

    if exposed is None:
        # Ask the server which methods the referent exposes.
        conn = _Client(token.address, authkey=authkey)
        try:
            exposed = dispatch(conn, None, 'get_methods', (token,))
        finally:
            conn.close()

    # Auth key resolution: manager's key, then the current process's key.
    if authkey is None and manager is not None:
        authkey = manager._authkey
    if authkey is None:
        authkey = process.current_process().authkey

    ProxyType = MakeProxyType('AutoProxy[%s]' % token.typeid, exposed)
    proxy = ProxyType(token, serializer, manager=manager, authkey=authkey,
                      incref=incref, manager_owned=manager_owned)
    proxy._isauto = True  # so __reduce__ rebuilds via AutoProxy
    return proxy
|
| 994 |
+
|
| 995 |
+
#
|
| 996 |
+
# Types/callables which we will register with SyncManager
|
| 997 |
+
#
|
| 998 |
+
|
| 999 |
+
class Namespace(object):
    '''Simple attribute container; keyword arguments become attributes.'''
    def __init__(self, /, **kwds):
        self.__dict__.update(kwds)
    def __repr__(self):
        # Show only public attributes, sorted for a stable representation.
        shown = sorted(
            '%s=%r' % (name, value)
            for name, value in list(self.__dict__.items())
            if not name.startswith('_')
        )
        return '%s(%s)' % (self.__class__.__name__, ', '.join(shown))
|
| 1010 |
+
|
| 1011 |
+
class Value(object):
    '''Plain holder pairing a typecode with a single value.

    The `lock` argument is accepted for API compatibility but unused:
    synchronization is provided by the manager, not locally.
    '''
    def __init__(self, typecode, value, lock=True):
        self._typecode = typecode
        self._value = value
    def get(self):
        '''Return the stored value.'''
        return self._value
    def set(self, value):
        '''Replace the stored value.'''
        self._value = value
    def __repr__(self):
        return '%s(%r, %r)' % (type(self).__name__, self._typecode, self._value)
    value = property(get, set)
|
| 1022 |
+
|
| 1023 |
+
def Array(typecode, sequence, lock=True):
    '''Build an array.array from `sequence`; `lock` is accepted but unused.'''
    return array.array(typecode, sequence)
|
| 1025 |
+
|
| 1026 |
+
#
|
| 1027 |
+
# Proxy types used by SyncManager
|
| 1028 |
+
#
|
| 1029 |
+
|
| 1030 |
+
class IteratorProxy(BaseProxy):
    '''Proxy for a remote iterator/generator.'''
    _exposed_ = ('__next__', 'send', 'throw', 'close')
    def __iter__(self):
        # The proxy itself is the local iterator object.
        return self
    def __next__(self, *args):
        return self._callmethod('__next__', args)
    def send(self, *args):
        return self._callmethod('send', args)
    def throw(self, *args):
        return self._callmethod('throw', args)
    def close(self, *args):
        return self._callmethod('close', args)
|
| 1042 |
+
|
| 1043 |
+
|
| 1044 |
+
class AcquirerProxy(BaseProxy):
    '''Proxy for lock-like objects; usable as a context manager.'''
    _exposed_ = ('acquire', 'release')
    def acquire(self, blocking=True, timeout=None):
        # Only forward `timeout` when given, so remote locks that do not
        # accept a timeout argument keep working.
        if timeout is None:
            args = (blocking,)
        else:
            args = (blocking, timeout)
        return self._callmethod('acquire', args)
    def release(self):
        return self._callmethod('release')
    def __enter__(self):
        return self._callmethod('acquire')
    def __exit__(self, exc_type, exc_val, exc_tb):
        return self._callmethod('release')
|
| 1055 |
+
|
| 1056 |
+
|
| 1057 |
+
class ConditionProxy(AcquirerProxy):
    '''Proxy for threading.Condition, including a local wait_for() loop.'''
    _exposed_ = ('acquire', 'release', 'wait', 'notify', 'notify_all')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
    def notify(self, n=1):
        return self._callmethod('notify', (n,))
    def notify_all(self):
        return self._callmethod('notify_all')
    def wait_for(self, predicate, timeout=None):
        # Mirrors threading.Condition.wait_for: re-test the (local)
        # predicate after each remote wait, honouring an overall deadline.
        result = predicate()
        if result:
            return result
        endtime = None if timeout is None else time.monotonic() + timeout
        waittime = None
        while not result:
            if endtime is not None:
                waittime = endtime - time.monotonic()
                if waittime <= 0:
                    break
            self.wait(waittime)
            result = predicate()
        return result
|
| 1082 |
+
|
| 1083 |
+
|
| 1084 |
+
class EventProxy(BaseProxy):
    '''Proxy for threading.Event.'''
    _exposed_ = ('is_set', 'set', 'clear', 'wait')
    def is_set(self):
        return self._callmethod('is_set')
    def set(self):
        return self._callmethod('set')
    def clear(self):
        return self._callmethod('clear')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
|
| 1094 |
+
|
| 1095 |
+
|
| 1096 |
+
class BarrierProxy(BaseProxy):
    '''Proxy for threading.Barrier; attributes are read via __getattribute__.'''
    _exposed_ = ('__getattribute__', 'wait', 'abort', 'reset')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
    def abort(self):
        return self._callmethod('abort')
    def reset(self):
        return self._callmethod('reset')
    @property
    def parties(self):
        return self._callmethod('__getattribute__', ('parties',))
    @property
    def n_waiting(self):
        return self._callmethod('__getattribute__', ('n_waiting',))
    @property
    def broken(self):
        return self._callmethod('__getattribute__', ('broken',))
|
| 1113 |
+
|
| 1114 |
+
|
| 1115 |
+
class NamespaceProxy(BaseProxy):
    '''Proxy forwarding attribute access to a remote Namespace.

    Names starting with '_' are handled locally (proxy internals);
    everything else is forwarded to the referent.
    '''
    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__')
    def __getattr__(self, key):
        if key[0] == '_':
            return object.__getattribute__(self, key)
        call = object.__getattribute__(self, '_callmethod')
        return call('__getattribute__', (key,))
    def __setattr__(self, key, value):
        if key[0] == '_':
            return object.__setattr__(self, key, value)
        call = object.__getattribute__(self, '_callmethod')
        return call('__setattr__', (key, value))
    def __delattr__(self, key):
        if key[0] == '_':
            return object.__delattr__(self, key)
        call = object.__getattribute__(self, '_callmethod')
        return call('__delattr__', (key,))
|
| 1132 |
+
|
| 1133 |
+
|
| 1134 |
+
class ValueProxy(BaseProxy):
    '''Proxy for the Value holder, exposing get/set and a `value` property.'''
    _exposed_ = ('get', 'set')
    def get(self):
        return self._callmethod('get')
    def set(self, value):
        return self._callmethod('set', (value,))
    value = property(get, set)

    __class_getitem__ = classmethod(types.GenericAlias)
|
| 1143 |
+
|
| 1144 |
+
|
| 1145 |
+
BaseListProxy = MakeProxyType('BaseListProxy', (
    '__add__', '__contains__', '__delitem__', '__getitem__', '__len__',
    '__mul__', '__reversed__', '__rmul__', '__setitem__',
    'append', 'count', 'extend', 'index', 'insert', 'pop', 'remove',
    'reverse', 'sort', '__imul__'
    ))
class ListProxy(BaseListProxy):
    # In-place operators must return the proxy itself (forwarding them
    # verbatim would rebind the caller's name to a plain result).
    def __iadd__(self, value):
        self._callmethod('extend', (value,))
        return self
    def __imul__(self, value):
        self._callmethod('__imul__', (value,))
        return self
|
| 1158 |
+
|
| 1159 |
+
|
| 1160 |
+
DictProxy = MakeProxyType('DictProxy', (
    '__contains__', '__delitem__', '__getitem__', '__iter__', '__len__',
    '__setitem__', 'clear', 'copy', 'get', 'items',
    'keys', 'pop', 'popitem', 'setdefault', 'update', 'values'
    ))
# Iterating a managed dict yields a proxied iterator, not a copied one.
DictProxy._method_to_typeid_ = {
    '__iter__': 'Iterator',
    }
|
| 1168 |
+
|
| 1169 |
+
|
| 1170 |
+
# Minimal sequence interface over a managed array.array.
ArrayProxy = MakeProxyType('ArrayProxy', (
    '__len__', '__getitem__', '__setitem__'
    ))
|
| 1173 |
+
|
| 1174 |
+
|
| 1175 |
+
BasePoolProxy = MakeProxyType('PoolProxy', (
    'apply', 'apply_async', 'close', 'imap', 'imap_unordered', 'join',
    'map', 'map_async', 'starmap', 'starmap_async', 'terminate',
    ))
# Async/iterator-returning methods hand back proxies to the real results.
BasePoolProxy._method_to_typeid_ = {
    'apply_async': 'AsyncResult',
    'map_async': 'AsyncResult',
    'starmap_async': 'AsyncResult',
    'imap': 'Iterator',
    'imap_unordered': 'Iterator'
    }
class PoolProxy(BasePoolProxy):
    # Context-manager support mirroring multiprocessing.Pool.
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.terminate()
|
| 1191 |
+
|
| 1192 |
+
#
|
| 1193 |
+
# Definition of SyncManager
|
| 1194 |
+
#
|
| 1195 |
+
|
| 1196 |
+
class SyncManager(BaseManager):
    '''
    Subclass of `BaseManager` which supports a number of shared object types.

    The types registered are those intended for the synchronization
    of threads, plus `dict`, `list` and `Namespace`.

    The `multiprocessing.Manager()` function creates started instances of
    this class.
    '''

SyncManager.register('Queue', queue.Queue)
SyncManager.register('JoinableQueue', queue.Queue)
SyncManager.register('Event', threading.Event, EventProxy)
SyncManager.register('Lock', threading.Lock, AcquirerProxy)
SyncManager.register('RLock', threading.RLock, AcquirerProxy)
SyncManager.register('Semaphore', threading.Semaphore, AcquirerProxy)
SyncManager.register('BoundedSemaphore', threading.BoundedSemaphore,
                     AcquirerProxy)
SyncManager.register('Condition', threading.Condition, ConditionProxy)
SyncManager.register('Barrier', threading.Barrier, BarrierProxy)
SyncManager.register('Pool', pool.Pool, PoolProxy)
SyncManager.register('list', list, ListProxy)
SyncManager.register('dict', dict, DictProxy)
SyncManager.register('Value', Value, ValueProxy)
SyncManager.register('Array', Array, ArrayProxy)
SyncManager.register('Namespace', Namespace, NamespaceProxy)

# types returned by methods of PoolProxy
SyncManager.register('Iterator', proxytype=IteratorProxy, create_method=False)
SyncManager.register('AsyncResult', create_method=False)
|
| 1227 |
+
|
| 1228 |
+
#
|
| 1229 |
+
# Definition of SharedMemoryManager and SharedMemoryServer
|
| 1230 |
+
#
|
| 1231 |
+
|
| 1232 |
+
if HAS_SHMEM:
    class _SharedMemoryTracker:
        "Manages one or more shared memory segments."

        def __init__(self, name, segment_names=None):
            # FIX: previously ``segment_names=[]`` -- a mutable default is
            # evaluated once, so every tracker created without an explicit
            # list shared the *same* list and one tracker's registered
            # segments leaked into (and were unlinked by) another.  Use a
            # None sentinel and allocate a fresh list per instance.
            self.shared_memory_context_name = name
            self.segment_names = [] if segment_names is None else segment_names

        def register_segment(self, segment_name):
            "Adds the supplied shared memory block name to tracker."
            util.debug(f"Register segment {segment_name!r} in pid {getpid()}")
            self.segment_names.append(segment_name)

        def destroy_segment(self, segment_name):
            """Calls unlink() on the shared memory block with the supplied name
            and removes it from the list of blocks being tracked."""
            util.debug(f"Destroy segment {segment_name!r} in pid {getpid()}")
            self.segment_names.remove(segment_name)
            segment = shared_memory.SharedMemory(segment_name)
            segment.close()
            segment.unlink()

        def unlink(self):
            "Calls destroy_segment() on all tracked shared memory blocks."
            # Iterate over a copy: destroy_segment() mutates segment_names.
            for segment_name in self.segment_names[:]:
                self.destroy_segment(segment_name)

        def __del__(self):
            util.debug(f"Call {self.__class__.__name__}.__del__ in {getpid()}")
            self.unlink()

        def __getstate__(self):
            # Picklable as a simple (name, segment_names) pair so the tracker
            # can be shipped to proxy-creating processes.
            return (self.shared_memory_context_name, self.segment_names)

        def __setstate__(self, state):
            self.__init__(*state)


    class SharedMemoryServer(Server):
        """Server subclass that additionally tracks shared memory segments
        created on behalf of its clients, unlinking them at shutdown."""

        public = Server.public + \
                 ['track_segment', 'release_segment', 'list_segments']

        def __init__(self, *args, **kwargs):
            Server.__init__(self, *args, **kwargs)
            address = self.address
            # The address of Linux abstract namespaces can be bytes
            if isinstance(address, bytes):
                address = os.fsdecode(address)
            self.shared_memory_context = \
                _SharedMemoryTracker(f"shm_{address}_{getpid()}")
            util.debug(f"SharedMemoryServer started by pid {getpid()}")

        def create(self, c, typeid, /, *args, **kwargs):
            """Create a new distributed-shared object (not backed by a shared
            memory block) and return its id to be used in a Proxy Object."""
            # Unless set up as a shared proxy, don't make shared_memory_context
            # a standard part of kwargs.  This makes things easier for supplying
            # simple functions.
            if hasattr(self.registry[typeid][-1], "_shared_memory_proxy"):
                kwargs['shared_memory_context'] = self.shared_memory_context
            return Server.create(self, c, typeid, *args, **kwargs)

        def shutdown(self, c):
            "Call unlink() on all tracked shared memory, terminate the Server."
            self.shared_memory_context.unlink()
            return Server.shutdown(self, c)

        def track_segment(self, c, segment_name):
            "Adds the supplied shared memory block name to Server's tracker."
            self.shared_memory_context.register_segment(segment_name)

        def release_segment(self, c, segment_name):
            """Calls unlink() on the shared memory block with the supplied name
            and removes it from the tracker instance inside the Server."""
            self.shared_memory_context.destroy_segment(segment_name)

        def list_segments(self, c):
            """Returns a list of names of shared memory blocks that the Server
            is currently tracking."""
            return self.shared_memory_context.segment_names


    class SharedMemoryManager(BaseManager):
        """Like SyncManager but uses SharedMemoryServer instead of Server.

        It provides methods for creating and returning SharedMemory instances
        and for creating a list-like object (ShareableList) backed by shared
        memory.  It also provides methods that create and return Proxy Objects
        that support synchronization across processes (i.e. multi-process-safe
        locks and semaphores).
        """

        _Server = SharedMemoryServer

        def __init__(self, *args, **kwargs):
            if os.name == "posix":
                # bpo-36867: Ensure the resource_tracker is running before
                # launching the manager process, so that concurrent
                # shared_memory manipulation both in the manager and in the
                # current process does not create two resource_tracker
                # processes.
                from . import resource_tracker
                resource_tracker.ensure_running()
            BaseManager.__init__(self, *args, **kwargs)
            util.debug(f"{self.__class__.__name__} created by pid {getpid()}")

        def __del__(self):
            # Debug hook only; actual cleanup is driven by shutdown()/server.
            util.debug(f"{self.__class__.__name__}.__del__ by pid {getpid()}")

        def get_server(self):
            'Better than monkeypatching for now; merge into Server ultimately'
            if self._state.value != State.INITIAL:
                if self._state.value == State.STARTED:
                    raise ProcessError("Already started SharedMemoryServer")
                elif self._state.value == State.SHUTDOWN:
                    raise ProcessError("SharedMemoryManager has shut down")
                else:
                    raise ProcessError(
                        "Unknown state {!r}".format(self._state.value))
            return self._Server(self._registry, self._address,
                                self._authkey, self._serializer)

        def SharedMemory(self, size):
            """Returns a new SharedMemory instance with the specified size in
            bytes, to be tracked by the manager."""
            with self._Client(self._address, authkey=self._authkey) as conn:
                sms = shared_memory.SharedMemory(None, create=True, size=size)
                try:
                    dispatch(conn, None, 'track_segment', (sms.name,))
                except BaseException as e:
                    # The server never saw the segment; unlink it here or it
                    # would leak.
                    sms.unlink()
                    raise e
                return sms

        def ShareableList(self, sequence):
            """Returns a new ShareableList instance populated with the values
            from the input sequence, to be tracked by the manager."""
            with self._Client(self._address, authkey=self._authkey) as conn:
                sl = shared_memory.ShareableList(sequence)
                try:
                    dispatch(conn, None, 'track_segment', (sl.shm.name,))
                except BaseException as e:
                    sl.shm.unlink()
                    raise e
                return sl
|
deepseek/lib/python3.10/multiprocessing/popen_forkserver.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import os
|
| 3 |
+
|
| 4 |
+
from .context import reduction, set_spawning_popen
|
| 5 |
+
if not reduction.HAVE_SEND_HANDLE:
|
| 6 |
+
raise ImportError('No support for sending fds between processes')
|
| 7 |
+
from . import forkserver
|
| 8 |
+
from . import popen_fork
|
| 9 |
+
from . import spawn
|
| 10 |
+
from . import util
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
__all__ = ['Popen']
|
| 14 |
+
|
| 15 |
+
#
|
| 16 |
+
# Wrapper for an fd used while launching a process
|
| 17 |
+
#
|
| 18 |
+
|
| 19 |
+
class _DupFd(object):
    """Placeholder for a file descriptor inherited through the forkserver.

    Stores only the *index* of the fd in the list that the forkserver
    passes to the child; the real fd is resolved lazily in detach().
    """

    def __init__(self, ind):
        # Position of the fd within forkserver.get_inherited_fds().
        self.ind = ind

    def detach(self):
        """Resolve and return the actual inherited file descriptor."""
        inherited = forkserver.get_inherited_fds()
        return inherited[self.ind]
|
| 24 |
+
|
| 25 |
+
#
|
| 26 |
+
# Start child process using a server process
|
| 27 |
+
#
|
| 28 |
+
|
| 29 |
+
class Popen(popen_fork.Popen):
    """Popen implementation that starts the child via the forkserver process."""

    method = 'forkserver'
    DupFd = _DupFd

    def __init__(self, process_obj):
        # fds to be passed to the forkserver for the child to inherit.
        self._fds = []
        super().__init__(process_obj)

    def duplicate_for_child(self, fd):
        """Record *fd* for inheritance; return its index (see _DupFd)."""
        self._fds.append(fd)
        return len(self._fds) - 1

    def _launch(self, process_obj):
        # Pickle the preparation data and the process object while the
        # "spawning popen" context is set, so reduction knows how to
        # serialize fds (via DupFd above).
        prep_data = spawn.get_preparation_data(process_obj._name)
        buf = io.BytesIO()
        set_spawning_popen(self)
        try:
            reduction.dump(prep_data, buf)
            reduction.dump(process_obj, buf)
        finally:
            set_spawning_popen(None)

        self.sentinel, w = forkserver.connect_to_new_process(self._fds)
        # Keep a duplicate of the data pipe's write end as a sentinel of the
        # parent process used by the child process.
        _parent_w = os.dup(w)
        self.finalizer = util.Finalize(self, util.close_fds,
                                       (_parent_w, self.sentinel))
        with open(w, 'wb', closefd=True) as f:
            f.write(buf.getbuffer())
        # The forkserver reports the child's pid over the sentinel pipe.
        self.pid = forkserver.read_signed(self.sentinel)

    def poll(self, flag=os.WNOHANG):
        """Return the child's exit code, or None if still running.

        The exit status is read from the forkserver over the sentinel
        pipe rather than via os.waitpid (the child is not our direct child).
        """
        if self.returncode is None:
            from multiprocessing.connection import wait
            timeout = 0 if flag == os.WNOHANG else None
            if not wait([self.sentinel], timeout):
                return None
            try:
                self.returncode = forkserver.read_signed(self.sentinel)
            except (OSError, EOFError):
                # This should not happen usually, but perhaps the forkserver
                # process itself got killed
                self.returncode = 255

        return self.returncode
|
deepseek/lib/python3.10/multiprocessing/popen_spawn_posix.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import os
|
| 3 |
+
|
| 4 |
+
from .context import reduction, set_spawning_popen
|
| 5 |
+
from . import popen_fork
|
| 6 |
+
from . import spawn
|
| 7 |
+
from . import util
|
| 8 |
+
|
| 9 |
+
__all__ = ['Popen']
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
#
|
| 13 |
+
# Wrapper for an fd used while launching a process
|
| 14 |
+
#
|
| 15 |
+
|
| 16 |
+
class _DupFd(object):
    """Trivial fd wrapper used while spawning: holds an fd and hands it back.

    Unlike the forkserver variant, the fd number itself is preserved in the
    child, so no indirection is needed.
    """

    def __init__(self, fd):
        self.fd = fd

    def detach(self):
        """Return the wrapped file descriptor unchanged."""
        return self.fd
|
| 21 |
+
|
| 22 |
+
#
|
| 23 |
+
# Start child process using a fresh interpreter
|
| 24 |
+
#
|
| 25 |
+
|
| 26 |
+
class Popen(popen_fork.Popen):
    """Popen implementation that execs a fresh interpreter ('spawn' method)."""

    method = 'spawn'
    DupFd = _DupFd

    def __init__(self, process_obj):
        # fds the child must inherit across exec.
        self._fds = []
        super().__init__(process_obj)

    def duplicate_for_child(self, fd):
        """Mark *fd* as inheritable; fd numbers survive exec unchanged."""
        self._fds.append(fd)
        return fd

    def _launch(self, process_obj):
        from . import resource_tracker
        tracker_fd = resource_tracker.getfd()
        self._fds.append(tracker_fd)
        # Serialize preparation data + process object for the child to read
        # from its end of the pipe.
        prep_data = spawn.get_preparation_data(process_obj._name)
        fp = io.BytesIO()
        set_spawning_popen(self)
        try:
            reduction.dump(prep_data, fp)
            reduction.dump(process_obj, fp)
        finally:
            set_spawning_popen(None)

        # Two pipes: parent_r/child_w is the liveness sentinel,
        # child_r/parent_w carries the pickled payload.
        parent_r = child_w = child_r = parent_w = None
        try:
            parent_r, child_w = os.pipe()
            child_r, parent_w = os.pipe()
            cmd = spawn.get_command_line(tracker_fd=tracker_fd,
                                         pipe_handle=child_r)
            self._fds.extend([child_r, child_w])
            self.pid = util.spawnv_passfds(spawn.get_executable(),
                                           cmd, self._fds)
            self.sentinel = parent_r
            # closefd=False: parent_w is closed by the finalizer below, not
            # by this file object.
            with open(parent_w, 'wb', closefd=False) as f:
                f.write(fp.getbuffer())
        finally:
            # Parent-side fds live until the Popen object is finalized;
            # child-side fds are closed here (the child holds its own copies).
            fds_to_close = []
            for fd in (parent_r, parent_w):
                if fd is not None:
                    fds_to_close.append(fd)
            self.finalizer = util.Finalize(self, util.close_fds, fds_to_close)

            for fd in (child_r, child_w):
                if fd is not None:
                    os.close(fd)
|
deepseek/lib/python3.10/multiprocessing/util.py
ADDED
|
@@ -0,0 +1,489 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Module providing various facilities to other parts of the package
|
| 3 |
+
#
|
| 4 |
+
# multiprocessing/util.py
|
| 5 |
+
#
|
| 6 |
+
# Copyright (c) 2006-2008, R Oudkerk
|
| 7 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 8 |
+
#
|
| 9 |
+
|
| 10 |
+
import os
|
| 11 |
+
import itertools
|
| 12 |
+
import sys
|
| 13 |
+
import weakref
|
| 14 |
+
import atexit
|
| 15 |
+
import threading # we want threading to install it's
|
| 16 |
+
# cleanup function before multiprocessing does
|
| 17 |
+
from subprocess import _args_from_interpreter_flags
|
| 18 |
+
|
| 19 |
+
from . import process
|
| 20 |
+
|
| 21 |
+
__all__ = [
    'sub_debug', 'debug', 'info', 'sub_warning', 'get_logger',
    'log_to_stderr', 'get_temp_dir', 'register_after_fork',
    'is_exiting', 'Finalize', 'ForkAwareThreadLock', 'ForkAwareLocal',
    'close_all_fds_except', 'SUBDEBUG', 'SUBWARNING',
    ]

#
# Logging
#

# Custom levels interleaved with the stdlib logging levels:
# SUBDEBUG (5) is chattier than DEBUG; SUBWARNING (25) sits between
# INFO and WARNING.
NOTSET = 0
SUBDEBUG = 5
DEBUG = 10
INFO = 20
SUBWARNING = 25

LOGGER_NAME = 'multiprocessing'
DEFAULT_LOGGING_FORMAT = '[%(levelname)s/%(processName)s] %(message)s'

# Lazily created by get_logger(); all helpers below are no-ops while None.
_logger = None
_log_to_stderr = False
|
| 43 |
+
|
| 44 |
+
def sub_debug(msg, *args):
    # Log at SUBDEBUG level; silently does nothing until get_logger()
    # (or log_to_stderr()) has created the module logger.
    if _logger:
        _logger.log(SUBDEBUG, msg, *args)
|
| 47 |
+
|
| 48 |
+
def debug(msg, *args):
    # Log at DEBUG level; no-op while the module logger is uninitialized.
    if _logger:
        _logger.log(DEBUG, msg, *args)
|
| 51 |
+
|
| 52 |
+
def info(msg, *args):
    # Log at INFO level; no-op while the module logger is uninitialized.
    if _logger:
        _logger.log(INFO, msg, *args)
|
| 55 |
+
|
| 56 |
+
def sub_warning(msg, *args):
    # Log at SUBWARNING level; no-op while the module logger is uninitialized.
    if _logger:
        _logger.log(SUBWARNING, msg, *args)
|
| 59 |
+
|
| 60 |
+
def get_logger():
    '''
    Returns logger used by multiprocessing
    '''
    global _logger
    import logging

    # Guard creation with logging's own module lock so two threads cannot
    # both initialize _logger.
    logging._acquireLock()
    try:
        if not _logger:

            _logger = logging.getLogger(LOGGER_NAME)
            _logger.propagate = 0

            # XXX multiprocessing should cleanup before logging
            # Re-register _exit_function so it lands *after* logging's own
            # atexit handler in registration order (atexit runs handlers
            # LIFO, so our cleanup runs before logging shuts down).
            if hasattr(atexit, 'unregister'):
                atexit.unregister(_exit_function)
                atexit.register(_exit_function)
            else:
                atexit._exithandlers.remove((_exit_function, (), {}))
                atexit._exithandlers.append((_exit_function, (), {}))

    finally:
        logging._releaseLock()

    return _logger
|
| 86 |
+
|
| 87 |
+
def log_to_stderr(level=None):
    '''
    Turn on logging and add a handler which prints to stderr

    Returns the multiprocessing logger.  Note: each call attaches a new
    StreamHandler, so repeated calls duplicate output lines.
    '''
    global _log_to_stderr
    import logging

    logger = get_logger()
    formatter = logging.Formatter(DEFAULT_LOGGING_FORMAT)
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    if level:
        logger.setLevel(level)
    _log_to_stderr = True
    # _logger was populated by get_logger(); same object as `logger`.
    return _logger
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
# Abstract socket support
|
| 107 |
+
|
| 108 |
+
def _platform_supports_abstract_sockets():
    """Return True where abstract AF_UNIX namespaces exist (Linux/Android)."""
    # `getandroidapilevel` only exists on Android builds of CPython.
    return sys.platform == "linux" or hasattr(sys, 'getandroidapilevel')
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def is_abstract_socket_namespace(address):
    """Return True if *address* denotes a Linux abstract-namespace socket.

    Abstract addresses start with a NUL byte/character.  A falsy address
    (None, empty str/bytes) is never abstract; any other type raises
    TypeError.
    """
    if not address:
        return False
    if isinstance(address, str):
        return address.startswith("\0")
    if isinstance(address, bytes):
        return address.startswith(b"\0")
    raise TypeError(f'address type of {address!r} unrecognized')
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
abstract_sockets_supported = _platform_supports_abstract_sockets()  # computed once at import time
|
| 127 |
+
|
| 128 |
+
#
|
| 129 |
+
# Function returning a temp directory which will be removed on exit
|
| 130 |
+
#
|
| 131 |
+
|
| 132 |
+
def _remove_temp_dir(rmtree, tempdir):
    # rmtree is passed in explicitly so the finalizer holds a strong
    # reference to it even during interpreter shutdown (see get_temp_dir).
    rmtree(tempdir)

    current_process = process.current_process()
    # current_process() can be None if the finalizer is called
    # late during Python finalization
    if current_process is not None:
        current_process._config['tempdir'] = None
|
| 140 |
+
|
| 141 |
+
def get_temp_dir():
    # get name of a temp directory which will be automatically cleaned up
    # (created lazily, cached in the current process's config dict, and
    # removed by a low-priority Finalize at process exit).
    tempdir = process.current_process()._config.get('tempdir')
    if tempdir is None:
        import shutil, tempfile
        tempdir = tempfile.mkdtemp(prefix='pymp-')
        info('created temp directory %s', tempdir)
        # keep a strong reference to shutil.rmtree(), since the finalizer
        # can be called late during Python shutdown
        Finalize(None, _remove_temp_dir, args=(shutil.rmtree, tempdir),
                 exitpriority=-100)
        process.current_process()._config['tempdir'] = tempdir
    return tempdir
|
| 154 |
+
|
| 155 |
+
#
|
| 156 |
+
# Support for reinitialization of objects when bootstrapping a child process
|
| 157 |
+
#
|
| 158 |
+
|
| 159 |
+
# Maps (creation index, id(obj), func) -> obj.  Weak values: entries vanish
# when the registered object is garbage collected.
_afterfork_registry = weakref.WeakValueDictionary()
_afterfork_counter = itertools.count()  # preserves registration order
|
| 161 |
+
|
| 162 |
+
def _run_after_forkers():
    # Invoke every registered after-fork callback in registration order
    # (the counter is the first element of each key, so sorting suffices).
    items = list(_afterfork_registry.items())
    items.sort()
    for (index, ident, func), obj in items:
        try:
            func(obj)
        except Exception as e:
            # A failing callback must not prevent the others from running.
            info('after forker raised exception %s', e)
|
| 170 |
+
|
| 171 |
+
def register_after_fork(obj, func):
    # Arrange for func(obj) to run in the child after a fork.  The entry
    # disappears automatically when obj is garbage collected (weak value).
    _afterfork_registry[(next(_afterfork_counter), id(obj), func)] = obj
|
| 173 |
+
|
| 174 |
+
#
|
| 175 |
+
# Finalization using weakrefs
|
| 176 |
+
#
|
| 177 |
+
|
| 178 |
+
# Maps (exitpriority, creation index) -> Finalize instance.  The key
# ordering drives the order in which _run_finalizers() calls them.
_finalizer_registry = {}
_finalizer_counter = itertools.count()
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
class Finalize(object):
    '''
    Class which supports object finalization using weakrefs

    The instance itself is used as the weakref callback, so the finalizer
    fires either when *obj* is garbage collected or when it is invoked
    explicitly (directly or via _run_finalizers at exit).
    '''
    def __init__(self, obj, callback, args=(), kwargs=None, exitpriority=None):
        if (exitpriority is not None) and not isinstance(exitpriority,int):
            raise TypeError(
                "Exitpriority ({0!r}) must be None or int, not {1!s}".format(
                    exitpriority, type(exitpriority)))

        if obj is not None:
            self._weakref = weakref.ref(obj, self)
        elif exitpriority is None:
            # With neither an object nor an exit priority there is nothing
            # that could ever trigger the callback.
            raise ValueError("Without object, exitpriority cannot be None")

        self._callback = callback
        self._args = args
        self._kwargs = kwargs or {}
        # Key doubles as the sort key for exit-time ordering.
        self._key = (exitpriority, next(_finalizer_counter))
        self._pid = os.getpid()

        _finalizer_registry[self._key] = self

    def __call__(self, wr=None,
                 # Need to bind these locally because the globals can have
                 # been cleared at shutdown
                 _finalizer_registry=_finalizer_registry,
                 sub_debug=sub_debug, getpid=os.getpid):
        '''
        Run the callback unless it has already been called or cancelled
        '''
        try:
            # Deleting the key first makes the call-at-most-once guarantee
            # atomic with respect to re-entry.
            del _finalizer_registry[self._key]
        except KeyError:
            sub_debug('finalizer no longer registered')
        else:
            if self._pid != getpid():
                # Inherited across a fork: the resource belongs to the parent.
                sub_debug('finalizer ignored because different process')
                res = None
            else:
                sub_debug('finalizer calling %s with args %s and kwargs %s',
                          self._callback, self._args, self._kwargs)
                res = self._callback(*self._args, **self._kwargs)
            # Drop all references so obj/args can be collected promptly.
            self._weakref = self._callback = self._args = \
                self._kwargs = self._key = None
            return res

    def cancel(self):
        '''
        Cancel finalization of the object
        '''
        try:
            del _finalizer_registry[self._key]
        except KeyError:
            # Already run or already cancelled.
            pass
        else:
            self._weakref = self._callback = self._args = \
                self._kwargs = self._key = None

    def still_active(self):
        '''
        Return whether this finalizer is still waiting to invoke callback
        '''
        return self._key in _finalizer_registry

    def __repr__(self):
        try:
            obj = self._weakref()
        except (AttributeError, TypeError):
            # _weakref may be None (already run/cancelled) or missing.
            obj = None

        if obj is None:
            return '<%s object, dead>' % self.__class__.__name__

        x = '<%s object, callback=%s' % (
                self.__class__.__name__,
                getattr(self._callback, '__name__', self._callback))
        if self._args:
            x += ', args=' + str(self._args)
        if self._kwargs:
            x += ', kwargs=' + str(self._kwargs)
        if self._key[0] is not None:
            x += ', exitpriority=' + str(self._key[0])
        return x + '>'
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
def _run_finalizers(minpriority=None):
    '''
    Run all finalizers whose exit priority is not None and at least minpriority

    Finalizers with highest priority are called first; finalizers with
    the same priority will be called in reverse order of creation.
    '''
    if _finalizer_registry is None:
        # This function may be called after this module's globals are
        # destroyed.  See the _exit_function function in this module for more
        # notes.
        return

    if minpriority is None:
        f = lambda p : p[0] is not None
    else:
        f = lambda p : p[0] is not None and p[0] >= minpriority

    # Careful: _finalizer_registry may be mutated while this function
    # is running (either by a GC run or by another thread).

    # list(_finalizer_registry) should be atomic, while
    # list(_finalizer_registry.items()) is not.
    keys = [key for key in list(_finalizer_registry) if f(key)]
    keys.sort(reverse=True)

    for key in keys:
        finalizer = _finalizer_registry.get(key)
        # key may have been removed from the registry
        if finalizer is not None:
            sub_debug('calling %s', finalizer)
            try:
                finalizer()
            except Exception:
                # Keep running remaining finalizers even if one fails.
                import traceback
                traceback.print_exc()

    if minpriority is None:
        # Full run: drop any priority-None finalizers that were skipped.
        _finalizer_registry.clear()
|
| 307 |
+
|
| 308 |
+
#
|
| 309 |
+
# Clean up on exit
|
| 310 |
+
#
|
| 311 |
+
|
| 312 |
+
def is_exiting():
    '''
    Returns true if the process is shutting down

    _exiting becomes None when module globals are torn down, which also
    counts as "exiting".
    '''
    return _exiting or _exiting is None
|
| 317 |
+
|
| 318 |
+
_exiting = False  # set True by _exit_function(); becomes None at module teardown
|
| 319 |
+
|
| 320 |
+
def _exit_function(info=info, debug=debug, _run_finalizers=_run_finalizers,
                   active_children=process.active_children,
                   current_process=process.current_process):
    # We hold on to references to functions in the arglist due to the
    # situation described below, where this function is called after this
    # module's globals are destroyed.
    #
    # Shutdown sequence: run priority>=0 finalizers, terminate daemon
    # children, join all children, then run the remaining finalizers.

    global _exiting

    if not _exiting:
        _exiting = True

        info('process shutting down')
        debug('running all "atexit" finalizers with priority >= 0')
        _run_finalizers(0)

        if current_process() is not None:
            # We check if the current process is None here because if
            # it's None, any call to ``active_children()`` will raise
            # an AttributeError (active_children winds up trying to
            # get attributes from util._current_process).  One
            # situation where this can happen is if someone has
            # manipulated sys.modules, causing this module to be
            # garbage collected.  The destructor for the module type
            # then replaces all values in the module dict with None.
            # For instance, after setuptools runs a test it replaces
            # sys.modules with a copy created earlier.  See issues
            # #9775 and #15881.  Also related: #4106, #9205, and
            # #9207.

            for p in active_children():
                if p.daemon:
                    info('calling terminate() for daemon %s', p.name)
                    p._popen.terminate()

            for p in active_children():
                info('calling join() for process %s', p.name)
                p.join()

        debug('running the remaining "atexit" finalizers')
        _run_finalizers()
|
| 361 |
+
|
| 362 |
+
atexit.register(_exit_function)  # ensure multiprocessing cleanup runs at interpreter exit
|
| 363 |
+
|
| 364 |
+
#
|
| 365 |
+
# Some fork aware types
|
| 366 |
+
#
|
| 367 |
+
|
| 368 |
+
class ForkAwareThreadLock(object):
    """A threading.Lock wrapper that re-initializes itself after a fork,
    so a lock held by another thread in the parent cannot deadlock the
    child."""

    def __init__(self):
        self._lock = threading.Lock()
        self.acquire = self._lock.acquire
        self.release = self._lock.release
        register_after_fork(self, ForkAwareThreadLock._at_fork_reinit)

    def _at_fork_reinit(self):
        # Reset the underlying lock to the unlocked state in the child.
        self._lock._at_fork_reinit()

    def __enter__(self):
        return self._lock.__enter__()

    def __exit__(self, *args):
        return self._lock.__exit__(*args)
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
class ForkAwareLocal(threading.local):
    """Thread-local storage whose contents are wiped in a forked child."""

    def __init__(self):
        register_after_fork(self, lambda obj : obj.__dict__.clear())

    def __reduce__(self):
        # Pickle as a fresh, empty instance; state is never transferred.
        return type(self), ()
|
| 390 |
+
|
| 391 |
+
#
|
| 392 |
+
# Close fds except those specified
|
| 393 |
+
#
|
| 394 |
+
|
| 395 |
+
try:
    MAXFD = os.sysconf("SC_OPEN_MAX")
except Exception:
    # Platform does not expose SC_OPEN_MAX; fall back to a conservative cap.
    MAXFD = 256
|
| 399 |
+
|
| 400 |
+
def close_all_fds_except(fds):
    # Close every fd in [0, MAXFD) not listed in *fds* by closing the gaps
    # between consecutive kept fds.  -1 and MAXFD are sentinels bounding
    # the range.
    fds = list(fds) + [-1, MAXFD]
    fds.sort()
    # NOTE(review): this assert is stripped under ``python -O``; callers
    # must not pass fds >= MAXFD.
    assert fds[-1] == MAXFD, 'fd too large'
    for i in range(len(fds) - 1):
        os.closerange(fds[i]+1, fds[i+1])
|
| 406 |
+
#
|
| 407 |
+
# Close sys.stdin and replace stdin with os.devnull
|
| 408 |
+
#
|
| 409 |
+
|
| 410 |
+
def _close_stdin():
    # Detach the child from the terminal's stdin by replacing sys.stdin
    # with a stream reading from os.devnull.  All failures are best-effort.
    if sys.stdin is None:
        return

    try:
        sys.stdin.close()
    except (OSError, ValueError):
        pass

    try:
        fd = os.open(os.devnull, os.O_RDONLY)
        try:
            # closefd=False: fd ownership stays with this code path, which
            # closes it explicitly if open() raises.
            sys.stdin = open(fd, encoding="utf-8", closefd=False)
        except:
            os.close(fd)
            raise
    except (OSError, ValueError):
        pass
|
| 428 |
+
|
| 429 |
+
#
|
| 430 |
+
# Flush standard streams, if any
|
| 431 |
+
#
|
| 432 |
+
|
| 433 |
+
def _flush_std_streams():
    # Best-effort flush of stdout/stderr before fork/exec so buffered output
    # is not duplicated or lost; streams may be missing or already closed.
    try:
        sys.stdout.flush()
    except (AttributeError, ValueError):
        pass
    try:
        sys.stderr.flush()
    except (AttributeError, ValueError):
        pass
|
| 442 |
+
|
| 443 |
+
#
|
| 444 |
+
# Start a program with only specified fds kept open
|
| 445 |
+
#
|
| 446 |
+
|
| 447 |
+
def spawnv_passfds(path, args, passfds):
    """Fork and exec *path* with *args*, keeping only *passfds* open.

    Returns the child's pid.  The positional arguments to fork_exec are
    version-specific (this matches CPython 3.10's private API) -- do not
    reorder them.
    """
    import _posixsubprocess
    passfds = tuple(sorted(map(int, passfds)))
    # fork_exec requires an error pipe to report exec failures.
    errpipe_read, errpipe_write = os.pipe()
    try:
        return _posixsubprocess.fork_exec(
            args, [os.fsencode(path)], True, passfds, None, None,
            -1, -1, -1, -1, -1, -1, errpipe_read, errpipe_write,
            False, False, None, None, None, -1, None)
    finally:
        os.close(errpipe_read)
        os.close(errpipe_write)
|
| 459 |
+
|
| 460 |
+
|
| 461 |
+
def close_fds(*fds):
    """Close every file descriptor passed as a positional argument."""
    for descriptor in fds:
        os.close(descriptor)
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
def _cleanup_tests():
    """Cleanup multiprocessing resources when multiprocessing tests
    completed."""

    from test import support

    # cleanup multiprocessing
    process._cleanup()

    # Stop the ForkServer process if it's running
    from multiprocessing import forkserver
    forkserver._forkserver._stop()

    # Stop the ResourceTracker process if it's running
    from multiprocessing import resource_tracker
    resource_tracker._resource_tracker._stop()

    # bpo-37421: Explicitly call _run_finalizers() to remove immediately
    # temporary directories created by multiprocessing.util.get_temp_dir().
    _run_finalizers()
    support.gc_collect()

    support.reap_children()
|