Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +4 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/__pycache__/turtle.cpython-38.pyc +3 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/lib-dynload/_codecs_tw.cpython-38-x86_64-linux-gnu.so +3 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/lib-dynload/_hashlib.cpython-38-x86_64-linux-gnu.so +3 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/.version +1 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/__init__.py +149 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/__main__.py +8 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/api.py +461 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/exports.py +378 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/history.py +410 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/lock.py +134 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/plan.py +563 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/resolve.py +1463 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/utils.py +531 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/pem.py +65 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc2631.py +37 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc5280.py +1658 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc5916.py +35 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc5990.py +237 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc6187.py +22 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc7633.py +38 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc7773.py +52 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc8358.py +50 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc8410.py +43 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/__init__.py +120 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/assets.py +36 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/auth.py +102 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/context.py +130 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/data_compat.py +160 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/dataclass_compat.py +225 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/default.py +145 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/errors.py +130 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/lazy.py +100 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/main.py +53 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/main_lib.py +47 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/manager.py +474 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/notebook.py +443 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/plugin_util.py +242 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/program.py +893 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/version.py +18 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_C.cpython-38-x86_64-linux-gnu.so +0 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_C_flatbuffer.cpython-38-x86_64-linux-gnu.so +0 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/__config__.py +20 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/__future__.py +19 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/__init__.py +960 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_appdirs.py +643 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_dl.cpython-38-x86_64-linux-gnu.so +0 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_linalg_utils.py +108 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_meta_registrations.py +140 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_namedtensor_internals.py +142 -0
.gitattributes
CHANGED
|
@@ -202,3 +202,7 @@ my_container_sandbox/workspace/anaconda3/pkgs/pip-24.0-pyhd8ed1ab_0.conda filter
|
|
| 202 |
my_container_sandbox/workspace/anaconda3/pkgs/conda-package-handling-1.7.3-py39h27cfd23_1.conda filter=lfs diff=lfs merge=lfs -text
|
| 203 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/__pycache__/_pydecimal.cpython-38.pyc filter=lfs diff=lfs merge=lfs -text
|
| 204 |
my_container_sandbox/workspace/anaconda3/lib/itcl4.2.2/libitcl4.2.2.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 202 |
my_container_sandbox/workspace/anaconda3/pkgs/conda-package-handling-1.7.3-py39h27cfd23_1.conda filter=lfs diff=lfs merge=lfs -text
|
| 203 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/__pycache__/_pydecimal.cpython-38.pyc filter=lfs diff=lfs merge=lfs -text
|
| 204 |
my_container_sandbox/workspace/anaconda3/lib/itcl4.2.2/libitcl4.2.2.so filter=lfs diff=lfs merge=lfs -text
|
| 205 |
+
my_container_sandbox/workspace/anaconda3/lib/tk8.6/demos/images/teapot.ppm filter=lfs diff=lfs merge=lfs -text
|
| 206 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/__pycache__/turtle.cpython-38.pyc filter=lfs diff=lfs merge=lfs -text
|
| 207 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/lib-dynload/_codecs_tw.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 208 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/lib-dynload/_hashlib.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/__pycache__/turtle.cpython-38.pyc
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:a30f9c746e7da6afd15f2be270fd357970e5b7656190f73e6010912b7edd1f52
|
| 3 |
+
size 130017
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/lib-dynload/_codecs_tw.cpython-38-x86_64-linux-gnu.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:1027d0872cc277ee517b0c33d7ec1b558efbed103bc6353e02aa2ff1ab2e31ba
|
| 3 |
+
size 139888
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/lib-dynload/_hashlib.cpython-38-x86_64-linux-gnu.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e7da966d0029e722191836daf8ffb8166d406e0b038ca01a746658c21a015afb
|
| 3 |
+
size 138464
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/.version
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
4.14.0
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/__init__.py
ADDED
|
@@ -0,0 +1,149 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Copyright (C) 2012 Anaconda, Inc
|
| 3 |
+
# SPDX-License-Identifier: BSD-3-Clause
|
| 4 |
+
"""OS-agnostic, system-level binary package manager."""
|
| 5 |
+
from __future__ import absolute_import, division, print_function, unicode_literals
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
from os.path import abspath, dirname
|
| 9 |
+
import sys
|
| 10 |
+
import warnings
|
| 11 |
+
|
| 12 |
+
from json import JSONEncoder
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
__all__ = (
|
| 17 |
+
"__name__", "__version__", "__author__", "__email__", "__license__", "__summary__", "__url__",
|
| 18 |
+
"CONDA_PACKAGE_ROOT", "CondaError", "CondaMultiError", "CondaExitZero", "conda_signal_handler",
|
| 19 |
+
"__copyright__",
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
__name__ = "conda"
|
| 23 |
+
__version__ = "4.14.0"
|
| 24 |
+
__author__ = "Anaconda, Inc."
|
| 25 |
+
__email__ = "conda@continuum.io"
|
| 26 |
+
__license__ = "BSD-3-Clause"
|
| 27 |
+
__copyright__ = "Copyright (c) 2012, Anaconda, Inc."
|
| 28 |
+
__summary__ = __doc__
|
| 29 |
+
__url__ = "https://github.com/conda/conda"
|
| 30 |
+
|
| 31 |
+
if os.getenv('CONDA_ROOT') is None:
|
| 32 |
+
os.environ[str('CONDA_ROOT')] = sys.prefix
|
| 33 |
+
|
| 34 |
+
#: The conda package directory.
|
| 35 |
+
CONDA_PACKAGE_ROOT = abspath(dirname(__file__))
|
| 36 |
+
#: The path within which to find the conda package.
|
| 37 |
+
#:
|
| 38 |
+
#: If `conda` is statically installed this is the site-packages. If `conda` is an editable install
|
| 39 |
+
#: or otherwise uninstalled this is the git repo.
|
| 40 |
+
CONDA_SOURCE_ROOT = dirname(CONDA_PACKAGE_ROOT)
|
| 41 |
+
|
| 42 |
+
def another_to_unicode(val):
|
| 43 |
+
warnings.warn(
|
| 44 |
+
"`conda.another_to_unicode` is pending deprecation and will be removed in a "
|
| 45 |
+
"future release.",
|
| 46 |
+
PendingDeprecationWarning,
|
| 47 |
+
)
|
| 48 |
+
# ignore flake8 on this because it finds this as an error on py3 even though it is guarded
|
| 49 |
+
if isinstance(val, basestring) and not isinstance(val, unicode): # NOQA
|
| 50 |
+
return unicode(val, encoding='utf-8') # NOQA
|
| 51 |
+
return val
|
| 52 |
+
|
| 53 |
+
class CondaError(Exception):
|
| 54 |
+
return_code = 1
|
| 55 |
+
reportable = False # Exception may be reported to core maintainers
|
| 56 |
+
|
| 57 |
+
def __init__(self, message, caused_by=None, **kwargs):
|
| 58 |
+
self.message = message
|
| 59 |
+
self._kwargs = kwargs
|
| 60 |
+
self._caused_by = caused_by
|
| 61 |
+
super(CondaError, self).__init__(message)
|
| 62 |
+
|
| 63 |
+
def __repr__(self):
|
| 64 |
+
return '%s: %s' % (self.__class__.__name__, str(self))
|
| 65 |
+
|
| 66 |
+
def __str__(self):
|
| 67 |
+
try:
|
| 68 |
+
return str(self.message % self._kwargs)
|
| 69 |
+
except Exception:
|
| 70 |
+
debug_message = "\n".join((
|
| 71 |
+
"class: " + self.__class__.__name__,
|
| 72 |
+
"message:",
|
| 73 |
+
self.message,
|
| 74 |
+
"kwargs:",
|
| 75 |
+
str(self._kwargs),
|
| 76 |
+
"",
|
| 77 |
+
))
|
| 78 |
+
print(debug_message, file=sys.stderr)
|
| 79 |
+
raise
|
| 80 |
+
|
| 81 |
+
def dump_map(self):
|
| 82 |
+
result = dict((k, v) for k, v in vars(self).items() if not k.startswith('_'))
|
| 83 |
+
result.update(exception_type=str(type(self)),
|
| 84 |
+
exception_name=self.__class__.__name__,
|
| 85 |
+
message=str(self),
|
| 86 |
+
error=repr(self),
|
| 87 |
+
caused_by=repr(self._caused_by),
|
| 88 |
+
**self._kwargs)
|
| 89 |
+
return result
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
class CondaMultiError(CondaError):
|
| 93 |
+
|
| 94 |
+
def __init__(self, errors):
|
| 95 |
+
self.errors = errors
|
| 96 |
+
super(CondaMultiError, self).__init__(None)
|
| 97 |
+
|
| 98 |
+
def __repr__(self):
|
| 99 |
+
errs = []
|
| 100 |
+
for e in self.errors:
|
| 101 |
+
if isinstance(e, EnvironmentError) and not isinstance(e, CondaError):
|
| 102 |
+
errs.append(str(e))
|
| 103 |
+
else:
|
| 104 |
+
# We avoid Python casting this back to a str()
|
| 105 |
+
# by using e.__repr__() instead of repr(e)
|
| 106 |
+
# https://github.com/scrapy/cssselect/issues/34
|
| 107 |
+
errs.append(e.__repr__())
|
| 108 |
+
res = '\n'.join(errs)
|
| 109 |
+
return res
|
| 110 |
+
|
| 111 |
+
def __str__(self):
|
| 112 |
+
return str('\n').join(str(e) for e in self.errors) + str('\n')
|
| 113 |
+
|
| 114 |
+
def dump_map(self):
|
| 115 |
+
return dict(exception_type=str(type(self)),
|
| 116 |
+
exception_name=self.__class__.__name__,
|
| 117 |
+
errors=tuple(error.dump_map() for error in self.errors),
|
| 118 |
+
error="Multiple Errors Encountered.",
|
| 119 |
+
)
|
| 120 |
+
|
| 121 |
+
def contains(self, exception_class):
|
| 122 |
+
return any(isinstance(e, exception_class) for e in self.errors)
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
class CondaExitZero(CondaError):
|
| 126 |
+
return_code = 0
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
ACTIVE_SUBPROCESSES = set()
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def conda_signal_handler(signum, frame):
|
| 133 |
+
# This function is in the base __init__.py so that it can be monkey-patched by other code
|
| 134 |
+
# if downstream conda users so choose. The biggest danger of monkey-patching is that
|
| 135 |
+
# unlink/link transactions don't get rolled back if interrupted mid-transaction.
|
| 136 |
+
for p in ACTIVE_SUBPROCESSES:
|
| 137 |
+
if p.poll() is None:
|
| 138 |
+
p.send_signal(signum)
|
| 139 |
+
|
| 140 |
+
from .exceptions import CondaSignalInterrupt
|
| 141 |
+
raise CondaSignalInterrupt(signum)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def _default(self, obj):
|
| 145 |
+
return getattr(obj.__class__, "to_json", _default.default)(obj)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
_default.default = JSONEncoder().default
|
| 149 |
+
JSONEncoder.default = _default
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/__main__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Copyright (C) 2012 Anaconda, Inc
|
| 3 |
+
# SPDX-License-Identifier: BSD-3-Clause
|
| 4 |
+
import sys
|
| 5 |
+
|
| 6 |
+
from .cli import main
|
| 7 |
+
|
| 8 |
+
sys.exit(main())
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/api.py
ADDED
|
@@ -0,0 +1,461 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Copyright (C) 2012 Anaconda, Inc
|
| 3 |
+
# SPDX-License-Identifier: BSD-3-Clause
|
| 4 |
+
from __future__ import absolute_import, division, print_function, unicode_literals
|
| 5 |
+
|
| 6 |
+
from .base.constants import DepsModifier as _DepsModifier, UpdateModifier as _UpdateModifier
|
| 7 |
+
from .common.constants import NULL
|
| 8 |
+
from .core.package_cache_data import PackageCacheData as _PackageCacheData
|
| 9 |
+
from .core.prefix_data import PrefixData as _PrefixData
|
| 10 |
+
from .core.solve import _get_solver_class
|
| 11 |
+
from .core.subdir_data import SubdirData as _SubdirData
|
| 12 |
+
from .models.channel import Channel
|
| 13 |
+
|
| 14 |
+
DepsModifier = _DepsModifier
|
| 15 |
+
"""Flags to enable alternate handling of dependencies."""
|
| 16 |
+
|
| 17 |
+
UpdateModifier = _UpdateModifier
|
| 18 |
+
"""Flags to enable alternate handling for updates of existing packages in the environment."""
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class Solver(object):
|
| 22 |
+
"""
|
| 23 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 24 |
+
|
| 25 |
+
A high-level API to conda's solving logic. Three public methods are provided to access a
|
| 26 |
+
solution in various forms.
|
| 27 |
+
|
| 28 |
+
* :meth:`solve_final_state`
|
| 29 |
+
* :meth:`solve_for_diff`
|
| 30 |
+
* :meth:`solve_for_transaction`
|
| 31 |
+
|
| 32 |
+
"""
|
| 33 |
+
|
| 34 |
+
def __init__(self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remove=()):
|
| 35 |
+
"""
|
| 36 |
+
**Beta**
|
| 37 |
+
|
| 38 |
+
Args:
|
| 39 |
+
prefix (str):
|
| 40 |
+
The conda prefix / environment location for which the :class:`Solver`
|
| 41 |
+
is being instantiated.
|
| 42 |
+
channels (Sequence[:class:`Channel`]):
|
| 43 |
+
A prioritized list of channels to use for the solution.
|
| 44 |
+
subdirs (Sequence[str]):
|
| 45 |
+
A prioritized list of subdirs to use for the solution.
|
| 46 |
+
specs_to_add (Set[:class:`MatchSpec`]):
|
| 47 |
+
The set of package specs to add to the prefix.
|
| 48 |
+
specs_to_remove (Set[:class:`MatchSpec`]):
|
| 49 |
+
The set of package specs to remove from the prefix.
|
| 50 |
+
|
| 51 |
+
"""
|
| 52 |
+
SolverType = _get_solver_class()
|
| 53 |
+
self._internal = SolverType(prefix, channels, subdirs, specs_to_add, specs_to_remove)
|
| 54 |
+
|
| 55 |
+
def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL,
|
| 56 |
+
ignore_pinned=NULL, force_remove=NULL):
|
| 57 |
+
"""
|
| 58 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 59 |
+
|
| 60 |
+
Gives the final, solved state of the environment.
|
| 61 |
+
|
| 62 |
+
Args:
|
| 63 |
+
deps_modifier (DepsModifier):
|
| 64 |
+
An optional flag indicating special solver handling for dependencies. The
|
| 65 |
+
default solver behavior is to be as conservative as possible with dependency
|
| 66 |
+
updates (in the case the dependency already exists in the environment), while
|
| 67 |
+
still ensuring all dependencies are satisfied. Options include
|
| 68 |
+
* NO_DEPS
|
| 69 |
+
* ONLY_DEPS
|
| 70 |
+
* UPDATE_DEPS
|
| 71 |
+
* UPDATE_DEPS_ONLY_DEPS
|
| 72 |
+
* FREEZE_INSTALLED
|
| 73 |
+
prune (bool):
|
| 74 |
+
If ``True``, the solution will not contain packages that were
|
| 75 |
+
previously brought into the environment as dependencies but are no longer
|
| 76 |
+
required as dependencies and are not user-requested.
|
| 77 |
+
ignore_pinned (bool):
|
| 78 |
+
If ``True``, the solution will ignore pinned package configuration
|
| 79 |
+
for the prefix.
|
| 80 |
+
force_remove (bool):
|
| 81 |
+
Forces removal of a package without removing packages that depend on it.
|
| 82 |
+
|
| 83 |
+
Returns:
|
| 84 |
+
Tuple[PackageRef]:
|
| 85 |
+
In sorted dependency order from roots to leaves, the package references for
|
| 86 |
+
the solved state of the environment.
|
| 87 |
+
|
| 88 |
+
"""
|
| 89 |
+
return self._internal.solve_final_state(update_modifier, deps_modifier, prune,
|
| 90 |
+
ignore_pinned, force_remove)
|
| 91 |
+
|
| 92 |
+
def solve_for_diff(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL,
|
| 93 |
+
ignore_pinned=NULL, force_remove=NULL, force_reinstall=False):
|
| 94 |
+
"""
|
| 95 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 96 |
+
|
| 97 |
+
Gives the package references to remove from an environment, followed by
|
| 98 |
+
the package references to add to an environment.
|
| 99 |
+
|
| 100 |
+
Args:
|
| 101 |
+
deps_modifier (DepsModifier):
|
| 102 |
+
See :meth:`solve_final_state`.
|
| 103 |
+
prune (bool):
|
| 104 |
+
See :meth:`solve_final_state`.
|
| 105 |
+
ignore_pinned (bool):
|
| 106 |
+
See :meth:`solve_final_state`.
|
| 107 |
+
force_remove (bool):
|
| 108 |
+
See :meth:`solve_final_state`.
|
| 109 |
+
force_reinstall (bool):
|
| 110 |
+
For requested specs_to_add that are already satisfied in the environment,
|
| 111 |
+
instructs the solver to remove the package and spec from the environment,
|
| 112 |
+
and then add it back--possibly with the exact package instance modified,
|
| 113 |
+
depending on the spec exactness.
|
| 114 |
+
|
| 115 |
+
Returns:
|
| 116 |
+
Tuple[PackageRef], Tuple[PackageRef]:
|
| 117 |
+
A two-tuple of PackageRef sequences. The first is the group of packages to
|
| 118 |
+
remove from the environment, in sorted dependency order from leaves to roots.
|
| 119 |
+
The second is the group of packages to add to the environment, in sorted
|
| 120 |
+
dependency order from roots to leaves.
|
| 121 |
+
|
| 122 |
+
"""
|
| 123 |
+
return self._internal.solve_for_diff(update_modifier, deps_modifier, prune, ignore_pinned,
|
| 124 |
+
force_remove, force_reinstall)
|
| 125 |
+
|
| 126 |
+
def solve_for_transaction(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL,
|
| 127 |
+
ignore_pinned=NULL, force_remove=NULL, force_reinstall=False):
|
| 128 |
+
"""
|
| 129 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 130 |
+
|
| 131 |
+
Gives an UnlinkLinkTransaction instance that can be used to execute the solution
|
| 132 |
+
on an environment.
|
| 133 |
+
|
| 134 |
+
Args:
|
| 135 |
+
deps_modifier (DepsModifier):
|
| 136 |
+
See :meth:`solve_final_state`.
|
| 137 |
+
prune (bool):
|
| 138 |
+
See :meth:`solve_final_state`.
|
| 139 |
+
ignore_pinned (bool):
|
| 140 |
+
See :meth:`solve_final_state`.
|
| 141 |
+
force_remove (bool):
|
| 142 |
+
See :meth:`solve_final_state`.
|
| 143 |
+
force_reinstall (bool):
|
| 144 |
+
See :meth:`solve_for_diff`.
|
| 145 |
+
|
| 146 |
+
Returns:
|
| 147 |
+
UnlinkLinkTransaction:
|
| 148 |
+
|
| 149 |
+
"""
|
| 150 |
+
return self._internal.solve_for_transaction(update_modifier, deps_modifier, prune,
|
| 151 |
+
ignore_pinned, force_remove, force_reinstall)
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
class SubdirData(object):
|
| 155 |
+
"""
|
| 156 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 157 |
+
|
| 158 |
+
High-level management and usage of repodata.json for subdirs.
|
| 159 |
+
"""
|
| 160 |
+
|
| 161 |
+
def __init__(self, channel):
|
| 162 |
+
"""
|
| 163 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 164 |
+
|
| 165 |
+
Args:
|
| 166 |
+
channel (str or Channel):
|
| 167 |
+
The target subdir for the instance. Must either be a url that includes a subdir
|
| 168 |
+
or a :obj:`Channel` that includes a subdir. e.g.:
|
| 169 |
+
* 'https://repo.anaconda.com/pkgs/main/linux-64'
|
| 170 |
+
* Channel('https://repo.anaconda.com/pkgs/main/linux-64')
|
| 171 |
+
* Channel('conda-forge/osx-64')
|
| 172 |
+
"""
|
| 173 |
+
channel = Channel(channel)
|
| 174 |
+
assert channel.subdir
|
| 175 |
+
self._internal = _SubdirData(channel)
|
| 176 |
+
|
| 177 |
+
def query(self, package_ref_or_match_spec):
|
| 178 |
+
"""
|
| 179 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 180 |
+
|
| 181 |
+
Run a query against this specific instance of repodata.
|
| 182 |
+
|
| 183 |
+
Args:
|
| 184 |
+
package_ref_or_match_spec (PackageRef or MatchSpec or str):
|
| 185 |
+
Either an exact :obj:`PackageRef` to match against, or a :obj:`MatchSpec`
|
| 186 |
+
query object. A :obj:`str` will be turned into a :obj:`MatchSpec` automatically.
|
| 187 |
+
|
| 188 |
+
Returns:
|
| 189 |
+
Tuple[PackageRecord]
|
| 190 |
+
|
| 191 |
+
"""
|
| 192 |
+
return tuple(self._internal.query(package_ref_or_match_spec))
|
| 193 |
+
|
| 194 |
+
@staticmethod
|
| 195 |
+
def query_all(package_ref_or_match_spec, channels=None, subdirs=None):
|
| 196 |
+
"""
|
| 197 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 198 |
+
|
| 199 |
+
Run a query against all repodata instances in channel/subdir matrix.
|
| 200 |
+
|
| 201 |
+
Args:
|
| 202 |
+
package_ref_or_match_spec (PackageRef or MatchSpec or str):
|
| 203 |
+
Either an exact :obj:`PackageRef` to match against, or a :obj:`MatchSpec`
|
| 204 |
+
query object. A :obj:`str` will be turned into a :obj:`MatchSpec` automatically.
|
| 205 |
+
channels (Iterable[Channel or str] or None):
|
| 206 |
+
An iterable of urls for channels or :obj:`Channel` objects. If None, will fall
|
| 207 |
+
back to context.channels.
|
| 208 |
+
subdirs (Iterable[str] or None):
|
| 209 |
+
If None, will fall back to context.subdirs.
|
| 210 |
+
|
| 211 |
+
Returns:
|
| 212 |
+
Tuple[PackageRecord]
|
| 213 |
+
|
| 214 |
+
"""
|
| 215 |
+
return tuple(_SubdirData.query_all(package_ref_or_match_spec, channels, subdirs))
|
| 216 |
+
|
| 217 |
+
def iter_records(self):
|
| 218 |
+
"""
|
| 219 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 220 |
+
|
| 221 |
+
Returns:
|
| 222 |
+
Iterable[PackageRecord]: A generator over all records contained in the repodata.json
|
| 223 |
+
instance. Warning: this is a generator that is exhausted on first use.
|
| 224 |
+
|
| 225 |
+
"""
|
| 226 |
+
return self._internal.iter_records()
|
| 227 |
+
|
| 228 |
+
def reload(self):
|
| 229 |
+
"""
|
| 230 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 231 |
+
|
| 232 |
+
Update the instance with new information. Backing information (i.e. repodata.json)
|
| 233 |
+
is lazily downloaded/loaded on first use by the other methods of this class. You
|
| 234 |
+
should only use this method if you are *sure* you have outdated data.
|
| 235 |
+
|
| 236 |
+
Returns:
|
| 237 |
+
SubdirData
|
| 238 |
+
|
| 239 |
+
"""
|
| 240 |
+
self._internal = self._internal.reload()
|
| 241 |
+
return self
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
class PackageCacheData(object):
|
| 245 |
+
"""
|
| 246 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 247 |
+
|
| 248 |
+
High-level management and usage of package caches.
|
| 249 |
+
"""
|
| 250 |
+
|
| 251 |
+
def __init__(self, pkgs_dir):
|
| 252 |
+
"""
|
| 253 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 254 |
+
|
| 255 |
+
Args:
|
| 256 |
+
pkgs_dir (str):
|
| 257 |
+
"""
|
| 258 |
+
self._internal = _PackageCacheData(pkgs_dir)
|
| 259 |
+
|
| 260 |
+
def get(self, package_ref, default=NULL):
|
| 261 |
+
"""
|
| 262 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 263 |
+
|
| 264 |
+
Args:
|
| 265 |
+
package_ref (PackageRef):
|
| 266 |
+
A :obj:`PackageRef` instance representing the key for the
|
| 267 |
+
:obj:`PackageCacheRecord` being sought.
|
| 268 |
+
default: The default value to return if the record does not exist. If not
|
| 269 |
+
specified and no record exists, :exc:`KeyError` is raised.
|
| 270 |
+
|
| 271 |
+
Returns:
|
| 272 |
+
PackageCacheRecord
|
| 273 |
+
|
| 274 |
+
"""
|
| 275 |
+
return self._internal.get(package_ref, default)
|
| 276 |
+
|
| 277 |
+
def query(self, package_ref_or_match_spec):
|
| 278 |
+
"""
|
| 279 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 280 |
+
|
| 281 |
+
Run a query against this specific package cache instance.
|
| 282 |
+
|
| 283 |
+
Args:
|
| 284 |
+
package_ref_or_match_spec (PackageRef or MatchSpec or str):
|
| 285 |
+
Either an exact :obj:`PackageRef` to match against, or a :obj:`MatchSpec`
|
| 286 |
+
query object. A :obj:`str` will be turned into a :obj:`MatchSpec` automatically.
|
| 287 |
+
|
| 288 |
+
Returns:
|
| 289 |
+
Tuple[PackageCacheRecord]
|
| 290 |
+
|
| 291 |
+
"""
|
| 292 |
+
return tuple(self._internal.query(package_ref_or_match_spec))
|
| 293 |
+
|
| 294 |
+
@staticmethod
|
| 295 |
+
def query_all(package_ref_or_match_spec, pkgs_dirs=None):
|
| 296 |
+
"""
|
| 297 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 298 |
+
|
| 299 |
+
Run a query against all package caches.
|
| 300 |
+
|
| 301 |
+
Args:
|
| 302 |
+
package_ref_or_match_spec (PackageRef or MatchSpec or str):
|
| 303 |
+
Either an exact :obj:`PackageRef` to match against, or a :obj:`MatchSpec`
|
| 304 |
+
query object. A :obj:`str` will be turned into a :obj:`MatchSpec` automatically.
|
| 305 |
+
pkgs_dirs (Iterable[str] or None):
|
| 306 |
+
If None, will fall back to context.pkgs_dirs.
|
| 307 |
+
|
| 308 |
+
Returns:
|
| 309 |
+
Tuple[PackageCacheRecord]
|
| 310 |
+
|
| 311 |
+
"""
|
| 312 |
+
return tuple(_PackageCacheData.query_all(package_ref_or_match_spec, pkgs_dirs))
|
| 313 |
+
|
| 314 |
+
def iter_records(self):
|
| 315 |
+
"""
|
| 316 |
+
**Beta** While in beta, expect both major and minor changes across minor releases.
|
| 317 |
+
|
| 318 |
+
Returns:
|
| 319 |
+
Iterable[PackageCacheRecord]: A generator over all records contained in the package
|
| 320 |
+
cache instance. Warning: this is a generator that is exhausted on first use.
|
| 321 |
+
|
| 322 |
+
"""
|
| 323 |
+
return self._internal.iter_records()
|
| 324 |
+
|
| 325 |
+
@property
def is_writable(self):
    """
    **Beta** While in beta, expect both major and minor changes across minor releases.

    Whether the package cache location can be written to (vs. read-only).

    Returns:
        bool

    """
    return self._internal.is_writable
|
| 337 |
+
|
| 338 |
+
@staticmethod
def first_writable(pkgs_dirs=None):
    """
    **Beta** While in beta, expect both major and minor changes across minor releases.

    Build an instance object for the first writable package cache.

    Args:
        pkgs_dirs (Iterable[str]):
            Caches to consider; when None, falls back to context.pkgs_dirs.

    Returns:
        PackageCacheData:
            An instance wrapping the first writable package cache.

    """
    writable_cache = _PackageCacheData.first_writable(pkgs_dirs)
    return PackageCacheData(writable_cache.pkgs_dir)
|
| 355 |
+
|
| 356 |
+
def reload(self):
    """
    **Beta** While in beta, expect both major and minor changes across minor releases.

    Refresh the instance with current information. Backing data (the contents
    of the pkgs_dir) is lazily loaded on first use by the other methods of
    this class, so only call this if you are *sure* your data is outdated.

    Returns:
        PackageCacheData

    """
    # swap in a freshly-loaded internal object, then return self for chaining
    self._internal = self._internal.reload()
    return self
|
| 370 |
+
|
| 371 |
+
|
| 372 |
+
class PrefixData(object):
    """
    **Beta** While in beta, expect both major and minor changes across minor releases.

    High-level management and usage of conda environment prefixes.
    """

    def __init__(self, prefix_path):
        """
        **Beta** While in beta, expect both major and minor changes across minor releases.

        Args:
            prefix_path (str): path to the conda environment prefix.
        """
        self._internal = _PrefixData(prefix_path)

    def get(self, package_ref, default=NULL):
        """
        **Beta** While in beta, expect both major and minor changes across minor releases.

        Look up the record for a single package in this prefix.

        Args:
            package_ref (PackageRef):
                A :obj:`PackageRef` instance representing the key for the
                :obj:`PrefixRecord` being sought.
            default: Value returned when the record does not exist. If omitted
                and no record exists, :exc:`KeyError` is raised.

        Returns:
            PrefixRecord

        """
        return self._internal.get(package_ref.name, default)

    def query(self, package_ref_or_match_spec):
        """
        **Beta** While in beta, expect both major and minor changes across minor releases.

        Run a query against this specific prefix instance.

        Args:
            package_ref_or_match_spec (PackageRef or MatchSpec or str):
                An exact :obj:`PackageRef` to match against, or a
                :obj:`MatchSpec` query object; a :obj:`str` is converted to a
                :obj:`MatchSpec` automatically.

        Returns:
            Tuple[PrefixRecord]

        """
        return tuple(self._internal.query(package_ref_or_match_spec))

    def iter_records(self):
        """
        **Beta** While in beta, expect both major and minor changes across minor releases.

        Returns:
            Iterable[PrefixRecord]: A generator over every record in the
                prefix. Warning: the generator is exhausted on first use.

        """
        return self._internal.iter_records()

    @property
    def is_writable(self):
        """
        **Beta** While in beta, expect both major and minor changes across minor releases.

        Whether the prefix can be written to (vs. read-only).

        Returns:
            bool or None:
                True if the prefix is writable, False if read-only, None if
                the prefix does not exist as a conda environment.

        """
        return self._internal.is_writable

    def reload(self):
        """
        **Beta** While in beta, expect both major and minor changes across minor releases.

        Refresh the instance with current information. Backing data (the
        conda-meta directory) is lazily loaded on first use by the other
        methods of this class, so only call this if you are *sure* your data
        is outdated.

        Returns:
            PrefixData

        """
        self._internal = self._internal.reload()
        return self
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/exports.py
ADDED
|
@@ -0,0 +1,378 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Copyright (C) 2012 Anaconda, Inc
|
| 3 |
+
# SPDX-License-Identifier: BSD-3-Clause
|
| 4 |
+
from __future__ import absolute_import, division, print_function, unicode_literals
|
| 5 |
+
|
| 6 |
+
from collections.abc import Hashable as _Hashable
|
| 7 |
+
import errno
|
| 8 |
+
import functools
|
| 9 |
+
import os
|
| 10 |
+
import sys
|
| 11 |
+
import threading
|
| 12 |
+
import warnings
|
| 13 |
+
|
| 14 |
+
# necessary for conda-build
|
| 15 |
+
from ._vendor.six import PY3, iteritems, string_types, text_type # noqa: F401
|
| 16 |
+
from io import StringIO # noqa: F401
|
| 17 |
+
from builtins import input # noqa: F401
|
| 18 |
+
|
| 19 |
+
from . import CondaError # noqa: F401
|
| 20 |
+
from .base.context import reset_context
|
| 21 |
+
|
| 22 |
+
reset_context() # initialize context when conda.exports is imported
|
| 23 |
+
|
| 24 |
+
from . import plan # noqa: F401
|
| 25 |
+
from .core.solve import Solver # noqa: F401
|
| 26 |
+
from .cli.common import specs_from_args, spec_from_line, specs_from_url # noqa: F401
|
| 27 |
+
from .cli.conda_argparse import add_parser_prefix, add_parser_channels # noqa: F401
|
| 28 |
+
from .cli.conda_argparse import ArgumentParser # noqa: F401
|
| 29 |
+
from .common import compat # noqa: F401
|
| 30 |
+
from .common.compat import on_win # noqa: F401
|
| 31 |
+
from .gateways.connection.session import CondaSession # noqa: F401
|
| 32 |
+
from .gateways.disk.create import TemporaryDirectory # noqa: F401
|
| 33 |
+
from .common.toposort import _toposort # noqa: F401
|
| 34 |
+
from .gateways.disk.link import lchmod # noqa: F401
|
| 35 |
+
from .gateways.connection.download import TmpDownload, download as _download # noqa: F401
|
| 36 |
+
|
| 37 |
+
handle_proxy_407 = lambda x, y: warnings.warn(
|
| 38 |
+
"The `conda.exports.handle_proxy_407` is pending deprecation and will be removed in a "
|
| 39 |
+
"future release. Now handled by CondaSession.",
|
| 40 |
+
PendingDeprecationWarning,
|
| 41 |
+
)
|
| 42 |
+
|
| 43 |
+
from .core.package_cache_data import rm_fetched # noqa: F401
|
| 44 |
+
from .gateways.disk.delete import delete_trash, move_to_trash # noqa: F401
|
| 45 |
+
from .misc import untracked, walk_prefix # noqa: F401
|
| 46 |
+
from .resolve import MatchSpec, ResolvePackageNotFound, Resolve, Unsatisfiable # noqa: F401
|
| 47 |
+
|
| 48 |
+
NoPackagesFound = NoPackagesFoundError = ResolvePackageNotFound
|
| 49 |
+
|
| 50 |
+
from .utils import hashsum_file, human_bytes, unix_path_to_win, url_path # noqa: F401
|
| 51 |
+
from .common.path import win_path_to_unix # noqa: F401
|
| 52 |
+
from .gateways.disk.read import compute_md5sum
|
| 53 |
+
|
| 54 |
+
md5_file = compute_md5sum
|
| 55 |
+
|
| 56 |
+
from .models.version import VersionOrder, normalized_version # noqa: F401
|
| 57 |
+
from .models.channel import Channel # noqa: F401
|
| 58 |
+
import conda.base.context
|
| 59 |
+
from .base.context import get_prefix, non_x86_machines, reset_context, sys_rc_path # noqa: F401
|
| 60 |
+
|
| 61 |
+
non_x86_linux_machines = non_x86_machines
|
| 62 |
+
|
| 63 |
+
from .auxlib.entity import EntityEncoder # noqa: F401
|
| 64 |
+
from .base.constants import ( # noqa: F401
|
| 65 |
+
DEFAULT_CHANNELS,
|
| 66 |
+
DEFAULT_CHANNELS_WIN,
|
| 67 |
+
DEFAULT_CHANNELS_UNIX,
|
| 68 |
+
)
|
| 69 |
+
|
| 70 |
+
get_default_urls = lambda: DEFAULT_CHANNELS
|
| 71 |
+
|
| 72 |
+
from .base.constants import PREFIX_PLACEHOLDER
|
| 73 |
+
|
| 74 |
+
_PREFIX_PLACEHOLDER = prefix_placeholder = PREFIX_PLACEHOLDER
|
| 75 |
+
|
| 76 |
+
arch_name = conda.base.context.context.arch_name
|
| 77 |
+
binstar_upload = conda.base.context.context.anaconda_upload
|
| 78 |
+
bits = conda.base.context.context.bits
|
| 79 |
+
default_prefix = conda.base.context.context.default_prefix
|
| 80 |
+
default_python = conda.base.context.context.default_python
|
| 81 |
+
envs_dirs = conda.base.context.context.envs_dirs
|
| 82 |
+
pkgs_dirs = conda.base.context.context.pkgs_dirs
|
| 83 |
+
platform = conda.base.context.context.platform
|
| 84 |
+
root_dir = conda.base.context.context.root_prefix
|
| 85 |
+
root_writable = conda.base.context.context.root_writable
|
| 86 |
+
subdir = conda.base.context.context.subdir
|
| 87 |
+
conda_private = conda.base.context.context.conda_private
|
| 88 |
+
conda_build = conda.base.context.context.conda_build
|
| 89 |
+
|
| 90 |
+
from .models.channel import get_conda_build_local_url # NOQA
|
| 91 |
+
|
| 92 |
+
get_rc_urls = lambda: list(conda.base.context.context.channels)
|
| 93 |
+
get_local_urls = lambda: list(get_conda_build_local_url()) or []
|
| 94 |
+
load_condarc = lambda fn: conda.base.context.reset_context([fn])
|
| 95 |
+
|
| 96 |
+
from .exceptions import PaddingError, LinkError, CondaOSError, PathNotFoundError # NOQA
|
| 97 |
+
|
| 98 |
+
PaddingError = PaddingError
|
| 99 |
+
LinkError = LinkError
|
| 100 |
+
CondaOSError = CondaOSError
|
| 101 |
+
# PathNotFoundError is the conda 4.4.x name for it - let's plan ahead.
|
| 102 |
+
PathNotFoundError = CondaFileNotFoundError = PathNotFoundError
|
| 103 |
+
|
| 104 |
+
from .models.enums import FileMode # noqa: F401
|
| 105 |
+
from .models.enums import PathType # noqa: F401
|
| 106 |
+
from .models.records import PackageRecord
|
| 107 |
+
|
| 108 |
+
IndexRecord = PackageRecord
|
| 109 |
+
|
| 110 |
+
from .models.dist import Dist
|
| 111 |
+
from .gateways.subprocess import ACTIVE_SUBPROCESSES, subprocess_call # noqa: F401
|
| 112 |
+
from .core.subdir_data import cache_fn_url # noqa: F401
|
| 113 |
+
from .core.package_cache_data import ProgressiveFetchExtract # noqa: F401
|
| 114 |
+
from .exceptions import CondaHTTPError, LockError, UnsatisfiableError # noqa: F401
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
class Completer(object):  # pragma: no cover
    """Legacy shell-completion shim kept for conda-build compatibility.

    Subclasses provide ``_get_items``; membership tests always succeed so
    that completion never filters out a candidate.
    """

    def get_items(self):
        # delegate to the subclass-provided item source
        return self._get_items()

    def __contains__(self, item):
        # accept everything -- completion must never reject a candidate
        return True

    def __iter__(self):
        return iter(self.get_items())
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
class InstalledPackages(object):
    """Empty placeholder kept for backwards compatibility with conda-build."""
    pass
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
class memoized(object):  # pragma: no cover
    """Decorator. Caches a function's return value each time it is called.

    If called later with the same arguments, the cached value is returned
    (not reevaluated). Unhashable arguments -- positional or keyword --
    bypass the cache instead of raising.
    """
    def __init__(self, func):
        warnings.warn(
            "The `conda.exports.memoized` decorator is pending deprecation and will be removed in "
            "a future release. Please use `functools.lru_cache` instead.",
            PendingDeprecationWarning,
            stacklevel=2,
        )

        self.func = func
        self.cache = {}
        self.lock = threading.Lock()

    def __call__(self, *args, **kw):
        newargs = []
        for arg in args:
            if isinstance(arg, list):
                # lists are common call arguments; freeze them so they're cacheable
                newargs.append(tuple(arg))
            elif not isinstance(arg, _Hashable):
                # uncacheable. a set or dict, for instance.
                # better to not cache than blow up.
                return self.func(*args, **kw)
            else:
                newargs.append(arg)
        newargs = tuple(newargs)
        try:
            key = (newargs, frozenset(sorted(kw.items())))
        except TypeError:
            # an unhashable keyword value (e.g. a dict): skip the cache,
            # mirroring the graceful fallback used for positional arguments
            return self.func(*args, **kw)
        with self.lock:
            if key in self.cache:
                return self.cache[key]
            else:
                value = self.func(*args, **kw)
                self.cache[key] = value
                return value
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
from .gateways.disk.delete import rm_rf as _rm_rf
|
| 172 |
+
from .core.prefix_data import delete_prefix_from_linked_data
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def rm_rf(path, max_retries=5, trash=True):
    """Recursively remove *path*, then purge it from the linked-data cache.

    Args:
        path (str): file or directory to delete.
        max_retries (int): number of removal retries on failure.
        trash (bool): whether to fall back to moving the path to the trash.
    """
    _rm_rf(path, max_retries, trash)
    delete_prefix_from_linked_data(path)
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
# ######################
|
| 181 |
+
# signature.py
|
| 182 |
+
# ######################
|
| 183 |
+
KEYS = None
|
| 184 |
+
KEYS_DIR = None
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
def hash_file(_):
    """Legacy signing stub; hashing support was removed, so always return None."""
    return None  # pragma: no cover
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def verify(_):
    """Legacy signature-verification stub; always reports failure."""
    return False  # pragma: no cover
|
| 193 |
+
|
| 194 |
+
|
| 195 |
+
from .plan import ( # noqa: F401
|
| 196 |
+
execute_actions,
|
| 197 |
+
execute_instructions,
|
| 198 |
+
execute_plan,
|
| 199 |
+
install_actions,
|
| 200 |
+
)
|
| 201 |
+
from .plan import display_actions as _display_actions
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def display_actions(actions, index, show_channel_urls=None, specs_to_remove=(), specs_to_add=()):
    """Compatibility wrapper around conda.plan.display_actions.

    Translates the Dist keys stored in *actions* into the record values kept
    in *index* before delegating to the real implementation.
    """
    for op in ('FETCH', 'LINK', 'UNLINK'):
        if op in actions:
            actions[op] = [index[d] for d in actions[op]]
    # the real implementation expects an identity mapping of records
    index = {prec: prec for prec in index.values()}
    return _display_actions(actions, index, show_channel_urls, specs_to_remove, specs_to_add)
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
from .core.index import ( # noqa: F401
|
| 216 |
+
dist_str_in_index,
|
| 217 |
+
fetch_index as _fetch_index,
|
| 218 |
+
get_index as _get_index,
|
| 219 |
+
)
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
def get_index(channel_urls=(), prepend=True, platform=None,
              use_local=False, use_cache=False, unknown=None, prefix=None):
    """Compatibility wrapper: build a Dist-keyed index via conda.core.index.get_index."""
    index = _get_index(channel_urls, prepend, platform, use_local, use_cache, unknown, prefix)
    return {Dist(record): record for record in index.values()}
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
def fetch_index(channel_urls, use_cache=False, index=None):
    """Compatibility wrapper: build a Dist-keyed index via conda.core.index.fetch_index."""
    result = _fetch_index(channel_urls, use_cache, index)
    return {Dist(record): record for record in result.values()}
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def package_cache():
    """Return a legacy dict-like view over the first writable package cache."""
    from .core.package_cache_data import PackageCacheData

    class package_cache(object):

        def __contains__(self, dist):
            ref = Dist(dist).to_package_ref()
            return bool(PackageCacheData.first_writable().get(ref, None))

        def keys(self):
            return (Dist(v) for v in PackageCacheData.first_writable().values())

        def __delitem__(self, dist):
            PackageCacheData.first_writable().remove(Dist(dist).to_package_ref())

    return package_cache()
|
| 248 |
+
|
| 249 |
+
|
| 250 |
+
def symlink_conda(prefix, root_dir, shell=None):  # pragma: no cover
    """Deprecated: create conda/activate/deactivate entry points inside *prefix*."""
    print("WARNING: symlink_conda() is deprecated.", file=sys.stderr)
    # do not symlink root env - this clobbers activate incorrectly.
    # prefix should always be longer than, or outside the root dir.
    norm_prefix = os.path.normcase(os.path.normpath(prefix))
    norm_root = os.path.normcase(os.path.normpath(root_dir))
    if norm_prefix in norm_root:
        return
    if on_win:
        where = 'condabin'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    target_dir = os.path.join(prefix, where)
    if not os.path.isdir(target_dir):
        os.makedirs(target_dir)
    _symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
def _symlink_conda_hlp(prefix, root_dir, where, symlink_fn):  # pragma: no cover
    """Link the conda entry-point scripts from *root_dir* into *prefix*/*where*."""
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = os.path.join(prefix, where)
    if not os.path.isdir(prefix_where):
        os.makedirs(prefix_where)
    for script in scripts:
        root_file = os.path.join(root_dir, where, script)
        prefix_file = os.path.join(prefix_where, script)
        try:
            # try to kill stale links if they exist
            if os.path.lexists(prefix_file):
                rm_rf(prefix_file)
            # if they're in use, they won't be killed. Skip making new symlink.
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
        except (IOError, OSError) as e:
            ignorable = (errno.EPERM, errno.EACCES, errno.EROFS, errno.EEXIST)
            if os.path.lexists(prefix_file) and e.errno in ignorable:
                # cannot symlink, but the link already exists: ignore
                pass
            else:
                raise
|
| 290 |
+
|
| 291 |
+
|
| 292 |
+
if on_win:  # pragma: no cover
    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from .utils import shells
        dst_dir = os.path.dirname(dst)
        try:
            os.makedirs(dst_dir)
        except OSError as exc:  # Python >2.5
            if not (exc.errno == errno.EEXIST and os.path.isdir(dst_dir)):
                raise

        # bat file redirect
        if not os.path.isfile(dst + '.bat'):
            with open(dst + '.bat', 'w') as f:
                f.write('@echo off\ncall "%s" %%*\n' % src)

        # TODO: probably need one here for powershell at some point

        # This one is for bash/cygwin/msys
        # set default shell to bash.exe when not provided, as that's most common
        shell = shell or "bash.exe"

        # technically these are "links" - but islink doesn't work on win
        if not os.path.isfile(dst):
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src + ".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
            # Make the new file executable
            # http://stackoverflow.com/a/30463972/1170370
            mode = os.stat(dst).st_mode
            mode |= (mode & 292) >> 2  # copy R bits to X
            os.chmod(dst, mode)
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
def linked_data(prefix, ignore_channels=False):
    """Return a {Dist: PrefixRecord} mapping of the packages linked into *prefix*."""
    from .core.prefix_data import PrefixData
    from .models.dist import Dist
    records = PrefixData(prefix)._prefix_records.values()
    return {Dist(record): record for record in records}
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
def linked(prefix, ignore_channels=False):
    """Return the set of Dists for conda packages linked into *prefix*."""
    from .models.enums import PackageType
    conda_types = PackageType.conda_package_types()
    return {
        dist
        for dist, prefix_rec in linked_data(prefix, ignore_channels=ignore_channels).items()
        if prefix_rec.package_type in conda_types
    }
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
# exports
|
| 359 |
+
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    from .core.prefix_data import PrefixData
    prefix_record = PrefixData(prefix).get(dist.name, None)
    if prefix_record is not None and MatchSpec(dist).match(prefix_record):
        return prefix_record
    return None
|
| 374 |
+
|
| 375 |
+
|
| 376 |
+
def download(url, dst_path, session=None, md5sum=None, urlstxt=False, retries=3,
             sha256=None, size=None):
    """Compatibility wrapper around conda's internal download().

    ``session``, ``urlstxt`` and ``retries`` are accepted for backwards
    compatibility but are ignored by the underlying implementation.
    """
    return _download(url, dst_path, md5=md5sum, sha256=sha256, size=size)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/history.py
ADDED
|
@@ -0,0 +1,410 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Copyright (C) 2012 Anaconda, Inc
|
| 3 |
+
# SPDX-License-Identifier: BSD-3-Clause
|
| 4 |
+
from __future__ import absolute_import, division, print_function, unicode_literals
|
| 5 |
+
|
| 6 |
+
from ast import literal_eval
|
| 7 |
+
import codecs
|
| 8 |
+
from errno import EACCES, EPERM, EROFS
|
| 9 |
+
import logging
|
| 10 |
+
from operator import itemgetter
|
| 11 |
+
import os
|
| 12 |
+
from os.path import isdir, isfile, join
|
| 13 |
+
import re
|
| 14 |
+
import sys
|
| 15 |
+
from textwrap import dedent
|
| 16 |
+
import time
|
| 17 |
+
import warnings
|
| 18 |
+
|
| 19 |
+
from . import __version__ as CONDA_VERSION
|
| 20 |
+
from .auxlib.ish import dals
|
| 21 |
+
from ._vendor.toolz import groupby, take
|
| 22 |
+
from .base.constants import DEFAULTS_CHANNEL_NAME
|
| 23 |
+
from .base.context import context
|
| 24 |
+
from .common.compat import ensure_text_type, open
|
| 25 |
+
from .common.path import paths_equal
|
| 26 |
+
from .core.prefix_data import PrefixData
|
| 27 |
+
from .exceptions import CondaHistoryError, NotWritableError
|
| 28 |
+
from .gateways.disk.update import touch
|
| 29 |
+
from .models.dist import dist_str_to_quad
|
| 30 |
+
from .models.version import VersionOrder, version_relation_re
|
| 31 |
+
from .models.match_spec import MatchSpec
|
| 32 |
+
|
| 33 |
+
log = logging.getLogger(__name__)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class CondaHistoryWarning(Warning):
    """Warning emitted when the conda-meta/history file cannot be parsed."""
    pass
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def write_head(fo):
    """Write a history-section header (timestamp, command line, conda version) to *fo*."""
    fo.write("==> %s <==\n" % time.strftime('%Y-%m-%d %H:%M:%S'))
    cmd = ' '.join(ensure_text_type(s) for s in sys.argv)
    fo.write("# cmd: %s\n" % cmd)
    version = '.'.join(take(3, CONDA_VERSION.split('.')))
    fo.write("# conda version: %s\n" % version)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def is_diff(content):
    """Return True if *content* looks like a diff: any line starts with '-' or '+'."""
    for line in content:
        if line.startswith(('-', '+')):
            return True
    return False
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def pretty_diff(diff):
    """Yield human-readable change lines for a '+'/'-' dist diff.

    Packages that appear in both sides render as " name {old -> new}";
    pure removals and additions keep their '-'/'+' prefix.
    """
    added, removed = {}, {}
    for entry in diff:
        dist_str = entry[1:]
        name, version, _, channel = dist_str_to_quad(dist_str)
        if channel != DEFAULTS_CHANNEL_NAME:
            # non-default channels are shown alongside the version
            version += ' (%s)' % channel
        if entry.startswith('-'):
            removed[name.lower()] = version
        elif entry.startswith('+'):
            added[name.lower()] = version
    changed = set(added) & set(removed)
    for name in sorted(changed):
        yield ' %s {%s -> %s}' % (name, removed[name], added[name])
    for name in sorted(set(removed) - changed):
        yield '-%s-%s' % (name, removed[name])
    for name in sorted(set(added) - changed):
        yield '+%s-%s' % (name, added[name])
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def pretty_content(content):
    """Return an iterator rendering *content*: pretty diff lines when it is a
    diff, otherwise the sorted entries."""
    return pretty_diff(content) if is_diff(content) else iter(sorted(content))
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class History(object):
|
| 79 |
+
|
| 80 |
+
com_pat = re.compile(r'#\s*cmd:\s*(.+)')
|
| 81 |
+
spec_pat = re.compile(r'#\s*(\w+)\s*specs:\s*(.+)?')
|
| 82 |
+
conda_v_pat = re.compile(r'#\s*conda version:\s*(.+)')
|
| 83 |
+
|
| 84 |
+
def __init__(self, prefix):
|
| 85 |
+
self.prefix = prefix
|
| 86 |
+
self.meta_dir = join(prefix, 'conda-meta')
|
| 87 |
+
self.path = join(self.meta_dir, 'history')
|
| 88 |
+
|
| 89 |
+
def __enter__(self):
    """Ensure the history file exists, then return self for use as a context manager."""
    self.init_log_file()
    return self
|
| 92 |
+
|
| 93 |
+
def __exit__(self, exc_type, exc_value, traceback):
    """Record any environment changes when the context exits (even on error)."""
    self.update()
|
| 95 |
+
|
| 96 |
+
def init_log_file(self):
    """Create the history file (and its parent directories) if missing."""
    touch(self.path, True)
|
| 98 |
+
|
| 99 |
+
def file_is_empty(self):
|
| 100 |
+
return os.stat(self.path).st_size == 0
|
| 101 |
+
|
| 102 |
+
def update(self):
    """
    update the history file (creating a new one if necessary)
    """
    try:
        try:
            last = set(self.get_state())
        except CondaHistoryError as e:
            # a corrupt history file is reported but must not abort the command
            warnings.warn("Error in %s: %s" % (self.path, e),
                          CondaHistoryWarning)
            return
        pd = PrefixData(self.prefix)
        curr = set(prefix_rec.dist_str() for prefix_rec in pd.iter_records())
        self.write_changes(last, curr)
    except EnvironmentError as e:
        # translate permission-style failures into conda's NotWritableError
        if e.errno not in (EACCES, EPERM, EROFS):
            raise
        raise NotWritableError(self.path, e.errno)
|
| 121 |
+
|
| 122 |
+
def parse(self):
|
| 123 |
+
"""
|
| 124 |
+
parse the history file and return a list of
|
| 125 |
+
tuples(datetime strings, set of distributions/diffs, comments)
|
| 126 |
+
"""
|
| 127 |
+
res = []
|
| 128 |
+
if not isfile(self.path):
|
| 129 |
+
return res
|
| 130 |
+
sep_pat = re.compile(r'==>\s*(.+?)\s*<==')
|
| 131 |
+
with open(self.path) as f:
|
| 132 |
+
lines = f.read().splitlines()
|
| 133 |
+
for line in lines:
|
| 134 |
+
line = line.strip()
|
| 135 |
+
if not line:
|
| 136 |
+
continue
|
| 137 |
+
m = sep_pat.match(line)
|
| 138 |
+
if m:
|
| 139 |
+
res.append((m.group(1), set(), []))
|
| 140 |
+
elif line.startswith('#'):
|
| 141 |
+
res[-1][2].append(line)
|
| 142 |
+
elif len(res) > 0:
|
| 143 |
+
res[-1][1].add(line)
|
| 144 |
+
return res
|
| 145 |
+
|
| 146 |
+
@staticmethod
def _parse_old_format_specs_string(specs_string):
    """
    Parse specifications string that use conda<4.5 syntax.

    Examples
    --------
    - "param >=1.5.1,<2.0'"
    - "python>=3.5.1,jupyter >=1.0.0,<2.0,matplotlib >=1.5.1,<2.0"
    """
    specs = []
    for fragment in specs_string.split(','):
        # a fragment that begins with a version qualifier continues the
        # previous spec (version constraints may themselves contain commas);
        # don't join when there is no previous spec
        if specs and version_relation_re.match(fragment):
            specs[-1] = '%s,%s' % (specs[-1], fragment)
        else:
            specs.append(fragment)
    return specs
|
| 165 |
+
|
| 166 |
+
@classmethod
|
| 167 |
+
def _parse_comment_line(cls, line):
|
| 168 |
+
"""
|
| 169 |
+
Parse comment lines in the history file.
|
| 170 |
+
|
| 171 |
+
These lines can be of command type or action type.
|
| 172 |
+
|
| 173 |
+
Examples
|
| 174 |
+
--------
|
| 175 |
+
- "# cmd: /scratch/mc3/bin/conda install -c conda-forge param>=1.5.1,<2.0"
|
| 176 |
+
- "# install specs: python>=3.5.1,jupyter >=1.0.0,<2.0,matplotlib >=1.5.1,<2.0"
|
| 177 |
+
"""
|
| 178 |
+
item = {}
|
| 179 |
+
m = cls.com_pat.match(line)
|
| 180 |
+
if m:
|
| 181 |
+
argv = m.group(1).split()
|
| 182 |
+
if argv[0].endswith('conda'):
|
| 183 |
+
argv[0] = 'conda'
|
| 184 |
+
item['cmd'] = argv
|
| 185 |
+
|
| 186 |
+
m = cls.conda_v_pat.match(line)
|
| 187 |
+
if m:
|
| 188 |
+
item['conda_version'] = m.group(1)
|
| 189 |
+
|
| 190 |
+
m = cls.spec_pat.match(line)
|
| 191 |
+
if m:
|
| 192 |
+
action, specs_string = m.groups()
|
| 193 |
+
specs_string = specs_string or ""
|
| 194 |
+
item['action'] = action
|
| 195 |
+
|
| 196 |
+
if specs_string.startswith('['):
|
| 197 |
+
specs = literal_eval(specs_string)
|
| 198 |
+
elif '[' not in specs_string:
|
| 199 |
+
specs = History._parse_old_format_specs_string(specs_string)
|
| 200 |
+
|
| 201 |
+
specs = [spec for spec in specs if spec and not spec.endswith('@')]
|
| 202 |
+
|
| 203 |
+
if specs and action in ('update', 'install', 'create'):
|
| 204 |
+
item['update_specs'] = item['specs'] = specs
|
| 205 |
+
elif specs and action in ('remove', 'uninstall'):
|
| 206 |
+
item['remove_specs'] = item['specs'] = specs
|
| 207 |
+
elif specs and action in ('neutered', ):
|
| 208 |
+
item['neutered_specs'] = item['specs'] = specs
|
| 209 |
+
|
| 210 |
+
return item
|
| 211 |
+
|
| 212 |
+
    def get_user_requests(self):
        """
        return a list of user requested items. Each item is a dict with the
        following keys:
        'date': the date and time running the command
        'cmd': a list of argv of the actual command which was run
        'action': install/remove/update
        'specs': the specs being used
        """
        res = []
        for dt, unused_cont, comments in self.parse():
            item = {'date': dt}
            for line in comments:
                comment_items = self._parse_comment_line(line)
                item.update(comment_items)

            # Only sections that recorded an actual command become requests.
            if 'cmd' in item:
                res.append(item)

            # Group the section's entries by their leading '+'/'-' marker.
            # NOTE(review): these keys are attached to `item` even when it was
            # not appended above -- appears intentional, but confirm.
            dists = groupby(itemgetter(0), unused_cont)
            item['unlink_dists'] = dists.get('-', ())
            item['link_dists'] = dists.get('+', ())

        conda_versions_from_history = tuple(x['conda_version'] for x in res
                                            if 'conda_version' in x)
        # Guard against operating on an env last touched by a newer conda.
        if conda_versions_from_history and not context.allow_conda_downgrades:
            # Compare only major.minor of the newest recorded conda version.
            minimum_conda_version = sorted(conda_versions_from_history, key=VersionOrder)[-1]
            minimum_major_minor = '.'.join(take(2, minimum_conda_version.split('.')))
            current_major_minor = '.'.join(take(2, CONDA_VERSION.split('.')))
            if VersionOrder(current_major_minor) < VersionOrder(minimum_major_minor):
                message = dals("""
                This environment has previously been operated on by a conda version that's newer
                than the conda currently being used. A newer version of conda is required.
                  target environment location: %(target_prefix)s
                  current conda version: %(conda_version)s
                  minimum conda version: %(minimum_version)s
                """) % {
                    "target_prefix": self.prefix,
                    "conda_version": CONDA_VERSION,
                    "minimum_version": minimum_major_minor,
                }
                # Suggest upgrading conda in the base env when this prefix
                # is not itself the base (root) prefix.
                if not paths_equal(self.prefix, context.root_prefix):
                    message += dedent("""
                    Update conda and try again.
                        $ conda install -p "%(base_prefix)s" "conda>=%(minimum_version)s"
                    """) % {
                        "base_prefix": context.root_prefix,
                        "minimum_version": minimum_major_minor,
                    }
                message += dedent("""
                To work around this restriction, one can also set the config parameter
                'allow_conda_downgrades' to False at their own risk.
                """)

                # TODO: we need to rethink this. It's fine as a warning to try to get users
                #   to avoid breaking their system. However, right now it is preventing
                #   normal conda operation after downgrading conda.
                # raise CondaUpgradeError(message)

        return res
|
| 272 |
+
|
| 273 |
+
def get_requested_specs_map(self):
|
| 274 |
+
# keys are package names and values are specs
|
| 275 |
+
spec_map = {}
|
| 276 |
+
for request in self.get_user_requests():
|
| 277 |
+
remove_specs = (MatchSpec(spec) for spec in request.get('remove_specs', ()))
|
| 278 |
+
for spec in remove_specs:
|
| 279 |
+
spec_map.pop(spec.name, None)
|
| 280 |
+
update_specs = (MatchSpec(spec) for spec in request.get('update_specs', ()))
|
| 281 |
+
spec_map.update(((s.name, s) for s in update_specs))
|
| 282 |
+
# here is where the neutering takes effect, overriding past values
|
| 283 |
+
neutered_specs = (MatchSpec(spec) for spec in request.get('neutered_specs', ()))
|
| 284 |
+
spec_map.update(((s.name, s) for s in neutered_specs))
|
| 285 |
+
|
| 286 |
+
# Conda hasn't always been good about recording when specs have been removed from
|
| 287 |
+
# environments. If the package isn't installed in the current environment, then we
|
| 288 |
+
# shouldn't try to force it here.
|
| 289 |
+
prefix_recs = set(_.name for _ in PrefixData(self.prefix).iter_records())
|
| 290 |
+
return dict((name, spec) for name, spec in spec_map.items() if name in prefix_recs)
|
| 291 |
+
|
| 292 |
+
def construct_states(self):
|
| 293 |
+
"""
|
| 294 |
+
return a list of tuples(datetime strings, set of distributions)
|
| 295 |
+
"""
|
| 296 |
+
res = []
|
| 297 |
+
cur = set([])
|
| 298 |
+
for dt, cont, unused_com in self.parse():
|
| 299 |
+
if not is_diff(cont):
|
| 300 |
+
cur = cont
|
| 301 |
+
else:
|
| 302 |
+
for s in cont:
|
| 303 |
+
if s.startswith('-'):
|
| 304 |
+
cur.discard(s[1:])
|
| 305 |
+
elif s.startswith('+'):
|
| 306 |
+
cur.add(s[1:])
|
| 307 |
+
else:
|
| 308 |
+
raise CondaHistoryError('Did not expect: %s' % s)
|
| 309 |
+
res.append((dt, cur.copy()))
|
| 310 |
+
return res
|
| 311 |
+
|
| 312 |
+
def get_state(self, rev=-1):
|
| 313 |
+
"""
|
| 314 |
+
return the state, i.e. the set of distributions, for a given revision,
|
| 315 |
+
defaults to latest (which is the same as the current state when
|
| 316 |
+
the log file is up-to-date)
|
| 317 |
+
|
| 318 |
+
Returns a list of dist_strs
|
| 319 |
+
"""
|
| 320 |
+
states = self.construct_states()
|
| 321 |
+
if not states:
|
| 322 |
+
return set([])
|
| 323 |
+
times, pkgs = zip(*states)
|
| 324 |
+
return pkgs[rev]
|
| 325 |
+
|
| 326 |
+
def print_log(self):
|
| 327 |
+
for i, (date, content, unused_com) in enumerate(self.parse()):
|
| 328 |
+
print('%s (rev %d)' % (date, i))
|
| 329 |
+
for line in pretty_content(content):
|
| 330 |
+
print(' %s' % line)
|
| 331 |
+
print('')
|
| 332 |
+
|
| 333 |
+
    def object_log(self):
        """Return the history as a list of event dicts.

        Each event carries 'date', 'rev', 'install', 'remove', 'upgrade' and
        'downgrade' keys.  Within a diff, a package that appears on both the
        added and removed sides is classified as an upgrade or downgrade;
        otherwise as a plain install or removal.
        """
        result = []
        for i, (date, content, unused_com) in enumerate(self.parse()):
            # Based on Mateusz's code; provides more details about the
            # history event
            event = {
                'date': date,
                'rev': i,
                'install': [],
                'remove': [],
                'upgrade': [],
                'downgrade': []
            }
            added = {}
            removed = {}
            if is_diff(content):
                for pkg in content:
                    # Strip the leading '+'/'-' marker before parsing the
                    # dist string into (name, version, build, channel).
                    name, version, build, channel = dist_str_to_quad(pkg[1:])
                    if pkg.startswith('+'):
                        added[name.lower()] = (version, build, channel)
                    elif pkg.startswith('-'):
                        removed[name.lower()] = (version, build, channel)

                # Packages present on both sides changed version/build/channel.
                changed = set(added) & set(removed)
                for name in sorted(changed):
                    old = removed[name]
                    new = added[name]
                    details = {
                        'old': '-'.join((name,) + old),
                        'new': '-'.join((name,) + new)
                    }

                    # NOTE(review): this is a lexicographic comparison of the
                    # (version, build, channel) string tuples, not a semantic
                    # version comparison -- e.g. "10.0" sorts before "9.0".
                    if new > old:
                        event['upgrade'].append(details)
                    else:
                        event['downgrade'].append(details)

                for name in sorted(set(removed) - changed):
                    event['remove'].append('-'.join((name,) + removed[name]))

                for name in sorted(set(added) - changed):
                    event['install'].append('-'.join((name,) + added[name]))
            else:
                # Not a diff: the section is a full snapshot, so every entry
                # is reported as an install.
                for pkg in sorted(content):
                    event['install'].append(pkg)
            result.append(event)
        return result
|
| 380 |
+
|
| 381 |
+
def write_changes(self, last_state, current_state):
|
| 382 |
+
if not isdir(self.meta_dir):
|
| 383 |
+
os.makedirs(self.meta_dir)
|
| 384 |
+
with codecs.open(self.path, mode='ab', encoding='utf-8') as fo:
|
| 385 |
+
write_head(fo)
|
| 386 |
+
for fn in sorted(last_state - current_state):
|
| 387 |
+
fo.write('-%s\n' % fn)
|
| 388 |
+
for fn in sorted(current_state - last_state):
|
| 389 |
+
fo.write('+%s\n' % fn)
|
| 390 |
+
|
| 391 |
+
def write_specs(self, remove_specs=(), update_specs=(), neutered_specs=()):
|
| 392 |
+
remove_specs = [str(MatchSpec(s)) for s in remove_specs]
|
| 393 |
+
update_specs = [str(MatchSpec(s)) for s in update_specs]
|
| 394 |
+
neutered_specs = [str(MatchSpec(s)) for s in neutered_specs]
|
| 395 |
+
if any((update_specs, remove_specs, neutered_specs)):
|
| 396 |
+
with codecs.open(self.path, mode='ab', encoding='utf-8') as fh:
|
| 397 |
+
if remove_specs:
|
| 398 |
+
fh.write("# remove specs: %s\n" % remove_specs)
|
| 399 |
+
if update_specs:
|
| 400 |
+
fh.write("# update specs: %s\n" % update_specs)
|
| 401 |
+
if neutered_specs:
|
| 402 |
+
fh.write("# neutered specs: %s\n" % neutered_specs)
|
| 403 |
+
|
| 404 |
+
|
| 405 |
+
if __name__ == '__main__':
    # Ad-hoc debugging entry point: dump the current prefix's history.
    from pprint import pprint

    # Don't use in context manager mode---it augments the history every time
    h = History(sys.prefix)
    pprint(h.get_user_requests())
    print(h.get_requested_specs_map())
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/lock.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Copyright (C) 2012 Anaconda, Inc
|
| 3 |
+
# SPDX-License-Identifier: BSD-3-Clause
|
| 4 |
+
"""
|
| 5 |
+
Tools for working with locks
|
| 6 |
+
|
| 7 |
+
A lock is just an empty directory. We use directories because this lets us use
|
| 8 |
+
the race condition-proof os.makedirs.
|
| 9 |
+
|
| 10 |
+
For now, there is one global lock for all of conda, because some things happen
|
| 11 |
+
globally (such as downloading packages).
|
| 12 |
+
|
| 13 |
+
We don't raise an error if the lock is named with the current PID
|
| 14 |
+
"""
|
| 15 |
+
from __future__ import absolute_import, division, print_function, unicode_literals
|
| 16 |
+
|
| 17 |
+
from glob import glob
|
| 18 |
+
import logging
|
| 19 |
+
import os
|
| 20 |
+
from os.path import abspath, basename, dirname, isdir, join
|
| 21 |
+
import time
|
| 22 |
+
import warnings
|
| 23 |
+
|
| 24 |
+
from .exceptions import LockError
|
| 25 |
+
|
| 26 |
+
# Warn on import: this whole module is slated for removal.
warnings.warn(
    "The `conda.lock` module is pending deprecation and will be removed in a future release. "
    "Please use `filelock` instead.",
    PendingDeprecationWarning,
)

# File extension shared by every conda lock file.
LOCK_EXTENSION = 'conda_lock'

# Keep the string "LOCKERROR" in this string so that external
# programs can look for it.
LOCKSTR = """
LOCKERROR: It looks like conda is already doing something.
The lock {0} was found. Wait for it to finish before continuing.
If you are sure that conda is not running, remove it and try again.
You can also use: $ conda clean --lock
"""

log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('conda.stdoutlog')
|
| 45 |
+
|
| 46 |
+
def touch(file_name, times=None):
    """Touch a file like the Unix ``touch`` shell command.

    Creates ``file_name`` if it does not exist and updates its access and
    modification times.  Failures are logged rather than raised, since a
    missing lock file only degrades locking instead of breaking conda.

    :param file_name: the name of file
    :param times: the access and modified time; ``None`` means "now"
    Examples:
        touch("hello_world.py")
    """
    try:
        # Append mode creates the file if needed without truncating it.
        with open(file_name, 'a'):
            os.utime(file_name, times)
    except (OSError, IOError) as e:
        # Logger.warn is deprecated; use Logger.warning instead.
        log.warning("Failed to create lock, do not run conda in parallel processes [errno %d]",
                    e.errno)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class FileLock(object):
    """Lock a path (file or directory) with the lock file sitting *beside* path.

    :param path_to_lock: the path to be locked
    :param retries: max number of retries
    """
    def __init__(self, path_to_lock, retries=10):
        """Record the lock file paths; the actual lock is taken in __enter__."""
        self.path_to_lock = abspath(path_to_lock)
        self.retries = retries
        # '{0}' is filled in with the current PID when the lock is entered.
        self.lock_file_path = "%s.pid{0}.%s" % (self.path_to_lock, LOCK_EXTENSION)
        # e.g. if locking path `/conda`, lock file will be `/conda.pidXXXX.conda_lock`
        self.lock_file_glob_str = "%s.pid*.%s" % (self.path_to_lock, LOCK_EXTENSION)
        assert isdir(dirname(self.path_to_lock)), "{0} doesn't exist".format(self.path_to_lock)
        assert "::" not in self.path_to_lock, self.path_to_lock

    def __enter__(self):
        sleep_time = 1
        self.lock_file_path = self.lock_file_path.format(os.getpid())
        last_glob_match = None

        for _ in range(self.retries + 1):

            # search, whether there is process already locked on this file
            glob_result = glob(self.lock_file_glob_str)
            if glob_result:
                log.debug(LOCKSTR.format(glob_result))
                log.debug("Sleeping for %s seconds", sleep_time)

                # NOTE(review): the actual sleep is sleep_time/10, so the
                # debug message above overstates the wait by 10x -- confirm
                # which is intended.
                time.sleep(sleep_time / 10)
                sleep_time *= 2
                last_glob_match = glob_result
            else:
                # No competing lock file: claim the lock by creating ours.
                touch(self.lock_file_path)
                return self

        stdoutlog.error("Exceeded max retries, giving up")
        raise LockError(LOCKSTR.format(last_glob_match))

    def __exit__(self, exc_type, exc_value, traceback):
        # Deferred import -- presumably to avoid an import cycle at module
        # load time; verify before moving it to the top of the file.
        from .gateways.disk.delete import rm_rf
        rm_rf(self.lock_file_path)
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
# lgtm alert ignore because this lock functionality is unused and will soon be replaced
class DirectoryLock(FileLock):  # lgtm [py/missing-call-to-init]
    """Lock a directory with the lock file sitting *within* the directory being locked.

    Useful when, for example, locking the root prefix at ``/conda``, and ``/`` is not writable.

    :param directory_path: the path to be locked
    :param retries: max number of retries
    """

    def __init__(self, directory_path, retries=10):
        # Deliberately does NOT call FileLock.__init__; it sets the same
        # attributes itself, pointing the lock file inside the directory.
        self.directory_path = abspath(directory_path)
        directory_name = basename(self.directory_path)
        self.retries = retries
        lock_path_pre = join(self.directory_path, directory_name)
        # '{0}' is filled in with the current PID by FileLock.__enter__.
        self.lock_file_path = "%s.pid{0}.%s" % (lock_path_pre, LOCK_EXTENSION)
        # e.g. if locking directory `/conda`, lock file will be `/conda/conda.pidXXXX.conda_lock`
        self.lock_file_glob_str = "%s.pid*.%s" % (lock_path_pre, LOCK_EXTENSION)
        # make sure '/' exists
        assert isdir(dirname(self.directory_path)), "{0} doesn't exist".format(self.directory_path)
        if not isdir(self.directory_path):
            try:
                os.makedirs(self.directory_path)
                log.debug("forced to create %s", self.directory_path)
            except (OSError, IOError) as e:
                log.warn("Failed to create directory %s [errno %d]", self.directory_path, e.errno)
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
# Legacy name kept pointing at DirectoryLock.
Locked = DirectoryLock
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/plan.py
ADDED
|
@@ -0,0 +1,563 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Copyright (C) 2012 Anaconda, Inc
|
| 3 |
+
# SPDX-License-Identifier: BSD-3-Clause
|
| 4 |
+
"""
|
| 5 |
+
Handle the planning of installs and their execution.
|
| 6 |
+
|
| 7 |
+
NOTE:
|
| 8 |
+
conda.install uses canonical package names in its interface functions,
|
| 9 |
+
whereas conda.resolve uses package filenames, as those are used as index
|
| 10 |
+
keys. We try to keep fixes to this "impedance mismatch" local to this
|
| 11 |
+
module.
|
| 12 |
+
"""
|
| 13 |
+
from __future__ import absolute_import, division, print_function, unicode_literals
|
| 14 |
+
|
| 15 |
+
from collections import defaultdict
|
| 16 |
+
from logging import getLogger
|
| 17 |
+
import sys
|
| 18 |
+
|
| 19 |
+
from ._vendor.boltons.setutils import IndexedSet
|
| 20 |
+
from ._vendor.toolz import concatv
|
| 21 |
+
from .base.constants import DEFAULTS_CHANNEL_NAME, UNKNOWN_CHANNEL
|
| 22 |
+
from .base.context import context, stack_context_default
|
| 23 |
+
from .common.io import dashlist, env_vars, time_recorder
|
| 24 |
+
from .core.index import LAST_CHANNEL_URLS, _supplement_index_with_prefix
|
| 25 |
+
from .core.link import PrefixSetup, UnlinkLinkTransaction
|
| 26 |
+
from .core.solve import diff_for_unlink_link_precs
|
| 27 |
+
from .exceptions import CondaIndexError, PackagesNotFoundError
|
| 28 |
+
from .history import History
|
| 29 |
+
from .instructions import (FETCH, LINK, SYMLINK_CONDA, UNLINK)
|
| 30 |
+
from .models.channel import Channel, prioritize_channels
|
| 31 |
+
from .models.dist import Dist
|
| 32 |
+
from .models.enums import LinkType
|
| 33 |
+
from .models.match_spec import ChannelMatch
|
| 34 |
+
from .models.prefix_graph import PrefixGraph
|
| 35 |
+
from .models.records import PackageRecord
|
| 36 |
+
from .models.version import normalized_version
|
| 37 |
+
from .resolve import MatchSpec
|
| 38 |
+
from .utils import human_bytes
|
| 39 |
+
|
| 40 |
+
log = getLogger(__name__)
|
| 41 |
+
|
| 42 |
+
# TODO: Remove conda/plan.py. This module should be almost completely deprecated now.
|
| 43 |
+
|
| 44 |
+
def print_dists(dists_extras):
    """Print a two-column (package, build) table for (record, extra) pairs.

    ``dists_extras`` is an iterable of (record, extra) where each record has
    ``name``/``version``/``build`` attributes; a truthy ``extra`` string is
    appended to that record's row.
    """
    row_fmt = " %-27s|%17s"
    print(row_fmt % ('package', 'build'))
    print(row_fmt % ('-' * 27, '-' * 17))
    for record, extra in dists_extras:
        row = row_fmt % (record.name + '-' + record.version, record.build)
        print(row + extra if extra else row)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def display_actions(actions, index, show_channel_urls=None, specs_to_remove=(), specs_to_add=()):
    """Print a human-readable "Package Plan" report for an actions dict.

    ``actions`` maps instruction names (FETCH, LINK, UNLINK, ...) to lists of
    PackageRecords, plus a 'PREFIX' entry.  Output goes to stdout; nothing is
    returned.
    """
    prefix = actions.get("PREFIX")
    builder = ['', '## Package Plan ##\n']
    if prefix:
        builder.append(' environment location: %s' % prefix)
        builder.append('')
    if specs_to_remove:
        builder.append(' removed specs: %s'
                       % dashlist(sorted(str(s) for s in specs_to_remove), indent=4))
        builder.append('')
    if specs_to_add:
        builder.append(' added / updated specs: %s'
                       % dashlist(sorted(str(s) for s in specs_to_add), indent=4))
        builder.append('')
    print('\n'.join(builder))

    if show_channel_urls is None:
        show_channel_urls = context.show_channel_urls

    def channel_str(rec):
        # Best-effort canonical channel name from whichever field is present.
        if rec.get('schannel'):
            return rec['schannel']
        if rec.get('url'):
            return Channel(rec['url']).canonical_name
        if rec.get('channel'):
            return Channel(rec['channel']).canonical_name
        return UNKNOWN_CHANNEL

    def channel_filt(s):
        # Suppress the channel column per the show_channel_urls setting;
        # None means "hide only the defaults channel".
        if show_channel_urls is False:
            return ''
        if show_channel_urls is None and s == DEFAULTS_CHANNEL_NAME:
            return ''
        return s

    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")

        disp_lst = []
        for prec in actions[FETCH]:
            assert isinstance(prec, PackageRecord)
            extra = '%15s' % human_bytes(prec['size'])
            schannel = channel_filt(prec.channel.canonical_name)
            if schannel:
                extra += ' ' + schannel
            disp_lst.append((prec, extra))
        print_dists(disp_lst)

        if index and len(actions[FETCH]) > 1:
            num_bytes = sum(prec['size'] for prec in actions[FETCH])
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" % human_bytes(num_bytes))

    # package -> [oldver-oldbuild, newver-newbuild]
    packages = defaultdict(lambda: list(('', '')))
    features = defaultdict(lambda: list(('', '')))
    channels = defaultdict(lambda: list(('', '')))
    records = defaultdict(lambda: list((None, None)))
    linktypes = {}

    # Index 1 holds the "new" (to be linked) side of each package.
    for prec in actions.get(LINK, []):
        assert isinstance(prec, PackageRecord)
        pkg = prec['name']
        channels[pkg][1] = channel_str(prec)
        packages[pkg][1] = prec['version'] + '-' + prec['build']
        records[pkg][1] = prec
        linktypes[pkg] = LinkType.hardlink  # TODO: this is a lie; may have to give this report after UnlinkLinkTransaction.verify()  # NOQA
        features[pkg][1] = ','.join(prec.get('features') or ())
    # Index 0 holds the "old" (to be unlinked) side.
    for prec in actions.get(UNLINK, []):
        assert isinstance(prec, PackageRecord)
        pkg = prec['name']
        channels[pkg][0] = channel_str(prec)
        packages[pkg][0] = prec['version'] + '-' + prec['build']
        records[pkg][0] = prec
        features[pkg][0] = ','.join(prec.get('features') or ())

    new = {p for p in packages if not packages[p][0]}
    removed = {p for p in packages if not packages[p][1]}
    # New packages are actually listed in the left-hand column,
    # so let's move them over there
    for pkg in new:
        for var in (packages, features, channels, records):
            var[pkg] = var[pkg][::-1]

    updated = set()
    downgraded = set()
    channeled = set()
    oldfmt = {}
    newfmt = {}
    empty = True
    if packages:
        empty = False
        # Column widths for aligned old/new display.
        maxpkg = max(len(p) for p in packages) + 1
        maxoldver = max(len(p[0]) for p in packages.values())
        maxnewver = max(len(p[1]) for p in packages.values())
        maxoldfeatures = max(len(p[0]) for p in features.values())
        maxnewfeatures = max(len(p[1]) for p in features.values())
        maxoldchannels = max(len(channel_filt(p[0])) for p in channels.values())
        maxnewchannels = max(len(channel_filt(p[1])) for p in channels.values())
        for pkg in packages:
            # That's right. I'm using old-style string formatting to generate a
            # string with new-style string formatting.
            oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
            if maxoldchannels:
                oldfmt[pkg] += ' {channels[0]:<%s}' % maxoldchannels
            if features[pkg][0]:
                oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures

            lt = LinkType(linktypes.get(pkg, LinkType.hardlink))
            lt = '' if lt == LinkType.hardlink else (' (%s)' % lt)
            if pkg in removed or pkg in new:
                oldfmt[pkg] += lt
                continue

            newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
            if maxnewchannels:
                newfmt[pkg] += ' {channels[1]:<%s}' % maxnewchannels
            if features[pkg][1]:
                newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures
            newfmt[pkg] += lt

            P0 = records[pkg][0]
            P1 = records[pkg][1]
            pri0 = P0.get('priority')
            pri1 = P1.get('priority')
            if pri0 is None or pri1 is None:
                pri0 = pri1 = 1
            try:
                if str(P1.version) == 'custom':
                    newver = str(P0.version) != 'custom'
                    oldver = not newver
                else:
                    # <= here means that unchanged packages will be put in updated
                    N0 = normalized_version(P0.version)
                    N1 = normalized_version(P1.version)
                    newver = N0 < N1
                    oldver = N0 > N1
            except TypeError:
                # Unparseable versions: fall back to plain string comparison.
                newver = P0.version < P1.version
                oldver = P0.version > P1.version
            oldbld = P0.build_number > P1.build_number
            newbld = P0.build_number < P1.build_number
            # Classify: channel supersession wins under channel_priority,
            # then version change, then build-number change.
            if context.channel_priority and pri1 < pri0 and (oldver or not newver and not newbld):
                channeled.add(pkg)
            elif newver:
                updated.add(pkg)
            elif pri1 < pri0 and (oldver or not newver and oldbld):
                channeled.add(pkg)
            elif oldver:
                downgraded.add(pkg)
            elif not oldbld:
                updated.add(pkg)
            else:
                downgraded.add(pkg)

    arrow = ' --> '
    lead = ' ' * 4

    def format(s, pkg):
        chans = [channel_filt(c) for c in channels[pkg]]
        return lead + s.format(pkg=pkg + ':', vers=packages[pkg],
                               channels=chans, features=features[pkg])

    if new:
        print("\nThe following NEW packages will be INSTALLED:\n")
        for pkg in sorted(new):
            # New packages have been moved to the "old" column for display
            print(format(oldfmt[pkg], pkg))

    if removed:
        print("\nThe following packages will be REMOVED:\n")
        for pkg in sorted(removed):
            print(format(oldfmt[pkg], pkg))

    if updated:
        print("\nThe following packages will be UPDATED:\n")
        for pkg in sorted(updated):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if channeled:
        print("\nThe following packages will be SUPERSEDED by a higher-priority channel:\n")
        for pkg in sorted(channeled):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if downgraded:
        print("\nThe following packages will be DOWNGRADED:\n")
        for pkg in sorted(downgraded):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if empty and actions.get(SYMLINK_CONDA):
        print("\nThe following empty environments will be CREATED:\n")
        print(actions['PREFIX'])

    print('')
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
def add_unlink(actions, dist):
    """Record ``dist`` for removal, creating the UNLINK list on first use."""
    assert isinstance(dist, Dist)
    actions.setdefault(UNLINK, []).append(dist)
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
# -------------------------------------------------------------------
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
def add_defaults_to_specs(r, linked, specs, update=False, prefix=None):
    # Intentionally does nothing -- presumably kept only so existing callers
    # of the old planning API keep working; confirm before removing.
    return
|
| 263 |
+
|
| 264 |
+
|
| 265 |
+
def _get_best_prec_match(precs):
|
| 266 |
+
assert precs
|
| 267 |
+
for chn in context.channels:
|
| 268 |
+
channel_matcher = ChannelMatch(chn)
|
| 269 |
+
prec_matches = tuple(prec for prec in precs if channel_matcher.match(prec.channel.name))
|
| 270 |
+
if prec_matches:
|
| 271 |
+
break
|
| 272 |
+
else:
|
| 273 |
+
prec_matches = precs
|
| 274 |
+
log.warn("Multiple packages found:%s", dashlist(prec_matches))
|
| 275 |
+
return prec_matches[0]
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
def revert_actions(prefix, revision=-1, index=None):
|
| 279 |
+
# TODO: If revision raise a revision error, should always go back to a safe revision
|
| 280 |
+
h = History(prefix)
|
| 281 |
+
# TODO: need a History method to get user-requested specs for revision number
|
| 282 |
+
# Doing a revert right now messes up user-requested spec history.
|
| 283 |
+
# Either need to wipe out history after ``revision``, or add the correct
|
| 284 |
+
# history information to the new entry about to be created.
|
| 285 |
+
# TODO: This is wrong!!!!!!!!!!
|
| 286 |
+
user_requested_specs = h.get_requested_specs_map().values()
|
| 287 |
+
try:
|
| 288 |
+
target_state = {MatchSpec.from_dist_str(dist_str) for dist_str in h.get_state(revision)}
|
| 289 |
+
except IndexError:
|
| 290 |
+
raise CondaIndexError("no such revision: %d" % revision)
|
| 291 |
+
|
| 292 |
+
_supplement_index_with_prefix(index, prefix)
|
| 293 |
+
|
| 294 |
+
not_found_in_index_specs = set()
|
| 295 |
+
link_precs = set()
|
| 296 |
+
for spec in target_state:
|
| 297 |
+
precs = tuple(prec for prec in index.values() if spec.match(prec))
|
| 298 |
+
if not precs:
|
| 299 |
+
not_found_in_index_specs.add(spec)
|
| 300 |
+
elif len(precs) > 1:
|
| 301 |
+
link_precs.add(_get_best_prec_match(precs))
|
| 302 |
+
else:
|
| 303 |
+
link_precs.add(precs[0])
|
| 304 |
+
|
| 305 |
+
if not_found_in_index_specs:
|
| 306 |
+
raise PackagesNotFoundError(not_found_in_index_specs)
|
| 307 |
+
|
| 308 |
+
final_precs = IndexedSet(PrefixGraph(link_precs).graph) # toposort
|
| 309 |
+
unlink_precs, link_precs = diff_for_unlink_link_precs(prefix, final_precs)
|
| 310 |
+
stp = PrefixSetup(prefix, unlink_precs, link_precs, (), user_requested_specs, ())
|
| 311 |
+
txn = UnlinkLinkTransaction(stp)
|
| 312 |
+
return txn
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
# ---------------------------- Backwards compat for conda-build --------------------------
|
| 316 |
+
|
| 317 |
+
@time_recorder("execute_actions")
|
| 318 |
+
def execute_actions(actions, index, verbose=False): # pragma: no cover
|
| 319 |
+
plan = _plan_from_actions(actions, index)
|
| 320 |
+
execute_instructions(plan, index, verbose)
|
| 321 |
+
|
| 322 |
+
|
| 323 |
+
def _plan_from_actions(actions, index): # pragma: no cover
|
| 324 |
+
from .instructions import ACTION_CODES, PREFIX, PRINT, PROGRESS, PROGRESS_COMMANDS
|
| 325 |
+
|
| 326 |
+
if 'op_order' in actions and actions['op_order']:
|
| 327 |
+
op_order = actions['op_order']
|
| 328 |
+
else:
|
| 329 |
+
op_order = ACTION_CODES
|
| 330 |
+
|
| 331 |
+
assert PREFIX in actions and actions[PREFIX]
|
| 332 |
+
prefix = actions[PREFIX]
|
| 333 |
+
plan = [('PREFIX', '%s' % prefix)]
|
| 334 |
+
|
| 335 |
+
unlink_link_transaction = actions.get('UNLINKLINKTRANSACTION')
|
| 336 |
+
if unlink_link_transaction:
|
| 337 |
+
raise RuntimeError()
|
| 338 |
+
# progressive_fetch_extract = actions.get('PROGRESSIVEFETCHEXTRACT')
|
| 339 |
+
# if progressive_fetch_extract:
|
| 340 |
+
# plan.append((PROGRESSIVEFETCHEXTRACT, progressive_fetch_extract))
|
| 341 |
+
# plan.append((UNLINKLINKTRANSACTION, unlink_link_transaction))
|
| 342 |
+
# return plan
|
| 343 |
+
|
| 344 |
+
axn = actions.get('ACTION') or None
|
| 345 |
+
specs = actions.get('SPECS', [])
|
| 346 |
+
|
| 347 |
+
log.debug("Adding plans for operations: {0}".format(op_order))
|
| 348 |
+
for op in op_order:
|
| 349 |
+
if op not in actions:
|
| 350 |
+
log.trace("action {0} not in actions".format(op))
|
| 351 |
+
continue
|
| 352 |
+
if not actions[op]:
|
| 353 |
+
log.trace("action {0} has None value".format(op))
|
| 354 |
+
continue
|
| 355 |
+
if '_' not in op:
|
| 356 |
+
plan.append((PRINT, '%sing packages ...' % op.capitalize()))
|
| 357 |
+
elif op.startswith('RM_'):
|
| 358 |
+
plan.append((PRINT, 'Pruning %s packages from the cache ...' % op[3:].lower()))
|
| 359 |
+
if op in PROGRESS_COMMANDS:
|
| 360 |
+
plan.append((PROGRESS, '%d' % len(actions[op])))
|
| 361 |
+
for arg in actions[op]:
|
| 362 |
+
log.debug("appending value {0} for action {1}".format(arg, op))
|
| 363 |
+
plan.append((op, arg))
|
| 364 |
+
|
| 365 |
+
plan = _inject_UNLINKLINKTRANSACTION(plan, index, prefix, axn, specs)
|
| 366 |
+
|
| 367 |
+
return plan
|
| 368 |
+
|
| 369 |
+
|
| 370 |
+
def _inject_UNLINKLINKTRANSACTION(plan, index, prefix, axn, specs): # pragma: no cover
|
| 371 |
+
from os.path import isdir
|
| 372 |
+
from .models.dist import Dist
|
| 373 |
+
from ._vendor.toolz.itertoolz import groupby
|
| 374 |
+
from .instructions import LINK, PROGRESSIVEFETCHEXTRACT, UNLINK, UNLINKLINKTRANSACTION
|
| 375 |
+
from .core.package_cache_data import ProgressiveFetchExtract
|
| 376 |
+
from .core.link import PrefixSetup, UnlinkLinkTransaction
|
| 377 |
+
# this is only used for conda-build at this point
|
| 378 |
+
first_unlink_link_idx = next((q for q, p in enumerate(plan) if p[0] in (UNLINK, LINK)), -1)
|
| 379 |
+
if first_unlink_link_idx >= 0:
|
| 380 |
+
grouped_instructions = groupby(lambda x: x[0], plan)
|
| 381 |
+
unlink_dists = tuple(Dist(d[1]) for d in grouped_instructions.get(UNLINK, ()))
|
| 382 |
+
link_dists = tuple(Dist(d[1]) for d in grouped_instructions.get(LINK, ()))
|
| 383 |
+
unlink_dists, link_dists = _handle_menuinst(unlink_dists, link_dists)
|
| 384 |
+
|
| 385 |
+
if isdir(prefix):
|
| 386 |
+
unlink_precs = tuple(index[d] for d in unlink_dists)
|
| 387 |
+
else:
|
| 388 |
+
# there's nothing to unlink in an environment that doesn't exist
|
| 389 |
+
# this is a hack for what appears to be a logic error in conda-build
|
| 390 |
+
# caught in tests/test_subpackages.py::test_subpackage_recipes[python_test_dep]
|
| 391 |
+
unlink_precs = ()
|
| 392 |
+
link_precs = tuple(index[d] for d in link_dists)
|
| 393 |
+
|
| 394 |
+
pfe = ProgressiveFetchExtract(link_precs)
|
| 395 |
+
pfe.prepare()
|
| 396 |
+
|
| 397 |
+
stp = PrefixSetup(prefix, unlink_precs, link_precs, (), specs, ())
|
| 398 |
+
plan.insert(first_unlink_link_idx, (UNLINKLINKTRANSACTION, UnlinkLinkTransaction(stp)))
|
| 399 |
+
plan.insert(first_unlink_link_idx, (PROGRESSIVEFETCHEXTRACT, pfe))
|
| 400 |
+
elif axn in ('INSTALL', 'CREATE'):
|
| 401 |
+
plan.insert(0, (UNLINKLINKTRANSACTION, (prefix, (), (), (), specs)))
|
| 402 |
+
|
| 403 |
+
return plan
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
def _handle_menuinst(unlink_dists, link_dists): # pragma: no cover
|
| 407 |
+
from .common.compat import on_win
|
| 408 |
+
if not on_win:
|
| 409 |
+
return unlink_dists, link_dists
|
| 410 |
+
|
| 411 |
+
# Always link/unlink menuinst first/last on windows in case a subsequent
|
| 412 |
+
# package tries to import it to create/remove a shortcut
|
| 413 |
+
|
| 414 |
+
# unlink
|
| 415 |
+
menuinst_idx = next((q for q, d in enumerate(unlink_dists) if d.name == 'menuinst'), None)
|
| 416 |
+
if menuinst_idx is not None:
|
| 417 |
+
unlink_dists = tuple(concatv(
|
| 418 |
+
unlink_dists[:menuinst_idx],
|
| 419 |
+
unlink_dists[menuinst_idx+1:],
|
| 420 |
+
unlink_dists[menuinst_idx:menuinst_idx+1],
|
| 421 |
+
))
|
| 422 |
+
|
| 423 |
+
# link
|
| 424 |
+
menuinst_idx = next((q for q, d in enumerate(link_dists) if d.name == 'menuinst'), None)
|
| 425 |
+
if menuinst_idx is not None:
|
| 426 |
+
link_dists = tuple(concatv(
|
| 427 |
+
link_dists[menuinst_idx:menuinst_idx+1],
|
| 428 |
+
link_dists[:menuinst_idx],
|
| 429 |
+
link_dists[menuinst_idx+1:],
|
| 430 |
+
))
|
| 431 |
+
|
| 432 |
+
return unlink_dists, link_dists
|
| 433 |
+
|
| 434 |
+
|
| 435 |
+
@time_recorder("install_actions")
|
| 436 |
+
def install_actions(prefix, index, specs, force=False, only_names=None, always_copy=False,
|
| 437 |
+
pinned=True, update_deps=True, prune=False,
|
| 438 |
+
channel_priority_map=None, is_update=False,
|
| 439 |
+
minimal_hint=False): # pragma: no cover
|
| 440 |
+
# this is for conda-build
|
| 441 |
+
with env_vars({
|
| 442 |
+
'CONDA_ALLOW_NON_CHANNEL_URLS': 'true',
|
| 443 |
+
'CONDA_SOLVER_IGNORE_TIMESTAMPS': 'false',
|
| 444 |
+
}, stack_callback=stack_context_default):
|
| 445 |
+
from os.path import basename
|
| 446 |
+
from ._vendor.boltons.setutils import IndexedSet
|
| 447 |
+
from .core.solve import _get_solver_class
|
| 448 |
+
from .models.channel import Channel
|
| 449 |
+
from .models.dist import Dist
|
| 450 |
+
if channel_priority_map:
|
| 451 |
+
channel_names = IndexedSet(Channel(url).canonical_name for url in channel_priority_map)
|
| 452 |
+
channels = IndexedSet(Channel(cn) for cn in channel_names)
|
| 453 |
+
subdirs = IndexedSet(basename(url) for url in channel_priority_map)
|
| 454 |
+
else:
|
| 455 |
+
# a hack for when conda-build calls this function without giving channel_priority_map
|
| 456 |
+
if LAST_CHANNEL_URLS:
|
| 457 |
+
channel_priority_map = prioritize_channels(LAST_CHANNEL_URLS)
|
| 458 |
+
channels = IndexedSet(Channel(url) for url in channel_priority_map)
|
| 459 |
+
subdirs = IndexedSet(
|
| 460 |
+
subdir for subdir in (c.subdir for c in channels) if subdir
|
| 461 |
+
) or context.subdirs
|
| 462 |
+
else:
|
| 463 |
+
channels = subdirs = None
|
| 464 |
+
|
| 465 |
+
specs = tuple(MatchSpec(spec) for spec in specs)
|
| 466 |
+
|
| 467 |
+
from .core.prefix_data import PrefixData
|
| 468 |
+
PrefixData._cache_.clear()
|
| 469 |
+
|
| 470 |
+
solver = _get_solver_class()(prefix, channels, subdirs, specs_to_add=specs)
|
| 471 |
+
if index:
|
| 472 |
+
solver._index = {prec: prec for prec in index.values()}
|
| 473 |
+
txn = solver.solve_for_transaction(prune=prune, ignore_pinned=not pinned)
|
| 474 |
+
prefix_setup = txn.prefix_setups[prefix]
|
| 475 |
+
actions = get_blank_actions(prefix)
|
| 476 |
+
actions['UNLINK'].extend(Dist(prec) for prec in prefix_setup.unlink_precs)
|
| 477 |
+
actions['LINK'].extend(Dist(prec) for prec in prefix_setup.link_precs)
|
| 478 |
+
return actions
|
| 479 |
+
|
| 480 |
+
|
| 481 |
+
def get_blank_actions(prefix): # pragma: no cover
|
| 482 |
+
from collections import defaultdict
|
| 483 |
+
from .instructions import (CHECK_EXTRACT, CHECK_FETCH, EXTRACT, FETCH, LINK, PREFIX,
|
| 484 |
+
RM_EXTRACTED, RM_FETCHED, SYMLINK_CONDA, UNLINK)
|
| 485 |
+
actions = defaultdict(list)
|
| 486 |
+
actions[PREFIX] = prefix
|
| 487 |
+
actions['op_order'] = (CHECK_FETCH, RM_FETCHED, FETCH, CHECK_EXTRACT,
|
| 488 |
+
RM_EXTRACTED, EXTRACT,
|
| 489 |
+
UNLINK, LINK, SYMLINK_CONDA)
|
| 490 |
+
return actions
|
| 491 |
+
|
| 492 |
+
|
| 493 |
+
@time_recorder("execute_plan")
|
| 494 |
+
def execute_plan(old_plan, index=None, verbose=False): # pragma: no cover
|
| 495 |
+
"""
|
| 496 |
+
Deprecated: This should `conda.instructions.execute_instructions` instead
|
| 497 |
+
"""
|
| 498 |
+
plan = _update_old_plan(old_plan)
|
| 499 |
+
execute_instructions(plan, index, verbose)
|
| 500 |
+
|
| 501 |
+
|
| 502 |
+
def execute_instructions(plan, index=None, verbose=False, _commands=None): # pragma: no cover
|
| 503 |
+
"""Execute the instructions in the plan
|
| 504 |
+
|
| 505 |
+
:param plan: A list of (instruction, arg) tuples
|
| 506 |
+
:param index: The meta-data index
|
| 507 |
+
:param verbose: verbose output
|
| 508 |
+
:param _commands: (For testing only) dict mapping an instruction to executable if None
|
| 509 |
+
then the default commands will be used
|
| 510 |
+
"""
|
| 511 |
+
from .instructions import commands, PROGRESS_COMMANDS
|
| 512 |
+
from .base.context import context
|
| 513 |
+
from .models.dist import Dist
|
| 514 |
+
if _commands is None:
|
| 515 |
+
_commands = commands
|
| 516 |
+
|
| 517 |
+
log.debug("executing plan %s", plan)
|
| 518 |
+
|
| 519 |
+
state = {'i': None, 'prefix': context.root_prefix, 'index': index}
|
| 520 |
+
|
| 521 |
+
for instruction, arg in plan:
|
| 522 |
+
|
| 523 |
+
log.debug(' %s(%r)', instruction, arg)
|
| 524 |
+
|
| 525 |
+
if state['i'] is not None and instruction in PROGRESS_COMMANDS:
|
| 526 |
+
state['i'] += 1
|
| 527 |
+
getLogger('progress.update').info((Dist(arg).dist_name,
|
| 528 |
+
state['i'] - 1))
|
| 529 |
+
cmd = _commands[instruction]
|
| 530 |
+
|
| 531 |
+
if callable(cmd):
|
| 532 |
+
cmd(state, arg)
|
| 533 |
+
|
| 534 |
+
if (state['i'] is not None and instruction in PROGRESS_COMMANDS
|
| 535 |
+
and state['maxval'] == state['i']):
|
| 536 |
+
|
| 537 |
+
state['i'] = None
|
| 538 |
+
getLogger('progress.stop').info(None)
|
| 539 |
+
|
| 540 |
+
|
| 541 |
+
def _update_old_plan(old_plan): # pragma: no cover
|
| 542 |
+
"""
|
| 543 |
+
Update an old plan object to work with
|
| 544 |
+
`conda.instructions.execute_instructions`
|
| 545 |
+
"""
|
| 546 |
+
plan = []
|
| 547 |
+
for line in old_plan:
|
| 548 |
+
if line.startswith('#'):
|
| 549 |
+
continue
|
| 550 |
+
if ' ' not in line:
|
| 551 |
+
from .exceptions import ArgumentError
|
| 552 |
+
raise ArgumentError("The instruction '%s' takes at least"
|
| 553 |
+
" one argument" % line)
|
| 554 |
+
|
| 555 |
+
instruction, arg = line.split(' ', 1)
|
| 556 |
+
plan.append((instruction, arg))
|
| 557 |
+
return plan
|
| 558 |
+
|
| 559 |
+
|
| 560 |
+
if __name__ == '__main__':
|
| 561 |
+
# for testing new revert_actions() only
|
| 562 |
+
from pprint import pprint
|
| 563 |
+
pprint(dict(revert_actions(sys.prefix, int(sys.argv[1]))))
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/resolve.py
ADDED
|
@@ -0,0 +1,1463 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Copyright (C) 2012 Anaconda, Inc
|
| 3 |
+
# SPDX-License-Identifier: BSD-3-Clause
|
| 4 |
+
from __future__ import absolute_import, division, print_function, unicode_literals
|
| 5 |
+
|
| 6 |
+
from collections import defaultdict, OrderedDict, deque
|
| 7 |
+
import copy
|
| 8 |
+
from functools import lru_cache
|
| 9 |
+
from logging import DEBUG, getLogger
|
| 10 |
+
|
| 11 |
+
from .auxlib.decorators import memoizemethod
|
| 12 |
+
from ._vendor.frozendict import FrozenOrderedDict as frozendict
|
| 13 |
+
from ._vendor.toolz import concat, groupby
|
| 14 |
+
from ._vendor.tqdm import tqdm
|
| 15 |
+
from .base.constants import ChannelPriority, MAX_CHANNEL_PRIORITY, SatSolverChoice
|
| 16 |
+
from .base.context import context
|
| 17 |
+
from .common.compat import on_win
|
| 18 |
+
from .common.io import dashlist, time_recorder
|
| 19 |
+
from .common.logic import (Clauses, PycoSatSolver, PyCryptoSatSolver, PySatSolver, TRUE,
|
| 20 |
+
minimal_unsatisfiable_subset)
|
| 21 |
+
from .common.toposort import toposort
|
| 22 |
+
from .exceptions import (CondaDependencyError, InvalidSpec, ResolvePackageNotFound,
|
| 23 |
+
UnsatisfiableError)
|
| 24 |
+
from .models.channel import Channel, MultiChannel
|
| 25 |
+
from .models.enums import NoarchType, PackageType
|
| 26 |
+
from .models.match_spec import MatchSpec
|
| 27 |
+
from .models.records import PackageRecord
|
| 28 |
+
from .models.version import VersionOrder
|
| 29 |
+
|
| 30 |
+
log = getLogger(__name__)
|
| 31 |
+
stdoutlog = getLogger('conda.stdoutlog')
|
| 32 |
+
|
| 33 |
+
# used in conda build
|
| 34 |
+
Unsatisfiable = UnsatisfiableError
|
| 35 |
+
ResolvePackageNotFound = ResolvePackageNotFound
|
| 36 |
+
|
| 37 |
+
_sat_solvers = {
|
| 38 |
+
SatSolverChoice.PYCOSAT: PycoSatSolver,
|
| 39 |
+
SatSolverChoice.PYCRYPTOSAT: PyCryptoSatSolver,
|
| 40 |
+
SatSolverChoice.PYSAT: PySatSolver,
|
| 41 |
+
}
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@lru_cache(maxsize=None)
|
| 45 |
+
def _get_sat_solver_cls(sat_solver_choice=SatSolverChoice.PYCOSAT):
|
| 46 |
+
def try_out_solver(sat_solver):
|
| 47 |
+
c = Clauses(sat_solver=sat_solver)
|
| 48 |
+
required = {c.new_var(), c.new_var()}
|
| 49 |
+
c.Require(c.And, *required)
|
| 50 |
+
solution = set(c.sat())
|
| 51 |
+
if not required.issubset(solution):
|
| 52 |
+
raise RuntimeError("Wrong SAT solution: {}. Required: {}".format(solution, required))
|
| 53 |
+
|
| 54 |
+
sat_solver = _sat_solvers[sat_solver_choice]
|
| 55 |
+
try:
|
| 56 |
+
try_out_solver(sat_solver)
|
| 57 |
+
except Exception as e:
|
| 58 |
+
log.warning("Could not run SAT solver through interface '%s'.", sat_solver_choice)
|
| 59 |
+
log.debug("SAT interface error due to: %s", e, exc_info=True)
|
| 60 |
+
else:
|
| 61 |
+
log.debug("Using SAT solver interface '%s'.", sat_solver_choice)
|
| 62 |
+
return sat_solver
|
| 63 |
+
for solver_choice, sat_solver in _sat_solvers.items():
|
| 64 |
+
try:
|
| 65 |
+
try_out_solver(sat_solver)
|
| 66 |
+
except Exception as e:
|
| 67 |
+
log.debug("Attempted SAT interface '%s' but unavailable due to: %s",
|
| 68 |
+
sat_solver_choice, e)
|
| 69 |
+
else:
|
| 70 |
+
log.debug("Falling back to SAT solver interface '%s'.", sat_solver_choice)
|
| 71 |
+
return sat_solver
|
| 72 |
+
raise CondaDependencyError("Cannot run solver. No functioning SAT implementations available.")
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def exactness_and_number_of_deps(resolve_obj, ms):
|
| 76 |
+
"""Sorting key to emphasize packages that have more strict
|
| 77 |
+
requirements. More strict means the reduced index can be reduced
|
| 78 |
+
more, so we want to consider these more constrained deps earlier in
|
| 79 |
+
reducing the index."""
|
| 80 |
+
if ms.strictness == 3:
|
| 81 |
+
prec = resolve_obj.find_matches(ms)
|
| 82 |
+
value = 3
|
| 83 |
+
if prec:
|
| 84 |
+
for dep in prec[0].depends:
|
| 85 |
+
value += MatchSpec(dep).strictness
|
| 86 |
+
else:
|
| 87 |
+
value = ms.strictness
|
| 88 |
+
return value
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class Resolve(object):
|
| 92 |
+
|
| 93 |
+
def __init__(self, index, processed=False, channels=()):
|
| 94 |
+
self.index = index
|
| 95 |
+
|
| 96 |
+
self.channels = channels
|
| 97 |
+
self._channel_priorities_map = self._make_channel_priorities(channels) if channels else {}
|
| 98 |
+
self._channel_priority = context.channel_priority
|
| 99 |
+
self._solver_ignore_timestamps = context.solver_ignore_timestamps
|
| 100 |
+
|
| 101 |
+
groups = groupby("name", index.values())
|
| 102 |
+
trackers = defaultdict(list)
|
| 103 |
+
|
| 104 |
+
for name in groups:
|
| 105 |
+
unmanageable_precs = [prec for prec in groups[name] if prec.is_unmanageable]
|
| 106 |
+
if unmanageable_precs:
|
| 107 |
+
log.debug("restricting to unmanageable packages: %s", name)
|
| 108 |
+
groups[name] = unmanageable_precs
|
| 109 |
+
tf_precs = (prec for prec in groups[name] if prec.track_features)
|
| 110 |
+
for prec in tf_precs:
|
| 111 |
+
for feature_name in prec.track_features:
|
| 112 |
+
trackers[feature_name].append(prec)
|
| 113 |
+
|
| 114 |
+
self.groups = groups # Dict[package_name, List[PackageRecord]]
|
| 115 |
+
self.trackers = trackers # Dict[track_feature, Set[PackageRecord]]
|
| 116 |
+
self._cached_find_matches = {} # Dict[MatchSpec, Set[PackageRecord]]
|
| 117 |
+
self.ms_depends_ = {} # Dict[PackageRecord, List[MatchSpec]]
|
| 118 |
+
self._reduced_index_cache = {}
|
| 119 |
+
self._pool_cache = {}
|
| 120 |
+
self._strict_channel_cache = {}
|
| 121 |
+
|
| 122 |
+
self._system_precs = {_ for _ in index if (
|
| 123 |
+
hasattr(_, 'package_type') and _.package_type == PackageType.VIRTUAL_SYSTEM)}
|
| 124 |
+
|
| 125 |
+
# sorting these in reverse order is effectively prioritizing
|
| 126 |
+
# constraint behavior from newer packages. It is applying broadening
|
| 127 |
+
# reduction based on the latest packages, which may reduce the space
|
| 128 |
+
# more, because more modern packages utilize constraints in more sane
|
| 129 |
+
# ways (for example, using run_exports in conda-build 3)
|
| 130 |
+
for name, group in self.groups.items():
|
| 131 |
+
self.groups[name] = sorted(group, key=self.version_key, reverse=True)
|
| 132 |
+
|
| 133 |
+
def __hash__(self):
|
| 134 |
+
return (super(Resolve, self).__hash__() ^
|
| 135 |
+
hash(frozenset(self.channels)) ^
|
| 136 |
+
hash(frozendict(self._channel_priorities_map)) ^
|
| 137 |
+
hash(self._channel_priority) ^
|
| 138 |
+
hash(self._solver_ignore_timestamps) ^
|
| 139 |
+
hash(frozendict((k, tuple(v)) for k, v in self.groups.items())) ^
|
| 140 |
+
hash(frozendict((k, tuple(v)) for k, v in self.trackers.items())) ^
|
| 141 |
+
hash(frozendict((k, tuple(v)) for k, v in self.ms_depends_.items()))
|
| 142 |
+
)
|
| 143 |
+
|
| 144 |
+
def default_filter(self, features=None, filter=None):
|
| 145 |
+
# TODO: fix this import; this is bad
|
| 146 |
+
from .core.subdir_data import make_feature_record
|
| 147 |
+
|
| 148 |
+
if filter is None:
|
| 149 |
+
filter = {}
|
| 150 |
+
else:
|
| 151 |
+
filter.clear()
|
| 152 |
+
|
| 153 |
+
filter.update({make_feature_record(fstr): False for fstr in self.trackers.keys()})
|
| 154 |
+
if features:
|
| 155 |
+
filter.update({make_feature_record(fstr): True for fstr in features})
|
| 156 |
+
return filter
|
| 157 |
+
|
| 158 |
+
def valid(self, spec_or_prec, filter, optional=True):
|
| 159 |
+
"""Tests if a package, MatchSpec, or a list of both has satisfiable
|
| 160 |
+
dependencies, assuming cyclic dependencies are always valid.
|
| 161 |
+
|
| 162 |
+
Args:
|
| 163 |
+
spec_or_prec: a package record, a MatchSpec, or an iterable of these.
|
| 164 |
+
filter: a dictionary of (fkey,valid) pairs, used to consider a subset
|
| 165 |
+
of dependencies, and to eliminate repeated searches.
|
| 166 |
+
optional: if True (default), do not enforce optional specifications
|
| 167 |
+
when considering validity. If False, enforce them.
|
| 168 |
+
|
| 169 |
+
Returns:
|
| 170 |
+
True if the full set of dependencies can be satisfied; False otherwise.
|
| 171 |
+
If filter is supplied and update is True, it will be updated with the
|
| 172 |
+
search results.
|
| 173 |
+
"""
|
| 174 |
+
def v_(spec):
|
| 175 |
+
return v_ms_(spec) if isinstance(spec, MatchSpec) else v_fkey_(spec)
|
| 176 |
+
|
| 177 |
+
def v_ms_(ms):
|
| 178 |
+
return (optional and ms.optional
|
| 179 |
+
or any(v_fkey_(fkey) for fkey in self.find_matches(ms)))
|
| 180 |
+
|
| 181 |
+
def v_fkey_(prec):
|
| 182 |
+
val = filter.get(prec)
|
| 183 |
+
if val is None:
|
| 184 |
+
filter[prec] = True
|
| 185 |
+
try:
|
| 186 |
+
depends = self.ms_depends(prec)
|
| 187 |
+
except InvalidSpec:
|
| 188 |
+
val = filter[prec] = False
|
| 189 |
+
else:
|
| 190 |
+
val = filter[prec] = all(v_ms_(ms) for ms in depends)
|
| 191 |
+
return val
|
| 192 |
+
|
| 193 |
+
result = v_(spec_or_prec)
|
| 194 |
+
return result
|
| 195 |
+
|
| 196 |
+
def valid2(self, spec_or_prec, filter_out, optional=True):
    """Test whether a MatchSpec or package record has satisfiable dependencies.

    Like ``valid``, but records a human-readable *reason* string in
    ``filter_out`` for each invalid package record instead of a plain bool.

    Args:
        spec_or_prec: a MatchSpec or a package record.
        filter_out: dict of (prec -> False | reason-string); mutated in place
            and also used to short-circuit repeated searches.
        optional: if True (default), optional specs are treated as valid
            without being enforced.

    Returns:
        True if the full dependency set can be satisfied, False otherwise.
    """
    def is_valid(_spec_or_prec):
        # Dispatch on the argument kind; both helpers below close over
        # self / filter_out / optional.
        if isinstance(_spec_or_prec, MatchSpec):
            return is_valid_spec(_spec_or_prec)
        else:
            return is_valid_prec(_spec_or_prec)

    @memoizemethod
    def is_valid_spec(_spec):
        # A spec is valid if it is optional (and optionality is honored),
        # or if at least one matching package record is valid.
        return optional and _spec.optional or any(
            is_valid_prec(_prec) for _prec in self.find_matches(_spec)
        )

    def is_valid_prec(prec):
        val = filter_out.get(prec)
        if val is None:
            # Pre-seed with False (i.e. "valid") before recursing so that
            # cyclic dependencies terminate and count as valid.
            filter_out[prec] = False
            try:
                has_valid_deps = all(is_valid_spec(ms) for ms in self.ms_depends(prec))
            except InvalidSpec:
                val = filter_out[prec] = "invalid dep specs"
            else:
                # filter_out stores a falsy value for "valid" and a reason
                # string for "invalid".
                val = filter_out[prec] = False if has_valid_deps else "invalid depends specs"
        return not val

    return is_valid(spec_or_prec)
|
| 222 |
+
|
| 223 |
+
def invalid_chains(self, spec, filter, optional=True):
    """Constructs a set of 'dependency chains' for invalid specs.

    A dependency chain is a tuple of MatchSpec objects, starting with
    the requested spec, proceeding down the dependency tree, ending at
    a specification that cannot be satisfied.

    Args:
        spec: a package key or MatchSpec
        filter: a dictionary of (prec, valid) pairs to be used when
            testing for package validity.

    Returns:
        A tuple of tuples, empty if the MatchSpec is valid.
    """
    def chains_(spec, names):
        # Generator; yields one chain per unsatisfiable leaf reachable
        # from `spec`. `names` guards against revisiting a package name
        # (and thus against infinite recursion on cycles).
        if spec.name in names:
            return
        names.add(spec.name)
        if self.valid(spec, filter, optional):
            return
        precs = self.find_matches(spec)
        found = False

        conflict_deps = set()
        for prec in precs:
            for m2 in self.ms_depends(prec):
                for x in chains_(m2, names):
                    found = True
                    yield (spec,) + x
                else:
                    # NOTE: this `else` belongs to the inner `for`, which
                    # never breaks, so every dependency spec is recorded
                    # as a potential conflict regardless of recursion.
                    conflict_deps.add(m2)
        if not found:
            # No deeper invalid chain was found: report the (name-merged)
            # conflicting dependency specs directly below `spec`.
            conflict_groups = groupby(lambda x: x.name, conflict_deps)
            for group in conflict_groups.values():
                # NOTE(review): assumes MatchSpec.union(group) yields a
                # tuple-compatible sequence for concatenation — confirm.
                yield (spec,) + MatchSpec.union(group)

    return chains_(spec, set())
|
| 261 |
+
|
| 262 |
+
def verify_specs(self, specs):
    """Quickly verify that the given specs have at least one candidate each.

    Args:
        specs: An iterable of strings or MatchSpec objects to be tested.

    Returns:
        A 2-tuple of (tuple of non-track_features specs, set of feature
        names collected from track_features specs).

    Raises:
        ResolvePackageNotFound: if any non-optional, non-feature spec has
            no matching package at all.

    Note that this does not attempt to resolve circular dependencies.
    """
    non_tf_specs = []
    feature_names = set()
    for ms in specs:
        # track_features specs contribute feature names instead of packages
        tf = ms.get_exact_value('track_features')
        if tf:
            feature_names.update(tf)
        else:
            non_tf_specs.append(ms)
    bad_deps = [(spec,) for spec in non_tf_specs
                if not spec.optional and not self.find_matches(spec)]
    if bad_deps:
        raise ResolvePackageNotFound(bad_deps)
    return tuple(non_tf_specs), feature_names
|
| 287 |
+
|
| 288 |
+
def _classify_bad_deps(self, bad_deps, specs_to_add, history_specs, strict_channel_priority):
    """Bucket conflict chains by their likely cause for error reporting.

    Args:
        bad_deps: iterable of dependency chains (sequences of MatchSpecs,
            root request first, conflicting leaf last).
        specs_to_add: specs the user asked to add (or None).
        history_specs: specs recorded in environment history (or None).
        strict_channel_priority: currently unused here — NOTE(review):
            kept for interface compatibility; confirm before removing.

    Returns:
        dict with keys 'python', 'request_conflict_with_history',
        'direct', 'virtual_package', each mapping to a collection of
        (chain, descriptive-spec-string) pairs.
    """
    classes = {'python': set(),
               'request_conflict_with_history': set(),
               'direct': set(),
               'virtual_package': set(),
               }
    specs_to_add = set(MatchSpec(_) for _ in specs_to_add or [])
    history_specs = set(MatchSpec(_) for _ in history_specs or [])
    for chain in bad_deps:
        # sometimes chains come in as strings
        # Case 1: chain bottoms out at python, python was not explicitly
        # requested, but some chain is rooted at a python spec.
        if len(chain) > 1 and chain[-1].name == 'python' and \
                not any(_.name == 'python' for _ in specs_to_add) and \
                any(_[0] for _ in bad_deps if _[0].name == 'python'):
            python_first_specs = [_[0] for _ in bad_deps if _[0].name == 'python']
            if python_first_specs:
                python_spec = python_first_specs[0]
                # Only a real conflict if the pinned python and the chain's
                # python requirement share no candidate packages.
                if not (set(self.find_matches(python_spec)) &
                        set(self.find_matches(chain[-1]))):
                    classes['python'].add((tuple([chain[0], chain[-1]]),
                                           str(MatchSpec(python_spec, target=None))))
        # Case 2: leaf is a virtual package (name starts with "__").
        elif chain[-1].name.startswith('__'):
            version = [_ for _ in self._system_precs if _.name == chain[-1].name]
            virtual_package_version = version[0].version if version else "not available"
            classes['virtual_package'].add((tuple(chain), virtual_package_version))
        # Case 3: chain is rooted at a spec the user asked to add.
        elif chain[0] in specs_to_add:
            match = False
            for spec in history_specs:
                if spec.name == chain[-1].name:
                    classes['request_conflict_with_history'].add((
                        tuple(chain), str(MatchSpec(spec, target=None))))
                    match = True

            if not match:
                classes['direct'].add((tuple(chain), str(MatchSpec(chain[0], target=None))))
        # Case 4: everything else that still looks like a real chain.
        else:
            if len(chain) > 1 or any(len(c) >= 1 and c[0] == chain[0] for c in bad_deps):
                classes['direct'].add((tuple(chain),
                                       str(MatchSpec(chain[0], target=None))))

    if classes['python']:
        # filter out plain single-entry python conflicts. The python section explains these.
        classes['direct'] = [_ for _ in classes['direct']
                             if _[1].startswith('python ') or len(_[0]) > 1]
    return classes
|
| 332 |
+
|
| 333 |
+
def find_matches_with_strict(self, ms, strict_channel_priority):
    """Like find_matches, but under strict channel priority restrict results
    to the single highest-priority channel that carries ms.name."""
    matches = self.find_matches(ms)
    if strict_channel_priority:
        only_channel = self._get_strict_channel(ms.name)
        matches = tuple(prec for prec in matches
                        if prec.channel.name == only_channel)
    return matches
|
| 339 |
+
|
| 340 |
+
def find_conflicts(self, specs, specs_to_add=None, history_specs=None):
    """Terminal analysis for an unsatisfiable solve.

    Optionally (per context.unsatisfiable_hints) builds a detailed conflict
    map first, then always raises UnsatisfiableError — this method never
    returns normally.
    """
    if not context.unsatisfiable_hints:
        bad_deps = {}
    else:
        if not context.json:
            print("\nFound conflicts! Looking for incompatible packages.\n"
                  "This can take several minutes. Press CTRL-C to abort.")
        bad_deps = self.build_conflict_map(specs, specs_to_add, history_specs)
    strict_channel_priority = context.channel_priority == ChannelPriority.STRICT
    raise UnsatisfiableError(bad_deps, strict=strict_channel_priority)
|
| 350 |
+
|
| 351 |
+
def breadth_first_search_for_dep_graph(self, root_spec, target_name, dep_graph, num_targets=1):
    """Return shortest path(s) from root_spec to target_name.

    Performs a BFS over the nested-dict graph produced by
    build_graph_of_deps. Equal-length paths to the target are merged by
    unioning their final MatchSpec rather than stored separately.

    Args:
        root_spec: MatchSpec at which the search starts.
        target_name: package name to reach.
        dep_graph: nested dict {spec: {child_spec: {...}}}.
        num_targets: number of distinct shortest paths to collect.

    Returns:
        A list of paths (each a list of MatchSpecs), possibly empty.
    """
    queue = []
    queue.append([root_spec])
    visited = []
    target_paths = []
    while queue:
        path = queue.pop(0)
        node = path[-1]
        if node in visited:
            continue
        visited.append(node)
        if node.name == target_name:
            if len(target_paths) == 0:
                target_paths.append(path)
            if len(target_paths[-1]) == len(path):
                # Same length as the last recorded path: merge the two
                # terminal specs into one unioned spec instead of keeping both.
                last_spec = MatchSpec.union((path[-1], target_paths[-1][-1]))[0]
                target_paths[-1][-1] = last_spec
            else:
                target_paths.append(path)

            found_all_targets = len(target_paths) == num_targets and \
                any(len(_) != len(path) for _ in queue)
            if len(queue) == 0 or found_all_targets:
                return target_paths
        # Walk the nested dict down to this node's children.
        sub_graph = dep_graph
        for p in path[0:-1]:
            sub_graph = sub_graph[p]
        children = [_ for _ in sub_graph.get(node, {})]
        if children is None:
            # NOTE(review): unreachable — the comprehension above always
            # yields a list, never None.
            continue
        for adj in children:
            if len(target_paths) < num_targets:
                new_path = list(path)
                new_path.append(adj)
                queue.append(new_path)
    return target_paths
|
| 388 |
+
|
| 389 |
+
def build_graph_of_deps(self, spec):
    """Expand *spec*'s dependency tree breadth-first.

    Returns a 2-tuple of (nested-dict graph rooted at spec, set of every
    dependency MatchSpec encountered). Expansion is capped at
    context.unsatisfiable_hints_check_depth levels.
    """
    dep_graph = {spec: {}}
    all_deps = set()
    queue = deque([[spec]])
    while queue:
        path = queue.popleft()
        # Walk down the nested dict to the children-dict this path points at.
        children = dep_graph
        for step in path:
            children = children[step]
        for match in self.find_matches(path[-1]):
            for dep_str in match.depends:
                dep_spec = MatchSpec(dep_str)
                children[dep_spec] = {}
                all_deps.add(dep_spec)
                extended = list(path)
                extended.append(dep_spec)
                if len(extended) <= context.unsatisfiable_hints_check_depth:
                    queue.append(extended)
    return dep_graph, all_deps
|
| 411 |
+
|
| 412 |
+
def build_conflict_map(self, specs, specs_to_add=None, history_specs=None):
    """Perform a deeper analysis on conflicting specifications, by attempting
    to find the common dependencies that might be the cause of conflicts.

    Args:
        specs: An iterable of strings or MatchSpec objects to be tested.
            It is assumed that the specs conflict.

    Returns:
        bad_deps: A list of lists of bad deps

    Strategy:
        If we're here, we know that the specs conflict. This could be because:
        - One spec conflicts with another; e.g.
              ['numpy 1.5*', 'numpy >=1.6']
        - One spec conflicts with a dependency of another; e.g.
              ['numpy 1.5*', 'scipy 0.12.0b1']
        - Each spec depends on *the same package* but in a different way; e.g.,
              ['A', 'B'] where A depends on numpy 1.5, and B on numpy 1.6.
        Technically, all three of these cases can be boiled down to the last
        one if we treat the spec itself as one of the "dependencies". There
        might be more complex reasons for a conflict, but this code only
        considers the ones above.

        The purpose of this code, then, is to identify packages (like numpy
        above) that all of the specs depend on *but in different ways*. We
        then identify the dependency chains that lead to those packages.
    """
    # if only a single package matches the spec use the packages depends
    # rather than the spec itself
    strict_channel_priority = context.channel_priority == ChannelPriority.STRICT

    specs = set(specs) | (specs_to_add or set())
    # Remove virtual packages
    specs = set([spec for spec in specs if not spec.name.startswith('__')])
    if len(specs) == 1:
        matches = self.find_matches(next(iter(specs)))
        if len(matches) == 1:
            specs = set(self.ms_depends(matches[0]))
    specs.update({_.to_match_spec() for _ in self._system_precs})
    for spec in specs:
        self._get_package_pool((spec, ))

    dep_graph = {}
    dep_list = {}
    # Phase 1: build a merged dependency graph and, per package name, the
    # list of root specs whose trees mention that name.
    with tqdm(total=len(specs), desc="Building graph of deps",
              leave=False, disable=context.json) as t:
        for spec in specs:
            t.set_description("Examining {}".format(spec))
            t.update()
            dep_graph_for_spec, all_deps_for_spec = self.build_graph_of_deps(spec)
            dep_graph.update(dep_graph_for_spec)
            if dep_list.get(spec.name):
                dep_list[spec.name].append(spec)
            else:
                dep_list[spec.name] = [spec]
            for dep in all_deps_for_spec:
                if dep_list.get(dep.name):
                    dep_list[dep.name].append(spec)
                else:
                    dep_list[dep.name] = [spec]

    chains = []
    conflicting_pkgs_pkgs = {}
    # Phase 2: group package names by the set of roots that need them;
    # names needed by more than one root are conflict candidates.
    for k, v in dep_list.items():
        set_v = frozenset(v)
        # Packages probably conflicts if many specs depend on it
        if len(set_v) > 1:
            if conflicting_pkgs_pkgs.get(set_v) is None:
                conflicting_pkgs_pkgs[set_v] = [k]
            else:
                conflicting_pkgs_pkgs[set_v].append(k)
        # Conflict if required virtual package is not present
        elif k.startswith("__") and any(s for s in set_v if s.name != k):
            conflicting_pkgs_pkgs[set_v] = [k]

    # Phase 3: for each conflict group, collect shortest dependency chains
    # from each root down to the conflicting package name.
    with tqdm(total=len(specs), desc="Determining conflicts",
              leave=False, disable=context.json) as t:
        for roots, nodes in conflicting_pkgs_pkgs.items():
            t.set_description("Examining conflict for {}".format(
                " ".join(_.name for _ in roots)))
            t.update()
            lroots = [_ for _ in roots]
            current_shortest_chain = []
            shortest_node = None
            requested_spec_unsat = frozenset(nodes).intersection(set(_.name for _ in roots))
            if requested_spec_unsat:
                # A requested spec itself is among the conflicting names.
                chains.append([_ for _ in roots if _.name in requested_spec_unsat])
                shortest_node = chains[-1][0]
                for root in roots:
                    if root != chains[0][0]:
                        search_node = shortest_node.name
                        num_occurances = dep_list[search_node].count(root)
                        c = self.breadth_first_search_for_dep_graph(
                            root, search_node, dep_graph, num_occurances)
                        chains.extend(c)
            else:
                # Search from the first root to each conflicting name,
                # remembering which name gave the shortest chain...
                for node in nodes:
                    num_occurances = dep_list[node].count(lroots[0])
                    chain = self.breadth_first_search_for_dep_graph(
                        lroots[0], node, dep_graph, num_occurances)
                    chains.extend(chain)
                    if len(current_shortest_chain) == 0 or \
                            len(chain) < len(current_shortest_chain):
                        current_shortest_chain = chain
                        shortest_node = node
                # ...then search from every other root to that name only.
                for root in lroots[1:]:
                    num_occurances = dep_list[shortest_node].count(root)
                    c = self.breadth_first_search_for_dep_graph(
                        root, shortest_node, dep_graph, num_occurances)
                    chains.extend(c)

    bad_deps = self._classify_bad_deps(chains, specs_to_add, history_specs,
                                       strict_channel_priority)
    return bad_deps
|
| 527 |
+
|
| 528 |
+
def _get_strict_channel(self, package_name):
|
| 529 |
+
channel_name = None
|
| 530 |
+
try:
|
| 531 |
+
channel_name = self._strict_channel_cache[package_name]
|
| 532 |
+
except KeyError:
|
| 533 |
+
if package_name in self.groups:
|
| 534 |
+
all_channel_names = set(prec.channel.name for prec in self.groups[package_name])
|
| 535 |
+
by_cp = {self._channel_priorities_map.get(cn, 1): cn for cn in all_channel_names}
|
| 536 |
+
highest_priority = sorted(by_cp)[0] # highest priority is the lowest number
|
| 537 |
+
channel_name = self._strict_channel_cache[package_name] = by_cp[highest_priority]
|
| 538 |
+
return channel_name
|
| 539 |
+
|
| 540 |
+
@memoizemethod
def _broader(self, ms, specs_by_name):
    """Return True when *ms* is looser (lower strictness) than the first spec
    already recorded for its package name — i.e. it would broaden our
    selection of choices. An empty *specs_by_name* never broadens anything.
    """
    return bool(specs_by_name) and ms.strictness < specs_by_name[0].strictness
|
| 546 |
+
|
| 547 |
+
def _get_package_pool(self, specs):
    """Return {package_name: set(precs)} reachable from *specs*.

    Computes the reduced index on a cache miss and memoizes the result
    keyed by the frozenset of specs.
    """
    key = frozenset(specs)
    try:
        return self._pool_cache[key]
    except KeyError:
        pass
    reduced_index = self.get_reduced_index(key)
    pool = {name: set(precs)
            for name, precs in groupby(lambda x: x.name, reduced_index).items()}
    self._pool_cache[key] = pool
    return pool
|
| 557 |
+
|
| 558 |
+
@time_recorder(module_name=__name__)
def get_reduced_index(self, explicit_specs, sort_by_exactness=True, exit_on_conflict=False):
    """Compute the subset of the package index relevant to *explicit_specs*.

    Iteratively prunes packages that cannot participate in any solution
    (wrong channel under strict priority, non-matching, or with
    unsatisfiable dependencies), then walks the remaining dependency graph
    to collect every still-valid package record.

    Args:
        explicit_specs: iterable of MatchSpecs to solve for.
        sort_by_exactness: process more-exact specs first.
        exit_on_conflict: return {} early if any explicit spec pruned to zero.

    Returns:
        frozendict mapping each surviving PackageRecord to itself; the
        result is memoized in self._reduced_index_cache.
    """
    # TODO: fix this import; this is bad
    from .core.subdir_data import make_feature_record

    strict_channel_priority = context.channel_priority == ChannelPriority.STRICT

    cache_key = strict_channel_priority, tuple(explicit_specs)
    if cache_key in self._reduced_index_cache:
        return self._reduced_index_cache[cache_key]

    if log.isEnabledFor(DEBUG):
        log.debug('Retrieving packages for: %s', dashlist(
            sorted(str(s) for s in explicit_specs)))

    explicit_specs, features = self.verify_specs(explicit_specs)
    # filter_out maps prec -> False (keep) or a reason string (pruned).
    filter_out = {prec: False if val else "feature not enabled"
                  for prec, val in self.default_filter(features).items()}
    snames = set()
    top_level_spec = None
    cp_filter_applied = set()  # values are package names
    if sort_by_exactness:
        # prioritize specs that are more exact. Exact specs will evaluate to 3,
        # constrained specs will evaluate to 2, and name only will be 1
        explicit_specs = sorted(list(explicit_specs), key=lambda x: (
            exactness_and_number_of_deps(self, x), x.dist_str()), reverse=True)
    # tuple because it needs to be hashable
    explicit_specs = tuple(explicit_specs)

    explicit_spec_package_pool = {}
    for s in explicit_specs:
        explicit_spec_package_pool[s.name] = explicit_spec_package_pool.get(
            s.name, set()) | set(self.find_matches(s))

    def filter_group(_specs):
        # Prune one package-name group; returns True if anything was pruned,
        # None if the group was pruned to zero (a conflict), False otherwise.
        # all _specs should be for the same package name
        name = next(iter(_specs)).name
        group = self.groups.get(name, ())

        # implement strict channel priority
        if group and strict_channel_priority and name not in cp_filter_applied:
            sole_source_channel_name = self._get_strict_channel(name)
            for prec in group:
                if prec.channel.name != sole_source_channel_name:
                    filter_out[prec] = "removed due to strict channel priority"
            cp_filter_applied.add(name)

        # Prune packages that don't match any of the patterns,
        # have unsatisfiable dependencies, or conflict with the explicit specs
        nold = nnew = 0
        for prec in group:
            if not filter_out.setdefault(prec, False):
                nold += 1
                if (not self.match_any(_specs, prec)) or (
                        explicit_spec_package_pool.get(name) and
                        prec not in explicit_spec_package_pool[name]):
                    filter_out[prec] = "incompatible with required spec %s" % top_level_spec
                    continue
                unsatisfiable_dep_specs = set()
                for ms in self.ms_depends(prec):
                    if not ms.optional and not any(
                            rec for rec in self.find_matches(ms)
                            if not filter_out.get(rec, False)):
                        unsatisfiable_dep_specs.add(ms)
                if unsatisfiable_dep_specs:
                    filter_out[prec] = "unsatisfiable dependencies %s" % " ".join(
                        str(s) for s in unsatisfiable_dep_specs
                    )
                    continue
                filter_out[prec] = False
                nnew += 1

        reduced = nnew < nold
        if reduced:
            log.debug('%s: pruned from %d -> %d' % (name, nold, nnew))
        if any(ms.optional for ms in _specs):
            return reduced
        elif nnew == 0:
            # Indicates that a conflict was found; we can exit early
            return None

        # Perform the same filtering steps on any dependencies shared across
        # *all* packages in the group. Even if just one of the packages does
        # not have a particular dependency, it must be ignored in this pass.
        # Otherwise, we might do more filtering than we should---and it is
        # better to have extra packages here than missing ones.
        if reduced or name not in snames:
            snames.add(name)

            _dep_specs = groupby(lambda s: s.name, (
                dep_spec
                for prec in group if not filter_out.get(prec, False)
                for dep_spec in self.ms_depends(prec) if not dep_spec.optional
            ))
            _dep_specs.pop("*", None)  # discard track_features specs

            for deps_name, deps in sorted(_dep_specs.items(),
                                          key=lambda x: any(_.optional for _ in x[1])):
                if len(deps) >= nnew:
                    res = filter_group(set(deps))
                    if res:
                        reduced = True
                    elif res is None:
                        # Indicates that a conflict was found; we can exit early
                        return None

        return reduced

    # Iterate on pruning until no progress is made. We've implemented
    # what amounts to "double-elimination" here; packages get one additional
    # chance after their first "False" reduction. This catches more instances
    # where one package's filter affects another. But we don't have to be
    # perfect about this, so performance matters.
    pruned_to_zero = set()
    for _ in range(2):
        snames.clear()
        slist = deque(explicit_specs)
        while slist:
            s = slist.popleft()
            if filter_group([s]):
                slist.append(s)
            else:
                pruned_to_zero.add(s)

    if pruned_to_zero and exit_on_conflict:
        return {}

    # Determine all valid packages in the dependency graph
    reduced_index2 = {prec: prec for prec in (make_feature_record(fstr) for fstr in features)}
    specs_by_name_seed = OrderedDict()
    for s in explicit_specs:
        specs_by_name_seed[s.name] = specs_by_name_seed.get(s.name, list()) + [s]
    for explicit_spec in explicit_specs:
        add_these_precs2 = tuple(
            prec for prec in self.find_matches(explicit_spec)
            if prec not in reduced_index2 and self.valid2(prec, filter_out))

        if strict_channel_priority and add_these_precs2:
            strict_channel_name = self._get_strict_channel(add_these_precs2[0].name)

            add_these_precs2 = tuple(
                prec for prec in add_these_precs2 if prec.channel.name == strict_channel_name
            )
        reduced_index2.update((prec, prec) for prec in add_these_precs2)

        for pkg in add_these_precs2:
            # what we have seen is only relevant within the context of a single package
            # that is picked up because of an explicit spec. We don't want the
            # broadening check to apply across packages at the explicit level; only
            # at the level of deps below that explicit package.
            seen_specs = set()
            specs_by_name = copy.deepcopy(specs_by_name_seed)

            dep_specs = set(self.ms_depends(pkg))
            for dep in dep_specs:
                specs = specs_by_name.get(dep.name, list())
                if dep not in specs and (not specs or dep.strictness >= specs[0].strictness):
                    specs.insert(0, dep)
                specs_by_name[dep.name] = specs

            while dep_specs:
                # used for debugging
                # size_index = len(reduced_index2)
                # specs_added = []
                ms = dep_specs.pop()
                seen_specs.add(ms)
                for dep_pkg in (_ for _ in self.find_matches(ms) if _ not in reduced_index2):
                    if not self.valid2(dep_pkg, filter_out):
                        continue

                    # expand the reduced index if not using strict channel priority,
                    # or if using it and this package is in the appropriate channel
                    if (not strict_channel_priority or
                            (self._get_strict_channel(dep_pkg.name) ==
                             dep_pkg.channel.name)):
                        reduced_index2[dep_pkg] = dep_pkg

                        # recurse to deps of this dep
                        new_specs = set(self.ms_depends(dep_pkg)) - seen_specs
                        for new_ms in new_specs:
                            # We do not pull packages into the reduced index due
                            # to a track_features dependency. Remember, a feature
                            # specifies a "soft" dependency: it must be in the
                            # environment, but it is not _pulled_ in. The SAT
                            # logic doesn't do a perfect job of capturing this
                            # behavior, but keeping these packags out of the
                            # reduced index helps. Of course, if _another_
                            # package pulls it in by dependency, that's fine.
                            if ('track_features' not in new_ms and not self._broader(
                                    new_ms, tuple(specs_by_name.get(new_ms.name, tuple())))):
                                dep_specs.add(new_ms)
                                # if new_ms not in dep_specs:
                                #     specs_added.append(new_ms)
                            else:
                                seen_specs.add(new_ms)
                # debugging info - see what specs are bringing in the largest blobs
                # if size_index != len(reduced_index2):
                #     print("MS {} added {} pkgs to index".format(ms,
                #           len(reduced_index2) - size_index))
                # if specs_added:
                #     print("MS {} added {} specs to further examination".format(ms,
                #           specs_added))

    reduced_index2 = frozendict(reduced_index2)
    self._reduced_index_cache[cache_key] = reduced_index2
    return reduced_index2
|
| 764 |
+
|
| 765 |
+
def match_any(self, mss, prec):
    """Return True if *prec* satisfies at least one MatchSpec in *mss*."""
    for candidate in mss:
        if candidate.match(prec):
            return True
    return False
|
| 767 |
+
|
| 768 |
+
def find_matches(self, spec):
    # type: (MatchSpec) -> Set[PackageRecord]
    """Return all package records matching *spec*, memoized per spec.

    Candidate selection narrows by exact name first, then by tracked
    features, and only falls back to scanning the whole index.
    """
    cached = self._cached_find_matches.get(spec)
    if cached is not None:
        return cached

    name = spec.get_exact_value('name')
    if name:
        candidates = self.groups.get(name, ())
    else:
        feature_names = spec.get_exact_value('track_features')
        if feature_names:
            candidates = concat(
                self.trackers.get(feature_name, ()) for feature_name in feature_names
            )
        else:
            candidates = self.index.values()

    matches = tuple(prec for prec in candidates if spec.match(prec))
    self._cached_find_matches[spec] = matches
    return matches
|
| 788 |
+
|
| 789 |
+
def ms_depends(self, prec):
    # type: (PackageRecord) -> List[MatchSpec]
    """Return *prec*'s dependencies as MatchSpecs (combined depends plus one
    track_features spec per feature), memoized in self.ms_depends_."""
    cached = self.ms_depends_.get(prec)
    if cached is not None:
        return cached
    specs = [MatchSpec(dep) for dep in prec.combined_depends]
    specs.extend(MatchSpec(track_features=feature) for feature in prec.features)
    self.ms_depends_[prec] = specs
    return specs
|
| 797 |
+
|
| 798 |
+
def version_key(self, prec, vtype=None):
    """Build the sort key used to rank candidate package records.

    Higher keys sort as "better". Components, in order: channel validity,
    channel priority (negated so lower numbers win) and version — their
    relative order depends on whether channel priority is disabled — then
    build number, noarch preference, and (unless ignoring timestamps)
    timestamp and build string as tie-breakers.

    NOTE(review): the *vtype* parameter is unused here — presumably kept
    for interface compatibility; confirm before removing.
    """
    channel = prec.channel
    channel_priority = self._channel_priorities_map.get(channel.name, 1)  # TODO: ask @mcg1969 why the default value is 1 here  # NOQA
    # A channel at or beyond MAX_CHANNEL_PRIORITY is considered invalid.
    valid = 1 if channel_priority < MAX_CHANNEL_PRIORITY else 0
    version_comparator = VersionOrder(prec.get('version', ''))
    build_number = prec.get('build_number', 0)
    build_string = prec.get('build')
    # noarch packages sort slightly below arch-specific ones (-1 vs 0).
    noarch = - int(prec.subdir == 'noarch')
    if self._channel_priority != ChannelPriority.DISABLED:
        # Channel priority dominates version.
        vkey = [valid, -channel_priority, version_comparator, build_number, noarch]
    else:
        # Version dominates channel priority.
        vkey = [valid, version_comparator, -channel_priority, build_number, noarch]
    if self._solver_ignore_timestamps:
        vkey.append(build_string)
    else:
        vkey.extend((prec.get('timestamp', 0), build_string))
    return vkey
|
| 815 |
+
|
| 816 |
+
@staticmethod
def _make_channel_priorities(channels):
    """Map channel names to priority numbers (0 = highest).

    MultiChannels are flattened into their member channels. The first
    occurrence of a name wins; the counter still advances for duplicates.
    Priorities are capped at MAX_CHANNEL_PRIORITY - 1.
    """
    priorities_map = {}
    priority_counter = 0
    for channel in (Channel(c) for c in channels):
        if isinstance(channel, MultiChannel):
            members = (Channel(cc) for cc in channel._channels)
        else:
            members = (channel,)
        for chn in members:
            channel_name = chn.name
            if channel_name not in priorities_map:
                priorities_map[channel_name] = min(priority_counter,
                                                   MAX_CHANNEL_PRIORITY - 1)
            priority_counter += 1
    return priorities_map
|
| 828 |
+
|
| 829 |
+
def get_pkgs(self, ms, emptyok=False):  # pragma: no cover
    """Legacy conda-build helper: matching precs sorted by version_key.

    Raises ResolvePackageNotFound when nothing matches and *emptyok* is
    False.
    """
    ms = MatchSpec(ms)
    matches = self.find_matches(ms)
    if matches or emptyok:
        return sorted(matches, key=self.version_key)
    raise ResolvePackageNotFound([(ms,)])
|
| 836 |
+
|
| 837 |
+
@staticmethod
def to_sat_name(val):
    """Map a PackageRecord or MatchSpec to its SAT variable name."""
    # val can be a PackageRecord or MatchSpec
    if isinstance(val, PackageRecord):
        return val.dist_str()
    if isinstance(val, MatchSpec):
        suffix = '?' if val.optional else ''
        return '@s@' + str(val) + suffix
    raise NotImplementedError()
|
| 846 |
+
|
| 847 |
+
@staticmethod
def to_feature_metric_id(prec_dist_str, feat):
    """SAT variable name for the (installed prec, active feature) metric."""
    return '@fm@{}@{}'.format(prec_dist_str, feat)
|
| 850 |
+
|
| 851 |
+
def push_MatchSpec(self, C, spec):
    """Register *spec* as a named SAT variable in clause set *C*.

    Creates (or reuses) a boolean variable that is true iff some package
    satisfying the spec is installed, and returns the variable's name.

    Args:
        C: the Clauses instance being built.
        spec: a MatchSpec (or anything MatchSpec() accepts).

    Returns:
        The SAT variable name (string) for the spec.
    """
    spec = MatchSpec(spec)
    sat_name = self.to_sat_name(spec)
    m = C.from_name(sat_name)
    if m is not None:
        # the spec has already been pushed onto the clauses stack
        return sat_name

    simple = spec._is_single()
    nm = spec.get_exact_value('name')
    tf = frozenset(_tf for _tf in (
        f.strip() for f in spec.get_exact_value('track_features') or ()
    ) if _tf)

    # Choose the candidate group: by exact name, by tracked feature, or
    # (worst case) the whole index.
    if nm:
        tgroup = libs = self.groups.get(nm, [])
    elif tf:
        assert len(tf) == 1
        k = next(iter(tf))
        tgroup = libs = self.trackers.get(k, [])
    else:
        tgroup = libs = self.index.keys()
        simple = False
    if not simple:
        libs = [fkey for fkey in tgroup if spec.match(fkey)]
    if len(libs) == len(tgroup):
        # The spec matches its entire group, so it can be expressed in
        # terms of the group-level variable instead of a new disjunction.
        if spec.optional:
            m = TRUE
        elif not simple:
            ms2 = MatchSpec(track_features=tf) if tf else MatchSpec(nm)
            m = C.from_name(self.push_MatchSpec(C, ms2))
    if m is None:
        # General case: the spec variable is the OR of its matching
        # packages (plus "group absent" when the spec is optional).
        sat_names = [self.to_sat_name(prec) for prec in libs]
        if spec.optional:
            ms2 = MatchSpec(track_features=tf) if tf else MatchSpec(nm)
            sat_names.append('!' + self.to_sat_name(ms2))
        m = C.Any(sat_names)
    C.name_var(m, sat_name)
    return sat_name
|
| 890 |
+
|
| 891 |
+
@time_recorder(module_name=__name__)
def gen_clauses(self):
    """Build the base SAT clause set for the current index.

    Encodes two families of constraints: at most one version of each
    package name may be installed, and an installed package implies each
    of its (non-virtual) dependency specs.

    Returns:
        A populated Clauses instance.
    """
    C = Clauses(sat_solver=_get_sat_solver_cls(context.sat_solver))
    for name, group in self.groups.items():
        group = [self.to_sat_name(prec) for prec in group]
        # Create one variable for each package
        for sat_name in group:
            C.new_var(sat_name)
        # Create one variable for the group
        m = C.new_var(self.to_sat_name(MatchSpec(name)))

        # Exactly one of the package variables, OR
        # the negation of the group variable, is true
        C.Require(C.ExactlyOne, group + [C.Not(m)])

    # If a package is installed, its dependencies must be as well
    for prec in self.index.values():
        nkey = C.Not(self.to_sat_name(prec))
        for ms in self.ms_depends(prec):
            # Virtual packages can't be installed, we ignore them
            if not ms.name.startswith('__'):
                C.Require(C.Or, nkey, self.push_MatchSpec(C, ms))

    if log.isEnabledFor(DEBUG):
        log.debug("gen_clauses returning with clause count: %d", C.get_clause_count())
    return C
|
| 917 |
+
|
| 918 |
+
def generate_spec_constraints(self, C, specs):
    """Return one single-literal clause per spec, each forcing that spec to hold."""
    clauses = []
    for spec in specs:
        clauses.append((self.push_MatchSpec(C, spec),))
    if log.isEnabledFor(DEBUG):
        log.debug(
            "generate_spec_constraints returning with clause count: %d",
            C.get_clause_count())
    return clauses
|
| 925 |
+
|
| 926 |
+
def generate_feature_count(self, C):
    """Build a C.minimize() objective: weight 1 for every tracked feature in use."""
    objective = {}
    for feature_name in self.trackers.keys():
        sat_name = self.push_MatchSpec(C, MatchSpec(track_features=feature_name))
        objective[sat_name] = 1
    if log.isEnabledFor(DEBUG):
        log.debug(
            "generate_feature_count returning with clause count: %d", C.get_clause_count())
    return objective
|
| 933 |
+
|
| 934 |
+
def generate_update_count(self, C, specs):
    """Build an objective penalizing solutions that move away from a spec's target."""
    objective = {}
    for ms in specs:
        # Only specs with a known target that is registered as a SAT variable count.
        if ms.target and C.from_name(ms.target):
            objective['!' + ms.target] = 1
    return objective
|
| 936 |
+
|
| 937 |
+
def generate_feature_metric(self, C):
    """Build a minimization objective that penalizes "featureless" packages.

    Given a pair (prec, feature), assign a "1" score IF:
    - The prec is installed
    - The prec does NOT require the feature
    - At least one package in the group DOES require the feature
    - A package that tracks the feature is installed
    """
    eq = {}  # a C.minimize() objective: Dict[varname, coeff]
    for name, group in self.groups.items():
        prec_feats = {self.to_sat_name(prec): set(prec.features) for prec in group}
        # features that appear in this group AND are tracked somewhere in the index
        active_feats = set.union(*prec_feats.values()).intersection(self.trackers)
        for feat in active_feats:
            clause_id_for_feature = self.push_MatchSpec(C, MatchSpec(track_features=feat))
            for prec_sat_name, features in prec_feats.items():
                if feat not in features:
                    # penalize installing this featureless variant while a
                    # package tracking `feat` is also installed
                    feature_metric_id = self.to_feature_metric_id(prec_sat_name, feat)
                    C.name_var(C.And(prec_sat_name, clause_id_for_feature), feature_metric_id)
                    eq[feature_metric_id] = 1
    return eq
|
| 955 |
+
|
| 956 |
+
def generate_removal_count(self, C, specs):
    """Objective: weight 1 for each spec whose package ends up NOT installed
    (i.e. count of removals, via the negated group literal)."""
    objective = {}
    for ms in specs:
        objective['!' + self.push_MatchSpec(C, ms.name)] = 1
    return objective
|
| 958 |
+
|
| 959 |
+
def generate_install_count(self, C, specs):
    """Objective: weight 1 for each *optional* spec that does get installed."""
    objective = {}
    for ms in specs:
        if ms.optional:
            objective[self.push_MatchSpec(C, ms.name)] = 1
    return objective
|
| 961 |
+
|
| 962 |
+
def generate_package_count(self, C, missing):
    """Objective: weight 1 for each installed package named in `missing`."""
    return dict.fromkeys((self.push_MatchSpec(C, nm) for nm in missing), 1)
|
| 964 |
+
|
| 965 |
+
def generate_version_metrics(self, C, specs, include0=False):
    """Build the channel/version/build/noarch/timestamp minimization objectives.

    Each returned dict is a C.minimize() objective (Dict[varname, coeff]);
    lower coefficients mark higher-priority / newer packages.  With
    ``include0=True`` zero coefficients are emitted as well.

    Returns:
        Tuple of five dicts: (channel, version, build, arch/noarch, timestamp).
    """
    # each of these are weights saying how well packages match the specs
    # format for each: a C.minimize() objective: Dict[varname, coeff]
    eqc = {}  # channel
    eqv = {}  # version
    eqb = {}  # build number
    eqa = {}  # arch/noarch
    eqt = {}  # timestamp

    sdict = {}  # Dict[package_name, PackageRecord]

    for s in specs:
        s = MatchSpec(s)  # needed for testing
        sdict.setdefault(s.name, [])
        # # TODO: this block is important! can't leave it commented out
        # rec = sdict.setdefault(s.name, [])
        # if s.target:
        #     dist = Dist(s.target)
        #     if dist in self.index:
        #         if self.index[dist].get('priority', 0) < MAX_CHANNEL_PRIORITY:
        #             rec.append(dist)

    for name, targets in sdict.items():
        pkgs = [(self.version_key(p), p) for p in self.groups.get(name, [])]
        pkey = None
        # keep in mind that pkgs is already sorted according to version_key (a tuple,
        # so composite sort key).  Later entries in the list are, by definition,
        # greater in some way, so simply comparing with != suffices.
        for version_key, prec in pkgs:
            if targets and any(prec == t for t in targets):
                continue
            # Each counter (ic/iv/ib/ia/it) increments when the corresponding
            # component of the sort key changes; lower-tier counters reset so
            # the penalty is hierarchical (channel > version > build > ...).
            if pkey is None:
                ic = iv = ib = it = ia = 0
            # valid package, channel priority
            elif pkey[0] != version_key[0] or pkey[1] != version_key[1]:
                ic += 1
                iv = ib = it = ia = 0
            # version
            elif pkey[2] != version_key[2]:
                iv += 1
                ib = it = ia = 0
            # build number
            elif pkey[3] != version_key[3]:
                ib += 1
                it = ia = 0
            # arch/noarch
            elif pkey[4] != version_key[4]:
                ia += 1
                it = 0
            elif not self._solver_ignore_timestamps and pkey[5] != version_key[5]:
                it += 1

            prec_sat_name = self.to_sat_name(prec)
            if ic or include0:
                eqc[prec_sat_name] = ic
            if iv or include0:
                eqv[prec_sat_name] = iv
            if ib or include0:
                eqb[prec_sat_name] = ib
            if ia or include0:
                eqa[prec_sat_name] = ia
            if it or include0:
                eqt[prec_sat_name] = it
            pkey = version_key

    return eqc, eqv, eqb, eqa, eqt
|
| 1031 |
+
|
| 1032 |
+
def dependency_sort(self, must_have):
    # type: (Dict[package_name, PackageRecord]) -> List[PackageRecord]
    """Topologically sort the records in `must_have` so each package follows
    its dependencies.  Records whose names never appear in the sorted key
    list are appended at the end in arbitrary order.
    """
    assert isinstance(must_have, dict)

    digraph = {}  # Dict[package_name, Set[dependent_package_names]]
    for package_name, prec in must_have.items():
        if prec in self.index:
            digraph[package_name] = set(ms.name for ms in self.ms_depends(prec))

    # There are currently at least three special cases to be aware of.
    # 1. The `toposort()` function, called below, contains special case code to remove
    #    any circular dependency between python and pip.
    # 2. conda/plan.py has special case code for menuinst
    #    Always link/unlink menuinst first/last on windows in case a subsequent
    #    package tries to import it to create/remove a shortcut
    # 3. On windows, python noarch packages need an implicit dependency on conda added, if
    #    conda is in the list of packages for the environment.  Python noarch packages
    #    that have entry points use conda's own conda.exe python entry point binary.  If conda
    #    is going to be updated during an operation, the unlink / link order matters.
    #    See issue #6057.

    if on_win and 'conda' in digraph:
        for package_name, prec in must_have.items():
            # BUG FIX: previously looked up ``self.index.get(prec)`` with a
            # stale `prec` left over from the loop above (the loop variable
            # was named `dist` and never used), so the implicit conda
            # dependency was decided from the wrong record every iteration.
            record = self.index.get(prec)
            if hasattr(record, 'noarch') and record.noarch == NoarchType.python:
                # Guard: records absent from self.index have no digraph entry.
                if package_name in digraph:
                    digraph[package_name].add('conda')

    sorted_keys = toposort(digraph)
    must_have = must_have.copy()
    # Take all of the items in the sorted keys
    # Don't fail if the key does not exist
    result = [must_have.pop(key) for key in sorted_keys if key in must_have]
    # Take any key that were not sorted
    result.extend(must_have.values())
    return result
|
| 1067 |
+
|
| 1068 |
+
def environment_is_consistent(self, installed):
    """Return True if the installed records are mutually satisfiable."""
    log.debug('Checking if the current environment is consistent')
    if not installed:
        # An empty environment is trivially consistent.  BUG FIX: this
        # previously returned the tuple ``(None, [])`` — a copy/paste from
        # ``bad_installed`` — which is truthy but the wrong type for a
        # predicate that otherwise returns bool.
        return True
    sat_name_map = {}  # Dict[sat_name, PackageRecord]
    specs = []
    for prec in installed:
        sat_name_map[self.to_sat_name(prec)] = prec
        # Pin every installed record exactly (name, version, build).
        specs.append(MatchSpec('%s %s %s' % (prec.name, prec.version, prec.build)))
    r2 = Resolve(OrderedDict((prec, prec) for prec in installed), True, channels=self.channels)
    C = r2.gen_clauses()
    constraints = r2.generate_spec_constraints(C, specs)
    solution = C.sat(constraints)
    return bool(solution)
|
| 1082 |
+
|
| 1083 |
+
def get_conflicting_specs(self, specs, explicit_specs):
    """Find a minimal subset of `specs` that cannot be satisfied together.

    Returns () when everything is satisfiable, a tuple of conflicting specs
    when unsatisfiable hints are enabled, or None when unsatisfiable but
    ``context.unsatisfiable_hints`` is off.
    """
    if not specs:
        return ()

    all_specs = set(specs) | set(explicit_specs)
    reduced_index = self.get_reduced_index(all_specs)

    # Check if satisfiable
    # NOTE: closes over `r2` and `C`, which are (re)bound below before each call.
    def mysat(specs, add_if=False):
        constraints = r2.generate_spec_constraints(C, specs)
        return C.sat(constraints, add_if)

    if reduced_index:
        r2 = Resolve(reduced_index, True, channels=self.channels)
        C = r2.gen_clauses()
        solution = mysat(all_specs, True)
    else:
        # An empty reduced index means the specs are unsatisfiable outright.
        solution = None

    if solution:
        final_unsat_specs = ()
    elif context.unsatisfiable_hints:
        # Rebind r2/C against the FULL index so the hint search considers
        # every candidate package, not just the reduced set.
        r2 = Resolve(self.index, True, channels=self.channels)
        C = r2.gen_clauses()
        # This first result is just a single unsatisfiable core. There may be several.
        final_unsat_specs = tuple(minimal_unsatisfiable_subset(specs, sat=mysat,
                                                              explicit_specs=explicit_specs))
    else:
        final_unsat_specs = None
    return final_unsat_specs
|
| 1113 |
+
|
| 1114 |
+
def bad_installed(self, installed, new_specs):
    """Check consistency of `installed` and compute a recovery set when broken.

    Returns:
        (limit, xtra): ``limit`` is a set of package names the solver should
        restrict itself to (or None if everything is consistent); ``xtra`` is
        the list of installed records to preserve untouched (or None).
    """
    log.debug('Checking if the current environment is consistent')
    if not installed:
        return None, []
    sat_name_map = {}  # Dict[sat_name, PackageRecord]
    specs = []
    for prec in installed:
        sat_name_map[self.to_sat_name(prec)] = prec
        specs.append(MatchSpec('%s %s %s' % (prec.name, prec.version, prec.build)))
    new_index = {prec: prec for prec in sat_name_map.values()}
    name_map = {p.name: p for p in new_index}
    if 'python' in name_map and 'pip' not in name_map:
        python_prec = new_index[name_map['python']]
        if 'pip' in python_prec.depends:
            # strip pip dependency from python if not installed in environment
            new_deps = [d for d in python_prec.depends if d != 'pip']
            python_prec.depends = new_deps
    r2 = Resolve(new_index, True, channels=self.channels)
    C = r2.gen_clauses()
    constraints = r2.generate_spec_constraints(C, specs)
    solution = C.sat(constraints)
    limit = xtra = None
    if not solution or xtra:
        # Recursively collect `name` and the names of everything any of its
        # candidate packages can depend on.
        def get_(name, snames):
            if name not in snames:
                snames.add(name)
                for fn in self.groups.get(name, []):
                    for ms in self.ms_depends(fn):
                        get_(ms.name, snames)
        # New addition: find the largest set of installed packages that
        # are consistent with each other, and include those in the
        # list of packages to maintain consistency with
        snames = set()
        eq_optional_c = r2.generate_removal_count(C, specs)
        solution, _ = C.minimize(eq_optional_c, C.sat())
        snames.update(sat_name_map[sat_name]['name']
                      for sat_name in (C.from_index(s) for s in solution)
                      if sat_name and sat_name[0] != '!' and '@' not in sat_name)
        # Existing behavior: keep all specs and their dependencies
        for spec in new_specs:
            get_(MatchSpec(spec).name, snames)
        if len(snames) < len(sat_name_map):
            limit = snames
            xtra = [rec for sat_name, rec in sat_name_map.items()
                    if rec['name'] not in snames]
            log.debug('Limiting solver to the following packages: %s', ', '.join(limit))
    if xtra:
        log.debug('Packages to be preserved: %s', xtra)
    return limit, xtra
|
| 1163 |
+
|
| 1164 |
+
def restore_bad(self, pkgs, preserve):
    """Append preserved records to `pkgs` in place, skipping names already present."""
    if not preserve:
        return
    # Snapshot of names BEFORE appending; duplicates inside `preserve`
    # are intentionally all appended, matching the original behavior.
    present = {rec.name for rec in pkgs}
    for rec in preserve:
        if rec.name not in present:
            pkgs.append(rec)
|
| 1168 |
+
|
| 1169 |
+
def install_specs(self, specs, installed, update_deps=True):
    """Augment `specs` with pin/target specs for already-installed packages.

    Returns:
        (augmented_specs, preserve) where ``preserve`` is the list of
        records ``bad_installed`` decided must be kept untouched.
    """
    requested = [MatchSpec(s) for s in specs]
    requested_names = set(s.name for s in requested)
    log.debug('Checking satisfiability of current install')
    limit, preserve = self.bad_installed(installed, requested)

    pinned = []
    for prec in installed:
        if prec not in self.index:
            continue
        if prec.name in requested_names:
            continue
        if limit is not None and prec.name not in limit:
            continue
        # If update_deps=True, set the target package in MatchSpec so that
        # the solver can minimize the version change. If update_deps=False,
        # fix the version and build so that no change is possible.
        if update_deps:
            # TODO: fix target here
            pinned.append(MatchSpec(name=prec.name, target=prec.dist_str()))
        else:
            pinned.append(MatchSpec(name=prec.name, version=prec.version,
                                    build=prec.build,
                                    channel=prec.channel.canonical_name))
    # Original code insert(0, ...)-ed each pin, so pins precede the
    # requested specs in reverse iteration order — reproduce that exactly.
    return tuple(list(reversed(pinned)) + requested), preserve
|
| 1192 |
+
|
| 1193 |
+
def install(self, specs, installed=None, update_deps=True, returnall=False):
    """Solve for `specs` on top of `installed` and return the package list."""
    full_specs, preserve = self.install_specs(specs, installed or [], update_deps)
    solution = self.solve(full_specs, returnall=returnall, _remove=False) if full_specs else []
    self.restore_bad(solution, preserve)
    return solution
|
| 1200 |
+
|
| 1201 |
+
def remove_specs(self, specs, installed):
    """Turn removal requests into optional "never-match" specs plus keep-specs.

    Returns:
        (nspecs, preserve): specs to feed the solver and records to keep as-is.
    """
    # There's an imperfect thing happening here. "specs" nominally contains
    # a list of package names or track_feature values to be removed. But
    # because of add_defaults_to_specs it may also contain version constraints
    # like "python 2.7*", which are *not* asking for python to be removed.
    # We need to separate these two kinds of specs here.
    removal_specs = []
    for spec in (MatchSpec(s) for s in specs):
        # Since '@' is an illegal version number, this ensures that all of
        # these matches will never match an actual package. Combined with
        # optional=True, this has the effect of forcing their removal.
        if spec._is_single():
            removal_specs.append(MatchSpec(spec, version='@', optional=True))
        else:
            removal_specs.append(MatchSpec(spec, optional=True))

    removal_names = set(spec.name for spec in removal_specs if spec.name)
    limit, _ = self.bad_installed(installed, removal_specs)
    preserve = []
    for prec in installed:
        if prec.name in removal_names:
            continue
        if limit is not None:
            preserve.append(prec)
        else:
            # TODO: fix target here
            removal_specs.append(MatchSpec(name=prec.name,
                                           version='>=' + prec.version if prec.version else None,
                                           optional=True,
                                           target=prec.dist_str()))
    return removal_specs, preserve
|
| 1232 |
+
|
| 1233 |
+
def remove(self, specs, installed):
    """Solve for the environment that results from removing `specs`."""
    nspecs, preserve = self.remove_specs(specs, installed)
    solution = self.solve(nspecs, _remove=True)
    self.restore_bad(solution, preserve)
    return solution
|
| 1238 |
+
|
| 1239 |
+
@time_recorder(module_name=__name__)
def solve(self, specs, returnall=False, _remove=False, specs_to_add=None, history_specs=None,
          should_retry_solve=False):
    # type: (List[str], bool) -> List[PackageRecord]
    """Solve the given specs via SAT + a fixed sequence of minimization passes.

    Raises ResolvePackageNotFound / UnsatisfiableError when no solution
    exists; otherwise returns the chosen PackageRecords sorted by name.
    NOTE: the order of the minimize() passes below encodes conda's
    preference hierarchy and must not be reordered.
    """

    if specs and not isinstance(specs[0], MatchSpec):
        specs = tuple(MatchSpec(_) for _ in specs)

    specs = set(specs)
    if log.isEnabledFor(DEBUG):
        dlist = dashlist(str(
            '%i: %s target=%s optional=%s' % (i, s, s.target, s.optional))
            for i, s in enumerate(specs))
        log.debug('Solving for: %s', dlist)

    if not specs:
        return tuple()

    # Find the compliant packages
    log.debug("Solve: Getting reduced index of compliant packages")
    len0 = len(specs)

    reduced_index = self.get_reduced_index(
        specs, exit_on_conflict=not context.unsatisfiable_hints)
    if not reduced_index:
        # something is intrinsically unsatisfiable - either not found or
        # not the right version
        not_found_packages = set()
        wrong_version_packages = set()
        for s in specs:
            if not self.find_matches(s):
                if s.name in self.groups:
                    wrong_version_packages.add(s)
                else:
                    not_found_packages.add(s)
        if not_found_packages:
            raise ResolvePackageNotFound(not_found_packages)
        elif wrong_version_packages:
            raise UnsatisfiableError([[d] for d in wrong_version_packages], chains=False)
        if should_retry_solve:
            # We don't want to call find_conflicts until our last try.
            # This jumps back out to conda/cli/install.py, where the
            # retries happen
            raise UnsatisfiableError({})
        else:
            self.find_conflicts(specs, specs_to_add, history_specs)

    # Check if satisfiable
    log.debug("Solve: determining satisfiability")

    # closes over r2/C which are bound below
    def mysat(specs, add_if=False):
        constraints = r2.generate_spec_constraints(C, specs)
        return C.sat(constraints, add_if)

    # Return a solution of packages
    def clean(sol):
        return [q for q in (C.from_index(s) for s in sol)
                if q and q[0] != '!' and '@' not in q]

    def is_converged(solution):
        """ Determine if the SAT problem has converged to a single solution.

        This is determined by testing for a SAT solution with the current
        clause set and a clause in which at least one of the packages in
        the current solution is excluded. If a solution exists the problem
        has not converged as multiple solutions still exist.
        """
        psolution = clean(solution)
        nclause = tuple(C.Not(C.from_name(q)) for q in psolution)
        if C.sat((nclause,), includeIf=False) is None:
            return True
        return False

    r2 = Resolve(reduced_index, True, channels=self.channels)
    C = r2.gen_clauses()
    solution = mysat(specs, True)
    if not solution:
        if should_retry_solve:
            # we don't want to call find_conflicts until our last try
            raise UnsatisfiableError({})
        else:
            self.find_conflicts(specs, specs_to_add, history_specs)

    # Partition specs into the buckets the minimization passes operate on.
    speco = []  # optional packages
    specr = []  # requested packages
    speca = []  # all other packages
    specm = set(r2.groups)  # missing from specs
    for k, s in enumerate(specs):
        if s.name in specm:
            specm.remove(s.name)
        if not s.optional:
            (speca if s.target or k >= len0 else specr).append(s)
        elif any(r2.find_matches(s)):
            s = MatchSpec(s.name, optional=True, target=s.target)
            speco.append(s)
            speca.append(s)
    speca.extend(MatchSpec(s) for s in specm)

    if log.isEnabledFor(DEBUG):
        log.debug('Requested specs: %s', dashlist(sorted(str(s) for s in specr)))
        log.debug('Optional specs: %s', dashlist(sorted(str(s) for s in speco)))
        log.debug('All other specs: %s', dashlist(sorted(str(s) for s in speca)))
        log.debug('missing specs: %s', dashlist(sorted(str(s) for s in specm)))

    # Removed packages: minimize count
    log.debug("Solve: minimize removed packages")
    if _remove:
        eq_optional_c = r2.generate_removal_count(C, speco)
        solution, obj7 = C.minimize(eq_optional_c, solution)
        log.debug('Package removal metric: %d', obj7)

    # Requested packages: maximize versions
    log.debug("Solve: maximize versions of requested packages")
    eq_req_c, eq_req_v, eq_req_b, eq_req_a, eq_req_t = r2.generate_version_metrics(C, specr)
    solution, obj3a = C.minimize(eq_req_c, solution)
    solution, obj3 = C.minimize(eq_req_v, solution)
    log.debug('Initial package channel/version metric: %d/%d', obj3a, obj3)

    # Track features: minimize feature count
    log.debug("Solve: minimize track_feature count")
    eq_feature_count = r2.generate_feature_count(C)
    solution, obj1 = C.minimize(eq_feature_count, solution)
    log.debug('Track feature count: %d', obj1)

    # Featured packages: minimize number of featureless packages
    # installed when a featured alternative is feasible.
    # For example, package name foo exists with two built packages. One with
    # 'track_features: 'feat1', and one with 'track_features': 'feat2'.
    # The previous "Track features" minimization pass has chosen 'feat1' for the
    # environment, but not 'feat2'. In this case, the 'feat2' version of foo is
    # considered "featureless."
    eq_feature_metric = r2.generate_feature_metric(C)
    solution, obj2 = C.minimize(eq_feature_metric, solution)
    log.debug('Package misfeature count: %d', obj2)

    # Requested packages: maximize builds
    log.debug("Solve: maximize build numbers of requested packages")
    solution, obj4 = C.minimize(eq_req_b, solution)
    log.debug('Initial package build metric: %d', obj4)

    # prefer arch packages where available for requested specs
    log.debug("Solve: prefer arch over noarch for requested packages")
    solution, noarch_obj = C.minimize(eq_req_a, solution)
    log.debug('Noarch metric: %d', noarch_obj)

    # Optional installations: minimize count
    if not _remove:
        log.debug("Solve: minimize number of optional installations")
        eq_optional_install = r2.generate_install_count(C, speco)
        solution, obj49 = C.minimize(eq_optional_install, solution)
        log.debug('Optional package install metric: %d', obj49)

    # Dependencies: minimize the number of packages that need upgrading
    log.debug("Solve: minimize number of necessary upgrades")
    eq_u = r2.generate_update_count(C, speca)
    solution, obj50 = C.minimize(eq_u, solution)
    log.debug('Dependency update count: %d', obj50)

    # Remaining packages: maximize versions, then builds
    log.debug("Solve: maximize versions and builds of indirect dependencies. "
              "Prefer arch over noarch where equivalent.")
    eq_c, eq_v, eq_b, eq_a, eq_t = r2.generate_version_metrics(C, speca)
    solution, obj5a = C.minimize(eq_c, solution)
    solution, obj5 = C.minimize(eq_v, solution)
    solution, obj6 = C.minimize(eq_b, solution)
    solution, obj6a = C.minimize(eq_a, solution)
    log.debug('Additional package channel/version/build/noarch metrics: %d/%d/%d/%d',
              obj5a, obj5, obj6, obj6a)

    # Prune unnecessary packages
    log.debug("Solve: prune unnecessary packages")
    eq_c = r2.generate_package_count(C, specm)
    solution, obj7 = C.minimize(eq_c, solution, trymax=True)
    log.debug('Weak dependency count: %d', obj7)

    if not is_converged(solution):
        # Maximize timestamps
        eq_t.update(eq_req_t)
        solution, obj6t = C.minimize(eq_t, solution)
        log.debug('Timestamp metric: %d', obj6t)

    log.debug('Looking for alternate solutions')
    nsol = 1
    psolutions = []
    psolution = clean(solution)
    psolutions.append(psolution)
    # Enumerate up to 10 alternate solutions by excluding each found one.
    while True:
        nclause = tuple(C.Not(C.from_name(q)) for q in psolution)
        solution = C.sat((nclause,), True)
        if solution is None:
            break
        nsol += 1
        if nsol > 10:
            log.debug('Too many solutions; terminating')
            break
        psolution = clean(solution)
        psolutions.append(psolution)

    if nsol > 1:
        psols2 = list(map(set, psolutions))
        common = set.intersection(*psols2)
        diffs = [sorted(set(sol) - common) for sol in psols2]
        if not context.json:
            stdoutlog.info(
                '\nWarning: %s possible package resolutions '
                '(only showing differing packages):%s%s' %
                ('>10' if nsol > 10 else nsol,
                 dashlist(', '.join(diff) for diff in diffs),
                 '\n ... and others' if nsol > 10 else ''))

    # def stripfeat(sol):
    #     return sol.split('[')[0]

    new_index = {self.to_sat_name(prec): prec for prec in self.index.values()}

    if returnall:
        if len(psolutions) > 1:
            raise RuntimeError()
        # TODO: clean up this mess
        # return [sorted(Dist(stripfeat(dname)) for dname in psol) for psol in psolutions]
        # return [sorted((new_index[sat_name] for sat_name in psol), key=lambda x: x.name)
        #         for psol in psolutions]

    # return sorted(Dist(stripfeat(dname)) for dname in psolutions[0])
    return sorted((new_index[sat_name] for sat_name in psolutions[0]), key=lambda x: x.name)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda/utils.py
ADDED
|
@@ -0,0 +1,531 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Copyright (C) 2012 Anaconda, Inc
|
| 3 |
+
# SPDX-License-Identifier: BSD-3-Clause
|
| 4 |
+
from __future__ import absolute_import, division, print_function, unicode_literals
|
| 5 |
+
|
| 6 |
+
from contextlib import contextmanager
|
| 7 |
+
from functools import lru_cache, wraps
|
| 8 |
+
import logging
|
| 9 |
+
from os.path import abspath, join, isfile, basename, dirname
|
| 10 |
+
from os import environ
|
| 11 |
+
from pathlib import Path
|
| 12 |
+
import re
|
| 13 |
+
import sys
|
| 14 |
+
|
| 15 |
+
from . import CondaError
|
| 16 |
+
from .auxlib.compat import shlex_split_unicode, Utf8NamedTemporaryFile
|
| 17 |
+
from .common.compat import on_win, isiterable
|
| 18 |
+
from .common.path import win_path_to_unix, which
|
| 19 |
+
from .common.url import path_to_url
|
| 20 |
+
|
| 21 |
+
log = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
def path_identity(path):
    """Dummy path converter for platforms needing no conversion: returns input unchanged."""
    return path
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def unix_path_to_win(path, root_prefix=""):
    """Convert a path or :-separated string of paths into a Windows representation

    Does not add cygdrive.  If you need that, set root_prefix to "/cygdrive"
    """
    # Input that already looks like a Windows path (drive letter, or a
    # ;-separated list) only needs its slashes flipped.
    looks_windows = len(path) > 1 and (
        ";" in path or (path[1] == ":" and path.count(":") == 1))
    if looks_windows:
        return path.replace("/", "\\")

    drive_pattern = root_prefix + r'(/[a-zA-Z]/(?:(?![:\s]/)[^:*?"<>])*)'

    def _convert_one(match):
        text = match.group(0)
        drive = text[len(root_prefix) + 1]
        remainder = text[len(root_prefix) + 2:].replace("/", "\\")
        return "{0}:{1}".format(drive, remainder)

    converted = re.sub(drive_pattern, _convert_one, path)
    # Turn the ':' separators left between converted windows paths into ';'.
    converted = re.sub(":([a-zA-Z]):\\\\",
                       lambda m: ";" + m.group(0)[1] + ":\\",
                       converted)
    return converted
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
# curry cygwin functions
|
| 50 |
+
def win_path_to_cygwin(path):
    """Convert a Windows path to cygwin form (/cygdrive/<letter>/...)."""
    return win_path_to_unix(path, "/cygdrive")
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def cygwin_path_to_win(path):
    """Convert a /cygdrive/<letter>/... cygwin path back to Windows form."""
    return unix_path_to_win(path, "/cygdrive")
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def translate_stream(stream, translator):
    """Apply *translator* to every line of *stream* and re-join with newlines."""
    lines = stream.split("\n")
    return "\n".join(map(translator, lines))
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def human_bytes(n):
    """
    Return the number of bytes n in more human readable form.

    Examples:
        >>> human_bytes(42)
        '42 B'
        >>> human_bytes(1042)
        '1 KB'
        >>> human_bytes(10004242)
        '9.5 MB'
        >>> human_bytes(100000004242)
        '93.13 GB'
    """
    # Each tier keeps the original, deliberately different formatting:
    # whole bytes, rounded KB, one-decimal MB, two-decimal GB.
    if n < 1024:
        return '%d B' % n
    kib = n / 1024
    if kib < 1024:
        return '%d KB' % round(kib)
    mib = kib / 1024
    if mib < 1024:
        return '%.1f MB' % mib
    return '%.2f GB' % (mib / 1024)
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
# TODO: this should be done in a more extensible way
|
| 89 |
+
# (like files for each shell, with some registration mechanism.)
|
| 90 |
+
|
| 91 |
+
# defaults for unix shells. Note: missing "exe" entry, which should be set to
|
| 92 |
+
# either an executable on PATH, or a full path to an executable for a shell
|
| 93 |
+
unix_shell_base = {
    "binpath": "/bin/",  # mind the trailing slash.
    "echo": "echo",
    "env_script_suffix": ".sh",
    "nul": '2>/dev/null',
    "path_from": path_identity,
    "path_to": path_identity,
    "pathsep": ":",
    "printdefaultenv": 'echo $CONDA_DEFAULT_ENV',
    "printpath": "echo $PATH",
    "printps1": 'echo $CONDA_PROMPT_MODIFIER',
    "promptvar": 'PS1',
    "sep": "/",
    "set_var": 'export ',
    "shell_args": ["-l", "-c"],
    "shell_suffix": "",
    "slash_convert": ("\\", "/"),
    "source_setup": "source",
    "test_echo_extra": "",
    "var_format": "${}",
}
|
| 114 |
+
|
| 115 |
+
# msys2 inherits the unix defaults, overriding only the Windows-specific bits.
msys2_shell_base = {
    **unix_shell_base,
    "path_from": unix_path_to_win,
    "path_to": win_path_to_unix,
    "binpath": "/bin/",  # mind the trailing slash.
    "printpath": "python -c \"import os; print(';'.join(os.environ['PATH'].split(';')[1:]))\" | cygpath --path -f -",  # NOQA
}
|
| 122 |
+
|
| 123 |
+
if on_win:
    # Per-shell metadata keyed by executable name.  Each entry overrides the
    # unix_shell_base / msys2_shell_base defaults with shell-specific syntax.
    shells = {
        # "powershell.exe": dict(
        #    echo="echo",
        #    test_echo_extra=" .",
        #    var_format="${var}",
        #    binpath="/bin/",  # mind the trailing slash.
        #    source_setup="source",
        #    nul='2>/dev/null',
        #    set_var='export ',
        #    shell_suffix=".ps",
        #    env_script_suffix=".ps",
        #    printps1='echo $PS1',
        #    printdefaultenv='echo $CONDA_DEFAULT_ENV',
        #    printpath="echo %PATH%",
        #    exe="powershell.exe",
        #    path_from=path_identity,
        #    path_to=path_identity,
        #    slash_convert = ("/", "\\"),
        # ),
        "cmd.exe": dict(
            echo="@echo",
            var_format="%{}%",
            binpath="\\Scripts\\",  # mind the trailing slash.
            source_setup="call",
            test_echo_extra="",
            nul='1>NUL 2>&1',
            set_var='set ',
            shell_suffix=".bat",
            env_script_suffix=".bat",
            printps1="@echo %PROMPT%",
            promptvar="PROMPT",
            # parens mismatched intentionally. See http://stackoverflow.com/questions/20691060/how-do-i-echo-a-blank-empty-line-to-the-console-from-a-windows-batch-file  # NOQA
            printdefaultenv='IF NOT "%CONDA_DEFAULT_ENV%" == "" (\n'
                            'echo %CONDA_DEFAULT_ENV% ) ELSE (\n'
                            'echo()',
            printpath="@echo %PATH%",
            exe="cmd.exe",
            shell_args=["/d", "/c"],
            path_from=path_identity,
            path_to=path_identity,
            slash_convert=("/", "\\"),
            sep="\\",
            pathsep=";",
        ),
        # Cygwin bash: differs from plain msys2 bash by converting paths
        # through the /cygdrive filesystem root.
        "cygwin": dict(
            unix_shell_base,
            exe="bash.exe",
            binpath="/Scripts/",  # mind the trailing slash.
            path_from=cygwin_path_to_win,
            path_to=win_path_to_cygwin
        ),
        # bash is whichever bash is on PATH. If using Cygwin, you should use the cygwin
        # entry instead. The only major difference is that it handle's cygwin's /cygdrive
        # filesystem root.
        "bash.exe": dict(
            msys2_shell_base, exe="bash.exe",
        ),
        "bash": dict(
            msys2_shell_base, exe="bash",
        ),
        "sh.exe": dict(
            msys2_shell_base, exe="sh.exe",
        ),
        "zsh.exe": dict(
            msys2_shell_base, exe="zsh.exe",
        ),
        "zsh": dict(
            msys2_shell_base, exe="zsh",
        ),
    }

else:
    # POSIX platforms: all entries are small variations on unix_shell_base.
    shells = {
        "bash": dict(
            unix_shell_base, exe="bash",
        ),
        "dash": dict(
            unix_shell_base, exe="dash",
            source_setup=".",
        ),
        "zsh": dict(
            unix_shell_base, exe="zsh",
        ),
        "fish": dict(
            unix_shell_base, exe="fish",
            pathsep=" ",
        ),
    }
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
# ##########################################
|
| 215 |
+
# put back because of conda build
|
| 216 |
+
# ##########################################
|
| 217 |
+
|
| 218 |
+
# Aliases kept for backwards compatibility (conda-build imports these names).
urlpath = url_path = path_to_url
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
def md5_file(path):  # pragma: no cover
    """Return the hex MD5 digest of the file at *path*.

    Thin compatibility wrapper over the gateway implementation.
    """
    from .gateways.disk.read import compute_md5sum
    return compute_md5sum(path)
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
def hashsum_file(path, mode='md5'):  # pragma: no cover
    """Return the hex digest of the file at *path* using hashlib algorithm *mode*."""
    import hashlib
    hasher = hashlib.new(mode)
    with open(path, 'rb') as stream:
        # Hash in 256KB chunks so large files never sit fully in memory.
        for chunk in iter(lambda: stream.read(262144), b''):
            hasher.update(chunk)
    return hasher.hexdigest()
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
@lru_cache(maxsize=None)
def sys_prefix_unfollowed():
    """Since conda is installed into non-root environments as a symlink only
    and because sys.prefix follows symlinks, this function can be used to
    get the 'unfollowed' sys.prefix.

    This value is usually the same as the prefix of the environment into
    which conda has been symlinked. An example of when this is necessary
    is when conda looks for external sub-commands in find_commands.py
    """
    try:
        frame = next(iter(sys._current_frames().values()))
        # Walk to the outermost (root) frame of the current stack.
        while frame.f_back is not None:
            frame = frame.f_back
        entry_file = frame.f_code.co_filename
        unfollowed = dirname(dirname(entry_file))
    except Exception:
        # Introspection failed -- fall back to the symlink-resolved prefix.
        return sys.prefix
    return unfollowed
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
def quote_for_shell(*arguments):
    """Properly quote arguments for command line passing.

    For POSIX uses `shlex.join`, for Windows uses a custom implementation to properly escape
    metacharacters.

    :param arguments: Arguments to quote.
    :type arguments: list of str
    :return: Quoted arguments.
    :rtype: str
    """
    # [backport] A single iterable argument is treated as the argument list itself.
    if len(arguments) == 1 and isiterable(arguments[0]):
        return _args_join(arguments[0])
    return _args_join(arguments)
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
if on_win:
    # Windows has no shlex.join equivalent; build one following:
    # https://ss64.com/nt/syntax-esc.html
    # https://docs.microsoft.com/en-us/archive/blogs/twistylittlepassagesallalike/everyone-quotes-command-line-arguments-the-wrong-way

    # Characters that force quoting, and characters that must be doubled.
    _RE_UNSAFE = re.compile(r'["%\s^<>&|]')
    _RE_DBL = re.compile(r'(["%])')

    def _args_join(args):
        """Return a shell-escaped string from *args*."""

        def quote(s):
            # derived from shlex.quote
            if not s:
                return '""'
            # if any unsafe chars are present we must quote
            if not _RE_UNSAFE.search(s):
                return s
            # double escape (" -> "")
            s = _RE_DBL.sub(r"\1\1", s)
            # quote entire string
            return f'"{s}"'

        return " ".join(quote(arg) for arg in args)
else:
    try:
        # Python 3.8+ ships shlex.join; prefer it.
        from shlex import join as _args_join
    except ImportError:
        # [backport] Python <3.8
        def _args_join(args):
            """Return a shell-escaped string from *args*."""
            from shlex import quote

            return " ".join(quote(arg) for arg in args)
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
# Ensures arguments are a tuple or a list. Strings are converted
|
| 314 |
+
# by shlex_split_unicode() which is bad; we warn about it or else
|
| 315 |
+
# we assert (and fix the code).
|
| 316 |
+
def massage_arguments(arguments, errors='assert'):
    """Normalize *arguments* into a flat list of individual arguments.

    A string input is a programming error: with ``errors='assert'`` we fail
    hard; otherwise it is split with ``shlex_split_unicode()`` and a warning
    is logged.  A single non-iterable value is wrapped into a list.

    :param arguments: Command arguments; ideally already a list/tuple of str.
    :param errors: 'assert' to fail on a string input, anything else to split it.
    :return: list of arguments
    :rtype: list
    """

    # For reference and in-case anything breaks ..
    # .. one of the places (run_command in conda_env/utils.py) this
    # gets called from used to do this too:
    #
    # def escape_for_winpath(p):
    #     return p.replace('\\', '\\\\')
    #
    # if not isinstance(arguments, list):
    #     arguments = list(map(escape_for_winpath, arguments))

    if isinstance(arguments, str):
        if errors == 'assert':
            # This should be something like 'conda programming bug', it is an assert
            assert False, 'Please ensure arguments are not strings'
        else:
            arguments = shlex_split_unicode(arguments)
            log.warning("Please ensure arguments is not a string; "
                        "used `shlex_split_unicode()` on it")

    if not isiterable(arguments):
        arguments = (arguments,)

    # Use a generator instead of materializing a throwaway list inside any().
    assert not any(isiterable(arg) for arg in arguments), "Individual arguments must not be iterable"  # NOQA
    return list(arguments)
|
| 344 |
+
|
| 345 |
+
|
| 346 |
+
def wrap_subprocess_call(
        root_prefix,
        prefix,
        dev_mode,
        debug_wrapper_scripts,
        arguments,
        use_system_tmp_path=False):
    """Write a temporary wrapper script that activates *prefix* and runs *arguments*.

    Returns ``(script_caller, command_args)`` where ``script_caller`` is the path
    of the generated script (not deleted here; the caller is responsible for
    cleanup) and ``command_args`` is the argv list that executes it.

    :param root_prefix: base conda installation prefix (used to locate conda).
    :param prefix: environment prefix to activate before running *arguments*.
    :param dev_mode: run conda from source via ``python -m conda`` with PYTHONPATH set.
    :param debug_wrapper_scripts: emit environment dumps and keep script lines visible.
    :param arguments: command to run; a single multi-line string is written verbatim.
    :param use_system_tmp_path: if False, scripts are created under ``<prefix>/.tmp``.
    """
    arguments = massage_arguments(arguments)
    if not use_system_tmp_path:
        tmp_prefix = abspath(join(prefix, '.tmp'))
    else:
        # None makes Utf8NamedTemporaryFile fall back to the system temp dir.
        tmp_prefix = None
    script_caller = None
    multiline = False
    if len(arguments) == 1 and '\n' in arguments[0]:
        # A single multi-line argument is treated as a ready-made script body.
        multiline = True
    if on_win:
        comspec = get_comspec()  # fail early with KeyError if undefined
        if dev_mode:
            from conda import CONDA_PACKAGE_ROOT
            conda_bat = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'conda.bat')
        else:
            conda_bat = environ.get("CONDA_BAT",
                                    abspath(join(root_prefix, 'condabin', 'conda.bat')))
        with Utf8NamedTemporaryFile(mode='w', prefix=tmp_prefix,
                                    suffix='.bat', delete=False) as fh:
            # "@" suppresses command echo unless the caller asked for debugging.
            silencer = "" if debug_wrapper_scripts else "@"
            fh.write("{}ECHO OFF\n".format(silencer))
            fh.write("{}SET PYTHONIOENCODING=utf-8\n".format(silencer))
            fh.write("{}SET PYTHONUTF8=1\n".format(silencer))
            # Save the active code page so it can be restored at the end.
            fh.write('{}FOR /F "tokens=2 delims=:." %%A in (\'chcp\') do for %%B in (%%A) do set "_CONDA_OLD_CHCP=%%B"\n'.format(silencer))  # NOQA
            fh.write("{}chcp 65001 > NUL\n".format(silencer))
            if dev_mode:
                from . import CONDA_SOURCE_ROOT
                fh.write("{}SET CONDA_DEV=1\n".format(silencer))
                # In dev mode, conda is really:
                # 'python -m conda'
                # *with* PYTHONPATH set.
                fh.write("{}SET PYTHONPATH={}\n".format(silencer, CONDA_SOURCE_ROOT))
                fh.write("{}SET CONDA_EXE={}\n".format(silencer, sys.executable))
                fh.write("{}SET _CE_M=-m\n".format(silencer))
                fh.write("{}SET _CE_CONDA=conda\n".format(silencer))
            if debug_wrapper_scripts:
                fh.write('echo *** environment before *** 1>&2\n')
                fh.write('SET 1>&2\n')
            # Not sure there is any point in backing this up, nothing will get called with it reset
            # after all!
            # fh.write("@FOR /F \"tokens=100\" %%F IN ('chcp') DO @SET CONDA_OLD_CHCP=%%F\n")
            # fh.write('@chcp 65001>NUL\n')
            fh.write('{0}CALL \"{1}\" activate \"{2}\"\n'.format(silencer, conda_bat, prefix))
            fh.write("{}IF %ERRORLEVEL% NEQ 0 EXIT /b %ERRORLEVEL%\n".format(silencer))
            if debug_wrapper_scripts:
                fh.write('echo *** environment after *** 1>&2\n')
                fh.write('SET 1>&2\n')
            if multiline:
                # No point silencing the first line. If that's what's wanted then
                # it needs doing for each line and the caller may as well do that.
                fh.write("{0}\n".format(arguments[0]))
            else:
                assert not any("\n" in arg for arg in arguments), (
                    "Support for scripts where arguments contain newlines not implemented.\n"
                    ".. requires writing the script to an external file and knowing how to "
                    "transform the command-line (e.g. `python -c args` => `python file`) "
                    "in a tool dependent way, or attempting something like:\n"
                    ".. https://stackoverflow.com/a/15032476 (adds unacceptable escaping"
                    "requirements)"
                )
                fh.write("{0}{1}\n".format(silencer, quote_for_shell(*arguments)))
            fh.write("{}IF %ERRORLEVEL% NEQ 0 EXIT /b %ERRORLEVEL%\n".format(silencer))
            # Restore the code page saved at the top of the script.
            fh.write("{}chcp %_CONDA_OLD_CHCP%>NUL\n".format(silencer))
            script_caller = fh.name
        command_args = [comspec, '/d', '/c', script_caller]
    else:
        shell_path = which('bash') or which('sh')
        if shell_path is None:
            raise Exception("No compatible shell found!")

        # During tests, we sometimes like to have a temp env with e.g. an old python in it
        # and have it run tests against the very latest development sources. For that to
        # work we need extra smarts here, we want it to be instead:
        if dev_mode:
            conda_exe = [abspath(join(root_prefix, 'bin', 'python')), '-m', 'conda']
            dev_arg = '--dev'
            dev_args = [dev_arg]
        else:
            conda_exe = [environ.get("CONDA_EXE", abspath(join(root_prefix, 'bin', 'conda')))]
            dev_arg = ''
            dev_args = []
        with Utf8NamedTemporaryFile(mode='w', prefix=tmp_prefix, delete=False) as fh:
            if dev_mode:
                from . import CONDA_SOURCE_ROOT

                fh.write(">&2 export PYTHONPATH=" + CONDA_SOURCE_ROOT + "\n")
            hook_quoted = quote_for_shell(*conda_exe, "shell.posix", "hook", *dev_args)
            if debug_wrapper_scripts:
                fh.write(">&2 echo '*** environment before ***'\n" ">&2 env\n")
                fh.write('>&2 echo "$({0})"\n'.format(hook_quoted))
            # Source conda's shell hook, then activate the target prefix.
            fh.write('eval "$({0})"\n'.format(hook_quoted))
            fh.write("conda activate {0} {1}\n".format(dev_arg, quote_for_shell(prefix)))
            if debug_wrapper_scripts:
                fh.write(">&2 echo '*** environment after ***'\n" ">&2 env\n")
            if multiline:
                # The ' '.join() is pointless since mutliline is only True when there's 1 arg
                # still, if that were to change this would prevent breakage.
                fh.write("{0}\n".format(" ".join(arguments)))
            else:
                fh.write("{0}\n".format(quote_for_shell(*arguments)))
            script_caller = fh.name
        if debug_wrapper_scripts:
            # -x traces each command as the shell executes it.
            command_args = [shell_path, "-x", script_caller]
        else:
            command_args = [shell_path, script_caller]

    return script_caller, command_args
|
| 460 |
+
|
| 461 |
+
|
| 462 |
+
def get_comspec():
    """Returns COMSPEC from envvars.

    Ensures COMSPEC envvar is set to cmd.exe, if not attempt to find it.

    :raises KeyError: COMSPEC is undefined and cannot be found.
    :returns: COMSPEC value.
    :rtype: str
    """
    if basename(environ.get("COMSPEC", "")).lower() != "cmd.exe":
        for comspec in (
            # %SystemRoot%\System32\cmd.exe
            environ.get("SystemRoot") and join(environ["SystemRoot"], "System32", "cmd.exe"),
            # %windir%\System32\cmd.exe
            environ.get("windir") and join(environ["windir"], "System32", "cmd.exe"),
        ):
            if comspec and isfile(comspec):
                environ["COMSPEC"] = comspec
                break
        else:
            # Logger.warn is a deprecated alias since Python 3.3; use warning().
            log.warning("cmd.exe could not be found. Looked in SystemRoot and windir env vars.\n")

    # fails with KeyError if still undefined
    return environ["COMSPEC"]
|
| 486 |
+
|
| 487 |
+
|
| 488 |
+
def ensure_dir_exists(func):
    """
    Decorator: when *func* returns a ``Path``, create that directory
    (including parents) before handing the result back to the caller.
    Non-``Path`` return values pass through untouched.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        value = func(*args, **kwargs)

        # Only Path results denote a cache directory that must exist.
        if not isinstance(value, Path):
            return value

        try:
            value.mkdir(parents=True, exist_ok=True)
        except OSError as exc:
            raise CondaError(
                "Error encountered while attempting to create cache directory."
                f"\n  Directory: {value}"
                f"\n  Exception: {exc}"
            )
        return value

    return wrapper
|
| 511 |
+
|
| 512 |
+
|
| 513 |
+
@contextmanager
def safe_open(*args, **kwargs):
    """
    Allows us to open files while catching any exceptions
    and raise them as CondaErrors instead.

    We do this to provide a more informative/actionable error output.

    The file handle is now closed unconditionally: the previous version only
    closed it on the clean path, leaking it whenever the ``with`` body raised.
    """
    try:
        fp = open(*args, **kwargs)
    except OSError as exc:
        raise CondaError(
            "Error encountered while reading or writing from cache."
            f"\n  File: {args[0]}"
            f"\n  Exception: {exc}"
        )
    try:
        yield fp
    except OSError as exc:
        # OSErrors raised inside the caller's `with` body surface here.
        raise CondaError(
            "Error encountered while reading or writing from cache."
            f"\n  File: {args[0]}"
            f"\n  Exception: {exc}"
        )
    finally:
        # Always release the handle, even when an exception propagates.
        fp.close()
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/pem.py
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# This file is part of pyasn1-modules software.
|
| 3 |
+
#
|
| 4 |
+
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
| 5 |
+
# License: http://snmplabs.com/pyasn1/license.html
|
| 6 |
+
#
|
| 7 |
+
import base64
|
| 8 |
+
import sys
|
| 9 |
+
|
| 10 |
+
# Parser states: skipping pre-marker text, accumulating a block, decoding it.
stSpam, stHam, stDump = 0, 1, 2


# The markers parameters is in form ('start1', 'stop1'), ('start2', 'stop2')...
# Return is (marker-index, substrate)
def readPemBlocksFromFile(fileObj, *markers):
    """Scan *fileObj* for the first PEM block delimited by any of *markers*.

    Returns ``(marker-index, substrate)``; the index is -1 (and the substrate
    an empty string) when no complete block is found.
    """
    startMarkers = {pair[0]: i for i, pair in enumerate(markers)}
    stopMarkers = {pair[1]: i for i, pair in enumerate(markers)}
    idx = -1
    substrate = ''
    certLines = []
    state = stSpam
    while True:
        line = fileObj.readline()
        if not line:
            break
        line = line.strip()
        if state == stSpam:
            if line in startMarkers:
                certLines = []
                idx = startMarkers[line]
                state = stHam
                continue
        if state == stHam:
            # The stop marker must pair with the start marker that opened the block.
            if line in stopMarkers and stopMarkers[line] == idx:
                state = stDump
            else:
                certLines.append(line)
        if state == stDump:
            if sys.version_info[0] <= 2:
                substrate = ''.join([base64.b64decode(x) for x in certLines])
            else:
                substrate = ''.encode().join([base64.b64decode(x.encode()) for x in certLines])
            break
    return idx, substrate
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
# Backward compatibility routine
|
| 50 |
+
def readPemFromFile(fileObj,
                    startMarker='-----BEGIN CERTIFICATE-----',
                    endMarker='-----END CERTIFICATE-----'):
    """Backward-compatible helper: return only the substrate of the first block."""
    _, substrate = readPemBlocksFromFile(fileObj, (startMarker, endMarker))
    return substrate
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def readBase64fromText(text):
    """Base64-decode *text*, encoding it to bytes first on Python 3."""
    if sys.version_info[0] > 2:
        text = text.encode()
    return base64.b64decode(text)
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def readBase64FromFile(fileObj):
    """Read the whole of *fileObj* and base64-decode its contents."""
    return readBase64fromText(fileObj.read())
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc2631.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# This file is part of pyasn1-modules software.
|
| 3 |
+
#
|
| 4 |
+
# Created by Russ Housley with assistance from asn1ate v.0.6.0.
|
| 5 |
+
#
|
| 6 |
+
# Copyright (c) 2019, Vigil Security, LLC
|
| 7 |
+
# License: http://snmplabs.com/pyasn1/license.html
|
| 8 |
+
#
|
| 9 |
+
# Diffie-Hellman Key Agreement
|
| 10 |
+
#
|
| 11 |
+
# ASN.1 source from:
|
| 12 |
+
# https://www.rfc-editor.org/rfc/rfc2631.txt
|
| 13 |
+
# https://www.rfc-editor.org/errata/eid5897
|
| 14 |
+
#
|
| 15 |
+
|
| 16 |
+
from pyasn1.type import constraint
|
| 17 |
+
from pyasn1.type import namedtype
|
| 18 |
+
from pyasn1.type import tag
|
| 19 |
+
from pyasn1.type import univ
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class KeySpecificInfo(univ.Sequence):
    """KeySpecificInfo ASN.1 SEQUENCE from RFC 2631 (Diffie-Hellman key agreement)."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('algorithm', univ.ObjectIdentifier()),
        # counter is constrained to exactly 4 octets.
        namedtype.NamedType('counter', univ.OctetString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(4, 4)))
    )
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class OtherInfo(univ.Sequence):
    """OtherInfo ASN.1 SEQUENCE from RFC 2631 (per errata 5897)."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('keyInfo', KeySpecificInfo()),
        # Optional, explicitly tagged [0].
        namedtype.OptionalNamedType('partyAInfo', univ.OctetString().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
        # Explicitly tagged [2]; note there is no [1] component in the spec.
        namedtype.NamedType('suppPubInfo', univ.OctetString().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
    )
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc5280.py
ADDED
|
@@ -0,0 +1,1658 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# coding: utf-8
|
| 2 |
+
#
|
| 3 |
+
# This file is part of pyasn1-modules software.
|
| 4 |
+
#
|
| 5 |
+
# Created by Stanisław Pitucha with asn1ate tool.
|
| 6 |
+
# Updated by Russ Housley for ORAddress Extension Attribute opentype support.
|
| 7 |
+
# Updated by Russ Housley for AlgorithmIdentifier opentype support.
|
| 8 |
+
#
|
| 9 |
+
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
| 10 |
+
# License: http://snmplabs.com/pyasn1/license.html
|
| 11 |
+
#
|
| 12 |
+
# Internet X.509 Public Key Infrastructure Certificate and Certificate
|
| 13 |
+
# Revocation List (CRL) Profile
|
| 14 |
+
#
|
| 15 |
+
# ASN.1 source from:
|
| 16 |
+
# https://www.rfc-editor.org/rfc/rfc5280.txt
|
| 17 |
+
#
|
| 18 |
+
from pyasn1.type import char
|
| 19 |
+
from pyasn1.type import constraint
|
| 20 |
+
from pyasn1.type import namedtype
|
| 21 |
+
from pyasn1.type import namedval
|
| 22 |
+
from pyasn1.type import opentype
|
| 23 |
+
from pyasn1.type import tag
|
| 24 |
+
from pyasn1.type import univ
|
| 25 |
+
from pyasn1.type import useful
|
| 26 |
+
|
| 27 |
+
# Stand-in for the ASN.1 upper bound "MAX" used in SIZE (1..MAX) constraints.
MAX = float('inf')


def _buildOid(*components):
    """Concatenate OID components into one ``univ.ObjectIdentifier``.

    Each component may be an existing ``univ.ObjectIdentifier`` (its arcs
    are spliced in) or any value convertible with ``int()`` (used as a
    single arc).

    :param components: OID fragments, in order.
    :return: a new ``univ.ObjectIdentifier`` covering all arcs.
    """
    output = []
    # ``components`` is already a tuple; the original wrapped it in a
    # redundant tuple() call.
    for x in components:
        if isinstance(x, univ.ObjectIdentifier):
            output.extend(list(x))
        else:
            output.append(int(x))

    return univ.ObjectIdentifier(output)
# X.400 upper bounds and extension-attribute tags (RFC 5280, Appendix A).
ub_e163_4_sub_address_length = univ.Integer(40)

ub_e163_4_number_length = univ.Integer(15)

unformatted_postal_address = univ.Integer(16)


class TerminalType(univ.Integer):
    """Kind of terminal behind an X.400 address."""
    namedValues = namedval.NamedValues(
        ('telex', 3),
        ('teletex', 4),
        ('g3-facsimile', 5),
        ('g4-facsimile', 6),
        ('ia5-terminal', 7),
        ('videotex', 8)
    )


class Extension(univ.Sequence):
    """X.509 extension: OID, criticality flag and DER-encoded value."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('extnID', univ.ObjectIdentifier()),
        namedtype.DefaultedNamedType('critical', univ.Boolean().subtype(value=0)),
        namedtype.NamedType('extnValue', univ.OctetString())
    )


class Extensions(univ.SequenceOf):
    """SEQUENCE SIZE (1..MAX) OF Extension."""
    componentType = Extension()
    sizeSpec = constraint.ValueSizeConstraint(1, MAX)
physical_delivery_personal_name = univ.Integer(13)

ub_unformatted_address_length = univ.Integer(180)

ub_pds_parameter_length = univ.Integer(30)

ub_pds_physical_address_lines = univ.Integer(6)


class UnformattedPostalAddress(univ.Set):
    """Free-form postal address: printable lines and/or one teletex blob."""
    componentType = namedtype.NamedTypes(
        namedtype.OptionalNamedType(
            'printable-address',
            univ.SequenceOf(componentType=char.PrintableString().subtype(
                subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)))),
        namedtype.OptionalNamedType(
            'teletex-string',
            char.TeletexString().subtype(
                subtypeSpec=constraint.ValueSizeConstraint(1, ub_unformatted_address_length)))
    )


ub_organization_name = univ.Integer(64)


class X520OrganizationName(univ.Choice):
    """Directory-string CHOICE for the organizationName attribute."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('teletexString', char.TeletexString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
        namedtype.NamedType('printableString', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
        namedtype.NamedType('universalString', char.UniversalString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
        namedtype.NamedType('utf8String', char.UTF8String().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
        namedtype.NamedType('bmpString', char.BMPString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name)))
    )
ub_x121_address_length = univ.Integer(16)

pds_name = univ.Integer(7)

# PKIX root arc: 1.3.6.1.5.5.7
id_pkix = _buildOid(1, 3, 6, 1, 5, 5, 7)

id_kp = _buildOid(id_pkix, 3)

ub_postal_code_length = univ.Integer(16)


class PostalCode(univ.Choice):
    """Postal code, numeric or printable, at most 16 characters."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('numeric-code', char.NumericString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length))),
        namedtype.NamedType('printable-code', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length)))
    )


ub_generation_qualifier_length = univ.Integer(3)

unique_postal_name = univ.Integer(20)


class DomainComponent(char.IA5String):
    """One label of a DNS name (the dc attribute value)."""


ub_domain_defined_attribute_value_length = univ.Integer(128)

ub_match = univ.Integer(128)

# X.500 attribute-type arc: 2.5.4
id_at = _buildOid(2, 5, 4)


class AttributeType(univ.ObjectIdentifier):
    """OID naming a directory attribute type."""


id_at_organizationalUnitName = _buildOid(id_at, 11)

terminal_type = univ.Integer(23)
class PDSParameter(univ.Set):
    """Printable and/or teletex form of one postal-delivery parameter."""
    componentType = namedtype.NamedTypes(
        namedtype.OptionalNamedType('printable-string', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length))),
        namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)))
    )


class PhysicalDeliveryPersonalName(PDSParameter):
    """Personal name used for physical delivery."""


ub_surname_length = univ.Integer(40)

id_ad = _buildOid(id_pkix, 48)

ub_domain_defined_attribute_type_length = univ.Integer(8)


class TeletexDomainDefinedAttribute(univ.Sequence):
    """Domain-defined (type, value) pair in teletex characters."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('type', char.TeletexString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
        namedtype.NamedType('value', char.TeletexString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_value_length)))
    )


ub_domain_defined_attributes = univ.Integer(4)


class TeletexDomainDefinedAttributes(univ.SequenceOf):
    """SEQUENCE SIZE (1..4) OF TeletexDomainDefinedAttribute."""
    componentType = TeletexDomainDefinedAttribute()
    sizeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)


extended_network_address = univ.Integer(22)

ub_locality_name = univ.Integer(128)
class X520LocalityName(univ.Choice):
    """Directory-string CHOICE for the localityName attribute."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('teletexString', char.TeletexString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
        namedtype.NamedType('printableString', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
        namedtype.NamedType('universalString', char.UniversalString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
        namedtype.NamedType('utf8String', char.UTF8String().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
        namedtype.NamedType('bmpString', char.BMPString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name)))
    )


teletex_organization_name = univ.Integer(3)

ub_given_name_length = univ.Integer(16)

ub_initials_length = univ.Integer(5)


class PersonalName(univ.Set):
    """Structured personal name; components carry implicit tags [0]..[3]."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('surname', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length)).subtype(
            implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
        namedtype.OptionalNamedType('given-name', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length)).subtype(
            implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
        namedtype.OptionalNamedType('initials', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length)).subtype(
            implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
        namedtype.OptionalNamedType('generation-qualifier', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length)).subtype(
            implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
    )


ub_organizational_unit_name_length = univ.Integer(32)


class OrganizationalUnitName(char.PrintableString):
    """Printable organizational-unit name, 1..32 characters."""
    subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)


id_at_generationQualifier = _buildOid(id_at, 44)
class Version(univ.Integer):
    """X.509 version number: v1(0), v2(1), v3(2)."""
    namedValues = namedval.NamedValues(
        ('v1', 0),
        ('v2', 1),
        ('v3', 2)
    )


class CertificateSerialNumber(univ.Integer):
    """Serial number assigned by the issuing CA."""


# Maps algorithm OIDs to parameter types for opentype-aware decoding.
algorithmIdentifierMap = {}


class AlgorithmIdentifier(univ.Sequence):
    """Algorithm OID plus optional algorithm-specific parameters."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('algorithm', univ.ObjectIdentifier()),
        namedtype.OptionalNamedType(
            'parameters', univ.Any(),
            openType=opentype.OpenType('algorithm', algorithmIdentifierMap))
    )
class Time(univ.Choice):
    """Time, expressed as UTCTime or GeneralizedTime."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('utcTime', useful.UTCTime()),
        namedtype.NamedType('generalTime', useful.GeneralizedTime())
    )


class AttributeValue(univ.Any):
    """Opaque attribute value; concrete type depends on the attribute OID."""


# Maps attribute-type OIDs to value types for opentype-aware decoding.
certificateAttributesMap = {}


class AttributeTypeAndValue(univ.Sequence):
    """One (type OID, value) pair inside a relative distinguished name."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('type', AttributeType()),
        namedtype.NamedType(
            'value', AttributeValue(),
            openType=opentype.OpenType('type', certificateAttributesMap))
    )


class RelativeDistinguishedName(univ.SetOf):
    """SET SIZE (1..MAX) OF AttributeTypeAndValue."""
    componentType = AttributeTypeAndValue()
    sizeSpec = constraint.ValueSizeConstraint(1, MAX)


class RDNSequence(univ.SequenceOf):
    """SEQUENCE OF RelativeDistinguishedName."""
    componentType = RelativeDistinguishedName()


class Name(univ.Choice):
    """Distinguished name; the only defined alternative is rdnSequence."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('rdnSequence', RDNSequence())
    )
class TBSCertList(univ.Sequence):
    """To-be-signed portion of a CRL (RFC 5280, section 5.1)."""
    componentType = namedtype.NamedTypes(
        namedtype.OptionalNamedType('version', Version()),
        namedtype.NamedType('signature', AlgorithmIdentifier()),
        namedtype.NamedType('issuer', Name()),
        namedtype.NamedType('thisUpdate', Time()),
        namedtype.OptionalNamedType('nextUpdate', Time()),
        # Each entry: serial number, revocation time, optional per-entry
        # extensions.
        namedtype.OptionalNamedType(
            'revokedCertificates',
            univ.SequenceOf(componentType=univ.Sequence(
                componentType=namedtype.NamedTypes(
                    namedtype.NamedType('userCertificate', CertificateSerialNumber()),
                    namedtype.NamedType('revocationDate', Time()),
                    namedtype.OptionalNamedType('crlEntryExtensions', Extensions()))))),
        namedtype.OptionalNamedType(
            'crlExtensions',
            Extensions().subtype(
                explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
    )


class CertificateList(univ.Sequence):
    """Signed CRL: tbsCertList plus signature algorithm and value."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('tbsCertList', TBSCertList()),
        namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
        namedtype.NamedType('signature', univ.BitString())
    )
class PhysicalDeliveryOfficeName(PDSParameter):
    """Name of the post office handling physical delivery."""


ub_extension_attributes = univ.Integer(256)

# Opentype lookup tables, populated elsewhere by users of this module.
certificateExtensionsMap = {
}

oraddressExtensionAttributeMap = {
}


class ExtensionAttribute(univ.Sequence):
    """OR-address extension attribute: integer tag [0] plus ANY value [1]."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType(
            'extension-attribute-type',
            univ.Integer().subtype(
                subtypeSpec=constraint.ValueRangeConstraint(0, ub_extension_attributes)).subtype(
                implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
        namedtype.NamedType(
            'extension-attribute-value',
            univ.Any().subtype(
                explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)),
            openType=opentype.OpenType('extension-attribute-type',
                                       oraddressExtensionAttributeMap))
    )


id_qt = _buildOid(id_pkix, 2)

id_qt_cps = _buildOid(id_qt, 1)

id_at_stateOrProvinceName = _buildOid(id_at, 8)

id_at_title = _buildOid(id_at, 12)

id_at_serialNumber = _buildOid(id_at, 5)


class X520dnQualifier(char.PrintableString):
    """Distinguished-name qualifier attribute value."""


class PosteRestanteAddress(PDSParameter):
    """Poste-restante (held for pickup) delivery address."""


poste_restante_address = univ.Integer(19)


class UniqueIdentifier(univ.BitString):
    """Issuer/subject unique identifier bit string."""
class Validity(univ.Sequence):
    """Certificate validity interval: notBefore .. notAfter."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('notBefore', Time()),
        namedtype.NamedType('notAfter', Time())
    )


class SubjectPublicKeyInfo(univ.Sequence):
    """Public-key algorithm plus the encoded public key bits."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('algorithm', AlgorithmIdentifier()),
        namedtype.NamedType('subjectPublicKey', univ.BitString())
    )


class TBSCertificate(univ.Sequence):
    """To-be-signed portion of an X.509 certificate (RFC 5280, 4.1)."""
    componentType = namedtype.NamedTypes(
        # version is EXPLICIT [0] and defaults to v1 when absent.
        namedtype.DefaultedNamedType(
            'version',
            Version().subtype(explicitTag=tag.Tag(
                tag.tagClassContext, tag.tagFormatSimple, 0)).subtype(value="v1")),
        namedtype.NamedType('serialNumber', CertificateSerialNumber()),
        namedtype.NamedType('signature', AlgorithmIdentifier()),
        namedtype.NamedType('issuer', Name()),
        namedtype.NamedType('validity', Validity()),
        namedtype.NamedType('subject', Name()),
        namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()),
        namedtype.OptionalNamedType('issuerUniqueID', UniqueIdentifier().subtype(
            implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
        namedtype.OptionalNamedType('subjectUniqueID', UniqueIdentifier().subtype(
            implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
        namedtype.OptionalNamedType('extensions', Extensions().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
    )


physical_delivery_office_name = univ.Integer(10)

ub_name = univ.Integer(32768)
class X520name(univ.Choice):
    """Directory-string CHOICE for name attributes (surname, givenName, ...)."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('teletexString', char.TeletexString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
        namedtype.NamedType('printableString', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
        namedtype.NamedType('universalString', char.UniversalString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
        namedtype.NamedType('utf8String', char.UTF8String().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
        namedtype.NamedType('bmpString', char.BMPString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_name)))
    )


id_at_dnQualifier = _buildOid(id_at, 46)

ub_serial_number = univ.Integer(64)

ub_pseudonym = univ.Integer(128)

# PKCS #9 arc: 1.2.840.113549.1.9
pkcs_9 = _buildOid(1, 2, 840, 113549, 1, 9)


class X121Address(char.NumericString):
    """X.121 numeric network address, 1..16 digits."""
    subtypeSpec = constraint.ValueSizeConstraint(1, ub_x121_address_length)


class NetworkAddress(X121Address):
    """Network address; same form as an X.121 address."""


ub_integer_options = univ.Integer(256)

id_at_commonName = _buildOid(id_at, 3)

ub_organization_name_length = univ.Integer(64)

id_ad_ocsp = _buildOid(id_ad, 1)

ub_country_name_numeric_length = univ.Integer(3)

ub_country_name_alpha_length = univ.Integer(2)
class PhysicalDeliveryCountryName(univ.Choice):
    """Delivery country: 3-digit X.121 DCC or 2-letter ISO 3166 code."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(
                ub_country_name_numeric_length, ub_country_name_numeric_length))),
        namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(
                ub_country_name_alpha_length, ub_country_name_alpha_length)))
    )


id_emailAddress = _buildOid(pkcs_9, 1)

common_name = univ.Integer(1)


class X520Pseudonym(univ.Choice):
    """Directory-string CHOICE for the pseudonym attribute."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('teletexString', char.TeletexString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym))),
        namedtype.NamedType('printableString', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym))),
        namedtype.NamedType('universalString', char.UniversalString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym))),
        namedtype.NamedType('utf8String', char.UTF8String().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym))),
        namedtype.NamedType('bmpString', char.BMPString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym)))
    )


ub_domain_name_length = univ.Integer(16)


class AdministrationDomainName(univ.Choice):
    """X.400 ADMD name; wrapped in an explicit [APPLICATION 2] tag."""
    tagSet = univ.Choice.tagSet.tagExplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 2))
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('numeric', char.NumericString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length))),
        namedtype.NamedType('printable', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length)))
    )
class PresentationAddress(univ.Sequence):
    """OSI presentation address: optional P/S/T selectors plus nAddresses."""
    componentType = namedtype.NamedTypes(
        namedtype.OptionalNamedType('pSelector', univ.OctetString().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
        namedtype.OptionalNamedType('sSelector', univ.OctetString().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
        namedtype.OptionalNamedType('tSelector', univ.OctetString().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
        namedtype.NamedType('nAddresses', univ.SetOf(
            componentType=univ.OctetString()).subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
    )


class ExtendedNetworkAddress(univ.Choice):
    """Either an E.163-4 telephone number or a presentation address."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType(
            'e163-4-address',
            univ.Sequence(componentType=namedtype.NamedTypes(
                namedtype.NamedType('number', char.NumericString().subtype(
                    subtypeSpec=constraint.ValueSizeConstraint(
                        1, ub_e163_4_number_length)).subtype(
                    implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
                namedtype.OptionalNamedType('sub-address', char.NumericString().subtype(
                    subtypeSpec=constraint.ValueSizeConstraint(
                        1, ub_e163_4_sub_address_length)).subtype(
                    implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))))),
        namedtype.NamedType('psap-address', PresentationAddress().subtype(
            implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
    )
class TeletexOrganizationName(char.TeletexString):
    """Organization name in teletex characters, 1..64."""
    subtypeSpec = constraint.ValueSizeConstraint(1, ub_organization_name_length)


ub_terminal_id_length = univ.Integer(24)


class TerminalIdentifier(char.PrintableString):
    """Terminal identifier, 1..24 printable characters."""
    subtypeSpec = constraint.ValueSizeConstraint(1, ub_terminal_id_length)


id_ad_caIssuers = _buildOid(id_ad, 2)

id_at_countryName = _buildOid(id_at, 6)


class StreetAddress(PDSParameter):
    """Street-address delivery parameter."""


postal_code = univ.Integer(9)

id_at_givenName = _buildOid(id_at, 42)

ub_title = univ.Integer(64)


class ExtensionAttributes(univ.SetOf):
    """SET SIZE (1..256) OF ExtensionAttribute."""
    componentType = ExtensionAttribute()
    sizeSpec = constraint.ValueSizeConstraint(1, ub_extension_attributes)


ub_emailaddress_length = univ.Integer(255)

id_ad_caRepository = _buildOid(id_ad, 5)
class ExtensionORAddressComponents(PDSParameter):
    """Additional OR-address components delivery parameter."""


ub_organizational_unit_name = univ.Integer(64)


class X520OrganizationalUnitName(univ.Choice):
    """Directory-string CHOICE for the organizationalUnitName attribute."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('teletexString', char.TeletexString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
        namedtype.NamedType('printableString', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
        namedtype.NamedType('universalString', char.UniversalString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
        namedtype.NamedType('utf8String', char.UTF8String().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
        namedtype.NamedType('bmpString', char.BMPString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name)))
    )


class LocalPostalAttributes(PDSParameter):
    """Local postal-attributes delivery parameter."""


teletex_organizational_unit_names = univ.Integer(5)


class X520Title(univ.Choice):
    """Directory-string CHOICE for the title attribute."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('teletexString', char.TeletexString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
        namedtype.NamedType('printableString', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
        namedtype.NamedType('universalString', char.UniversalString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
        namedtype.NamedType('utf8String', char.UTF8String().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
        namedtype.NamedType('bmpString', char.BMPString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_title)))
    )


id_at_localityName = _buildOid(id_at, 7)

id_at_initials = _buildOid(id_at, 43)

ub_state_name = univ.Integer(128)
class X520StateOrProvinceName(univ.Choice):
    """Directory-string CHOICE for the stateOrProvinceName attribute."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('teletexString', char.TeletexString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
        namedtype.NamedType('printableString', char.PrintableString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
        namedtype.NamedType('universalString', char.UniversalString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
        namedtype.NamedType('utf8String', char.UTF8String().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
        namedtype.NamedType('bmpString', char.BMPString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name)))
    )


physical_delivery_organization_name = univ.Integer(14)

id_at_surname = _buildOid(id_at, 4)


class X520countryName(char.PrintableString):
    """Two-letter country code (exactly 2 printable characters)."""
    subtypeSpec = constraint.ValueSizeConstraint(2, 2)


physical_delivery_office_number = univ.Integer(11)

id_qt_unotice = _buildOid(id_qt, 2)


class X520SerialNumber(char.PrintableString):
    """serialNumber attribute value, 1..64 printable characters."""
    subtypeSpec = constraint.ValueSizeConstraint(1, ub_serial_number)
class Attribute(univ.Sequence):
|
| 755 |
+
componentType = namedtype.NamedTypes(
|
| 756 |
+
namedtype.NamedType('type', AttributeType()),
|
| 757 |
+
namedtype.NamedType('values',
|
| 758 |
+
univ.SetOf(componentType=AttributeValue()),
|
| 759 |
+
openType=opentype.OpenType('type', certificateAttributesMap))
|
| 760 |
+
)
|
| 761 |
+
|
| 762 |
+
ub_common_name = univ.Integer(64)
|
| 763 |
+
|
| 764 |
+
id_pe = _buildOid(id_pkix, 1)
|
| 765 |
+
|
| 766 |
+
|
| 767 |
+
class ExtensionPhysicalDeliveryAddressComponents(PDSParameter):
|
| 768 |
+
pass
|
| 769 |
+
|
| 770 |
+
|
| 771 |
+
class EmailAddress(char.IA5String):
|
| 772 |
+
pass
|
| 773 |
+
|
| 774 |
+
|
| 775 |
+
EmailAddress.subtypeSpec = constraint.ValueSizeConstraint(1, ub_emailaddress_length)
|
| 776 |
+
|
| 777 |
+
id_at_organizationName = _buildOid(id_at, 10)
|
| 778 |
+
|
| 779 |
+
post_office_box_address = univ.Integer(18)
|
| 780 |
+
|
| 781 |
+
|
| 782 |
+
class BuiltInDomainDefinedAttribute(univ.Sequence):
|
| 783 |
+
pass
|
| 784 |
+
|
| 785 |
+
|
| 786 |
+
BuiltInDomainDefinedAttribute.componentType = namedtype.NamedTypes(
|
| 787 |
+
namedtype.NamedType('type', char.PrintableString().subtype(
|
| 788 |
+
subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
|
| 789 |
+
namedtype.NamedType('value', char.PrintableString().subtype(
|
| 790 |
+
subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_value_length)))
|
| 791 |
+
)
|
| 792 |
+
|
| 793 |
+
|
| 794 |
+
class BuiltInDomainDefinedAttributes(univ.SequenceOf):
|
| 795 |
+
pass
|
| 796 |
+
|
| 797 |
+
|
| 798 |
+
BuiltInDomainDefinedAttributes.componentType = BuiltInDomainDefinedAttribute()
|
| 799 |
+
BuiltInDomainDefinedAttributes.sizeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)
|
| 800 |
+
|
| 801 |
+
id_at_pseudonym = _buildOid(id_at, 65)
|
| 802 |
+
|
| 803 |
+
id_domainComponent = _buildOid(0, 9, 2342, 19200300, 100, 1, 25)
|
| 804 |
+
|
| 805 |
+
|
| 806 |
+
class X520CommonName(univ.Choice):
|
| 807 |
+
pass
|
| 808 |
+
|
| 809 |
+
|
| 810 |
+
X520CommonName.componentType = namedtype.NamedTypes(
|
| 811 |
+
namedtype.NamedType('teletexString',
|
| 812 |
+
char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
|
| 813 |
+
namedtype.NamedType('printableString',
|
| 814 |
+
char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
|
| 815 |
+
namedtype.NamedType('universalString',
|
| 816 |
+
char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
|
| 817 |
+
namedtype.NamedType('utf8String',
|
| 818 |
+
char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
|
| 819 |
+
namedtype.NamedType('bmpString',
|
| 820 |
+
char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name)))
|
| 821 |
+
)
|
| 822 |
+
|
| 823 |
+
extension_OR_address_components = univ.Integer(12)
|
| 824 |
+
|
| 825 |
+
ub_organizational_units = univ.Integer(4)
|
| 826 |
+
|
| 827 |
+
teletex_personal_name = univ.Integer(4)
|
| 828 |
+
|
| 829 |
+
ub_numeric_user_id_length = univ.Integer(32)
|
| 830 |
+
|
| 831 |
+
ub_common_name_length = univ.Integer(64)
|
| 832 |
+
|
| 833 |
+
|
| 834 |
+
class TeletexCommonName(char.TeletexString):
|
| 835 |
+
pass
|
| 836 |
+
|
| 837 |
+
|
| 838 |
+
TeletexCommonName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_common_name_length)
|
| 839 |
+
|
| 840 |
+
|
| 841 |
+
class PhysicalDeliveryOrganizationName(PDSParameter):
|
| 842 |
+
pass
|
| 843 |
+
|
| 844 |
+
|
| 845 |
+
extension_physical_delivery_address_components = univ.Integer(15)
|
| 846 |
+
|
| 847 |
+
|
| 848 |
+
class NumericUserIdentifier(char.NumericString):
|
| 849 |
+
pass
|
| 850 |
+
|
| 851 |
+
|
| 852 |
+
NumericUserIdentifier.subtypeSpec = constraint.ValueSizeConstraint(1, ub_numeric_user_id_length)
|
| 853 |
+
|
| 854 |
+
|
| 855 |
+
class CountryName(univ.Choice):
|
| 856 |
+
pass
|
| 857 |
+
|
| 858 |
+
|
| 859 |
+
CountryName.tagSet = univ.Choice.tagSet.tagExplicitly(tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 1))
|
| 860 |
+
CountryName.componentType = namedtype.NamedTypes(
|
| 861 |
+
namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(
|
| 862 |
+
subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))),
|
| 863 |
+
namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(
|
| 864 |
+
subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length)))
|
| 865 |
+
)
|
| 866 |
+
|
| 867 |
+
|
| 868 |
+
class OrganizationName(char.PrintableString):
|
| 869 |
+
pass
|
| 870 |
+
|
| 871 |
+
|
| 872 |
+
OrganizationName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organization_name_length)
|
| 873 |
+
|
| 874 |
+
|
| 875 |
+
class OrganizationalUnitNames(univ.SequenceOf):
|
| 876 |
+
pass
|
| 877 |
+
|
| 878 |
+
|
| 879 |
+
OrganizationalUnitNames.componentType = OrganizationalUnitName()
|
| 880 |
+
OrganizationalUnitNames.sizeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units)
|
| 881 |
+
|
| 882 |
+
|
| 883 |
+
class PrivateDomainName(univ.Choice):
|
| 884 |
+
pass
|
| 885 |
+
|
| 886 |
+
|
| 887 |
+
PrivateDomainName.componentType = namedtype.NamedTypes(
|
| 888 |
+
namedtype.NamedType('numeric', char.NumericString().subtype(
|
| 889 |
+
subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length))),
|
| 890 |
+
namedtype.NamedType('printable', char.PrintableString().subtype(
|
| 891 |
+
subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length)))
|
| 892 |
+
)
|
| 893 |
+
|
| 894 |
+
|
| 895 |
+
class BuiltInStandardAttributes(univ.Sequence):
|
| 896 |
+
pass
|
| 897 |
+
|
| 898 |
+
|
| 899 |
+
BuiltInStandardAttributes.componentType = namedtype.NamedTypes(
|
| 900 |
+
namedtype.OptionalNamedType('country-name', CountryName()),
|
| 901 |
+
namedtype.OptionalNamedType('administration-domain-name', AdministrationDomainName()),
|
| 902 |
+
namedtype.OptionalNamedType('network-address', NetworkAddress().subtype(
|
| 903 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
|
| 904 |
+
namedtype.OptionalNamedType('terminal-identifier', TerminalIdentifier().subtype(
|
| 905 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
|
| 906 |
+
namedtype.OptionalNamedType('private-domain-name', PrivateDomainName().subtype(
|
| 907 |
+
explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
|
| 908 |
+
namedtype.OptionalNamedType('organization-name', OrganizationName().subtype(
|
| 909 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
|
| 910 |
+
namedtype.OptionalNamedType('numeric-user-identifier', NumericUserIdentifier().subtype(
|
| 911 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
|
| 912 |
+
namedtype.OptionalNamedType('personal-name', PersonalName().subtype(
|
| 913 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
|
| 914 |
+
namedtype.OptionalNamedType('organizational-unit-names', OrganizationalUnitNames().subtype(
|
| 915 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6)))
|
| 916 |
+
)
|
| 917 |
+
|
| 918 |
+
|
| 919 |
+
class ORAddress(univ.Sequence):
|
| 920 |
+
pass
|
| 921 |
+
|
| 922 |
+
|
| 923 |
+
ORAddress.componentType = namedtype.NamedTypes(
|
| 924 |
+
namedtype.NamedType('built-in-standard-attributes', BuiltInStandardAttributes()),
|
| 925 |
+
namedtype.OptionalNamedType('built-in-domain-defined-attributes', BuiltInDomainDefinedAttributes()),
|
| 926 |
+
namedtype.OptionalNamedType('extension-attributes', ExtensionAttributes())
|
| 927 |
+
)
|
| 928 |
+
|
| 929 |
+
|
| 930 |
+
class DistinguishedName(RDNSequence):
|
| 931 |
+
pass
|
| 932 |
+
|
| 933 |
+
|
| 934 |
+
id_ad_timeStamping = _buildOid(id_ad, 3)
|
| 935 |
+
|
| 936 |
+
|
| 937 |
+
class PhysicalDeliveryOfficeNumber(PDSParameter):
|
| 938 |
+
pass
|
| 939 |
+
|
| 940 |
+
|
| 941 |
+
teletex_domain_defined_attributes = univ.Integer(6)
|
| 942 |
+
|
| 943 |
+
|
| 944 |
+
class UniquePostalName(PDSParameter):
|
| 945 |
+
pass
|
| 946 |
+
|
| 947 |
+
|
| 948 |
+
physical_delivery_country_name = univ.Integer(8)
|
| 949 |
+
|
| 950 |
+
ub_pds_name_length = univ.Integer(16)
|
| 951 |
+
|
| 952 |
+
|
| 953 |
+
class PDSName(char.PrintableString):
|
| 954 |
+
pass
|
| 955 |
+
|
| 956 |
+
|
| 957 |
+
PDSName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_pds_name_length)
|
| 958 |
+
|
| 959 |
+
|
| 960 |
+
class TeletexPersonalName(univ.Set):
|
| 961 |
+
pass
|
| 962 |
+
|
| 963 |
+
|
| 964 |
+
TeletexPersonalName.componentType = namedtype.NamedTypes(
|
| 965 |
+
namedtype.NamedType('surname', char.TeletexString().subtype(
|
| 966 |
+
subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length)).subtype(
|
| 967 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
|
| 968 |
+
namedtype.OptionalNamedType('given-name', char.TeletexString().subtype(
|
| 969 |
+
subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length)).subtype(
|
| 970 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
|
| 971 |
+
namedtype.OptionalNamedType('initials', char.TeletexString().subtype(
|
| 972 |
+
subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length)).subtype(
|
| 973 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
|
| 974 |
+
namedtype.OptionalNamedType('generation-qualifier', char.TeletexString().subtype(
|
| 975 |
+
subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length)).subtype(
|
| 976 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
|
| 977 |
+
)
|
| 978 |
+
|
| 979 |
+
street_address = univ.Integer(17)
|
| 980 |
+
|
| 981 |
+
|
| 982 |
+
class PostOfficeBoxAddress(PDSParameter):
|
| 983 |
+
pass
|
| 984 |
+
|
| 985 |
+
|
| 986 |
+
local_postal_attributes = univ.Integer(21)
|
| 987 |
+
|
| 988 |
+
|
| 989 |
+
class DirectoryString(univ.Choice):
|
| 990 |
+
pass
|
| 991 |
+
|
| 992 |
+
|
| 993 |
+
DirectoryString.componentType = namedtype.NamedTypes(
|
| 994 |
+
namedtype.NamedType('teletexString',
|
| 995 |
+
char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
|
| 996 |
+
namedtype.NamedType('printableString',
|
| 997 |
+
char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
|
| 998 |
+
namedtype.NamedType('universalString',
|
| 999 |
+
char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
|
| 1000 |
+
namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
|
| 1001 |
+
namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
|
| 1002 |
+
)
|
| 1003 |
+
|
| 1004 |
+
teletex_common_name = univ.Integer(2)
|
| 1005 |
+
|
| 1006 |
+
|
| 1007 |
+
class CommonName(char.PrintableString):
|
| 1008 |
+
pass
|
| 1009 |
+
|
| 1010 |
+
|
| 1011 |
+
CommonName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_common_name_length)
|
| 1012 |
+
|
| 1013 |
+
|
| 1014 |
+
class Certificate(univ.Sequence):
|
| 1015 |
+
pass
|
| 1016 |
+
|
| 1017 |
+
|
| 1018 |
+
Certificate.componentType = namedtype.NamedTypes(
|
| 1019 |
+
namedtype.NamedType('tbsCertificate', TBSCertificate()),
|
| 1020 |
+
namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
|
| 1021 |
+
namedtype.NamedType('signature', univ.BitString())
|
| 1022 |
+
)
|
| 1023 |
+
|
| 1024 |
+
|
| 1025 |
+
class TeletexOrganizationalUnitName(char.TeletexString):
|
| 1026 |
+
pass
|
| 1027 |
+
|
| 1028 |
+
|
| 1029 |
+
TeletexOrganizationalUnitName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)
|
| 1030 |
+
|
| 1031 |
+
id_at_name = _buildOid(id_at, 41)
|
| 1032 |
+
|
| 1033 |
+
|
| 1034 |
+
class TeletexOrganizationalUnitNames(univ.SequenceOf):
|
| 1035 |
+
pass
|
| 1036 |
+
|
| 1037 |
+
|
| 1038 |
+
TeletexOrganizationalUnitNames.componentType = TeletexOrganizationalUnitName()
|
| 1039 |
+
TeletexOrganizationalUnitNames.sizeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units)
|
| 1040 |
+
|
| 1041 |
+
id_ce = _buildOid(2, 5, 29)
|
| 1042 |
+
|
| 1043 |
+
id_ce_issuerAltName = _buildOid(id_ce, 18)
|
| 1044 |
+
|
| 1045 |
+
|
| 1046 |
+
class SkipCerts(univ.Integer):
|
| 1047 |
+
pass
|
| 1048 |
+
|
| 1049 |
+
|
| 1050 |
+
SkipCerts.subtypeSpec = constraint.ValueRangeConstraint(0, MAX)
|
| 1051 |
+
|
| 1052 |
+
|
| 1053 |
+
class CRLReason(univ.Enumerated):
|
| 1054 |
+
pass
|
| 1055 |
+
|
| 1056 |
+
|
| 1057 |
+
CRLReason.namedValues = namedval.NamedValues(
|
| 1058 |
+
('unspecified', 0),
|
| 1059 |
+
('keyCompromise', 1),
|
| 1060 |
+
('cACompromise', 2),
|
| 1061 |
+
('affiliationChanged', 3),
|
| 1062 |
+
('superseded', 4),
|
| 1063 |
+
('cessationOfOperation', 5),
|
| 1064 |
+
('certificateHold', 6),
|
| 1065 |
+
('removeFromCRL', 8),
|
| 1066 |
+
('privilegeWithdrawn', 9),
|
| 1067 |
+
('aACompromise', 10)
|
| 1068 |
+
)
|
| 1069 |
+
|
| 1070 |
+
|
| 1071 |
+
class PrivateKeyUsagePeriod(univ.Sequence):
|
| 1072 |
+
pass
|
| 1073 |
+
|
| 1074 |
+
|
| 1075 |
+
PrivateKeyUsagePeriod.componentType = namedtype.NamedTypes(
|
| 1076 |
+
namedtype.OptionalNamedType('notBefore', useful.GeneralizedTime().subtype(
|
| 1077 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
|
| 1078 |
+
namedtype.OptionalNamedType('notAfter', useful.GeneralizedTime().subtype(
|
| 1079 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
|
| 1080 |
+
)
|
| 1081 |
+
|
| 1082 |
+
|
| 1083 |
+
anotherNameMap = {
|
| 1084 |
+
|
| 1085 |
+
}
|
| 1086 |
+
|
| 1087 |
+
|
| 1088 |
+
class AnotherName(univ.Sequence):
|
| 1089 |
+
componentType = namedtype.NamedTypes(
|
| 1090 |
+
namedtype.NamedType('type-id', univ.ObjectIdentifier()),
|
| 1091 |
+
namedtype.NamedType(
|
| 1092 |
+
'value',
|
| 1093 |
+
univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)),
|
| 1094 |
+
openType=opentype.OpenType('type-id', anotherNameMap)
|
| 1095 |
+
)
|
| 1096 |
+
)
|
| 1097 |
+
|
| 1098 |
+
|
| 1099 |
+
class EDIPartyName(univ.Sequence):
|
| 1100 |
+
pass
|
| 1101 |
+
|
| 1102 |
+
|
| 1103 |
+
EDIPartyName.componentType = namedtype.NamedTypes(
|
| 1104 |
+
namedtype.OptionalNamedType('nameAssigner', DirectoryString().subtype(
|
| 1105 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
|
| 1106 |
+
namedtype.NamedType('partyName', DirectoryString().subtype(
|
| 1107 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
|
| 1108 |
+
)
|
| 1109 |
+
|
| 1110 |
+
|
| 1111 |
+
class GeneralName(univ.Choice):
|
| 1112 |
+
pass
|
| 1113 |
+
|
| 1114 |
+
|
| 1115 |
+
GeneralName.componentType = namedtype.NamedTypes(
|
| 1116 |
+
namedtype.NamedType('otherName',
|
| 1117 |
+
AnotherName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
|
| 1118 |
+
namedtype.NamedType('rfc822Name',
|
| 1119 |
+
char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
|
| 1120 |
+
namedtype.NamedType('dNSName',
|
| 1121 |
+
char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
|
| 1122 |
+
namedtype.NamedType('x400Address',
|
| 1123 |
+
ORAddress().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
|
| 1124 |
+
namedtype.NamedType('directoryName',
|
| 1125 |
+
Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
|
| 1126 |
+
namedtype.NamedType('ediPartyName',
|
| 1127 |
+
EDIPartyName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
|
| 1128 |
+
namedtype.NamedType('uniformResourceIdentifier',
|
| 1129 |
+
char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))),
|
| 1130 |
+
namedtype.NamedType('iPAddress',
|
| 1131 |
+
univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
|
| 1132 |
+
namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype(
|
| 1133 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8)))
|
| 1134 |
+
)
|
| 1135 |
+
|
| 1136 |
+
|
| 1137 |
+
class BaseDistance(univ.Integer):
|
| 1138 |
+
pass
|
| 1139 |
+
|
| 1140 |
+
|
| 1141 |
+
BaseDistance.subtypeSpec = constraint.ValueRangeConstraint(0, MAX)
|
| 1142 |
+
|
| 1143 |
+
|
| 1144 |
+
class GeneralSubtree(univ.Sequence):
|
| 1145 |
+
pass
|
| 1146 |
+
|
| 1147 |
+
|
| 1148 |
+
GeneralSubtree.componentType = namedtype.NamedTypes(
|
| 1149 |
+
namedtype.NamedType('base', GeneralName()),
|
| 1150 |
+
namedtype.DefaultedNamedType('minimum', BaseDistance().subtype(
|
| 1151 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)).subtype(value=0)),
|
| 1152 |
+
namedtype.OptionalNamedType('maximum', BaseDistance().subtype(
|
| 1153 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
|
| 1154 |
+
)
|
| 1155 |
+
|
| 1156 |
+
|
| 1157 |
+
class GeneralNames(univ.SequenceOf):
|
| 1158 |
+
pass
|
| 1159 |
+
|
| 1160 |
+
|
| 1161 |
+
GeneralNames.componentType = GeneralName()
|
| 1162 |
+
GeneralNames.sizeSpec = constraint.ValueSizeConstraint(1, MAX)
|
| 1163 |
+
|
| 1164 |
+
|
| 1165 |
+
class DistributionPointName(univ.Choice):
|
| 1166 |
+
pass
|
| 1167 |
+
|
| 1168 |
+
|
| 1169 |
+
DistributionPointName.componentType = namedtype.NamedTypes(
|
| 1170 |
+
namedtype.NamedType('fullName',
|
| 1171 |
+
GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
|
| 1172 |
+
namedtype.NamedType('nameRelativeToCRLIssuer', RelativeDistinguishedName().subtype(
|
| 1173 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
|
| 1174 |
+
)
|
| 1175 |
+
|
| 1176 |
+
|
| 1177 |
+
class ReasonFlags(univ.BitString):
|
| 1178 |
+
pass
|
| 1179 |
+
|
| 1180 |
+
|
| 1181 |
+
ReasonFlags.namedValues = namedval.NamedValues(
|
| 1182 |
+
('unused', 0),
|
| 1183 |
+
('keyCompromise', 1),
|
| 1184 |
+
('cACompromise', 2),
|
| 1185 |
+
('affiliationChanged', 3),
|
| 1186 |
+
('superseded', 4),
|
| 1187 |
+
('cessationOfOperation', 5),
|
| 1188 |
+
('certificateHold', 6),
|
| 1189 |
+
('privilegeWithdrawn', 7),
|
| 1190 |
+
('aACompromise', 8)
|
| 1191 |
+
)
|
| 1192 |
+
|
| 1193 |
+
|
| 1194 |
+
class IssuingDistributionPoint(univ.Sequence):
|
| 1195 |
+
pass
|
| 1196 |
+
|
| 1197 |
+
|
| 1198 |
+
IssuingDistributionPoint.componentType = namedtype.NamedTypes(
|
| 1199 |
+
namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(
|
| 1200 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
|
| 1201 |
+
namedtype.DefaultedNamedType('onlyContainsUserCerts', univ.Boolean().subtype(
|
| 1202 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)).subtype(value=0)),
|
| 1203 |
+
namedtype.DefaultedNamedType('onlyContainsCACerts', univ.Boolean().subtype(
|
| 1204 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)).subtype(value=0)),
|
| 1205 |
+
namedtype.OptionalNamedType('onlySomeReasons', ReasonFlags().subtype(
|
| 1206 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
|
| 1207 |
+
namedtype.DefaultedNamedType('indirectCRL', univ.Boolean().subtype(
|
| 1208 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)).subtype(value=0)),
|
| 1209 |
+
namedtype.DefaultedNamedType('onlyContainsAttributeCerts', univ.Boolean().subtype(
|
| 1210 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5)).subtype(value=0))
|
| 1211 |
+
)
|
| 1212 |
+
|
| 1213 |
+
id_ce_certificatePolicies = _buildOid(id_ce, 32)
|
| 1214 |
+
|
| 1215 |
+
id_kp_emailProtection = _buildOid(id_kp, 4)
|
| 1216 |
+
|
| 1217 |
+
|
| 1218 |
+
class AccessDescription(univ.Sequence):
|
| 1219 |
+
pass
|
| 1220 |
+
|
| 1221 |
+
|
| 1222 |
+
AccessDescription.componentType = namedtype.NamedTypes(
|
| 1223 |
+
namedtype.NamedType('accessMethod', univ.ObjectIdentifier()),
|
| 1224 |
+
namedtype.NamedType('accessLocation', GeneralName())
|
| 1225 |
+
)
|
| 1226 |
+
|
| 1227 |
+
|
| 1228 |
+
class IssuerAltName(GeneralNames):
|
| 1229 |
+
pass
|
| 1230 |
+
|
| 1231 |
+
|
| 1232 |
+
id_ce_cRLDistributionPoints = _buildOid(id_ce, 31)
|
| 1233 |
+
|
| 1234 |
+
holdInstruction = _buildOid(2, 2, 840, 10040, 2)
|
| 1235 |
+
|
| 1236 |
+
id_holdinstruction_callissuer = _buildOid(holdInstruction, 2)
|
| 1237 |
+
|
| 1238 |
+
id_ce_subjectDirectoryAttributes = _buildOid(id_ce, 9)
|
| 1239 |
+
|
| 1240 |
+
id_ce_issuingDistributionPoint = _buildOid(id_ce, 28)
|
| 1241 |
+
|
| 1242 |
+
|
| 1243 |
+
class DistributionPoint(univ.Sequence):
|
| 1244 |
+
pass
|
| 1245 |
+
|
| 1246 |
+
|
| 1247 |
+
DistributionPoint.componentType = namedtype.NamedTypes(
|
| 1248 |
+
namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(
|
| 1249 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
|
| 1250 |
+
namedtype.OptionalNamedType('reasons', ReasonFlags().subtype(
|
| 1251 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
|
| 1252 |
+
namedtype.OptionalNamedType('cRLIssuer', GeneralNames().subtype(
|
| 1253 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
|
| 1254 |
+
)
|
| 1255 |
+
|
| 1256 |
+
|
| 1257 |
+
class CRLDistributionPoints(univ.SequenceOf):
|
| 1258 |
+
pass
|
| 1259 |
+
|
| 1260 |
+
|
| 1261 |
+
CRLDistributionPoints.componentType = DistributionPoint()
|
| 1262 |
+
CRLDistributionPoints.sizeSpec = constraint.ValueSizeConstraint(1, MAX)
|
| 1263 |
+
|
| 1264 |
+
|
| 1265 |
+
class GeneralSubtrees(univ.SequenceOf):
|
| 1266 |
+
pass
|
| 1267 |
+
|
| 1268 |
+
|
| 1269 |
+
GeneralSubtrees.componentType = GeneralSubtree()
|
| 1270 |
+
GeneralSubtrees.sizeSpec = constraint.ValueSizeConstraint(1, MAX)
|
| 1271 |
+
|
| 1272 |
+
|
| 1273 |
+
class NameConstraints(univ.Sequence):
|
| 1274 |
+
pass
|
| 1275 |
+
|
| 1276 |
+
|
| 1277 |
+
NameConstraints.componentType = namedtype.NamedTypes(
|
| 1278 |
+
namedtype.OptionalNamedType('permittedSubtrees', GeneralSubtrees().subtype(
|
| 1279 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
|
| 1280 |
+
namedtype.OptionalNamedType('excludedSubtrees', GeneralSubtrees().subtype(
|
| 1281 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
|
| 1282 |
+
)
|
| 1283 |
+
|
| 1284 |
+
|
| 1285 |
+
class SubjectDirectoryAttributes(univ.SequenceOf):
|
| 1286 |
+
pass
|
| 1287 |
+
|
| 1288 |
+
|
| 1289 |
+
SubjectDirectoryAttributes.componentType = Attribute()
|
| 1290 |
+
SubjectDirectoryAttributes.sizeSpec = constraint.ValueSizeConstraint(1, MAX)
|
| 1291 |
+
|
| 1292 |
+
id_kp_OCSPSigning = _buildOid(id_kp, 9)
|
| 1293 |
+
|
| 1294 |
+
id_kp_timeStamping = _buildOid(id_kp, 8)
|
| 1295 |
+
|
| 1296 |
+
|
| 1297 |
+
class DisplayText(univ.Choice):
|
| 1298 |
+
pass
|
| 1299 |
+
|
| 1300 |
+
|
| 1301 |
+
DisplayText.componentType = namedtype.NamedTypes(
|
| 1302 |
+
namedtype.NamedType('ia5String', char.IA5String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
|
| 1303 |
+
namedtype.NamedType('visibleString',
|
| 1304 |
+
char.VisibleString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
|
| 1305 |
+
namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
|
| 1306 |
+
namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200)))
|
| 1307 |
+
)
|
| 1308 |
+
|
| 1309 |
+
|
| 1310 |
+
class NoticeReference(univ.Sequence):
|
| 1311 |
+
pass
|
| 1312 |
+
|
| 1313 |
+
|
| 1314 |
+
NoticeReference.componentType = namedtype.NamedTypes(
|
| 1315 |
+
namedtype.NamedType('organization', DisplayText()),
|
| 1316 |
+
namedtype.NamedType('noticeNumbers', univ.SequenceOf(componentType=univ.Integer()))
|
| 1317 |
+
)
|
| 1318 |
+
|
| 1319 |
+
|
| 1320 |
+
class UserNotice(univ.Sequence):
|
| 1321 |
+
pass
|
| 1322 |
+
|
| 1323 |
+
|
| 1324 |
+
UserNotice.componentType = namedtype.NamedTypes(
|
| 1325 |
+
namedtype.OptionalNamedType('noticeRef', NoticeReference()),
|
| 1326 |
+
namedtype.OptionalNamedType('explicitText', DisplayText())
|
| 1327 |
+
)
|
| 1328 |
+
|
| 1329 |
+
|
| 1330 |
+
class PolicyQualifierId(univ.ObjectIdentifier):
|
| 1331 |
+
pass
|
| 1332 |
+
|
| 1333 |
+
|
| 1334 |
+
policyQualifierInfoMap = {
|
| 1335 |
+
|
| 1336 |
+
}
|
| 1337 |
+
|
| 1338 |
+
|
| 1339 |
+
class PolicyQualifierInfo(univ.Sequence):
|
| 1340 |
+
componentType = namedtype.NamedTypes(
|
| 1341 |
+
namedtype.NamedType('policyQualifierId', PolicyQualifierId()),
|
| 1342 |
+
namedtype.NamedType(
|
| 1343 |
+
'qualifier', univ.Any(),
|
| 1344 |
+
openType=opentype.OpenType('policyQualifierId', policyQualifierInfoMap)
|
| 1345 |
+
)
|
| 1346 |
+
)
|
| 1347 |
+
|
| 1348 |
+
|
| 1349 |
+
class CertPolicyId(univ.ObjectIdentifier):
|
| 1350 |
+
pass
|
| 1351 |
+
|
| 1352 |
+
|
| 1353 |
+
class PolicyInformation(univ.Sequence):
|
| 1354 |
+
pass
|
| 1355 |
+
|
| 1356 |
+
|
| 1357 |
+
PolicyInformation.componentType = namedtype.NamedTypes(
|
| 1358 |
+
namedtype.NamedType('policyIdentifier', CertPolicyId()),
|
| 1359 |
+
namedtype.OptionalNamedType('policyQualifiers', univ.SequenceOf(componentType=PolicyQualifierInfo()))
|
| 1360 |
+
)
|
| 1361 |
+
|
| 1362 |
+
|
| 1363 |
+
class CertificatePolicies(univ.SequenceOf):
|
| 1364 |
+
pass
|
| 1365 |
+
|
| 1366 |
+
|
| 1367 |
+
CertificatePolicies.componentType = PolicyInformation()
|
| 1368 |
+
CertificatePolicies.sizeSpec = constraint.ValueSizeConstraint(1, MAX)
|
| 1369 |
+
|
| 1370 |
+
|
| 1371 |
+
class SubjectAltName(GeneralNames):
|
| 1372 |
+
pass
|
| 1373 |
+
|
| 1374 |
+
|
| 1375 |
+
id_ce_basicConstraints = _buildOid(id_ce, 19)
|
| 1376 |
+
|
| 1377 |
+
id_ce_authorityKeyIdentifier = _buildOid(id_ce, 35)
|
| 1378 |
+
|
| 1379 |
+
id_kp_codeSigning = _buildOid(id_kp, 3)
|
| 1380 |
+
|
| 1381 |
+
|
| 1382 |
+
class BasicConstraints(univ.Sequence):
|
| 1383 |
+
pass
|
| 1384 |
+
|
| 1385 |
+
|
| 1386 |
+
BasicConstraints.componentType = namedtype.NamedTypes(
|
| 1387 |
+
namedtype.DefaultedNamedType('cA', univ.Boolean().subtype(value=0)),
|
| 1388 |
+
namedtype.OptionalNamedType('pathLenConstraint',
|
| 1389 |
+
univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX)))
|
| 1390 |
+
)
|
| 1391 |
+
|
| 1392 |
+
id_ce_certificateIssuer = _buildOid(id_ce, 29)
|
| 1393 |
+
|
| 1394 |
+
|
| 1395 |
+
class PolicyMappings(univ.SequenceOf):
|
| 1396 |
+
pass
|
| 1397 |
+
|
| 1398 |
+
|
| 1399 |
+
PolicyMappings.componentType = univ.Sequence(
|
| 1400 |
+
componentType=namedtype.NamedTypes(
|
| 1401 |
+
namedtype.NamedType('issuerDomainPolicy', CertPolicyId()),
|
| 1402 |
+
namedtype.NamedType('subjectDomainPolicy', CertPolicyId())
|
| 1403 |
+
)
|
| 1404 |
+
)
|
| 1405 |
+
|
| 1406 |
+
PolicyMappings.sizeSpec = constraint.ValueSizeConstraint(1, MAX)
|
| 1407 |
+
|
| 1408 |
+
|
| 1409 |
+
class InhibitAnyPolicy(SkipCerts):
|
| 1410 |
+
pass
|
| 1411 |
+
|
| 1412 |
+
|
| 1413 |
+
anyPolicy = _buildOid(id_ce_certificatePolicies, 0)
|
| 1414 |
+
|
| 1415 |
+
|
| 1416 |
+
class CRLNumber(univ.Integer):
|
| 1417 |
+
pass
|
| 1418 |
+
|
| 1419 |
+
|
| 1420 |
+
CRLNumber.subtypeSpec = constraint.ValueRangeConstraint(0, MAX)
|
| 1421 |
+
|
| 1422 |
+
|
| 1423 |
+
class BaseCRLNumber(CRLNumber):
|
| 1424 |
+
pass
|
| 1425 |
+
|
| 1426 |
+
|
| 1427 |
+
id_ce_nameConstraints = _buildOid(id_ce, 30)
|
| 1428 |
+
|
| 1429 |
+
id_kp_serverAuth = _buildOid(id_kp, 1)
|
| 1430 |
+
|
| 1431 |
+
id_ce_freshestCRL = _buildOid(id_ce, 46)
|
| 1432 |
+
|
| 1433 |
+
id_ce_cRLReasons = _buildOid(id_ce, 21)
|
| 1434 |
+
|
| 1435 |
+
id_ce_extKeyUsage = _buildOid(id_ce, 37)
|
| 1436 |
+
|
| 1437 |
+
|
| 1438 |
+
class KeyIdentifier(univ.OctetString):
|
| 1439 |
+
pass
|
| 1440 |
+
|
| 1441 |
+
|
| 1442 |
+
class AuthorityKeyIdentifier(univ.Sequence):
|
| 1443 |
+
pass
|
| 1444 |
+
|
| 1445 |
+
|
| 1446 |
+
AuthorityKeyIdentifier.componentType = namedtype.NamedTypes(
|
| 1447 |
+
namedtype.OptionalNamedType('keyIdentifier', KeyIdentifier().subtype(
|
| 1448 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
|
| 1449 |
+
namedtype.OptionalNamedType('authorityCertIssuer', GeneralNames().subtype(
|
| 1450 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
|
| 1451 |
+
namedtype.OptionalNamedType('authorityCertSerialNumber', CertificateSerialNumber().subtype(
|
| 1452 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
|
| 1453 |
+
)
|
| 1454 |
+
|
| 1455 |
+
|
| 1456 |
+
class FreshestCRL(CRLDistributionPoints):
|
| 1457 |
+
pass
|
| 1458 |
+
|
| 1459 |
+
|
| 1460 |
+
id_ce_policyConstraints = _buildOid(id_ce, 36)
|
| 1461 |
+
|
| 1462 |
+
id_pe_authorityInfoAccess = _buildOid(id_pe, 1)
|
| 1463 |
+
|
| 1464 |
+
|
| 1465 |
+
class AuthorityInfoAccessSyntax(univ.SequenceOf):
|
| 1466 |
+
pass
|
| 1467 |
+
|
| 1468 |
+
|
| 1469 |
+
AuthorityInfoAccessSyntax.componentType = AccessDescription()
|
| 1470 |
+
AuthorityInfoAccessSyntax.sizeSpec = constraint.ValueSizeConstraint(1, MAX)
|
| 1471 |
+
|
| 1472 |
+
id_holdinstruction_none = _buildOid(holdInstruction, 1)
|
| 1473 |
+
|
| 1474 |
+
|
| 1475 |
+
class CPSuri(char.IA5String):
|
| 1476 |
+
pass
|
| 1477 |
+
|
| 1478 |
+
|
| 1479 |
+
id_pe_subjectInfoAccess = _buildOid(id_pe, 11)
|
| 1480 |
+
|
| 1481 |
+
|
| 1482 |
+
class SubjectKeyIdentifier(KeyIdentifier):
|
| 1483 |
+
pass
|
| 1484 |
+
|
| 1485 |
+
|
| 1486 |
+
id_ce_subjectAltName = _buildOid(id_ce, 17)
|
| 1487 |
+
|
| 1488 |
+
|
| 1489 |
+
class KeyPurposeId(univ.ObjectIdentifier):
|
| 1490 |
+
pass
|
| 1491 |
+
|
| 1492 |
+
|
| 1493 |
+
class ExtKeyUsageSyntax(univ.SequenceOf):
|
| 1494 |
+
pass
|
| 1495 |
+
|
| 1496 |
+
|
| 1497 |
+
ExtKeyUsageSyntax.componentType = KeyPurposeId()
|
| 1498 |
+
ExtKeyUsageSyntax.sizeSpec = constraint.ValueSizeConstraint(1, MAX)
|
| 1499 |
+
|
| 1500 |
+
|
| 1501 |
+
class HoldInstructionCode(univ.ObjectIdentifier):
|
| 1502 |
+
pass
|
| 1503 |
+
|
| 1504 |
+
|
| 1505 |
+
id_ce_deltaCRLIndicator = _buildOid(id_ce, 27)
|
| 1506 |
+
|
| 1507 |
+
id_ce_keyUsage = _buildOid(id_ce, 15)
|
| 1508 |
+
|
| 1509 |
+
id_ce_holdInstructionCode = _buildOid(id_ce, 23)
|
| 1510 |
+
|
| 1511 |
+
|
| 1512 |
+
class SubjectInfoAccessSyntax(univ.SequenceOf):
|
| 1513 |
+
pass
|
| 1514 |
+
|
| 1515 |
+
|
| 1516 |
+
SubjectInfoAccessSyntax.componentType = AccessDescription()
|
| 1517 |
+
SubjectInfoAccessSyntax.sizeSpec = constraint.ValueSizeConstraint(1, MAX)
|
| 1518 |
+
|
| 1519 |
+
|
| 1520 |
+
class InvalidityDate(useful.GeneralizedTime):
|
| 1521 |
+
pass
|
| 1522 |
+
|
| 1523 |
+
|
| 1524 |
+
class KeyUsage(univ.BitString):
|
| 1525 |
+
pass
|
| 1526 |
+
|
| 1527 |
+
|
| 1528 |
+
KeyUsage.namedValues = namedval.NamedValues(
|
| 1529 |
+
('digitalSignature', 0),
|
| 1530 |
+
('nonRepudiation', 1),
|
| 1531 |
+
('keyEncipherment', 2),
|
| 1532 |
+
('dataEncipherment', 3),
|
| 1533 |
+
('keyAgreement', 4),
|
| 1534 |
+
('keyCertSign', 5),
|
| 1535 |
+
('cRLSign', 6),
|
| 1536 |
+
('encipherOnly', 7),
|
| 1537 |
+
('decipherOnly', 8)
|
| 1538 |
+
)
|
| 1539 |
+
|
| 1540 |
+
id_ce_invalidityDate = _buildOid(id_ce, 24)
|
| 1541 |
+
|
| 1542 |
+
id_ce_policyMappings = _buildOid(id_ce, 33)
|
| 1543 |
+
|
| 1544 |
+
anyExtendedKeyUsage = _buildOid(id_ce_extKeyUsage, 0)
|
| 1545 |
+
|
| 1546 |
+
id_ce_privateKeyUsagePeriod = _buildOid(id_ce, 16)
|
| 1547 |
+
|
| 1548 |
+
id_ce_cRLNumber = _buildOid(id_ce, 20)
|
| 1549 |
+
|
| 1550 |
+
|
| 1551 |
+
class CertificateIssuer(GeneralNames):
|
| 1552 |
+
pass
|
| 1553 |
+
|
| 1554 |
+
|
| 1555 |
+
id_holdinstruction_reject = _buildOid(holdInstruction, 3)
|
| 1556 |
+
|
| 1557 |
+
|
| 1558 |
+
class PolicyConstraints(univ.Sequence):
|
| 1559 |
+
pass
|
| 1560 |
+
|
| 1561 |
+
|
| 1562 |
+
PolicyConstraints.componentType = namedtype.NamedTypes(
|
| 1563 |
+
namedtype.OptionalNamedType('requireExplicitPolicy',
|
| 1564 |
+
SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
|
| 1565 |
+
namedtype.OptionalNamedType('inhibitPolicyMapping',
|
| 1566 |
+
SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
|
| 1567 |
+
)
|
| 1568 |
+
|
| 1569 |
+
id_kp_clientAuth = _buildOid(id_kp, 2)
|
| 1570 |
+
|
| 1571 |
+
id_ce_subjectKeyIdentifier = _buildOid(id_ce, 14)
|
| 1572 |
+
|
| 1573 |
+
id_ce_inhibitAnyPolicy = _buildOid(id_ce, 54)
|
| 1574 |
+
|
| 1575 |
+
# map of ORAddress ExtensionAttribute type to ExtensionAttribute value
|
| 1576 |
+
|
| 1577 |
+
_oraddressExtensionAttributeMapUpdate = {
|
| 1578 |
+
common_name: CommonName(),
|
| 1579 |
+
teletex_common_name: TeletexCommonName(),
|
| 1580 |
+
teletex_organization_name: TeletexOrganizationName(),
|
| 1581 |
+
teletex_personal_name: TeletexPersonalName(),
|
| 1582 |
+
teletex_organizational_unit_names: TeletexOrganizationalUnitNames(),
|
| 1583 |
+
pds_name: PDSName(),
|
| 1584 |
+
physical_delivery_country_name: PhysicalDeliveryCountryName(),
|
| 1585 |
+
postal_code: PostalCode(),
|
| 1586 |
+
physical_delivery_office_name: PhysicalDeliveryOfficeName(),
|
| 1587 |
+
physical_delivery_office_number: PhysicalDeliveryOfficeNumber(),
|
| 1588 |
+
extension_OR_address_components: ExtensionORAddressComponents(),
|
| 1589 |
+
physical_delivery_personal_name: PhysicalDeliveryPersonalName(),
|
| 1590 |
+
physical_delivery_organization_name: PhysicalDeliveryOrganizationName(),
|
| 1591 |
+
extension_physical_delivery_address_components: ExtensionPhysicalDeliveryAddressComponents(),
|
| 1592 |
+
unformatted_postal_address: UnformattedPostalAddress(),
|
| 1593 |
+
street_address: StreetAddress(),
|
| 1594 |
+
post_office_box_address: PostOfficeBoxAddress(),
|
| 1595 |
+
poste_restante_address: PosteRestanteAddress(),
|
| 1596 |
+
unique_postal_name: UniquePostalName(),
|
| 1597 |
+
local_postal_attributes: LocalPostalAttributes(),
|
| 1598 |
+
extended_network_address: ExtendedNetworkAddress(),
|
| 1599 |
+
terminal_type: TerminalType(),
|
| 1600 |
+
teletex_domain_defined_attributes: TeletexDomainDefinedAttributes(),
|
| 1601 |
+
}
|
| 1602 |
+
|
| 1603 |
+
oraddressExtensionAttributeMap.update(_oraddressExtensionAttributeMapUpdate)
|
| 1604 |
+
|
| 1605 |
+
|
| 1606 |
+
# map of AttributeType -> AttributeValue
|
| 1607 |
+
|
| 1608 |
+
_certificateAttributesMapUpdate = {
|
| 1609 |
+
id_at_name: X520name(),
|
| 1610 |
+
id_at_surname: X520name(),
|
| 1611 |
+
id_at_givenName: X520name(),
|
| 1612 |
+
id_at_initials: X520name(),
|
| 1613 |
+
id_at_generationQualifier: X520name(),
|
| 1614 |
+
id_at_commonName: X520CommonName(),
|
| 1615 |
+
id_at_localityName: X520LocalityName(),
|
| 1616 |
+
id_at_stateOrProvinceName: X520StateOrProvinceName(),
|
| 1617 |
+
id_at_organizationName: X520OrganizationName(),
|
| 1618 |
+
id_at_organizationalUnitName: X520OrganizationalUnitName(),
|
| 1619 |
+
id_at_title: X520Title(),
|
| 1620 |
+
id_at_dnQualifier: X520dnQualifier(),
|
| 1621 |
+
id_at_countryName: X520countryName(),
|
| 1622 |
+
id_at_serialNumber: X520SerialNumber(),
|
| 1623 |
+
id_at_pseudonym: X520Pseudonym(),
|
| 1624 |
+
id_domainComponent: DomainComponent(),
|
| 1625 |
+
id_emailAddress: EmailAddress(),
|
| 1626 |
+
}
|
| 1627 |
+
|
| 1628 |
+
certificateAttributesMap.update(_certificateAttributesMapUpdate)
|
| 1629 |
+
|
| 1630 |
+
|
| 1631 |
+
# map of Certificate Extension OIDs to Extensions
|
| 1632 |
+
|
| 1633 |
+
_certificateExtensionsMap = {
|
| 1634 |
+
id_ce_authorityKeyIdentifier: AuthorityKeyIdentifier(),
|
| 1635 |
+
id_ce_subjectKeyIdentifier: SubjectKeyIdentifier(),
|
| 1636 |
+
id_ce_keyUsage: KeyUsage(),
|
| 1637 |
+
id_ce_privateKeyUsagePeriod: PrivateKeyUsagePeriod(),
|
| 1638 |
+
id_ce_certificatePolicies: CertificatePolicies(),
|
| 1639 |
+
id_ce_policyMappings: PolicyMappings(),
|
| 1640 |
+
id_ce_subjectAltName: SubjectAltName(),
|
| 1641 |
+
id_ce_issuerAltName: IssuerAltName(),
|
| 1642 |
+
id_ce_subjectDirectoryAttributes: SubjectDirectoryAttributes(),
|
| 1643 |
+
id_ce_basicConstraints: BasicConstraints(),
|
| 1644 |
+
id_ce_nameConstraints: NameConstraints(),
|
| 1645 |
+
id_ce_policyConstraints: PolicyConstraints(),
|
| 1646 |
+
id_ce_extKeyUsage: ExtKeyUsageSyntax(),
|
| 1647 |
+
id_ce_cRLDistributionPoints: CRLDistributionPoints(),
|
| 1648 |
+
id_pe_authorityInfoAccess: AuthorityInfoAccessSyntax(),
|
| 1649 |
+
id_ce_cRLNumber: univ.Integer(),
|
| 1650 |
+
id_ce_deltaCRLIndicator: BaseCRLNumber(),
|
| 1651 |
+
id_ce_issuingDistributionPoint: IssuingDistributionPoint(),
|
| 1652 |
+
id_ce_cRLReasons: CRLReason(),
|
| 1653 |
+
id_ce_holdInstructionCode: univ.ObjectIdentifier(),
|
| 1654 |
+
id_ce_invalidityDate: useful.GeneralizedTime(),
|
| 1655 |
+
id_ce_certificateIssuer: GeneralNames(),
|
| 1656 |
+
}
|
| 1657 |
+
|
| 1658 |
+
certificateExtensionsMap.update(_certificateExtensionsMap)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc5916.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# This file is part of pyasn1-modules software.
|
| 3 |
+
#
|
| 4 |
+
# Created by Russ Housley.
|
| 5 |
+
#
|
| 6 |
+
# Copyright (c) 2019, Vigil Security, LLC
|
| 7 |
+
# License: http://snmplabs.com/pyasn1/license.html
|
| 8 |
+
#
|
| 9 |
+
# Device Owner Attribute
|
| 10 |
+
#
|
| 11 |
+
# ASN.1 source from:
|
| 12 |
+
# https://www.rfc-editor.org/rfc/rfc5916.txt
|
| 13 |
+
#
|
| 14 |
+
|
| 15 |
+
from pyasn1.type import univ
|
| 16 |
+
|
| 17 |
+
from pyasn1_modules import rfc5280
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
# Device Owner Attribute
|
| 21 |
+
|
| 22 |
+
id_deviceOwner = univ.ObjectIdentifier((2, 16, 840, 1, 101, 2, 1, 5, 69))
|
| 23 |
+
|
| 24 |
+
at_deviceOwner = rfc5280.Attribute()
|
| 25 |
+
at_deviceOwner['type'] = id_deviceOwner
|
| 26 |
+
at_deviceOwner['values'][0] = univ.ObjectIdentifier()
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
# Add to the map of Attribute Type OIDs to Attributes in rfc5280.py.
|
| 30 |
+
|
| 31 |
+
_certificateAttributesMapUpdate = {
|
| 32 |
+
id_deviceOwner: univ.ObjectIdentifier(),
|
| 33 |
+
}
|
| 34 |
+
|
| 35 |
+
rfc5280.certificateAttributesMap.update(_certificateAttributesMapUpdate)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc5990.py
ADDED
|
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# This file is part of pyasn1-modules software.
|
| 3 |
+
#
|
| 4 |
+
# Created by Russ Housley with assistance from asn1ate v.0.6.0.
|
| 5 |
+
#
|
| 6 |
+
# Copyright (c) 2019, Vigil Security, LLC
|
| 7 |
+
# License: http://snmplabs.com/pyasn1/license.html
|
| 8 |
+
#
|
| 9 |
+
# Use of the RSA-KEM Key Transport Algorithm in the CMS
|
| 10 |
+
#
|
| 11 |
+
# ASN.1 source from:
|
| 12 |
+
# https://www.rfc-editor.org/rfc/rfc5990.txt
|
| 13 |
+
#
|
| 14 |
+
|
| 15 |
+
from pyasn1.type import constraint
|
| 16 |
+
from pyasn1.type import namedtype
|
| 17 |
+
from pyasn1.type import univ
|
| 18 |
+
|
| 19 |
+
from pyasn1_modules import rfc5280
|
| 20 |
+
|
| 21 |
+
MAX = float('inf')
|
| 22 |
+
|
| 23 |
+
def _OID(*components):
    """Build a univ.ObjectIdentifier from a mix of OIDs and integers.

    Each component may be an existing ObjectIdentifier (its arcs are
    spliced in) or anything convertible to int (appended as one arc).
    """
    arcs = []
    for component in components:
        if isinstance(component, univ.ObjectIdentifier):
            # Splice in every arc of the sub-identifier.
            arcs.extend(component)
        else:
            arcs.append(int(component))
    return univ.ObjectIdentifier(arcs)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# Imports from RFC 5280
|
| 34 |
+
|
| 35 |
+
AlgorithmIdentifier = rfc5280.AlgorithmIdentifier
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
# Useful types and definitions
|
| 39 |
+
|
| 40 |
+
class NullParms(univ.Null):
|
| 41 |
+
pass
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
# Object identifier arcs
|
| 45 |
+
|
| 46 |
+
is18033_2 = _OID(1, 0, 18033, 2)
|
| 47 |
+
|
| 48 |
+
nistAlgorithm = _OID(2, 16, 840, 1, 101, 3, 4)
|
| 49 |
+
|
| 50 |
+
pkcs_1 = _OID(1, 2, 840, 113549, 1, 1)
|
| 51 |
+
|
| 52 |
+
x9_44 = _OID(1, 3, 133, 16, 840, 9, 44)
|
| 53 |
+
|
| 54 |
+
x9_44_components = _OID(x9_44, 1)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
# Types for algorithm identifiers
|
| 58 |
+
|
| 59 |
+
class Camellia_KeyWrappingScheme(AlgorithmIdentifier):
|
| 60 |
+
pass
|
| 61 |
+
|
| 62 |
+
class DataEncapsulationMechanism(AlgorithmIdentifier):
|
| 63 |
+
pass
|
| 64 |
+
|
| 65 |
+
class KDF2_HashFunction(AlgorithmIdentifier):
|
| 66 |
+
pass
|
| 67 |
+
|
| 68 |
+
class KDF3_HashFunction(AlgorithmIdentifier):
|
| 69 |
+
pass
|
| 70 |
+
|
| 71 |
+
class KeyDerivationFunction(AlgorithmIdentifier):
|
| 72 |
+
pass
|
| 73 |
+
|
| 74 |
+
class KeyEncapsulationMechanism(AlgorithmIdentifier):
|
| 75 |
+
pass
|
| 76 |
+
|
| 77 |
+
class X9_SymmetricKeyWrappingScheme(AlgorithmIdentifier):
|
| 78 |
+
pass
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
# RSA-KEM Key Transport Algorithm
|
| 82 |
+
|
| 83 |
+
id_rsa_kem = _OID(1, 2, 840, 113549, 1, 9, 16, 3, 14)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
class GenericHybridParameters(univ.Sequence):
|
| 87 |
+
pass
|
| 88 |
+
|
| 89 |
+
GenericHybridParameters.componentType = namedtype.NamedTypes(
|
| 90 |
+
namedtype.NamedType('kem', KeyEncapsulationMechanism()),
|
| 91 |
+
namedtype.NamedType('dem', DataEncapsulationMechanism())
|
| 92 |
+
)
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
rsa_kem = AlgorithmIdentifier()
|
| 96 |
+
rsa_kem['algorithm'] = id_rsa_kem
|
| 97 |
+
rsa_kem['parameters'] = GenericHybridParameters()
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
# KEM-RSA Key Encapsulation Mechanism
|
| 101 |
+
|
| 102 |
+
id_kem_rsa = _OID(is18033_2, 2, 4)
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
class KeyLength(univ.Integer):
|
| 106 |
+
pass
|
| 107 |
+
|
| 108 |
+
KeyLength.subtypeSpec = constraint.ValueRangeConstraint(1, MAX)
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
class RsaKemParameters(univ.Sequence):
|
| 112 |
+
pass
|
| 113 |
+
|
| 114 |
+
RsaKemParameters.componentType = namedtype.NamedTypes(
|
| 115 |
+
namedtype.NamedType('keyDerivationFunction', KeyDerivationFunction()),
|
| 116 |
+
namedtype.NamedType('keyLength', KeyLength())
|
| 117 |
+
)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
kem_rsa = AlgorithmIdentifier()
|
| 121 |
+
kem_rsa['algorithm'] = id_kem_rsa
|
| 122 |
+
kem_rsa['parameters'] = RsaKemParameters()
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
# Key Derivation Functions
|
| 126 |
+
|
| 127 |
+
id_kdf_kdf2 = _OID(x9_44_components, 1)
|
| 128 |
+
|
| 129 |
+
id_kdf_kdf3 = _OID(x9_44_components, 2)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
kdf2 = AlgorithmIdentifier()
|
| 133 |
+
kdf2['algorithm'] = id_kdf_kdf2
|
| 134 |
+
kdf2['parameters'] = KDF2_HashFunction()
|
| 135 |
+
|
| 136 |
+
kdf3 = AlgorithmIdentifier()
|
| 137 |
+
kdf3['algorithm'] = id_kdf_kdf3
|
| 138 |
+
kdf3['parameters'] = KDF3_HashFunction()
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
# Hash Functions
|
| 142 |
+
|
| 143 |
+
id_sha1 = _OID(1, 3, 14, 3, 2, 26)
|
| 144 |
+
|
| 145 |
+
id_sha224 = _OID(2, 16, 840, 1, 101, 3, 4, 2, 4)
|
| 146 |
+
|
| 147 |
+
id_sha256 = _OID(2, 16, 840, 1, 101, 3, 4, 2, 1)
|
| 148 |
+
|
| 149 |
+
id_sha384 = _OID(2, 16, 840, 1, 101, 3, 4, 2, 2)
|
| 150 |
+
|
| 151 |
+
id_sha512 = _OID(2, 16, 840, 1, 101, 3, 4, 2, 3)
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
sha1 = AlgorithmIdentifier()
|
| 155 |
+
sha1['algorithm'] = id_sha1
|
| 156 |
+
sha1['parameters'] = univ.Null("")
|
| 157 |
+
|
| 158 |
+
sha224 = AlgorithmIdentifier()
|
| 159 |
+
sha224['algorithm'] = id_sha224
|
| 160 |
+
sha224['parameters'] = univ.Null("")
|
| 161 |
+
|
| 162 |
+
sha256 = AlgorithmIdentifier()
|
| 163 |
+
sha256['algorithm'] = id_sha256
|
| 164 |
+
sha256['parameters'] = univ.Null("")
|
| 165 |
+
|
| 166 |
+
sha384 = AlgorithmIdentifier()
|
| 167 |
+
sha384['algorithm'] = id_sha384
|
| 168 |
+
sha384['parameters'] = univ.Null("")
|
| 169 |
+
|
| 170 |
+
sha512 = AlgorithmIdentifier()
|
| 171 |
+
sha512['algorithm'] = id_sha512
|
| 172 |
+
sha512['parameters'] = univ.Null("")
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
# Symmetric Key-Wrapping Schemes
|
| 176 |
+
|
| 177 |
+
id_aes128_Wrap = _OID(nistAlgorithm, 1, 5)
|
| 178 |
+
|
| 179 |
+
id_aes192_Wrap = _OID(nistAlgorithm, 1, 25)
|
| 180 |
+
|
| 181 |
+
id_aes256_Wrap = _OID(nistAlgorithm, 1, 45)
|
| 182 |
+
|
| 183 |
+
id_alg_CMS3DESwrap = _OID(1, 2, 840, 113549, 1, 9, 16, 3, 6)
|
| 184 |
+
|
| 185 |
+
id_camellia128_Wrap = _OID(1, 2, 392, 200011, 61, 1, 1, 3, 2)
|
| 186 |
+
|
| 187 |
+
id_camellia192_Wrap = _OID(1, 2, 392, 200011, 61, 1, 1, 3, 3)
|
| 188 |
+
|
| 189 |
+
id_camellia256_Wrap = _OID(1, 2, 392, 200011, 61, 1, 1, 3, 4)
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
# AES Key Wrap algorithm identifiers (RFC 5990, Appendix A).
# Per the ASN.1 module, the AES key-wrap algorithms take no parameters,
# so 'parameters' is deliberately left absent on each identifier.

aes128_Wrap = AlgorithmIdentifier()
aes128_Wrap['algorithm'] = id_aes128_Wrap
# aes128_Wrap['parameters'] are absent

aes192_Wrap = AlgorithmIdentifier()
# Fixed: previously assigned id_aes128_Wrap (copy-paste error); the
# AES-192 wrap OID is id_aes192_Wrap = 2.16.840.1.101.3.4.1.25.
aes192_Wrap['algorithm'] = id_aes192_Wrap
# aes192_Wrap['parameters'] are absent

aes256_Wrap = AlgorithmIdentifier()
# Fixed: previously assigned id_sha256 (a hash OID, not a key-wrap
# OID); the AES-256 wrap OID is id_aes256_Wrap = 2.16.840.1.101.3.4.1.45.
aes256_Wrap['algorithm'] = id_aes256_Wrap
# aes256_Wrap['parameters'] are absent
|
| 203 |
+
|
| 204 |
+
tdes_Wrap = AlgorithmIdentifier()
|
| 205 |
+
tdes_Wrap['algorithm'] = id_alg_CMS3DESwrap
|
| 206 |
+
tdes_Wrap['parameters'] = univ.Null("")
|
| 207 |
+
|
| 208 |
+
camellia128_Wrap = AlgorithmIdentifier()
|
| 209 |
+
camellia128_Wrap['algorithm'] = id_camellia128_Wrap
|
| 210 |
+
# camellia128_Wrap['parameters'] are absent
|
| 211 |
+
|
| 212 |
+
camellia192_Wrap = AlgorithmIdentifier()
|
| 213 |
+
camellia192_Wrap['algorithm'] = id_camellia192_Wrap
|
| 214 |
+
# camellia192_Wrap['parameters'] are absent
|
| 215 |
+
|
| 216 |
+
camellia256_Wrap = AlgorithmIdentifier()
|
| 217 |
+
camellia256_Wrap['algorithm'] = id_camellia256_Wrap
|
| 218 |
+
# camellia256_Wrap['parameters'] are absent
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
# Update the Algorithm Identifier map in rfc5280.py.
|
| 222 |
+
# Note that the ones that must not have parameters are not added to the map.
|
| 223 |
+
|
| 224 |
+
_algorithmIdentifierMapUpdate = {
|
| 225 |
+
id_rsa_kem: GenericHybridParameters(),
|
| 226 |
+
id_kem_rsa: RsaKemParameters(),
|
| 227 |
+
id_kdf_kdf2: KDF2_HashFunction(),
|
| 228 |
+
id_kdf_kdf3: KDF3_HashFunction(),
|
| 229 |
+
id_sha1: univ.Null(),
|
| 230 |
+
id_sha224: univ.Null(),
|
| 231 |
+
id_sha256: univ.Null(),
|
| 232 |
+
id_sha384: univ.Null(),
|
| 233 |
+
id_sha512: univ.Null(),
|
| 234 |
+
id_alg_CMS3DESwrap: univ.Null(),
|
| 235 |
+
}
|
| 236 |
+
|
| 237 |
+
rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc6187.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# This file is part of pyasn1-modules software.
|
| 3 |
+
#
|
| 4 |
+
# Created by Russ Housley.
|
| 5 |
+
#
|
| 6 |
+
# Copyright (c) 2019, Vigil Security, LLC
|
| 7 |
+
# License: http://snmplabs.com/pyasn1/license.html
|
| 8 |
+
#
|
| 9 |
+
# X.509v3 Certificates for Secure Shell Authentication
|
| 10 |
+
#
|
| 11 |
+
# ASN.1 source from:
|
| 12 |
+
# https://www.rfc-editor.org/rfc/rfc6187.txt
|
| 13 |
+
#
|
| 14 |
+
|
| 15 |
+
from pyasn1.type import univ

# Root of the PKIX arc (1.3.6.1.5.5.7).
id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7')

# id-kp: the extended key purpose OID arc under id-pkix.
id_kp = id_pkix + (3, )

# RFC 6187 extended key usage OIDs for X.509v3 certificates used in
# Secure Shell authentication:
#   id-kp-secureShellClient = 1.3.6.1.5.5.7.3.21
#   id-kp-secureShellServer = 1.3.6.1.5.5.7.3.22
id_kp_secureShellClient = id_kp + (21, )
id_kp_secureShellServer = id_kp + (22, )
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc7633.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# This file is part of pyasn1-modules software.
|
| 3 |
+
#
|
| 4 |
+
# Created by Russ Housley with some assistance from asn1ate v.0.6.0.
|
| 5 |
+
#
|
| 6 |
+
# Copyright (c) 2019, Vigil Security, LLC
|
| 7 |
+
# License: http://snmplabs.com/pyasn1/license.html
|
| 8 |
+
#
|
| 9 |
+
# Transport Layer Security (TLS) Feature Certificate Extension
|
| 10 |
+
#
|
| 11 |
+
# ASN.1 source from:
|
| 12 |
+
# https://www.rfc-editor.org/rfc/rfc7633.txt
|
| 13 |
+
#
|
| 14 |
+
|
| 15 |
+
from pyasn1.type import univ
|
| 16 |
+
|
| 17 |
+
from pyasn1_modules import rfc5280
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
# TLS Features Extension
|
| 21 |
+
|
| 22 |
+
id_pe = univ.ObjectIdentifier('1.3.6.1.5.5.7.1')
|
| 23 |
+
|
| 24 |
+
id_pe_tlsfeature = id_pe + (24, )
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class Features(univ.SequenceOf):
    """TLS Feature extension value: Features ::= SEQUENCE OF INTEGER.

    Per RFC 7633, each INTEGER identifies a TLS feature (extension type
    number) that the certificate holder commits to support.
    """
    componentType = univ.Integer()
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
# Map of Certificate Extension OIDs to Extensions added to the
|
| 32 |
+
# ones that are in rfc5280.py
|
| 33 |
+
|
| 34 |
+
_certificateExtensionsMapUpdate = {
|
| 35 |
+
id_pe_tlsfeature: Features(),
|
| 36 |
+
}
|
| 37 |
+
|
| 38 |
+
rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc7773.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# This file is part of pyasn1-modules software.
|
| 3 |
+
#
|
| 4 |
+
# Created by Russ Housley with some assistance from asn1ate v.0.6.0.
|
| 5 |
+
#
|
| 6 |
+
# Copyright (c) 2019, Vigil Security, LLC
|
| 7 |
+
# License: http://snmplabs.com/pyasn1/license.html
|
| 8 |
+
#
|
| 9 |
+
# Authentication Context Certificate Extension
|
| 10 |
+
#
|
| 11 |
+
# ASN.1 source from:
|
| 12 |
+
# https://www.rfc-editor.org/rfc/rfc7773.txt
|
| 13 |
+
#
|
| 14 |
+
|
| 15 |
+
from pyasn1.type import char
|
| 16 |
+
from pyasn1.type import constraint
|
| 17 |
+
from pyasn1.type import namedtype
|
| 18 |
+
from pyasn1.type import univ
|
| 19 |
+
|
| 20 |
+
from pyasn1_modules import rfc5280
|
| 21 |
+
|
| 22 |
+
MAX = float('inf')
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
# Authentication Context Extension
|
| 26 |
+
|
| 27 |
+
e_legnamnden = univ.ObjectIdentifier('1.2.752.201')
|
| 28 |
+
|
| 29 |
+
id_eleg_ce = e_legnamnden + (5, )
|
| 30 |
+
|
| 31 |
+
id_ce_authContext = id_eleg_ce + (1, )
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class AuthenticationContext(univ.Sequence):
|
| 35 |
+
componentType = namedtype.NamedTypes(
|
| 36 |
+
namedtype.NamedType('contextType', char.UTF8String()),
|
| 37 |
+
namedtype.OptionalNamedType('contextInfo', char.UTF8String())
|
| 38 |
+
)
|
| 39 |
+
|
| 40 |
+
class AuthenticationContexts(univ.SequenceOf):
|
| 41 |
+
componentType = AuthenticationContext()
|
| 42 |
+
subtypeSpec=constraint.ValueSizeConstraint(1, MAX)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
# Map of Certificate Extension OIDs to Extensions added to the
|
| 46 |
+
# ones that are in rfc5280.py
|
| 47 |
+
|
| 48 |
+
_certificateExtensionsMapUpdate = {
|
| 49 |
+
id_ce_authContext: AuthenticationContexts(),
|
| 50 |
+
}
|
| 51 |
+
|
| 52 |
+
rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc8358.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# This file is part of pyasn1-modules software.
|
| 3 |
+
#
|
| 4 |
+
# Created by Russ Housley.
|
| 5 |
+
#
|
| 6 |
+
# Copyright (c) 2019, Vigil Security, LLC
|
| 7 |
+
# License: http://snmplabs.com/pyasn1/license.html
|
| 8 |
+
#
|
| 9 |
+
# Digital Signatures on Internet-Draft Documents
|
| 10 |
+
#
|
| 11 |
+
# ASN.1 source from:
|
| 12 |
+
# https://www.rfc-editor.org/rfc/rfc8358.txt
|
| 13 |
+
#
|
| 14 |
+
|
| 15 |
+
from pyasn1.type import univ
|
| 16 |
+
|
| 17 |
+
from pyasn1_modules import rfc5652
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
id_ct = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1')
|
| 21 |
+
|
| 22 |
+
id_ct_asciiTextWithCRLF = id_ct + (27, )
|
| 23 |
+
|
| 24 |
+
id_ct_epub = id_ct + (39, )
|
| 25 |
+
|
| 26 |
+
id_ct_htmlWithCRLF = id_ct + (38, )
|
| 27 |
+
|
| 28 |
+
id_ct_pdf = id_ct + (29, )
|
| 29 |
+
|
| 30 |
+
id_ct_postscript = id_ct + (30, )
|
| 31 |
+
|
| 32 |
+
id_ct_utf8TextWithCRLF = id_ct + (37, )
|
| 33 |
+
|
| 34 |
+
id_ct_xml = id_ct + (28, )
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
# Map of Content Type OIDs to Content Types is added to the
|
| 38 |
+
# ones that are in rfc5652.py
|
| 39 |
+
|
| 40 |
+
_cmsContentTypesMapUpdate = {
|
| 41 |
+
id_ct_asciiTextWithCRLF: univ.OctetString(),
|
| 42 |
+
id_ct_epub: univ.OctetString(),
|
| 43 |
+
id_ct_htmlWithCRLF: univ.OctetString(),
|
| 44 |
+
id_ct_pdf: univ.OctetString(),
|
| 45 |
+
id_ct_postscript: univ.OctetString(),
|
| 46 |
+
id_ct_utf8TextWithCRLF: univ.OctetString(),
|
| 47 |
+
id_ct_xml: univ.OctetString(),
|
| 48 |
+
}
|
| 49 |
+
|
| 50 |
+
rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1_modules/rfc8410.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is being contributed to pyasn1-modules software.
|
| 2 |
+
#
|
| 3 |
+
# Created by Russ Housley.
|
| 4 |
+
#
|
| 5 |
+
# Copyright (c) 2019, Vigil Security, LLC
|
| 6 |
+
# License: http://snmplabs.com/pyasn1/license.html
|
| 7 |
+
#
|
| 8 |
+
# Algorithm Identifiers for Ed25519, Ed448, X25519, and X448
|
| 9 |
+
#
|
| 10 |
+
# ASN.1 source from:
|
| 11 |
+
# https://www.rfc-editor.org/rfc/rfc8410.txt
|
| 12 |
+
|
| 13 |
+
from pyasn1.type import univ
|
| 14 |
+
from pyasn1_modules import rfc3565
|
| 15 |
+
from pyasn1_modules import rfc4055
|
| 16 |
+
from pyasn1_modules import rfc5280
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class SignatureAlgorithmIdentifier(rfc5280.AlgorithmIdentifier):
|
| 20 |
+
pass
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class KeyEncryptionAlgorithmIdentifier(rfc5280.AlgorithmIdentifier):
|
| 24 |
+
pass
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class CurvePrivateKey(univ.OctetString):
|
| 28 |
+
pass
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
id_X25519 = univ.ObjectIdentifier('1.3.101.110')
|
| 32 |
+
|
| 33 |
+
id_X448 = univ.ObjectIdentifier('1.3.101.111')
|
| 34 |
+
|
| 35 |
+
id_Ed25519 = univ.ObjectIdentifier('1.3.101.112')
|
| 36 |
+
|
| 37 |
+
id_Ed448 = univ.ObjectIdentifier('1.3.101.113')
|
| 38 |
+
|
| 39 |
+
id_sha512 = rfc4055.id_sha512
|
| 40 |
+
|
| 41 |
+
id_aes128_wrap = rfc3565.id_aes128_wrap
|
| 42 |
+
|
| 43 |
+
id_aes256_wrap = rfc3565.id_aes256_wrap
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/__init__.py
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""TensorBoard is a webapp for understanding TensorFlow runs and graphs."""
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
from tensorboard import lazy as _lazy
|
| 19 |
+
from tensorboard import version as _version
|
| 20 |
+
|
| 21 |
+
# TensorBoard public API.
|
| 22 |
+
__all__ = [
|
| 23 |
+
"__version__",
|
| 24 |
+
"errors",
|
| 25 |
+
"notebook",
|
| 26 |
+
"program",
|
| 27 |
+
"summary",
|
| 28 |
+
]
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
# Please be careful when changing the structure of this file.
|
| 32 |
+
#
|
| 33 |
+
# The lazy imports in this file must use `importlib.import_module`, not
|
| 34 |
+
# `import tensorboard.foo` or `from tensorboard import foo`, or it will
|
| 35 |
+
# be impossible to reload the TensorBoard module without breaking these
|
| 36 |
+
# top-level public APIs. This has to do with the gory details of
|
| 37 |
+
# Python's module system. Take `tensorboard.notebook` as an example:
|
| 38 |
+
#
|
| 39 |
+
# - When the `tensorboard` module (that's us!) is initialized, its
|
| 40 |
+
# `notebook` attribute is initialized to a new LazyModule. The
|
| 41 |
+
# actual `tensorboard.notebook` submodule is not loaded.
|
| 42 |
+
#
|
| 43 |
+
# - When the `tensorboard.notebook` submodule is first loaded, Python
|
| 44 |
+
# _reassigns_ the `notebook` attribute on the `tensorboard` module
|
| 45 |
+
# object to point to the underlying `tensorboard.notebook` module
|
| 46 |
+
# object, rather than its former LazyModule value. This occurs
|
| 47 |
+
# whether the module is loaded via the lazy module or directly as an
|
| 48 |
+
# import:
|
| 49 |
+
#
|
| 50 |
+
# - import tensorboard; tensorboard.notebook.start(...) # one way
|
| 51 |
+
# - from tensorboard import notebook # other way; same effect
|
| 52 |
+
#
|
| 53 |
+
# - When the `tensorboard` module is reloaded, its `notebook`
|
| 54 |
+
# attribute is once again bound to a (new) LazyModule, while the
|
| 55 |
+
# `tensorboard.notebook` module object is unaffected and still
|
| 56 |
+
# exists in `sys.modules`. But then...
|
| 57 |
+
#
|
| 58 |
+
# - When the new LazyModule is forced, it must resolve to the existing
|
| 59 |
+
# `tensorboard.notebook` module object rather than itself (which
|
| 60 |
+
# just creates a stack overflow). If the LazyModule load function
|
| 61 |
+
# uses `import tensorboard.notebook; return tensorboard.notebook`,
|
| 62 |
+
# then the first statement will do _nothing_ because the
|
| 63 |
+
# `tensorboard.notebook` module is already loaded, and the second
|
| 64 |
+
# statement will return the LazyModule itself. The same goes for the
|
| 65 |
+
# `from tensorboard import notebook` form. We need to ensure that
|
| 66 |
+
# the submodule is loaded and then pull the actual module object out
|
| 67 |
+
# of `sys.modules`... which is exactly what `importlib` handles for
|
| 68 |
+
# us.
|
| 69 |
+
#
|
| 70 |
+
# See <https://github.com/tensorflow/tensorboard/issues/1989> for
|
| 71 |
+
# additional discussion.
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
@_lazy.lazy_load("tensorboard.data")
|
| 75 |
+
def data():
|
| 76 |
+
import importlib
|
| 77 |
+
|
| 78 |
+
return importlib.import_module("tensorboard.data")
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
@_lazy.lazy_load("tensorboard.errors")
|
| 82 |
+
def errors():
|
| 83 |
+
import importlib
|
| 84 |
+
|
| 85 |
+
return importlib.import_module("tensorboard.errors")
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
@_lazy.lazy_load("tensorboard.notebook")
|
| 89 |
+
def notebook():
|
| 90 |
+
import importlib
|
| 91 |
+
|
| 92 |
+
return importlib.import_module("tensorboard.notebook")
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
@_lazy.lazy_load("tensorboard.program")
|
| 96 |
+
def program():
|
| 97 |
+
import importlib
|
| 98 |
+
|
| 99 |
+
return importlib.import_module("tensorboard.program")
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
@_lazy.lazy_load("tensorboard.summary")
|
| 103 |
+
def summary():
|
| 104 |
+
import importlib
|
| 105 |
+
|
| 106 |
+
return importlib.import_module("tensorboard.summary")
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def load_ipython_extension(ipython):
|
| 110 |
+
"""IPython API entry point.
|
| 111 |
+
|
| 112 |
+
Only intended to be called by the IPython runtime.
|
| 113 |
+
|
| 114 |
+
See:
|
| 115 |
+
https://ipython.readthedocs.io/en/stable/config/extensions/index.html
|
| 116 |
+
"""
|
| 117 |
+
notebook._load_ipython_extension(ipython)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
__version__ = _version.VERSION
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/assets.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""Bindings for TensorBoard frontend assets."""
|
| 16 |
+
|
| 17 |
+
import os
|
| 18 |
+
|
| 19 |
+
from tensorboard.util import tb_logging
|
| 20 |
+
|
| 21 |
+
logger = tb_logging.get_logger()
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def get_default_assets_zip_provider():
    """Try to get a function to provide frontend assets.

    Returns:
      Either (a) a zero-argument callable that returns an open binary
      file handle to a Zip archive of frontend assets, or (b) `None`
      if the frontend assets cannot be found.
    """
    webfiles_path = os.path.join(os.path.dirname(__file__), "webfiles.zip")
    if os.path.exists(webfiles_path):
        return lambda: open(webfiles_path, "rb")
    # Missing assets are not fatal; callers treat `None` as "no frontend".
    logger.warning("webfiles.zip static assets not found: %s", webfiles_path)
    return None
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/auth.py
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""Experimental framework for authentication in TensorBoard."""
|
| 16 |
+
|
| 17 |
+
import abc
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class AuthProvider(metaclass=abc.ABCMeta):
    """Authentication provider for a specific kind of credential."""

    # NOTE(review): this method is not decorated with `@abc.abstractmethod`,
    # so despite the `ABCMeta` metaclass, subclasses are not forced to
    # override it; the base implementation is a no-op returning `None`.
    def authenticate(self, environ):
        """Produce an opaque auth token from a WSGI request environment.

        Args:
          environ: A WSGI environment `dict`; see PEP 3333.

        Returns:
          A Python object representing an auth token. The representation
          and semantics depend on the particular `AuthProvider`
          implementation.

        Raises:
          Exception: Any error, usually `tensorboard.errors.PublicError`
            subclasses (like `PermissionDenied`) but also possibly a
            custom error type that should propagate to a WSGI middleware
            for effecting a redirect-driven auth flow.
        """
        pass
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class AuthContext(object):
    """Authentication context within the scope of a single request.

    Auth providers are keyed within an `AuthContext` by arbitrary
    unique keys; it is often natural to key a provider by its own
    type object.
    """

    def __init__(self, providers, environ):
        """Create an auth context.

        Args:
          providers: A mapping from opaque provider keys to
            `AuthProvider` implementations.
          environ: A WSGI environment (see PEP 3333).
        """
        self._environ = environ
        self._providers = providers
        # Per-request memoization of successful `authenticate` results,
        # keyed by provider key.
        self._cache = {}

    @classmethod
    def empty(cls):
        """Create an auth context with no registered providers.

        Returns:
          A new `AuthContext` value for which any call to `get` will
          fail with a `KeyError`.
        """
        # The empty dict is not a valid WSGI environment, but that is
        # irrelevant: with no providers it can never be inspected.
        return cls({}, {})

    def get(self, provider_key):
        """Get an auth token from the auth provider with the given key.

        If successful, the result will be cached on this auth context.
        If unsuccessful, nothing will be cached, so a future call will
        invoke the underlying `AuthProvider.authenticate` method again.

        This method is not thread-safe. If multiple threads share an
        auth context for a single request, then they must synchronize
        externally when calling this method.

        Returns:
          The result of `provider.authenticate(...)` for the auth
          provider specified by `provider_key`.

        Raises:
          KeyError: If the given `provider_key` does not correspond to
            any registered `AuthProvider`.
          Exception: As raised by the underlying `AuthProvider`.
        """
        # Look up the provider first so an unknown key always raises
        # `KeyError`, matching the documented contract.
        provider = self._providers[provider_key]
        try:
            return self._cache[provider_key]
        except KeyError:
            pass
        token = provider.authenticate(self._environ)
        self._cache[provider_key] = token
        return token
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/context.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""Request-scoped context."""
|
| 16 |
+
|
| 17 |
+
from tensorboard import auth as auth_lib
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
# A `RequestContext` value is stored on WSGI environments under this key.
|
| 21 |
+
_WSGI_KEY = "tensorboard.request_context"
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class RequestContext(object):
    """Container of request-scoped values.

    This context is for cross-cutting concerns: authentication,
    authorization, auditing, internationalization, logging, and so on.
    It is not simply for passing commonly used parameters to functions.

    `RequestContext` values are to be treated as immutable.

    Fields:
      auth: An `AuthContext`, which may be empty but is never `None`.
      remote_ip: An `ipaddress.IPv4Address` or `ipaddress.IPv6Address` or
        None. Best guess of the IP address of the end user.
      x_forwarded_for: A tuple of `ipaddress.IPv4Address` or
        `ipaddress.IPv6Address`, which may be empty but is never None.
        This should be the parsed value of the X-Forwarded-For HTTP
        header from the request.
      client_feature_flags: A dict of string to arbitrary type. These
        represent feature flag key/value pairs sent by the client
        application. Users of client_feature_flags should know the name
        of the feature flag key and should know and validate the type of
        the value.
    """

    def __init__(
        self,
        auth=None,
        remote_ip=None,
        x_forwarded_for=None,
        client_feature_flags=None,
    ):
        """Create a request context.

        The argument list is sorted and may be extended in the future;
        therefore, callers must pass only named arguments to this
        initializer.

        Args:
          See "Fields" on class docstring. All arguments are optional
          and will be replaced with default values if appropriate.
        """
        if auth is None:
            auth = auth_lib.AuthContext.empty()
        self._auth = auth
        self._remote_ip = remote_ip
        # Falsy values normalize to the canonical empty defaults.
        self._x_forwarded_for = x_forwarded_for if x_forwarded_for else ()
        self._client_feature_flags = (
            client_feature_flags if client_feature_flags else {}
        )

    @property
    def auth(self):
        return self._auth

    @property
    def remote_ip(self):
        return self._remote_ip

    @property
    def x_forwarded_for(self):
        return self._x_forwarded_for

    @property
    def client_feature_flags(self):
        return self._client_feature_flags

    def replace(self, **kwargs):
        """Create a copy of this context with updated key-value pairs.

        Analogous to `namedtuple._replace`. For example, to create a new
        request context like `ctx` but with auth context `auth`, call
        `ctx.replace(auth=auth)`.

        Args:
          As to `__init__`.

        Returns:
          A new context like this one but with the specified updates.
        """
        fields = {
            "auth": self.auth,
            "remote_ip": self.remote_ip,
            "x_forwarded_for": self.x_forwarded_for,
            "client_feature_flags": self.client_feature_flags,
        }
        fields.update(kwargs)
        return type(self)(**fields)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def from_environ(environ):
    """Get a `RequestContext` from a WSGI environment.

    See also `set_in_environ`.

    Args:
      environ: A WSGI environment (see PEP 3333).

    Returns:
      The `RequestContext` stored in the WSGI environment, or an empty
      `RequestContext` if none is stored.
    """
    ctx = environ.get(_WSGI_KEY)
    if ctx is None:
        ctx = RequestContext()
    return ctx
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def set_in_environ(environ, ctx):
    """Set the `RequestContext` in a WSGI environment.

    After `set_in_environ(e, ctx)`, `from_environ(e) is ctx`. The input
    environment is mutated.

    Args:
      environ: A WSGI environment to update.
      ctx: A new `RequestContext` value.
    """
    # Stored under a private, namespaced key to avoid colliding with
    # standard WSGI environ entries.
    environ[_WSGI_KEY] = ctx
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/data_compat.py
ADDED
|
@@ -0,0 +1,160 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""Utilities to migrate legacy protos to their modern equivalents."""
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
import numpy as np
|
| 19 |
+
|
| 20 |
+
from tensorboard.compat.proto import event_pb2
|
| 21 |
+
from tensorboard.compat.proto import summary_pb2
|
| 22 |
+
from tensorboard.plugins.audio import metadata as audio_metadata
|
| 23 |
+
from tensorboard.plugins.histogram import metadata as histogram_metadata
|
| 24 |
+
from tensorboard.plugins.image import metadata as image_metadata
|
| 25 |
+
from tensorboard.plugins.scalar import metadata as scalar_metadata
|
| 26 |
+
from tensorboard.util import tensor_util
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def migrate_event(event):
    """Migrate any legacy summary values on `event` to new-style form.

    Args:
      event: An `event_pb2.Event`; not modified.

    Returns:
      The original event when nothing needed migrating, or a fresh
      `event_pb2.Event` with migrated summary values.
    """
    if not event.HasField("summary"):
        return event
    originals = event.summary.value
    migrated = [migrate_value(v) for v in originals]
    # Optimization: don't create a new event if there were no changes.
    unchanged = len(originals) == len(migrated) and all(
        old is new for (old, new) in zip(originals, migrated)
    )
    if unchanged:
        return event
    new_event = event_pb2.Event()
    new_event.CopyFrom(event)
    del new_event.summary.value[:]
    new_event.summary.value.extend(migrated)
    return new_event
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def migrate_value(value):
    """Convert `value` to a new-style value, if necessary and possible.

    An "old-style" value is a value that uses any `value` field other
    than the `tensor` field. A "new-style" value is a value that uses
    the `tensor` field. TensorBoard continues to support old-style
    values on disk; this method converts them to new-style values so
    that further code need only deal with one data format.

    Arguments:
      value: A `Summary.Value` object. This argument is not modified.

    Returns:
      If the `value` is an old-style value for which there is a
      new-style equivalent, the result is the new-style value.
      Otherwise---if the value is already new-style or does not yet have
      a new-style equivalent---the value will be returned unchanged.

    :type value: Summary.Value
    :rtype: Summary.Value
    """
    field = value.WhichOneof("value")
    if field == "histo":
        return _migrate_histogram_value(value)
    if field == "image":
        return _migrate_image_value(value)
    if field == "audio":
        return _migrate_audio_value(value)
    if field == "simple_value":
        return _migrate_scalar_value(value)
    # New-style or unmigratable values pass through untouched.
    return value
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def make_summary(tag, metadata, data):
    """Pack `data` into a tensor and wrap it in a new-style `Summary.Value`.

    Args:
      tag: Tag name (string) for the resulting value.
      metadata: A `SummaryMetadata` proto to attach.
      data: Payload accepted by `tensor_util.make_tensor_proto`.

    Returns:
      A `summary_pb2.Summary.Value` carrying `data` in its `tensor` field.
    """
    tensor_proto = tensor_util.make_tensor_proto(data)
    return summary_pb2.Summary.Value(
        tag=tag, metadata=metadata, tensor=tensor_proto
    )
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def _migrate_histogram_value(value):
    """Convert `old-style` histogram value to `new-style`.

    The "old-style" format can have outermost bucket limits of -DBL_MAX and
    DBL_MAX, which are problematic for visualization. We replace those here
    with the actual min and max values seen in the input data, but then in
    order to avoid introducing "backwards" buckets (where left edge > right
    edge), we first must drop all empty buckets on the left and right ends.

    Returns:
      A new-style `Summary.Value` whose tensor is an (k, 3) float32 array
      of (left_edge, right_edge, count) rows.
    """
    histogram_value = value.histo
    bucket_counts = histogram_value.bucket
    # Find the indices of the leftmost and rightmost non-empty buckets.
    # `start` defaults to n and `end` to -1 when every bucket is empty.
    n = len(bucket_counts)
    start = next((i for i in range(n) if bucket_counts[i] > 0), n)
    end = next((i for i in reversed(range(n)) if bucket_counts[i] > 0), -1)
    if start > end:
        # If all input buckets were empty, treat it as a zero-bucket
        # new-style histogram.
        buckets = np.zeros([0, 3], dtype=np.float32)
    else:
        # Discard empty buckets on both ends, and keep only the "inner"
        # edges from the remaining buckets. Note that bucket indices range
        # from `start` to `end` inclusive, but bucket_limit indices are
        # exclusive of `end` - this is because bucket_limit[i] is the
        # right-hand edge for bucket[i].
        bucket_counts = bucket_counts[start : end + 1]
        inner_edges = histogram_value.bucket_limit[start:end]
        # Use min as the left-hand limit for the first non-empty bucket.
        bucket_lefts = [histogram_value.min] + inner_edges
        # Use max as the right-hand limit for the last non-empty bucket.
        bucket_rights = inner_edges + [histogram_value.max]
        # Stack as three rows then transpose into (k, 3) rows of
        # (left, right, count).
        buckets = np.array(
            [bucket_lefts, bucket_rights, bucket_counts], dtype=np.float32
        ).transpose()

    summary_metadata = histogram_metadata.create_summary_metadata(
        display_name=value.metadata.display_name or value.tag,
        description=value.metadata.summary_description,
    )

    return make_summary(value.tag, summary_metadata, buckets)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def _migrate_image_value(value):
    """Convert an old-style image value to a new-style tensor value.

    The payload is the width, the height (both as ASCII byte strings),
    and the encoded image bytes, in that order.
    """
    old_image = value.image
    width_bytes = str(old_image.width).encode("ascii")
    height_bytes = str(old_image.height).encode("ascii")
    payload = [width_bytes, height_bytes, old_image.encoded_image_string]

    summary_metadata = image_metadata.create_summary_metadata(
        display_name=value.metadata.display_name or value.tag,
        description=value.metadata.summary_description,
        converted_to_tensor=True,
    )
    return make_summary(value.tag, summary_metadata, payload)
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def _migrate_audio_value(value):
    """Convert an old-style audio value to a new-style tensor value."""
    # The payload pairs the encoded clip with an empty label, since the
    # legacy proto carries no label.
    payload = [[value.audio.encoded_audio_string, b""]]
    summary_metadata = audio_metadata.create_summary_metadata(
        display_name=value.metadata.display_name or value.tag,
        description=value.metadata.summary_description,
        encoding=audio_metadata.Encoding.Value("WAV"),
        converted_to_tensor=True,
    )
    return make_summary(value.tag, summary_metadata, payload)
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def _migrate_scalar_value(value):
    """Convert an old-style `simple_value` scalar to a new-style tensor."""
    summary_metadata = scalar_metadata.create_summary_metadata(
        display_name=value.metadata.display_name or value.tag,
        description=value.metadata.summary_description,
    )
    return make_summary(value.tag, summary_metadata, value.simple_value)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/dataclass_compat.py
ADDED
|
@@ -0,0 +1,225 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""Utilities to migrate legacy summaries/events to generic data form.
|
| 16 |
+
|
| 17 |
+
For legacy summaries, this populates the `SummaryMetadata.data_class`
|
| 18 |
+
field and makes any necessary transformations to the tensor value. For
|
| 19 |
+
`graph_def` events, this creates a new summary event.
|
| 20 |
+
|
| 21 |
+
This should be effected after the `data_compat` transformation.
|
| 22 |
+
"""
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
from tensorboard.compat.proto import event_pb2
|
| 26 |
+
from tensorboard.compat.proto import summary_pb2
|
| 27 |
+
from tensorboard.plugins.audio import metadata as audio_metadata
|
| 28 |
+
from tensorboard.plugins.custom_scalar import (
|
| 29 |
+
metadata as custom_scalars_metadata,
|
| 30 |
+
)
|
| 31 |
+
from tensorboard.plugins.graph import metadata as graphs_metadata
|
| 32 |
+
from tensorboard.plugins.histogram import metadata as histograms_metadata
|
| 33 |
+
from tensorboard.plugins.hparams import metadata as hparams_metadata
|
| 34 |
+
from tensorboard.plugins.image import metadata as images_metadata
|
| 35 |
+
from tensorboard.plugins.mesh import metadata as mesh_metadata
|
| 36 |
+
from tensorboard.plugins.pr_curve import metadata as pr_curves_metadata
|
| 37 |
+
from tensorboard.plugins.scalar import metadata as scalars_metadata
|
| 38 |
+
from tensorboard.plugins.text import metadata as text_metadata
|
| 39 |
+
from tensorboard.util import tensor_util
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def migrate_event(event, initial_metadata):
    """Migrate an event to a sequence of events.

    Args:
      event: An `event_pb2.Event`. The caller transfers ownership of the
        event to this method; the event may be mutated, and may or may
        not appear in the returned sequence.
      initial_metadata: Map from tag name (string) to `SummaryMetadata`
        proto for the initial occurrence of the given tag within the
        enclosing run. While loading a given run, the caller should
        always pass the same dictionary here, initially `{}`; this
        function will mutate it and reuse it for future calls.

    Returns:
      A sequence of `event_pb2.Event`s to use instead of `event`.
    """
    kind = event.WhichOneof("what")
    if kind == "graph_def":
        return _migrate_graph_event(event)
    elif kind == "tagged_run_metadata":
        return _migrate_tagged_run_metadata_event(event)
    elif kind == "summary":
        return _migrate_summary_event(event, initial_metadata)
    else:
        # Anything else (file versions, session logs, ...) passes through.
        return (event,)
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def _migrate_graph_event(old_event):
    """Rewrite a `graph_def` event as a blob-sequence summary event.

    Args:
      old_event: An `event_pb2.Event` whose `graph_def` field is set.

    Returns:
      A pair `(old_event, new_event)`: the original event, plus a new
      summary event carrying the serialized graph bytes as a
      single-element blob sequence under the run-graph tag.
    """
    result = event_pb2.Event()
    result.wall_time = old_event.wall_time
    result.step = old_event.step
    value = result.summary.value.add(tag=graphs_metadata.RUN_GRAPH_NAME)
    graph_bytes = old_event.graph_def
    value.tensor.CopyFrom(tensor_util.make_tensor_proto([graph_bytes]))
    value.metadata.plugin_data.plugin_name = graphs_metadata.PLUGIN_NAME
    # `value.metadata.plugin_data.content` left empty
    value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    # As long as the graphs plugin still reads the old format, keep both
    # the old event and the new event to maintain compatibility.
    return (old_event, result)
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def _migrate_tagged_run_metadata_event(old_event):
    """Rewrite a `tagged_run_metadata` event as a blob-sequence summary.

    Args:
      old_event: An `event_pb2.Event` whose `tagged_run_metadata` field
        is set.

    Returns:
      A one-element sequence containing the replacement event; unlike
      graph events, the original event is dropped.
    """
    result = event_pb2.Event()
    result.wall_time = old_event.wall_time
    result.step = old_event.step
    trm = old_event.tagged_run_metadata
    value = result.summary.value.add(tag=trm.tag)
    value.tensor.CopyFrom(tensor_util.make_tensor_proto([trm.run_metadata]))
    value.metadata.plugin_data.plugin_name = (
        graphs_metadata.PLUGIN_NAME_TAGGED_RUN_METADATA
    )
    # `value.metadata.plugin_data.content` left empty
    value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    return (result,)
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
def _migrate_summary_event(event, initial_metadata):
    """Migrate each summary value on `event`, mutating it if needed."""
    originals = event.summary.value
    migrated = []
    for old_value in originals:
        migrated.extend(_migrate_value(old_value, initial_metadata))
    # Optimization: keep the event untouched if migration produced no
    # shallow changes (in-place changes may still have occurred).
    if len(migrated) == len(originals) and all(
        a is b for (a, b) in zip(originals, migrated)
    ):
        return (event,)
    del event.summary.value[:]
    event.summary.value.extend(migrated)
    return (event,)
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def _migrate_value(value, initial_metadata):
    """Convert an old value to a stream of new values. May mutate.

    Args:
      value: A `Summary.Value` proto; may be mutated in place.
      initial_metadata: Mutable map from tag name to the
        `SummaryMetadata` seen for that tag's first occurrence; this
        function records first-seen metadata here so that later points
        in the same time series are transformed consistently.

    Returns:
      A (possibly one-element) sequence of `Summary.Value`s to use in
      place of `value`.
    """
    # Fix: the original assigned a local `initial` flag (False/True)
    # that was never read afterwards; that dead code is removed.
    metadata = initial_metadata.get(value.tag)
    if metadata is None:
        # Retain a copy of the initial metadata, so that even after we
        # update its data class we know whether to also transform later
        # events in this time series.
        metadata = summary_pb2.SummaryMetadata()
        metadata.CopyFrom(value.metadata)
        initial_metadata[value.tag] = metadata
    if metadata.data_class != summary_pb2.DATA_CLASS_UNKNOWN:
        # Already new-style; nothing to do.
        return (value,)
    plugin_name = metadata.plugin_data.plugin_name
    # Dispatch on the owning plugin; unknown plugins pass through.
    handlers = {
        histograms_metadata.PLUGIN_NAME: _migrate_histogram_value,
        images_metadata.PLUGIN_NAME: _migrate_image_value,
        audio_metadata.PLUGIN_NAME: _migrate_audio_value,
        scalars_metadata.PLUGIN_NAME: _migrate_scalar_value,
        text_metadata.PLUGIN_NAME: _migrate_text_value,
        hparams_metadata.PLUGIN_NAME: _migrate_hparams_value,
        pr_curves_metadata.PLUGIN_NAME: _migrate_pr_curve_value,
        mesh_metadata.PLUGIN_NAME: _migrate_mesh_value,
        custom_scalars_metadata.PLUGIN_NAME: _migrate_custom_scalars_value,
        graphs_metadata.PLUGIN_NAME_RUN_METADATA: (
            _migrate_graph_sub_plugin_value
        ),
        graphs_metadata.PLUGIN_NAME_RUN_METADATA_WITH_GRAPH: (
            _migrate_graph_sub_plugin_value
        ),
        graphs_metadata.PLUGIN_NAME_KERAS_MODEL: (
            _migrate_graph_sub_plugin_value
        ),
    }
    handler = handlers.get(plugin_name)
    if handler is not None:
        return handler(value)
    return (value,)
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
def _migrate_scalar_value(value):
    """Tag a scalar time series with its data class, in place."""
    if not value.HasField("metadata"):
        # Don't materialize a metadata field that wasn't there.
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_SCALAR
    return (value,)
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
def _migrate_histogram_value(value):
    """Tag a histogram time series with its data class, in place."""
    if not value.HasField("metadata"):
        # Don't materialize a metadata field that wasn't there.
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
def _migrate_image_value(value):
    """Tag an image time series with its data class, in place."""
    if not value.HasField("metadata"):
        # Don't materialize a metadata field that wasn't there.
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    return (value,)
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
def _migrate_text_value(value):
    """Tag a text time series with its data class, in place."""
    if not value.HasField("metadata"):
        # Don't materialize a metadata field that wasn't there.
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
def _migrate_audio_value(value):
    """Tag an audio time series as a blob sequence and strip labels.

    Mutates `value` in place: collapses the stored tensor to rank 1 so
    that only the audio clips remain.
    """
    if value.HasField("metadata"):
        value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    tensor = value.tensor
    # Project out just the first axis: actual audio clips.
    # The flattened `string_val` stores `stride` entries per clip once
    # trailing dims are collapsed; keeping every `stride`-th entry keeps
    # the first entry of each group (presumably the clip rather than its
    # label -- consistent with the rank-2 (clip, label) legacy layout).
    stride = 1
    while len(tensor.tensor_shape.dim) > 1:
        stride *= tensor.tensor_shape.dim.pop().size
    if stride != 1:
        tensor.string_val[:] = tensor.string_val[::stride]
    return (value,)
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
def _migrate_hparams_value(value):
    """Tag an hparams value with its data class, in place.

    Also backfills a placeholder tensor when none is present, since
    hparams summaries carry their payload in metadata rather than in
    the tensor field.
    """
    if value.HasField("metadata"):
        value.metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    if not value.HasField("tensor"):
        value.tensor.CopyFrom(hparams_metadata.NULL_TENSOR)
    return (value,)
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
def _migrate_pr_curve_value(value):
    """Tag a PR-curve time series with its data class, in place."""
    if not value.HasField("metadata"):
        # Don't materialize a metadata field that wasn't there.
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
def _migrate_mesh_value(value):
    """Tag a mesh summary value with the tensor data class."""
    if not value.HasField("metadata"):
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
def _migrate_custom_scalars_value(value):
    """Tag a custom-scalars summary value with the tensor data class."""
    if not value.HasField("metadata"):
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
def _migrate_graph_sub_plugin_value(value):
    """Tag a graph sub-plugin summary as blob-sequence data.

    A rank-0 tensor shape is promoted to rank 1 with a single element so
    the value reads as a one-element blob sequence.
    """
    if value.HasField("metadata"):
        value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    dims = value.tensor.tensor_shape.dim
    if not dims:
        dims.add(size=1)
    return (value,)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/default.py
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""Collection of first-party plugins.
|
| 16 |
+
|
| 17 |
+
This module exists to isolate tensorboard.program from the potentially
|
| 18 |
+
heavyweight build dependencies for first-party plugins. This way people
|
| 19 |
+
doing custom builds of TensorBoard have the option to only pay for the
|
| 20 |
+
dependencies they want.
|
| 21 |
+
|
| 22 |
+
This module also grants the flexibility to those doing custom builds, to
|
| 23 |
+
automatically inherit the centrally-maintained list of standard plugins,
|
| 24 |
+
for less repetition.
|
| 25 |
+
"""
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
import logging
|
| 29 |
+
|
| 30 |
+
import pkg_resources
|
| 31 |
+
|
| 32 |
+
from tensorboard.backend import experimental_plugin
|
| 33 |
+
from tensorboard.plugins.audio import audio_plugin
|
| 34 |
+
from tensorboard.plugins.core import core_plugin
|
| 35 |
+
from tensorboard.plugins.custom_scalar import custom_scalars_plugin
|
| 36 |
+
from tensorboard.plugins.debugger_v2 import debugger_v2_plugin
|
| 37 |
+
from tensorboard.plugins.distribution import distributions_plugin
|
| 38 |
+
from tensorboard.plugins.graph import graphs_plugin
|
| 39 |
+
from tensorboard.plugins.histogram import histograms_plugin
|
| 40 |
+
from tensorboard.plugins.hparams import hparams_plugin
|
| 41 |
+
from tensorboard.plugins.image import images_plugin
|
| 42 |
+
from tensorboard.plugins.metrics import metrics_plugin
|
| 43 |
+
from tensorboard.plugins.pr_curve import pr_curves_plugin
|
| 44 |
+
from tensorboard.plugins.profile_redirect import profile_redirect_plugin
|
| 45 |
+
from tensorboard.plugins.scalar import scalars_plugin
|
| 46 |
+
from tensorboard.plugins.text import text_plugin
|
| 47 |
+
from tensorboard.plugins.text_v2 import text_v2_plugin
|
| 48 |
+
from tensorboard.plugins.mesh import mesh_plugin
|
| 49 |
+
from tensorboard.plugins.npmi import npmi_plugin
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
logger = logging.getLogger(__name__)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class ExperimentalTextV2Plugin(
    text_v2_plugin.TextV2Plugin, experimental_plugin.ExperimentalPlugin
):
    """Angular Text plugin, marked experimental via the mixin base."""
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class ExperimentalNpmiPlugin(
    npmi_plugin.NpmiPlugin, experimental_plugin.ExperimentalPlugin
):
    """Angular nPMI plugin, marked experimental via the mixin base."""
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
# Ordering matters. The order in which these lines appear determines the
# ordering of tabs in TensorBoard's GUI.
#
# Each entry is either a TBLoader instance (e.g. `CorePluginLoader`) or a
# TBPlugin class; see `get_static_plugins` below for how both forms are
# consumed.
_PLUGINS = [
    core_plugin.CorePluginLoader(include_debug_info=True),
    metrics_plugin.MetricsPlugin,
    scalars_plugin.ScalarsPlugin,
    custom_scalars_plugin.CustomScalarsPlugin,
    images_plugin.ImagesPlugin,
    audio_plugin.AudioPlugin,
    debugger_v2_plugin.DebuggerV2Plugin,
    graphs_plugin.GraphsPlugin,
    distributions_plugin.DistributionsPlugin,
    histograms_plugin.HistogramsPlugin,
    text_plugin.TextPlugin,
    pr_curves_plugin.PrCurvesPlugin,
    profile_redirect_plugin.ProfileRedirectPluginLoader,
    hparams_plugin.HParamsPlugin,
    mesh_plugin.MeshPlugin,
    # Experimental plugins are listed last; they are hidden unless enabled.
    ExperimentalTextV2Plugin,
    ExperimentalNpmiPlugin,
]
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def get_plugins():
    """Return all known TensorBoard plugins, static plus dynamic.

    This includes both first-party, statically bundled plugins and
    dynamic plugins discovered via entry points.

    This list can be passed to the `tensorboard.program.TensorBoard` API.

    Returns:
      The list of default first-party plugins followed by dynamic ones.
    """
    plugins = list(get_static_plugins())
    plugins.extend(get_dynamic_plugins())
    return plugins
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def get_static_plugins():
    """Return TensorBoard's default first-party plugins.

    Each entry is either a TBLoader instance that loads the plugin, or a
    TBPlugin class itself, which will be loaded using a BasicLoader.

    This list can be passed to the `tensorboard.program.TensorBoard` API.

    Returns:
      The list of default first-party plugins.

    :rtype: list[Type[base_plugin.TBLoader] | Type[base_plugin.TBPlugin]]
    """
    # Return a copy so callers can mutate their list without affecting
    # the module-level default.
    return list(_PLUGINS)
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def get_dynamic_plugins():
    """Return TensorBoard's dynamically loaded plugins.

    A dynamic TensorBoard plugin is specified using entry_points [1] and
    it is the robust way to integrate plugins into TensorBoard.

    This list can be passed to the `tensorboard.program.TensorBoard` API.

    Returns:
      The list of dynamic plugins.

    :rtype: list[Type[base_plugin.TBLoader] | Type[base_plugin.TBPlugin]]

    [1]: https://packaging.python.org/specifications/entry-points/
    """
    plugins = []
    for entry_point in pkg_resources.iter_entry_points("tensorboard_plugins"):
        plugins.append(entry_point.resolve())
    return plugins
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/errors.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""Error codes (experimental module).
|
| 16 |
+
|
| 17 |
+
These types represent a subset of the Google error codes [1], which are
|
| 18 |
+
also used by TensorFlow, gRPC, et al.
|
| 19 |
+
|
| 20 |
+
When an HTTP handler raises one of these errors, the TensorBoard core
|
| 21 |
+
application will catch it and automatically serve a properly formatted
|
| 22 |
+
response with the error message.
|
| 23 |
+
|
| 24 |
+
[1]: https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto
|
| 25 |
+
"""
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class PublicError(RuntimeError):
    """Base class for errors whose text contains no sensitive information.

    Fields:
      http_code: Integer between 400 and 599 inclusive (e.g., 404).
      headers: List of additional key-value pairs to include in the
        response, like `[("Allow", "GET")]` for HTTP 405 or
        `[("WWW-Authenticate", "Digest")]` for HTTP 401. May be empty.
    """

    # Default status; concrete subclasses override with a specific code.
    http_code = 500

    def __init__(self, details):
        super().__init__(details)
        # Subclasses may append response headers (e.g. WWW-Authenticate).
        self.headers = []
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class InvalidArgumentError(PublicError):
    """Client specified an invalid argument.

    The message is assumed not to contain sensitive data and so may
    appear in (e.g.) the response body of a failed HTTP request.

    Corresponds to HTTP 400 Bad Request or Google error code
    `INVALID_ARGUMENT`.
    """

    http_code = 400

    def __init__(self, details=None):
        super().__init__(_format_message("Invalid argument", details))
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class NotFoundError(PublicError):
    """Some requested entity (e.g., file or directory) was not found.

    The message is assumed not to contain sensitive data and so may
    appear in (e.g.) the response body of a failed HTTP request.

    Corresponds to HTTP 404 Not Found or Google error code `NOT_FOUND`.
    """

    http_code = 404

    def __init__(self, details=None):
        super().__init__(_format_message("Not found", details))
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class UnauthenticatedError(PublicError):
    """Request lacks valid authentication credentials for the operation.

    The message is assumed not to contain sensitive data and so may
    appear in (e.g.) the response body of a failed HTTP request.

    Corresponds to HTTP 401 Unauthorized (despite the name) or Google
    error code `UNAUTHENTICATED`. HTTP 401 responses are required to
    carry a `WWW-Authenticate` challenge, so instances of this error
    are, too.
    """

    http_code = 401

    def __init__(self, details=None, *, challenge):
        """Initialize an `UnauthenticatedError`.

        Args:
          details: Optional public, user-facing error message, as a
            string or any value that can be converted to string, or
            `None` to omit details.
          challenge: String value of the `WWW-Authenticate` HTTP header
            as described in RFC 7235.
        """
        super().__init__(_format_message("Unauthenticated", details))
        self.headers.append(("WWW-Authenticate", challenge))
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
class PermissionDeniedError(PublicError):
    """The caller lacks permission to execute the specified operation.

    The message is assumed not to contain sensitive data and so may
    appear in (e.g.) the response body of a failed HTTP request.

    Corresponds to HTTP 403 Forbidden or Google error code
    `PERMISSION_DENIED`.
    """

    http_code = 403

    def __init__(self, details=None):
        super().__init__(_format_message("Permission denied", details))
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def _format_message(code_name, details):
|
| 127 |
+
if details is None:
|
| 128 |
+
return code_name
|
| 129 |
+
else:
|
| 130 |
+
return "%s: %s" % (code_name, details)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/lazy.py
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""TensorBoard is a webapp for understanding TensorFlow runs and graphs."""
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
import functools
|
| 19 |
+
import threading
|
| 20 |
+
import types
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def lazy_load(name):
    """Decorator to define a function that lazily loads the module 'name'.

    This can be used to defer importing troublesome dependencies - e.g. ones that
    are large and infrequently used, or that cause a dependency cycle -
    until they are actually used.

    Args:
      name: the fully-qualified name of the module; typically the last segment
        of 'name' matches the name of the decorated function

    Returns:
      Decorator function that produces a lazy-loading module 'name' backed by the
      underlying decorated function.
    """

    def wrapper(load_fn):
        # Wrap load_fn to call it exactly once and update __dict__ afterwards to
        # make future lookups efficient (only failed lookups call __getattr__).
        @_memoize
        def load_once(self):
            # `loading` guards against re-entry: if resolving this module
            # triggers another attribute access on the same LazyModule before
            # load_fn() returns, we fail fast instead of recursing forever.
            if load_once.loading:
                raise ImportError(
                    "Circular import when resolving LazyModule %r" % name
                )
            load_once.loading = True
            try:
                module = load_fn()
            finally:
                # Reset even on failure so a later access can retry the load.
                load_once.loading = False
            # Copy the real module's attributes onto the proxy so subsequent
            # successful lookups never hit __getattr__ again.
            self.__dict__.update(module.__dict__)
            load_once.loaded = True
            return module

        # Per-decoration state, stored on the function to keep the proxy
        # module's own attribute namespace clean.
        load_once.loading = False
        load_once.loaded = False

        # Define a module that proxies getattr() and dir() to the result of calling
        # load_once() the first time it's needed. The class is nested so we can close
        # over load_once() and avoid polluting the module's attrs with our own state.
        class LazyModule(types.ModuleType):
            def __getattr__(self, attr_name):
                return getattr(load_once(self), attr_name)

            def __dir__(self):
                return dir(load_once(self))

            def __repr__(self):
                if load_once.loaded:
                    return "<%r via LazyModule (loaded)>" % load_once(self)
                # Deliberately avoid calling load_once() here so that repr()
                # (e.g. in a debugger) does not itself trigger the import.
                return (
                    "<module %r via LazyModule (not yet loaded)>"
                    % self.__name__
                )

        return LazyModule(name)

    return wrapper
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def _memoize(f):
|
| 84 |
+
"""Memoizing decorator for f, which must have exactly 1 hashable
|
| 85 |
+
argument."""
|
| 86 |
+
nothing = object() # Unique "no value" sentinel object.
|
| 87 |
+
cache = {}
|
| 88 |
+
# Use a reentrant lock so that if f references the resulting wrapper we die
|
| 89 |
+
# with recursion depth exceeded instead of deadlocking.
|
| 90 |
+
lock = threading.RLock()
|
| 91 |
+
|
| 92 |
+
@functools.wraps(f)
|
| 93 |
+
def wrapper(arg):
|
| 94 |
+
if cache.get(arg, nothing) is nothing:
|
| 95 |
+
with lock:
|
| 96 |
+
if cache.get(arg, nothing) is nothing:
|
| 97 |
+
cache[arg] = f(arg)
|
| 98 |
+
return cache[arg]
|
| 99 |
+
|
| 100 |
+
return wrapper
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/main.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""TensorBoard main module.
|
| 16 |
+
|
| 17 |
+
This module ties together `tensorboard.program` and
|
| 18 |
+
`tensorboard.default_plugins` to provide standard TensorBoard. It's
|
| 19 |
+
meant to be tiny and act as little other than a config file. Those
|
| 20 |
+
wishing to customize the set of plugins or static assets that
|
| 21 |
+
TensorBoard uses can swap out this file with their own.
|
| 22 |
+
"""
|
| 23 |
+
|
| 24 |
+
import sys
|
| 25 |
+
|
| 26 |
+
from absl import app
|
| 27 |
+
from tensorboard import default
|
| 28 |
+
from tensorboard import main_lib
|
| 29 |
+
from tensorboard import program
|
| 30 |
+
from tensorboard.plugins import base_plugin
|
| 31 |
+
from tensorboard.uploader import uploader_subcommand
|
| 32 |
+
from tensorboard.util import tb_logging
|
| 33 |
+
|
| 34 |
+
logger = tb_logging.get_logger()
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def run_main():
    """Initialize global state, then build and run the TensorBoard app."""
    main_lib.global_init()

    tb_app = program.TensorBoard(
        plugins=default.get_plugins(),
        subcommands=[uploader_subcommand.UploaderSubcommand()],
    )
    try:
        app.run(tb_app.main, flags_parser=tb_app.configure)
    except base_plugin.FlagsError as e:
        # Bad flags are a user error, not a crash: report and exit nonzero.
        print("Error: %s" % e, file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    run_main()
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/main_lib.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""Helpers for TensorBoard main module."""
|
| 16 |
+
|
| 17 |
+
import os
|
| 18 |
+
import sys
|
| 19 |
+
|
| 20 |
+
import absl.logging
|
| 21 |
+
|
| 22 |
+
from tensorboard.compat import tf
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def global_init():
    """Modifies the global environment for running TensorBoard as main.

    This function changes global state in the Python process, so it
    should not be called from library routines.
    """
    # TF versions prior to 1.15.0 included default GCS filesystem caching
    # logic that interacted pathologically with the pattern of reads used by
    # TensorBoard for logdirs.
    # See: https://github.com/tensorflow/tensorboard/issues/1225
    # The problematic behavior was fixed in 1.15.0 by
    # https://github.com/tensorflow/tensorflow/commit/e43b94649d3e1ac5d538e4eca9166b899511d681
    # but for older versions of TF, we avoid a regression by setting this env
    # var to disable the cache, which must be done before the first import of
    # tensorflow.
    os.environ["GCS_READ_CACHE_DISABLED"] = "1"

    # The compat layer exposes a stub object when TensorFlow isn't installed.
    tf_is_stub = getattr(tf, "__version__", "stub") == "stub"
    if tf_is_stub:
        print(
            "TensorFlow installation not found - running with reduced feature set.",
            file=sys.stderr,
        )

    # Only emit log messages at WARNING and above by default to reduce spam.
    absl.logging.set_verbosity(absl.logging.WARNING)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/manager.py
ADDED
|
@@ -0,0 +1,474 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""Private utilities for managing multiple TensorBoard processes."""
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
import base64
|
| 19 |
+
import collections
|
| 20 |
+
import datetime
|
| 21 |
+
import errno
|
| 22 |
+
import json
|
| 23 |
+
import os
|
| 24 |
+
import subprocess
|
| 25 |
+
import tempfile
|
| 26 |
+
import time
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
from tensorboard import version
|
| 30 |
+
from tensorboard.util import tb_logging
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# Type descriptors for `TensorBoardInfo` fields.
#
# We represent timestamps as int-seconds-since-epoch rather than
# datetime objects to work around a bug in Python on Windows. See:
# https://github.com/tensorflow/tensorboard/issues/2017.
#
# Fields of `_FieldType`:
#   serialized_type: type the value must have in the parsed JSON form
#     (checked by `_info_from_string`)
#   runtime_type: type the value must have on the `TensorBoardInfo`
#     namedtuple (checked by `_info_to_string`)
#   serialize/deserialize: converters applied when writing/reading the
#     on-disk JSON representation
_FieldType = collections.namedtuple(
    "_FieldType",
    (
        "serialized_type",
        "runtime_type",
        "serialize",
        "deserialize",
    ),
)
_type_int = _FieldType(
    serialized_type=int,
    runtime_type=int,
    serialize=lambda n: n,
    deserialize=lambda n: n,
)
_type_str = _FieldType(
    serialized_type=str,  # `json.loads` always gives Unicode
    runtime_type=str,
    serialize=str,
    deserialize=str,
)

# Information about a running TensorBoard instance.
# NOTE: The key order here fixes the field order of `TensorBoardInfo`
# below, so it is part of the public shape of that tuple.
_TENSORBOARD_INFO_FIELDS = collections.OrderedDict(
    (
        ("version", _type_str),
        ("start_time", _type_int),  # seconds since epoch
        ("pid", _type_int),
        ("port", _type_int),
        ("path_prefix", _type_str),  # may be empty
        ("logdir", _type_str),  # may be empty
        ("db", _type_str),  # may be empty
        ("cache_key", _type_str),  # opaque, as given by `cache_key` below
    )
)
TensorBoardInfo = collections.namedtuple(
    "TensorBoardInfo",
    _TENSORBOARD_INFO_FIELDS,
)
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def data_source_from_info(info):
    """Format the data location for the given TensorBoardInfo.

    Args:
      info: A TensorBoardInfo value.

    Returns:
      A human-readable string describing the logdir or database
      connection used by the server: e.g., "logdir /tmp/logs".
    """
    # A nonempty `db` takes precedence over the logdir.
    return "db %s" % info.db if info.db else "logdir %s" % info.logdir
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
def _info_to_string(info):
    """Serialize a `TensorBoardInfo` to the string form stored on disk.

    The format returned by this function is opaque and should only be
    interpreted by `_info_from_string`.

    Args:
      info: A valid `TensorBoardInfo` object.

    Raises:
      ValueError: If any field on `info` is not of the correct type, or
        if its `version` does not match this TensorBoard's version.

    Returns:
      A string representation of the provided `TensorBoardInfo`.
    """
    # Validate runtime types before serializing anything.
    for key, field_type in _TENSORBOARD_INFO_FIELDS.items():
        field_value = getattr(info, key)
        if not isinstance(field_value, field_type.runtime_type):
            raise ValueError(
                "expected %r of type %s, but found: %r"
                % (key, field_type.runtime_type, field_value)
            )
    if info.version != version.VERSION:
        raise ValueError(
            "expected 'version' to be %r, but found: %r"
            % (version.VERSION, info.version)
        )
    json_value = {}
    for key, field_type in _TENSORBOARD_INFO_FIELDS.items():
        json_value[key] = field_type.serialize(getattr(info, key))
    return json.dumps(json_value, sort_keys=True, indent=4)
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def _info_from_string(info_string):
    """Parse a `TensorBoardInfo` object from its string representation.

    Args:
      info_string: A string representation of a `TensorBoardInfo`, as
        produced by a previous call to `_info_to_string`.

    Returns:
      A `TensorBoardInfo` value.

    Raises:
      ValueError: If the provided string is not valid JSON, or if it is
        missing any required fields, or if any field is of incorrect type.
    """
    try:
        json_value = json.loads(info_string)
    except ValueError:
        raise ValueError("invalid JSON: %r" % (info_string,))
    if not isinstance(json_value, dict):
        raise ValueError("not a JSON object: %r" % (json_value,))
    missing_keys = frozenset(_TENSORBOARD_INFO_FIELDS) - frozenset(json_value)
    if missing_keys:
        raise ValueError(
            "TensorBoardInfo missing keys: %r" % (sorted(missing_keys),)
        )
    # For forward compatibility, silently ignore unknown keys.

    # Type-check each serialized value, then deserialize it.
    fields = {}
    for (name, field_type) in _TENSORBOARD_INFO_FIELDS.items():
        raw_value = json_value[name]
        if not isinstance(raw_value, field_type.serialized_type):
            raise ValueError(
                "expected %r of type %s, but found: %r"
                % (name, field_type.serialized_type, raw_value)
            )
        fields[name] = field_type.deserialize(raw_value)
    return TensorBoardInfo(**fields)
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
def cache_key(working_directory, arguments, configure_kwargs):
    """Compute a `TensorBoardInfo.cache_key` field.

    The format returned by this function is opaque. Clients may only
    inspect it by comparing it for equality with other results from this
    function.

    Args:
      working_directory: The directory from which TensorBoard was launched
        and relative to which paths like `--logdir` and `--db` are
        resolved.
      arguments: The command-line args to TensorBoard, as `sys.argv[1:]`.
        Should be a list (or tuple), not an unparsed string. If you have a
        raw shell command, use `shlex.split` before passing it to this
        function.
      configure_kwargs: A dictionary of additional argument values to
        override the textual `arguments`, with the same semantics as in
        `tensorboard.program.TensorBoard.configure`. May be an empty
        dictionary.

    Returns:
      A string such that if two (prospective or actual) TensorBoard
      invocations have the same cache key then it is safe to use one in
      place of the other. The converse is not guaranteed: it is often safe
      to change the order of TensorBoard arguments, or to explicitly set
      them to their default values, or to move them between `arguments`
      and `configure_kwargs`, but such invocations may yield distinct
      cache keys.
    """
    if not isinstance(arguments, (list, tuple)):
        raise TypeError(
            "'arguments' should be a list of arguments, but found: %r "
            "(use `shlex.split` if given a string)" % (arguments,)
        )
    # Canonicalize via compact, key-sorted JSON so that logically equal
    # inputs always hash to the same key.
    serialized = json.dumps(
        {
            "working_directory": working_directory,
            "arguments": arguments,
            "configure_kwargs": configure_kwargs,
        },
        sort_keys=True,
        separators=(",", ":"),
    )
    # Base64 output is pure ASCII; decode so the result is `str` on both
    # Python 2 and 3.
    encoded = base64.b64encode(serialized.encode("utf-8"))
    return str(encoded.decode("ascii"))
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
def _get_info_dir():
|
| 221 |
+
"""Get path to directory in which to store info files.
|
| 222 |
+
|
| 223 |
+
The directory returned by this function is "owned" by this module. If
|
| 224 |
+
the contents of the directory are modified other than via the public
|
| 225 |
+
functions of this module, subsequent behavior is undefined.
|
| 226 |
+
|
| 227 |
+
The directory will be created if it does not exist.
|
| 228 |
+
"""
|
| 229 |
+
path = os.path.join(tempfile.gettempdir(), ".tensorboard-info")
|
| 230 |
+
try:
|
| 231 |
+
os.makedirs(path)
|
| 232 |
+
except OSError as e:
|
| 233 |
+
if e.errno == errno.EEXIST and os.path.isdir(path):
|
| 234 |
+
pass
|
| 235 |
+
else:
|
| 236 |
+
raise
|
| 237 |
+
else:
|
| 238 |
+
os.chmod(path, 0o777)
|
| 239 |
+
return path
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
def _get_info_file_path():
    """Get path to info file for the current process.

    As with `_get_info_dir`, the info directory will be created if it
    does not exist.
    """
    # One file per process, keyed by PID.
    filename = "pid-%d.info" % os.getpid()
    return os.path.join(_get_info_dir(), filename)
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
def write_info_file(tensorboard_info):
    """Write TensorBoardInfo to the current process's info file.

    This should be called by `main` once the server is ready. When the
    server shuts down, `remove_info_file` should be called.

    Args:
      tensorboard_info: A valid `TensorBoardInfo` object.

    Raises:
      ValueError: If any field on `info` is not of the correct type.
    """
    # Serialize (and thereby validate) before opening the file, so that
    # a validation failure cannot leave behind a truncated info file.
    contents = _info_to_string(tensorboard_info) + "\n"
    with open(_get_info_file_path(), "w") as outfile:
        outfile.write(contents)
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
def remove_info_file():
    """Remove the current process's TensorBoardInfo file, if it exists.

    If the file does not exist, no action is taken and no error is
    raised.
    """
    try:
        os.unlink(_get_info_file_path())
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
        # Already gone (e.g., the user wiped their temporary directory).
        # That is exactly the state we want, so there is nothing to do.
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
def get_all():
    """Return TensorBoardInfo values for running TensorBoard processes.

    This function may not provide a perfect snapshot of the set of running
    processes. Its result set may be incomplete if the user has cleaned
    their /tmp/ directory while TensorBoard processes are running. It may
    contain extraneous entries if TensorBoard processes exited uncleanly
    (e.g., with SIGKILL or SIGQUIT).

    Entries in the info directory that do not represent valid
    `TensorBoardInfo` values will be silently ignored.

    Returns:
      A fresh list of `TensorBoardInfo` objects.
    """
    info_dir = _get_info_dir()
    results = []
    for filename in os.listdir(info_dir):
        filepath = os.path.join(info_dir, filename)
        try:
            with open(filepath) as infile:
                contents = infile.read()
        except IOError as e:
            if e.errno != errno.EACCES:
                raise
            # May have been written by this module in a process whose
            # `umask` includes some bits of 0o444; skip it.
            continue
        try:
            results.append(_info_from_string(contents))
        except ValueError:
            # Ignore unrecognized files, logging at debug only.
            tb_logging.get_logger().debug(
                "invalid info file: %r",
                filepath,
                exc_info=True,
            )
    return results
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
# The following five types (`StartReused`, `StartLaunched`,
# `StartFailed`, `StartExecFailed`, `StartTimedOut`) enumerate the
# possible return values of the `start` function.

# Indicates that a call to `start` was compatible with an existing
# TensorBoard process, which can be reused according to the provided
# info.
StartReused = collections.namedtuple("StartReused", ("info",))

# Indicates that a call to `start` successfully launched a new
# TensorBoard process, which is available with the provided info.
StartLaunched = collections.namedtuple("StartLaunched", ("info",))

# Indicates that a call to `start` tried to launch a new TensorBoard
# instance, but the subprocess exited with the given exit code and
# output streams. (If the contents of the output streams are no longer
# available---e.g., because the user has emptied /tmp/---then the
# corresponding values will be `None`.)
StartFailed = collections.namedtuple(
    "StartFailed",
    (
        "exit_code",  # int, as `Popen.returncode` (negative for signal)
        "stdout",  # str, or `None` if the stream could not be read
        "stderr",  # str, or `None` if the stream could not be read
    ),
)

# Indicates that a call to `start` failed to invoke the subprocess.
#
# If the TensorBoard executable was chosen via the `TENSORBOARD_BINARY`
# environment variable, then the `explicit_binary` field contains the
# path to that binary; otherwise, the field is `None`.
StartExecFailed = collections.namedtuple(
    "StartExecFailed",
    (
        "os_error",  # `OSError` due to `Popen` invocation
        "explicit_binary",  # `str` or `None`; see type-level comment
    ),
)

# Indicates that a call to `start` launched a TensorBoard process, but
# that process neither exited nor wrote its info file within the allowed
# timeout period. The process may still be running under the included
# PID.
StartTimedOut = collections.namedtuple("StartTimedOut", ("pid",))
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
def start(arguments, timeout=datetime.timedelta(seconds=60)):
    """Start a new TensorBoard instance, or reuse a compatible one.

    If the cache key determined by the provided arguments and the current
    working directory (see `cache_key`) matches the cache key of a running
    TensorBoard process (see `get_all`), that process will be reused.

    Otherwise, a new TensorBoard process will be spawned with the provided
    arguments, using the `tensorboard` binary from the system path.

    Args:
      arguments: List of strings to be passed as arguments to
        `tensorboard`. (If you have a raw command-line string, see
        `shlex.split`.)
      timeout: `datetime.timedelta` object describing how long to wait for
        the subprocess to initialize a TensorBoard server and write its
        `TensorBoardInfo` file. If the info file is not written within
        this time period, `start` will assume that the subprocess is stuck
        in a bad state, and will give up on waiting for it and return a
        `StartTimedOut` result. Note that in such a case the subprocess
        will not be killed. Default value is 60 seconds.

    Returns:
      A `StartReused`, `StartLaunched`, `StartFailed`, or `StartTimedOut`
      object.
    """
    this_cache_key = cache_key(
        working_directory=os.getcwd(),
        arguments=arguments,
        configure_kwargs={},
    )
    match = _find_matching_instance(this_cache_key)
    if match:
        return StartReused(info=match)

    # Capture the subprocess's output streams in temp files so that they
    # can be reported back to the user if the launch fails.
    (stdout_fd, stdout_path) = tempfile.mkstemp(prefix=".tensorboard-stdout-")
    (stderr_fd, stderr_path) = tempfile.mkstemp(prefix=".tensorboard-stderr-")
    start_time_seconds = time.time()
    # `TENSORBOARD_BINARY`, if set, overrides the `tensorboard` found on PATH.
    explicit_tb = os.environ.get("TENSORBOARD_BINARY", None)
    try:
        p = subprocess.Popen(
            ["tensorboard" if explicit_tb is None else explicit_tb] + arguments,
            stdout=stdout_fd,
            stderr=stderr_fd,
        )
    except OSError as e:
        return StartExecFailed(os_error=e, explicit_binary=explicit_tb)
    finally:
        # Whether or not the launch succeeded, our copies of the
        # descriptors are no longer needed (the child holds its own).
        os.close(stdout_fd)
        os.close(stderr_fd)

    # Poll until the new server either writes its info file (success),
    # exits (failure), or exceeds the timeout.
    poll_interval_seconds = 0.5
    end_time_seconds = start_time_seconds + timeout.total_seconds()
    while time.time() < end_time_seconds:
        time.sleep(poll_interval_seconds)
        subprocess_result = p.poll()
        if subprocess_result is not None:
            return StartFailed(
                exit_code=subprocess_result,
                stdout=_maybe_read_file(stdout_path),
                stderr=_maybe_read_file(stderr_path),
            )
        info = _find_matching_instance(this_cache_key)
        if info:
            # Don't check that `info.pid == p.pid`, since on Windows that may
            # not be the case: see #4300.
            return StartLaunched(info=info)
    else:
        # (while-else) The loop exhausted the timeout without the server
        # exiting or registering itself.
        return StartTimedOut(pid=p.pid)
|
| 443 |
+
|
| 444 |
+
|
| 445 |
+
def _find_matching_instance(cache_key):
    """Find a running TensorBoard instance compatible with the cache key.

    Returns:
      A `TensorBoardInfo` object, or `None` if none matches the cache key.
    """
    candidates = [
        info for info in get_all() if info.cache_key == cache_key
    ]
    if not candidates:
        return None
    # Prefer the candidate with the lowest port number.
    # TODO(@wchargin): Check here that the provided port is still live.
    return min(candidates, key=lambda info: info.port)
|
| 457 |
+
|
| 458 |
+
|
| 459 |
+
def _maybe_read_file(filename):
|
| 460 |
+
"""Read the given file, if it exists.
|
| 461 |
+
|
| 462 |
+
Args:
|
| 463 |
+
filename: A path to a file.
|
| 464 |
+
|
| 465 |
+
Returns:
|
| 466 |
+
A string containing the file contents, or `None` if the file does
|
| 467 |
+
not exist.
|
| 468 |
+
"""
|
| 469 |
+
try:
|
| 470 |
+
with open(filename) as infile:
|
| 471 |
+
return infile.read()
|
| 472 |
+
except IOError as e:
|
| 473 |
+
if e.errno == errno.ENOENT:
|
| 474 |
+
return None
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/notebook.py
ADDED
|
@@ -0,0 +1,443 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 3 |
+
# you may not use this file except in compliance with the License.
|
| 4 |
+
# You may obtain a copy of the License at
|
| 5 |
+
#
|
| 6 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 7 |
+
#
|
| 8 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 9 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 10 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 11 |
+
# See the License for the specific language governing permissions and
|
| 12 |
+
# limitations under the License.
|
| 13 |
+
# ==============================================================================
|
| 14 |
+
"""Utilities for using TensorBoard in notebook contexts, like Colab.
|
| 15 |
+
|
| 16 |
+
These APIs are experimental and subject to change.
|
| 17 |
+
"""
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
import datetime
|
| 21 |
+
import errno
|
| 22 |
+
import html
|
| 23 |
+
import json
|
| 24 |
+
import os
|
| 25 |
+
import random
|
| 26 |
+
import shlex
|
| 27 |
+
import textwrap
|
| 28 |
+
import time
|
| 29 |
+
|
| 30 |
+
from tensorboard import manager
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# Return values for `_get_context` (see that function's docs for
# details).
_CONTEXT_COLAB = "_CONTEXT_COLAB"  # Colab notebook frontend
_CONTEXT_IPYTHON = "_CONTEXT_IPYTHON"  # IPython/Jupyter notebook kernel
_CONTEXT_NONE = "_CONTEXT_NONE"  # plain script or interactive shell
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def _get_context():
    """Determine the most specific context that we're in.

    Returns:
      _CONTEXT_COLAB: If in Colab with an IPython notebook context.
      _CONTEXT_IPYTHON: If not in Colab, but we are in an IPython notebook
        context (e.g., from running `jupyter notebook` at the command
        line).
      _CONTEXT_NONE: Otherwise (e.g., by running a Python script at the
        command-line or using the `ipython` interactive shell).
    """
    # Colab check: the `google.colab` module is importable and there is
    # an active IPython shell.
    try:
        import google.colab  # noqa: F401
        import IPython
    except ImportError:
        pass
    else:
        if IPython.get_ipython() is not None:
            # We'll assume that we're in a Colab notebook context.
            return _CONTEXT_COLAB

    # Jupyter/IPython check: an active shell with a `kernel` trait
    # indicates a notebook rather than a plain interactive shell.
    try:
        import IPython
    except ImportError:
        pass
    else:
        shell = IPython.get_ipython()
        if shell is not None and shell.has_trait("kernel"):
            return _CONTEXT_IPYTHON

    # Otherwise, we're not in a known notebook context.
    return _CONTEXT_NONE
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def load_ipython_extension(ipython):
    """Deprecated: use `%load_ext tensorboard` instead.

    Raises:
      RuntimeError: Always.
    """
    message = (
        "Use '%load_ext tensorboard' instead of '%load_ext tensorboard.notebook'."
    )
    raise RuntimeError(message)
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def _load_ipython_extension(ipython):
    """Load the TensorBoard notebook extension.

    Intended to be called from `%load_ext tensorboard`. Do not invoke this
    directly.

    Args:
      ipython: An `IPython.InteractiveShell` instance.
    """
    # All the extension does is install the `%tensorboard` magic.
    _register_magics(ipython)
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def _register_magics(ipython):
    """Register IPython line/cell magics.

    Args:
      ipython: An `InteractiveShell` instance.
    """
    # `%tensorboard <args>` delegates to `_start_magic`.
    ipython.register_magic_function(
        _start_magic,
        magic_kind="line",
        magic_name="tensorboard",
    )
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def _start_magic(line):
    """Implementation of the `%tensorboard` line magic."""
    # `line` is the raw argument string following `%tensorboard`.
    return start(line)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def start(args_string):
    """Launch and display a TensorBoard instance as if at the command line.

    Args:
      args_string: Command-line arguments to TensorBoard, to be
        interpreted by `shlex.split`: e.g., "--logdir ./logs --port 0".
        Shell metacharacters are not supported: e.g., "--logdir 2>&1" will
        point the logdir at the literal directory named "2>&1".
    """
    context = _get_context()
    # IPython may be genuinely absent (plain Python shell); in that case
    # `handle` stays None below and we fall back to plain `print`.
    try:
        import IPython
        import IPython.display
    except ImportError:
        IPython = None

    if context == _CONTEXT_NONE:
        handle = None
        print("Launching TensorBoard...")
    else:
        # A display handle lets us update the status message in place.
        handle = IPython.display.display(
            IPython.display.Pretty("Launching TensorBoard..."),
            display_id=True,
        )

    def print_or_update(message):
        # Update the existing display cell if we have one, else print.
        if handle is None:
            print(message)
        else:
            handle.update(IPython.display.Pretty(message))

    # Parse like a POSIX shell, honoring `#` comments.
    parsed_args = shlex.split(args_string, comments=True, posix=True)
    start_result = manager.start(parsed_args)

    # Dispatch on the five possible `manager.start` result types.
    if isinstance(start_result, manager.StartLaunched):
        _display(
            port=start_result.info.port,
            print_message=False,
            display_handle=handle,
        )

    elif isinstance(start_result, manager.StartReused):
        template = (
            "Reusing TensorBoard on port {port} (pid {pid}), started {delta} ago. "
            "(Use '!kill {pid}' to kill it.)"
        )
        message = template.format(
            port=start_result.info.port,
            pid=start_result.info.pid,
            delta=_time_delta_from_info(start_result.info),
        )
        print_or_update(message)
        _display(
            port=start_result.info.port,
            print_message=False,
            display_handle=None,
        )

    elif isinstance(start_result, manager.StartFailed):

        def format_stream(name, value):
            # `value` is the captured stream contents, or None if the
            # stream could not be read back (see `StartFailed` docs).
            if value == "":
                return ""
            elif value is None:
                return "\n<could not read %s>" % name
            else:
                return "\nContents of %s:\n%s" % (name, value.strip())

        message = (
            "ERROR: Failed to launch TensorBoard (exited with %d).%s%s"
            % (
                start_result.exit_code,
                format_stream("stderr", start_result.stderr),
                format_stream("stdout", start_result.stdout),
            )
        )
        print_or_update(message)

    elif isinstance(start_result, manager.StartExecFailed):
        # `explicit_binary` is set iff the user pointed us at a binary
        # via the `TENSORBOARD_BINARY` environment variable.
        the_tensorboard_binary = (
            "%r (set by the `TENSORBOARD_BINARY` environment variable)"
            % (start_result.explicit_binary,)
            if start_result.explicit_binary is not None
            else "`tensorboard`"
        )
        if start_result.os_error.errno == errno.ENOENT:
            message = (
                "ERROR: Could not find %s. Please ensure that your PATH contains "
                "an executable `tensorboard` program, or explicitly specify the path "
                "to a TensorBoard binary by setting the `TENSORBOARD_BINARY` "
                "environment variable." % (the_tensorboard_binary,)
            )
        else:
            message = "ERROR: Failed to start %s: %s" % (
                the_tensorboard_binary,
                start_result.os_error,
            )
        print_or_update(textwrap.fill(message))

    elif isinstance(start_result, manager.StartTimedOut):
        message = (
            "ERROR: Timed out waiting for TensorBoard to start. "
            "It may still be running as pid %d." % start_result.pid
        )
        print_or_update(message)

    else:
        raise TypeError(
            "Unexpected result from `manager.start`: %r.\n"
            "This is a TensorBoard bug; please report it." % start_result
        )
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def _time_delta_from_info(info):
|
| 234 |
+
"""Format the elapsed time for the given TensorBoardInfo.
|
| 235 |
+
|
| 236 |
+
Args:
|
| 237 |
+
info: A TensorBoardInfo value.
|
| 238 |
+
|
| 239 |
+
Returns:
|
| 240 |
+
A human-readable string describing the time since the server
|
| 241 |
+
described by `info` started: e.g., "2 days, 0:48:58".
|
| 242 |
+
"""
|
| 243 |
+
delta_seconds = int(time.time()) - info.start_time
|
| 244 |
+
return str(datetime.timedelta(seconds=delta_seconds))
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
def display(port=None, height=None):
    """Display a TensorBoard instance already running on this machine.

    Args:
      port: The port on which the TensorBoard server is listening, as an
        `int`, or `None` to automatically select the most recently
        launched TensorBoard.
      height: The height of the frame into which to render the TensorBoard
        UI, as an `int` number of pixels, or `None` to use a default value
        (currently 800).
    """
    # Delegate to the internal version, always announcing which instance
    # was selected.
    _display(port=port, height=height, print_message=True, display_handle=None)
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
def _display(port=None, height=None, print_message=False, display_handle=None):
    """Internal version of `display`.

    Args:
      port: As with `display`.
      height: As with `display`.
      print_message: True to print which TensorBoard instance was selected
        for display (if applicable), or False otherwise.
      display_handle: If not None, an IPython display handle into which to
        render TensorBoard.

    Raises:
      ValueError: If `port` is None and no TensorBoard instances are
        known to be running.
    """
    if height is None:
        height = 800

    # Take a single snapshot of the known instances. (Previously the
    # `port is None` branch called `manager.get_all()` a second time to
    # pick the newest instance, re-scanning the info directory and
    # racing against instances starting/stopping between the two scans.)
    infos = manager.get_all()
    if port is None:
        if not infos:
            raise ValueError(
                "Can't display TensorBoard: no known instances running."
            )
        # Most recently started instance wins.
        info = max(infos, key=lambda x: x.start_time)
        port = info.port
    else:
        matching = [i for i in infos if i.port == port]
        info = max(matching, key=lambda x: x.start_time) if matching else None

    if print_message:
        if info is not None:
            message = (
                "Selecting TensorBoard with {data_source} "
                "(started {delta} ago; port {port}, pid {pid})."
            ).format(
                data_source=manager.data_source_from_info(info),
                delta=_time_delta_from_info(info),
                port=info.port,
                pid=info.pid,
            )
            print(message)
        else:
            # The user explicitly provided a port, and we don't have any
            # additional information. There's nothing useful to say.
            pass

    # Render with the strategy appropriate to the current context.
    fn = {
        _CONTEXT_COLAB: _display_colab,
        _CONTEXT_IPYTHON: _display_ipython,
        _CONTEXT_NONE: _display_cli,
    }[_get_context()]
    return fn(port=port, height=height, display_handle=display_handle)
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
def _display_colab(port, height, display_handle):
    """Display a TensorBoard instance in a Colab output frame.

    The Colab VM is not directly exposed to the network, so the Colab
    runtime provides a service worker tunnel to proxy requests from the
    end user's browser through to servers running on the Colab VM: the
    output frame may issue requests to https://localhost:<port> (HTTPS
    only), which will be forwarded to the specified port on the VM.

    The script below asks the Colab kernel for a proxied URL for `port`
    (via `google.colab.kernel.proxyPort`) and embeds TensorBoard in an
    iframe pointing at that URL, tagged with a `tensorboardColab` query
    parameter.
    """
    import IPython.display

    # JavaScript executed in the output frame; `%PORT%`/`%HEIGHT%` are
    # placeholders substituted below.
    shell = """
        (async () => {
            const url = new URL(await google.colab.kernel.proxyPort(%PORT%, {'cache': true}));
            url.searchParams.set('tensorboardColab', 'true');
            const iframe = document.createElement('iframe');
            iframe.src = url;
            iframe.setAttribute('width', '100%');
            iframe.setAttribute('height', '%HEIGHT%');
            iframe.setAttribute('frameborder', 0);
            document.body.appendChild(iframe);
        })();
    """
    replacements = [
        ("%PORT%", "%d" % port),
        ("%HEIGHT%", "%d" % height),
    ]
    for (k, v) in replacements:
        shell = shell.replace(k, v)
    script = IPython.display.Javascript(shell)

    # Reuse an existing display cell if the caller provided one.
    if display_handle:
        display_handle.update(script)
    else:
        IPython.display.display(script)
|
| 360 |
+
|
| 361 |
+
|
| 362 |
+
def _display_ipython(port, height, display_handle):
    """Display a TensorBoard instance in a Jupyter/IPython output cell.

    Renders an `<iframe>` whose `src` is computed client-side from
    `window.location`, so that the notebook works regardless of the host
    it is served from. If the `TENSORBOARD_PROXY_URL` environment
    variable is set, its value is used as the iframe URL instead (with
    any literal `%PORT%` in it replaced by the actual port).

    Args:
      port: Port of the TensorBoard server, as an `int`.
      height: Height of the iframe, as an `int` number of pixels.
      display_handle: If not None, an IPython display handle to update
        in place; otherwise a new output is displayed.
    """
    import IPython.display

    # Random ID so multiple TensorBoard cells in one notebook don't clash.
    frame_id = "tensorboard-frame-{:08x}".format(random.getrandbits(64))
    # HTML/JS template; `%...%` placeholders are substituted below. The
    # frame ID is injected twice: HTML-escaped in the markup and
    # JSON-encoded in the script.
    shell = """
      <iframe id="%HTML_ID%" width="100%" height="%HEIGHT%" frameborder="0">
      </iframe>
      <script>
        (function() {
          const frame = document.getElementById(%JSON_ID%);
          const url = new URL(%URL%, window.location);
          const port = %PORT%;
          if (port) {
            url.port = port;
          }
          frame.src = url;
        })();
      </script>
    """
    proxy_url = os.environ.get("TENSORBOARD_PROXY_URL")
    if proxy_url is not None:
        # Allow %PORT% in $TENSORBOARD_PROXY_URL
        proxy_url = proxy_url.replace("%PORT%", "%d" % port)
        # Port "0" is falsy in the JS above, so the proxy URL's own port
        # is left untouched.
        replacements = [
            ("%HTML_ID%", html.escape(frame_id, quote=True)),
            ("%JSON_ID%", json.dumps(frame_id)),
            ("%HEIGHT%", "%d" % height),
            ("%PORT%", "0"),
            ("%URL%", json.dumps(proxy_url)),
        ]
    else:
        replacements = [
            ("%HTML_ID%", html.escape(frame_id, quote=True)),
            ("%JSON_ID%", json.dumps(frame_id)),
            ("%HEIGHT%", "%d" % height),
            ("%PORT%", "%d" % port),
            ("%URL%", json.dumps("/")),
        ]

    for (k, v) in replacements:
        shell = shell.replace(k, v)
    iframe = IPython.display.HTML(shell)
    # Reuse an existing display cell if the caller provided one.
    if display_handle:
        display_handle.update(iframe)
    else:
        IPython.display.display(iframe)
|
| 408 |
+
|
| 409 |
+
|
| 410 |
+
def _display_cli(port, height, display_handle):
|
| 411 |
+
del height # unused
|
| 412 |
+
del display_handle # unused
|
| 413 |
+
message = "Please visit http://localhost:%d in a web browser." % port
|
| 414 |
+
print(message)
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
def list():
    """Print a listing of known running TensorBoard instances.

    TensorBoard instances that were killed uncleanly (e.g., with SIGKILL
    or SIGQUIT) may appear in this list even if they are no longer
    running. Conversely, this list may be missing some entries if your
    operating system's temporary directory has been cleared since a
    still-running TensorBoard instance started.
    """
    infos = manager.get_all()
    if not infos:
        print("No known TensorBoard instances running.")
        return

    print("Known TensorBoard instances:")
    # One line per instance; template hoisted out of the loop.
    line_format = (
        " - port {port}: {data_source} (started {delta} ago; pid {pid})"
    )
    for info in infos:
        line = line_format.format(
            port=info.port,
            data_source=manager.data_source_from_info(info),
            delta=_time_delta_from_info(info),
            pid=info.pid,
        )
        print(line)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/plugin_util.py
ADDED
|
@@ -0,0 +1,242 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""Provides utilities that may be especially useful to plugins."""
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
import threading
|
| 19 |
+
|
| 20 |
+
from tensorboard._vendor.bleach.sanitizer import Cleaner
|
| 21 |
+
|
| 22 |
+
# pylint: disable=g-bad-import-order
|
| 23 |
+
# Google-only: import markdown_freewisdom
|
| 24 |
+
import markdown
|
| 25 |
+
|
| 26 |
+
from tensorboard import context as _context
|
| 27 |
+
from tensorboard.backend import experiment_id as _experiment_id
|
| 28 |
+
from tensorboard.util import tb_logging
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
logger = tb_logging.get_logger()

# HTML attributes that survive sanitization, keyed by tag name. Only
# links and images keep attributes; every other allowed tag is stripped
# down to the bare element.
_ALLOWED_ATTRIBUTES = {
    "a": ["href", "title"],
    "img": ["src", "title", "alt"],
}

# HTML tags that survive sanitization. Anything not listed here is
# removed by the bleach `Cleaner` configured below (covers the output
# of the Markdown converter, including tables and fenced code blocks).
_ALLOWED_TAGS = [
    "ul",
    "ol",
    "li",
    "p",
    "pre",
    "code",
    "blockquote",
    "h1",
    "h2",
    "h3",
    "h4",
    "h5",
    "h6",
    "hr",
    "br",
    "strong",
    "em",
    "a",
    "img",
    "table",
    "thead",
    "tbody",
    "td",
    "tr",
    "th",
]
|
| 65 |
+
|
| 66 |
+
# Cache Markdown converter to avoid expensive initialization at each
|
| 67 |
+
# call to `markdown_to_safe_html`. Cache a different instance per thread.
|
| 68 |
+
class _MarkdownStore(threading.local):
    """Thread-local cache of a configured `markdown.Markdown` converter.

    Building the converter is expensive, so each thread constructs one
    in `__init__` (run once per thread by `threading.local`) and reuses
    it for every subsequent conversion.
    """

    def __init__(self):
        extensions = [
            "markdown.extensions.tables",
            "markdown.extensions.fenced_code",
        ]
        self.markdown = markdown.Markdown(extensions=extensions)


_MARKDOWN_STORE = _MarkdownStore()
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
# Cache Cleaner to avoid expensive initialization at each call to `clean`.
|
| 82 |
+
# Cache a different instance per thread.
|
| 83 |
+
class _CleanerStore(threading.local):
    """Thread-local cache of a configured bleach `Cleaner`.

    Cleaner construction is expensive, so each thread builds one
    (via `threading.local`'s per-thread `__init__`) and reuses it for
    every sanitization call.
    """

    def __init__(self):
        self.cleaner = Cleaner(
            tags=_ALLOWED_TAGS, attributes=_ALLOWED_ATTRIBUTES
        )


_CLEANER_STORE = _CleanerStore()
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def safe_html(unsafe_string):
    """Return the input as a str, sanitized for insertion into the DOM.

    Arguments:
      unsafe_string: A Unicode string or UTF-8--encoded bytestring
        possibly containing unsafe HTML markup.

    Returns:
      A string containing safe HTML.
    """
    # (Removed a dead `total_null_bytes = 0` local that was never read;
    # null-byte accounting belongs to `markdowns_to_safe_html` only.)
    if isinstance(unsafe_string, bytes):
        unsafe_string = unsafe_string.decode("utf-8")
    return _CLEANER_STORE.cleaner.clean(unsafe_string)
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def markdown_to_safe_html(markdown_string):
    """Convert Markdown to HTML that's safe to splice into the DOM.

    Arguments:
      markdown_string: A Unicode string or UTF-8--encoded bytestring
        containing Markdown source. Markdown tables are supported.

    Returns:
      A string containing safe HTML.
    """

    def take_only(htmls):
        # With a single input document, "combining" is just taking it.
        return htmls[0]

    return markdowns_to_safe_html([markdown_string], take_only)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def markdowns_to_safe_html(markdown_strings, combine):
    """Convert multiple Markdown documents to one safe HTML document.

    One could also achieve this by calling `markdown_to_safe_html`
    multiple times and combining the results. Compared to that approach,
    this function may be faster, because HTML sanitization (which can be
    expensive) is performed only once rather than once per input. It may
    also be less precise: if one of the input documents has unsafe HTML
    that is sanitized away, that sanitization might affect other
    documents, even if those documents are safe.

    Args:
      markdown_strings: List of Markdown source strings to convert, as
        Unicode strings or UTF-8--encoded bytestrings. Markdown tables
        are supported.
      combine: Callback function that takes a list of unsafe HTML
        strings of the same shape as `markdown_strings` and combines
        them into a single unsafe HTML string, which will be sanitized
        and returned.

    Returns:
      A string containing safe HTML.
    """
    rendered_htmls = []
    discarded_null_bytes = 0

    for source in markdown_strings:
        if isinstance(source, bytes):
            # Convert to utf-8 whenever we have a binary input.
            decoded = source.decode("utf-8")
            # Null bytes usually mean we were handed a bad encoding;
            # strip them and keep count so we can warn below.
            source = decoded.replace("\x00", "")
            discarded_null_bytes += len(decoded) - len(source)
        rendered_htmls.append(_MARKDOWN_STORE.markdown.convert(source))

    unsafe_combined = combine(rendered_htmls)
    sanitized_combined = _CLEANER_STORE.cleaner.clean(unsafe_combined)

    if discarded_null_bytes:
        warning = (
            "<!-- WARNING: discarded %d null bytes in markdown string "
            "after UTF-8 decoding -->\n"
        ) % discarded_null_bytes
    else:
        warning = ""
    return warning + sanitized_combined
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def context(environ):
    """Get a TensorBoard `RequestContext` from a WSGI environment.

    Args:
      environ: A WSGI environment `dict`. For a Werkzeug request, this
        is `request.environ`.

    Returns:
      A `RequestContext` value.
    """
    request_context = _context.from_environ(environ)
    return request_context
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
def experiment_id(environ):
    """Determine the experiment ID associated with a WSGI request.

    Each request to TensorBoard has an associated experiment ID, which is
    always a string and may be empty. This experiment ID should be passed
    to data providers.

    Args:
      environ: A WSGI environment `dict`. For a Werkzeug request, this is
        `request.environ`.

    Returns:
      A experiment ID, as a possibly-empty `str`.
    """
    eid = environ.get(_experiment_id.WSGI_ENVIRON_KEY, "")
    return eid
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
class _MetadataVersionChecker:
|
| 199 |
+
"""TensorBoard-internal utility for warning when data is too new.
|
| 200 |
+
|
| 201 |
+
Specify a maximum known `version` number as stored in summary
|
| 202 |
+
metadata, and automatically reject and warn on data from newer
|
| 203 |
+
versions. This keeps a (single) bit of internal state to handle
|
| 204 |
+
logging a warning to the user at most once.
|
| 205 |
+
|
| 206 |
+
This should only be used by plugins bundled with TensorBoard, since
|
| 207 |
+
it may instruct users to upgrade their copy of TensorBoard.
|
| 208 |
+
"""
|
| 209 |
+
|
| 210 |
+
def __init__(self, data_kind, latest_known_version):
|
| 211 |
+
"""Initialize a `_MetadataVersionChecker`.
|
| 212 |
+
|
| 213 |
+
Args:
|
| 214 |
+
data_kind: A human-readable description of the kind of data
|
| 215 |
+
being read, like "scalar" or "histogram" or "PR curve".
|
| 216 |
+
latest_known_version: Highest tolerated value of `version`,
|
| 217 |
+
like `0`.
|
| 218 |
+
"""
|
| 219 |
+
self._data_kind = data_kind
|
| 220 |
+
self._latest_known_version = latest_known_version
|
| 221 |
+
self._warned = False
|
| 222 |
+
|
| 223 |
+
def ok(self, version, run, tag):
|
| 224 |
+
"""Test whether `version` is permitted, else complain."""
|
| 225 |
+
if 0 <= version <= self._latest_known_version:
|
| 226 |
+
return True
|
| 227 |
+
self._maybe_warn(version, run, tag)
|
| 228 |
+
return False
|
| 229 |
+
|
| 230 |
+
def _maybe_warn(self, version, run, tag):
|
| 231 |
+
if self._warned:
|
| 232 |
+
return
|
| 233 |
+
self._warned = True
|
| 234 |
+
logger.warning(
|
| 235 |
+
"Some %s data is too new to be read by this version of TensorBoard. "
|
| 236 |
+
"Upgrading TensorBoard may fix this. "
|
| 237 |
+
"(sample: run %r, tag %r, data version %r)",
|
| 238 |
+
self._data_kind,
|
| 239 |
+
run,
|
| 240 |
+
tag,
|
| 241 |
+
version,
|
| 242 |
+
)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/program.py
ADDED
|
@@ -0,0 +1,893 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
"""Utilities for TensorBoard command line program.
|
| 16 |
+
|
| 17 |
+
This is a lightweight module for bringing up a TensorBoard HTTP server
|
| 18 |
+
or emulating the `tensorboard` shell command.
|
| 19 |
+
|
| 20 |
+
Those wishing to create custom builds of TensorBoard can use this module
|
| 21 |
+
by swapping out `tensorboard.main` with the custom definition that
|
| 22 |
+
modifies the set of plugins and static assets.
|
| 23 |
+
|
| 24 |
+
This module does not depend on first-party plugins or the default web
|
| 25 |
+
server assets. Those are defined in `tensorboard.default`.
|
| 26 |
+
"""
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
from abc import ABCMeta
|
| 30 |
+
from abc import abstractmethod
|
| 31 |
+
import argparse
|
| 32 |
+
import atexit
|
| 33 |
+
from collections import defaultdict
|
| 34 |
+
import errno
|
| 35 |
+
import logging
|
| 36 |
+
import mimetypes
|
| 37 |
+
import os
|
| 38 |
+
import shlex
|
| 39 |
+
import signal
|
| 40 |
+
import socket
|
| 41 |
+
import sys
|
| 42 |
+
import threading
|
| 43 |
+
import time
|
| 44 |
+
import urllib.parse
|
| 45 |
+
|
| 46 |
+
from absl import flags as absl_flags
|
| 47 |
+
from absl.flags import argparse_flags
|
| 48 |
+
from werkzeug import serving
|
| 49 |
+
|
| 50 |
+
from tensorboard import manager
|
| 51 |
+
from tensorboard import version
|
| 52 |
+
from tensorboard.backend import application
|
| 53 |
+
from tensorboard.backend.event_processing import data_ingester as local_ingester
|
| 54 |
+
from tensorboard.backend.event_processing import event_file_inspector as efi
|
| 55 |
+
from tensorboard.data import server_ingester
|
| 56 |
+
from tensorboard.plugins.core import core_plugin
|
| 57 |
+
from tensorboard.util import tb_logging
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
logger = tb_logging.get_logger()

# Default subcommand name. This is a user-facing CLI and should not change.
_SERVE_SUBCOMMAND_NAME = "serve"
# Internal flag name used to store which subcommand was invoked.
_SUBCOMMAND_FLAG = "__tensorboard_subcommand"

# Message printed when we actually use the data server, so that users are not
# caught terribly by surprise.
_DATA_SERVER_ADVISORY_MESSAGE = """
NOTE: Using experimental fast data loading logic. To disable, pass
"--load_fast=false" and report issues on GitHub. More details:
https://github.com/tensorflow/tensorboard/issues/4784

"""

# Message printed with `--load_fast=true` if the data server could not start up.
# To be formatted with one `DataServerStartupError` interpoland.
_DATA_SERVER_STARTUP_ERROR_MESSAGE_TEMPLATE = """\
Could not start data server: %s.
Try with --load_fast=false and report issues on GitHub. Details:
https://github.com/tensorflow/tensorboard/issues/4784
"""
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
class TensorBoard(object):
|
| 86 |
+
"""Class for running TensorBoard.
|
| 87 |
+
|
| 88 |
+
Fields:
|
| 89 |
+
plugin_loaders: Set from plugins passed to constructor.
|
| 90 |
+
assets_zip_provider: Set by constructor.
|
| 91 |
+
server_class: Set by constructor.
|
| 92 |
+
flags: An argparse.Namespace set by the configure() method.
|
| 93 |
+
cache_key: As `manager.cache_key`; set by the configure() method.
|
| 94 |
+
"""
|
| 95 |
+
|
| 96 |
+
def __init__(
|
| 97 |
+
self,
|
| 98 |
+
plugins=None,
|
| 99 |
+
assets_zip_provider=None,
|
| 100 |
+
server_class=None,
|
| 101 |
+
subcommands=None,
|
| 102 |
+
):
|
| 103 |
+
"""Creates new instance.
|
| 104 |
+
|
| 105 |
+
Args:
|
| 106 |
+
plugins: A list of TensorBoard plugins to load, as TBPlugin classes or
|
| 107 |
+
TBLoader instances or classes. If not specified, defaults to first-party
|
| 108 |
+
plugins.
|
| 109 |
+
assets_zip_provider: A function that provides a zip file containing
|
| 110 |
+
assets to the application. If `None`, the default TensorBoard web
|
| 111 |
+
assets will be used. (If building from source, your binary must
|
| 112 |
+
explicitly depend on `//tensorboard:assets_lib` if you pass `None`.)
|
| 113 |
+
server_class: An optional factory for a `TensorBoardServer` to use
|
| 114 |
+
for serving the TensorBoard WSGI app. If provided, its callable
|
| 115 |
+
signature should match that of `TensorBoardServer.__init__`.
|
| 116 |
+
|
| 117 |
+
:type plugins:
|
| 118 |
+
list[
|
| 119 |
+
base_plugin.TBLoader | Type[base_plugin.TBLoader] |
|
| 120 |
+
Type[base_plugin.TBPlugin]
|
| 121 |
+
]
|
| 122 |
+
"""
|
| 123 |
+
if plugins is None:
|
| 124 |
+
from tensorboard import default
|
| 125 |
+
|
| 126 |
+
plugins = default.get_plugins()
|
| 127 |
+
if assets_zip_provider is None:
|
| 128 |
+
try:
|
| 129 |
+
from tensorboard import assets
|
| 130 |
+
except ImportError as e:
|
| 131 |
+
# `tensorboard.assets` is not a strict Bazel dep; clients are
|
| 132 |
+
# required to either depend on `//tensorboard:assets_lib` or
|
| 133 |
+
# pass a valid assets provider.
|
| 134 |
+
raise ImportError(
|
| 135 |
+
"No `assets_zip_provider` given, but `tensorboard.assets` "
|
| 136 |
+
"could not be imported to resolve defaults"
|
| 137 |
+
) from e
|
| 138 |
+
assets_zip_provider = assets.get_default_assets_zip_provider()
|
| 139 |
+
if server_class is None:
|
| 140 |
+
server_class = create_port_scanning_werkzeug_server
|
| 141 |
+
if subcommands is None:
|
| 142 |
+
subcommands = []
|
| 143 |
+
self.plugin_loaders = [
|
| 144 |
+
application.make_plugin_loader(p) for p in plugins
|
| 145 |
+
]
|
| 146 |
+
self.assets_zip_provider = assets_zip_provider
|
| 147 |
+
self.server_class = server_class
|
| 148 |
+
self.subcommands = {}
|
| 149 |
+
for subcommand in subcommands:
|
| 150 |
+
name = subcommand.name()
|
| 151 |
+
if name in self.subcommands or name == _SERVE_SUBCOMMAND_NAME:
|
| 152 |
+
raise ValueError("Duplicate subcommand name: %r" % name)
|
| 153 |
+
self.subcommands[name] = subcommand
|
| 154 |
+
self.flags = None
|
| 155 |
+
|
| 156 |
+
def configure(self, argv=("",), **kwargs):
|
| 157 |
+
"""Configures TensorBoard behavior via flags.
|
| 158 |
+
|
| 159 |
+
This method will populate the "flags" property with an argparse.Namespace
|
| 160 |
+
representing flag values parsed from the provided argv list, overridden by
|
| 161 |
+
explicit flags from remaining keyword arguments.
|
| 162 |
+
|
| 163 |
+
Args:
|
| 164 |
+
argv: Can be set to CLI args equivalent to sys.argv; the first arg is
|
| 165 |
+
taken to be the name of the path being executed.
|
| 166 |
+
kwargs: Additional arguments will override what was parsed from
|
| 167 |
+
argv. They must be passed as Python data structures, e.g.
|
| 168 |
+
`foo=1` rather than `foo="1"`.
|
| 169 |
+
|
| 170 |
+
Returns:
|
| 171 |
+
Either argv[:1] if argv was non-empty, or [''] otherwise, as a mechanism
|
| 172 |
+
for absl.app.run() compatibility.
|
| 173 |
+
|
| 174 |
+
Raises:
|
| 175 |
+
ValueError: If flag values are invalid.
|
| 176 |
+
"""
|
| 177 |
+
|
| 178 |
+
base_parser = argparse_flags.ArgumentParser(
|
| 179 |
+
prog="tensorboard",
|
| 180 |
+
description=(
|
| 181 |
+
"TensorBoard is a suite of web applications for "
|
| 182 |
+
"inspecting and understanding your TensorFlow runs "
|
| 183 |
+
"and graphs. https://github.com/tensorflow/tensorboard "
|
| 184 |
+
),
|
| 185 |
+
)
|
| 186 |
+
subparsers = base_parser.add_subparsers(
|
| 187 |
+
help="TensorBoard subcommand (defaults to %r)"
|
| 188 |
+
% _SERVE_SUBCOMMAND_NAME
|
| 189 |
+
)
|
| 190 |
+
|
| 191 |
+
serve_subparser = subparsers.add_parser(
|
| 192 |
+
_SERVE_SUBCOMMAND_NAME,
|
| 193 |
+
help="start local TensorBoard server (default subcommand)",
|
| 194 |
+
)
|
| 195 |
+
serve_subparser.set_defaults(
|
| 196 |
+
**{_SUBCOMMAND_FLAG: _SERVE_SUBCOMMAND_NAME}
|
| 197 |
+
)
|
| 198 |
+
|
| 199 |
+
if len(argv) < 2 or argv[1].startswith("-"):
|
| 200 |
+
# This invocation, if valid, must not use any subcommands: we
|
| 201 |
+
# don't permit flags before the subcommand name.
|
| 202 |
+
serve_parser = base_parser
|
| 203 |
+
else:
|
| 204 |
+
# This invocation, if valid, must use a subcommand: we don't take
|
| 205 |
+
# any positional arguments to `serve`.
|
| 206 |
+
serve_parser = serve_subparser
|
| 207 |
+
|
| 208 |
+
for (name, subcommand) in self.subcommands.items():
|
| 209 |
+
subparser = subparsers.add_parser(
|
| 210 |
+
name,
|
| 211 |
+
help=subcommand.help(),
|
| 212 |
+
description=subcommand.description(),
|
| 213 |
+
)
|
| 214 |
+
subparser.set_defaults(**{_SUBCOMMAND_FLAG: name})
|
| 215 |
+
subcommand.define_flags(subparser)
|
| 216 |
+
|
| 217 |
+
for loader in self.plugin_loaders:
|
| 218 |
+
loader.define_flags(serve_parser)
|
| 219 |
+
|
| 220 |
+
arg0 = argv[0] if argv else ""
|
| 221 |
+
|
| 222 |
+
flags = base_parser.parse_args(argv[1:]) # Strip binary name from argv.
|
| 223 |
+
if getattr(flags, _SUBCOMMAND_FLAG, None) is None:
|
| 224 |
+
# Manually assign default value rather than using `set_defaults`
|
| 225 |
+
# on the base parser to work around Python bug #9351 on old
|
| 226 |
+
# versions of `argparse`: <https://bugs.python.org/issue9351>
|
| 227 |
+
setattr(flags, _SUBCOMMAND_FLAG, _SERVE_SUBCOMMAND_NAME)
|
| 228 |
+
|
| 229 |
+
self.cache_key = manager.cache_key(
|
| 230 |
+
working_directory=os.getcwd(),
|
| 231 |
+
arguments=argv[1:],
|
| 232 |
+
configure_kwargs=kwargs,
|
| 233 |
+
)
|
| 234 |
+
if arg0:
|
| 235 |
+
# Only expose main module Abseil flags as TensorBoard native flags.
|
| 236 |
+
# This is the same logic Abseil's ArgumentParser uses for determining
|
| 237 |
+
# which Abseil flags to include in the short helpstring.
|
| 238 |
+
for flag in set(absl_flags.FLAGS.get_key_flags_for_module(arg0)):
|
| 239 |
+
if hasattr(flags, flag.name):
|
| 240 |
+
raise ValueError("Conflicting Abseil flag: %s" % flag.name)
|
| 241 |
+
setattr(flags, flag.name, flag.value)
|
| 242 |
+
for k, v in kwargs.items():
|
| 243 |
+
if not hasattr(flags, k):
|
| 244 |
+
raise ValueError("Unknown TensorBoard flag: %s" % k)
|
| 245 |
+
setattr(flags, k, v)
|
| 246 |
+
if getattr(flags, _SUBCOMMAND_FLAG) == _SERVE_SUBCOMMAND_NAME:
|
| 247 |
+
for loader in self.plugin_loaders:
|
| 248 |
+
loader.fix_flags(flags)
|
| 249 |
+
self.flags = flags
|
| 250 |
+
return [arg0]
|
| 251 |
+
|
| 252 |
+
def main(self, ignored_argv=("",)):
|
| 253 |
+
"""Blocking main function for TensorBoard.
|
| 254 |
+
|
| 255 |
+
This method is called by `tensorboard.main.run_main`, which is the
|
| 256 |
+
standard entrypoint for the tensorboard command line program. The
|
| 257 |
+
configure() method must be called first.
|
| 258 |
+
|
| 259 |
+
Args:
|
| 260 |
+
ignored_argv: Do not pass. Required for Abseil compatibility.
|
| 261 |
+
|
| 262 |
+
Returns:
|
| 263 |
+
Process exit code, i.e. 0 if successful or non-zero on failure. In
|
| 264 |
+
practice, an exception will most likely be raised instead of
|
| 265 |
+
returning non-zero.
|
| 266 |
+
|
| 267 |
+
:rtype: int
|
| 268 |
+
"""
|
| 269 |
+
self._install_signal_handler(signal.SIGTERM, "SIGTERM")
|
| 270 |
+
self._fix_mime_types()
|
| 271 |
+
subcommand_name = getattr(self.flags, _SUBCOMMAND_FLAG)
|
| 272 |
+
if subcommand_name == _SERVE_SUBCOMMAND_NAME:
|
| 273 |
+
runner = self._run_serve_subcommand
|
| 274 |
+
else:
|
| 275 |
+
runner = self.subcommands[subcommand_name].run
|
| 276 |
+
return runner(self.flags) or 0
|
| 277 |
+
|
| 278 |
+
def _run_serve_subcommand(self, flags):
|
| 279 |
+
# TODO(#2801): Make `--version` a flag on only the base parser, not `serve`.
|
| 280 |
+
if flags.version_tb:
|
| 281 |
+
print(version.VERSION)
|
| 282 |
+
return 0
|
| 283 |
+
if flags.inspect:
|
| 284 |
+
# TODO(@wchargin): Convert `inspect` to a normal subcommand?
|
| 285 |
+
logger.info(
|
| 286 |
+
"Not bringing up TensorBoard, but inspecting event files."
|
| 287 |
+
)
|
| 288 |
+
event_file = os.path.expanduser(flags.event_file)
|
| 289 |
+
efi.inspect(flags.logdir, event_file, flags.tag)
|
| 290 |
+
return 0
|
| 291 |
+
try:
|
| 292 |
+
server = self._make_server()
|
| 293 |
+
server.print_serving_message()
|
| 294 |
+
self._register_info(server)
|
| 295 |
+
server.serve_forever()
|
| 296 |
+
return 0
|
| 297 |
+
except TensorBoardServerException as e:
|
| 298 |
+
logger.error(e.msg)
|
| 299 |
+
sys.stderr.write("ERROR: %s\n" % e.msg)
|
| 300 |
+
sys.stderr.flush()
|
| 301 |
+
return -1
|
| 302 |
+
|
| 303 |
+
def launch(self):
|
| 304 |
+
"""Python API for launching TensorBoard.
|
| 305 |
+
|
| 306 |
+
This method is the same as main() except it launches TensorBoard in
|
| 307 |
+
a separate permanent thread. The configure() method must be called
|
| 308 |
+
first.
|
| 309 |
+
|
| 310 |
+
Returns:
|
| 311 |
+
The URL of the TensorBoard web server.
|
| 312 |
+
|
| 313 |
+
:rtype: str
|
| 314 |
+
"""
|
| 315 |
+
# Make it easy to run TensorBoard inside other programs, e.g. Colab.
|
| 316 |
+
server = self._make_server()
|
| 317 |
+
thread = threading.Thread(
|
| 318 |
+
target=server.serve_forever, name="TensorBoard"
|
| 319 |
+
)
|
| 320 |
+
thread.daemon = True
|
| 321 |
+
thread.start()
|
| 322 |
+
return server.get_url()
|
| 323 |
+
|
| 324 |
+
def _register_info(self, server):
|
| 325 |
+
"""Write a TensorBoardInfo file and arrange for its cleanup.
|
| 326 |
+
|
| 327 |
+
Args:
|
| 328 |
+
server: The result of `self._make_server()`.
|
| 329 |
+
"""
|
| 330 |
+
server_url = urllib.parse.urlparse(server.get_url())
|
| 331 |
+
info = manager.TensorBoardInfo(
|
| 332 |
+
version=version.VERSION,
|
| 333 |
+
start_time=int(time.time()),
|
| 334 |
+
port=server_url.port,
|
| 335 |
+
pid=os.getpid(),
|
| 336 |
+
path_prefix=self.flags.path_prefix,
|
| 337 |
+
logdir=self.flags.logdir or self.flags.logdir_spec,
|
| 338 |
+
db=self.flags.db,
|
| 339 |
+
cache_key=self.cache_key,
|
| 340 |
+
)
|
| 341 |
+
atexit.register(manager.remove_info_file)
|
| 342 |
+
manager.write_info_file(info)
|
| 343 |
+
|
| 344 |
+
def _install_signal_handler(self, signal_number, signal_name):
|
| 345 |
+
"""Set a signal handler to gracefully exit on the given signal.
|
| 346 |
+
|
| 347 |
+
When this process receives the given signal, it will run `atexit`
|
| 348 |
+
handlers and then exit with `0`.
|
| 349 |
+
|
| 350 |
+
Args:
|
| 351 |
+
signal_number: The numeric code for the signal to handle, like
|
| 352 |
+
`signal.SIGTERM`.
|
| 353 |
+
signal_name: The human-readable signal name.
|
| 354 |
+
"""
|
| 355 |
+
# Note to maintainers: Google-internal code overrides this
|
| 356 |
+
# method (cf. cl/334534610). Double-check changes before
|
| 357 |
+
# modifying API.
|
| 358 |
+
|
| 359 |
+
old_signal_handler = None # set below
|
| 360 |
+
|
| 361 |
+
def handler(handled_signal_number, frame):
|
| 362 |
+
# In case we catch this signal again while running atexit
|
| 363 |
+
# handlers, take the hint and actually die.
|
| 364 |
+
signal.signal(signal_number, signal.SIG_DFL)
|
| 365 |
+
sys.stderr.write(
|
| 366 |
+
"TensorBoard caught %s; exiting...\n" % signal_name
|
| 367 |
+
)
|
| 368 |
+
# The main thread is the only non-daemon thread, so it suffices to
|
| 369 |
+
# exit hence.
|
| 370 |
+
if old_signal_handler not in (signal.SIG_IGN, signal.SIG_DFL):
|
| 371 |
+
old_signal_handler(handled_signal_number, frame)
|
| 372 |
+
sys.exit(0)
|
| 373 |
+
|
| 374 |
+
old_signal_handler = signal.signal(signal_number, handler)
|
| 375 |
+
|
| 376 |
+
def _fix_mime_types(self):
|
| 377 |
+
"""Fix incorrect entries in the `mimetypes` registry.
|
| 378 |
+
|
| 379 |
+
On Windows, the Python standard library's `mimetypes` reads in
|
| 380 |
+
mappings from file extension to MIME type from the Windows
|
| 381 |
+
registry. Other applications can and do write incorrect values
|
| 382 |
+
to this registry, which causes `mimetypes.guess_type` to return
|
| 383 |
+
incorrect values, which causes TensorBoard to fail to render on
|
| 384 |
+
the frontend.
|
| 385 |
+
|
| 386 |
+
This method hard-codes the correct mappings for certain MIME
|
| 387 |
+
types that are known to be either used by TensorBoard or
|
| 388 |
+
problematic in general.
|
| 389 |
+
"""
|
| 390 |
+
# Known to be problematic when Visual Studio is installed:
|
| 391 |
+
# <https://github.com/tensorflow/tensorboard/issues/3120>
|
| 392 |
+
mimetypes.add_type("text/javascript", ".js")
|
| 393 |
+
# Not known to be problematic, but used by TensorBoard:
|
| 394 |
+
mimetypes.add_type("font/woff2", ".woff2")
|
| 395 |
+
mimetypes.add_type("text/html", ".html")
|
| 396 |
+
|
| 397 |
+
def _start_subprocess_data_ingester(self):
|
| 398 |
+
"""Creates, starts, and returns a `SubprocessServerDataIngester`."""
|
| 399 |
+
flags = self.flags
|
| 400 |
+
server_binary = server_ingester.get_server_binary()
|
| 401 |
+
ingester = server_ingester.SubprocessServerDataIngester(
|
| 402 |
+
server_binary=server_binary,
|
| 403 |
+
logdir=flags.logdir,
|
| 404 |
+
reload_interval=flags.reload_interval,
|
| 405 |
+
channel_creds_type=flags.grpc_creds_type,
|
| 406 |
+
samples_per_plugin=flags.samples_per_plugin,
|
| 407 |
+
extra_flags=shlex.split(flags.extra_data_server_flags),
|
| 408 |
+
)
|
| 409 |
+
ingester.start()
|
| 410 |
+
return ingester
|
| 411 |
+
|
| 412 |
+
def _make_data_ingester(self):
|
| 413 |
+
"""Determines the right data ingester, starts it, and returns it."""
|
| 414 |
+
flags = self.flags
|
| 415 |
+
if flags.grpc_data_provider:
|
| 416 |
+
ingester = server_ingester.ExistingServerDataIngester(
|
| 417 |
+
flags.grpc_data_provider,
|
| 418 |
+
channel_creds_type=flags.grpc_creds_type,
|
| 419 |
+
)
|
| 420 |
+
ingester.start()
|
| 421 |
+
return ingester
|
| 422 |
+
|
| 423 |
+
if flags.load_fast == "true":
|
| 424 |
+
try:
|
| 425 |
+
return self._start_subprocess_data_ingester()
|
| 426 |
+
except server_ingester.NoDataServerError as e:
|
| 427 |
+
msg = "Option --load_fast=true not available: %s\n" % e
|
| 428 |
+
sys.stderr.write(msg)
|
| 429 |
+
sys.exit(1)
|
| 430 |
+
except server_ingester.DataServerStartupError as e:
|
| 431 |
+
msg = _DATA_SERVER_STARTUP_ERROR_MESSAGE_TEMPLATE % e
|
| 432 |
+
sys.stderr.write(msg)
|
| 433 |
+
sys.exit(1)
|
| 434 |
+
|
| 435 |
+
if flags.load_fast == "auto" and _should_use_data_server(flags):
|
| 436 |
+
try:
|
| 437 |
+
ingester = self._start_subprocess_data_ingester()
|
| 438 |
+
sys.stderr.write(_DATA_SERVER_ADVISORY_MESSAGE)
|
| 439 |
+
sys.stderr.flush()
|
| 440 |
+
return ingester
|
| 441 |
+
except server_ingester.NoDataServerError as e:
|
| 442 |
+
logger.info("No data server: %s", e)
|
| 443 |
+
except server_ingester.DataServerStartupError as e:
|
| 444 |
+
logger.info(
|
| 445 |
+
"Data server error: %s; falling back to multiplexer", e
|
| 446 |
+
)
|
| 447 |
+
|
| 448 |
+
ingester = local_ingester.LocalDataIngester(flags)
|
| 449 |
+
ingester.start()
|
| 450 |
+
return ingester
|
| 451 |
+
|
| 452 |
+
def _make_data_provider(self):
|
| 453 |
+
"""Returns `(data_provider, deprecated_multiplexer)`."""
|
| 454 |
+
ingester = self._make_data_ingester()
|
| 455 |
+
# Stash ingester so that it can avoid GCing Windows file handles.
|
| 456 |
+
# (See comment in `SubprocessServerDataIngester.start` for details.)
|
| 457 |
+
self._ingester = ingester
|
| 458 |
+
|
| 459 |
+
deprecated_multiplexer = None
|
| 460 |
+
if isinstance(ingester, local_ingester.LocalDataIngester):
|
| 461 |
+
deprecated_multiplexer = ingester.deprecated_multiplexer
|
| 462 |
+
return (ingester.data_provider, deprecated_multiplexer)
|
| 463 |
+
|
| 464 |
+
def _make_server(self):
|
| 465 |
+
"""Constructs the TensorBoard WSGI app and instantiates the server."""
|
| 466 |
+
(data_provider, deprecated_multiplexer) = self._make_data_provider()
|
| 467 |
+
app = application.TensorBoardWSGIApp(
|
| 468 |
+
self.flags,
|
| 469 |
+
self.plugin_loaders,
|
| 470 |
+
data_provider,
|
| 471 |
+
self.assets_zip_provider,
|
| 472 |
+
deprecated_multiplexer,
|
| 473 |
+
)
|
| 474 |
+
return self.server_class(app, self.flags)
|
| 475 |
+
|
| 476 |
+
|
| 477 |
+
def _should_use_data_server(flags):
|
| 478 |
+
if flags.logdir_spec and not flags.logdir:
|
| 479 |
+
logger.info(
|
| 480 |
+
"Note: --logdir_spec is not supported with --load_fast behavior; "
|
| 481 |
+
"falling back to slower Python-only load path. To use the data "
|
| 482 |
+
"server, replace --logdir_spec with --logdir."
|
| 483 |
+
)
|
| 484 |
+
return False
|
| 485 |
+
if not flags.logdir:
|
| 486 |
+
# Using some other legacy mode; not supported.
|
| 487 |
+
return False
|
| 488 |
+
if "://" in flags.logdir and not flags.logdir.startswith("gs://"):
|
| 489 |
+
logger.info(
|
| 490 |
+
"Note: --load_fast behavior only supports local and GCS (gs://) "
|
| 491 |
+
"paths; falling back to slower Python-only load path."
|
| 492 |
+
)
|
| 493 |
+
return False
|
| 494 |
+
if flags.detect_file_replacement is True:
|
| 495 |
+
logger.info(
|
| 496 |
+
"Note: --detect_file_replacement=true is not supported with "
|
| 497 |
+
"--load_fast behavior; falling back to slower Python-only load "
|
| 498 |
+
"path."
|
| 499 |
+
)
|
| 500 |
+
return False
|
| 501 |
+
return True
|
| 502 |
+
|
| 503 |
+
|
| 504 |
+
class TensorBoardSubcommand(metaclass=ABCMeta):
|
| 505 |
+
"""Experimental private API for defining subcommands to tensorboard(1)."""
|
| 506 |
+
|
| 507 |
+
@abstractmethod
|
| 508 |
+
def name(self):
|
| 509 |
+
"""Name of this subcommand, as specified on the command line.
|
| 510 |
+
|
| 511 |
+
This must be unique across all subcommands.
|
| 512 |
+
|
| 513 |
+
Returns:
|
| 514 |
+
A string.
|
| 515 |
+
"""
|
| 516 |
+
pass
|
| 517 |
+
|
| 518 |
+
@abstractmethod
|
| 519 |
+
def define_flags(self, parser):
|
| 520 |
+
"""Configure an argument parser for this subcommand.
|
| 521 |
+
|
| 522 |
+
Flags whose names start with two underscores (e.g., `__foo`) are
|
| 523 |
+
reserved for use by the runtime and must not be defined by
|
| 524 |
+
subcommands.
|
| 525 |
+
|
| 526 |
+
Args:
|
| 527 |
+
parser: An `argparse.ArgumentParser` scoped to this subcommand,
|
| 528 |
+
which this function should mutate.
|
| 529 |
+
"""
|
| 530 |
+
pass
|
| 531 |
+
|
| 532 |
+
@abstractmethod
|
| 533 |
+
def run(self, flags):
|
| 534 |
+
"""Execute this subcommand with user-provided flags.
|
| 535 |
+
|
| 536 |
+
Args:
|
| 537 |
+
flags: An `argparse.Namespace` object with all defined flags.
|
| 538 |
+
|
| 539 |
+
Returns:
|
| 540 |
+
An `int` exit code, or `None` as an alias for `0`.
|
| 541 |
+
"""
|
| 542 |
+
pass
|
| 543 |
+
|
| 544 |
+
def help(self):
|
| 545 |
+
"""Short, one-line help text to display on `tensorboard --help`."""
|
| 546 |
+
return None
|
| 547 |
+
|
| 548 |
+
def description(self):
|
| 549 |
+
"""Description to display on `tensorboard SUBCOMMAND --help`."""
|
| 550 |
+
return None
|
| 551 |
+
|
| 552 |
+
|
| 553 |
+
class TensorBoardServer(metaclass=ABCMeta):
|
| 554 |
+
"""Class for customizing TensorBoard WSGI app serving."""
|
| 555 |
+
|
| 556 |
+
@abstractmethod
|
| 557 |
+
def __init__(self, wsgi_app, flags):
|
| 558 |
+
"""Create a flag-configured HTTP server for TensorBoard's WSGI app.
|
| 559 |
+
|
| 560 |
+
Args:
|
| 561 |
+
wsgi_app: The TensorBoard WSGI application to create a server for.
|
| 562 |
+
flags: argparse.Namespace instance of TensorBoard flags.
|
| 563 |
+
"""
|
| 564 |
+
raise NotImplementedError()
|
| 565 |
+
|
| 566 |
+
@abstractmethod
|
| 567 |
+
def serve_forever(self):
|
| 568 |
+
"""Blocking call to start serving the TensorBoard server."""
|
| 569 |
+
raise NotImplementedError()
|
| 570 |
+
|
| 571 |
+
@abstractmethod
|
| 572 |
+
def get_url(self):
|
| 573 |
+
"""Returns a URL at which this server should be reachable."""
|
| 574 |
+
raise NotImplementedError()
|
| 575 |
+
|
| 576 |
+
def print_serving_message(self):
|
| 577 |
+
"""Prints a user-friendly message prior to server start.
|
| 578 |
+
|
| 579 |
+
This will be called just before `serve_forever`.
|
| 580 |
+
"""
|
| 581 |
+
sys.stderr.write(
|
| 582 |
+
"TensorBoard %s at %s (Press CTRL+C to quit)\n"
|
| 583 |
+
% (version.VERSION, self.get_url())
|
| 584 |
+
)
|
| 585 |
+
sys.stderr.flush()
|
| 586 |
+
|
| 587 |
+
|
| 588 |
+
class TensorBoardServerException(Exception):
|
| 589 |
+
"""Exception raised by TensorBoardServer for user-friendly errors.
|
| 590 |
+
|
| 591 |
+
Subclasses of TensorBoardServer can raise this exception in order to
|
| 592 |
+
generate a clean error message for the user rather than a
|
| 593 |
+
stacktrace.
|
| 594 |
+
"""
|
| 595 |
+
|
| 596 |
+
def __init__(self, msg):
|
| 597 |
+
self.msg = msg
|
| 598 |
+
|
| 599 |
+
|
| 600 |
+
class TensorBoardPortInUseError(TensorBoardServerException):
|
| 601 |
+
"""Error raised when attempting to bind to a port that is in use.
|
| 602 |
+
|
| 603 |
+
This should be raised when it is expected that binding to another
|
| 604 |
+
similar port would succeed. It is used as a signal to indicate that
|
| 605 |
+
automatic port searching should continue rather than abort.
|
| 606 |
+
"""
|
| 607 |
+
|
| 608 |
+
pass
|
| 609 |
+
|
| 610 |
+
|
| 611 |
+
def with_port_scanning(cls):
|
| 612 |
+
"""Create a server factory that performs port scanning.
|
| 613 |
+
|
| 614 |
+
This function returns a callable whose signature matches the
|
| 615 |
+
specification of `TensorBoardServer.__init__`, using `cls` as an
|
| 616 |
+
underlying implementation. It passes through `flags` unchanged except
|
| 617 |
+
in the case that `flags.port is None`, in which case it repeatedly
|
| 618 |
+
instantiates the underlying server with new port suggestions.
|
| 619 |
+
|
| 620 |
+
Args:
|
| 621 |
+
cls: A valid implementation of `TensorBoardServer`. This class's
|
| 622 |
+
initializer should raise a `TensorBoardPortInUseError` upon
|
| 623 |
+
failing to bind to a port when it is expected that binding to
|
| 624 |
+
another nearby port might succeed.
|
| 625 |
+
|
| 626 |
+
The initializer for `cls` will only ever be invoked with `flags`
|
| 627 |
+
such that `flags.port is not None`.
|
| 628 |
+
|
| 629 |
+
Returns:
|
| 630 |
+
A function that implements the `__init__` contract of
|
| 631 |
+
`TensorBoardServer`.
|
| 632 |
+
"""
|
| 633 |
+
|
| 634 |
+
def init(wsgi_app, flags):
|
| 635 |
+
# base_port: what's the first port to which we should try to bind?
|
| 636 |
+
# should_scan: if that fails, shall we try additional ports?
|
| 637 |
+
# max_attempts: how many ports shall we try?
|
| 638 |
+
should_scan = flags.port is None
|
| 639 |
+
base_port = (
|
| 640 |
+
core_plugin.DEFAULT_PORT if flags.port is None else flags.port
|
| 641 |
+
)
|
| 642 |
+
|
| 643 |
+
if base_port > 0xFFFF:
|
| 644 |
+
raise TensorBoardServerException(
|
| 645 |
+
"TensorBoard cannot bind to port %d > %d" % (base_port, 0xFFFF)
|
| 646 |
+
)
|
| 647 |
+
max_attempts = 100 if should_scan else 1
|
| 648 |
+
base_port = min(base_port + max_attempts, 0x10000) - max_attempts
|
| 649 |
+
|
| 650 |
+
for port in range(base_port, base_port + max_attempts):
|
| 651 |
+
subflags = argparse.Namespace(**vars(flags))
|
| 652 |
+
subflags.port = port
|
| 653 |
+
try:
|
| 654 |
+
return cls(wsgi_app=wsgi_app, flags=subflags)
|
| 655 |
+
except TensorBoardPortInUseError:
|
| 656 |
+
if not should_scan:
|
| 657 |
+
raise
|
| 658 |
+
# All attempts failed to bind.
|
| 659 |
+
raise TensorBoardServerException(
|
| 660 |
+
"TensorBoard could not bind to any port around %s "
|
| 661 |
+
"(tried %d times)" % (base_port, max_attempts)
|
| 662 |
+
)
|
| 663 |
+
|
| 664 |
+
return init
|
| 665 |
+
|
| 666 |
+
|
| 667 |
+
class _WSGIRequestHandler(serving.WSGIRequestHandler):
|
| 668 |
+
"""Custom subclass of Werkzeug request handler to use HTTP/1.1."""
|
| 669 |
+
|
| 670 |
+
# The default on the http.server is HTTP/1.0 for legacy reasons:
|
| 671 |
+
# https://docs.python.org/3/library/http.server.html#http.server.BaseHTTPRequestHandler.protocol_version
|
| 672 |
+
# Override here to use HTTP/1.1 to avoid needing a new TCP socket and Python
|
| 673 |
+
# thread for each HTTP request. The tradeoff is we must always specify the
|
| 674 |
+
# Content-Length header, or do chunked encoding for streaming.
|
| 675 |
+
protocol_version = "HTTP/1.1"
|
| 676 |
+
|
| 677 |
+
|
| 678 |
+
class WerkzeugServer(serving.ThreadedWSGIServer, TensorBoardServer):
|
| 679 |
+
"""Implementation of TensorBoardServer using the Werkzeug dev server."""
|
| 680 |
+
|
| 681 |
+
# ThreadedWSGIServer handles this in werkzeug 0.12+ but we allow 0.11.x.
|
| 682 |
+
daemon_threads = True
|
| 683 |
+
|
| 684 |
+
def __init__(self, wsgi_app, flags):
|
| 685 |
+
self._flags = flags
|
| 686 |
+
host = flags.host
|
| 687 |
+
port = flags.port
|
| 688 |
+
|
| 689 |
+
self._auto_wildcard = flags.bind_all
|
| 690 |
+
if self._auto_wildcard:
|
| 691 |
+
# Serve on all interfaces, and attempt to serve both IPv4 and IPv6
|
| 692 |
+
# traffic through one socket.
|
| 693 |
+
host = self._get_wildcard_address(port)
|
| 694 |
+
elif host is None:
|
| 695 |
+
host = "localhost"
|
| 696 |
+
|
| 697 |
+
self._host = host
|
| 698 |
+
self._url = None # Will be set by get_url() below
|
| 699 |
+
|
| 700 |
+
self._fix_werkzeug_logging()
|
| 701 |
+
try:
|
| 702 |
+
super(WerkzeugServer, self).__init__(
|
| 703 |
+
host, port, wsgi_app, _WSGIRequestHandler
|
| 704 |
+
)
|
| 705 |
+
except socket.error as e:
|
| 706 |
+
if hasattr(errno, "EACCES") and e.errno == errno.EACCES:
|
| 707 |
+
raise TensorBoardServerException(
|
| 708 |
+
"TensorBoard must be run as superuser to bind to port %d"
|
| 709 |
+
% port
|
| 710 |
+
)
|
| 711 |
+
elif hasattr(errno, "EADDRINUSE") and e.errno == errno.EADDRINUSE:
|
| 712 |
+
if port == 0:
|
| 713 |
+
raise TensorBoardServerException(
|
| 714 |
+
"TensorBoard unable to find any open port"
|
| 715 |
+
)
|
| 716 |
+
else:
|
| 717 |
+
raise TensorBoardPortInUseError(
|
| 718 |
+
"TensorBoard could not bind to port %d, it was already in use"
|
| 719 |
+
% port
|
| 720 |
+
)
|
| 721 |
+
elif (
|
| 722 |
+
hasattr(errno, "EADDRNOTAVAIL")
|
| 723 |
+
and e.errno == errno.EADDRNOTAVAIL
|
| 724 |
+
):
|
| 725 |
+
raise TensorBoardServerException(
|
| 726 |
+
"TensorBoard could not bind to unavailable address %s"
|
| 727 |
+
% host
|
| 728 |
+
)
|
| 729 |
+
elif (
|
| 730 |
+
hasattr(errno, "EAFNOSUPPORT") and e.errno == errno.EAFNOSUPPORT
|
| 731 |
+
):
|
| 732 |
+
raise TensorBoardServerException(
|
| 733 |
+
"Tensorboard could not bind to unsupported address family %s"
|
| 734 |
+
% host
|
| 735 |
+
)
|
| 736 |
+
# Raise the raw exception if it wasn't identifiable as a user error.
|
| 737 |
+
raise
|
| 738 |
+
|
| 739 |
+
def _get_wildcard_address(self, port):
|
| 740 |
+
"""Returns a wildcard address for the port in question.
|
| 741 |
+
|
| 742 |
+
This will attempt to follow the best practice of calling
|
| 743 |
+
getaddrinfo() with a null host and AI_PASSIVE to request a
|
| 744 |
+
server-side socket wildcard address. If that succeeds, this
|
| 745 |
+
returns the first IPv6 address found, or if none, then returns
|
| 746 |
+
the first IPv4 address. If that fails, then this returns the
|
| 747 |
+
hardcoded address "::" if socket.has_ipv6 is True, else
|
| 748 |
+
"0.0.0.0".
|
| 749 |
+
"""
|
| 750 |
+
fallback_address = "::" if socket.has_ipv6 else "0.0.0.0"
|
| 751 |
+
if hasattr(socket, "AI_PASSIVE"):
|
| 752 |
+
try:
|
| 753 |
+
addrinfos = socket.getaddrinfo(
|
| 754 |
+
None,
|
| 755 |
+
port,
|
| 756 |
+
socket.AF_UNSPEC,
|
| 757 |
+
socket.SOCK_STREAM,
|
| 758 |
+
socket.IPPROTO_TCP,
|
| 759 |
+
socket.AI_PASSIVE,
|
| 760 |
+
)
|
| 761 |
+
except socket.gaierror as e:
|
| 762 |
+
logger.warning(
|
| 763 |
+
"Failed to auto-detect wildcard address, assuming %s: %s",
|
| 764 |
+
fallback_address,
|
| 765 |
+
str(e),
|
| 766 |
+
)
|
| 767 |
+
return fallback_address
|
| 768 |
+
addrs_by_family = defaultdict(list)
|
| 769 |
+
for family, _, _, _, sockaddr in addrinfos:
|
| 770 |
+
# Format of the "sockaddr" socket address varies by address family,
|
| 771 |
+
# but [0] is always the IP address portion.
|
| 772 |
+
addrs_by_family[family].append(sockaddr[0])
|
| 773 |
+
if hasattr(socket, "AF_INET6") and addrs_by_family[socket.AF_INET6]:
|
| 774 |
+
return addrs_by_family[socket.AF_INET6][0]
|
| 775 |
+
if hasattr(socket, "AF_INET") and addrs_by_family[socket.AF_INET]:
|
| 776 |
+
return addrs_by_family[socket.AF_INET][0]
|
| 777 |
+
logger.warning(
|
| 778 |
+
"Failed to auto-detect wildcard address, assuming %s",
|
| 779 |
+
fallback_address,
|
| 780 |
+
)
|
| 781 |
+
return fallback_address
|
| 782 |
+
|
| 783 |
+
def server_bind(self):
|
| 784 |
+
"""Override to set custom options on the socket."""
|
| 785 |
+
if self._flags.reuse_port:
|
| 786 |
+
try:
|
| 787 |
+
socket.SO_REUSEPORT
|
| 788 |
+
except AttributeError:
|
| 789 |
+
raise TensorBoardServerException(
|
| 790 |
+
"TensorBoard --reuse_port option is not supported on this platform"
|
| 791 |
+
)
|
| 792 |
+
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
|
| 793 |
+
|
| 794 |
+
# Enable IPV4 mapping for IPV6 sockets when desired.
|
| 795 |
+
# The main use case for this is so that when no host is specified,
|
| 796 |
+
# TensorBoard can listen on all interfaces for both IPv4 and IPv6
|
| 797 |
+
# connections, rather than having to choose v4 or v6 and hope the
|
| 798 |
+
# browser didn't choose the other one.
|
| 799 |
+
socket_is_v6 = (
|
| 800 |
+
hasattr(socket, "AF_INET6")
|
| 801 |
+
and self.socket.family == socket.AF_INET6
|
| 802 |
+
)
|
| 803 |
+
has_v6only_option = hasattr(socket, "IPPROTO_IPV6") and hasattr(
|
| 804 |
+
socket, "IPV6_V6ONLY"
|
| 805 |
+
)
|
| 806 |
+
if self._auto_wildcard and socket_is_v6 and has_v6only_option:
|
| 807 |
+
try:
|
| 808 |
+
self.socket.setsockopt(
|
| 809 |
+
socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0
|
| 810 |
+
)
|
| 811 |
+
except socket.error as e:
|
| 812 |
+
# Log a warning on failure to dual-bind, except for EAFNOSUPPORT
|
| 813 |
+
# since that's expected if IPv4 isn't supported at all (IPv6-only).
|
| 814 |
+
if (
|
| 815 |
+
hasattr(errno, "EAFNOSUPPORT")
|
| 816 |
+
and e.errno != errno.EAFNOSUPPORT
|
| 817 |
+
):
|
| 818 |
+
logger.warning(
|
| 819 |
+
"Failed to dual-bind to IPv4 wildcard: %s", str(e)
|
| 820 |
+
)
|
| 821 |
+
super(WerkzeugServer, self).server_bind()
|
| 822 |
+
|
| 823 |
+
def handle_error(self, request, client_address):
|
| 824 |
+
"""Override to get rid of noisy EPIPE errors."""
|
| 825 |
+
del request # unused
|
| 826 |
+
# Kludge to override a SocketServer.py method so we can get rid of noisy
|
| 827 |
+
# EPIPE errors. They're kind of a red herring as far as errors go. For
|
| 828 |
+
# example, `curl -N http://localhost:6006/ | head` will cause an EPIPE.
|
| 829 |
+
exc_info = sys.exc_info()
|
| 830 |
+
e = exc_info[1]
|
| 831 |
+
if isinstance(e, IOError) and e.errno == errno.EPIPE:
|
| 832 |
+
logger.warning(
|
| 833 |
+
"EPIPE caused by %s in HTTP serving" % str(client_address)
|
| 834 |
+
)
|
| 835 |
+
else:
|
| 836 |
+
logger.error("HTTP serving error", exc_info=exc_info)
|
| 837 |
+
|
| 838 |
+
def get_url(self):
|
| 839 |
+
if not self._url:
|
| 840 |
+
if self._auto_wildcard:
|
| 841 |
+
display_host = socket.getfqdn()
|
| 842 |
+
# Confirm that the connection is open, otherwise change to `localhost`
|
| 843 |
+
try:
|
| 844 |
+
socket.create_connection(
|
| 845 |
+
(display_host, self.server_port), timeout=1
|
| 846 |
+
)
|
| 847 |
+
except socket.error as e:
|
| 848 |
+
display_host = "localhost"
|
| 849 |
+
|
| 850 |
+
else:
|
| 851 |
+
host = self._host
|
| 852 |
+
display_host = (
|
| 853 |
+
"[%s]" % host
|
| 854 |
+
if ":" in host and not host.startswith("[")
|
| 855 |
+
else host
|
| 856 |
+
)
|
| 857 |
+
self._url = "http://%s:%d%s/" % (
|
| 858 |
+
display_host,
|
| 859 |
+
self.server_port,
|
| 860 |
+
self._flags.path_prefix.rstrip("/"),
|
| 861 |
+
)
|
| 862 |
+
return self._url
|
| 863 |
+
|
| 864 |
+
def print_serving_message(self):
|
| 865 |
+
if self._flags.host is None and not self._flags.bind_all:
|
| 866 |
+
sys.stderr.write(
|
| 867 |
+
"Serving TensorBoard on localhost; to expose to the network, "
|
| 868 |
+
"use a proxy or pass --bind_all\n"
|
| 869 |
+
)
|
| 870 |
+
sys.stderr.flush()
|
| 871 |
+
super(WerkzeugServer, self).print_serving_message()
|
| 872 |
+
|
| 873 |
+
def _fix_werkzeug_logging(self):
|
| 874 |
+
"""Fix werkzeug logging setup so it inherits TensorBoard's log level.
|
| 875 |
+
|
| 876 |
+
This addresses a change in werkzeug 0.15.0+ [1] that causes it set its own
|
| 877 |
+
log level to INFO regardless of the root logger configuration. We instead
|
| 878 |
+
want werkzeug to inherit TensorBoard's root logger log level (set via absl
|
| 879 |
+
to WARNING by default).
|
| 880 |
+
|
| 881 |
+
[1]: https://github.com/pallets/werkzeug/commit/4cf77d25858ff46ac7e9d64ade054bf05b41ce12
|
| 882 |
+
"""
|
| 883 |
+
# Log once at DEBUG to force werkzeug to initialize its singleton logger,
|
| 884 |
+
# which sets the logger level to INFO it if is unset, and then access that
|
| 885 |
+
# object via logging.getLogger('werkzeug') to durably revert the level to
|
| 886 |
+
# unset (and thus make messages logged to it inherit the root logger level).
|
| 887 |
+
self.log(
|
| 888 |
+
"debug", "Fixing werkzeug logger to inherit TensorBoard log level"
|
| 889 |
+
)
|
| 890 |
+
logging.getLogger("werkzeug").setLevel(logging.NOTSET)
|
| 891 |
+
|
| 892 |
+
|
| 893 |
+
create_port_scanning_werkzeug_server = with_port_scanning(WerkzeugServer)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/tensorboard/version.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
# ==============================================================================
|
| 15 |
+
|
| 16 |
+
"""Contains the version string."""
|
| 17 |
+
|
| 18 |
+
VERSION = "2.11.2"
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_C.cpython-38-x86_64-linux-gnu.so
ADDED
|
Binary file (37.4 kB). View file
|
|
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_C_flatbuffer.cpython-38-x86_64-linux-gnu.so
ADDED
|
Binary file (37.4 kB). View file
|
|
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/__config__.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def show():
|
| 5 |
+
"""
|
| 6 |
+
Return a human-readable string with descriptions of the
|
| 7 |
+
configuration of PyTorch.
|
| 8 |
+
"""
|
| 9 |
+
return torch._C._show_config()
|
| 10 |
+
|
| 11 |
+
# TODO: In principle, we could provide more structured version/config
|
| 12 |
+
# information here. For now only CXX_FLAGS is exposed, as Timer
|
| 13 |
+
# uses them.
|
| 14 |
+
def _cxx_flags():
|
| 15 |
+
"""Returns the CXX_FLAGS used when building PyTorch."""
|
| 16 |
+
return torch._C._cxx_flags()
|
| 17 |
+
|
| 18 |
+
def parallel_info():
|
| 19 |
+
r"""Returns detailed string with parallelization settings"""
|
| 20 |
+
return torch._C._parallel_info()
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/__future__.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This global flag controls whether to assign new tensors to the parameters
|
| 3 |
+
instead of changing the existing parameters in-place when converting an `nn.Module`
|
| 4 |
+
using the following methods:
|
| 5 |
+
1. `module.cuda()` / `.cpu()` (for moving `module` between devices)
|
| 6 |
+
2. `module.float()` / `.double()` / `.half()` (for converting `module` to a different dtype)
|
| 7 |
+
3. `module.to()` / `.type()` (for changing `module`'s device or dtype)
|
| 8 |
+
4. `module._apply(fn)` (for generic functions applied to `module`)
|
| 9 |
+
|
| 10 |
+
Default: False
|
| 11 |
+
"""
|
| 12 |
+
_overwrite_module_params_on_conversion = False
|
| 13 |
+
|
| 14 |
+
def set_overwrite_module_params_on_conversion(value):
|
| 15 |
+
global _overwrite_module_params_on_conversion
|
| 16 |
+
_overwrite_module_params_on_conversion = value
|
| 17 |
+
|
| 18 |
+
def get_overwrite_module_params_on_conversion():
|
| 19 |
+
return _overwrite_module_params_on_conversion
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/__init__.py
ADDED
|
@@ -0,0 +1,960 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
r"""
|
| 3 |
+
The torch package contains data structures for multi-dimensional
|
| 4 |
+
tensors and defines mathematical operations over these tensors.
|
| 5 |
+
Additionally, it provides many utilities for efficient serializing of
|
| 6 |
+
Tensors and arbitrary types, and other useful utilities.
|
| 7 |
+
|
| 8 |
+
It has a CUDA counterpart, that enables you to run your tensor computations
|
| 9 |
+
on an NVIDIA GPU with compute capability >= 3.0.
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
import os
|
| 13 |
+
import sys
|
| 14 |
+
import platform
|
| 15 |
+
import textwrap
|
| 16 |
+
import ctypes
|
| 17 |
+
import warnings
|
| 18 |
+
import inspect
|
| 19 |
+
if sys.version_info < (3,):
|
| 20 |
+
raise Exception("Python 2 has reached end-of-life and is no longer supported by PyTorch.")
|
| 21 |
+
|
| 22 |
+
from ._utils import _import_dotted_name, classproperty
|
| 23 |
+
from ._utils_internal import get_file_path, prepare_multiprocessing_environment, \
|
| 24 |
+
USE_RTLD_GLOBAL_WITH_LIBTORCH, USE_GLOBAL_DEPS
|
| 25 |
+
# TODO(torch_deploy) figure out how to freeze version.py in fbcode build
|
| 26 |
+
if sys.executable == 'torch_deploy':
|
| 27 |
+
__version__ = "torch-deploy-1.8"
|
| 28 |
+
else:
|
| 29 |
+
from .torch_version import __version__ as __version__
|
| 30 |
+
|
| 31 |
+
from ._six import string_classes as _string_classes
|
| 32 |
+
|
| 33 |
+
from typing import Set, Type, TYPE_CHECKING, Union, Callable
|
| 34 |
+
import builtins
|
| 35 |
+
|
| 36 |
+
__all__ = [
|
| 37 |
+
'typename', 'is_tensor', 'is_storage', 'set_default_tensor_type',
|
| 38 |
+
'set_rng_state', 'get_rng_state', 'manual_seed', 'initial_seed', 'seed',
|
| 39 |
+
'save', 'load', 'set_printoptions', 'chunk', 'split', 'stack', 'matmul',
|
| 40 |
+
'no_grad', 'enable_grad', 'rand', 'randn', 'inference_mode',
|
| 41 |
+
'DoubleStorage', 'FloatStorage', 'LongStorage', 'IntStorage',
|
| 42 |
+
'ShortStorage', 'CharStorage', 'ByteStorage', 'BoolStorage',
|
| 43 |
+
'_TypedStorage',
|
| 44 |
+
'DoubleTensor', 'FloatTensor', 'LongTensor', 'IntTensor',
|
| 45 |
+
'ShortTensor', 'CharTensor', 'ByteTensor', 'BoolTensor', 'Tensor',
|
| 46 |
+
'lobpcg', 'use_deterministic_algorithms',
|
| 47 |
+
'are_deterministic_algorithms_enabled',
|
| 48 |
+
'is_deterministic_algorithms_warn_only_enabled',
|
| 49 |
+
'set_deterministic_debug_mode', 'get_deterministic_debug_mode',
|
| 50 |
+
'set_float32_matmul_precision', 'get_float32_matmul_precision',
|
| 51 |
+
'set_warn_always', 'is_warn_always_enabled',
|
| 52 |
+
]
|
| 53 |
+
|
| 54 |
+
################################################################################
|
| 55 |
+
# Load the extension module
|
| 56 |
+
################################################################################
|
| 57 |
+
|
| 58 |
+
if sys.platform == 'win32':
|
| 59 |
+
pfiles_path = os.getenv('ProgramFiles', 'C:\\Program Files')
|
| 60 |
+
py_dll_path = os.path.join(sys.exec_prefix, 'Library', 'bin')
|
| 61 |
+
th_dll_path = os.path.join(os.path.dirname(__file__), 'lib')
|
| 62 |
+
|
| 63 |
+
# When users create a virtualenv that inherits the base environment,
|
| 64 |
+
# we will need to add the corresponding library directory into
|
| 65 |
+
# DLL search directories. Otherwise, it will rely on `PATH` which
|
| 66 |
+
# is dependent on user settings.
|
| 67 |
+
if sys.exec_prefix != sys.base_exec_prefix:
|
| 68 |
+
base_py_dll_path = os.path.join(sys.base_exec_prefix, 'Library', 'bin')
|
| 69 |
+
else:
|
| 70 |
+
base_py_dll_path = ''
|
| 71 |
+
|
| 72 |
+
dll_paths = list(filter(os.path.exists, [th_dll_path, py_dll_path, base_py_dll_path]))
|
| 73 |
+
|
| 74 |
+
if all([not os.path.exists(os.path.join(p, 'nvToolsExt64_1.dll')) for p in dll_paths]):
|
| 75 |
+
nvtoolsext_dll_path = os.path.join(
|
| 76 |
+
os.getenv('NVTOOLSEXT_PATH', os.path.join(pfiles_path, 'NVIDIA Corporation', 'NvToolsExt')), 'bin', 'x64')
|
| 77 |
+
else:
|
| 78 |
+
nvtoolsext_dll_path = ''
|
| 79 |
+
|
| 80 |
+
from .version import cuda as cuda_version
|
| 81 |
+
import glob
|
| 82 |
+
if cuda_version and all([not glob.glob(os.path.join(p, 'cudart64*.dll')) for p in dll_paths]):
|
| 83 |
+
cuda_version_1 = cuda_version.replace('.', '_')
|
| 84 |
+
cuda_path_var = 'CUDA_PATH_V' + cuda_version_1
|
| 85 |
+
default_path = os.path.join(pfiles_path, 'NVIDIA GPU Computing Toolkit', 'CUDA', 'v' + cuda_version)
|
| 86 |
+
cuda_path = os.path.join(os.getenv(cuda_path_var, default_path), 'bin')
|
| 87 |
+
else:
|
| 88 |
+
cuda_path = ''
|
| 89 |
+
|
| 90 |
+
dll_paths.extend(filter(os.path.exists, [nvtoolsext_dll_path, cuda_path]))
|
| 91 |
+
|
| 92 |
+
kernel32 = ctypes.WinDLL('kernel32.dll', use_last_error=True)
|
| 93 |
+
with_load_library_flags = hasattr(kernel32, 'AddDllDirectory')
|
| 94 |
+
prev_error_mode = kernel32.SetErrorMode(0x0001)
|
| 95 |
+
|
| 96 |
+
kernel32.LoadLibraryW.restype = ctypes.c_void_p
|
| 97 |
+
if with_load_library_flags:
|
| 98 |
+
kernel32.AddDllDirectory.restype = ctypes.c_void_p
|
| 99 |
+
kernel32.LoadLibraryExW.restype = ctypes.c_void_p
|
| 100 |
+
|
| 101 |
+
for dll_path in dll_paths:
|
| 102 |
+
if sys.version_info >= (3, 8):
|
| 103 |
+
os.add_dll_directory(dll_path)
|
| 104 |
+
elif with_load_library_flags:
|
| 105 |
+
res = kernel32.AddDllDirectory(dll_path)
|
| 106 |
+
if res is None:
|
| 107 |
+
err = ctypes.WinError(ctypes.get_last_error())
|
| 108 |
+
err.strerror += f' Error adding "{dll_path}" to the DLL directories.'
|
| 109 |
+
raise err
|
| 110 |
+
|
| 111 |
+
try:
|
| 112 |
+
ctypes.CDLL('vcruntime140.dll')
|
| 113 |
+
ctypes.CDLL('msvcp140.dll')
|
| 114 |
+
ctypes.CDLL('vcruntime140_1.dll')
|
| 115 |
+
except OSError:
|
| 116 |
+
print('''Microsoft Visual C++ Redistributable is not installed, this may lead to the DLL load failure.
|
| 117 |
+
It can be downloaded at https://aka.ms/vs/16/release/vc_redist.x64.exe''')
|
| 118 |
+
|
| 119 |
+
dlls = glob.glob(os.path.join(th_dll_path, '*.dll'))
|
| 120 |
+
path_patched = False
|
| 121 |
+
for dll in dlls:
|
| 122 |
+
is_loaded = False
|
| 123 |
+
if with_load_library_flags:
|
| 124 |
+
res = kernel32.LoadLibraryExW(dll, None, 0x00001100)
|
| 125 |
+
last_error = ctypes.get_last_error()
|
| 126 |
+
if res is None and last_error != 126:
|
| 127 |
+
err = ctypes.WinError(last_error)
|
| 128 |
+
err.strerror += f' Error loading "{dll}" or one of its dependencies.'
|
| 129 |
+
raise err
|
| 130 |
+
elif res is not None:
|
| 131 |
+
is_loaded = True
|
| 132 |
+
if not is_loaded:
|
| 133 |
+
if not path_patched:
|
| 134 |
+
os.environ['PATH'] = ';'.join(dll_paths + [os.environ['PATH']])
|
| 135 |
+
path_patched = True
|
| 136 |
+
res = kernel32.LoadLibraryW(dll)
|
| 137 |
+
if res is None:
|
| 138 |
+
err = ctypes.WinError(ctypes.get_last_error())
|
| 139 |
+
err.strerror += f' Error loading "{dll}" or one of its dependencies.'
|
| 140 |
+
raise err
|
| 141 |
+
|
| 142 |
+
kernel32.SetErrorMode(prev_error_mode)
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
# See Note [Global dependencies]
|
| 146 |
+
def _load_global_deps():
    """Load ``libtorch_global_deps`` with ``RTLD_GLOBAL``.

    See Note [Global dependencies]. This pre-loads shared dependencies
    (e.g. MKL) into the global symbol namespace so that later, narrowly
    scoped library loads can resolve against them. It is a no-op on
    Windows and under the ``torch_deploy`` embedded interpreter.
    """
    if platform.system() == 'Windows' or sys.executable == 'torch_deploy':
        return

    # Pick the platform-appropriate shared-library suffix.
    ext = '.dylib' if platform.system() == 'Darwin' else '.so'
    lib_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'lib')
    lib_path = os.path.join(lib_dir, 'libtorch_global_deps' + ext)

    ctypes.CDLL(lib_path, mode=ctypes.RTLD_GLOBAL)
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
if (USE_RTLD_GLOBAL_WITH_LIBTORCH or os.getenv('TORCH_USE_RTLD_GLOBAL')) and \
|
| 158 |
+
platform.system() != 'Windows':
|
| 159 |
+
# Do it the hard way. You might want to load libtorch with RTLD_GLOBAL in a
|
| 160 |
+
# few circumstances:
|
| 161 |
+
#
|
| 162 |
+
# 1. You're in a build environment (e.g., fbcode) where
|
| 163 |
+
# libtorch_global_deps is not available, but you still need
|
| 164 |
+
# to get mkl to link in with RTLD_GLOBAL or it will just
|
| 165 |
+
# not work.
|
| 166 |
+
#
|
| 167 |
+
# 2. You're trying to run PyTorch under UBSAN and you need
|
| 168 |
+
# to ensure that only one copy of libtorch is loaded, so
|
| 169 |
+
# vptr checks work properly
|
| 170 |
+
#
|
| 171 |
+
# If you're using this setting, you must verify that all the libraries
|
| 172 |
+
# you load consistently use the same libstdc++, or you may have
|
| 173 |
+
# mysterious segfaults.
|
| 174 |
+
#
|
| 175 |
+
import os as _dl_flags
|
| 176 |
+
if not hasattr(_dl_flags, 'RTLD_GLOBAL') or not hasattr(_dl_flags, 'RTLD_LAZY'):
|
| 177 |
+
try:
|
| 178 |
+
# next try if DLFCN exists
|
| 179 |
+
import DLFCN as _dl_flags # type: ignore[import, no-redef]
|
| 180 |
+
except ImportError:
|
| 181 |
+
# as a last attempt, use compile-time constants
|
| 182 |
+
import torch._dl as _dl_flags # type: ignore[import, no-redef]
|
| 183 |
+
old_flags = sys.getdlopenflags()
|
| 184 |
+
sys.setdlopenflags(_dl_flags.RTLD_GLOBAL | _dl_flags.RTLD_LAZY)
|
| 185 |
+
from torch._C import * # noqa: F403
|
| 186 |
+
sys.setdlopenflags(old_flags)
|
| 187 |
+
del old_flags
|
| 188 |
+
del _dl_flags
|
| 189 |
+
|
| 190 |
+
else:
|
| 191 |
+
# Easy way. You want this most of the time, because it will prevent
|
| 192 |
+
# C++ symbols from libtorch clobbering C++ symbols from other
|
| 193 |
+
# libraries, leading to mysterious segfaults.
|
| 194 |
+
#
|
| 195 |
+
# If building in an environment where libtorch_global_deps isn't available
|
| 196 |
+
# like parts of fbsource, but where RTLD_GLOBAL causes segfaults, you will
|
| 197 |
+
# want USE_RTLD_GLOBAL_WITH_LIBTORCH = False and USE_GLOBAL_DEPS = False
|
| 198 |
+
#
|
| 199 |
+
# See Note [Global dependencies]
|
| 200 |
+
if USE_GLOBAL_DEPS:
|
| 201 |
+
_load_global_deps()
|
| 202 |
+
from torch._C import * # noqa: F403
|
| 203 |
+
|
| 204 |
+
# Appease the type checker; ordinarily this binding is inserted by the
|
| 205 |
+
# torch._C module initialization code in C
|
| 206 |
+
if TYPE_CHECKING:
|
| 207 |
+
import torch._C as _C
|
| 208 |
+
|
| 209 |
+
# Check to see if we can load C extensions, and if not provide some guidance
|
| 210 |
+
# on what the problem might be.
|
| 211 |
+
try:
|
| 212 |
+
# _initExtension is chosen (arbitrarily) as a sentinel.
|
| 213 |
+
from torch._C import _initExtension
|
| 214 |
+
except ImportError:
|
| 215 |
+
import torch._C as _C_for_compiled_check
|
| 216 |
+
|
| 217 |
+
# The __file__ check only works for Python 3.7 and above.
|
| 218 |
+
if sys.version_info >= (3, 7) and _C_for_compiled_check.__file__ is None:
|
| 219 |
+
raise ImportError(textwrap.dedent('''
|
| 220 |
+
Failed to load PyTorch C extensions:
|
| 221 |
+
It appears that PyTorch has loaded the `torch/_C` folder
|
| 222 |
+
of the PyTorch repository rather than the C extensions which
|
| 223 |
+
are expected in the `torch._C` namespace. This can occur when
|
| 224 |
+
using the `install` workflow. e.g.
|
| 225 |
+
$ python setup.py install && python -c "import torch"
|
| 226 |
+
|
| 227 |
+
This error can generally be solved using the `develop` workflow
|
| 228 |
+
$ python setup.py develop && python -c "import torch" # This should succeed
|
| 229 |
+
or by running Python from a different directory.
|
| 230 |
+
''').strip()) from None
|
| 231 |
+
raise # If __file__ is not None the cause is unknown, so just re-raise.
|
| 232 |
+
|
| 233 |
+
for name in dir(_C):
|
| 234 |
+
if name[0] != '_' and not name.endswith('Base'):
|
| 235 |
+
__all__.append(name)
|
| 236 |
+
obj = getattr(_C, name)
|
| 237 |
+
if (isinstance(obj, Callable) or inspect.isclass(obj)): # type: ignore[arg-type]
|
| 238 |
+
if (obj.__module__ != 'torch'):
|
| 239 |
+
# TODO: fix their module from C++ side
|
| 240 |
+
if name not in ['DisableTorchFunction', 'Generator']:
|
| 241 |
+
obj.__module__ = 'torch'
|
| 242 |
+
|
| 243 |
+
if not TYPE_CHECKING:
|
| 244 |
+
# issue 38137 and python issue 43367. Submodules of a C extension are
|
| 245 |
+
# non-standard, and attributes of those submodules cannot be pickled since
|
| 246 |
+
# pickle expect to be able to import them as "from _C.sub import attr"
|
| 247 |
+
# which fails with "_C is not a package
|
| 248 |
+
for attr in dir(_C):
|
| 249 |
+
candidate = getattr(_C, attr)
|
| 250 |
+
if type(candidate) is type(_C):
|
| 251 |
+
# submodule
|
| 252 |
+
if f'torch._C.{attr}' not in sys.modules:
|
| 253 |
+
sys.modules[f'torch._C.{attr}'] = candidate
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
################################################################################
|
| 257 |
+
# Define basic utilities
|
| 258 |
+
################################################################################
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
def typename(o):
    """Return a human-readable type string for *o*.

    Tensors report their full type string (``o.type()``); everything else
    is rendered as ``module.QualifiedName``, with the module prefix omitted
    for builtins or when no module information is available.
    """
    if isinstance(o, torch.Tensor):
        return o.type()

    # Prefix with the defining module, except for builtins / unknown modules.
    prefix = ''
    if getattr(o, '__module__', None) not in (None, 'builtins', '__builtin__'):
        prefix = o.__module__ + '.'

    # Prefer the most specific name available on the object itself,
    # falling back to the name of its class.
    if hasattr(o, '__qualname__'):
        base = o.__qualname__
    elif hasattr(o, '__name__'):
        base = o.__name__
    else:
        base = o.__class__.__name__

    return prefix + base
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
def is_tensor(obj):
    r"""Returns True if `obj` is a PyTorch tensor.

    This is nothing more than ``isinstance(obj, torch.Tensor)``; using the
    ``isinstance`` check directly is preferred, since it is more explicit
    and plays better with type checkers such as mypy.

    Args:
        obj (Object): Object to test

    Example::

        >>> x=torch.tensor([1,2,3])
        >>> torch.is_tensor(x)
        True

    """
    return isinstance(obj, torch.Tensor)
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
def is_storage(obj):
    r"""Returns True if `obj` is a PyTorch storage object.

    Args:
        obj (Object): Object to test
    """
    # Exact-type membership is intentional here: subclasses of storage
    # types are not considered storages by this predicate.
    return type(obj) in _storage_classes
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
def set_default_tensor_type(t):
    r"""Sets the default ``torch.Tensor`` type to floating point tensor type
    ``t``. This type will also be used as default floating point type for
    type inference in :func:`torch.tensor`.

    The default floating point tensor type is initially ``torch.FloatTensor``.

    Args:
        t (type or string): the floating point tensor type or its name

    Example::

        >>> torch.tensor([1.2, 3]).dtype    # initial default for floating point is torch.float32
        torch.float32
        >>> torch.set_default_tensor_type(torch.DoubleTensor)
        >>> torch.tensor([1.2, 3]).dtype    # a new floating point tensor
        torch.float64

    """
    # Accept a dotted name such as 'torch.DoubleTensor' and resolve it
    # to the actual type object before handing it to the C++ layer.
    if isinstance(t, _string_classes):
        t = _import_dotted_name(t)
    _C._set_default_tensor_type(t)
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
def set_default_dtype(d):
    r"""Sets the default floating point dtype to :attr:`d`. Supports
    torch.float32 and torch.float64 as inputs; other dtypes may be accepted
    without complaint but are not supported and are unlikely to work as
    expected.

    PyTorch starts with a default floating point dtype of torch.float32;
    ``set_default_dtype(torch.float64)`` enables NumPy-like type inference.
    The default floating point dtype is used to:

    1. Implicitly determine the default complex dtype (float32 -> complex64,
       float64 -> complex128).
    2. Infer the dtype of tensors constructed from Python floats or complex
       Python numbers.
    3. Determine the result of type promotion between bool/integer tensors
       and Python floats or complex Python numbers.

    Args:
        d (:class:`torch.dtype`): the floating point dtype to make the default.
            Either torch.float32 or torch.float64.

    Example:
        >>> torch.tensor([1.2, 3]).dtype       # initially float32
        torch.float32
        >>> torch.tensor([1.2, 3j]).dtype      # ...and complex64
        torch.complex64
        >>> torch.set_default_dtype(torch.float64)
        >>> torch.tensor([1.2, 3]).dtype       # now float64
        torch.float64
        >>> torch.tensor([1.2, 3j]).dtype      # ...and complex128
        torch.complex128

    """
    # The actual state lives in the C++ layer; this is a thin binding.
    _C._set_default_dtype(d)
|
| 378 |
+
|
| 379 |
+
def use_deterministic_algorithms(mode, *, warn_only=False):
    r"""Sets whether PyTorch operations must use "deterministic" algorithms,
    i.e. algorithms that, given the same input and run on the same software
    and hardware, always produce the same output.

    When enabled, operations use a deterministic algorithm when one is
    available. Operations for which only nondeterministic algorithms exist
    raise a :class:`RuntimeError` when called (or emit a warning instead, if
    ``warn_only=True``). This covers, among others, the CUDA convolution
    modules, several pooling/padding modules when differentiating CUDA
    tensors, scatter/gather/index operations, :func:`torch.histc`,
    :func:`torch.bincount`, :func:`torch.kthvalue` and :func:`torch.median`
    on CUDA tensors — see the online documentation for the full lists of
    operations that become deterministic and of those that raise.

    .. note:: :func:`torch.set_deterministic_debug_mode` offers an
        alternative interface for this feature.

    A handful of CUDA operations (:func:`torch.mm`, :func:`torch.mv`,
    :func:`torch.bmm`) are nondeterministic when the CUDA version is 10.2 or
    greater unless ``CUBLAS_WORKSPACE_CONFIG=:4096:8`` or
    ``CUBLAS_WORKSPACE_CONFIG=:16:8`` is set; without one of these settings
    they raise a :class:`RuntimeError` when called with CUDA tensors in this
    mode. See the CUDA documentation for details:
    `<https://docs.nvidia.com/cuda/cublas/index.html#cublasApi_reproducibility>`_

    Note that deterministic operations tend to have worse performance than
    nondeterministic ones, and that this flag does not detect or prevent
    nondeterminism caused by in-place operations on tensors with internal
    memory overlap (or by passing such a tensor as :attr:`out`), where the
    order of overlapping writes is not guaranteed.

    Args:
        mode (:class:`bool`): If True, makes potentially nondeterministic
            operations switch to a deterministic algorithm or throw a runtime
            error. If False, allows nondeterministic operations.

    Keyword args:
        warn_only (:class:`bool`, optional): If True, operations that do not
            have a deterministic implementation will throw a warning instead
            of an error. Default: ``False``

    Example::

        >>> torch.use_deterministic_algorithms(True)

        # Forward mode nondeterministic error
        >>> torch.randn(10).index_copy(0, torch.tensor([0]), torch.randn(1))
        ...
        RuntimeError: index_copy does not have a deterministic implementation...
    """
    # All enforcement happens in the C++ layer; this is a thin binding.
    _C._set_deterministic_algorithms(mode, warn_only=warn_only)
|
| 503 |
+
|
| 504 |
+
def are_deterministic_algorithms_enabled():
    r"""Returns True if the global deterministic flag is turned on.

    See :func:`torch.use_deterministic_algorithms` for details.
    """
    return _C._get_deterministic_algorithms()
|
| 509 |
+
|
| 510 |
+
def is_deterministic_algorithms_warn_only_enabled():
    r"""Returns True if the global deterministic flag is set to warn only.

    See :func:`torch.use_deterministic_algorithms` for details.
    """
    return _C._get_deterministic_algorithms_warn_only()
|
| 516 |
+
|
| 517 |
+
def set_deterministic_debug_mode(debug_mode: Union[builtins.int, str]) -> None:
    r"""Sets the debug mode for deterministic operations.

    .. note:: This is an alternative interface for
        :func:`torch.use_deterministic_algorithms`. Refer to that function's
        documentation for details about affected operations.

    Args:
        debug_mode(str or int): If "default" or 0, don't error or warn on
            nondeterministic operations. If "warn" or 1, warn on
            nondeterministic operations. If "error" or 2, error on
            nondeterministic operations.

    Raises:
        TypeError: if ``debug_mode`` is neither a str nor an int.
        RuntimeError: if ``debug_mode`` is not one of the accepted values.
    """
    # NOTE: builtins.int is used here because int in this scope resolves
    # to torch.int
    if not isinstance(debug_mode, (builtins.int, str)):
        raise TypeError(f'debug_mode must be str or int, but got {type(debug_mode)}')

    if isinstance(debug_mode, str):
        # Map symbolic names onto the numeric levels used below.
        levels = {'default': 0, 'warn': 1, 'error': 2}
        if debug_mode not in levels:
            raise RuntimeError(
                'invalid value of debug_mode, expected one of `default`, '
                f'`warn`, `error`, but got {debug_mode}')
        debug_mode = levels[debug_mode]

    if debug_mode == 0:
        _C._set_deterministic_algorithms(False)
    elif debug_mode == 1:
        _C._set_deterministic_algorithms(True, warn_only=True)
    elif debug_mode == 2:
        _C._set_deterministic_algorithms(True)
    else:
        raise RuntimeError(
            'invalid value of debug_mode, expected 0, 1, or 2, '
            f'but got {debug_mode}')
|
| 558 |
+
|
| 559 |
+
def get_deterministic_debug_mode() -> builtins.int:
    r"""Returns the current value of the debug mode for deterministic
    operations (0 = "default", 1 = "warn", 2 = "error"). Refer to
    :func:`torch.set_deterministic_debug_mode` documentation for details.
    """
    if not _C._get_deterministic_algorithms():
        return 0
    # Deterministic mode is on; distinguish warn-only (1) from error (2).
    return 1 if _C._get_deterministic_algorithms_warn_only() else 2
|
| 572 |
+
|
| 573 |
+
def get_float32_matmul_precision() -> builtins.str:
    r"""Returns the current value of float32 matrix multiplication precision.

    See :func:`torch.set_float32_matmul_precision` for details.
    """
    return _C._get_float32_matmul_precision()
|
| 578 |
+
|
| 579 |
+
def set_float32_matmul_precision(precision):
    r"""Sets the internal precision of float32 matrix multiplications.

    Running float32 matrix multiplications in lower precision may
    significantly increase performance, and in some programs the loss of
    precision has a negligible impact.

    Supported values for :attr:`precision`:

    * ``"highest"`` (default): the float32 datatype is used for internal
      computations.
    * ``"high"``: the TensorFloat32 or bfloat16_3x datatypes are used for
      internal computations, if fast matrix multiplication algorithms using
      those datatypes internally are available. Otherwise float32 matrix
      multiplications are computed as if the precision is ``"highest"``.
    * ``"medium"``: the bfloat16 datatype is used for internal computations,
      if a fast matrix multiplication algorithm using that datatype
      internally is available. Otherwise float32 matrix multiplications are
      computed as if the precision is ``"high"``.

    .. note::

        This does not change the output dtype of float32 matrix
        multiplications; it controls how the internal computation of the
        matrix multiplication is performed.

    .. note::

        This does not change the precision of convolution operations. Other
        flags, like `torch.backends.cudnn.allow_tf32`, may control the
        precision of convolution operations.

    .. note::

        This flag currently only affects one native device type: CUDA.
        If "high" or "medium" are set then the TensorFloat32 datatype will be
        used when computing float32 matrix multiplications, equivalent to
        setting `torch.backends.cuda.matmul.allow_tf32 = True`. When
        "highest" (the default) is set then the float32 datatype is used for
        internal computations, equivalent to setting
        `torch.backends.cuda.matmul.allow_tf32 = False`.

    Args:
        precision(str): can be set to "highest" (default), "high", or
            "medium" (see above).
    """
    # Validation of the precision string happens on the C++ side.
    _C._set_float32_matmul_precision(precision)
|
| 623 |
+
|
| 624 |
+
def set_warn_always(b):
    r"""Control whether PyTorch warnings are always emitted.

    When this flag is False (default) some PyTorch warnings may only appear
    once per process, which helps avoid excessive warning output.  Setting
    it to True causes these warnings to always appear, which may be helpful
    when debugging.

    Args:
        b (:class:`bool`): If True, force warnings to always be emitted
            If False, set to the default behaviour
    """
    # The flag lives in the C++ layer so native-side warnings honor it too.
    _C._set_warnAlways(b)
|
| 635 |
+
|
| 636 |
+
def is_warn_always_enabled():
    r"""Return ``True`` when the global warn_always flag is turned on.

    Refer to :func:`torch.set_warn_always` documentation for more details.
    """
    return _C._get_warnAlways()
|
| 641 |
+
|
| 642 |
+
################################################################################
|
| 643 |
+
# Define numeric constants
|
| 644 |
+
################################################################################
|
| 645 |
+
|
| 646 |
+
# For Python Array API (https://data-apis.org/array-api/latest/API_specification/constants.html) and
|
| 647 |
+
# NumPy consistency (https://numpy.org/devdocs/reference/constants.html)
|
| 648 |
+
from math import e , nan , inf , pi
|
| 649 |
+
__all__.extend(['e', 'pi', 'nan', 'inf'])
|
| 650 |
+
|
| 651 |
+
################################################################################
|
| 652 |
+
# Define Storage and Tensor classes
|
| 653 |
+
################################################################################
|
| 654 |
+
|
| 655 |
+
from ._tensor import Tensor
|
| 656 |
+
from .storage import _StorageBase, _TypedStorage, _LegacyStorage, _UntypedStorage
|
| 657 |
+
|
| 658 |
+
# NOTE: New <type>Storage classes should never be added. When adding a new
|
| 659 |
+
# dtype, use torch.storage._TypedStorage directly.
|
| 660 |
+
|
| 661 |
+
class ByteStorage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.uint8``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.uint8
|
| 665 |
+
|
| 666 |
+
class DoubleStorage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.double``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.double
|
| 670 |
+
|
| 671 |
+
class FloatStorage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.float``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.float
|
| 675 |
+
|
| 676 |
+
class HalfStorage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.half``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.half
|
| 680 |
+
|
| 681 |
+
class LongStorage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.long``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.long
|
| 685 |
+
|
| 686 |
+
class IntStorage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.int``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.int
|
| 690 |
+
|
| 691 |
+
class ShortStorage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.short``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.short
|
| 695 |
+
|
| 696 |
+
class CharStorage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.int8``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.int8
|
| 700 |
+
|
| 701 |
+
class BoolStorage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.bool``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.bool
|
| 705 |
+
|
| 706 |
+
class BFloat16Storage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.bfloat16``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.bfloat16
|
| 710 |
+
|
| 711 |
+
class ComplexDoubleStorage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.cdouble``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.cdouble
|
| 715 |
+
|
| 716 |
+
class ComplexFloatStorage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.cfloat``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.cfloat
|
| 720 |
+
|
| 721 |
+
class QUInt8Storage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.quint8``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.quint8
|
| 725 |
+
|
| 726 |
+
class QInt8Storage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.qint8``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.qint8
|
| 730 |
+
|
| 731 |
+
class QInt32Storage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.qint32``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.qint32
|
| 735 |
+
|
| 736 |
+
class QUInt4x2Storage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.quint4x2``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.quint4x2
|
| 740 |
+
|
| 741 |
+
class QUInt2x4Storage(_LegacyStorage):
    """Legacy storage class whose element dtype is ``torch.quint2x4``."""
    @classproperty
    def dtype(self):
        # Exposed as a class-level attribute via `classproperty`.
        return torch.quint2x4
|
| 745 |
+
|
| 746 |
+
_storage_classes = {
|
| 747 |
+
_UntypedStorage, DoubleStorage, FloatStorage, LongStorage, IntStorage,
|
| 748 |
+
ShortStorage, CharStorage, ByteStorage, HalfStorage, BoolStorage,
|
| 749 |
+
QUInt8Storage, QInt8Storage, QInt32Storage, BFloat16Storage,
|
| 750 |
+
ComplexFloatStorage, ComplexDoubleStorage, QUInt4x2Storage, QUInt2x4Storage,
|
| 751 |
+
_TypedStorage
|
| 752 |
+
}
|
| 753 |
+
|
| 754 |
+
# The _tensor_classes set is initialized by the call to _C._initialize_tensor_type_bindings()
|
| 755 |
+
_tensor_classes: Set[Type] = set()
|
| 756 |
+
|
| 757 |
+
# If you edit these imports, please update torch/__init__.py.in as well
|
| 758 |
+
from .random import set_rng_state, get_rng_state, manual_seed, initial_seed, seed
|
| 759 |
+
from .serialization import save, load
|
| 760 |
+
from ._tensor_str import set_printoptions
|
| 761 |
+
|
| 762 |
+
################################################################################
|
| 763 |
+
# Initialize extension
|
| 764 |
+
################################################################################
|
| 765 |
+
|
| 766 |
+
def manager_path():
    """Return the filesystem path of the ``torch_shm_manager`` binary,
    encoded as UTF-8 bytes.

    Returns ``b""`` on Windows and under torch::deploy (where
    ``sys.executable`` is the sentinel string ``'torch_deploy'``).
    Raises ``RuntimeError`` if the binary cannot be found.
    """
    if platform.system() == 'Windows' or sys.executable == 'torch_deploy':
        return b""
    path = get_file_path('torch', 'bin', 'torch_shm_manager')
    # NOTE(review): environment preparation runs before the existence check
    # below, so its side effects happen even when the manager binary is
    # missing -- presumably intentional; confirm against callers.
    prepare_multiprocessing_environment(get_file_path('torch'))
    if not os.path.exists(path):
        raise RuntimeError("Unable to find torch_shm_manager at " + path)
    return path.encode('utf-8')
|
| 774 |
+
|
| 775 |
+
from torch.amp import autocast
|
| 776 |
+
|
| 777 |
+
# Shared memory manager needs to know the exact location of manager executable
|
| 778 |
+
_C._initExtension(manager_path())
|
| 779 |
+
del manager_path
|
| 780 |
+
|
| 781 |
+
# Appease the type checker: it can't deal with direct setting of globals().
|
| 782 |
+
# Note that we will see "too many" functions when reexporting this way; there
|
| 783 |
+
# is not a good way to fix this problem. Perhaps, try to redesign VariableFunctions
|
| 784 |
+
# so that this import is good enough
|
| 785 |
+
if TYPE_CHECKING:
|
| 786 |
+
# Some type signatures pulled in from _VariableFunctions here clash with
|
| 787 |
+
# signatures already imported. For now these clashes are ignored; see
|
| 788 |
+
# PR #43339 for details.
|
| 789 |
+
from torch._C._VariableFunctions import * # type: ignore[misc] # noqa: F403
|
| 790 |
+
|
| 791 |
+
# Ops not to be exposed in `torch` namespace,
|
| 792 |
+
# mostly helper ops.
|
| 793 |
+
PRIVATE_OPS = (
|
| 794 |
+
'unique_dim',
|
| 795 |
+
)
|
| 796 |
+
|
| 797 |
+
for name in dir(_C._VariableFunctions):
|
| 798 |
+
if name.startswith('__') or name in PRIVATE_OPS:
|
| 799 |
+
continue
|
| 800 |
+
obj = getattr(_C._VariableFunctions, name)
|
| 801 |
+
obj.__module__ = 'torch'
|
| 802 |
+
globals()[name] = obj
|
| 803 |
+
if not name.startswith("_"):
|
| 804 |
+
__all__.append(name)
|
| 805 |
+
|
| 806 |
+
################################################################################
|
| 807 |
+
# Import interface functions defined in Python
|
| 808 |
+
################################################################################
|
| 809 |
+
|
| 810 |
+
# needs to be after the above ATen bindings so we can overwrite from Python side
|
| 811 |
+
from .functional import * # noqa: F403
|
| 812 |
+
|
| 813 |
+
|
| 814 |
+
################################################################################
|
| 815 |
+
# Remove unnecessary members
|
| 816 |
+
################################################################################
|
| 817 |
+
|
| 818 |
+
del _StorageBase
|
| 819 |
+
del _LegacyStorage
|
| 820 |
+
|
| 821 |
+
################################################################################
|
| 822 |
+
# Define _assert
|
| 823 |
+
################################################################################
|
| 824 |
+
|
| 825 |
+
# needs to be before the submodule imports to avoid circular dependencies
|
| 826 |
+
def _assert(condition, message):
    r"""A wrapper around Python's assert which is symbolically traceable.
    """
    # Imported lazily to avoid a circular dependency at module import time.
    from .overrides import has_torch_function, handle_torch_function

    # Route through the __torch_function__ protocol when `condition` is a
    # tensor-like object that is not an actual Tensor, so tracers can
    # intercept the assertion instead of evaluating it eagerly.
    is_tensorlike = type(condition) is not torch.Tensor and has_torch_function((condition,))
    if is_tensorlike:
        return handle_torch_function(_assert, (condition,), condition, message)
    assert condition, message
|
| 834 |
+
|
| 835 |
+
################################################################################
|
| 836 |
+
# Import most common subpackages
|
| 837 |
+
################################################################################
|
| 838 |
+
|
| 839 |
+
# Use the redundant form so that type checkers know that these are a part of
|
| 840 |
+
# the public API. The "regular" import lines are there solely for the runtime
|
| 841 |
+
# side effect of adding to the imported module's members for other users.
|
| 842 |
+
from torch import cuda as cuda
|
| 843 |
+
from torch import cpu as cpu
|
| 844 |
+
from torch import autograd as autograd
|
| 845 |
+
from torch.autograd import (
|
| 846 |
+
no_grad as no_grad,
|
| 847 |
+
enable_grad as enable_grad,
|
| 848 |
+
set_grad_enabled as set_grad_enabled,
|
| 849 |
+
inference_mode as inference_mode,
|
| 850 |
+
)
|
| 851 |
+
from torch import fft as fft
|
| 852 |
+
from torch import futures as futures
|
| 853 |
+
from torch import nn as nn
|
| 854 |
+
from torch import optim as optim
|
| 855 |
+
import torch.optim._multi_tensor
|
| 856 |
+
from torch import multiprocessing as multiprocessing
|
| 857 |
+
from torch import sparse as sparse
|
| 858 |
+
from torch import special as special
|
| 859 |
+
import torch.utils.backcompat
|
| 860 |
+
from torch import onnx as onnx
|
| 861 |
+
from torch import jit as jit
|
| 862 |
+
from torch import linalg as linalg
|
| 863 |
+
from torch import hub as hub
|
| 864 |
+
from torch import random as random
|
| 865 |
+
from torch import distributions as distributions
|
| 866 |
+
from torch import testing as testing
|
| 867 |
+
import torch.backends.cuda
|
| 868 |
+
import torch.backends.mps
|
| 869 |
+
import torch.backends.cudnn
|
| 870 |
+
import torch.backends.mkl
|
| 871 |
+
import torch.backends.mkldnn
|
| 872 |
+
import torch.backends.openmp
|
| 873 |
+
import torch.backends.quantized
|
| 874 |
+
import torch.utils.data
|
| 875 |
+
from torch import __config__ as __config__
|
| 876 |
+
from torch import __future__ as __future__
|
| 877 |
+
from torch import profiler as profiler
|
| 878 |
+
|
| 879 |
+
# Quantized, sparse, AO, etc. should be last to get imported, as nothing
|
| 880 |
+
# is expected to depend on them.
|
| 881 |
+
import torch.nn.intrinsic
|
| 882 |
+
import torch.nn.quantizable
|
| 883 |
+
import torch.nn.quantized
|
| 884 |
+
# AO depends on nn, as well as quantized stuff -- so should be after those.
|
| 885 |
+
from torch import ao as ao
|
| 886 |
+
|
| 887 |
+
_C._init_names(list(torch._storage_classes))
|
| 888 |
+
|
| 889 |
+
# attach docstrings to torch and tensor functions
|
| 890 |
+
from . import _torch_docs, _tensor_docs, _storage_docs
|
| 891 |
+
del _torch_docs, _tensor_docs, _storage_docs
|
| 892 |
+
|
| 893 |
+
|
| 894 |
+
def compiled_with_cxx11_abi():
    r"""Return whether PyTorch was built with ``_GLIBCXX_USE_CXX11_ABI=1``."""
    # The ABI flag is baked into the C extension module at build time.
    return _C._GLIBCXX_USE_CXX11_ABI
|
| 897 |
+
|
| 898 |
+
|
| 899 |
+
# Import the ops "namespace"
|
| 900 |
+
from torch._ops import ops
|
| 901 |
+
from torch._classes import classes
|
| 902 |
+
|
| 903 |
+
# quantization depends on torch.fx
|
| 904 |
+
# Import quantization
|
| 905 |
+
from torch import quantization as quantization
|
| 906 |
+
|
| 907 |
+
# Import the quasi random sampler
|
| 908 |
+
from torch import quasirandom as quasirandom
|
| 909 |
+
|
| 910 |
+
# If you are seeing this, it means that this call site was not checked if
|
| 911 |
+
# the memory format could be preserved, and it was switched to old default
|
| 912 |
+
# behaviour of contiguous
|
| 913 |
+
legacy_contiguous_format = contiguous_format
|
| 914 |
+
|
| 915 |
+
# Register fork handler to initialize OpenMP in child processes (see gh-28389)
|
| 916 |
+
from torch.multiprocessing._atfork import register_after_fork
|
| 917 |
+
register_after_fork(torch.get_num_threads)
|
| 918 |
+
del register_after_fork
|
| 919 |
+
|
| 920 |
+
# Import tools that require fully imported torch (for applying
|
| 921 |
+
# torch.jit.script as a decorator, for instance):
|
| 922 |
+
from ._lobpcg import lobpcg as lobpcg
|
| 923 |
+
|
| 924 |
+
# These were previously defined in native_functions.yaml and appeared on the
|
| 925 |
+
# `torch` namespace, but we moved them to c10 dispatch to facilitate custom
|
| 926 |
+
# class usage. We add these lines here to preserve backward compatibility.
|
| 927 |
+
quantized_lstm = torch.ops.aten.quantized_lstm
|
| 928 |
+
quantized_gru = torch.ops.aten.quantized_gru
|
| 929 |
+
|
| 930 |
+
from torch.utils.dlpack import from_dlpack, to_dlpack
|
| 931 |
+
|
| 932 |
+
# Import experimental masked operations support. See
|
| 933 |
+
# [RFC-0016](https://github.com/pytorch/rfcs/pull/27) for more
|
| 934 |
+
# information.
|
| 935 |
+
from . import _masked
|
| 936 |
+
|
| 937 |
+
# Import removed ops with error message about removal
|
| 938 |
+
from ._linalg_utils import solve
|
| 939 |
+
|
| 940 |
+
|
| 941 |
+
def _register_device_module(device_type, module):
|
| 942 |
+
r"""Register an external runtime module of the specific :attr:`device_type`
|
| 943 |
+
supported by torch.
|
| 944 |
+
|
| 945 |
+
After the :attr:`module` is registered correctly, the user can refer
|
| 946 |
+
the external runtime module as part of torch with attribute torch.xxx.
|
| 947 |
+
"""
|
| 948 |
+
# Make sure the device_type represent a supported device type for torch.
|
| 949 |
+
device_type = torch.device(device_type).type
|
| 950 |
+
m = sys.modules[__name__]
|
| 951 |
+
if hasattr(m, device_type):
|
| 952 |
+
raise RuntimeError("The runtime module of '{}' has already "
|
| 953 |
+
"been registered with '{}'".format(device_type, getattr(m, device_type)))
|
| 954 |
+
setattr(m, device_type, module)
|
| 955 |
+
|
| 956 |
+
# expose return_types
|
| 957 |
+
from . import return_types
|
| 958 |
+
if sys.executable != 'torch_deploy':
|
| 959 |
+
from . import library
|
| 960 |
+
from . import _meta_registrations
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_appdirs.py
ADDED
|
@@ -0,0 +1,643 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
# Copyright (c) 2005-2010 ActiveState Software Inc.
|
| 4 |
+
# Copyright (c) 2013 Eddy Petrișor
|
| 5 |
+
|
| 6 |
+
# flake8: noqa
|
| 7 |
+
|
| 8 |
+
"""
|
| 9 |
+
This file is directly from
|
| 10 |
+
https://github.com/ActiveState/appdirs/blob/3fe6a83776843a46f20c2e5587afcffe05e03b39/appdirs.py
|
| 11 |
+
|
| 12 |
+
The license of https://github.com/ActiveState/appdirs copied below:
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
# This is the MIT license
|
| 16 |
+
|
| 17 |
+
Copyright (c) 2010 ActiveState Software Inc.
|
| 18 |
+
|
| 19 |
+
Permission is hereby granted, free of charge, to any person obtaining a
|
| 20 |
+
copy of this software and associated documentation files (the
|
| 21 |
+
"Software"), to deal in the Software without restriction, including
|
| 22 |
+
without limitation the rights to use, copy, modify, merge, publish,
|
| 23 |
+
distribute, sublicense, and/or sell copies of the Software, and to
|
| 24 |
+
permit persons to whom the Software is furnished to do so, subject to
|
| 25 |
+
the following conditions:
|
| 26 |
+
|
| 27 |
+
The above copyright notice and this permission notice shall be included
|
| 28 |
+
in all copies or substantial portions of the Software.
|
| 29 |
+
|
| 30 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
| 31 |
+
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 32 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
| 33 |
+
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
| 34 |
+
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
| 35 |
+
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
| 36 |
+
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
| 37 |
+
"""
|
| 38 |
+
|
| 39 |
+
"""Utilities for determining application-specific dirs.
|
| 40 |
+
|
| 41 |
+
See <https://github.com/ActiveState/appdirs> for details and usage.
|
| 42 |
+
"""
|
| 43 |
+
# Dev Notes:
|
| 44 |
+
# - MSDN on where to store app data files:
|
| 45 |
+
# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
|
| 46 |
+
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
|
| 47 |
+
# - XDG spec for Un*x: https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
|
| 48 |
+
|
| 49 |
+
__version__ = "1.4.4"
|
| 50 |
+
__version_info__ = tuple(int(segment) for segment in __version__.split("."))
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
import sys
|
| 54 |
+
import os
|
| 55 |
+
|
| 56 |
+
unicode = str
|
| 57 |
+
|
| 58 |
+
if sys.platform.startswith('java'):
|
| 59 |
+
import platform
|
| 60 |
+
os_name = platform.java_ver()[3][0]
|
| 61 |
+
if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
|
| 62 |
+
system = 'win32'
|
| 63 |
+
elif os_name.startswith('Mac'): # "Mac OS X", etc.
|
| 64 |
+
system = 'darwin'
|
| 65 |
+
else: # "Linux", "SunOS", "FreeBSD", etc.
|
| 66 |
+
# Setting this to "linux2" is not ideal, but only Windows or Mac
|
| 67 |
+
# are actually checked for and the rest of the module expects
|
| 68 |
+
# *sys.platform* style strings.
|
| 69 |
+
system = 'linux2'
|
| 70 |
+
else:
|
| 71 |
+
system = sys.platform
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        Mac OS X: ~/Library/Application Support/<AppName>
        Unix: ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    # `system` is a module-level global set near the top of this file
    # (normally sys.platform, with a Jython special case).
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        # `cond and A or B` is the pre-ternary conditional idiom; safe here
        # because the "A" string is always truthy.
        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(const))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG spec: honor $XDG_DATA_HOME, defaulting to ~/.local/share.
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    # The version component is only appended when an appname was given.
    if appname and version:
        path = os.path.join(path, version)
    return path
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of data dirs should be
            returned. By default, the first item from XDG_DATA_DIRS is
            returned, or '/usr/local/share/<AppName>',
            if XDG_DATA_DIRS is not set

    Typical site data directories are:
        Mac OS X: /Library/Application Support/<AppName>
        Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
        Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
        Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
        Win 7: C:\ProgramData\<AppAuthor>\<AppName>   # Hidden, but writeable on Win 7.

    For Unix, this is using the $XDG_DATA_DIRS[0] default.

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('/Library/Application Support')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG default for $XDG_DATA_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_DATA_DIRS',
                         os.pathsep.join(['/usr/local/share', '/usr/share']))
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                # On *nix the version is folded into appname here, so each
                # candidate dir gets the <AppName>/<version> suffix.
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
        # NOTE: the *nix branch returns here, so the version-append below
        # only ever runs for the win32/darwin branches.
        return path

    if appname and version:
        path = os.path.join(path, version)
    return path
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user config directories are:
        Mac OS X: ~/Library/Preferences/<AppName>
        Unix: ~/.config/<AppName>     # or in $XDG_CONFIG_HOME, if defined
        Win *: same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if system == "win32":
        # Windows has no separate config location; reuse the data dir
        # (version deliberately passed as None, appended below instead).
        path = user_data_dir(appname, appauthor, None, roaming)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Preferences/')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG spec: honor $XDG_CONFIG_HOME, defaulting to ~/.config.
        path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return the system-wide (shared) config directory for this application.

    "appname" is the application name; if None, just the system directory
    is returned.  "appauthor" (Windows only) is the owning company/body and
    falls back to appname; pass False to disable it.  "version" is an
    optional version path element, applied only when appname is present.
    "multipath" (\*nix only) returns the full os.pathsep-joined list of
    config dirs instead of just the first entry.

    Typical site config directories:
        Mac OS X:   same as site_data_dir
        Unix:       /etc/xdg/<AppName> or each $XDG_CONFIG_DIRS entry
        Win *:      same as site_data_dir

    WARNING: avoid on Windows — "C:\ProgramData" is a hidden *system*
    directory on Vista.
    """
    if system == 'win32':
        path = site_data_dir(appname, appauthor)
        if appname and version:
            path = os.path.join(path, version)
    elif system == 'darwin':
        path = os.path.expanduser('/Library/Preferences')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG default for $XDG_CONFIG_DIRS; only the first entry is used
        # unless multipath is requested.
        raw = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        dirs = [os.path.expanduser(d.rstrip(os.sep)) for d in raw.split(os.pathsep)]
        if appname:
            leaf = os.path.join(appname, version) if version else appname
            dirs = [os.sep.join((d, leaf)) for d in dirs]
        path = os.pathsep.join(dirs) if multipath else dirs[0]
    return path
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return the user-specific cache directory for this application.

    "appname" is the application name; if None, just the base directory is
    returned.  "appauthor" (Windows only) falls back to appname; pass False
    to disable it.  "version" is an optional version path element, applied
    only when appname is present.  "opinion" (boolean) can be False to skip
    appending "Cache" on Windows.

    Typical user cache directories:
        Mac OS X:   ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName>  (XDG default)
        Windows:    <CSIDL_LOCAL_APPDATA>\<AppAuthor>\<AppName>\Cache
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            # appauthor=False flattens the hierarchy to just the app name.
            parts = [path, appname] if appauthor is False else [path, appauthor, appname]
            path = os.path.join(*parts)
            if opinion:
                path = os.path.join(path, "Cache")
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path
|
| 350 |
+
|
| 351 |
+
|
| 352 |
+
def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return the user-specific state directory for this application.

    On Windows and macOS this is identical to user_data_dir; elsewhere it
    follows the Debian XDG extension proposal and uses $XDG_STATE_HOME
    (default "~/.local/state/<AppName>").

    "appname", "appauthor", "version" and "roaming" behave exactly as in
    user_data_dir.
    """
    if system in ("win32", "darwin"):
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        base = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
        path = os.path.join(base, appname) if appname else base
    if appname and version:
        path = os.path.join(path, version)
    return path
|
| 392 |
+
|
| 393 |
+
|
| 394 |
+
def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

    "appname" is the application name; if None, just the base directory is
    returned.  "appauthor" (Windows only) falls back to appname; pass False
    to disable it.  "version" is an optional version path element, applied
    only when appname is present.  "opinion" (boolean) can be False to skip
    appending "Logs" (Windows) / "log" (Unix) to the base directory.

    Typical user log directories:
        Mac OS X:   ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME
        Windows:    <CSIDL_LOCAL_APPDATA>\<AppAuthor>\<AppName>\Logs
    """
    if system == "darwin":
        path = os.path.expanduser('~/Library/Logs')
        # Bug fix: the original joined `appname` unconditionally, raising
        # TypeError when appname is None; guard like every other platform
        # branch in this module does.
        if appname:
            path = os.path.join(path, appname)
    elif system == "win32":
        path = user_data_dir(appname, appauthor, version)
        version = False  # version already applied by user_data_dir
        if opinion:
            path = os.path.join(path, "Logs")
    else:
        path = user_cache_dir(appname, appauthor, version)
        version = False  # version already applied by user_cache_dir
        if opinion:
            path = os.path.join(path, "log")
    if appname and version:
        path = os.path.join(path, version)
    return path
|
| 443 |
+
|
| 444 |
+
|
| 445 |
+
class AppDirs(object):
    """Convenience wrapper bundling appname/appauthor/version/roaming/multipath
    options and exposing each platform directory as a read-only property."""

    def __init__(self, appname=None, appauthor=None, version=None,
                 roaming=False, multipath=False):
        # Stored verbatim; each property forwards them to the module-level
        # helper of the same name.
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        self.roaming = roaming
        self.multipath = multipath

    @property
    def user_data_dir(self):
        """Per-user data directory."""
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        """Shared (system-wide) data directory."""
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        """Per-user config directory."""
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        """Shared (system-wide) config directory."""
        return site_config_dir(self.appname, self.appauthor,
                               version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        """Per-user cache directory."""
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_state_dir(self):
        """Per-user state directory."""
        return user_state_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        """Per-user log directory."""
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)
|
| 489 |
+
|
| 490 |
+
|
| 491 |
+
#---- internal support stuff
|
| 492 |
+
|
| 493 |
+
def _get_win_folder_from_registry(csidl_name):
    """Resolve a CSIDL_* folder by reading the Explorer "Shell Folders"
    registry key.

    This is a fallback technique at best; the registry is not guaranteed
    to give the correct answer for every CSIDL_* name.
    """
    import winreg as _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    directory, _value_type = _winreg.QueryValueEx(key, shell_folder_name)
    return directory
|
| 512 |
+
|
| 513 |
+
|
| 514 |
+
def _get_win_folder_with_pywin32(csidl_name):
    """Resolve a CSIDL_* folder via pywin32's SHGetFolderPath.

    Falls back to the folder's short (8.3) path name when it contains
    characters above latin-1, because SHGetFolderPath does not return
    unicode strings when there is unicode data in the path
    (see <http://bugs.activestate.com/show_bug.cgi?id=85099>).
    """
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    try:
        # Bug fix: the original called `unicode(dir)`, which is a NameError
        # on Python 3; `str` is the correct equivalent here.
        dir = str(dir)

        # Downgrade to short path name if the path has highbit chars.
        has_high_char = any(ord(c) > 255 for c in dir)
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except ImportError:
                pass
    except UnicodeError:
        pass
    return dir
|
| 539 |
+
|
| 540 |
+
|
| 541 |
+
def _get_win_folder_with_ctypes(csidl_name):
    """Resolve a CSIDL_* folder through the shell32 API via ctypes."""
    import ctypes

    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if the path has highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    if any(ord(c) > 255 for c in buf):
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value
|
| 566 |
+
|
| 567 |
+
def _get_win_folder_with_jna(csidl_name):
    """Resolve a CSIDL_* folder via JNA.

    NOTE(review): only usable under Jython, where `com.sun.jna` is
    importable; mirrors the ctypes variant including the short-path
    fallback for high-bit characters — confirm on a Jython host.
    """
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    # MAX_PATH * 2 bytes: JNA buffer of 'c' (char) elements.
    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in dir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros('c', buf_size)
        kernel = win32.Kernel32.INSTANCE
        if kernel.GetShortPathName(dir, buf, buf_size):
            dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    return dir
|
| 592 |
+
|
| 593 |
+
# Pick the best available implementation of _get_win_folder at import time:
# pywin32 > ctypes > JNA (Jython) > raw-registry fallback.
if system == "win32":
    try:
        import win32com.shell
        _get_win_folder = _get_win_folder_with_pywin32
    except ImportError:
        try:
            from ctypes import windll
            _get_win_folder = _get_win_folder_with_ctypes
        except ImportError:
            try:
                import com.sun.jna
                _get_win_folder = _get_win_folder_with_jna
            except ImportError:
                _get_win_folder = _get_win_folder_from_registry
|
| 607 |
+
|
| 608 |
+
|
| 609 |
+
#---- self test code
|
| 610 |
+
|
| 611 |
+
if __name__ == "__main__":
    # Self-test: print every directory property for several AppDirs
    # configurations so a human can eyeball the platform results.
    appname = "MyApp"
    appauthor = "MyCompany"

    props = ("user_data_dir",
             "user_config_dir",
             "user_cache_dir",
             "user_state_dir",
             "user_log_dir",
             "site_data_dir",
             "site_config_dir")

    print("-- app dirs %s --" % __version__)

    def _dump(label, dirs):
        # One labelled block of prop/value lines per configuration.
        print(label)
        for prop in props:
            print("%s: %s" % (prop, getattr(dirs, prop)))

    _dump("-- app dirs (with optional 'version')",
          AppDirs(appname, appauthor, version="1.0"))
    _dump("\n-- app dirs (without optional 'version')",
          AppDirs(appname, appauthor))
    _dump("\n-- app dirs (without optional 'appauthor')",
          AppDirs(appname))
    _dump("\n-- app dirs (with disabled 'appauthor')",
          AppDirs(appname, appauthor=False))
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_dl.cpython-38-x86_64-linux-gnu.so
ADDED
|
Binary file (37.9 kB). View file
|
|
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_linalg_utils.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Various linear algebra utility methods for internal use.
|
| 2 |
+
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from torch import Tensor
|
| 6 |
+
import torch
|
| 7 |
+
|
| 8 |
+
from typing import Optional, Tuple
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def is_sparse(A):
    """Return True if tensor *A* uses the sparse COO layout.

    Raises TypeError when *A* is not a torch.Tensor (the detailed message
    is omitted under TorchScript, which cannot format arbitrary types).
    """
    if not isinstance(A, torch.Tensor):
        error_str = "expected Tensor"
        if not torch.jit.is_scripting():
            error_str += " but got {}".format(type(A))
        raise TypeError(error_str)
    return A.layout == torch.sparse_coo
|
| 20 |
+
|
| 21 |
+
def get_floating_dtype(A):
    """Return A's dtype when it is half/float/double; anything else
    (integers, bool, complex) maps to torch.float32."""
    kept = (torch.float16, torch.float32, torch.float64)
    return A.dtype if A.dtype in kept else torch.float32
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def matmul(A: Optional[Tensor], B: Tensor) -> Tensor:
    """Multiply two matrices: A may be None (treated as identity) or a
    sparse COO tensor; B is always dense."""
    if A is None:
        return B
    if A.layout == torch.sparse_coo:
        return torch.sparse.mm(A, B)
    return torch.matmul(A, B)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def conjugate(A):
    """Return A.conj() for complex tensors; non-complex A passes through
    unchanged (same object)."""
    return A.conj() if A.is_complex() else A
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def transpose(A):
    """Swap the last two dimensions of a matrix or batch of matrices."""
    last = A.dim() - 1
    return A.transpose(last, last - 1)
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def transjugate(A):
    """Return the conjugate transpose of a matrix or batch of matrices.

    For real tensors this is just the (last-two-dims) transpose.
    """
    last = A.dim() - 1
    At = A.transpose(last, last - 1)
    return At.conj() if At.is_complex() else At
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def bform(X: Tensor, A: Optional[Tensor], Y: Tensor) -> Tensor:
    """Return the bilinear form :math:`X^T A Y` (A=None means identity)."""
    AY = matmul(A, Y)
    return matmul(transpose(X), AY)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def qform(A: Optional[Tensor], S: Tensor):
    """Return quadratic form :math:`S^T A S`.

    A may be None (treated as identity by the underlying helpers).
    """
    return bform(S, A, S)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def basis(A):
    """Return an orthonormal basis for the column space of A, computed via
    a QR factorization."""
    if A.is_cuda:
        # torch.orgqr is not available in CUDA; use linalg.qr there.
        return torch.linalg.qr(A).Q
    return torch.orgqr(*torch.geqrf(A))
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def symeig(A: Tensor, largest: Optional[bool] = False) -> Tuple[Tensor, Tensor]:
    """Return (eigenvalues, eigenvectors) of symmetric A.

    torch.linalg.eigh yields ascending eigenvalues; when *largest* is
    truthy both outputs are flipped to descending order.  A value of None
    is treated the same as False.
    """
    E, Z = torch.linalg.eigh(A, UPLO='U')
    if largest:
        E = torch.flip(E, dims=(-1,))
        Z = torch.flip(Z, dims=(-1,))
    return E, Z
|
| 102 |
+
|
| 103 |
+
# This function was deprecated and removed
# This nice error message can be removed in version 1.13+
def solve(input: Tensor, A: Tensor, *, out=None) -> Tuple[Tensor, Tensor]:
    """Stub for the removed ``torch.solve``: unconditionally raises
    RuntimeError pointing callers at ``torch.linalg.solve``."""
    raise RuntimeError(
        "This function was deprecated since version 1.9 and is now removed. Please use the `torch.linalg.solve` function instead.",
    )
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_meta_registrations.py
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
from torch._prims import utils
|
| 3 |
+
|
| 4 |
+
meta_lib = torch.library.Library("aten", "IMPL", "Meta")
|
| 5 |
+
|
| 6 |
+
def check(b, s):
    """Raise RuntimeError with message *s* unless condition *b* holds."""
    if b:
        return
    raise RuntimeError(s)
|
| 9 |
+
|
| 10 |
+
def toRealValueType(dtype):
    """Map a complex dtype to its real counterpart; other dtypes pass
    through unchanged."""
    return {
        torch.complex32: torch.half,
        torch.cfloat: torch.float,
        torch.cdouble: torch.double,
    }.get(dtype, dtype)
|
| 17 |
+
|
| 18 |
+
# Implementations below are taken from https://github.com/albanD/subclass_zoo/blob/main/python_meta_tensor.py
@torch.library.impl(meta_lib, "index_select")
def meta_index_select(self, dim, index):
    """Meta kernel for aten::index_select: the selected dim's size becomes
    index.numel(); 0-d inputs keep their shape."""
    shape = list(self.size())
    if self.dim() > 0:
        shape[dim] = index.numel()
    return self.new_empty(shape)
|
| 25 |
+
|
| 26 |
+
@torch.library.impl(meta_lib, "index_select.out")
def meta_index_select_out(self, dim, index, out):
    """Meta kernel for index_select.out: resize *out* to self's size, then
    copy the functional result into it."""
    torch._resize_output_(out, self.size(), self.device)
    return out.copy_(torch.index_select(self, dim, index))
|
| 30 |
+
|
| 31 |
+
@torch.library.impl(meta_lib, "abs")
def meta_abs(self):
    """Meta kernel for aten::abs: complex inputs yield real-valued outputs
    (cfloat -> float, etc.); everything else keeps its dtype."""
    if not self.is_complex():
        return self.new_empty(self.size())
    return self.new_empty(self.size(), dtype=toRealValueType(self.dtype))
|
| 38 |
+
|
| 39 |
+
@torch.library.impl(meta_lib, "abs.out")
def meta_abs_out(self, out):
    """Meta kernel for abs.out: resize *out* to self's size, then copy the
    functional abs result into it."""
    torch._resize_output_(out, self.size(), self.device)
    return out.copy_(torch.abs(self))
|
| 43 |
+
|
| 44 |
+
@torch.library.impl(meta_lib, "max")
def meta_max(self):
    """Meta kernel for aten::max (full reduction): result is a 0-d tensor."""
    return self.new_empty(())
|
| 47 |
+
|
| 48 |
+
@torch.library.impl(meta_lib, "min")
def meta_min(self):
    """Meta kernel for aten::min (full reduction): result is a 0-d tensor."""
    return self.new_empty(())
|
| 51 |
+
|
| 52 |
+
def squareCheckInputs(self, f_name):
    """Assert that *self* is at least 2-D and its last two dims are equal
    (i.e. a batch of square matrices); *f_name* labels error messages."""
    assert self.dim() >= 2, f"{f_name}: The input tensor must have at least 2 dimensions."
    # TODO: I think the error message has the -2 and -1 swapped. If you fix
    # it fix the C++ squareCheckInputs too
    assert self.size(-1) == self.size(-2), \
        f"{f_name}: A must be batches of square matrices, but they are {self.size(-1)} by {self.size(-2)} matrices"
|
| 58 |
+
|
| 59 |
+
def checkUplo(uplo: str):
    """Validate that *uplo* is a single 'U'/'u'/'L'/'l' character.

    Bug fix: the original condition ``len(uplo) == 1 and X == 'U' or
    X == 'L'`` parsed as ``(len == 1 and X == 'U') or (X == 'L')``, so
    multi-character strings like "LL" slipped through; parenthesized so
    the length check always applies.
    """
    uplo_uppercase = uplo.upper()
    assert len(uplo) == 1 and (uplo_uppercase == 'U' or uplo_uppercase == 'L'), \
        f"Expected UPLO argument to be 'L' or 'U', but got {uplo}"
|
| 63 |
+
|
| 64 |
+
@torch.library.impl(meta_lib, "linalg_eigh")
def meta_linalg_eigh(self, uplo="L"):
    """Shape/dtype inference for aten::linalg_eigh on (batched) square input."""
    squareCheckInputs(self, "linalg_eigh")
    checkUplo(uplo)
    real_dtype = toRealValueType(self.dtype)
    assert self.dim() >= 2
    # NOTE(review): `values` is allocated with the full matrix shape
    # (then transposed) while `vectors` gets shape[:-1]; linalg.eigh
    # semantics suggest these are swapped (eigenvalues should have
    # shape[:-1], eigenvectors the full shape) — confirm against upstream
    # before relying on these meta shapes.
    values = self.new_empty(self.shape, dtype=real_dtype)
    values.transpose_(-2, -1)
    vectors = self.new_empty(self.shape[:-1])
    return (values, vectors)
|
| 74 |
+
|
| 75 |
+
@torch.library.impl(meta_lib, "reflection_pad2d")
def meta_pad2d(self, padding):
    """Shape inference for reflection_pad2d: the last two (H, W) dims grow
    by the top/bottom and left/right padding; 3-D and 4-D inputs allowed."""
    valid_dims = self.size(1) != 0 and self.size(2) != 0
    check(
        (self.ndim == 3 and valid_dims)
        or (self.ndim == 4 and valid_dims and self.size(3) != 0),
        f"3D or 4D (batch mode) tensor expected for input, but got: {self}"
    )
    pad_l, pad_r, pad_t, pad_b = padding
    if self.ndim == 4:
        nbatch, nplane, input_h, input_w = self.shape
        out_shape = (nbatch, nplane, input_h + pad_t + pad_b, input_w + pad_l + pad_r)
    else:
        nplane, input_h, input_w = self.shape
        out_shape = (nplane, input_h + pad_t + pad_b, input_w + pad_l + pad_r)
    return self.new_empty(out_shape)
|
| 98 |
+
|
| 99 |
+
@torch.library.impl(meta_lib, "dot")
def meta_dot(self, tensor):
    """Meta kernel for aten::dot: both operands must be 1-D; the result is
    a 0-d tensor."""
    check(
        self.dim() == 1 and tensor.dim() == 1,
        f"1D tensors expected, but got {self.dim()}D and {tensor.dim()}D tensors"
    )
    return self.new_empty(())
|
| 106 |
+
|
| 107 |
+
@torch.library.impl(meta_lib, "var_mean.correction")
def meta_var_mean_correction(self, dim, *, correction, keepdim=False):
    """Shape/dtype inference for aten::var_mean.correction.

    Returns (variance, mean) meta tensors with the reduced shape; for
    complex inputs the variance is real-valued (via toRealValueType).
    `correction` affects values only, not shapes, so it is unused here.
    """
    # utils.reduction_dims canonicalizes `dim` into a collection of axes —
    # presumably wrapping negatives; TODO confirm against torch._prims.utils.
    dim = utils.reduction_dims(self.shape, dim)
    if keepdim:
        # Reduced axes collapse to size 1 instead of disappearing.
        output_shape = tuple(self.shape[i] if i not in dim else 1 for i in range(self.ndim))
    else:
        output_shape = utils.compute_reduction_output_shape(self.shape, dim)
    result1 = self.new_empty(output_shape, dtype=toRealValueType(self.dtype))
    result2 = self.new_empty(output_shape)
    return result1, result2
|
| 117 |
+
|
| 118 |
+
@torch.library.impl(meta_lib, "inverse")
def meta_inverse(self):
    """Meta kernel for aten::inverse; the result buffer is allocated
    transposed (column-major strides, matching the LAPACK-backed op)."""
    # Bug: https://github.com/pytorch/pytorch/issues/77498
    if self.numel() == 0:
        return torch.empty_like(self)
    result = self.new_empty(self.shape)
    result.transpose_(-2, -1)
    return result
|
| 126 |
+
|
| 127 |
+
@torch.library.impl(meta_lib, "bernoulli.out")
def meta_bernoulli(self, *, generator=None, out):
    """Meta kernel for bernoulli.out: resize *out* to self's shape; no
    values are materialized on the meta device (generator is unused)."""
    torch._resize_output_(out, self.size(), self.device)
    return out
|
| 131 |
+
|
| 132 |
+
@torch.library.impl(meta_lib, "_adaptive_avg_pool2d")
def meta_adaptive_avg_pool2d(self, output_size):
    """Shape inference: the trailing two spatial dims become *output_size*."""
    check(self.ndim == 3 or self.ndim == 4, f"Expected 3D or 4D tensor, but got {self.shape}")
    return self.new_empty(self.shape[:-2] + tuple(output_size))
|
| 136 |
+
|
| 137 |
+
@torch.library.impl(meta_lib, "_adaptive_avg_pool3d")
def meta_adaptive_avg_pool3d(self, output_size):
    """Shape inference: the trailing three spatial dims become *output_size*."""
    check(self.ndim == 4 or self.ndim == 5, f"Expected 4D or 5D tensor, but got {self.shape}")
    return self.new_empty(self.shape[:-3] + tuple(output_size))
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_namedtensor_internals.py
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from collections import OrderedDict
|
| 2 |
+
|
| 3 |
+
"""
|
| 4 |
+
This file contains helper functions that implement experimental functionality
|
| 5 |
+
for named tensors in python. All of these are experimental, unstable, and
|
| 6 |
+
subject to change or deletion.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def check_serializing_named_tensor(tensor):
    """Raise RuntimeError if *tensor* carries dimension names, since named
    tensors do not support serialization yet."""
    if not tensor.has_names():
        return
    raise RuntimeError(
        "NYI: Named tensors don't support serialization. Please drop "
        "names via `tensor = tensor.rename(None)` before serialization.")
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def build_dim_map(tensor):
    """Return an OrderedDict mapping each dimension to its name: named
    dims are keyed by their name, unnamed dims by their index."""
    result = OrderedDict()
    for idx, name in enumerate(tensor.names):
        result[idx if name is None else name] = name
    return result
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def unzip_namedshape(namedshape):
    """Split a named shape — an OrderedDict or an iterable of
    (name, size) pairs — into two parallel iterables of names and sizes.

    Raises RuntimeError for non-iterable or empty input.
    """
    if isinstance(namedshape, OrderedDict):
        namedshape = namedshape.items()
    if not hasattr(namedshape, '__iter__') and not isinstance(namedshape, tuple):
        raise RuntimeError(
            'Expected namedshape to be OrderedDict or iterable of tuples, got: {}'
            .format(type(namedshape)))
    if len(namedshape) == 0:
        raise RuntimeError('Expected namedshape to non-empty.')
    return zip(*namedshape)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def namer_api_name(inplace):
    """Return the user-facing API name: 'rename_' when in-place, else
    'rename' (used in error messages)."""
    return 'rename_' if inplace else 'rename'
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def is_ellipsis(item):
    """True if `item` denotes an ellipsis: the Ellipsis object or '...'."""
    return item in (Ellipsis, '...')
|
| 45 |
+
|
| 46 |
+
def single_ellipsis_index(names, fn_name):
    """Return the index of the lone ellipsis in `names`, or None if there is
    no ellipsis.

    Raises:
        RuntimeError: if `names` contains more than one ellipsis; `fn_name`
            is used to attribute the error to the calling API.
    """
    found = [idx for idx, name in enumerate(names) if is_ellipsis(name)]
    if len(found) >= 2:
        raise RuntimeError('{}: More than one Ellipsis (\'...\') found in names ('
                           '{}). This function supports up to one Ellipsis.'
                           .format(fn_name, names))
    return found[0] if found else None
|
| 55 |
+
|
| 56 |
+
def expand_single_ellipsis(numel_pre_glob, numel_post_glob, names):
    """Return the span of `names` an ellipsis covers: everything except the
    first `numel_pre_glob` and the last `numel_post_glob` entries."""
    stop = len(names) - numel_post_glob
    return names[numel_pre_glob:stop]
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def replace_ellipsis_by_position(ellipsis_idx, names, tensor_names):
    """Substitute the ellipsis at `ellipsis_idx` in `names` with the
    positionally corresponding span of `tensor_names`."""
    num_after_glob = len(names) - ellipsis_idx - 1
    globbed = expand_single_ellipsis(ellipsis_idx, num_after_glob, tensor_names)
    return names[:ellipsis_idx] + globbed + names[ellipsis_idx + 1:]
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def resolve_ellipsis(names, tensor_names, fn_name):
    """Expand a single '...' inside `names` to the matching run of names from
    `tensor_names`; return `names` unchanged when no ellipsis is present."""
    idx = single_ellipsis_index(names, fn_name)
    if idx is None:
        return names
    return replace_ellipsis_by_position(idx, names, tensor_names)
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def update_names_with_list(tensor, names, inplace):
    """Apply positional `names` (which may contain one '...') to `tensor`,
    delegating to tensor._update_names."""
    # tensor.rename(None) is the documented way to drop all names.
    if len(names) == 1 and names[0] is None:
        return tensor._update_names(None, inplace)

    resolved = resolve_ellipsis(names, tensor.names, namer_api_name(inplace))
    return tensor._update_names(resolved, inplace)
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def update_names_with_mapping(tensor, rename_map, inplace):
    """Rename dims of `tensor` per the {old_name: new_name} pairs in
    `rename_map`, delegating to tensor._update_names.

    Raises:
        RuntimeError: if an old name does not exist on `tensor`.
    """
    dim_map = build_dim_map(tensor)
    for old_dim, new_dim in rename_map.items():
        if old_dim not in dim_map:
            raise RuntimeError(('{api_name}: Tried to rename dim \'{old_dim}\' to dim '
                                '{new_dim} in Tensor[{dims}] but dim \'{old_dim}\' does not exist')
                               .format(old_dim=old_dim, new_dim=new_dim, dims=tensor.names,
                                       api_name=namer_api_name(inplace)))
        dim_map[old_dim] = new_dim
    return tensor._update_names(tuple(dim_map.values()), inplace)
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
def update_names(tensor, names, rename_map, inplace):
    """Dispatch for tensor.rename / tensor.rename_. There are two usages:

    tensor.rename(*names) returns a view on tensor with named dims `names`.
    `names` must be of length `tensor.dim()`; otherwise, if '...' is in `names`,
    then it is expanded greedily to be equal to the corresponding names from
    `tensor.names`.

    For example,
    ```
    >>> x = torch.empty(2, 3, 5, 7, names=('N', 'C', 'H', 'W'))
    >>> x.rename('...', 'height', 'width').names
    ('N', 'C', 'height', 'width')

    >>> x.rename('batch', '...', 'width').names
    ('batch', 'C', 'H', 'width')
    ```

    tensor.rename(**rename_map) returns a view on tensor that has rename dims
    as specified in the mapping `rename_map`.

    For example,
    ```
    >>> x = torch.empty(2, 3, 5, 7, names=('N', 'C', 'H', 'W'))
    >>> x.rename(W='width', H='height').names
    ('N', 'C', 'height', 'width')
    ```

    Finally, tensor.rename has an in-place version called tensor.rename_.
    """
    has_names = len(names) > 0
    has_rename_pairs = bool(rename_map)
    if has_names and has_rename_pairs:
        raise RuntimeError('{api_name}: This function takes either positional '
                           'args or keyword args, but not both. Use tensor.{api_name}(*names) '
                           'to name dims and tensor.{api_name}(**rename_map) to rename '
                           'dims.'.format(api_name=namer_api_name(inplace)))

    if has_rename_pairs:
        return update_names_with_mapping(tensor, rename_map, inplace)
    # Handles both positional names and the no-argument call
    # tensor.rename(*[]), which is valid for a 0-dim tensor.
    return update_names_with_list(tensor, names, inplace)
|