ZTWHHH committed on
Commit
286ba09
·
verified ·
1 Parent(s): 69bcdfb

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. deepseek/lib/python3.10/site-packages/distro/__init__.py +54 -0
  3. deepseek/lib/python3.10/site-packages/distro/__main__.py +4 -0
  4. deepseek/lib/python3.10/site-packages/distro/__pycache__/__init__.cpython-310.pyc +0 -0
  5. deepseek/lib/python3.10/site-packages/distro/__pycache__/__main__.cpython-310.pyc +0 -0
  6. deepseek/lib/python3.10/site-packages/distro/__pycache__/distro.cpython-310.pyc +0 -0
  7. deepseek/lib/python3.10/site-packages/distro/distro.py +1403 -0
  8. deepseek/lib/python3.10/site-packages/distro/py.typed +0 -0
  9. deepseek/lib/python3.10/site-packages/importlib_metadata/__init__.py +1132 -0
  10. deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/__init__.cpython-310.pyc +0 -0
  11. deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_adapters.cpython-310.pyc +0 -0
  12. deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_collections.cpython-310.pyc +0 -0
  13. deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_compat.cpython-310.pyc +0 -0
  14. deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_functools.cpython-310.pyc +0 -0
  15. deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_itertools.cpython-310.pyc +0 -0
  16. deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_meta.cpython-310.pyc +0 -0
  17. deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_text.cpython-310.pyc +0 -0
  18. deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/diagnose.cpython-310.pyc +0 -0
  19. deepseek/lib/python3.10/site-packages/importlib_metadata/_adapters.py +83 -0
  20. deepseek/lib/python3.10/site-packages/importlib_metadata/_collections.py +30 -0
  21. deepseek/lib/python3.10/site-packages/importlib_metadata/_compat.py +56 -0
  22. deepseek/lib/python3.10/site-packages/importlib_metadata/_functools.py +104 -0
  23. deepseek/lib/python3.10/site-packages/importlib_metadata/_itertools.py +171 -0
  24. deepseek/lib/python3.10/site-packages/importlib_metadata/_meta.py +75 -0
  25. deepseek/lib/python3.10/site-packages/importlib_metadata/_text.py +99 -0
  26. deepseek/lib/python3.10/site-packages/importlib_metadata/compat/__init__.py +0 -0
  27. deepseek/lib/python3.10/site-packages/importlib_metadata/compat/__pycache__/__init__.cpython-310.pyc +0 -0
  28. deepseek/lib/python3.10/site-packages/importlib_metadata/compat/__pycache__/py311.cpython-310.pyc +0 -0
  29. deepseek/lib/python3.10/site-packages/importlib_metadata/compat/__pycache__/py39.cpython-310.pyc +0 -0
  30. deepseek/lib/python3.10/site-packages/importlib_metadata/compat/py39.py +36 -0
  31. deepseek/lib/python3.10/site-packages/importlib_metadata/diagnose.py +21 -0
  32. deepseek/lib/python3.10/site-packages/importlib_metadata/py.typed +0 -0
  33. deepseek/lib/python3.10/site-packages/mistral_common/protocol/instruct/__pycache__/validator.cpython-310.pyc +0 -0
  34. deepseek/lib/python3.10/site-packages/nvidia_cudnn_cu12-9.1.0.70.dist-info/INSTALLER +1 -0
  35. deepseek/lib/python3.10/site-packages/nvidia_cudnn_cu12-9.1.0.70.dist-info/License.txt +154 -0
  36. deepseek/lib/python3.10/site-packages/nvidia_cudnn_cu12-9.1.0.70.dist-info/METADATA +36 -0
  37. deepseek/lib/python3.10/site-packages/nvidia_cudnn_cu12-9.1.0.70.dist-info/REQUESTED +0 -0
  38. deepseek/lib/python3.10/site-packages/nvidia_cudnn_cu12-9.1.0.70.dist-info/WHEEL +5 -0
  39. deepseek/lib/python3.10/site-packages/websockets/__init__.py +172 -0
  40. deepseek/lib/python3.10/site-packages/websockets/__main__.py +159 -0
  41. deepseek/lib/python3.10/site-packages/websockets/auth.py +18 -0
  42. deepseek/lib/python3.10/site-packages/websockets/client.py +400 -0
  43. deepseek/lib/python3.10/site-packages/websockets/connection.py +12 -0
  44. deepseek/lib/python3.10/site-packages/websockets/datastructures.py +183 -0
  45. deepseek/lib/python3.10/site-packages/websockets/exceptions.py +418 -0
  46. deepseek/lib/python3.10/site-packages/websockets/frames.py +429 -0
  47. deepseek/lib/python3.10/site-packages/websockets/headers.py +580 -0
  48. deepseek/lib/python3.10/site-packages/websockets/http.py +20 -0
  49. deepseek/lib/python3.10/site-packages/websockets/http11.py +386 -0
  50. deepseek/lib/python3.10/site-packages/websockets/imports.py +100 -0
.gitattributes CHANGED
@@ -672,3 +672,4 @@ deepseekvl2/lib/python3.10/site-packages/pillow.libs/libbrotlicommon-5b2eba61.so
672
  deepseekvl2/lib/python3.10/site-packages/pillow.libs/libopenjp2-ca16f087.so.2.5.3 filter=lfs diff=lfs merge=lfs -text
673
  deepseek/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
674
  deepseekvl2/lib/python3.10/site-packages/triton/third_party/cuda/lib/libdevice.10.bc filter=lfs diff=lfs merge=lfs -text
 
 
672
  deepseekvl2/lib/python3.10/site-packages/pillow.libs/libopenjp2-ca16f087.so.2.5.3 filter=lfs diff=lfs merge=lfs -text
673
  deepseek/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
674
  deepseekvl2/lib/python3.10/site-packages/triton/third_party/cuda/lib/libdevice.10.bc filter=lfs diff=lfs merge=lfs -text
675
+ evalkit_tf437/lib/python3.10/site-packages/scipy/optimize/_highs/_highs_wrapper.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
deepseek/lib/python3.10/site-packages/distro/__init__.py ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .distro import (
2
+ NORMALIZED_DISTRO_ID,
3
+ NORMALIZED_LSB_ID,
4
+ NORMALIZED_OS_ID,
5
+ LinuxDistribution,
6
+ __version__,
7
+ build_number,
8
+ codename,
9
+ distro_release_attr,
10
+ distro_release_info,
11
+ id,
12
+ info,
13
+ like,
14
+ linux_distribution,
15
+ lsb_release_attr,
16
+ lsb_release_info,
17
+ major_version,
18
+ minor_version,
19
+ name,
20
+ os_release_attr,
21
+ os_release_info,
22
+ uname_attr,
23
+ uname_info,
24
+ version,
25
+ version_parts,
26
+ )
27
+
28
+ __all__ = [
29
+ "NORMALIZED_DISTRO_ID",
30
+ "NORMALIZED_LSB_ID",
31
+ "NORMALIZED_OS_ID",
32
+ "LinuxDistribution",
33
+ "build_number",
34
+ "codename",
35
+ "distro_release_attr",
36
+ "distro_release_info",
37
+ "id",
38
+ "info",
39
+ "like",
40
+ "linux_distribution",
41
+ "lsb_release_attr",
42
+ "lsb_release_info",
43
+ "major_version",
44
+ "minor_version",
45
+ "name",
46
+ "os_release_attr",
47
+ "os_release_info",
48
+ "uname_attr",
49
+ "uname_info",
50
+ "version",
51
+ "version_parts",
52
+ ]
53
+
54
+ __version__ = __version__
deepseek/lib/python3.10/site-packages/distro/__main__.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
"""Entry point so ``python -m distro`` prints distribution information."""
from .distro import main

if __name__ == "__main__":
    main()
deepseek/lib/python3.10/site-packages/distro/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (885 Bytes). View file
 
deepseek/lib/python3.10/site-packages/distro/__pycache__/__main__.cpython-310.pyc ADDED
Binary file (239 Bytes). View file
 
deepseek/lib/python3.10/site-packages/distro/__pycache__/distro.cpython-310.pyc ADDED
Binary file (42.1 kB). View file
 
deepseek/lib/python3.10/site-packages/distro/distro.py ADDED
@@ -0,0 +1,1403 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env python
# Copyright 2015-2021 Nir Cohen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
The ``distro`` package (``distro`` stands for Linux Distribution) provides
information about the Linux distribution it runs on, such as a reliable
machine-readable distro ID, or version information.

It is the recommended replacement for Python's original
:py:func:`platform.linux_distribution` function, but it provides much more
functionality. An alternative implementation became necessary because Python
3.5 deprecated this function, and Python 3.8 removed it altogether. Its
predecessor function :py:func:`platform.dist` was already deprecated since
Python 2.6 and removed in Python 3.8. Still, there are many cases in which
access to OS distribution information is needed. See `Python issue 1322
<https://bugs.python.org/issue1322>`_ for more information.
"""

import argparse
import json
import logging
import os
import re
import shlex
import subprocess
import sys
import warnings
from typing import (
    Any,
    Callable,
    Dict,
    Iterable,
    Optional,
    Sequence,
    TextIO,
    Tuple,
    Type,
)

try:
    # typing.TypedDict exists from Python 3.8 onward.
    from typing import TypedDict
except ImportError:
    # Python 3.7 fallback: a plain dict provides no static checking but
    # behaves identically at runtime.
    TypedDict = dict

__version__ = "1.9.0"
59
+
60
+
61
class VersionDict(TypedDict):
    # Dot-separated version string split into its three positional parts.
    major: str
    minor: str
    build_number: str


class InfoDict(TypedDict):
    # Shape of the dictionary returned by distro.info().
    id: str
    version: str
    version_parts: VersionDict
    like: str
    codename: str


# Lookup roots; overridable via environment for testing.
_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc")
_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib")
_OS_RELEASE_BASENAME = "os-release"

#: Normalization table for the "ID" attribute of os-release files, used by
#: :func:`distro.id`. Keys are the os-release values lower-cased with blanks
#: turned into underscores; values are the normalized IDs.
NORMALIZED_OS_ID = {
    "ol": "oracle",  # Oracle Linux
    "opensuse-leap": "opensuse",  # Newer versions of OpenSuSE report as opensuse-leap
}

#: Normalization table for the "Distributor ID" attribute reported by the
#: lsb_release command, used by :func:`distro.id`. Keys are lower-cased with
#: blanks turned into underscores; values are the normalized IDs.
NORMALIZED_LSB_ID = {
    "enterpriseenterpriseas": "oracle",  # Oracle Enterprise Linux 4
    "enterpriseenterpriseserver": "oracle",  # Oracle Linux 5
    "redhatenterpriseworkstation": "rhel",  # RHEL 6, 7 Workstation
    "redhatenterpriseserver": "rhel",  # RHEL 6, 7 Server
    "redhatenterprisecomputenode": "rhel",  # RHEL 6 ComputeNode
}

#: Normalization table for the distro ID derived from a distro release file
#: name, used by :func:`distro.id`. Keys are lower-cased with blanks turned
#: into underscores; values are the normalized IDs.
NORMALIZED_DISTRO_ID = {
    "redhat": "rhel",  # RHEL 6.x, 7.x
}

# Pattern matched against the *reversed* first line of a distro release file
# (reversing lets the regex anchor on the trailing codename/version).
_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
    r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)"
)

# Pattern for the base file name of a distro release file.
_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")

# Base file names probed when _UNIXCONFDIR cannot be listed; order matters
# (first match wins).
_DISTRO_RELEASE_BASENAMES = [
    "SuSE-release",
    "altlinux-release",
    "arch-release",
    "base-release",
    "centos-release",
    "fedora-release",
    "gentoo-release",
    "mageia-release",
    "mandrake-release",
    "mandriva-release",
    "mandrivalinux-release",
    "manjaro-release",
    "oracle-release",
    "redhat-release",
    "rocky-release",
    "sl-release",
    "slackware-version",
]

# Base file names that look like distro release files but must be skipped
# while searching for one.
_DISTRO_RELEASE_IGNORE_BASENAMES = (
    "debian_version",
    "lsb-release",
    "oem-release",
    _OS_RELEASE_BASENAME,
    "system-release",
    "plesk-release",
    "iredmail-release",
    "board-release",
    "ec2_version",
)
158
+
159
+
160
def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]:
    """Return ``(id_name, version, codename)`` for the current distribution.

    .. deprecated:: 1.6.0
        Only a compatibility shim for Python's removed
        :py:func:`platform.linux_distribution`. Prefer :func:`distro.id`,
        :func:`distro.version` and :func:`distro.name`.

    ``id_name`` is :func:`distro.name` when *full_distribution_name* is true,
    otherwise :func:`distro.id`; ``version`` and ``codename`` come from the
    functions of the same name. Results may differ from the original stdlib
    function because more data sources are consulted and the distro ID is
    normalized to a reliable machine-readable value.
    """
    # Always emit the deprecation warning, attributed to the caller.
    warnings.warn(
        "distro.linux_distribution() is deprecated. It should only be used as a "
        "compatibility shim with Python's platform.linux_distribution(). Please use "
        "distro.id(), distro.version() and distro.name() instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return _distro.linux_distribution(full_distribution_name)
201
+
202
+
203
def id() -> str:
    """Return the distro ID of the current distribution as a
    machine-readable string.

    For many popular distributions the returned ID is *reliable* — documented
    and stable across releases — e.g. "ubuntu", "debian", "rhel", "centos",
    "fedora", "sles", "opensuse", "amzn", "arch", "buildroot", "cloudlinux",
    "exherbo", "gentoo", "ibm_powerkvm", "kvmibm", "linuxmint", "mageia",
    "mandriva", "parallels", "pidora", "raspbian", "oracle", "scientific",
    "slackware", "xenserver", "openbsd", "netbsd", "freebsd", "midnightbsd",
    "rocky", "aix", "guix", "altlinux". To get further distros added to this
    set (or to report a mismatch), file an issue in the distro issue tracker.

    Lookup order — the first available, non-empty value wins:

    * the "ID" attribute of the os-release file,
    * the "Distributor ID" attribute reported by the lsb_release command,
    * the first part of the distro release file's name.

    The value is then lower-cased, blanks are replaced by underscores, and it
    is normalized via the ``NORMALIZED_*`` translation tables so the ID stays
    stable even across incompatible changes in a distribution's metadata
    (e.g. a newly added os-release file that disagrees with the release file
    name used before).
    """
    return _distro.id()
285
+
286
+
287
def name(pretty: bool = False) -> str:
    """Return the human-readable name of the current distribution.

    With *pretty* false just the bare name is returned (e.g. "CentOS Linux"),
    taken from the first non-empty of: the os-release "NAME" attribute, the
    lsb_release "Distributor ID" attribute, the distro release file's name
    field.

    With *pretty* true the version and codename are appended (e.g.
    "CentOS Linux 7.1.1503 (Core)"), taken from the first non-empty of: the
    os-release "PRETTY_NAME" attribute, the lsb_release "Description"
    attribute, the distro release file's name field plus its pretty version
    (version-id and codename fields) when available.
    """
    return _distro.name(pretty)
324
+
325
+
326
def version(pretty: bool = False, best: bool = False) -> str:
    """Return the version of the current distribution as a string.

    With *pretty* false the bare version is returned (e.g. "7.0"); with
    *pretty* true a non-empty codename is appended in parentheses (e.g.
    "7.0 (Maipo)"). Distros without version data (e.g. rolling releases such
    as Arch Linux) yield the empty string.

    Sources, which with *best* false are tried in this priority order:

    * the "VERSION_ID" attribute of the os-release file,
    * the "Release" attribute reported by the lsb_release command,
    * the version parsed from the first line of the distro release file,
    * the version parsed from the os-release "PRETTY_NAME" attribute, when it
      follows the distro release file format,
    * the version parsed from the lsb_release "Description" attribute, when
      it follows the distro release file format.

    With *best* true the most *precise* version found across all of the
    sources above is returned instead of the first non-empty one — different
    sources can carry different precision (e.g. Debian 8.2, CentOS 7.1).
    """
    return _distro.version(pretty, best)
372
+
373
+
374
def version_parts(best: bool = False) -> Tuple[str, str, str]:
    """Return the version as a ``(major, minor, build_number)`` tuple.

    The items are the results of :func:`distro.major_version`,
    :func:`distro.minor_version` and :func:`distro.build_number`
    respectively. See :func:`distro.version` for the *best* parameter.
    """
    return _distro.version_parts(best)
389
+
390
+
391
def major_version(best: bool = False) -> str:
    """Return the major version — the first part of the dot-separated
    version string — or the empty string when not provided.

    See :func:`distro.version` for the *best* parameter.
    """
    return _distro.major_version(best)
402
+
403
+
404
def minor_version(best: bool = False) -> str:
    """Return the minor version — the second part of the dot-separated
    version string — or the empty string when not provided.

    See :func:`distro.version` for the *best* parameter.
    """
    return _distro.minor_version(best)
415
+
416
+
417
def build_number(best: bool = False) -> str:
    """Return the build number — the third part of the dot-separated
    version string — or the empty string when not provided.

    See :func:`distro.version` for the *best* parameter.
    """
    return _distro.build_number(best)
428
+
429
+
430
def like() -> str:
    """Return a space-separated list of distro IDs that the current
    distribution is closely related to (e.g. its parent distributions).

    This item comes only from the "ID_LIKE" attribute of the os-release
    file; see the os-release man page
    (<http://www.freedesktop.org/software/systemd/man/os-release.html>)
    for details.
    """
    return _distro.like()
445
+
446
+
447
def codename() -> str:
    """Return the release codename, or the empty string if there is none.

    Sources, in priority order: the codename inside the "VERSION" attribute
    of the os-release file, the "Codename" attribute reported by the
    lsb_release command, the codename field of the distro release file.

    The value is returned as found without interpretation — some distros put
    other data here (openSUSE, for example, reports "x86_64").
    """
    return _distro.codename()
469
+
470
+
471
def info(pretty: bool = False, best: bool = False) -> InfoDict:
    """Return machine-readable facts about the current distribution.

    The dictionary structure and keys are fixed regardless of which data
    sources are available::

        {
            'id': ...,                 # distro.id()
            'version': ...,            # distro.version()
            'version_parts': {
                'major': ...,          # distro.major_version()
                'minor': ...,          # distro.minor_version()
                'build_number': ...,   # distro.build_number()
            },
            'like': ...,               # distro.like()
            'codename': ...,           # distro.codename()
        }

    See :func:`distro.version` for the *pretty* and *best* parameters.
    """
    return _distro.info(pretty, best)
513
+
514
+
515
def os_release_info() -> Dict[str, str]:
    """Return the key-value pairs read from the os-release file data source
    of the current distribution. See the os-release documentation for the
    meaning of the individual items.
    """
    return _distro.os_release_info()
523
+
524
+
525
def lsb_release_info() -> Dict[str, str]:
    """Return the key-value pairs parsed from the lsb_release command output
    data source of the current distribution. See the lsb_release
    documentation for the meaning of the individual items.
    """
    return _distro.lsb_release_info()
534
+
535
+
536
def distro_release_info() -> Dict[str, str]:
    """Return the key-value pairs parsed from the distro release file data
    source of the current distribution.
    """
    return _distro.distro_release_info()
544
+
545
+
546
def uname_info() -> Dict[str, str]:
    """
    Return a dictionary containing key-value pairs for the information items
    from the uname command data source of the current OS distribution.
    """
    # NOTE: the upstream docstring said "distro release file" here — a
    # copy-paste from distro_release_info(); this delegates to the uname
    # data source.
    return _distro.uname_info()
552
+
553
+
554
def os_release_attr(attribute: str) -> str:
    """Return one item from the os-release file data source.

    *attribute* is the item's key. The item's value is returned, or the
    empty string when the item does not exist.
    """
    return _distro.os_release_attr(attribute)
571
+
572
+
573
def lsb_release_attr(attribute: str) -> str:
    """Return one item from the lsb_release command output data source.

    *attribute* is the item's key. The item's value is returned, or the
    empty string when the item does not exist.
    """
    return _distro.lsb_release_attr(attribute)
591
+
592
+
593
def distro_release_attr(attribute: str) -> str:
    """Return one item from the distro release file data source.

    *attribute* is the item's key. The item's value is returned, or the
    empty string when the item does not exist.
    """
    return _distro.distro_release_attr(attribute)
610
+
611
+
612
def uname_attr(attribute: str) -> str:
    """
    Return a single named information item from the uname command output
    data source of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
        The empty string, if the item does not exist.
    """
    # NOTE: the upstream docstring said "distro release file" here — a
    # copy-paste from distro_release_attr(); this delegates to the uname
    # data source.
    return _distro.uname_attr(attribute)
627
+
628
+
629
try:
    from functools import cached_property
except ImportError:
    # Fallback for Python < 3.8, which lacks functools.cached_property.
    class cached_property:  # type: ignore
        """A version of @property which caches the value. On access, it calls the
        underlying function and sets the value in `__dict__` so future accesses
        will not re-call the property.
        """

        def __init__(self, f: Callable[[Any], Any]) -> None:
            # Remember the wrapped callable and its name; the name doubles
            # as the instance-dict key used for caching.
            self._name = f.__name__
            self._wrapped = f

        def __get__(self, obj: Any, owner: Type[Any]) -> Any:
            assert obj is not None, f"call {self._name} on an instance"
            # Storing the result in the instance __dict__ under the same
            # name shadows this (non-data) descriptor on later lookups.
            value = self._wrapped(obj)
            obj.__dict__[self._name] = value
            return value
647
+
648
+
649
class LinuxDistribution:
    """
    Provides information about a OS distribution.

    This package creates a private module-global instance of this class with
    default initialization arguments, that is used by the
    `consolidated accessor functions`_ and `single source accessor functions`_.
    By using default initialization arguments, that module-global instance
    returns data about the current OS distribution (i.e. the distro this
    package runs on).

    Normally, it is not necessary to create additional instances of this class.
    However, in situations where control is needed over the exact data sources
    that are used, instances of this class can be created with a specific
    distro release file, or a specific os-release file, or without invoking the
    lsb_release command.
    """

    def __init__(
        self,
        include_lsb: Optional[bool] = None,
        os_release_file: str = "",
        distro_release_file: str = "",
        include_uname: Optional[bool] = None,
        root_dir: Optional[str] = None,
        include_oslevel: Optional[bool] = None,
    ) -> None:
        """
        The initialization method of this class gathers information from the
        available data sources, and stores that in private instance attributes.
        Subsequent access to the information items uses these private instance
        attributes, so that the data sources are read only once.

        Parameters:

        * ``include_lsb`` (bool): Controls whether the
          `lsb_release command output`_ is included as a data source.

          If the lsb_release command is not available in the program execution
          path, the data source for the lsb_release command will be empty.

        * ``os_release_file`` (string): The path name of the
          `os-release file`_ that is to be used as a data source.

          An empty string (the default) will cause the default path name to
          be used (see `os-release file`_ for details).

          If the specified or defaulted os-release file does not exist, the
          data source for the os-release file will be empty.

        * ``distro_release_file`` (string): The path name of the
          `distro release file`_ that is to be used as a data source.

          An empty string (the default) will cause a default search algorithm
          to be used (see `distro release file`_ for details).

          If the specified distro release file does not exist, or if no default
          distro release file can be found, the data source for the distro
          release file will be empty.

        * ``include_uname`` (bool): Controls whether uname command output is
          included as a data source. If the uname command is not available in
          the program execution path the data source for the uname command will
          be empty.

        * ``root_dir`` (string): The absolute path to the root directory to use
          to find distro-related information files. Note that ``include_*``
          parameters must not be enabled in combination with ``root_dir``.

        * ``include_oslevel`` (bool): Controls whether (AIX) oslevel command
          output is included as a data source. If the oslevel command is not
          available in the program execution path the data source will be
          empty.

        Public instance attributes:

        * ``os_release_file`` (string): The path name of the
          `os-release file`_ that is actually used as a data source. The
          empty string if no distro release file is used as a data source.

        * ``distro_release_file`` (string): The path name of the
          `distro release file`_ that is actually used as a data source. The
          empty string if no distro release file is used as a data source.

        * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter.
          This controls whether the lsb information will be loaded.

        * ``include_uname`` (bool): The result of the ``include_uname``
          parameter. This controls whether the uname information will
          be loaded.

        * ``include_oslevel`` (bool): The result of the ``include_oslevel``
          parameter. This controls whether (AIX) oslevel information will be
          loaded.

        * ``root_dir`` (string): The result of the ``root_dir`` parameter.
          The absolute path to the root directory to use to find distro-related
          information files.

        Raises:

        * :py:exc:`ValueError`: Initialization parameters combination is not
          supported.

        * :py:exc:`OSError`: Some I/O issue with an os-release file or distro
          release file.

        * :py:exc:`UnicodeError`: A data source has unexpected characters or
          uses an unexpected encoding.
        """
        self.root_dir = root_dir
        self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR
        self.usr_lib_dir = (
            os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR
        )

        if os_release_file:
            self.os_release_file = os_release_file
        else:
            etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME)
            usr_lib_os_release_file = os.path.join(
                self.usr_lib_dir, _OS_RELEASE_BASENAME
            )

            # NOTE: The idea is to respect order **and** have it set
            # at all times for API backwards compatibility.
            if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile(
                usr_lib_os_release_file
            ):
                self.os_release_file = etc_dir_os_release_file
            else:
                self.os_release_file = usr_lib_os_release_file

        self.distro_release_file = distro_release_file or ""  # updated later

        # Subprocess data sources (lsb_release/uname/oslevel) describe the
        # running system, so they would be misleading under an alternate root.
        is_root_dir_defined = root_dir is not None
        if is_root_dir_defined and (include_lsb or include_uname or include_oslevel):
            raise ValueError(
                "Including subprocess data sources from specific root_dir is disallowed"
                " to prevent false information"
            )
        self.include_lsb = (
            include_lsb if include_lsb is not None else not is_root_dir_defined
        )
        self.include_uname = (
            include_uname if include_uname is not None else not is_root_dir_defined
        )
        self.include_oslevel = (
            include_oslevel if include_oslevel is not None else not is_root_dir_defined
        )

    def __repr__(self) -> str:
        """Return repr of all info"""
        return (
            "LinuxDistribution("
            "os_release_file={self.os_release_file!r}, "
            "distro_release_file={self.distro_release_file!r}, "
            "include_lsb={self.include_lsb!r}, "
            "include_uname={self.include_uname!r}, "
            "include_oslevel={self.include_oslevel!r}, "
            "root_dir={self.root_dir!r}, "
            "_os_release_info={self._os_release_info!r}, "
            "_lsb_release_info={self._lsb_release_info!r}, "
            "_distro_release_info={self._distro_release_info!r}, "
            "_uname_info={self._uname_info!r}, "
            "_oslevel_info={self._oslevel_info!r})".format(self=self)
        )

    def linux_distribution(
        self, full_distribution_name: bool = True
    ) -> Tuple[str, str, str]:
        """
        Return information about the OS distribution that is compatible
        with Python's :func:`platform.linux_distribution`, supporting a subset
        of its parameters.

        For details, see :func:`distro.linux_distribution`.
        """
        return (
            self.name() if full_distribution_name else self.id(),
            self.version(),
            self._os_release_info.get("release_codename") or self.codename(),
        )

    def id(self) -> str:
        """Return the distro ID of the OS distribution, as a string.

        For details, see :func:`distro.id`.
        """

        def normalize(distro_id: str, table: Dict[str, str]) -> str:
            # Lower-case/underscore the raw ID, then map known aliases
            # through the per-source normalization table.
            distro_id = distro_id.lower().replace(" ", "_")
            return table.get(distro_id, distro_id)

        # Data sources are consulted in decreasing order of reliability.
        distro_id = self.os_release_attr("id")
        if distro_id:
            return normalize(distro_id, NORMALIZED_OS_ID)

        distro_id = self.lsb_release_attr("distributor_id")
        if distro_id:
            return normalize(distro_id, NORMALIZED_LSB_ID)

        distro_id = self.distro_release_attr("id")
        if distro_id:
            return normalize(distro_id, NORMALIZED_DISTRO_ID)

        distro_id = self.uname_attr("id")
        if distro_id:
            return normalize(distro_id, NORMALIZED_DISTRO_ID)

        return ""

    def name(self, pretty: bool = False) -> str:
        """
        Return the name of the OS distribution, as a string.

        For details, see :func:`distro.name`.
        """
        name = (
            self.os_release_attr("name")
            or self.lsb_release_attr("distributor_id")
            or self.distro_release_attr("name")
            or self.uname_attr("name")
        )
        if pretty:
            name = self.os_release_attr("pretty_name") or self.lsb_release_attr(
                "description"
            )
            if not name:
                # No pre-formatted pretty name available; assemble one from
                # the plain name plus the pretty version.
                name = self.distro_release_attr("name") or self.uname_attr("name")
                version = self.version(pretty=True)
                if version:
                    name = f"{name} {version}"
        return name or ""

    def version(self, pretty: bool = False, best: bool = False) -> str:
        """
        Return the version of the OS distribution, as a string.

        For details, see :func:`distro.version`.
        """
        # Candidate versions, listed in decreasing priority order.
        versions = [
            self.os_release_attr("version_id"),
            self.lsb_release_attr("release"),
            self.distro_release_attr("version_id"),
            self._parse_distro_release_content(self.os_release_attr("pretty_name")).get(
                "version_id", ""
            ),
            self._parse_distro_release_content(
                self.lsb_release_attr("description")
            ).get("version_id", ""),
            self.uname_attr("release"),
        ]
        if self.uname_attr("id").startswith("aix"):
            # On AIX platforms, prefer oslevel command output.
            versions.insert(0, self.oslevel_info())
        elif self.id() == "debian" or "debian" in self.like().split():
            # On Debian-like, add debian_version file content to candidates list.
            versions.append(self._debian_version)
        version = ""
        if best:
            # This algorithm uses the last version in priority order that has
            # the best precision. If the versions are not in conflict, that
            # does not matter; otherwise, using the last one instead of the
            # first one might be considered a surprise.
            for v in versions:
                if v.count(".") > version.count(".") or version == "":
                    version = v
        else:
            # First non-empty candidate wins.
            for v in versions:
                if v != "":
                    version = v
                    break
        if pretty and version and self.codename():
            version = f"{version} ({self.codename()})"
        return version

    def version_parts(self, best: bool = False) -> Tuple[str, str, str]:
        """
        Return the version of the OS distribution, as a tuple of version
        numbers.

        For details, see :func:`distro.version_parts`.
        """
        version_str = self.version(best=best)
        if version_str:
            version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?")
            matches = version_regex.match(version_str)
            if matches:
                major, minor, build_number = matches.groups()
                return major, minor or "", build_number or ""
        return "", "", ""

    def major_version(self, best: bool = False) -> str:
        """
        Return the major version number of the current distribution.

        For details, see :func:`distro.major_version`.
        """
        return self.version_parts(best)[0]

    def minor_version(self, best: bool = False) -> str:
        """
        Return the minor version number of the current distribution.

        For details, see :func:`distro.minor_version`.
        """
        return self.version_parts(best)[1]

    def build_number(self, best: bool = False) -> str:
        """
        Return the build number of the current distribution.

        For details, see :func:`distro.build_number`.
        """
        return self.version_parts(best)[2]

    def like(self) -> str:
        """
        Return the IDs of distributions that are like the OS distribution.

        For details, see :func:`distro.like`.
        """
        return self.os_release_attr("id_like") or ""

    def codename(self) -> str:
        """
        Return the codename of the OS distribution.

        For details, see :func:`distro.codename`.
        """
        try:
            # Handle os_release specially since distros might purposefully set
            # this to empty string to have no codename
            return self._os_release_info["codename"]
        except KeyError:
            return (
                self.lsb_release_attr("codename")
                or self.distro_release_attr("codename")
                or ""
            )

    def info(self, pretty: bool = False, best: bool = False) -> InfoDict:
        """
        Return certain machine-readable information about the OS
        distribution.

        For details, see :func:`distro.info`.
        """
        return InfoDict(
            id=self.id(),
            version=self.version(pretty, best),
            version_parts=VersionDict(
                major=self.major_version(best),
                minor=self.minor_version(best),
                build_number=self.build_number(best),
            ),
            like=self.like(),
            codename=self.codename(),
        )

    def os_release_info(self) -> Dict[str, str]:
        """
        Return a dictionary containing key-value pairs for the information
        items from the os-release file data source of the OS distribution.

        For details, see :func:`distro.os_release_info`.
        """
        return self._os_release_info

    def lsb_release_info(self) -> Dict[str, str]:
        """
        Return a dictionary containing key-value pairs for the information
        items from the lsb_release command data source of the OS
        distribution.

        For details, see :func:`distro.lsb_release_info`.
        """
        return self._lsb_release_info

    def distro_release_info(self) -> Dict[str, str]:
        """
        Return a dictionary containing key-value pairs for the information
        items from the distro release file data source of the OS
        distribution.

        For details, see :func:`distro.distro_release_info`.
        """
        return self._distro_release_info

    def uname_info(self) -> Dict[str, str]:
        """
        Return a dictionary containing key-value pairs for the information
        items from the uname command data source of the OS distribution.

        For details, see :func:`distro.uname_info`.
        """
        return self._uname_info

    def oslevel_info(self) -> str:
        """
        Return AIX' oslevel command output.
        """
        return self._oslevel_info

    def os_release_attr(self, attribute: str) -> str:
        """
        Return a single named information item from the os-release file data
        source of the OS distribution.

        For details, see :func:`distro.os_release_attr`.
        """
        return self._os_release_info.get(attribute, "")

    def lsb_release_attr(self, attribute: str) -> str:
        """
        Return a single named information item from the lsb_release command
        output data source of the OS distribution.

        For details, see :func:`distro.lsb_release_attr`.
        """
        return self._lsb_release_info.get(attribute, "")

    def distro_release_attr(self, attribute: str) -> str:
        """
        Return a single named information item from the distro release file
        data source of the OS distribution.

        For details, see :func:`distro.distro_release_attr`.
        """
        return self._distro_release_info.get(attribute, "")

    def uname_attr(self, attribute: str) -> str:
        """
        Return a single named information item from the uname command
        output data source of the OS distribution.

        For details, see :func:`distro.uname_attr`.
        """
        return self._uname_info.get(attribute, "")

    @cached_property
    def _os_release_info(self) -> Dict[str, str]:
        """
        Get the information items from the specified os-release file.

        Returns:
            A dictionary containing all information items.
        """
        if os.path.isfile(self.os_release_file):
            with open(self.os_release_file, encoding="utf-8") as release_file:
                return self._parse_os_release_content(release_file)
        return {}

    @staticmethod
    def _parse_os_release_content(lines: TextIO) -> Dict[str, str]:
        """
        Parse the lines of an os-release file.

        Parameters:

        * lines: Iterable through the lines in the os-release file.
                 Each line must be a unicode string or a UTF-8 encoded byte
                 string.

        Returns:
            A dictionary containing all information items.
        """
        props = {}
        lexer = shlex.shlex(lines, posix=True)
        lexer.whitespace_split = True

        tokens = list(lexer)
        for token in tokens:
            # At this point, all shell-like parsing has been done (i.e.
            # comments processed, quotes and backslash escape sequences
            # processed, multi-line values assembled, trailing newlines
            # stripped, etc.), so the tokens are now either:
            # * variable assignments: var=value
            # * commands or their arguments (not allowed in os-release)
            # Ignore any tokens that are not variable assignments
            if "=" in token:
                k, v = token.split("=", 1)
                props[k.lower()] = v

        if "version" in props:
            # extract release codename (if any) from version attribute
            match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"])
            if match:
                release_codename = match.group(1) or match.group(2)
                props["codename"] = props["release_codename"] = release_codename

        if "version_codename" in props:
            # os-release added a version_codename field.  Use that in
            # preference to anything else Note that some distros purposefully
            # do not have code names.  They should be setting
            # version_codename=""
            props["codename"] = props["version_codename"]
        elif "ubuntu_codename" in props:
            # Same as above but a non-standard field name used on older Ubuntus
            props["codename"] = props["ubuntu_codename"]

        return props

    @cached_property
    def _lsb_release_info(self) -> Dict[str, str]:
        """
        Get the information items from the lsb_release command output.

        Returns:
            A dictionary containing all information items.
        """
        if not self.include_lsb:
            return {}
        try:
            cmd = ("lsb_release", "-a")
            stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
        # Command not found or lsb_release returned error
        except (OSError, subprocess.CalledProcessError):
            return {}
        content = self._to_str(stdout).splitlines()
        return self._parse_lsb_release_content(content)

    @staticmethod
    def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]:
        """
        Parse the output of the lsb_release command.

        Parameters:

        * lines: Iterable through the lines of the lsb_release output.
                 Each line must be a unicode string or a UTF-8 encoded byte
                 string.

        Returns:
            A dictionary containing all information items.
        """
        props = {}
        for line in lines:
            kv = line.strip("\n").split(":", 1)
            if len(kv) != 2:
                # Ignore lines without colon.
                continue
            k, v = kv
            props.update({k.replace(" ", "_").lower(): v.strip()})
        return props

    @cached_property
    def _uname_info(self) -> Dict[str, str]:
        # Kernel name and release, e.g. "Linux 5.15.0", parsed into items.
        if not self.include_uname:
            return {}
        try:
            cmd = ("uname", "-rs")
            stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
        except OSError:
            return {}
        content = self._to_str(stdout).splitlines()
        return self._parse_uname_content(content)

    @cached_property
    def _oslevel_info(self) -> str:
        # AIX-only data source; empty unless include_oslevel was enabled.
        if not self.include_oslevel:
            return ""
        try:
            stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL)
        except (OSError, subprocess.CalledProcessError):
            return ""
        return self._to_str(stdout).strip()

    @cached_property
    def _debian_version(self) -> str:
        # First line of /etc/debian_version, or "" when the file is absent.
        try:
            with open(
                os.path.join(self.etc_dir, "debian_version"), encoding="ascii"
            ) as fp:
                return fp.readline().rstrip()
        except FileNotFoundError:
            return ""

    @staticmethod
    def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]:
        # Expects the single output line of `uname -rs`: "<name> <release>".
        if not lines:
            return {}
        props = {}
        match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip())
        if match:
            name, version = match.groups()

            # This is to prevent the Linux kernel version from
            # appearing as the 'best' version on otherwise
            # identifiable distributions.
            if name == "Linux":
                return {}
            props["id"] = name.lower()
            props["name"] = name
            props["release"] = version
        return props

    @staticmethod
    def _to_str(bytestring: bytes) -> str:
        # Subprocess output is decoded with the filesystem encoding.
        encoding = sys.getfilesystemencoding()
        return bytestring.decode(encoding)

    @cached_property
    def _distro_release_info(self) -> Dict[str, str]:
        """
        Get the information items from the specified distro release file.

        Returns:
            A dictionary containing all information items.
        """
        if self.distro_release_file:
            # If it was specified, we use it and parse what we can, even if
            # its file name or content does not match the expected pattern.
            distro_info = self._parse_distro_release_file(self.distro_release_file)
            basename = os.path.basename(self.distro_release_file)
            # The file name pattern for user-specified distro release files
            # is somewhat more tolerant (compared to when searching for the
            # file), because we want to use what was specified as best as
            # possible.
            match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
        else:
            try:
                basenames = [
                    basename
                    for basename in os.listdir(self.etc_dir)
                    if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
                    and os.path.isfile(os.path.join(self.etc_dir, basename))
                ]
                # We sort for repeatability in cases where there are multiple
                # distro specific files; e.g. CentOS, Oracle, Enterprise all
                # containing `redhat-release` on top of their own.
                basenames.sort()
            except OSError:
                # This may occur when /etc is not readable but we can't be
                # sure about the *-release files. Check common entries of
                # /etc for information. If they turn out to not be there the
                # error is handled in `_parse_distro_release_file()`.
                basenames = _DISTRO_RELEASE_BASENAMES
            for basename in basenames:
                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
                if match is None:
                    continue
                filepath = os.path.join(self.etc_dir, basename)
                distro_info = self._parse_distro_release_file(filepath)
                # The name is always present if the pattern matches.
                if "name" not in distro_info:
                    continue
                self.distro_release_file = filepath
                break
            else:  # the loop didn't "break": no candidate.
                return {}

        if match is not None:
            distro_info["id"] = match.group(1)

        # CloudLinux < 7: manually enrich info with proper id.
        if "cloudlinux" in distro_info.get("name", "").lower():
            distro_info["id"] = "cloudlinux"

        return distro_info

    def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]:
        """
        Parse a distro release file.

        Parameters:

        * filepath: Path name of the distro release file.

        Returns:
            A dictionary containing all information items.
        """
        try:
            with open(filepath, encoding="utf-8") as fp:
                # Only parse the first line. For instance, on SLES there
                # are multiple lines. We don't want them...
                return self._parse_distro_release_content(fp.readline())
        except OSError:
            # Ignore not being able to read a specific, seemingly version
            # related file.
            # See https://github.com/python-distro/distro/issues/162
            return {}

    @staticmethod
    def _parse_distro_release_content(line: str) -> Dict[str, str]:
        """
        Parse a line from a distro release file.

        Parameters:
        * line: Line from the distro release file. Must be a unicode string
                or a UTF-8 encoded byte string.

        Returns:
            A dictionary containing all information items.
        """
        # The line is matched against a reversed-pattern regex on the
        # reversed line, and each captured group is reversed back below.
        matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1])
        distro_info = {}
        if matches:
            # regexp ensures non-None
            distro_info["name"] = matches.group(3)[::-1]
            if matches.group(2):
                distro_info["version_id"] = matches.group(2)[::-1]
            if matches.group(1):
                distro_info["codename"] = matches.group(1)[::-1]
        elif line:
            distro_info["name"] = line.strip()
        return distro_info
1357
+
1358
+
1359
# Module-global instance backing the module-level accessor functions.
_distro = LinuxDistribution()
1360
+
1361
+
1362
def main() -> None:
    """Command-line entry point: print information about the current (or a
    chroot'ed) OS distribution, human-readable or as JSON."""
    out = logging.getLogger(__name__)
    out.setLevel(logging.DEBUG)
    out.addHandler(logging.StreamHandler(sys.stdout))

    arg_parser = argparse.ArgumentParser(description="OS distro info tool")
    arg_parser.add_argument(
        "--json", "-j", help="Output in machine readable format", action="store_true"
    )

    arg_parser.add_argument(
        "--root-dir",
        "-r",
        type=str,
        dest="root_dir",
        help="Path to the root filesystem directory (defaults to /)",
    )

    opts = arg_parser.parse_args()

    # With an alternate root, subprocess data sources must stay disabled.
    dist = (
        LinuxDistribution(
            include_lsb=False,
            include_uname=False,
            include_oslevel=False,
            root_dir=opts.root_dir,
        )
        if opts.root_dir
        else _distro
    )

    if opts.json:
        out.info(json.dumps(dist.info(), indent=4, sort_keys=True))
    else:
        out.info("Name: %s", dist.name(pretty=True))
        out.info("Version: %s", dist.version(pretty=True))
        out.info("Codename: %s", dist.codename())


if __name__ == "__main__":
    main()
deepseek/lib/python3.10/site-packages/distro/py.typed ADDED
File without changes
deepseek/lib/python3.10/site-packages/importlib_metadata/__init__.py ADDED
@@ -0,0 +1,1132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ APIs exposing metadata from third-party Python packages.
3
+
4
+ This codebase is shared between importlib.metadata in the stdlib
5
+ and importlib_metadata in PyPI. See
6
+ https://github.com/python/importlib_metadata/wiki/Development-Methodology
7
+ for more detail.
8
+ """
9
+
10
+ from __future__ import annotations
11
+
12
+ import abc
13
+ import collections
14
+ import email
15
+ import functools
16
+ import itertools
17
+ import operator
18
+ import os
19
+ import pathlib
20
+ import posixpath
21
+ import re
22
+ import sys
23
+ import textwrap
24
+ import types
25
+ from contextlib import suppress
26
+ from importlib import import_module
27
+ from importlib.abc import MetaPathFinder
28
+ from itertools import starmap
29
+ from typing import Any, Iterable, List, Mapping, Match, Optional, Set, cast
30
+
31
+ from . import _meta
32
+ from ._collections import FreezableDefaultDict, Pair
33
+ from ._compat import (
34
+ NullFinder,
35
+ install,
36
+ )
37
+ from ._functools import method_cache, pass_none
38
+ from ._itertools import always_iterable, bucket, unique_everseen
39
+ from ._meta import PackageMetadata, SimplePath
40
+ from .compat import py39, py311
41
+
42
# Public API of this module: the metadata accessor functions plus the
# supporting classes and the package-lookup error type.
__all__ = [
    'Distribution',
    'DistributionFinder',
    'PackageMetadata',
    'PackageNotFoundError',
    'SimplePath',
    'distribution',
    'distributions',
    'entry_points',
    'files',
    'metadata',
    'packages_distributions',
    'requires',
    'version',
]
57
+
58
+
59
class PackageNotFoundError(ModuleNotFoundError):
    """Raised when no distribution metadata exists for the requested package."""

    def __str__(self) -> str:
        return f"No package metadata was found for {self.name}"

    @property
    def name(self) -> str:  # type: ignore[override] # make readonly
        # The sole constructor argument carries the package name; exposing
        # it as a read-only property mirrors ModuleNotFoundError.name.
        [pkg] = self.args
        return pkg
69
+
70
+
71
class Sectioned:
    """
    A minimal, fast parser for sectioned entry-point config text.

    >>> for item in Sectioned.read(Sectioned._sample):
    ...     print(item)
    Pair(name='sec1', value='# comments ignored')
    Pair(name='sec1', value='a = 1')
    Pair(name='sec1', value='b = 2')
    Pair(name='sec2', value='a = 2')

    >>> res = Sectioned.section_pairs(Sectioned._sample)
    >>> item = next(res)
    >>> item.name
    'sec1'
    >>> item.value
    Pair(name='a', value='1')
    >>> item = next(res)
    >>> item.value
    Pair(name='b', value='2')
    >>> item = next(res)
    >>> item.name
    'sec2'
    >>> item.value
    Pair(name='a', value='2')
    >>> list(res)
    []
    """

    _sample = textwrap.dedent(
        """
        [sec1]
        # comments ignored
        a = 1
        b = 2

        [sec2]
        a = 2
        """
    ).lstrip()

    @classmethod
    def section_pairs(cls, text):
        # Re-parse each section body line into a (name, value) Pair,
        # skipping anything that appears before the first [section] header.
        for section in cls.read(text, filter_=cls.valid):
            if section.name is None:
                continue
            yield section._replace(value=Pair.parse(section.value))

    @staticmethod
    def read(text, filter_=None):
        current = None
        for line in filter(filter_, map(str.strip, text.splitlines())):
            if line.startswith('[') and line.endswith(']'):
                # A header line only switches the current section name.
                current = line.strip('[]')
            else:
                yield Pair(current, line)

    @staticmethod
    def valid(line: str):
        # Truthy only for non-empty lines that are not comments.
        return line and not line.startswith('#')
134
+
135
+
136
class EntryPoint:
    """An entry point as defined by Python packaging conventions.

    See `the packaging docs on entry points
    <https://packaging.python.org/specifications/entry-points/>`_
    for more information.

    >>> ep = EntryPoint(
    ...     name=None, group=None, value='package.module:attr [extra1, extra2]')
    >>> ep.module
    'package.module'
    >>> ep.attr
    'attr'
    >>> ep.extras
    ['extra1', 'extra2']
    """

    pattern = re.compile(
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+)\s*)?'
        r'((?P<extras>\[.*\])\s*)?$'
    )
    """
    A regular expression describing the syntax for an entry point,
    which might look like:

    - module
    - package.module
    - package.module:attribute
    - package.module:object.attribute
    - package.module:attr [extra1, extra2]

    Other combinations are possible as well.

    The expression is lenient about whitespace around the ':',
    following the attr, and following any extras.
    """

    name: str
    value: str
    group: str

    # Back-reference to the distribution that supplied this entry point;
    # populated by ``_for`` (see Distribution.entry_points).
    dist: Optional[Distribution] = None

    def __init__(self, name: str, value: str, group: str) -> None:
        # Write through vars() because __setattr__ is overridden below
        # to keep instances immutable.
        vars(self).update(name=name, value=value, group=group)

    def load(self) -> Any:
        """Load the entry point from its definition. If only a module
        is indicated by the value, return that module. Otherwise,
        return the named object.
        """
        match = cast(Match, self.pattern.match(self.value))
        module = import_module(match.group('module'))
        # Walk a dotted attribute path (e.g. 'object.attribute') from the module.
        attrs = filter(None, (match.group('attr') or '').split('.'))
        return functools.reduce(getattr, attrs, module)

    @property
    def module(self) -> str:
        """The module portion of the entry point value."""
        match = self.pattern.match(self.value)
        assert match is not None
        return match.group('module')

    @property
    def attr(self) -> str:
        """The object/attribute path portion of the value, if present."""
        match = self.pattern.match(self.value)
        assert match is not None
        return match.group('attr')

    @property
    def extras(self) -> List[str]:
        """The extras named in the value, as a list of strings."""
        match = self.pattern.match(self.value)
        assert match is not None
        return re.findall(r'\w+', match.group('extras') or '')

    def _for(self, dist):
        # Attach the owning distribution (bypassing immutability) and
        # return self so this can be used in generator expressions.
        vars(self).update(dist=dist)
        return self

    def matches(self, **params):
        """
        EntryPoint matches the given parameters.

        >>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]')
        >>> ep.matches(group='foo')
        True
        >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]')
        True
        >>> ep.matches(group='foo', name='other')
        False
        >>> ep.matches()
        True
        >>> ep.matches(extras=['extra1', 'extra2'])
        True
        >>> ep.matches(module='bing')
        True
        >>> ep.matches(attr='bong')
        True
        """
        self._disallow_dist(params)
        # Compare each requested attribute against this entry point's value.
        attrs = (getattr(self, param) for param in params)
        return all(map(operator.eq, params.values(), attrs))

    @staticmethod
    def _disallow_dist(params):
        """
        Querying by dist is not allowed (dist objects are not comparable).
        >>> EntryPoint(name='fan', value='fav', group='fag').matches(dist='foo')
        Traceback (most recent call last):
        ...
        ValueError: "dist" is not suitable for matching...
        """
        if "dist" in params:
            raise ValueError(
                '"dist" is not suitable for matching. '
                "Instead, use Distribution.entry_points.select() on a "
                "located distribution."
            )

    def _key(self):
        # Canonical identity used for ordering, equality, and hashing.
        return self.name, self.value, self.group

    def __lt__(self, other):
        return self._key() < other._key()

    def __eq__(self, other):
        return self._key() == other._key()

    def __setattr__(self, name, value):
        # Enforces immutability; all internal writes go through vars().
        raise AttributeError("EntryPoint objects are immutable.")

    def __repr__(self):
        return (
            f'EntryPoint(name={self.name!r}, value={self.value!r}, '
            f'group={self.group!r})'
        )

    def __hash__(self) -> int:
        return hash(self._key())
275
+
276
+
277
class EntryPoints(tuple):
    """
    An immutable collection of selectable EntryPoint objects.
    """

    __slots__ = ()

    def __getitem__(self, name: str) -> EntryPoint:  # type: ignore[override] # Work with str instead of int
        """
        Return the first entry point in self matching ``name``.

        Raises KeyError when no entry point matches.
        """
        matching = iter(self.select(name=name))
        try:
            return next(matching)
        except StopIteration:
            raise KeyError(name)

    def __repr__(self):
        """
        Repr spelled with the class name and a tuple argument to
        signal that this deviates from regular tuple behavior.
        """
        return f'{self.__class__.__name__}({tuple(self)!r})'

    def select(self, **params) -> EntryPoints:
        """
        Return a new EntryPoints holding only the entry points that
        match the given parameters (typically group and/or name).
        """
        matched = (ep for ep in self if py39.ep_matches(ep, **params))
        return EntryPoints(matched)

    @property
    def names(self) -> Set[str]:
        """
        The set of names of all contained entry points.
        """
        return {ep.name for ep in self}

    @property
    def groups(self) -> Set[str]:
        """
        The set of groups of all contained entry points.
        """
        return {ep.group for ep in self}

    @classmethod
    def _from_text_for(cls, text, dist):
        # Attach the owning distribution to each parsed entry point.
        return cls(ep._for(dist) for ep in cls._from_text(text))

    @staticmethod
    def _from_text(text):
        return (
            EntryPoint(name=item.value.name, value=item.value.value, group=item.name)
            for item in Sectioned.section_pairs(text or '')
        )
331
+
332
+
333
class PackagePath(pathlib.PurePosixPath):
    """A reference to a path in a package"""

    hash: Optional[FileHash]
    size: int
    dist: Distribution

    def read_text(self, encoding: str = 'utf-8') -> str:
        """Return the file's contents decoded as text."""
        target = self.locate()
        return target.read_text(encoding=encoding)

    def read_binary(self) -> bytes:
        """Return the file's raw contents."""
        target = self.locate()
        return target.read_bytes()

    def locate(self) -> SimplePath:
        """Return a path-like object for this path, resolved by the owning dist."""
        return self.dist.locate_file(self)
349
+
350
+
351
class FileHash:
    """A parsed RECORD-style hash entry of the form ``mode=value``."""

    def __init__(self, spec: str) -> None:
        # Split on the first '=' only; the value may itself contain '='.
        mode, _, value = spec.partition('=')
        self.mode = mode
        self.value = value

    def __repr__(self) -> str:
        return f'<FileHash mode: {self.mode} value: {self.value}>'
357
+
358
+
359
class Distribution(metaclass=abc.ABCMeta):
    """
    An abstract Python distribution package.

    Custom providers may derive from this class and define
    the abstract methods to provide a concrete implementation
    for their environment. Some providers may opt to override
    the default implementation of some properties to bypass
    the file-reading mechanism.
    """

    @abc.abstractmethod
    def read_text(self, filename) -> Optional[str]:
        """Attempt to load metadata file given by the name.

        Python distribution metadata is organized by blobs of text
        typically represented as "files" in the metadata directory
        (e.g. package-1.0.dist-info). These files include things
        like:

        - METADATA: The distribution metadata including fields
          like Name and Version and Description.
        - entry_points.txt: A series of entry points as defined in
          `the entry points spec <https://packaging.python.org/en/latest/specifications/entry-points/#file-format>`_.
        - RECORD: A record of files according to
          `this recording spec <https://packaging.python.org/en/latest/specifications/recording-installed-packages/#the-record-file>`_.

        A package may provide any set of files, including those
        not listed here or none at all.

        :param filename: The name of the file in the distribution info.
        :return: The text if found, otherwise None.
        """

    @abc.abstractmethod
    def locate_file(self, path: str | os.PathLike[str]) -> SimplePath:
        """
        Given a path to a file in this distribution, return a SimplePath
        to it.

        This method is used by callers of ``Distribution.files()`` to
        locate files within the distribution. If it's possible for a
        Distribution to represent files in the distribution as
        ``SimplePath`` objects, it should implement this method
        to resolve such objects.

        Some Distribution providers may elect not to resolve SimplePath
        objects within the distribution by raising a
        NotImplementedError, but consumers of such a Distribution would
        be unable to invoke ``Distribution.files()``.
        """

    @classmethod
    def from_name(cls, name: str) -> Distribution:
        """Return the Distribution for the given package name.

        :param name: The name of the distribution package to search for.
        :return: The Distribution instance (or subclass thereof) for the named
            package, if found.
        :raises PackageNotFoundError: When the named package's distribution
            metadata cannot be found.
        :raises ValueError: When an invalid value is supplied for name.
        """
        if not name:
            raise ValueError("A distribution name is required.")
        try:
            # Take the first discovered match, preferring ones with metadata.
            return next(iter(cls._prefer_valid(cls.discover(name=name))))
        except StopIteration:
            raise PackageNotFoundError(name)

    @classmethod
    def discover(
        cls, *, context: Optional[DistributionFinder.Context] = None, **kwargs
    ) -> Iterable[Distribution]:
        """Return an iterable of Distribution objects for all packages.

        Pass a ``context`` or pass keyword arguments for constructing
        a context.

        :context: A ``DistributionFinder.Context`` object.
        :return: Iterable of Distribution objects for packages matching
          the context.
        """
        # A context and loose kwargs are mutually exclusive ways to configure
        # the search.
        if context and kwargs:
            raise ValueError("cannot accept context and kwargs")
        context = context or DistributionFinder.Context(**kwargs)
        return itertools.chain.from_iterable(
            resolver(context) for resolver in cls._discover_resolvers()
        )

    @staticmethod
    def _prefer_valid(dists: Iterable[Distribution]) -> Iterable[Distribution]:
        """
        Prefer (move to the front) distributions that have metadata.

        Ref python/importlib_resources#489.
        """
        buckets = bucket(dists, lambda dist: bool(dist.metadata))
        return itertools.chain(buckets[True], buckets[False])

    @staticmethod
    def at(path: str | os.PathLike[str]) -> Distribution:
        """Return a Distribution for the indicated metadata path.

        :param path: a string or path-like object
        :return: a concrete Distribution instance for the path
        """
        return PathDistribution(pathlib.Path(path))

    @staticmethod
    def _discover_resolvers():
        """Search the meta_path for resolvers (MetadataPathFinders)."""
        # Finders lacking find_distributions yield None and are filtered out.
        declared = (
            getattr(finder, 'find_distributions', None) for finder in sys.meta_path
        )
        return filter(None, declared)

    @property
    def metadata(self) -> _meta.PackageMetadata:
        """Return the parsed metadata for this Distribution.

        The returned object will have keys that name the various bits of
        metadata per the
        `Core metadata specifications <https://packaging.python.org/en/latest/specifications/core-metadata/#core-metadata>`_.

        Custom providers may provide the METADATA file or override this
        property.

        NOTE: The metadata is re-read and re-parsed on every access of
        this property.
        """
        # deferred for performance (python/cpython#109829)
        from . import _adapters

        opt_text = (
            self.read_text('METADATA')
            or self.read_text('PKG-INFO')
            # This last clause is here to support old egg-info files. Its
            # effect is to just end up using the PathDistribution's self._path
            # (which points to the egg-info file) attribute unchanged.
            or self.read_text('')
        )
        text = cast(str, opt_text)
        return _adapters.Message(email.message_from_string(text))

    @property
    def name(self) -> str:
        """Return the 'Name' metadata for the distribution package."""
        return self.metadata['Name']

    @property
    def _normalized_name(self):
        """Return a normalized version of the name."""
        return Prepared.normalize(self.name)

    @property
    def version(self) -> str:
        """Return the 'Version' metadata for the distribution package."""
        return self.metadata['Version']

    @property
    def entry_points(self) -> EntryPoints:
        """
        Return EntryPoints for this distribution.

        Custom providers may provide the ``entry_points.txt`` file
        or override this property.
        """
        return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self)

    @property
    def files(self) -> Optional[List[PackagePath]]:
        """Files in this distribution.

        :return: List of PackagePath for this distribution or None

        Result is `None` if the metadata file that enumerates files
        (i.e. RECORD for dist-info, or installed-files.txt or
        SOURCES.txt for egg-info) is missing.
        Result may be empty if the metadata exists but is empty.

        Custom providers are recommended to provide a "RECORD" file (in
        ``read_text``) or override this property to allow for callers to be
        able to resolve filenames provided by the package.
        """

        def make_file(name, hash=None, size_str=None):
            # Build a PackagePath annotated with hash/size/dist, matching
            # the columns of a RECORD row.
            result = PackagePath(name)
            result.hash = FileHash(hash) if hash else None
            result.size = int(size_str) if size_str else None
            result.dist = self
            return result

        @pass_none
        def make_files(lines):
            # Delay csv import, since Distribution.files is not as widely used
            # as other parts of importlib.metadata
            import csv

            return starmap(make_file, csv.reader(lines))

        @pass_none
        def skip_missing_files(package_paths):
            # Filter out entries whose files no longer exist on disk.
            return list(filter(lambda path: path.locate().exists(), package_paths))

        # Try the dist-info RECORD first, then the two egg-info fallbacks.
        return skip_missing_files(
            make_files(
                self._read_files_distinfo()
                or self._read_files_egginfo_installed()
                or self._read_files_egginfo_sources()
            )
        )

    def _read_files_distinfo(self):
        """
        Read the lines of RECORD.
        """
        text = self.read_text('RECORD')
        return text and text.splitlines()

    def _read_files_egginfo_installed(self):
        """
        Read installed-files.txt and return lines in a similar
        CSV-parsable format as RECORD: each file must be placed
        relative to the site-packages directory and must also be
        quoted (since file names can contain literal commas).

        This file is written when the package is installed by pip,
        but it might not be written for other installation methods.
        Assume the file is accurate if it exists.
        """
        text = self.read_text('installed-files.txt')
        # Prepend the .egg-info/ subdir to the lines in this file.
        # But this subdir is only available from PathDistribution's
        # self._path.
        subdir = getattr(self, '_path', None)
        if not text or not subdir:
            return

        paths = (
            py311.relative_fix((subdir / name).resolve())
            .relative_to(self.locate_file('').resolve(), walk_up=True)
            .as_posix()
            for name in text.splitlines()
        )
        return map('"{}"'.format, paths)

    def _read_files_egginfo_sources(self):
        """
        Read SOURCES.txt and return lines in a similar CSV-parsable
        format as RECORD: each file name must be quoted (since it
        might contain literal commas).

        Note that SOURCES.txt is not a reliable source for what
        files are installed by a package. This file is generated
        for a source archive, and the files that are present
        there (e.g. setup.py) may not correctly reflect the files
        that are present after the package has been installed.
        """
        text = self.read_text('SOURCES.txt')
        return text and map('"{}"'.format, text.splitlines())

    @property
    def requires(self) -> Optional[List[str]]:
        """Generated requirements specified for this Distribution"""
        reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
        return reqs and list(reqs)

    def _read_dist_info_reqs(self):
        return self.metadata.get_all('Requires-Dist')

    def _read_egg_info_reqs(self):
        source = self.read_text('requires.txt')
        # pass_none: returns None unchanged when requires.txt is absent.
        return pass_none(self._deps_from_requires_text)(source)

    @classmethod
    def _deps_from_requires_text(cls, source):
        return cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source))

    @staticmethod
    def _convert_egg_info_reqs_to_simple_reqs(sections):
        """
        Historically, setuptools would solicit and store 'extra'
        requirements, including those with environment markers,
        in separate sections. More modern tools expect each
        dependency to be defined separately, with any relevant
        extras and environment markers attached directly to that
        requirement. This method converts the former to the
        latter. See _test_deps_from_requires_text for an example.
        """

        def make_condition(name):
            # An empty section name yields no extra condition.
            return name and f'extra == "{name}"'

        def quoted_marker(section):
            section = section or ''
            extra, sep, markers = section.partition(':')
            if extra and markers:
                # Parenthesize so the marker binds correctly under 'and'.
                markers = f'({markers})'
            conditions = list(filter(None, [markers, make_condition(extra)]))
            return '; ' + ' and '.join(conditions) if conditions else ''

        def url_req_space(req):
            """
            PEP 508 requires a space between the url_spec and the quoted_marker.
            Ref python/importlib_metadata#357.
            """
            # '@' is uniquely indicative of a url_req.
            return ' ' * ('@' in req)

        for section in sections:
            space = url_req_space(section.value)
            yield section.value + space + quoted_marker(section.name)

    @property
    def origin(self):
        # Installation origin per the direct_url spec, or None if absent.
        return self._load_json('direct_url.json')

    def _load_json(self, filename):
        # Deferred for performance (python/importlib_metadata#503)
        import json

        return pass_none(json.loads)(
            self.read_text(filename),
            object_hook=lambda data: types.SimpleNamespace(**data),
        )
682
+
683
+
684
class DistributionFinder(MetaPathFinder):
    """
    A MetaPathFinder capable of discovering installed distributions.

    Custom providers should implement this interface in order to
    supply metadata.
    """

    class Context:
        """
        Keyword arguments presented by the caller to
        ``distributions()`` or ``Distribution.discover()``
        to narrow the scope of a search for distributions
        in all DistributionFinders.

        Each DistributionFinder may expect any parameters
        and should attempt to honor the canonical
        parameters defined below when appropriate.

        This mechanism gives a custom provider a means to
        solicit additional details from the caller beyond
        "name" and "path" when searching distributions.
        For example, imagine a provider that exposes suites
        of packages in either a "public" or "private" ``realm``.
        A caller may wish to query only for distributions in
        a particular realm and could call
        ``distributions(realm="private")`` to signal to the
        custom provider to only include distributions from that
        realm.
        """

        name = None
        """
        Specific name for which a distribution finder should match.
        A name of ``None`` matches all distributions.
        """

        def __init__(self, **kwargs):
            # Accept arbitrary keyword arguments as instance attributes,
            # allowing providers to define their own context parameters.
            vars(self).update(kwargs)

        @property
        def path(self) -> List[str]:
            """
            The sequence of directory path that a distribution finder
            should search.

            Typically refers to Python installed package paths such as
            "site-packages" directories and defaults to ``sys.path``.
            """
            # Falls back to sys.path when no 'path' kwarg was supplied.
            return vars(self).get('path', sys.path)

    @abc.abstractmethod
    def find_distributions(self, context=Context()) -> Iterable[Distribution]:
        """
        Find distributions.

        Return an iterable of all Distribution instances capable of
        loading the metadata for packages matching the ``context``,
        a DistributionFinder.Context instance.
        """
744
+
745
+
746
class FastPath:
    """
    Micro-optimized class for searching a root for children.

    Root is a path on the file system that may contain metadata
    directories either as natural directories or within a zip file.

    >>> FastPath('').children()
    ['...']

    FastPath objects are cached and recycled for any given root.

    >>> FastPath('foobar') is FastPath('foobar')
    True
    """

    @functools.lru_cache()  # type: ignore[misc]
    def __new__(cls, root):
        # Caching __new__ recycles one instance per root. Note __init__
        # still runs on every construction, but it is idempotent.
        return super().__new__(cls)

    def __init__(self, root):
        self.root = root

    def joinpath(self, child):
        return pathlib.Path(self.root, child)

    def children(self):
        # Try the root as a directory first, then as a zip archive;
        # any failure (missing path, not a zip, etc.) yields no children.
        with suppress(Exception):
            return os.listdir(self.root or '.')
        with suppress(Exception):
            return self.zip_children()
        return []

    def zip_children(self):
        # deferred for performance (python/importlib_metadata#502)
        from zipp.compat.overlay import zipfile

        zip_path = zipfile.Path(self.root)
        names = zip_path.root.namelist()
        # Rebind joinpath on the instance so subsequent joins resolve
        # inside the archive rather than on the file system.
        self.joinpath = zip_path.joinpath

        # De-duplicate to the top-level entries while preserving order.
        return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names)

    def search(self, name):
        # Keyed by mtime so the cached Lookup is rebuilt when root changes.
        return self.lookup(self.mtime).search(name)

    @property
    def mtime(self):
        with suppress(OSError):
            return os.stat(self.root).st_mtime
        # Root is inaccessible: drop any cached Lookup and return None.
        self.lookup.cache_clear()

    @method_cache
    def lookup(self, mtime):
        # mtime participates only as a cache key; the Lookup scans self.
        return Lookup(self)
801
+
802
+
803
class Lookup:
    """
    A micro-optimized class for searching a (fast) path for metadata.
    """

    def __init__(self, path: FastPath):
        """
        Calculate all of the children representing metadata.

        From the children in the path, calculate early all of the
        children that appear to represent metadata (infos) or legacy
        metadata (eggs).
        """

        base = os.path.basename(path.root).lower()
        base_is_egg = base.endswith(".egg")
        # Map normalized package name -> list of metadata dir paths.
        self.infos = FreezableDefaultDict(list)
        self.eggs = FreezableDefaultDict(list)

        for child in path.children():
            low = child.lower()
            if low.endswith((".dist-info", ".egg-info")):
                # rpartition is faster than splitext and suitable for this purpose.
                name = low.rpartition(".")[0].partition("-")[0]
                normalized = Prepared.normalize(name)
                self.infos[normalized].append(path.joinpath(child))
            elif base_is_egg and low == "egg-info":
                # Legacy layout: the package name comes from the .egg dir itself.
                name = base.rpartition(".")[0].partition("-")[0]
                legacy_normalized = Prepared.legacy_normalize(name)
                self.eggs[legacy_normalized].append(path.joinpath(child))

        # Freeze so later missing-key lookups don't grow the dicts.
        self.infos.freeze()
        self.eggs.freeze()

    def search(self, prepared: Prepared):
        """
        Yield all infos and eggs matching the Prepared query.
        """
        # An unnamed (falsy) query matches every known distribution.
        infos = (
            self.infos[prepared.normalized]
            if prepared
            else itertools.chain.from_iterable(self.infos.values())
        )
        eggs = (
            self.eggs[prepared.legacy_normalized]
            if prepared
            else itertools.chain.from_iterable(self.eggs.values())
        )
        return itertools.chain(infos, eggs)
852
+
853
+
854
class Prepared:
    """
    A prepared search query for metadata on a possibly-named package.

    Pre-calculates the normalization to prevent repeated operations.

    >>> none = Prepared(None)
    >>> none.normalized
    >>> none.legacy_normalized
    >>> bool(none)
    False
    >>> sample = Prepared('Sample__Pkg-name.foo')
    >>> sample.normalized
    'sample_pkg_name_foo'
    >>> sample.legacy_normalized
    'sample__pkg_name.foo'
    >>> bool(sample)
    True
    """

    normalized = None
    legacy_normalized = None

    def __init__(self, name: Optional[str]):
        self.name = name
        if name is not None:
            self.normalized = self.normalize(name)
            self.legacy_normalized = self.legacy_normalize(name)

    @staticmethod
    def normalize(name):
        """
        PEP 503 normalization plus dashes as underscores.
        """
        collapsed = re.sub(r"[-_.]+", "-", name)
        return collapsed.lower().replace('-', '_')

    @staticmethod
    def legacy_normalize(name):
        """
        Normalize the package name as found in the convention in
        older packaging tools versions and specs.
        """
        return name.lower().replace('-', '_')

    def __bool__(self):
        # An unnamed (None) query is falsy, signalling "match everything".
        return bool(self.name)
901
+
902
+
903
@install
class MetadataPathFinder(NullFinder, DistributionFinder):
    """A degenerate finder for distribution packages on the file system.

    This finder supplies only a find_distributions() method for versions
    of Python that do not have a PathFinder find_distributions().
    """

    @classmethod
    def find_distributions(
        cls, context=DistributionFinder.Context()
    ) -> Iterable[PathDistribution]:
        """
        Find distributions.

        Return an iterable of all Distribution instances capable of
        loading the metadata for packages matching ``context.name``
        (or all names if ``None`` indicated) along the paths in the list
        of directories ``context.path``.
        """
        found = cls._search_paths(context.name, context.path)
        return map(PathDistribution, found)

    @classmethod
    def _search_paths(cls, name, paths):
        """Find metadata directories in paths heuristically."""
        # Normalize the query once, then search each path entry.
        prepared = Prepared(name)
        return itertools.chain.from_iterable(
            path.search(prepared) for path in map(FastPath, paths)
        )

    @classmethod
    def invalidate_caches(cls) -> None:
        # Drop all recycled FastPath instances (and their Lookups).
        FastPath.__new__.cache_clear()
937
+
938
+
939
class PathDistribution(Distribution):
    def __init__(self, path: SimplePath) -> None:
        """Construct a distribution.

        :param path: SimplePath indicating the metadata directory.
        """
        self._path = path

    def read_text(self, filename: str | os.PathLike[str]) -> Optional[str]:
        # Suppress the errors that simply mean "this metadata file does
        # not exist here" (KeyError covers missing members in zip paths).
        with suppress(
            FileNotFoundError,
            IsADirectoryError,
            KeyError,
            NotADirectoryError,
            PermissionError,
        ):
            return self._path.joinpath(filename).read_text(encoding='utf-8')

        return None

    read_text.__doc__ = Distribution.read_text.__doc__

    def locate_file(self, path: str | os.PathLike[str]) -> SimplePath:
        # Files are resolved relative to the metadata dir's parent
        # (typically the site-packages directory).
        return self._path.parent / path

    @property
    def _normalized_name(self):
        """
        Performance optimization: where possible, resolve the
        normalized name from the file system path.
        """
        stem = os.path.basename(str(self._path))
        # Fall back to parsing METADATA when the dir name is inconclusive.
        return (
            pass_none(Prepared.normalize)(self._name_from_stem(stem))
            or super()._normalized_name
        )

    @staticmethod
    def _name_from_stem(stem):
        """
        >>> PathDistribution._name_from_stem('foo-3.0.egg-info')
        'foo'
        >>> PathDistribution._name_from_stem('CherryPy-3.0.dist-info')
        'CherryPy'
        >>> PathDistribution._name_from_stem('face.egg-info')
        'face'
        >>> PathDistribution._name_from_stem('foo.bar')
        """
        filename, ext = os.path.splitext(stem)
        if ext not in ('.dist-info', '.egg-info'):
            return
        # Everything before the first '-' is the distribution name.
        name, sep, rest = filename.partition('-')
        return name
992
+
993
+
994
def distribution(distribution_name: str) -> Distribution:
    """Get the ``Distribution`` instance for the named package.

    Convenience wrapper around ``Distribution.from_name``.

    :param distribution_name: The name of the distribution package as a string.
    :return: A ``Distribution`` instance (or subclass thereof).
    :raises PackageNotFoundError: When the named package cannot be found.
    """
    return Distribution.from_name(distribution_name)
1001
+
1002
+
1003
def distributions(**kwargs) -> Iterable[Distribution]:
    """Get all ``Distribution`` instances in the current environment.

    Keyword arguments are forwarded to ``Distribution.discover`` to
    construct a search context.

    :return: An iterable of ``Distribution`` instances.
    """
    return Distribution.discover(**kwargs)
1009
+
1010
+
1011
def metadata(distribution_name: str) -> _meta.PackageMetadata:
    """Get the metadata for the named package.

    :param distribution_name: The name of the distribution package to query.
    :return: A PackageMetadata containing the parsed metadata.
    :raises PackageNotFoundError: When the named package cannot be found.
    """
    dist = Distribution.from_name(distribution_name)
    return dist.metadata
1018
+
1019
+
1020
def version(distribution_name: str) -> str:
    """Get the version string for the named package.

    :param distribution_name: The name of the distribution package to query.
    :return: The version string for the package as defined in the package's
        "Version" metadata key.
    :raises PackageNotFoundError: When the named package cannot be found.
    """
    dist = distribution(distribution_name)
    return dist.version
1028
+
1029
+
1030
# De-duplicates distributions that appear under multiple sys.path entries,
# keyed by their normalized name (first occurrence wins).
_unique = functools.partial(
    unique_everseen,
    key=py39.normalized_name,
)
"""
Wrapper for ``distributions`` to return unique distributions by name.
"""
1037
+
1038
+
1039
def entry_points(**params) -> EntryPoints:
    """Return EntryPoint objects for all installed packages.

    Pass selection parameters (group or name) to filter the
    result to entry points matching those properties (see
    EntryPoints.select()).

    :return: EntryPoints for all installed packages.
    """
    # Gather entry points across all (de-duplicated) distributions,
    # then apply the caller's selection criteria.
    per_dist = (dist.entry_points for dist in _unique(distributions()))
    combined = itertools.chain.from_iterable(per_dist)
    return EntryPoints(combined).select(**params)
1052
+
1053
+
1054
def files(distribution_name: str) -> Optional[List[PackagePath]]:
    """Return a list of files for the named package.

    :param distribution_name: The name of the distribution package to query.
    :return: List of files composing the distribution, or ``None`` as
        reported by ``Distribution.files``.
    """
    return distribution(distribution_name).files
1061
+
1062
+
1063
def requires(distribution_name: str) -> Optional[List[str]]:
    """
    Return a list of requirements for the named package.

    :param distribution_name: The name of the distribution package to query.
    :return: An iterable of requirements, suitable for
        packaging.requirement.Requirement.
    """
    return distribution(distribution_name).requires
1071
+
1072
+
1073
def packages_distributions() -> Mapping[str, List[str]]:
    """
    Return a mapping of top-level packages to their
    distributions.

    >>> import collections.abc
    >>> pkgs = packages_distributions()
    >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values())
    True
    """
    mapping = collections.defaultdict(list)
    for dist in distributions():
        # Prefer the names declared in top_level.txt; fall back to
        # inferring them from the distribution's file listing.
        tops = _top_level_declared(dist) or _top_level_inferred(dist)
        for top in tops:
            mapping[top].append(dist.metadata['Name'])
    return dict(mapping)
1088
+
1089
+
1090
+ def _top_level_declared(dist):
1091
+ return (dist.read_text('top_level.txt') or '').split()
1092
+
1093
+
1094
def _topmost(name: PackagePath) -> Optional[str]:
    """
    Return the top-most parent as long as there is a parent.
    """
    parts = name.parts
    # A single-component path has no parent, so there is no "topmost".
    return parts[0] if len(parts) > 1 else None
1100
+
1101
+
1102
def _get_toplevel_name(name: PackagePath) -> str:
    """
    Infer a possibly importable module name from a name presumed on
    sys.path.

    >>> _get_toplevel_name(PackagePath('foo.py'))
    'foo'
    >>> _get_toplevel_name(PackagePath('foo/__init__.py'))
    'foo'
    >>> _get_toplevel_name(PackagePath('foo.pth'))
    'foo.pth'
    >>> _get_toplevel_name(PackagePath('foo.dist-info'))
    'foo.dist-info'
    """
    # inspect is imported lazily: loading it is expensive at import time
    # (python/cpython#118761).
    import inspect

    top = _topmost(name)
    if top is not None:
        return top
    return inspect.getmodulename(name) or str(name)
1124
+
1125
+
1126
def _top_level_inferred(dist):
    """
    Infer top-level importable names from the distribution's file
    listing, excluding dotted names (e.g. ``foo.pth``) that cannot be
    imported as modules.
    """
    candidates = {
        _get_toplevel_name(path) for path in always_iterable(dist.files)
    }
    return (name for name in candidates if '.' not in name)
deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (42.2 kB). View file
 
deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_adapters.cpython-310.pyc ADDED
Binary file (2.86 kB). View file
 
deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_collections.cpython-310.pyc ADDED
Binary file (1.55 kB). View file
 
deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_compat.cpython-310.pyc ADDED
Binary file (1.88 kB). View file
 
deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_functools.cpython-310.pyc ADDED
Binary file (3.14 kB). View file
 
deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_itertools.cpython-310.pyc ADDED
Binary file (5.05 kB). View file
 
deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_meta.cpython-310.pyc ADDED
Binary file (3.33 kB). View file
 
deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/_text.cpython-310.pyc ADDED
Binary file (3.07 kB). View file
 
deepseek/lib/python3.10/site-packages/importlib_metadata/__pycache__/diagnose.cpython-310.pyc ADDED
Binary file (840 Bytes). View file
 
deepseek/lib/python3.10/site-packages/importlib_metadata/_adapters.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import email.message
2
+ import re
3
+ import textwrap
4
+
5
+ from ._text import FoldedCase
6
+
7
+
8
class Message(email.message.Message):
    """
    An ``email.message.Message`` subclass used for package metadata,
    adding multiple-use-key awareness, strict ``__getitem__``, header
    repair, and a JSON-compatible view.
    """

    multiple_use_keys = set(
        map(
            FoldedCase,
            [
                'Classifier',
                'Obsoletes-Dist',
                'Platform',
                'Project-URL',
                'Provides-Dist',
                'Provides-Extra',
                'Requires-Dist',
                'Requires-External',
                'Supported-Platform',
                'Dynamic',
            ],
        )
    )
    """
    Keys that may be indicated multiple times per PEP 566.
    """

    def __new__(cls, orig: email.message.Message):
        # Re-class an already-parsed Message by copying its instance
        # state wholesale rather than re-parsing.
        res = super().__new__(cls)
        vars(res).update(vars(orig))
        return res

    def __init__(self, *args, **kwargs):
        # Intentionally does not call super().__init__(): the state was
        # copied in __new__; this only normalizes the headers.
        self._headers = self._repair_headers()

    # suppress spurious error from mypy
    def __iter__(self):
        return super().__iter__()

    def __getitem__(self, item):
        """
        Override parent behavior to typical dict behavior.

        ``email.message.Message`` will emit None values for missing
        keys. Typical mappings, including this ``Message``, will raise
        a key error for missing keys.

        Ref python/importlib_metadata#371.
        """
        res = super().__getitem__(item)
        if res is None:
            raise KeyError(item)
        return res

    def _repair_headers(self):
        def redent(value):
            "Correct for RFC822 indentation"
            # Single-line values need no repair; multi-line values are
            # dedented after restoring the 8-space continuation indent.
            if not value or '\n' not in value:
                return value
            return textwrap.dedent(' ' * 8 + value)

        headers = [(key, redent(value)) for key, value in vars(self)['_headers']]
        if self._payload:
            # Surface the message body as a 'Description' pseudo-header.
            headers.append(('Description', self.get_payload()))
        return headers

    @property
    def json(self):
        """
        Convert PackageMetadata to a JSON-compatible format
        per PEP 0566.
        """

        def transform(key):
            # Multiple-use keys become lists; 'Keywords' is split on
            # whitespace; key names are lowercased with '-' -> '_'.
            value = self.get_all(key) if key in self.multiple_use_keys else self[key]
            if key == 'Keywords':
                value = re.split(r'\s+', value)
            tk = key.lower().replace('-', '_')
            return tk, value

        return dict(map(transform, map(FoldedCase, self)))
deepseek/lib/python3.10/site-packages/importlib_metadata/_collections.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import collections
2
+
3
+
4
+ # from jaraco.collections 3.3
5
class FreezableDefaultDict(collections.defaultdict):
    """
    Often it is desirable to prevent the mutation of
    a default dict after its initial construction, such
    as to prevent mutation during iteration.

    >>> dd = FreezableDefaultDict(list)
    >>> dd[0].append('1')
    >>> dd.freeze()
    >>> dd[1]
    []
    >>> len(dd)
    1
    """

    def __missing__(self, key):
        # Before freeze(): defer to defaultdict, which inserts and
        # returns the default.  After freeze(): hand back a fresh
        # default without mutating the mapping.
        handler = getattr(self, '_frozen', None)
        if handler is None:
            return super().__missing__(key)
        return handler(key)

    def freeze(self):
        """Stop recording defaults; missing keys yield throwaway values."""
        self._frozen = lambda key: self.default_factory()
25
+
26
+
27
class Pair(collections.namedtuple('Pair', 'name value')):
    """A ``(name, value)`` pair parsed from ``name = value`` text."""

    @classmethod
    def parse(cls, text):
        # Split on the first '=' only, trimming surrounding whitespace
        # from each side.
        name, value = text.split("=", 1)
        return cls(name.strip(), value.strip())
deepseek/lib/python3.10/site-packages/importlib_metadata/_compat.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import platform
2
+ import sys
3
+
4
+ __all__ = ['install', 'NullFinder']
5
+
6
+
7
def install(cls):
    """
    Class decorator for installation on sys.meta_path.

    Adds the backport DistributionFinder to sys.meta_path and
    attempts to disable the finder functionality of the stdlib
    DistributionFinder.
    """
    # Instantiate the finder class and register it; the stdlib finder
    # is then disabled so this backport takes precedence.
    sys.meta_path.append(cls())
    disable_stdlib_finder()
    return cls
18
+
19
+
20
def disable_stdlib_finder():
    """
    Give the backport primacy for discovering path-based distributions
    by monkey-patching the stdlib O_O.

    See #91 for more background for rationale on this sketchy
    behavior.
    """
    stdlib_finders = [
        finder
        for finder in sys.meta_path
        if getattr(finder, '__module__', None) == '_frozen_importlib_external'
        and hasattr(finder, 'find_distributions')
    ]
    for finder in stdlib_finders:  # pragma: nocover
        del finder.find_distributions
36
+
37
+
38
class NullFinder:
    """
    A "Finder" (aka "MetaPathFinder") that never finds any modules,
    but may find distributions.
    """

    @staticmethod
    def find_spec(*args, **kwargs):
        # Always decline to locate a module spec.
        return None
47
+
48
+
49
def pypy_partial(val):
    """
    Adjust for variable stacklevel on partial under PyPy.

    Workaround for #327.

    :param val: The stacklevel appropriate for CPython.
    :return: ``val`` on CPython; ``val + 1`` on PyPy, where
        ``functools.partial`` adds an extra frame.
    """
    return val + (platform.python_implementation() == 'PyPy')
deepseek/lib/python3.10/site-packages/importlib_metadata/_functools.py ADDED
@@ -0,0 +1,104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import types
3
+
4
+
5
+ # from jaraco.functools 3.3
6
def method_cache(method, cache_wrapper=None):
    """
    Wrap lru_cache to support storing the cache data in the object
    instances.

    On the first call for a given instance, the undecorated method is
    bound to the instance, wrapped with *cache_wrapper* (default:
    ``functools.lru_cache()``), and installed as an instance attribute
    shadowing this wrapper.  Subsequent calls therefore go straight to
    the per-instance cache, so values cached on one instance never
    evict values from another, and the cache dies with the instance.

    >>> class MyClass:
    ...     calls = 0
    ...
    ...     @method_cache
    ...     def method(self, value):
    ...         self.calls += 1
    ...         return value
    >>> a = MyClass()
    >>> a.method(3)
    3
    >>> a.method(3)
    3
    >>> a.calls
    1

    ``cache_clear()`` is available even before the first call creates a
    per-instance cache.

    Caution - do not subsequently wrap the method with another
    decorator, such as ``@property``, which changes the semantics of
    the function.

    See also
    http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
    for another implementation and additional justification.
    """
    cache_wrapper = cache_wrapper or functools.lru_cache()

    def wrapper(self, *args, **kwargs):
        # First call for this instance: build a cached bound method and
        # install it on the instance so later lookups bypass this wrapper.
        cached = cache_wrapper(types.MethodType(method, self))
        setattr(self, method.__name__, cached)
        return cached(*args, **kwargs)

    # Support cache clear even before a cache has been created.
    wrapper.cache_clear = lambda: None

    return wrapper
86
+
87
+
88
+ # From jaraco.functools 3.3
89
def pass_none(func):
    """
    Wrap func so it's not called if its first param is None

    >>> print_text = pass_none(print)
    >>> print_text('text')
    text
    >>> print_text(None)
    """

    @functools.wraps(func)
    def wrapper(param, *args, **kwargs):
        # Short-circuit: a None first argument returns None without
        # invoking the wrapped callable.
        if param is None:
            return None
        return func(param, *args, **kwargs)

    return wrapper
deepseek/lib/python3.10/site-packages/importlib_metadata/_itertools.py ADDED
@@ -0,0 +1,171 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from collections import defaultdict, deque
2
+ from itertools import filterfalse
3
+
4
+
5
def unique_everseen(iterable, key=None):
    "List unique elements, preserving order. Remember all elements ever seen."
    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
    # unique_everseen('ABBCcAD', str.lower) --> A B C D
    seen = set()
    if key is None:
        for element in iterable:
            if element not in seen:
                seen.add(element)
                yield element
    else:
        for element in iterable:
            marker = key(element)
            if marker not in seen:
                seen.add(marker)
                yield element
21
+
22
+
23
+ # copied from more_itertools 8.8
24
def always_iterable(obj, base_type=(str, bytes)):
    """If *obj* is iterable, return an iterator over its items::

        >>> obj = (1, 2, 3)
        >>> list(always_iterable(obj))
        [1, 2, 3]

    If *obj* is not iterable, return a one-item iterable containing *obj*::

        >>> list(always_iterable(1))
        [1]

    If *obj* is ``None``, return an empty iterable:

        >>> list(always_iterable(None))
        []

    By default, binary and text strings are not considered iterable::

        >>> list(always_iterable('foo'))
        ['foo']

    Objects for which ``isinstance(obj, base_type)`` returns ``True``
    are treated as single items; pass ``base_type=None`` to disable the
    special handling entirely:

        >>> list(always_iterable({'a': 1}, base_type=dict))
        [{'a': 1}]
        >>> list(always_iterable('foo', base_type=None))
        ['f', 'o', 'o']
    """
    if obj is None:
        return iter(())

    if base_type is not None and isinstance(obj, base_type):
        return iter((obj,))

    try:
        return iter(obj)
    except TypeError:
        # Not iterable at all: wrap as a single item.
        return iter((obj,))
75
+
76
+
77
+ # Copied from more_itertools 10.3
78
class bucket:
    """Wrap *iterable* and return an object that buckets the iterable into
    child iterables based on a *key* function.

    >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
    >>> s = bucket(iterable, key=lambda x: x[0])  # Bucket by 1st character
    >>> sorted(list(s))  # Get the keys
    ['a', 'b', 'c']
    >>> a_iterable = s['a']
    >>> next(a_iterable)
    'a1'
    >>> next(a_iterable)
    'a2'
    >>> list(s['b'])
    ['b1', 'b2', 'b3']

    The original iterable will be advanced and its items will be cached until
    they are used by the child iterables. This may require significant storage.

    By default, attempting to select a bucket to which no items belong will
    exhaust the iterable and cache all values.
    If you specify a *validator* function, selected buckets will instead be
    checked against it.

    >>> from itertools import count
    >>> it = count(1, 2)  # Infinite sequence of odd numbers
    >>> key = lambda x: x % 10  # Bucket by last digit
    >>> validator = lambda x: x in {1, 3, 5, 7, 9}  # Odd digits only
    >>> s = bucket(it, key=key, validator=validator)
    >>> 2 in s
    False
    >>> list(s[2])
    []

    """

    def __init__(self, iterable, key, validator=None):
        self._it = iter(iterable)
        self._key = key
        # Per-bucket FIFO of items seen but not yet consumed.
        self._cache = defaultdict(deque)
        self._validator = validator or (lambda x: True)

    def __contains__(self, value):
        if not self._validator(value):
            return False

        try:
            item = next(self[value])
        except StopIteration:
            return False
        else:
            # Found one: put it back so the membership test does not
            # consume it from the bucket.
            self._cache[value].appendleft(item)

        return True

    def _get_values(self, value):
        """
        Helper to yield items from the parent iterator that match *value*.
        Items that don't match are stored in the local cache as they
        are encountered.
        """
        while True:
            # If we've cached some items that match the target value, emit
            # the first one and evict it from the cache.
            if self._cache[value]:
                yield self._cache[value].popleft()
            # Otherwise we need to advance the parent iterator to search for
            # a matching item, caching the rest.
            else:
                while True:
                    try:
                        item = next(self._it)
                    except StopIteration:
                        return
                    item_value = self._key(item)
                    if item_value == value:
                        yield item
                        break
                    elif self._validator(item_value):
                        self._cache[item_value].append(item)

    def __iter__(self):
        # Iterating the bucket exhausts the source, caching every item,
        # then yields the distinct (validated) keys.
        for item in self._it:
            item_value = self._key(item)
            if self._validator(item_value):
                self._cache[item_value].append(item)

        yield from self._cache.keys()

    def __getitem__(self, value):
        if not self._validator(value):
            return iter(())

        return self._get_values(value)
deepseek/lib/python3.10/site-packages/importlib_metadata/_meta.py ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ from typing import (
5
+ Any,
6
+ Dict,
7
+ Iterator,
8
+ List,
9
+ Optional,
10
+ Protocol,
11
+ TypeVar,
12
+ Union,
13
+ overload,
14
+ )
15
+
16
+ _T = TypeVar("_T")
17
+
18
+
19
class PackageMetadata(Protocol):
    """
    Structural protocol for a package-metadata mapping: an iterable,
    sized, string-keyed container with ``get``/``get_all`` accessors
    and a JSON-compatible view.
    """

    def __len__(self) -> int: ...  # pragma: no cover

    def __contains__(self, item: str) -> bool: ...  # pragma: no cover

    def __getitem__(self, key: str) -> str: ...  # pragma: no cover

    def __iter__(self) -> Iterator[str]: ...  # pragma: no cover

    @overload
    def get(
        self, name: str, failobj: None = None
    ) -> Optional[str]: ...  # pragma: no cover

    @overload
    def get(self, name: str, failobj: _T) -> Union[str, _T]: ...  # pragma: no cover

    # overload per python/importlib_metadata#435
    @overload
    def get_all(
        self, name: str, failobj: None = None
    ) -> Optional[List[Any]]: ...  # pragma: no cover

    @overload
    def get_all(self, name: str, failobj: _T) -> Union[List[Any], _T]:
        """
        Return all values associated with a possibly multi-valued key.
        """

    @property
    def json(self) -> Dict[str, Union[str, List[str]]]:
        """
        A JSON-compatible form of the metadata.
        """
53
+
54
+
55
class SimplePath(Protocol):
    """
    A minimal subset of pathlib.Path required by Distribution.
    """

    # Both joinpath and the '/' operator return another SimplePath,
    # mirroring pathlib semantics.
    def joinpath(
        self, other: Union[str, os.PathLike[str]]
    ) -> SimplePath: ...  # pragma: no cover

    def __truediv__(
        self, other: Union[str, os.PathLike[str]]
    ) -> SimplePath: ...  # pragma: no cover

    @property
    def parent(self) -> SimplePath: ...  # pragma: no cover

    def read_text(self, encoding=None) -> str: ...  # pragma: no cover

    def read_bytes(self) -> bytes: ...  # pragma: no cover

    def exists(self) -> bool: ...  # pragma: no cover
deepseek/lib/python3.10/site-packages/importlib_metadata/_text.py ADDED
@@ -0,0 +1,99 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+
3
+ from ._functools import method_cache
4
+
5
+
6
+ # from jaraco.text 3.5
7
class FoldedCase(str):
    """
    A case insensitive string class; behaves just like str
    except compares equal when the only variation is case.

    >>> s = FoldedCase('hello world')

    >>> s == 'Hello World'
    True

    >>> 'Hello World' == s
    True

    >>> s != 'Hello World'
    False

    >>> s.index('O')
    4

    >>> s.split('O')
    ['hell', ' w', 'rld']

    >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
    ['alpha', 'Beta', 'GAMMA']

    Sequence membership is straightforward.

    >>> "Hello World" in [s]
    True
    >>> s in ["Hello World"]
    True

    You may test for set inclusion, but candidate and elements
    must both be folded.

    >>> FoldedCase("Hello World") in {s}
    True
    >>> s in {FoldedCase("Hello World")}
    True

    String inclusion works as long as the FoldedCase object
    is on the right.

    >>> "hello" in FoldedCase("Hello World")
    True

    But not if the FoldedCase object is on the left:

    >>> FoldedCase('hello') in 'Hello World'
    False

    In that case, use in_:

    >>> FoldedCase('hello').in_('Hello World')
    True

    >>> FoldedCase('hello') > FoldedCase('Hello')
    False
    """

    # All comparisons fold both sides via lower(); __eq__ and __hash__
    # are defined together so folded-equal strings hash alike.
    def __lt__(self, other):
        return self.lower() < other.lower()

    def __gt__(self, other):
        return self.lower() > other.lower()

    def __eq__(self, other):
        return self.lower() == other.lower()

    def __ne__(self, other):
        return self.lower() != other.lower()

    def __hash__(self):
        return hash(self.lower())

    def __contains__(self, other):
        return super().lower().__contains__(other.lower())

    def in_(self, other):
        "Does self appear in other?"
        return self in FoldedCase(other)

    # cache lower since it's likely to be called frequently.
    @method_cache
    def lower(self):
        return super().lower()

    def index(self, sub):
        # Case-insensitive: search the folded text for the folded needle.
        return self.lower().index(sub.lower())

    def split(self, splitter=' ', maxsplit=0):
        # Case-insensitive split via a regex with the IGNORECASE flag.
        pattern = re.compile(re.escape(splitter), re.I)
        return pattern.split(self, maxsplit)
deepseek/lib/python3.10/site-packages/importlib_metadata/compat/__init__.py ADDED
File without changes
deepseek/lib/python3.10/site-packages/importlib_metadata/compat/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (179 Bytes). View file
 
deepseek/lib/python3.10/site-packages/importlib_metadata/compat/__pycache__/py311.cpython-310.pyc ADDED
Binary file (1.02 kB). View file
 
deepseek/lib/python3.10/site-packages/importlib_metadata/compat/__pycache__/py39.cpython-310.pyc ADDED
Binary file (1.17 kB). View file
 
deepseek/lib/python3.10/site-packages/importlib_metadata/compat/py39.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Compatibility layer with Python 3.8/3.9
3
+ """
4
+
5
+ from typing import TYPE_CHECKING, Any, Optional
6
+
7
+ if TYPE_CHECKING: # pragma: no cover
8
+ # Prevent circular imports on runtime.
9
+ from .. import Distribution, EntryPoint
10
+ else:
11
+ Distribution = EntryPoint = Any
12
+
13
+
14
def normalized_name(dist: Distribution) -> Optional[str]:
    """
    Honor name normalization for distributions that don't provide ``_normalized_name``.
    """
    try:
        return dist._normalized_name
    except AttributeError:
        from .. import Prepared  # -> delay to prevent circular imports.

        # Fall back to normalizing whichever name is available.
        return Prepared.normalize(getattr(dist, "name", None) or dist.metadata['Name'])
24
+
25
+
26
def ep_matches(ep: EntryPoint, **params) -> bool:
    """
    Workaround for ``EntryPoint`` objects without the ``matches`` method.
    """
    try:
        return ep.matches(**params)
    except AttributeError:
        from .. import EntryPoint  # -> delay to prevent circular imports.

        # Reconstruct the EntryPoint object to make sure it is compatible.
        return EntryPoint(ep.name, ep.value, ep.group).matches(**params)
deepseek/lib/python3.10/site-packages/importlib_metadata/diagnose.py ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+
3
+ from . import Distribution
4
+
5
+
6
def inspect(path):
    """Print a summary of the distributions discovered on *path*."""
    print("Inspecting", path)
    found = list(Distribution.discover(path=[path]))
    if not found:
        return
    names = ', '.join(dist.name for dist in found)
    print("Found", len(found), "packages:", end=' ')
    print(names)
13
+
14
+
15
def run():
    """Inspect every entry on sys.path for installed distributions."""
    for entry in sys.path:
        inspect(entry)


if __name__ == '__main__':
    run()
deepseek/lib/python3.10/site-packages/importlib_metadata/py.typed ADDED
File without changes
deepseek/lib/python3.10/site-packages/mistral_common/protocol/instruct/__pycache__/validator.cpython-310.pyc ADDED
Binary file (10.4 kB). View file
 
deepseek/lib/python3.10/site-packages/nvidia_cudnn_cu12-9.1.0.70.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
deepseek/lib/python3.10/site-packages/nvidia_cudnn_cu12-9.1.0.70.dist-info/License.txt ADDED
@@ -0,0 +1,154 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ LICENSE AGREEMENT FOR NVIDIA SOFTWARE DEVELOPMENT KITS
2
+
3
+ This license agreement, including exhibits attached ("Agreement") is a legal agreement between you and NVIDIA Corporation ("NVIDIA") and governs your use of a NVIDIA software development kit (“SDK”).
4
+
5
+ Each SDK has its own set of software and materials, but here is a description of the types of items that may be included in a SDK: source code, header files, APIs, data sets and assets (examples include images, textures, models, scenes, videos, native API input/output files), binary software, sample code, libraries, utility programs, programming code and documentation.
6
+
7
+ This Agreement can be accepted only by an adult of legal age of majority in the country in which the SDK is used.
8
+
9
+ If you are entering into this Agreement on behalf of a company or other legal entity, you represent that you have the legal authority to bind the entity to this Agreement, in which case “you” will mean the entity you represent.
10
+
11
+ If you don’t have the required age or authority to accept this Agreement, or if you don’t accept all the terms and conditions of this Agreement, do not download, install or use the SDK.
12
+
13
+ You agree to use the SDK only for purposes that are permitted by (a) this Agreement, and (b) any applicable law, regulation or generally accepted practices or guidelines in the relevant jurisdictions.
14
+
15
+ 1. License.
16
+
17
+ 1.1 Grant
18
+
19
+ Subject to the terms of this Agreement, NVIDIA hereby grants you a non-exclusive, non-transferable license, without the right to sublicense (except as expressly provided in this Agreement) to:
20
+
21
+ (i) Install and use the SDK,
22
+
23
+ (ii) Modify and create derivative works of sample source code delivered in the SDK, and
24
+
25
+ (iii) Distribute those portions of the SDK that are identified in this Agreement as distributable, as incorporated in object code format into a software application that meets the distribution requirements indicated in this Agreement.
26
+
27
+ 1.2 Distribution Requirements
28
+
29
+ These are the distribution requirements for you to exercise the distribution grant:
30
+
31
+ (i) Your application must have material additional functionality, beyond the included portions of the SDK.
32
+
33
+ (ii) The distributable portions of the SDK shall only be accessed by your application.
34
+
35
+ (iii) The following notice shall be included in modifications and derivative works of sample source code distributed: “This software contains source code provided by NVIDIA Corporation.”
36
+
37
+ (iv) Unless a developer tool is identified in this Agreement as distributable, it is delivered for your internal use only.
38
+
39
+ (v) The terms under which you distribute your application must be consistent with the terms of this Agreement, including (without limitation) terms relating to the license grant and license restrictions and protection of NVIDIA’s intellectual property rights. Additionally, you agree that you will protect the privacy, security and legal rights of your application users.
40
+
41
+ (vi) You agree to notify NVIDIA in writing of any known or suspected distribution or use of the SDK not in compliance with the requirements of this Agreement, and to enforce the terms of your agreements with respect to distributed SDK.
42
+
43
+ 1.3 Authorized Users
44
+
45
+ You may allow employees and contractors of your entity or of your subsidiary(ies) to access and use the SDK from your secure network to perform work on your behalf.
46
+
47
+ If you are an academic institution you may allow users enrolled or employed by the academic institution to access and use the SDK from your secure network.
48
+
49
+ You are responsible for the compliance with the terms of this Agreement by your authorized users. If you become aware that your authorized users didn’t follow the terms of this Agreement, you agree to take reasonable steps to resolve the non-compliance and prevent new occurrences.
50
+
51
+ 1.4 Pre-Release SDK
52
+ The SDK versions identified as alpha, beta, preview or otherwise as pre-release, may not be fully functional, may contain errors or design flaws, and may have reduced or different security, privacy, accessibility, availability, and reliability standards relative to commercial versions of NVIDIA software and materials. Use of a pre-release SDK may result in unexpected results, loss of data, project delays or other unpredictable damage or loss.
53
+ You may use a pre-release SDK at your own risk, understanding that pre-release SDKs are not intended for use in production or business-critical systems.
54
+ NVIDIA may choose not to make available a commercial version of any pre-release SDK. NVIDIA may also choose to abandon development and terminate the availability of a pre-release SDK at any time without liability.
55
+ 1.5 Updates
56
+
57
+ NVIDIA may, at its option, make available patches, workarounds or other updates to this SDK. Unless the updates are provided with their separate governing terms, they are deemed part of the SDK licensed to you as provided in this Agreement.
58
+
59
+ You agree that the form and content of the SDK that NVIDIA provides may change without prior notice to you. While NVIDIA generally maintains compatibility between versions, NVIDIA may in some cases make changes that introduce incompatibilities in future versions of the SDK.
60
+
61
+ 1.6 Third Party Licenses
62
+
63
+ The SDK may come bundled with, or otherwise include or be distributed with, third party software licensed by a NVIDIA supplier and/or open source software provided under an open source license. Use of third party software is subject to the third-party license terms, or in the absence of third party terms, the terms of this Agreement. Copyright to third party software is held by the copyright holders indicated in the third-party software or license.
64
+
65
+ 1.7 Reservation of Rights
66
+
67
+ NVIDIA reserves all rights, title and interest in and to the SDK not expressly granted to you under this Agreement.
68
+
69
+ 2. Limitations.
70
+
71
+ The following license limitations apply to your use of the SDK:
72
+
73
+ 2.1 You may not reverse engineer, decompile or disassemble, or remove copyright or other proprietary notices from any portion of the SDK or copies of the SDK.
74
+
75
+ 2.2 Except as expressly provided in this Agreement, you may not copy, sell, rent, sublicense, transfer, distribute, modify, or create derivative works of any portion of the SDK.
76
+
77
+ 2.3 Unless you have an agreement with NVIDIA for this purpose, you may not indicate that an application created with the SDK is sponsored or endorsed by NVIDIA.
78
+
79
+ 2.4 You may not bypass, disable, or circumvent any encryption, security, digital rights management or authentication mechanism in the SDK.
80
+
81
+ 2.5 You may not use the SDK in any manner that would cause it to become subject to an open source software license. As examples, licenses that require as a condition of use, modification, and/or distribution that the SDK be (i) disclosed or distributed in source code form; (ii) licensed for the purpose of making derivative works; or (iii) redistributable at no charge.
82
+
83
+ 2.6 Unless you have an agreement with NVIDIA for this purpose, you may not use the SDK with any system or application where the use or failure of the system or application can reasonably be expected to threaten or result in personal injury, death, or catastrophic loss. Examples include use in avionics, navigation, military, medical, life support or other life critical applications. NVIDIA does not design, test or manufacture the SDK for these critical uses and NVIDIA shall not be liable to you or any third party, in whole or in part, for any claims or damages arising from such uses.
84
+
85
+ 2.7 You agree to defend, indemnify and hold harmless NVIDIA and its affiliates, and their respective employees, contractors, agents, officers and directors, from and against any and all claims, damages, obligations, losses, liabilities, costs or debt, fines, restitutions and expenses (including but not limited to attorney’s fees and costs incident to establishing the right of indemnification) arising out of or related to your use of the SDK outside of the scope of this Agreement, or not in compliance with its terms.
86
+
87
+ 3. Ownership.
88
+
89
+ 3.1 NVIDIA or its licensors hold all rights, title and interest in and to the SDK and its modifications and derivative works, including their respective intellectual property rights, subject to your rights under Section 3.2. This SDK may include software and materials from NVIDIA’s licensors, and these licensors are intended third party beneficiaries that may enforce this Agreement with respect to their intellectual property rights.
90
+
91
+ 3.2 You hold all rights, title and interest in and to your applications and your derivative works of the sample source code delivered in the SDK, including their respective intellectual property rights, subject to NVIDIA’s rights under section 3.1.
92
+
93
+ 3.3 You may, but don’t have to, provide to NVIDIA suggestions, feature requests or other feedback regarding the SDK, including possible enhancements or modifications to the SDK. For any feedback that you voluntarily provide, you hereby grant NVIDIA and its affiliates a perpetual, non-exclusive, worldwide, irrevocable license to use, reproduce, modify, license, sublicense (through multiple tiers of sublicensees), and distribute (through multiple tiers of distributors) it without the payment of any royalties or fees to you. NVIDIA will use feedback at its choice. NVIDIA is constantly looking for ways to improve its products, so you may send feedback to NVIDIA through the developer portal at https://developer.nvidia.com.
94
+
95
+ 4. No Warranties.
96
+
97
+ THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF DEALING OR COURSE OF TRADE.
98
+
99
+ 5. Limitations of Liability.
100
+
101
+ TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL, PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK, WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE), PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS LIMIT.
102
+
103
+ These exclusions and limitations of liability shall apply regardless if NVIDIA or its affiliates have been advised of the possibility of such damages, and regardless of whether a remedy fails its essential purpose. These exclusions and limitations of liability form an essential basis of the bargain between the parties, and, absent any of these exclusions or limitations of liability, the provisions of this Agreement, including, without limitation, the economic terms, would be substantially different.
104
+
105
+ 6. Termination.
106
+
107
+ 6.1 This Agreement will continue to apply until terminated by either you or NVIDIA as described below.
108
+
109
+ 6.2 If you want to terminate this Agreement, you may do so by stopping to use the SDK.
110
+
111
+ 6.3 NVIDIA may, at any time, terminate this Agreement if: (i) you fail to comply with any term of this Agreement and the non-compliance is not fixed within thirty (30) days following notice from NVIDIA (or immediately if you violate NVIDIA’s intellectual property rights); (ii) you commence or participate in any legal proceeding against NVIDIA with respect to the SDK; or (iii) NVIDIA decides to no longer provide the SDK in a country or, in NVIDIA’s sole discretion, the continued use of it is no longer commercially viable.
112
+
113
+ 6.4 Upon any termination of this Agreement, you agree to promptly discontinue use of the SDK and destroy all copies in your possession or control. Your prior distributions in accordance with this Agreement are not affected by the termination of this Agreement. Upon written request, you will certify in writing that you have complied with your commitments under this section. Upon any termination of this Agreement all provisions survive except for the licenses granted to you.
114
+
115
+ 7. General.
116
+
117
+ If you wish to assign this Agreement or your rights and obligations, including by merger, consolidation, dissolution or operation of law, contact NVIDIA to ask for permission. Any attempted assignment not approved by NVIDIA in writing shall be void and of no effect. NVIDIA may assign, delegate or transfer this Agreement and its rights and obligations, and if to a non-affiliate you will be notified.
118
+
119
+ You agree to cooperate with NVIDIA and provide reasonably requested information to verify your compliance with this Agreement.
120
+
121
+ This Agreement will be governed in all respects by the laws of the United States and of the State of Delaware as those laws are applied to contracts entered into and performed entirely within Delaware by Delaware residents, without regard to the conflicts of laws principles. The United Nations Convention on Contracts for the International Sale of Goods is specifically disclaimed. You agree to all terms of this Agreement in the English language.
122
+
123
+ The state or federal courts residing in Santa Clara County, California shall have exclusive jurisdiction over any dispute or claim arising out of this Agreement. Notwithstanding this, you agree that NVIDIA shall still be allowed to apply for injunctive remedies or an equivalent type of urgent legal relief in any jurisdiction.
124
+
125
+ If any court of competent jurisdiction determines that any provision of this Agreement is illegal, invalid or unenforceable, such provision will be construed as limited to the extent necessary to be consistent with and fully enforceable under the law and the remaining provisions will remain in full force and effect. Unless otherwise specified, remedies are cumulative.
126
+
127
+ Each party acknowledges and agrees that the other is an independent contractor in the performance of this Agreement.
128
+
129
+ The SDK has been developed entirely at private expense and is “commercial items” consisting of “commercial computer software” and “commercial computer software documentation” provided with RESTRICTED RIGHTS. Use, duplication or disclosure by the U.S. Government or a U.S. Government subcontractor is subject to the restrictions in this Agreement pursuant to DFARS 227.7202-3(a) or as set forth in subparagraphs (b)(1) and (2) of the Commercial Computer Software - Restricted Rights clause at FAR 52.227-19, as applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas Expressway, Santa Clara, CA 95051.
130
+
131
+ The SDK is subject to United States export laws and regulations. You agree that you will not ship, transfer or export the SDK into any country, or use the SDK in any manner, prohibited by the United States Bureau of Industry and Security or economic sanctions regulations administered by the U.S. Department of Treasury’s Office of Foreign Assets Control (OFAC), or any applicable export laws, restrictions or regulations. These laws include restrictions on destinations, end users and end use. By accepting this Agreement, you confirm that you are not a resident or citizen of any country currently embargoed by the U.S. and that you are not otherwise prohibited from receiving the SDK.
132
+
133
+ Any notice delivered by NVIDIA to you under this Agreement will be delivered via mail, email or fax. You agree that any notices that NVIDIA sends you electronically will satisfy any legal communication requirements. Please direct your legal notices or other correspondence to NVIDIA Corporation, 2788 San Tomas Expressway, Santa Clara, California 95051, United States of America, Attention: Legal Department.
134
+
135
+ This Agreement and any exhibits incorporated into this Agreement constitute the entire agreement of the parties with respect to the subject matter of this Agreement and supersede all prior negotiations or documentation exchanged between the parties relating to this SDK license. Any additional and/or conflicting terms on documents issued by you are null, void, and invalid. Any amendment or waiver under this Agreement shall be in writing and signed by representatives of both parties.
136
+
137
+ (v. January 28, 2020)
138
+
139
+
140
+ cuDNN SUPPLEMENT TO SOFTWARE LICENSE AGREEMENT FOR NVIDIA SOFTWARE DEVELOPMENT KITS
141
+
142
+ The terms in this supplement govern your use of the NVIDIA cuDNN SDK under the terms of your license agreement (“Agreement”) as modified by this supplement. Capitalized terms used but not defined below have the meaning assigned to them in the Agreement.
143
+
144
+ This supplement is an exhibit to the Agreement and is incorporated as an integral part of the Agreement. In the event of conflict between the terms in this supplement and the terms in the Agreement, the terms in this supplement govern.
145
+
146
+ 4.1 License Scope. The SDK is licensed for you to develop applications only for use in systems with NVIDIA GPUs.
147
+
148
+ 2. Distribution. The following portions of the SDK are distributable under the Agreement: the runtime files .so and .h, cudnn64_7.dll, and cudnn.lib.
149
+
150
+ In addition to the rights above, for parties that are developing software intended solely for use on Jetson development kits or Jetson modules and running Linux for Tegra software the following shall apply: the SDK may be distributed in its entirety, as provided by NVIDIA and without separation of its components, for you and/or your licensees to create software development kits for use only on the Jetson platform and running Linux for Tegra software.
151
+
152
+ 3. Licensing. If the distribution terms in this Agreement are not suitable for your organization, or for any questions regarding this Agreement, please contact NVIDIA at nvidia-compute-license-questions@nvidia.com.
153
+ (v. January 28, 2020)
154
+
deepseek/lib/python3.10/site-packages/nvidia_cudnn_cu12-9.1.0.70.dist-info/METADATA ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: nvidia-cudnn-cu12
3
+ Version: 9.1.0.70
4
+ Summary: cuDNN runtime libraries
5
+ Home-page: https://developer.nvidia.com/cuda-zone
6
+ Author: Nvidia CUDA Installer Team
7
+ Author-email: cuda_installer@nvidia.com
8
+ License: NVIDIA Proprietary Software
9
+ Keywords: cuda,nvidia,runtime,machine learning,deep learning
10
+ Classifier: Development Status :: 4 - Beta
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: Intended Audience :: Education
13
+ Classifier: Intended Audience :: Science/Research
14
+ Classifier: License :: Other/Proprietary License
15
+ Classifier: Natural Language :: English
16
+ Classifier: Programming Language :: Python :: 3
17
+ Classifier: Programming Language :: Python :: 3.5
18
+ Classifier: Programming Language :: Python :: 3.6
19
+ Classifier: Programming Language :: Python :: 3.7
20
+ Classifier: Programming Language :: Python :: 3.8
21
+ Classifier: Programming Language :: Python :: 3.9
22
+ Classifier: Programming Language :: Python :: 3.10
23
+ Classifier: Programming Language :: Python :: 3.11
24
+ Classifier: Programming Language :: Python :: 3 :: Only
25
+ Classifier: Topic :: Scientific/Engineering
26
+ Classifier: Topic :: Scientific/Engineering :: Mathematics
27
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
28
+ Classifier: Topic :: Software Development
29
+ Classifier: Topic :: Software Development :: Libraries
30
+ Classifier: Operating System :: Microsoft :: Windows
31
+ Classifier: Operating System :: POSIX :: Linux
32
+ Requires-Python: >=3
33
+ License-File: License.txt
34
+ Requires-Dist: nvidia-cublas-cu12
35
+
36
+ cuDNN runtime libraries containing primitives for deep neural networks.
deepseek/lib/python3.10/site-packages/nvidia_cudnn_cu12-9.1.0.70.dist-info/REQUESTED ADDED
File without changes
deepseek/lib/python3.10/site-packages/nvidia_cudnn_cu12-9.1.0.70.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: bdist_wheel (0.42.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-manylinux2014_x86_64
5
+
deepseek/lib/python3.10/site-packages/websockets/__init__.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import typing
4
+
5
+ from .imports import lazy_import
6
+ from .version import version as __version__ # noqa: F401
7
+
8
+
9
+ __all__ = [
10
+ # .asyncio.client
11
+ "connect",
12
+ "unix_connect",
13
+ # .asyncio.server
14
+ "basic_auth",
15
+ "broadcast",
16
+ "serve",
17
+ "unix_serve",
18
+ # .client
19
+ "ClientProtocol",
20
+ # .datastructures
21
+ "Headers",
22
+ "HeadersLike",
23
+ "MultipleValuesError",
24
+ # .exceptions
25
+ "ConcurrencyError",
26
+ "ConnectionClosed",
27
+ "ConnectionClosedError",
28
+ "ConnectionClosedOK",
29
+ "DuplicateParameter",
30
+ "InvalidHandshake",
31
+ "InvalidHeader",
32
+ "InvalidHeaderFormat",
33
+ "InvalidHeaderValue",
34
+ "InvalidOrigin",
35
+ "InvalidParameterName",
36
+ "InvalidParameterValue",
37
+ "InvalidState",
38
+ "InvalidStatus",
39
+ "InvalidUpgrade",
40
+ "InvalidURI",
41
+ "NegotiationError",
42
+ "PayloadTooBig",
43
+ "ProtocolError",
44
+ "SecurityError",
45
+ "WebSocketException",
46
+ # .server
47
+ "ServerProtocol",
48
+ # .typing
49
+ "Data",
50
+ "ExtensionName",
51
+ "ExtensionParameter",
52
+ "LoggerLike",
53
+ "StatusLike",
54
+ "Origin",
55
+ "Subprotocol",
56
+ ]
57
+
58
+ # When type checking, import non-deprecated aliases eagerly. Else, import on demand.
59
+ if typing.TYPE_CHECKING:
60
+ from .asyncio.client import connect, unix_connect
61
+ from .asyncio.server import basic_auth, broadcast, serve, unix_serve
62
+ from .client import ClientProtocol
63
+ from .datastructures import Headers, HeadersLike, MultipleValuesError
64
+ from .exceptions import (
65
+ ConcurrencyError,
66
+ ConnectionClosed,
67
+ ConnectionClosedError,
68
+ ConnectionClosedOK,
69
+ DuplicateParameter,
70
+ InvalidHandshake,
71
+ InvalidHeader,
72
+ InvalidHeaderFormat,
73
+ InvalidHeaderValue,
74
+ InvalidOrigin,
75
+ InvalidParameterName,
76
+ InvalidParameterValue,
77
+ InvalidState,
78
+ InvalidStatus,
79
+ InvalidUpgrade,
80
+ InvalidURI,
81
+ NegotiationError,
82
+ PayloadTooBig,
83
+ ProtocolError,
84
+ SecurityError,
85
+ WebSocketException,
86
+ )
87
+ from .server import ServerProtocol
88
+ from .typing import (
89
+ Data,
90
+ ExtensionName,
91
+ ExtensionParameter,
92
+ LoggerLike,
93
+ Origin,
94
+ StatusLike,
95
+ Subprotocol,
96
+ )
97
+ else:
98
+ lazy_import(
99
+ globals(),
100
+ aliases={
101
+ # .asyncio.client
102
+ "connect": ".asyncio.client",
103
+ "unix_connect": ".asyncio.client",
104
+ # .asyncio.server
105
+ "basic_auth": ".asyncio.server",
106
+ "broadcast": ".asyncio.server",
107
+ "serve": ".asyncio.server",
108
+ "unix_serve": ".asyncio.server",
109
+ # .client
110
+ "ClientProtocol": ".client",
111
+ # .datastructures
112
+ "Headers": ".datastructures",
113
+ "HeadersLike": ".datastructures",
114
+ "MultipleValuesError": ".datastructures",
115
+ # .exceptions
116
+ "ConcurrencyError": ".exceptions",
117
+ "ConnectionClosed": ".exceptions",
118
+ "ConnectionClosedError": ".exceptions",
119
+ "ConnectionClosedOK": ".exceptions",
120
+ "DuplicateParameter": ".exceptions",
121
+ "InvalidHandshake": ".exceptions",
122
+ "InvalidHeader": ".exceptions",
123
+ "InvalidHeaderFormat": ".exceptions",
124
+ "InvalidHeaderValue": ".exceptions",
125
+ "InvalidOrigin": ".exceptions",
126
+ "InvalidParameterName": ".exceptions",
127
+ "InvalidParameterValue": ".exceptions",
128
+ "InvalidState": ".exceptions",
129
+ "InvalidStatus": ".exceptions",
130
+ "InvalidUpgrade": ".exceptions",
131
+ "InvalidURI": ".exceptions",
132
+ "NegotiationError": ".exceptions",
133
+ "PayloadTooBig": ".exceptions",
134
+ "ProtocolError": ".exceptions",
135
+ "SecurityError": ".exceptions",
136
+ "WebSocketException": ".exceptions",
137
+ # .server
138
+ "ServerProtocol": ".server",
139
+ # .typing
140
+ "Data": ".typing",
141
+ "ExtensionName": ".typing",
142
+ "ExtensionParameter": ".typing",
143
+ "LoggerLike": ".typing",
144
+ "Origin": ".typing",
145
+ "StatusLike": ".typing",
146
+ "Subprotocol": ".typing",
147
+ },
148
+ deprecated_aliases={
149
+ # deprecated in 9.0 - 2021-09-01
150
+ "framing": ".legacy",
151
+ "handshake": ".legacy",
152
+ "parse_uri": ".uri",
153
+ "WebSocketURI": ".uri",
154
+ # deprecated in 14.0 - 2024-11-09
155
+ # .legacy.auth
156
+ "BasicAuthWebSocketServerProtocol": ".legacy.auth",
157
+ "basic_auth_protocol_factory": ".legacy.auth",
158
+ # .legacy.client
159
+ "WebSocketClientProtocol": ".legacy.client",
160
+ # .legacy.exceptions
161
+ "AbortHandshake": ".legacy.exceptions",
162
+ "InvalidMessage": ".legacy.exceptions",
163
+ "InvalidStatusCode": ".legacy.exceptions",
164
+ "RedirectHandshake": ".legacy.exceptions",
165
+ "WebSocketProtocolError": ".legacy.exceptions",
166
+ # .legacy.protocol
167
+ "WebSocketCommonProtocol": ".legacy.protocol",
168
+ # .legacy.server
169
+ "WebSocketServer": ".legacy.server",
170
+ "WebSocketServerProtocol": ".legacy.server",
171
+ },
172
+ )
deepseek/lib/python3.10/site-packages/websockets/__main__.py ADDED
@@ -0,0 +1,159 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import argparse
4
+ import os
5
+ import signal
6
+ import sys
7
+ import threading
8
+
9
+
10
+ try:
11
+ import readline # noqa: F401
12
+ except ImportError: # Windows has no `readline` normally
13
+ pass
14
+
15
+ from .sync.client import ClientConnection, connect
16
+ from .version import version as websockets_version
17
+
18
+
19
+ if sys.platform == "win32":
20
+
21
+ def win_enable_vt100() -> None:
22
+ """
23
+ Enable VT-100 for console output on Windows.
24
+
25
+ See also https://github.com/python/cpython/issues/73245.
26
+
27
+ """
28
+ import ctypes
29
+
30
+ STD_OUTPUT_HANDLE = ctypes.c_uint(-11)
31
+ INVALID_HANDLE_VALUE = ctypes.c_uint(-1)
32
+ ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x004
33
+
34
+ handle = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
35
+ if handle == INVALID_HANDLE_VALUE:
36
+ raise RuntimeError("unable to obtain stdout handle")
37
+
38
+ cur_mode = ctypes.c_uint()
39
+ if ctypes.windll.kernel32.GetConsoleMode(handle, ctypes.byref(cur_mode)) == 0:
40
+ raise RuntimeError("unable to query current console mode")
41
+
42
+ # ctypes ints lack support for the required bit-OR operation.
43
+ # Temporarily convert to Py int, do the OR and convert back.
44
+ py_int_mode = int.from_bytes(cur_mode, sys.byteorder)
45
+ new_mode = ctypes.c_uint(py_int_mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
46
+
47
+ if ctypes.windll.kernel32.SetConsoleMode(handle, new_mode) == 0:
48
+ raise RuntimeError("unable to set console mode")
49
+
50
+
51
+ def print_during_input(string: str) -> None:
52
+ sys.stdout.write(
53
+ # Save cursor position
54
+ "\N{ESC}7"
55
+ # Add a new line
56
+ "\N{LINE FEED}"
57
+ # Move cursor up
58
+ "\N{ESC}[A"
59
+ # Insert blank line, scroll last line down
60
+ "\N{ESC}[L"
61
+ # Print string in the inserted blank line
62
+ f"{string}\N{LINE FEED}"
63
+ # Restore cursor position
64
+ "\N{ESC}8"
65
+ # Move cursor down
66
+ "\N{ESC}[B"
67
+ )
68
+ sys.stdout.flush()
69
+
70
+
71
+ def print_over_input(string: str) -> None:
72
+ sys.stdout.write(
73
+ # Move cursor to beginning of line
74
+ "\N{CARRIAGE RETURN}"
75
+ # Delete current line
76
+ "\N{ESC}[K"
77
+ # Print string
78
+ f"{string}\N{LINE FEED}"
79
+ )
80
+ sys.stdout.flush()
81
+
82
+
83
+ def print_incoming_messages(websocket: ClientConnection, stop: threading.Event) -> None:
84
+ for message in websocket:
85
+ if isinstance(message, str):
86
+ print_during_input("< " + message)
87
+ else:
88
+ print_during_input("< (binary) " + message.hex())
89
+ if not stop.is_set():
90
+ # When the server closes the connection, raise KeyboardInterrupt
91
+ # in the main thread to exit the program.
92
+ if sys.platform == "win32":
93
+ ctrl_c = signal.CTRL_C_EVENT
94
+ else:
95
+ ctrl_c = signal.SIGINT
96
+ os.kill(os.getpid(), ctrl_c)
97
+
98
+
99
+ def main() -> None:
100
+ # Parse command line arguments.
101
+ parser = argparse.ArgumentParser(
102
+ prog="python -m websockets",
103
+ description="Interactive WebSocket client.",
104
+ add_help=False,
105
+ )
106
+ group = parser.add_mutually_exclusive_group()
107
+ group.add_argument("--version", action="store_true")
108
+ group.add_argument("uri", metavar="<uri>", nargs="?")
109
+ args = parser.parse_args()
110
+
111
+ if args.version:
112
+ print(f"websockets {websockets_version}")
113
+ return
114
+
115
+ if args.uri is None:
116
+ parser.error("the following arguments are required: <uri>")
117
+
118
+ # If we're on Windows, enable VT100 terminal support.
119
+ if sys.platform == "win32":
120
+ try:
121
+ win_enable_vt100()
122
+ except RuntimeError as exc:
123
+ sys.stderr.write(
124
+ f"Unable to set terminal to VT100 mode. This is only "
125
+ f"supported since Win10 anniversary update. Expect "
126
+ f"weird symbols on the terminal.\nError: {exc}\n"
127
+ )
128
+ sys.stderr.flush()
129
+
130
+ try:
131
+ websocket = connect(args.uri)
132
+ except Exception as exc:
133
+ print(f"Failed to connect to {args.uri}: {exc}.")
134
+ sys.exit(1)
135
+ else:
136
+ print(f"Connected to {args.uri}.")
137
+
138
+ stop = threading.Event()
139
+
140
+ # Start the thread that reads messages from the connection.
141
+ thread = threading.Thread(target=print_incoming_messages, args=(websocket, stop))
142
+ thread.start()
143
+
144
+ # Read from stdin in the main thread in order to receive signals.
145
+ try:
146
+ while True:
147
+ # Since there's no size limit, put_nowait is identical to put.
148
+ message = input("> ")
149
+ websocket.send(message)
150
+ except (KeyboardInterrupt, EOFError): # ^C, ^D
151
+ stop.set()
152
+ websocket.close()
153
+ print_over_input("Connection closed.")
154
+
155
+ thread.join()
156
+
157
+
158
+ if __name__ == "__main__":
159
+ main()
deepseek/lib/python3.10/site-packages/websockets/auth.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import warnings
4
+
5
+
6
+ with warnings.catch_warnings():
7
+ # Suppress redundant DeprecationWarning raised by websockets.legacy.
8
+ warnings.filterwarnings("ignore", category=DeprecationWarning)
9
+ from .legacy.auth import *
10
+ from .legacy.auth import __all__ # noqa: F401
11
+
12
+
13
+ warnings.warn( # deprecated in 14.0 - 2024-11-09
14
+ "websockets.auth, an alias for websockets.legacy.auth, is deprecated; "
15
+ "see https://websockets.readthedocs.io/en/stable/howto/upgrade.html "
16
+ "for upgrade instructions",
17
+ DeprecationWarning,
18
+ )
deepseek/lib/python3.10/site-packages/websockets/client.py ADDED
@@ -0,0 +1,400 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ import random
5
+ import warnings
6
+ from collections.abc import Generator, Sequence
7
+ from typing import Any
8
+
9
+ from .datastructures import Headers, MultipleValuesError
10
+ from .exceptions import (
11
+ InvalidHandshake,
12
+ InvalidHeader,
13
+ InvalidHeaderValue,
14
+ InvalidStatus,
15
+ InvalidUpgrade,
16
+ NegotiationError,
17
+ )
18
+ from .extensions import ClientExtensionFactory, Extension
19
+ from .headers import (
20
+ build_authorization_basic,
21
+ build_extension,
22
+ build_host,
23
+ build_subprotocol,
24
+ parse_connection,
25
+ parse_extension,
26
+ parse_subprotocol,
27
+ parse_upgrade,
28
+ )
29
+ from .http11 import Request, Response
30
+ from .imports import lazy_import
31
+ from .protocol import CLIENT, CONNECTING, OPEN, Protocol, State
32
+ from .typing import (
33
+ ConnectionOption,
34
+ ExtensionHeader,
35
+ LoggerLike,
36
+ Origin,
37
+ Subprotocol,
38
+ UpgradeProtocol,
39
+ )
40
+ from .uri import WebSocketURI
41
+ from .utils import accept_key, generate_key
42
+
43
+
44
+ __all__ = ["ClientProtocol"]
45
+
46
+
47
+ class ClientProtocol(Protocol):
48
+ """
49
+ Sans-I/O implementation of a WebSocket client connection.
50
+
51
+ Args:
52
+ wsuri: URI of the WebSocket server, parsed
53
+ with :func:`~websockets.uri.parse_uri`.
54
+ origin: Value of the ``Origin`` header. This is useful when connecting
55
+ to a server that validates the ``Origin`` header to defend against
56
+ Cross-Site WebSocket Hijacking attacks.
57
+ extensions: List of supported extensions, in order in which they
58
+ should be tried.
59
+ subprotocols: List of supported subprotocols, in order of decreasing
60
+ preference.
61
+ state: Initial state of the WebSocket connection.
62
+ max_size: Maximum size of incoming messages in bytes;
63
+ :obj:`None` disables the limit.
64
+ logger: Logger for this connection;
65
+ defaults to ``logging.getLogger("websockets.client")``;
66
+ see the :doc:`logging guide <../../topics/logging>` for details.
67
+
68
+ """
69
+
70
+ def __init__(
71
+ self,
72
+ wsuri: WebSocketURI,
73
+ *,
74
+ origin: Origin | None = None,
75
+ extensions: Sequence[ClientExtensionFactory] | None = None,
76
+ subprotocols: Sequence[Subprotocol] | None = None,
77
+ state: State = CONNECTING,
78
+ max_size: int | None = 2**20,
79
+ logger: LoggerLike | None = None,
80
+ ) -> None:
81
+ super().__init__(
82
+ side=CLIENT,
83
+ state=state,
84
+ max_size=max_size,
85
+ logger=logger,
86
+ )
87
+ self.wsuri = wsuri
88
+ self.origin = origin
89
+ self.available_extensions = extensions
90
+ self.available_subprotocols = subprotocols
91
+ self.key = generate_key()
92
+
93
+ def connect(self) -> Request:
94
+ """
95
+ Create a handshake request to open a connection.
96
+
97
+ You must send the handshake request with :meth:`send_request`.
98
+
99
+ You can modify it before sending it, for example to add HTTP headers.
100
+
101
+ Returns:
102
+ WebSocket handshake request event to send to the server.
103
+
104
+ """
105
+ headers = Headers()
106
+
107
+ headers["Host"] = build_host(
108
+ self.wsuri.host, self.wsuri.port, self.wsuri.secure
109
+ )
110
+
111
+ if self.wsuri.user_info:
112
+ headers["Authorization"] = build_authorization_basic(*self.wsuri.user_info)
113
+
114
+ if self.origin is not None:
115
+ headers["Origin"] = self.origin
116
+
117
+ headers["Upgrade"] = "websocket"
118
+ headers["Connection"] = "Upgrade"
119
+ headers["Sec-WebSocket-Key"] = self.key
120
+ headers["Sec-WebSocket-Version"] = "13"
121
+
122
+ if self.available_extensions is not None:
123
+ extensions_header = build_extension(
124
+ [
125
+ (extension_factory.name, extension_factory.get_request_params())
126
+ for extension_factory in self.available_extensions
127
+ ]
128
+ )
129
+ headers["Sec-WebSocket-Extensions"] = extensions_header
130
+
131
+ if self.available_subprotocols is not None:
132
+ protocol_header = build_subprotocol(self.available_subprotocols)
133
+ headers["Sec-WebSocket-Protocol"] = protocol_header
134
+
135
+ return Request(self.wsuri.resource_name, headers)
136
+
137
    def process_response(self, response: Response) -> None:
        """
        Check a handshake response.

        Args:
            response: WebSocket handshake response received from the server.

        Raises:
            InvalidHandshake: If the handshake response is invalid.

        """

        # RFC 6455 requires a 101 Switching Protocols status for the upgrade.
        if response.status_code != 101:
            raise InvalidStatus(response)

        headers = response.headers

        # Multiple Connection headers may be present; flatten all their
        # comma-separated values into a single list.
        connection: list[ConnectionOption] = sum(
            [parse_connection(value) for value in headers.get_all("Connection")], []
        )

        if not any(value.lower() == "upgrade" for value in connection):
            raise InvalidUpgrade(
                "Connection", ", ".join(connection) if connection else None
            )

        upgrade: list[UpgradeProtocol] = sum(
            [parse_upgrade(value) for value in headers.get_all("Upgrade")], []
        )

        # For compatibility with non-strict implementations, ignore case when
        # checking the Upgrade header. It's supposed to be 'WebSocket'.
        if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
            raise InvalidUpgrade("Upgrade", ", ".join(upgrade) if upgrade else None)

        # Exactly one Sec-WebSocket-Accept header is required.
        try:
            s_w_accept = headers["Sec-WebSocket-Accept"]
        except KeyError:
            raise InvalidHeader("Sec-WebSocket-Accept") from None
        except MultipleValuesError:
            raise InvalidHeader("Sec-WebSocket-Accept", "multiple values") from None

        # The accept value must be derived from the key we sent; this shows
        # the server actually processed our handshake request.
        if s_w_accept != accept_key(self.key):
            raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept)

        self.extensions = self.process_extensions(headers)

        self.subprotocol = self.process_subprotocol(headers)
185
+
186
    def process_extensions(self, headers: Headers) -> list[Extension]:
        """
        Handle the Sec-WebSocket-Extensions HTTP response header.

        Check that each extension is supported, as well as its parameters.

        :rfc:`6455` leaves the rules up to the specification of each
        extension.

        To provide this level of flexibility, for each extension accepted by
        the server, we check for a match with each extension available in the
        client configuration. If no match is found, an exception is raised.

        If several variants of the same extension are accepted by the server,
        it may be configured several times, which won't make sense in general.
        Extensions must implement their own requirements. For this purpose,
        the list of previously accepted extensions is provided.

        Other requirements, for example related to mandatory extensions or the
        order of extensions, may be implemented by overriding this method.

        Args:
            headers: WebSocket handshake response headers.

        Returns:
            List of accepted extensions.

        Raises:
            InvalidHandshake: To abort the handshake.

        """
        accepted_extensions: list[Extension] = []

        extensions = headers.get_all("Sec-WebSocket-Extensions")

        if extensions:
            # The server accepted extensions but we offered none.
            if self.available_extensions is None:
                raise NegotiationError("no extensions supported")

            # Flatten the comma-separated values of every header line.
            parsed_extensions: list[ExtensionHeader] = sum(
                [parse_extension(header_value) for header_value in extensions], []
            )

            for name, response_params in parsed_extensions:
                for extension_factory in self.available_extensions:
                    # Skip non-matching extensions based on their name.
                    if extension_factory.name != name:
                        continue

                    # Skip non-matching extensions based on their params.
                    try:
                        extension = extension_factory.process_response_params(
                            response_params, accepted_extensions
                        )
                    except NegotiationError:
                        continue

                    # Add matching extension to the final list.
                    accepted_extensions.append(extension)

                    # Break out of the loop once we have a match.
                    break

                # If we didn't break from the loop, no extension in our list
                # matched what the server sent. Fail the connection.
                else:
                    raise NegotiationError(
                        f"Unsupported extension: "
                        f"name = {name}, params = {response_params}"
                    )

        return accepted_extensions
258
+
259
+ def process_subprotocol(self, headers: Headers) -> Subprotocol | None:
260
+ """
261
+ Handle the Sec-WebSocket-Protocol HTTP response header.
262
+
263
+ If provided, check that it contains exactly one supported subprotocol.
264
+
265
+ Args:
266
+ headers: WebSocket handshake response headers.
267
+
268
+ Returns:
269
+ Subprotocol, if one was selected.
270
+
271
+ """
272
+ subprotocol: Subprotocol | None = None
273
+
274
+ subprotocols = headers.get_all("Sec-WebSocket-Protocol")
275
+
276
+ if subprotocols:
277
+ if self.available_subprotocols is None:
278
+ raise NegotiationError("no subprotocols supported")
279
+
280
+ parsed_subprotocols: Sequence[Subprotocol] = sum(
281
+ [parse_subprotocol(header_value) for header_value in subprotocols], []
282
+ )
283
+
284
+ if len(parsed_subprotocols) > 1:
285
+ raise InvalidHeader(
286
+ "Sec-WebSocket-Protocol",
287
+ f"multiple values: {', '.join(parsed_subprotocols)}",
288
+ )
289
+
290
+ subprotocol = parsed_subprotocols[0]
291
+
292
+ if subprotocol not in self.available_subprotocols:
293
+ raise NegotiationError(f"unsupported subprotocol: {subprotocol}")
294
+
295
+ return subprotocol
296
+
297
+ def send_request(self, request: Request) -> None:
298
+ """
299
+ Send a handshake request to the server.
300
+
301
+ Args:
302
+ request: WebSocket handshake request event.
303
+
304
+ """
305
+ if self.debug:
306
+ self.logger.debug("> GET %s HTTP/1.1", request.path)
307
+ for key, value in request.headers.raw_items():
308
+ self.logger.debug("> %s: %s", key, value)
309
+
310
+ self.writes.append(request.serialize())
311
+
312
    def parse(self) -> Generator[None]:
        """
        Parse incoming data, completing the opening handshake first.

        While CONNECTING, read the server's handshake response; on success,
        switch the connection to OPEN and delegate to the base parser.

        """
        if self.state is CONNECTING:
            try:
                response = yield from Response.parse(
                    self.reader.read_line,
                    self.reader.read_exact,
                    self.reader.read_to_eof,
                )
            except Exception as exc:
                # Reading the response failed: record the error, close the
                # write side, and discard any further incoming data.
                self.handshake_exc = exc
                self.send_eof()
                self.parser = self.discard()
                next(self.parser)  # start coroutine
                # NOTE(review): self.parser now points at discard(), so this
                # generator presumably isn't resumed past this yield — confirm.
                yield

            if self.debug:
                code, phrase = response.status_code, response.reason_phrase
                self.logger.debug("< HTTP/1.1 %d %s", code, phrase)
                for key, value in response.headers.raw_items():
                    self.logger.debug("< %s: %s", key, value)
                if response.body is not None:
                    self.logger.debug("< [body] (%d bytes)", len(response.body))

            try:
                self.process_response(response)
            except InvalidHandshake as exc:
                # The response was malformed or rejected the upgrade: attach
                # the error to the event, then shut down as above.
                response._exception = exc
                self.events.append(response)
                self.handshake_exc = exc
                self.send_eof()
                self.parser = self.discard()
                next(self.parser)  # start coroutine
                yield

            # Handshake succeeded: the connection is now open.
            assert self.state is CONNECTING
            self.state = OPEN
            self.events.append(response)

        # Fall through to frame parsing in the base protocol.
        yield from super().parse()
351
+
352
+
353
class ClientConnection(ClientProtocol):
    # Backwards-compatibility shim: the class was renamed in version 11.0.
    # Instantiating it warns and otherwise behaves like ClientProtocol.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        warnings.warn(  # deprecated in 11.0 - 2023-04-02
            "ClientConnection was renamed to ClientProtocol",
            DeprecationWarning,
        )
        super().__init__(*args, **kwargs)
360
+
361
+
362
# Reconnection backoff tuning knobs, overridable through the environment.
BACKOFF_INITIAL_DELAY = float(os.environ.get("WEBSOCKETS_BACKOFF_INITIAL_DELAY", "5"))
BACKOFF_MIN_DELAY = float(os.environ.get("WEBSOCKETS_BACKOFF_MIN_DELAY", "3.1"))
BACKOFF_MAX_DELAY = float(os.environ.get("WEBSOCKETS_BACKOFF_MAX_DELAY", "90.0"))
BACKOFF_FACTOR = float(os.environ.get("WEBSOCKETS_BACKOFF_FACTOR", "1.618"))


def backoff(
    initial_delay: float = BACKOFF_INITIAL_DELAY,
    min_delay: float = BACKOFF_MIN_DELAY,
    max_delay: float = BACKOFF_MAX_DELAY,
    factor: float = BACKOFF_FACTOR,
) -> Generator[float]:
    """
    Generate a series of backoff delays between reconnection attempts.

    Yields:
        How many seconds to wait before retrying to connect.

    """
    # Random splay in [0, initial_delay) seconds, per RFC 6455, 7.2.3
    # (Recovering from Abnormal Closure).
    yield random.random() * initial_delay
    # Grow geometrically from min_delay until reaching max_delay...
    current = min_delay
    while current < max_delay:
        yield current
        current *= factor
    # ...then stay capped at max_delay forever.
    while True:
        yield max_delay
390
+
391
+
392
# Install deprecated module-level aliases that resolve lazily, on first
# attribute access, and point at the legacy client implementation.
lazy_import(
    globals(),
    deprecated_aliases={
        # deprecated in 14.0 - 2024-11-09
        "WebSocketClientProtocol": ".legacy.client",
        "connect": ".legacy.client",
        "unix_connect": ".legacy.client",
    },
)
deepseek/lib/python3.10/site-packages/websockets/connection.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from __future__ import annotations

import warnings

# Re-export the renamed objects under their pre-11.0 names so that imports
# of ``websockets.connection`` keep working.
from .protocol import SEND_EOF, Protocol as Connection, Side, State  # noqa: F401


# Warn once at import time that this module is a deprecated alias.
warnings.warn(  # deprecated in 11.0 - 2023-04-02
    "websockets.connection was renamed to websockets.protocol "
    "and Connection was renamed to Protocol",
    DeprecationWarning,
)
deepseek/lib/python3.10/site-packages/websockets/datastructures.py ADDED
@@ -0,0 +1,183 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from collections.abc import Iterable, Iterator, Mapping, MutableMapping
4
+ from typing import Any, Protocol, Union
5
+
6
+
7
# Public API of this module.
__all__ = ["Headers", "HeadersLike", "MultipleValuesError"]
8
+
9
+
10
class MultipleValuesError(LookupError):
    """
    Raised by :class:`Headers` when a key maps to more than one value.

    """

    def __str__(self) -> str:
        # Mirror CPython's KeyError_str (Objects/exceptions.c): a single
        # argument is shown via repr(), anything else uses the default.
        if len(self.args) == 1:
            return repr(self.args[0])
        return super().__str__()
21
+
22
+
23
class Headers(MutableMapping[str, str]):
    """
    Efficient data structure for manipulating HTTP headers.

    A :class:`list` of ``(name, values)`` is inefficient for lookups.

    A :class:`dict` doesn't suffice because header names are case-insensitive
    and multiple occurrences of headers with the same name are possible.

    :class:`Headers` therefore keeps both: a lower-cased index for fast
    lookups and the original ``(name, value)`` pairs for serialization.

    Multiple values are handled as follows:

    - ``headers[name]`` raises :exc:`KeyError` when there's no value, returns
      the value when there's exactly one, and raises
      :exc:`MultipleValuesError` when there are several.
    - ``headers[name] = value`` appends to the values for that header.
    - ``del headers[name]`` removes every value for that header (slow).

    As long as no header occurs multiple times, :class:`Headers` behaves like
    :class:`dict`, except keys are lower-cased to provide case-insensitivity.

    Two methods support manipulating multiple values explicitly:

    - :meth:`get_all` returns a list of all values for a header;
    - :meth:`raw_items` returns an iterator of ``(name, values)`` pairs.

    """

    __slots__ = ["_dict", "_list"]

    # Like dict, Headers accepts an optional "mapping or iterable" argument.
    def __init__(self, *args: HeadersLike, **kwargs: str) -> None:
        self._dict: dict[str, list[str]] = {}
        self._list: list[tuple[str, str]] = []
        self.update(*args, **kwargs)

    def __str__(self) -> str:
        # Render every original pair, then the blank line ending the headers.
        lines = [f"{name}: {value}\r\n" for name, value in self._list]
        lines.append("\r\n")
        return "".join(lines)

    def __repr__(self) -> str:
        return "{}({!r})".format(self.__class__.__name__, self._list)

    def copy(self) -> Headers:
        """Return a shallow copy preserving duplicate headers."""
        clone = self.__class__()
        clone._dict = self._dict.copy()
        clone._list = self._list.copy()
        return clone

    def serialize(self) -> bytes:
        # Headers only contain ASCII characters, so a plain encode suffices.
        text = str(self)
        return text.encode()

    # Collection methods

    def __contains__(self, key: object) -> bool:
        if not isinstance(key, str):
            return False
        return key.lower() in self._dict

    def __iter__(self) -> Iterator[str]:
        return iter(self._dict)

    def __len__(self) -> int:
        return len(self._dict)

    # MutableMapping methods

    def __getitem__(self, key: str) -> str:
        values = self._dict[key.lower()]
        if len(values) != 1:
            raise MultipleValuesError(key)
        return values[0]

    def __setitem__(self, key: str, value: str) -> None:
        # Append rather than replace: multiple values are legal in HTTP.
        lowered = key.lower()
        if lowered not in self._dict:
            self._dict[lowered] = []
        self._dict[lowered].append(value)
        self._list.append((key, value))

    def __delitem__(self, key: str) -> None:
        lowered = key.lower()
        del self._dict[lowered]
        # This is inefficient. Fortunately deleting HTTP headers is uncommon.
        self._list = [item for item in self._list if item[0].lower() != lowered]

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, Headers):
            return self._dict == other._dict
        return NotImplemented

    def clear(self) -> None:
        """
        Remove all headers.

        """
        self._dict = {}
        self._list = []

    def update(self, *args: HeadersLike, **kwargs: str) -> None:
        """
        Update from a :class:`Headers` instance and/or keyword arguments.

        """
        # Convert Headers arguments to (name, value) pairs so duplicates
        # survive; delegate the actual merging to MutableMapping.update.
        normalized = tuple(
            arg.raw_items() if isinstance(arg, Headers) else arg for arg in args
        )
        super().update(*normalized, **kwargs)

    # Methods for handling multiple values

    def get_all(self, key: str) -> list[str]:
        """
        Return the (possibly empty) list of all values for a header.

        Args:
            key: Header name.

        """
        return self._dict.get(key.lower(), [])

    def raw_items(self) -> Iterator[tuple[str, str]]:
        """
        Return an iterator of all values as ``(name, value)`` pairs.

        """
        return iter(self._list)
156
+
157
+
158
+ # copy of _typeshed.SupportsKeysAndGetItem.
159
+ class SupportsKeysAndGetItem(Protocol): # pragma: no cover
160
+ """
161
+ Dict-like types with ``keys() -> str`` and ``__getitem__(key: str) -> str`` methods.
162
+
163
+ """
164
+
165
+ def keys(self) -> Iterable[str]: ...
166
+
167
+ def __getitem__(self, key: str) -> str: ...
168
+
169
+
170
+ # Change to Headers | Mapping[str, str] | ... when dropping Python < 3.10.
171
+ HeadersLike = Union[
172
+ Headers,
173
+ Mapping[str, str],
174
+ Iterable[tuple[str, str]],
175
+ SupportsKeysAndGetItem,
176
+ ]
177
+ """
178
+ Types accepted where :class:`Headers` is expected.
179
+
180
+ In addition to :class:`Headers` itself, this includes dict-like types where both
181
+ keys and values are :class:`str`.
182
+
183
+ """
deepseek/lib/python3.10/site-packages/websockets/exceptions.py ADDED
@@ -0,0 +1,418 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ :mod:`websockets.exceptions` defines the following hierarchy of exceptions.
3
+
4
+ * :exc:`WebSocketException`
5
+ * :exc:`ConnectionClosed`
6
+ * :exc:`ConnectionClosedOK`
7
+ * :exc:`ConnectionClosedError`
8
+ * :exc:`InvalidURI`
9
+ * :exc:`InvalidHandshake`
10
+ * :exc:`SecurityError`
11
+ * :exc:`InvalidMessage` (legacy)
12
+ * :exc:`InvalidStatus`
13
+ * :exc:`InvalidStatusCode` (legacy)
14
+ * :exc:`InvalidHeader`
15
+ * :exc:`InvalidHeaderFormat`
16
+ * :exc:`InvalidHeaderValue`
17
+ * :exc:`InvalidOrigin`
18
+ * :exc:`InvalidUpgrade`
19
+ * :exc:`NegotiationError`
20
+ * :exc:`DuplicateParameter`
21
+ * :exc:`InvalidParameterName`
22
+ * :exc:`InvalidParameterValue`
23
+ * :exc:`AbortHandshake` (legacy)
24
+ * :exc:`RedirectHandshake` (legacy)
25
+ * :exc:`ProtocolError` (Sans-I/O)
26
+ * :exc:`PayloadTooBig` (Sans-I/O)
27
+ * :exc:`InvalidState` (Sans-I/O)
28
+ * :exc:`ConcurrencyError`
29
+
30
+ """
31
+
32
+ from __future__ import annotations
33
+
34
+ import warnings
35
+
36
+ from .imports import lazy_import
37
+
38
+
39
# Public API of this module. Legacy exception names are added lazily by the
# lazy_import() call at the bottom of the file.
__all__ = [
    "WebSocketException",
    "ConnectionClosed",
    "ConnectionClosedOK",
    "ConnectionClosedError",
    "InvalidURI",
    "InvalidHandshake",
    "SecurityError",
    "InvalidStatus",
    "InvalidHeader",
    "InvalidHeaderFormat",
    "InvalidHeaderValue",
    "InvalidOrigin",
    "InvalidUpgrade",
    "NegotiationError",
    "DuplicateParameter",
    "InvalidParameterName",
    "InvalidParameterValue",
    "ProtocolError",
    "PayloadTooBig",
    "InvalidState",
    "ConcurrencyError",
]
62
+
63
+
64
class WebSocketException(Exception):
    """
    Root of the exception hierarchy defined by websockets.

    """


class ConnectionClosed(WebSocketException):
    """
    Raised when an operation is attempted on a closed connection.

    Attributes:
        rcvd: Close frame received, if any; its code and reason are
            available in ``rcvd.code`` and ``rcvd.reason``.
        sent: Close frame sent, if any; its code and reason are available
            in ``sent.code`` and ``sent.reason``.
        rcvd_then_sent: When close frames were both received and sent, the
            order in which this happened, from the perspective of this side
            of the connection.

    """

    def __init__(
        self,
        rcvd: frames.Close | None,
        sent: frames.Close | None,
        rcvd_then_sent: bool | None = None,
    ) -> None:
        self.rcvd = rcvd
        self.sent = sent
        self.rcvd_then_sent = rcvd_then_sent
        # The ordering flag is meaningful exactly when both frames exist.
        assert (self.rcvd_then_sent is None) == (self.rcvd is None or self.sent is None)

    def __str__(self) -> str:
        # Flat guard clauses instead of nested if/else; same five outcomes.
        if self.rcvd is None and self.sent is None:
            return "no close frame received or sent"
        if self.rcvd is None:
            return f"sent {self.sent}; no close frame received"
        if self.sent is None:
            return f"received {self.rcvd}; no close frame sent"
        if self.rcvd_then_sent:
            return f"received {self.rcvd}; then sent {self.sent}"
        return f"sent {self.sent}; then received {self.rcvd}"

    # code and reason attributes are provided for backwards-compatibility

    @property
    def code(self) -> int:
        warnings.warn(  # deprecated in 13.1 - 2024-09-21
            "ConnectionClosed.code is deprecated; "
            "use Protocol.close_code or ConnectionClosed.rcvd.code",
            DeprecationWarning,
        )
        if self.rcvd is None:
            return frames.CloseCode.ABNORMAL_CLOSURE
        return self.rcvd.code

    @property
    def reason(self) -> str:
        warnings.warn(  # deprecated in 13.1 - 2024-09-21
            "ConnectionClosed.reason is deprecated; "
            "use Protocol.close_reason or ConnectionClosed.rcvd.reason",
            DeprecationWarning,
        )
        if self.rcvd is None:
            return ""
        return self.rcvd.reason


class ConnectionClosedOK(ConnectionClosed):
    """
    Like :exc:`ConnectionClosed`, for a connection that terminated properly.

    A close frame with code 1000 (OK) or 1001 (going away) or without a code
    was received and sent.

    """


class ConnectionClosedError(ConnectionClosed):
    """
    Like :exc:`ConnectionClosed`, for a connection that terminated abnormally.

    A close frame with a code other than 1000 (OK) or 1001 (going away) was
    received or sent, or the closing handshake didn't complete properly.

    """


class InvalidURI(WebSocketException):
    """
    Raised when connecting to a URI that isn't a valid WebSocket URI.

    """

    def __init__(self, uri: str, msg: str) -> None:
        self.uri = uri
        self.msg = msg

    def __str__(self) -> str:
        return f"{self.uri} isn't a valid URI: {self.msg}"


class InvalidHandshake(WebSocketException):
    """
    Base class for exceptions raised when the opening handshake fails.

    """


class SecurityError(InvalidHandshake):
    """
    Raised when a handshake request or response breaks a security rule.

    Security limits can be configured with :doc:`environment variables
    <../reference/variables>`.

    """


class InvalidStatus(InvalidHandshake):
    """
    Raised when a handshake response rejects the WebSocket upgrade.

    """

    def __init__(self, response: http11.Response) -> None:
        self.response = response

    def __str__(self) -> str:
        return f"server rejected WebSocket connection: HTTP {self.response.status_code:d}"


class InvalidHeader(InvalidHandshake):
    """
    Raised when an HTTP header doesn't have a valid format or value.

    """

    def __init__(self, name: str, value: str | None = None) -> None:
        self.name = name
        self.value = value

    def __str__(self) -> str:
        # None means the header was absent; "" means it was present but empty.
        if self.value is None:
            return f"missing {self.name} header"
        if not self.value:
            return f"empty {self.name} header"
        return f"invalid {self.name} header: {self.value}"


class InvalidHeaderFormat(InvalidHeader):
    """
    Raised when an HTTP header cannot be parsed.

    The format of the header doesn't match the grammar for that header.

    """

    def __init__(self, name: str, error: str, header: str, pos: int) -> None:
        super().__init__(name, f"{error} at {pos} in {header}")


class InvalidHeaderValue(InvalidHeader):
    """
    Raised when an HTTP header has a wrong value.

    The format of the header is correct but the value isn't acceptable.

    """


class InvalidOrigin(InvalidHeader):
    """
    Raised when the Origin header in a request isn't allowed.

    """

    def __init__(self, origin: str | None) -> None:
        super().__init__("Origin", origin)


class InvalidUpgrade(InvalidHeader):
    """
    Raised when the Upgrade or Connection header isn't correct.

    """


class NegotiationError(InvalidHandshake):
    """
    Raised when negotiating an extension or a subprotocol fails.

    """


class DuplicateParameter(NegotiationError):
    """
    Raised when a parameter name is repeated in an extension header.

    """

    def __init__(self, name: str) -> None:
        self.name = name

    def __str__(self) -> str:
        return f"duplicate parameter: {self.name}"


class InvalidParameterName(NegotiationError):
    """
    Raised when a parameter name in an extension header is invalid.

    """

    def __init__(self, name: str) -> None:
        self.name = name

    def __str__(self) -> str:
        return f"invalid parameter name: {self.name}"


class InvalidParameterValue(NegotiationError):
    """
    Raised when a parameter value in an extension header is invalid.

    """

    def __init__(self, name: str, value: str | None) -> None:
        self.name = name
        self.value = value

    def __str__(self) -> str:
        if self.value is None:
            return f"missing value for parameter {self.name}"
        if not self.value:
            return f"empty value for parameter {self.name}"
        return f"invalid value for parameter {self.name}: {self.value}"


class ProtocolError(WebSocketException):
    """
    Raised when receiving or sending a frame that breaks the protocol.

    The Sans-I/O implementation raises this exception when:

    * receiving or sending a frame that contains invalid data;
    * receiving or sending an invalid sequence of frames.

    """


class PayloadTooBig(WebSocketException):
    """
    Raised when parsing a frame whose payload exceeds the maximum size.

    The Sans-I/O layer uses this exception internally. It doesn't bubble up
    to the I/O layer.

    The :meth:`~websockets.extensions.Extension.decode` method of extensions
    must raise :exc:`PayloadTooBig` if decoding a frame would exceed the
    limit.

    """

    def __init__(
        self,
        size_or_message: int | None | str,
        max_size: int | None = None,
        cur_size: int | None = None,
    ) -> None:
        if isinstance(size_or_message, str):
            # Legacy one-argument form carrying a pre-formatted message.
            assert max_size is None
            assert cur_size is None
            warnings.warn(  # deprecated in 14.0 - 2024-11-09
                "PayloadTooBig(message) is deprecated; "
                "change to PayloadTooBig(size, max_size)",
                DeprecationWarning,
            )
            self.message: str | None = size_or_message
        else:
            self.message = None
            self.size: int | None = size_or_message
            assert max_size is not None
            self.max_size: int = max_size
            self.cur_size: int | None = None
            self.set_current_size(cur_size)

    def __str__(self) -> str:
        if self.message is not None:
            return self.message
        # Assemble the structured message; segment order matters.
        parts = ["frame "]
        if self.size is not None:
            parts.append(f"with {self.size} bytes ")
        if self.cur_size is not None:
            parts.append(f"after reading {self.cur_size} bytes ")
        parts.append(f"exceeds limit of {self.max_size} bytes")
        return "".join(parts)

    def set_current_size(self, cur_size: int | None) -> None:
        # May only be called while the current size is still unset.
        assert self.cur_size is None
        if cur_size is not None:
            # Account for bytes already consumed by raising the limit.
            self.max_size += cur_size
            self.cur_size = cur_size


class InvalidState(WebSocketException, AssertionError):
    """
    Raised when sending a frame is forbidden in the current state.

    Specifically, the Sans-I/O layer raises this exception when:

    * sending a data frame to a connection in a state other
      :attr:`~websockets.protocol.State.OPEN`;
    * sending a control frame to a connection in a state other than
      :attr:`~websockets.protocol.State.OPEN` or
      :attr:`~websockets.protocol.State.CLOSING`.

    """


class ConcurrencyError(WebSocketException, RuntimeError):
    """
    Raised when receiving or sending messages concurrently.

    WebSocket is a connection-oriented protocol. Reads must be serialized; so
    must be writes. However, reading and writing concurrently is possible.

    """
402
+
403
+
404
# At the bottom to break import cycles created by type annotations.
from . import frames, http11  # noqa: E402


# Expose legacy exception names lazily; accessing them resolves the import
# and emits a deprecation warning.
lazy_import(
    globals(),
    deprecated_aliases={
        # deprecated in 14.0 - 2024-11-09
        "AbortHandshake": ".legacy.exceptions",
        "InvalidMessage": ".legacy.exceptions",
        "InvalidStatusCode": ".legacy.exceptions",
        "RedirectHandshake": ".legacy.exceptions",
        "WebSocketProtocolError": ".legacy.exceptions",
    },
)
deepseek/lib/python3.10/site-packages/websockets/frames.py ADDED
@@ -0,0 +1,429 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import dataclasses
4
+ import enum
5
+ import io
6
+ import os
7
+ import secrets
8
+ import struct
9
+ from collections.abc import Generator, Sequence
10
+ from typing import Callable, Union
11
+
12
+ from .exceptions import PayloadTooBig, ProtocolError
13
+
14
+
15
+ try:
16
+ from .speedups import apply_mask
17
+ except ImportError:
18
+ from .utils import apply_mask
19
+
20
+
21
# Public API of this module.
__all__ = [
    "Opcode",
    "OP_CONT",
    "OP_TEXT",
    "OP_BINARY",
    "OP_CLOSE",
    "OP_PING",
    "OP_PONG",
    "DATA_OPCODES",
    "CTRL_OPCODES",
    "Frame",
    "Close",
]
34
+
35
+
36
class Opcode(enum.IntEnum):
    """Enumeration of WebSocket frame opcode values."""

    # Data frame opcodes.
    CONT = 0x00
    TEXT = 0x01
    BINARY = 0x02
    # Control frame opcodes.
    CLOSE = 0x08
    PING = 0x09
    PONG = 0x0A


# Module-level shorthand for the opcodes.
OP_CONT = Opcode.CONT
OP_TEXT = Opcode.TEXT
OP_BINARY = Opcode.BINARY
OP_CLOSE = Opcode.CLOSE
OP_PING = Opcode.PING
OP_PONG = Opcode.PONG

# Opcode groups: data frames vs. control frames.
DATA_OPCODES = OP_CONT, OP_TEXT, OP_BINARY
CTRL_OPCODES = OP_CLOSE, OP_PING, OP_PONG
52
+
53
+
54
class CloseCode(enum.IntEnum):
    """Close code values for WebSocket close frames."""

    NORMAL_CLOSURE = 1000
    GOING_AWAY = 1001
    PROTOCOL_ERROR = 1002
    UNSUPPORTED_DATA = 1003
    # 1004 is reserved
    NO_STATUS_RCVD = 1005
    ABNORMAL_CLOSURE = 1006
    INVALID_DATA = 1007
    POLICY_VIOLATION = 1008
    MESSAGE_TOO_BIG = 1009
    MANDATORY_EXTENSION = 1010
    INTERNAL_ERROR = 1011
    SERVICE_RESTART = 1012
    TRY_AGAIN_LATER = 1013
    BAD_GATEWAY = 1014
    TLS_HANDSHAKE = 1015


# Human-readable explanation for each close code.
# See https://www.iana.org/assignments/websocket/websocket.xhtml
CLOSE_CODE_EXPLANATIONS: dict[int, str] = {
    CloseCode.NORMAL_CLOSURE: "OK",
    CloseCode.GOING_AWAY: "going away",
    CloseCode.PROTOCOL_ERROR: "protocol error",
    CloseCode.UNSUPPORTED_DATA: "unsupported data",
    CloseCode.NO_STATUS_RCVD: "no status received [internal]",
    CloseCode.ABNORMAL_CLOSURE: "abnormal closure [internal]",
    CloseCode.INVALID_DATA: "invalid frame payload data",
    CloseCode.POLICY_VIOLATION: "policy violation",
    CloseCode.MESSAGE_TOO_BIG: "message too big",
    CloseCode.MANDATORY_EXTENSION: "mandatory extension",
    CloseCode.INTERNAL_ERROR: "internal error",
    CloseCode.SERVICE_RESTART: "service restart",
    CloseCode.TRY_AGAIN_LATER: "try again later",
    CloseCode.BAD_GATEWAY: "bad gateway",
    CloseCode.TLS_HANDSHAKE: "TLS handshake failure [internal]",
}


# Close code that are allowed in a close frame.
# Using a set optimizes `code in EXTERNAL_CLOSE_CODES`.
EXTERNAL_CLOSE_CODES = {
    CloseCode.NORMAL_CLOSURE,
    CloseCode.GOING_AWAY,
    CloseCode.PROTOCOL_ERROR,
    CloseCode.UNSUPPORTED_DATA,
    CloseCode.INVALID_DATA,
    CloseCode.POLICY_VIOLATION,
    CloseCode.MESSAGE_TOO_BIG,
    CloseCode.MANDATORY_EXTENSION,
    CloseCode.INTERNAL_ERROR,
    CloseCode.SERVICE_RESTART,
    CloseCode.TRY_AGAIN_LATER,
    CloseCode.BAD_GATEWAY,
}


# Codes treated as a clean shutdown (1000, 1001, or no code received).
OK_CLOSE_CODES = {
    CloseCode.NORMAL_CLOSURE,
    CloseCode.GOING_AWAY,
    CloseCode.NO_STATUS_RCVD,
}


# Tuple of bytes-like types; presumably used with isinstance checks at
# call sites — confirm usage elsewhere in this module.
BytesLike = bytes, bytearray, memoryview
121
+
122
+
123
@dataclasses.dataclass
class Frame:
    """
    WebSocket frame.

    Attributes:
        opcode: Opcode.
        data: Payload data.
        fin: FIN bit.
        rsv1: RSV1 bit.
        rsv2: RSV2 bit.
        rsv3: RSV3 bit.

    Only these fields are needed. The MASK bit, payload length and masking-key
    are handled on the fly when parsing and serializing frames.

    """

    opcode: Opcode
    data: Union[bytes, bytearray, memoryview]
    fin: bool = True
    rsv1: bool = False
    rsv2: bool = False
    rsv3: bool = False

    # Configure if you want to see more in logs. Should be a multiple of 3.
    MAX_LOG_SIZE = int(os.environ.get("WEBSOCKETS_MAX_LOG_SIZE", "75"))

    def __str__(self) -> str:
        """
        Return a human-readable representation of a frame.

        """
        coding = None
        length = f"{len(self.data)} byte{'' if len(self.data) == 1 else 's'}"
        non_final = "" if self.fin else "continued"

        if self.opcode is OP_TEXT:
            # Decoding only the beginning and the end is needlessly hard.
            # Decode the entire payload then elide later if necessary.
            data = repr(bytes(self.data).decode())
        elif self.opcode is OP_BINARY:
            # We'll show at most the first 16 bytes and the last 8 bytes.
            # Encode just what we need, plus two dummy bytes to elide later.
            binary = self.data
            if len(binary) > self.MAX_LOG_SIZE // 3:
                cut = (self.MAX_LOG_SIZE // 3 - 1) // 3  # by default cut = 8
                binary = b"".join([binary[: 2 * cut], b"\x00\x00", binary[-cut:]])
            data = " ".join(f"{byte:02x}" for byte in binary)
        elif self.opcode is OP_CLOSE:
            data = str(Close.parse(self.data))
        elif self.data:
            # We don't know if a Continuation frame contains text or binary.
            # Ping and Pong frames could contain UTF-8.
            # Attempt to decode as UTF-8 and display it as text; fallback to
            # binary. If self.data is a memoryview, it has no decode() method,
            # which raises AttributeError.
            try:
                data = repr(bytes(self.data).decode())
                coding = "text"
            except (UnicodeDecodeError, AttributeError):
                binary = self.data
                if len(binary) > self.MAX_LOG_SIZE // 3:
                    cut = (self.MAX_LOG_SIZE // 3 - 1) // 3  # by default cut = 8
                    binary = b"".join([binary[: 2 * cut], b"\x00\x00", binary[-cut:]])
                data = " ".join(f"{byte:02x}" for byte in binary)
                coding = "binary"
        else:
            data = "''"

        # Elide the middle of overly long representations.
        if len(data) > self.MAX_LOG_SIZE:
            cut = self.MAX_LOG_SIZE // 3 - 1  # by default cut = 24
            data = data[: 2 * cut] + "..." + data[-cut:]

        metadata = ", ".join(filter(None, [coding, length, non_final]))

        return f"{self.opcode.name} {data} [{metadata}]"

    @classmethod
    def parse(
        cls,
        read_exact: Callable[[int], Generator[None, None, bytes]],
        *,
        mask: bool,
        max_size: int | None = None,
        extensions: Sequence[extensions.Extension] | None = None,
    ) -> Generator[None, None, Frame]:
        """
        Parse a WebSocket frame.

        This is a generator-based coroutine.

        Args:
            read_exact: Generator-based coroutine that reads the requested
                bytes or raises an exception if there isn't enough data.
            mask: Whether the frame should be masked i.e. whether the read
                happens on the server side.
            max_size: Maximum payload size in bytes.
            extensions: List of extensions, applied in reverse order.

        Raises:
            EOFError: If the connection is closed without a full WebSocket frame.
            PayloadTooBig: If the frame's payload size exceeds ``max_size``.
            ProtocolError: If the frame contains incorrect values.

        """
        # Read the header: byte 1 carries FIN/RSV1-3/opcode, byte 2 carries
        # the MASK bit and the 7-bit payload length.
        data = yield from read_exact(2)
        head1, head2 = struct.unpack("!BB", data)

        # While not Pythonic, this is marginally faster than calling bool().
        fin = True if head1 & 0b10000000 else False
        rsv1 = True if head1 & 0b01000000 else False
        rsv2 = True if head1 & 0b00100000 else False
        rsv3 = True if head1 & 0b00010000 else False

        try:
            opcode = Opcode(head1 & 0b00001111)
        except ValueError as exc:
            raise ProtocolError("invalid opcode") from exc

        if (True if head2 & 0b10000000 else False) != mask:
            raise ProtocolError("incorrect masking")

        # Length values 126 and 127 signal a 16-bit or 64-bit extended length.
        length = head2 & 0b01111111
        if length == 126:
            data = yield from read_exact(2)
            (length,) = struct.unpack("!H", data)
        elif length == 127:
            data = yield from read_exact(8)
            (length,) = struct.unpack("!Q", data)
        # Check the size before reading the payload to avoid buffering it.
        if max_size is not None and length > max_size:
            raise PayloadTooBig(length, max_size)
        if mask:
            mask_bytes = yield from read_exact(4)

        # Read the data.
        data = yield from read_exact(length)
        if mask:
            data = apply_mask(data, mask_bytes)

        frame = cls(opcode, data, fin, rsv1, rsv2, rsv3)

        if extensions is None:
            extensions = []
        # Extensions were applied in order when encoding; undo them in reverse.
        for extension in reversed(extensions):
            frame = extension.decode(frame, max_size=max_size)

        frame.check()

        return frame

    def serialize(
        self,
        *,
        mask: bool,
        extensions: Sequence[extensions.Extension] | None = None,
    ) -> bytes:
        """
        Serialize a WebSocket frame.

        Args:
            mask: Whether the frame should be masked i.e. whether the write
                happens on the client side.
            extensions: List of extensions, applied in order.

        Raises:
            ProtocolError: If the frame contains incorrect values.

        """
        self.check()

        if extensions is None:
            extensions = []
        for extension in extensions:
            self = extension.encode(self)

        output = io.BytesIO()

        # Prepare the header.
        head1 = (
            (0b10000000 if self.fin else 0)
            | (0b01000000 if self.rsv1 else 0)
            | (0b00100000 if self.rsv2 else 0)
            | (0b00010000 if self.rsv3 else 0)
            | self.opcode
        )

        head2 = 0b10000000 if mask else 0

        # Payloads up to 125 bytes fit in the second header byte; longer
        # payloads use a 16-bit or 64-bit extended length field.
        length = len(self.data)
        if length < 126:
            output.write(struct.pack("!BB", head1, head2 | length))
        elif length < 65536:
            output.write(struct.pack("!BBH", head1, head2 | 126, length))
        else:
            output.write(struct.pack("!BBQ", head1, head2 | 127, length))

        if mask:
            mask_bytes = secrets.token_bytes(4)
            output.write(mask_bytes)

        # Prepare the data.
        if mask:
            data = apply_mask(self.data, mask_bytes)
        else:
            data = self.data
        output.write(data)

        return output.getvalue()

    def check(self) -> None:
        """
        Check that reserved bits and opcode have acceptable values.

        Raises:
            ProtocolError: If a reserved bit or the opcode is invalid.

        """
        if self.rsv1 or self.rsv2 or self.rsv3:
            raise ProtocolError("reserved bits must be 0")

        # Control frames must fit in a single unfragmented frame with at most
        # 125 bytes of payload.
        if self.opcode in CTRL_OPCODES:
            if len(self.data) > 125:
                raise ProtocolError("control frame too long")
            if not self.fin:
                raise ProtocolError("fragmented control frame")
350
+
351
+
352
@dataclasses.dataclass
class Close:
    """
    Code and reason for WebSocket close frames.

    Attributes:
        code: Close code.
        reason: Close reason.

    """

    code: int
    reason: str

    def __str__(self) -> str:
        """
        Return a human-readable representation of a close code and reason.

        """
        # Codes 3000-3999 are reserved for registration with IANA and
        # 4000-4999 for private use; anything else is looked up in the
        # table of well-known codes.
        if 3000 <= self.code < 5000:
            label = "registered" if self.code < 4000 else "private use"
        else:
            label = CLOSE_CODE_EXPLANATIONS.get(self.code, "unknown")
        text = f"{self.code} ({label})"
        return f"{text} {self.reason}" if self.reason else text

    @classmethod
    def parse(cls, data: bytes) -> Close:
        """
        Parse the payload of a close frame.

        Args:
            data: Payload of the close frame.

        Raises:
            ProtocolError: If data is ill-formed.
            UnicodeDecodeError: If the reason isn't valid UTF-8.

        """
        # An empty payload means no close code was provided at all.
        if len(data) == 0:
            return cls(CloseCode.NO_STATUS_RCVD, "")
        # One byte cannot hold a 16-bit close code.
        if len(data) < 2:
            raise ProtocolError("close frame too short")
        (code,) = struct.unpack("!H", data[:2])
        close = cls(code, data[2:].decode())
        close.check()
        return close

    def serialize(self) -> bytes:
        """
        Serialize the payload of a close frame.

        """
        self.check()
        payload = struct.pack("!H", self.code)
        return payload + self.reason.encode()

    def check(self) -> None:
        """
        Check that the close code has a valid value for a close frame.

        Raises:
            ProtocolError: If the close code is invalid.

        """
        if self.code in EXTERNAL_CLOSE_CODES:
            return
        if 3000 <= self.code < 5000:
            return
        raise ProtocolError("invalid status code")
426
+
427
+
428
+ # At the bottom to break import cycles created by type annotations.
429
+ from . import extensions # noqa: E402
deepseek/lib/python3.10/site-packages/websockets/headers.py ADDED
@@ -0,0 +1,580 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import base64
4
+ import binascii
5
+ import ipaddress
6
+ import re
7
+ from collections.abc import Sequence
8
+ from typing import Callable, TypeVar, cast
9
+
10
+ from .exceptions import InvalidHeaderFormat, InvalidHeaderValue
11
+ from .typing import (
12
+ ConnectionOption,
13
+ ExtensionHeader,
14
+ ExtensionName,
15
+ ExtensionParameter,
16
+ Subprotocol,
17
+ UpgradeProtocol,
18
+ )
19
+
20
+
21
+ __all__ = [
22
+ "build_host",
23
+ "parse_connection",
24
+ "parse_upgrade",
25
+ "parse_extension",
26
+ "build_extension",
27
+ "parse_subprotocol",
28
+ "build_subprotocol",
29
+ "validate_subprotocols",
30
+ "build_www_authenticate_basic",
31
+ "parse_authorization_basic",
32
+ "build_authorization_basic",
33
+ ]
34
+
35
+
36
+ T = TypeVar("T")
37
+
38
+
39
def build_host(host: str, port: int, secure: bool) -> str:
    """
    Build a ``Host`` header.

    """
    # https://datatracker.ietf.org/doc/html/rfc3986#section-3.2.2
    # IPv6 addresses must be enclosed in brackets.
    try:
        ip = ipaddress.ip_address(host)
    except ValueError:
        # Not an IP address: it's a hostname, usable verbatim.
        pass
    else:
        if ip.version == 6:
            host = f"[{host}]"

    # Omit the port when it's the default for the scheme.
    default_port = 443 if secure else 80
    if port != default_port:
        host = f"{host}:{port}"

    return host
60
+
61
+
62
+ # To avoid a dependency on a parsing library, we implement manually the ABNF
63
+ # described in https://datatracker.ietf.org/doc/html/rfc6455#section-9.1 and
64
+ # https://datatracker.ietf.org/doc/html/rfc7230#appendix-B.
65
+
66
+
67
def peek_ahead(header: str, pos: int) -> str | None:
    """
    Return the next character from ``header`` at the given position.

    Return :obj:`None` at the end of ``header``.

    We never need to peek more than one character ahead.

    """
    if pos == len(header):
        return None
    return header[pos]
77
+
78
+
79
_OWS_re = re.compile(r"[\t ]*")


def parse_OWS(header: str, pos: int) -> int:
    """
    Parse optional whitespace from ``header`` at the given position.

    Return the new position.

    The whitespace itself isn't returned because it isn't significant.

    """
    m = _OWS_re.match(header, pos)
    # The pattern matches the empty string, so a match always exists.
    assert m is not None
    return m.end()
95
+
96
+
97
_token_re = re.compile(r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+")


def parse_token(header: str, pos: int, header_name: str) -> tuple[str, int]:
    """
    Parse a token from ``header`` at the given position.

    Return the token value and the new position.

    Raises:
        InvalidHeaderFormat: On invalid inputs.

    """
    m = _token_re.match(header, pos)
    if m is None:
        raise InvalidHeaderFormat(header_name, "expected token", header, pos)
    return m.group(), m.end()
114
+
115
+
116
_quoted_string_re = re.compile(
    r'"(?:[\x09\x20-\x21\x23-\x5b\x5d-\x7e]|\\[\x09\x20-\x7e\x80-\xff])*"'
)


_unquote_re = re.compile(r"\\([\x09\x20-\x7e\x80-\xff])")


def parse_quoted_string(header: str, pos: int, header_name: str) -> tuple[str, int]:
    """
    Parse a quoted string from ``header`` at the given position.

    Return the unquoted value and the new position.

    Raises:
        InvalidHeaderFormat: On invalid inputs.

    """
    m = _quoted_string_re.match(header, pos)
    if m is None:
        raise InvalidHeaderFormat(header_name, "expected quoted string", header, pos)
    # Strip the surrounding quotes, then undo backslash escaping.
    inner = m.group()[1:-1]
    return _unquote_re.sub(r"\1", inner), m.end()
138
+
139
+
140
_quotable_re = re.compile(r"[\x09\x20-\x7e\x80-\xff]*")


_quote_re = re.compile(r"([\x22\x5c])")


def build_quoted_string(value: str) -> str:
    """
    Format ``value`` as a quoted string.

    This is the reverse of :func:`parse_quoted_string`.

    """
    if _quotable_re.fullmatch(value) is None:
        raise ValueError("invalid characters for quoted-string encoding")
    # Backslash-escape '"' and '\' before wrapping in quotes.
    escaped = _quote_re.sub(r"\\\1", value)
    return '"' + escaped + '"'
157
+
158
+
159
def parse_list(
    parse_item: Callable[[str, int, str], tuple[T, int]],
    header: str,
    pos: int,
    header_name: str,
) -> list[T]:
    """
    Parse a comma-separated list from ``header`` at the given position.

    This is appropriate for parsing values with the following grammar:

        1#item

    ``parse_item`` parses one item.

    ``header`` is assumed not to start or end with whitespace.

    (This function is designed for parsing an entire header value and
    :func:`~websockets.http.read_headers` strips whitespace from values.)

    Return a list of items.

    Raises:
        InvalidHeaderFormat: On invalid inputs.

    """
    # Per https://datatracker.ietf.org/doc/html/rfc7230#section-7, "a recipient
    # MUST parse and ignore a reasonable number of empty list elements";
    # hence while loops that remove extra delimiters.

    # Remove extra delimiters before the first item.
    while peek_ahead(header, pos) == ",":
        pos = parse_OWS(header, pos + 1)

    items = []
    while True:
        # Loop invariant: an item starts at pos in header.
        item, pos = parse_item(header, pos, header_name)
        items.append(item)
        pos = parse_OWS(header, pos)

        # We may have reached the end of the header.
        if pos == len(header):
            break

        # There must be a delimiter after each element except the last one.
        if peek_ahead(header, pos) == ",":
            pos = parse_OWS(header, pos + 1)
        else:
            raise InvalidHeaderFormat(header_name, "expected comma", header, pos)

        # Remove extra delimiters before the next item.
        while peek_ahead(header, pos) == ",":
            pos = parse_OWS(header, pos + 1)

        # We may have reached the end of the header.
        if pos == len(header):
            break

    # Since we only advance in the header by one character with peek_ahead()
    # or with the end position of a regex match, we can't overshoot the end.
    assert pos == len(header)

    return items
223
+
224
+
225
def parse_connection_option(
    header: str, pos: int, header_name: str
) -> tuple[ConnectionOption, int]:
    """
    Parse a Connection option from ``header`` at the given position.

    Return the protocol value and the new position.

    Raises:
        InvalidHeaderFormat: On invalid inputs.

    """
    # A Connection option is simply a token.
    token, new_pos = parse_token(header, pos, header_name)
    return cast(ConnectionOption, token), new_pos
239
+
240
+
241
def parse_connection(header: str) -> list[ConnectionOption]:
    """
    Parse a ``Connection`` header.

    Return a list of HTTP connection options.

    Args:
        header: Value of the ``Connection`` header.

    Raises:
        InvalidHeaderFormat: On invalid inputs.

    """
    return parse_list(parse_connection_option, header, 0, "Connection")
255
+
256
+
257
_protocol_re = re.compile(
    r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+(?:/[-!#$%&\'*+.^_`|~0-9a-zA-Z]+)?"
)


def parse_upgrade_protocol(
    header: str, pos: int, header_name: str
) -> tuple[UpgradeProtocol, int]:
    """
    Parse an Upgrade protocol from ``header`` at the given position.

    Return the protocol value and the new position.

    Raises:
        InvalidHeaderFormat: On invalid inputs.

    """
    # A protocol is a token, optionally followed by "/" and a version token.
    m = _protocol_re.match(header, pos)
    if m is None:
        raise InvalidHeaderFormat(header_name, "expected protocol", header, pos)
    return cast(UpgradeProtocol, m.group()), m.end()
278
+
279
+
280
def parse_upgrade(header: str) -> list[UpgradeProtocol]:
    """
    Parse an ``Upgrade`` header.

    Return a list of HTTP protocols. The ``1#protocol`` grammar requires at
    least one protocol.

    Args:
        header: Value of the ``Upgrade`` header.

    Raises:
        InvalidHeaderFormat: On invalid inputs.

    """
    return parse_list(parse_upgrade_protocol, header, 0, "Upgrade")
294
+
295
+
296
def parse_extension_item_param(
    header: str, pos: int, header_name: str
) -> tuple[ExtensionParameter, int]:
    """
    Parse a single extension parameter from ``header`` at the given position.

    Return a ``(name, value)`` pair and the new position.

    Raises:
        InvalidHeaderFormat: On invalid inputs.

    """
    # Extract parameter name.
    name, pos = parse_token(header, pos, header_name)
    pos = parse_OWS(header, pos)
    # Extract parameter value, if there is one; bare parameters keep None.
    value: str | None = None
    if peek_ahead(header, pos) == "=":
        pos = parse_OWS(header, pos + 1)
        if peek_ahead(header, pos) == '"':
            pos_before = pos  # for proper error reporting below
            value, pos = parse_quoted_string(header, pos, header_name)
            # https://datatracker.ietf.org/doc/html/rfc6455#section-9.1 says:
            # the value after quoted-string unescaping MUST conform to
            # the 'token' ABNF.
            if _token_re.fullmatch(value) is None:
                raise InvalidHeaderFormat(
                    header_name, "invalid quoted header content", header, pos_before
                )
        else:
            value, pos = parse_token(header, pos, header_name)
        pos = parse_OWS(header, pos)

    return (name, value), pos
330
+
331
+
332
def parse_extension_item(
    header: str, pos: int, header_name: str
) -> tuple[ExtensionHeader, int]:
    """
    Parse an extension definition from ``header`` at the given position.

    Return an ``(extension name, parameters)`` pair, where ``parameters`` is a
    list of ``(name, value)`` pairs, and the new position.

    Raises:
        InvalidHeaderFormat: On invalid inputs.

    """
    # The extension name comes first...
    name, pos = parse_token(header, pos, header_name)
    pos = parse_OWS(header, pos)
    # ...followed by any number of ";"-separated parameters.
    params: list[ExtensionParameter] = []
    while peek_ahead(header, pos) == ";":
        pos = parse_OWS(header, pos + 1)
        param, pos = parse_extension_item_param(header, pos, header_name)
        params.append(param)
    return (cast(ExtensionName, name), params), pos
355
+
356
+
357
def parse_extension(header: str) -> list[ExtensionHeader]:
    """
    Parse a ``Sec-WebSocket-Extensions`` header.

    Return a list of WebSocket extensions and their parameters in this format::

        [
            (
                'extension name',
                [
                    ('parameter name', 'parameter value'),
                    ....
                ]
            ),
            ...
        ]

    Parameter values are :obj:`None` when no value is provided.

    Args:
        header: Value of the ``Sec-WebSocket-Extensions`` header.

    Raises:
        InvalidHeaderFormat: On invalid inputs.

    """
    return parse_list(parse_extension_item, header, 0, "Sec-WebSocket-Extensions")


parse_extension_list = parse_extension  # alias for backwards compatibility
384
+
385
+
386
def build_extension_item(
    name: ExtensionName, parameters: list[ExtensionParameter]
) -> str:
    """
    Build an extension definition.

    This is the reverse of :func:`parse_extension_item`.

    """
    parts = [cast(str, name)]
    for param_name, param_value in parameters:
        # Quoted strings aren't necessary because values are always tokens.
        if param_value is None:
            parts.append(param_name)
        else:
            parts.append(f"{param_name}={param_value}")
    return "; ".join(parts)
403
+
404
+
405
def build_extension(extensions: Sequence[ExtensionHeader]) -> str:
    """
    Build a ``Sec-WebSocket-Extensions`` header.

    This is the reverse of :func:`parse_extension`.

    """
    items = [build_extension_item(name, parameters) for name, parameters in extensions]
    return ", ".join(items)


build_extension_list = build_extension  # alias for backwards compatibility
418
+
419
+
420
def parse_subprotocol_item(
    header: str, pos: int, header_name: str
) -> tuple[Subprotocol, int]:
    """
    Parse a subprotocol from ``header`` at the given position.

    Return the subprotocol value and the new position.

    Raises:
        InvalidHeaderFormat: On invalid inputs.

    """
    # A subprotocol is simply a token.
    token, new_pos = parse_token(header, pos, header_name)
    return cast(Subprotocol, token), new_pos
434
+
435
+
436
def parse_subprotocol(header: str) -> list[Subprotocol]:
    """
    Parse a ``Sec-WebSocket-Protocol`` header.

    Return a list of WebSocket subprotocols.

    Args:
        header: Value of the ``Sec-WebSocket-Protocol`` header.

    Raises:
        InvalidHeaderFormat: On invalid inputs.

    """
    return parse_list(parse_subprotocol_item, header, 0, "Sec-WebSocket-Protocol")


parse_subprotocol_list = parse_subprotocol  # alias for backwards compatibility
450
+
451
+
452
def build_subprotocol(subprotocols: Sequence[Subprotocol]) -> str:
    """
    Build a ``Sec-WebSocket-Protocol`` header.

    This is the reverse of :func:`parse_subprotocol`.

    """
    separator = ", "
    return separator.join(subprotocols)


build_subprotocol_list = build_subprotocol  # alias for backwards compatibility
463
+
464
+
465
def validate_subprotocols(subprotocols: Sequence[Subprotocol]) -> None:
    """
    Validate that ``subprotocols`` is suitable for :func:`build_subprotocol`.

    """
    # A str is itself a Sequence, but joining its characters isn't the intent.
    if isinstance(subprotocols, str):
        raise TypeError("subprotocols must be a list, not a str")
    if not isinstance(subprotocols, Sequence):
        raise TypeError("subprotocols must be a list")
    for subprotocol in subprotocols:
        if _token_re.fullmatch(subprotocol) is None:
            raise ValueError(f"invalid subprotocol: {subprotocol}")
477
+
478
+
479
def build_www_authenticate_basic(realm: str) -> str:
    """
    Build a ``WWW-Authenticate`` header for HTTP Basic Auth.

    Args:
        realm: Identifier of the protection space.

    """
    # https://datatracker.ietf.org/doc/html/rfc7617#section-2
    quoted_realm = build_quoted_string(realm)
    quoted_charset = build_quoted_string("UTF-8")
    return f"Basic realm={quoted_realm}, charset={quoted_charset}"
491
+
492
+
493
_token68_re = re.compile(r"[A-Za-z0-9-._~+/]+=*")


def parse_token68(header: str, pos: int, header_name: str) -> tuple[str, int]:
    """
    Parse a token68 from ``header`` at the given position.

    Return the token value and the new position.

    Raises:
        InvalidHeaderFormat: On invalid inputs.

    """
    m = _token68_re.match(header, pos)
    if m is None:
        raise InvalidHeaderFormat(header_name, "expected token68", header, pos)
    return m.group(), m.end()
510
+
511
+
512
def parse_end(header: str, pos: int, header_name: str) -> None:
    """
    Check that parsing reached the end of header.

    Raises:
        InvalidHeaderFormat: If data remains after ``pos``.

    """
    if len(header) > pos:
        raise InvalidHeaderFormat(header_name, "trailing data", header, pos)
519
+
520
+
521
def parse_authorization_basic(header: str) -> tuple[str, str]:
    """
    Parse an ``Authorization`` header for HTTP Basic Auth.

    Return a ``(username, password)`` tuple.

    Args:
        header: Value of the ``Authorization`` header.

    Raises:
        InvalidHeaderFormat: On invalid inputs.
        InvalidHeaderValue: On unsupported inputs.

    """
    # https://datatracker.ietf.org/doc/html/rfc7235#section-2.1
    # https://datatracker.ietf.org/doc/html/rfc7617#section-2
    # Scheme comparison is case-insensitive.
    scheme, pos = parse_token(header, 0, "Authorization")
    if scheme.lower() != "basic":
        raise InvalidHeaderValue(
            "Authorization",
            f"unsupported scheme: {scheme}",
        )
    if peek_ahead(header, pos) != " ":
        raise InvalidHeaderFormat(
            "Authorization", "expected space after scheme", header, pos
        )
    pos += 1
    # Credentials are a single token68 followed by nothing else.
    basic_credentials, pos = parse_token68(header, pos, "Authorization")
    parse_end(header, pos, "Authorization")

    try:
        user_pass = base64.b64decode(basic_credentials.encode()).decode()
    except binascii.Error:
        raise InvalidHeaderValue(
            "Authorization",
            "expected base64-encoded credentials",
        ) from None
    try:
        # Split on the first ":" only — passwords may contain ":".
        username, password = user_pass.split(":", 1)
    except ValueError:
        raise InvalidHeaderValue(
            "Authorization",
            "expected username:password credentials",
        ) from None

    return username, password
567
+
568
+
569
def build_authorization_basic(username: str, password: str) -> str:
    """
    Build an ``Authorization`` header for HTTP Basic Auth.

    This is the reverse of :func:`parse_authorization_basic`.

    """
    # https://datatracker.ietf.org/doc/html/rfc7617#section-2
    assert ":" not in username
    credentials = f"{username}:{password}".encode()
    token = base64.b64encode(credentials).decode()
    return "Basic " + token
deepseek/lib/python3.10/site-packages/websockets/http.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import warnings
4
+
5
+ from .datastructures import Headers, MultipleValuesError # noqa: F401
6
+
7
+
8
+ with warnings.catch_warnings():
9
+ # Suppress redundant DeprecationWarning raised by websockets.legacy.
10
+ warnings.filterwarnings("ignore", category=DeprecationWarning)
11
+ from .legacy.http import read_request, read_response # noqa: F401
12
+
13
+
14
# Warn callers at import time that everything this module re-exports now
# lives elsewhere. The message is built from adjacent string literals, so
# every segment except the last must end with a space — the original was
# missing one after "websockets.datastructures", producing the garbled text
# "datastructuresand read_request".
warnings.warn(  # deprecated in 9.0 - 2021-09-01
    "Headers and MultipleValuesError were moved "
    "from websockets.http to websockets.datastructures "
    "and read_request and read_response were moved "
    "from websockets.http to websockets.legacy.http",
    DeprecationWarning,
)
deepseek/lib/python3.10/site-packages/websockets/http11.py ADDED
@@ -0,0 +1,386 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import dataclasses
4
+ import os
5
+ import re
6
+ import sys
7
+ import warnings
8
+ from collections.abc import Generator
9
+ from typing import Callable
10
+
11
+ from .datastructures import Headers
12
+ from .exceptions import SecurityError
13
+ from .version import version as websockets_version
14
+
15
+
16
+ __all__ = ["SERVER", "USER_AGENT", "Request", "Response"]
17
+
18
+
19
# Interpreter version as "major.minor", interpolated into the default
# User-Agent and Server headers below.
PYTHON_VERSION = "{}.{}".format(*sys.version_info)

# User-Agent header for HTTP requests.
USER_AGENT = os.environ.get(
    "WEBSOCKETS_USER_AGENT",
    f"Python/{PYTHON_VERSION} websockets/{websockets_version}",
)

# Server header for HTTP responses.
SERVER = os.environ.get(
    "WEBSOCKETS_SERVER",
    f"Python/{PYTHON_VERSION} websockets/{websockets_version}",
)

# Maximum total size of headers is around 128 * 8 KiB = 1 MiB.
MAX_NUM_HEADERS = int(os.environ.get("WEBSOCKETS_MAX_NUM_HEADERS", "128"))

# Limit request line and header lines. 8KiB is the most common default
# configuration of popular HTTP servers.
MAX_LINE_LENGTH = int(os.environ.get("WEBSOCKETS_MAX_LINE_LENGTH", "8192"))

# Support for HTTP response bodies is intended to read an error message
# returned by a server. It isn't designed to perform large file transfers.
MAX_BODY_SIZE = int(os.environ.get("WEBSOCKETS_MAX_BODY_SIZE", "1_048_576"))  # 1 MiB
43
+
44
+
45
def d(value: bytes) -> str:
    """
    Decode a bytestring for interpolating into an error message.

    Bytes that aren't valid UTF-8 are rendered as ``\\xNN`` escapes instead of
    raising :exc:`UnicodeDecodeError`.

    """
    text = value.decode(errors="backslashreplace")
    return text
51
+
52
+
53
# See https://datatracker.ietf.org/doc/html/rfc7230#appendix-B.
# Both patterns are bytes patterns: headers are validated before decoding.

# Regex for validating header names.
_token_re = re.compile(rb"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+")

# Regex for validating header values.
# We don't attempt to support obsolete line folding.
# Include HTAB (\x09), SP (\x20), VCHAR (\x21-\x7e), obs-text (\x80-\xff).
# The ABNF is complicated because it attempts to express that optional
# whitespace is ignored. We strip whitespace and don't revalidate that.
# See also https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189
_value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*")
71
+
72
+
73
@dataclasses.dataclass
class Request:
    """
    WebSocket handshake request.

    Attributes:
        path: Request path, including optional query.
        headers: Request headers.

    """

    path: str
    headers: Headers
    # A request body isn't useful in the context of this library.

    _exception: Exception | None = None

    @property
    def exception(self) -> Exception | None:  # pragma: no cover
        # Kept for backwards compatibility; deprecated in 10.3 - 2022-04-17.
        warnings.warn(
            "Request.exception is deprecated; "
            "use ServerProtocol.handshake_exc instead",
            DeprecationWarning,
        )
        return self._exception

    @classmethod
    def parse(
        cls,
        read_line: Callable[[int], Generator[None, None, bytes]],
    ) -> Generator[None, None, Request]:
        """
        Parse a WebSocket handshake request.

        This is a generator-based coroutine.

        The request path isn't URL-decoded or validated in any way.

        The request path and headers are expected to contain only ASCII
        characters. Other characters are represented with surrogate escapes.

        :meth:`parse` doesn't attempt to read the request body because
        WebSocket handshake requests don't have one. If the request contains a
        body, it may be read from the data stream after :meth:`parse` returns.

        Args:
            read_line: Generator-based coroutine that reads a LF-terminated
                line or raises an exception if there isn't enough data.

        Raises:
            EOFError: If the connection is closed without a full HTTP request.
            SecurityError: If the request exceeds a security limit.
            ValueError: If the request isn't well formatted.

        """
        # https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.1

        # Parsing stays simple: fixed values are expected for the method and
        # the version, and the path isn't checked. WebSocket software tends to
        # implement HTTP/1.1 strictly, so lenient parsing isn't needed.

        try:
            request_line = yield from parse_line(read_line)
        except EOFError as exc:
            raise EOFError("connection closed while reading HTTP request line") from exc

        try:
            verb, target, version = request_line.split(b" ", 2)
        except ValueError:  # fewer than three space-separated parts
            raise ValueError(f"invalid HTTP request line: {d(request_line)}") from None

        if version != b"HTTP/1.1":
            raise ValueError(
                f"unsupported protocol; expected HTTP/1.1: {d(request_line)}"
            )
        if verb != b"GET":
            raise ValueError(f"unsupported HTTP method; expected GET; got {d(verb)}")
        path = target.decode("ascii", "surrogateescape")

        headers = yield from parse_headers(read_line)

        # https://datatracker.ietf.org/doc/html/rfc7230#section-3.3.3

        if "Transfer-Encoding" in headers:
            raise NotImplementedError("transfer codings aren't supported")
        if "Content-Length" in headers:
            raise ValueError("unsupported request body")

        return cls(path, headers)

    def serialize(self) -> bytes:
        """
        Serialize a WebSocket handshake request.

        """
        # The request line and headers contain only ASCII characters,
        # so serialization can stay simple.
        return f"GET {self.path} HTTP/1.1\r\n".encode() + self.headers.serialize()
172
+
173
+
174
@dataclasses.dataclass
class Response:
    """
    WebSocket handshake response.

    Attributes:
        status_code: Response code.
        reason_phrase: Response reason.
        headers: Response headers.
        body: Response body, if any.

    """

    status_code: int
    reason_phrase: str
    headers: Headers
    body: bytes | None = None

    _exception: Exception | None = None

    @property
    def exception(self) -> Exception | None:  # pragma: no cover
        # Kept for backwards compatibility; deprecated in 10.3 - 2022-04-17.
        warnings.warn(
            "Response.exception is deprecated; "
            "use ClientProtocol.handshake_exc instead",
            DeprecationWarning,
        )
        return self._exception

    @classmethod
    def parse(
        cls,
        read_line: Callable[[int], Generator[None, None, bytes]],
        read_exact: Callable[[int], Generator[None, None, bytes]],
        read_to_eof: Callable[[int], Generator[None, None, bytes]],
    ) -> Generator[None, None, Response]:
        """
        Parse a WebSocket handshake response.

        This is a generator-based coroutine.

        The reason phrase and headers are expected to contain only ASCII
        characters. Other characters are represented with surrogate escapes.

        Args:
            read_line: Generator-based coroutine that reads a LF-terminated
                line or raises an exception if there isn't enough data.
            read_exact: Generator-based coroutine that reads the requested
                bytes or raises an exception if there isn't enough data.
            read_to_eof: Generator-based coroutine that reads until the end
                of the stream.

        Raises:
            EOFError: If the connection is closed without a full HTTP response.
            SecurityError: If the response exceeds a security limit.
            LookupError: If the response isn't well formatted.
            ValueError: If the response isn't well formatted.

        """
        # https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2

        try:
            status_line = yield from parse_line(read_line)
        except EOFError as exc:
            raise EOFError("connection closed while reading HTTP status line") from exc

        try:
            version, raw_code, raw_phrase = status_line.split(b" ", 2)
        except ValueError:  # fewer than three space-separated parts
            raise ValueError(f"invalid HTTP status line: {d(status_line)}") from None

        if version != b"HTTP/1.1":
            raise ValueError(
                f"unsupported protocol; expected HTTP/1.1: {d(status_line)}"
            )
        try:
            status_code = int(raw_code)
        except ValueError:  # invalid literal for int() with base 10
            raise ValueError(
                f"invalid status code; expected integer; got {d(raw_code)}"
            ) from None
        if not 100 <= status_code < 600:
            raise ValueError(
                f"invalid status code; expected 100–599; got {d(raw_code)}"
            )
        if not _value_re.fullmatch(raw_phrase):
            raise ValueError(f"invalid HTTP reason phrase: {d(raw_phrase)}")
        reason = raw_phrase.decode("ascii", "surrogateescape")

        headers = yield from parse_headers(read_line)

        # https://datatracker.ietf.org/doc/html/rfc7230#section-3.3.3

        if "Transfer-Encoding" in headers:
            raise NotImplementedError("transfer codings aren't supported")

        # websockets only sends GET requests (no HEAD, no CONNECT), so every
        # response carries a message body except 1xx, 204, and 304.
        # status_code is known to be within 100-599 at this point.
        if status_code < 200 or status_code in (204, 304):
            body = None
        else:
            # MultipleValuesError is sufficiently unlikely that we don't
            # attempt to handle it. Instead we document that its parent
            # class, LookupError, may be raised.
            content_length: int | None
            try:
                raw_content_length = headers["Content-Length"]
            except KeyError:
                content_length = None
            else:
                content_length = int(raw_content_length)

            if content_length is None:
                # No Content-Length: read until EOF, bounded by the limit.
                try:
                    body = yield from read_to_eof(MAX_BODY_SIZE)
                except RuntimeError:
                    raise SecurityError(f"body too large: over {MAX_BODY_SIZE} bytes")
            elif content_length > MAX_BODY_SIZE:
                raise SecurityError(f"body too large: {content_length} bytes")
            else:
                body = yield from read_exact(content_length)

        return cls(status_code, reason, headers, body)

    def serialize(self) -> bytes:
        """
        Serialize a WebSocket handshake response.

        """
        # The status line and headers contain only ASCII characters,
        # so serialization can stay simple.
        output = f"HTTP/1.1 {self.status_code} {self.reason_phrase}\r\n".encode()
        output += self.headers.serialize()
        if self.body is not None:
            output += self.body
        return output
309
+
310
+
311
def parse_headers(
    read_line: Callable[[int], Generator[None, None, bytes]],
) -> Generator[None, None, Headers]:
    """
    Parse HTTP headers.

    Non-ASCII characters are represented with surrogate escapes.

    Args:
        read_line: Generator-based coroutine that reads a LF-terminated line
            or raises an exception if there isn't enough data.

    Raises:
        EOFError: If the connection is closed without complete headers.
        SecurityError: If the request exceeds a security limit.
        ValueError: If the request isn't well formatted.

    """
    # https://datatracker.ietf.org/doc/html/rfc7230#section-3.2

    # Obsolete line folding is deliberately unsupported.

    headers = Headers()
    lines_read = 0
    while True:
        # Allow MAX_NUM_HEADERS header lines plus the blank terminator line.
        if lines_read > MAX_NUM_HEADERS:
            raise SecurityError("too many HTTP headers")
        lines_read += 1

        try:
            line = yield from parse_line(read_line)
        except EOFError as exc:
            raise EOFError("connection closed while reading HTTP headers") from exc
        if line == b"":
            # Blank line terminates the header section.
            break

        try:
            raw_name, raw_value = line.split(b":", 1)
        except ValueError:  # no colon separator on the line
            raise ValueError(f"invalid HTTP header line: {d(line)}") from None
        if not _token_re.fullmatch(raw_name):
            raise ValueError(f"invalid HTTP header name: {d(raw_name)}")
        raw_value = raw_value.strip(b" \t")
        if not _value_re.fullmatch(raw_value):
            raise ValueError(f"invalid HTTP header value: {d(raw_value)}")

        name = raw_name.decode("ascii")  # guaranteed to be ASCII at this point
        value = raw_value.decode("ascii", "surrogateescape")
        headers[name] = value

    return headers
360
+
361
+
362
def parse_line(
    read_line: Callable[[int], Generator[None, None, bytes]],
) -> Generator[None, None, bytes]:
    """
    Parse a single line.

    CRLF is stripped from the return value.

    Args:
        read_line: Generator-based coroutine that reads a LF-terminated line
            or raises an exception if there isn't enough data.

    Raises:
        EOFError: If the connection is closed without a CRLF.
        SecurityError: If the response exceeds a security limit.

    """
    try:
        raw = yield from read_line(MAX_LINE_LENGTH)
    except RuntimeError:
        raise SecurityError("line too long")
    # Requiring CRLF isn't mandatory but is safe:
    # https://datatracker.ietf.org/doc/html/rfc7230#section-3.5
    if raw.endswith(b"\r\n"):
        return raw.removesuffix(b"\r\n")
    raise EOFError("line without CRLF")
deepseek/lib/python3.10/site-packages/websockets/imports.py ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import warnings
4
+ from collections.abc import Iterable
5
+ from typing import Any
6
+
7
+
8
+ __all__ = ["lazy_import"]
9
+
10
+
11
def import_name(name: str, source: str, namespace: dict[str, Any]) -> Any:
    """
    Import ``name`` from ``source`` in ``namespace``.

    There are two use cases:

    - ``name`` is an object defined in ``source``;
    - ``name`` is a submodule of ``source``.

    Neither :func:`__import__` nor :func:`~importlib.import_module` does
    exactly this. :func:`__import__` is closer to the intended behavior.

    Args:
        name: Name to import.
        source: Module to import it from; leading dots make the import
            relative to the package of ``namespace``.
        namespace: Namespace of the module performing the import,
            typically ``globals()``.

    Returns:
        The imported object.

    """
    # Count leading dots to determine the relative import level.
    # Bound the loop so that an empty or dots-only source triggers the
    # assertion below instead of an IndexError.
    level = 0
    while level < len(source) and source[level] == ".":
        level += 1
    assert level < len(source), "importing from parent isn't supported"
    # fromlist=[name] makes __import__ load ``name`` when it's a submodule.
    module = __import__(source[level:], namespace, None, [name], level)
    return getattr(module, name)
30
+
31
+
32
def lazy_import(
    namespace: dict[str, Any],
    aliases: dict[str, str] | None = None,
    deprecated_aliases: dict[str, str] | None = None,
) -> None:
    """
    Provide lazy, module-level imports.

    Typical use::

        lazy_import(
            globals(),
            aliases={
                "<name>": "<source module>",
                ...
            },
            deprecated_aliases={
                ...,
            },
        )

    This function defines ``__getattr__`` and ``__dir__`` per :pep:`562` by
    installing them in ``namespace``; it returns ``None``.

    Args:
        namespace: Module namespace, typically ``globals()``.
        aliases: Maps names to the modules they're lazily imported from.
        deprecated_aliases: Like ``aliases``, but accessing one of these
            names also emits a :class:`DeprecationWarning`.

    """
    if aliases is None:
        aliases = {}
    if deprecated_aliases is None:
        deprecated_aliases = {}

    namespace_set = set(namespace)
    aliases_set = set(aliases)
    deprecated_aliases_set = set(deprecated_aliases)

    # Each name must come from exactly one place: the module itself,
    # aliases, or deprecated aliases.
    assert not namespace_set & aliases_set, "namespace conflict"
    assert not namespace_set & deprecated_aliases_set, "namespace conflict"
    assert not aliases_set & deprecated_aliases_set, "namespace conflict"

    package = namespace["__name__"]

    def __getattr__(name: str) -> Any:
        # Regular aliases: import and return silently.
        assert aliases is not None  # mypy cannot figure this out
        try:
            source = aliases[name]
        except KeyError:
            pass
        else:
            return import_name(name, source, namespace)

        # Deprecated aliases: warn, then import and return.
        assert deprecated_aliases is not None  # mypy cannot figure this out
        try:
            source = deprecated_aliases[name]
        except KeyError:
            pass
        else:
            warnings.warn(
                f"{package}.{name} is deprecated",
                DeprecationWarning,
                stacklevel=2,
            )
            return import_name(name, source, namespace)

        raise AttributeError(f"module {package!r} has no attribute {name!r}")

    namespace["__getattr__"] = __getattr__

    def __dir__() -> Iterable[str]:
        # Advertise lazily-importable names alongside real module contents.
        return sorted(namespace_set | aliases_set | deprecated_aliases_set)

    namespace["__dir__"] = __dir__