ZTWHHH committed on
Commit
239ee5e
·
verified ·
1 Parent(s): 622a202

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. deepseek/lib/libncurses++.a +3 -0
  3. deepseek/lib/python3.10/distutils/ccompiler.py +1116 -0
  4. deepseek/lib/python3.10/distutils/config.py +130 -0
  5. deepseek/lib/python3.10/distutils/dep_util.py +92 -0
  6. deepseek/lib/python3.10/distutils/dist.py +1256 -0
  7. deepseek/lib/python3.10/distutils/extension.py +241 -0
  8. deepseek/lib/python3.10/distutils/fancy_getopt.py +457 -0
  9. deepseek/lib/python3.10/distutils/file_util.py +238 -0
  10. deepseek/lib/python3.10/distutils/filelist.py +327 -0
  11. deepseek/lib/python3.10/distutils/tests/__init__.py +41 -0
  12. deepseek/lib/python3.10/distutils/tests/support.py +209 -0
  13. deepseek/lib/python3.10/distutils/tests/test_filelist.py +340 -0
  14. deepseek/lib/python3.10/distutils/tests/test_install_data.py +75 -0
  15. deepseek/lib/python3.10/distutils/tests/test_msvccompiler.py +81 -0
  16. deepseek/lib/python3.10/distutils/text_file.py +286 -0
  17. deepseek/lib/python3.10/distutils/versionpredicate.py +166 -0
  18. deepseek/lib/python3.10/ipaddress.py +2361 -0
  19. deepseek/lib/python3.10/numbers.py +393 -0
  20. deepseek/lib/python3.10/wave.py +513 -0
  21. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/colorpicker/Example.svelte +22 -0
  22. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/colorpicker/Index.svelte +62 -0
  23. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/colorpicker/package.json +43 -0
  24. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/colorpicker/shared/Colorpicker.svelte +416 -0
  25. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/colorpicker/shared/events.ts +28 -0
  26. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Back.svelte +17 -0
  27. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Backward.svelte +16 -0
  28. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Brush.svelte +10 -0
  29. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/BrushSize.svelte +10 -0
  30. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Calendar.svelte +51 -0
  31. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Camera.svelte +17 -0
  32. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Chart.svelte +10 -0
  33. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Chat.svelte +17 -0
  34. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Check.svelte +10 -0
  35. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Clear.svelte +24 -0
  36. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Color.svelte +11 -0
  37. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Community.svelte +6 -0
  38. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Download.svelte +10 -0
  39. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Edit.svelte +14 -0
  40. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Erase.svelte +17 -0
  41. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Eyedropper.svelte +10 -0
  42. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Image.svelte +16 -0
  43. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/ImagePaste.svelte +6 -0
  44. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/JSON.svelte +16 -0
  45. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Layers.svelte +19 -0
  46. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/LineChart.svelte +16 -0
  47. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Maximise.svelte +15 -0
  48. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Maximize.svelte +15 -0
  49. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Microphone.svelte +20 -0
  50. evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Minimize.svelte +13 -0
.gitattributes CHANGED
@@ -1351,3 +1351,4 @@ evalkit_tf437/lib/python3.10/lib-dynload/_codecs_cn.cpython-310-x86_64-linux-gnu
1351
  evalkit_tf446/lib/python3.10/site-packages/torch/distributed/__pycache__/distributed_c10d.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1352
  evalkit_tf446/lib/python3.10/site-packages/torch/_inductor/__pycache__/ir.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1353
  evalkit_tf446/lib/python3.10/site-packages/gradio/templates/node/build/client/_app/immutable/chunks/hls.CFPBCiRi.js.br filter=lfs diff=lfs merge=lfs -text
 
 
1351
  evalkit_tf446/lib/python3.10/site-packages/torch/distributed/__pycache__/distributed_c10d.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1352
  evalkit_tf446/lib/python3.10/site-packages/torch/_inductor/__pycache__/ir.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1353
  evalkit_tf446/lib/python3.10/site-packages/gradio/templates/node/build/client/_app/immutable/chunks/hls.CFPBCiRi.js.br filter=lfs diff=lfs merge=lfs -text
1354
+ deepseek/lib/libncurses++.a filter=lfs diff=lfs merge=lfs -text
deepseek/lib/libncurses++.a ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:93b48c40f5d7b07e1a8c4bd9419df55c28e250cca1166be4aafd2fc7caf18823
3
+ size 187604
deepseek/lib/python3.10/distutils/ccompiler.py ADDED
@@ -0,0 +1,1116 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.ccompiler
2
+
3
+ Contains CCompiler, an abstract base class that defines the interface
4
+ for the Distutils compiler abstraction model."""
5
+
6
+ import sys, os, re
7
+ from distutils.errors import *
8
+ from distutils.spawn import spawn
9
+ from distutils.file_util import move_file
10
+ from distutils.dir_util import mkpath
11
+ from distutils.dep_util import newer_group
12
+ from distutils.util import split_quoted, execute
13
+ from distutils import log
14
+
15
class CCompiler:
    """Abstract base class for the Distutils compiler abstraction model.

    One CCompiler instance is meant to drive every compile and link step
    of a single project build, so settings shared by all of those steps
    -- include directories, macro definitions, libraries to link against,
    and so on -- live on the instance.  Most of them can still be varied
    on a per-compile or per-link basis.  Concrete subclasses implement
    the actual compiler invocations; this class supplies shared utility
    behaviour.
    """

    # Class-level tag identifying the concrete compiler flavour, so client
    # code can ask "what kind of compiler is this?" without importing and
    # isinstance-checking every possible compiler class.  Every concrete
    # subclass should set it to one of the keys of the 'compiler_class'
    # dictionary used by the 'new_compiler()' factory function -- authors
    # of new compiler interface classes are responsible for keeping that
    # mapping up to date.
    compiler_type = None

    # XXX known gaps in this abstraction model:
    #   * clients cannot pass additional generic compiler options
    #     (warnings, optimization, debugging flags); perhaps that is the
    #     domain of the concrete classes (UnixCCompiler, MSVCCompiler,
    #     etc.), or the base class should grow methods for the common ones;
    #   * the default include or library search path cannot be completely
    #     overridden (no "cc -I -Idir1 -Idir2" / "cc -L -Ldir1 -Ldir2"),
    #     which would matter mostly for cross-compiling;
    #   * the library list cannot interleave search dirs and names
    #     ("-Ldir1 -lfoo -Ldir2 -lfoo") to link different versions of the
    #     same library -- probably useless without the previous item anyway.

    # Filename-generation hooks.  Subclasses that rely on the standard
    # filename generation methods implemented below ('object_filenames()'
    # et al.) should override these; see the comments near those methods.
    src_extensions = None        # list of strings
    obj_extension = None         # string
    static_lib_extension = None
    shared_lib_extension = None  # string
    static_lib_format = None     # format string
    shared_lib_format = None     # prob. same as static_lib_format
    exe_extension = None         # string

    # Default language settings.  'language_map' maps a source-file
    # extension to the language it implies; 'language_order' ranks
    # languages by precedence when source types are mixed (e.g. an
    # extension with two ".c" files and one ".cpp" is still linked as C++).
    language_map = {".c"   : "c",
                    ".cc"  : "c++",
                    ".cpp" : "c++",
                    ".cxx" : "c++",
                    ".m"   : "objc",
                   }
    language_order = ["c++", "objc", "c"]
84
+
85
def __init__(self, verbose=0, dry_run=0, force=0):
    """Initialize the compiler with empty per-instance option lists.

    'dry_run' suppresses actual command execution, 'force' makes every
    rebuild unconditional, 'verbose' controls chattiness.
    """
    self.dry_run = dry_run
    self.force = force
    self.verbose = verbose

    # Common output directory for object, library, shared object,
    # and shared library files.
    self.output_dir = None

    # Macro definitions/undefinitions: a (name, value) 2-tuple defines a
    # macro (value is a string, or None for "no explicit value"); a
    # (name,) 1-tuple undefines one.
    self.macros = []

    # Directories searched for include files.
    self.include_dirs = []

    # Library *names* included in every link (e.g. "foo", not "libfoo.a").
    self.libraries = []

    # Directories searched for libraries at link time.
    self.library_dirs = []

    # Directories searched for shared libraries/objects at runtime.
    self.runtime_library_dirs = []

    # Extra object files (or similar, e.g. explicitly named library
    # files) included in every link.
    self.objects = []

    # Install the subclass-declared executables (compiler, linker, ...).
    for name, command in self.executables.items():
        self.set_executable(name, command)
120
+
121
def set_executables(self, **kwargs):
    """Define the executables (and options for them) used in the various
    stages of compilation.

    The set of names accepted here depends on the compiler class (via its
    'executables' class attribute); most classes have at least:
      compiler      the C/C++ compiler
      linker_so     linker for shared objects and libraries
      linker_exe    linker for binary executables
      archiver      static library creator

    On platforms with a command line (Unix, DOS/Windows) each value is a
    string split into an executable name plus optional arguments, using
    shell-like rules ('distutils.util.split_quoted()').

    Raises ValueError for a name not declared in 'executables'.
    """
    # Some compiler classes hard-code class attributes 'cpp', 'cc', etc.
    # (appropriate for a single compiler/OS combination, eg. MSVCCompiler),
    # while others (notably UnixCCompiler) are driven by information
    # discovered at run time, since there are many ways to do basically
    # the same thing with Unix C compilers.
    for name, command in kwargs.items():
        if name not in self.executables:
            raise ValueError("unknown executable '%s' for class %s" %
                             (name, self.__class__.__name__))
        self.set_executable(name, command)
152
+
153
def set_executable(self, key, value):
    """Store one executable setting as attribute 'key'.

    A string value is split shell-style into a command list; any other
    value (typically an already-split list) is stored unchanged.
    """
    command = split_quoted(value) if isinstance(value, str) else value
    setattr(self, key, command)
158
+
159
+ def _find_macro(self, name):
160
+ i = 0
161
+ for defn in self.macros:
162
+ if defn[0] == name:
163
+ return i
164
+ i += 1
165
+ return None
166
+
167
+ def _check_macro_definitions(self, definitions):
168
+ """Ensures that every element of 'definitions' is a valid macro
169
+ definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do
170
+ nothing if all definitions are OK, raise TypeError otherwise.
171
+ """
172
+ for defn in definitions:
173
+ if not (isinstance(defn, tuple) and
174
+ (len(defn) in (1, 2) and
175
+ (isinstance (defn[1], str) or defn[1] is None)) and
176
+ isinstance (defn[0], str)):
177
+ raise TypeError(("invalid macro definition '%s': " % defn) + \
178
+ "must be tuple (string,), (string, string), or " + \
179
+ "(string, None)")
180
+
181
+
182
+ # -- Bookkeeping methods -------------------------------------------
183
+
184
def define_macro(self, name, value=None):
    """Define a preprocessor macro for all compilations driven by this
    compiler object.

    'value' should be a string; when omitted the macro is defined without
    an explicit value and the exact outcome is compiler-dependent.
    """
    # Remove any earlier definition/undefinition of the same name so that
    # this call takes precedence.
    existing = self._find_macro(name)
    if existing is not None:
        del self.macros[existing]

    self.macros.append((name, value))
198
+
199
def undefine_macro(self, name):
    """Undefine a preprocessor macro for all compilations driven by this
    compiler object.

    When the same macro is both defined by 'define_macro()' and undefined
    by 'undefine_macro()', the last call wins (across any number of
    redefinitions/undefinitions).  A per-compilation redefinition (in the
    call to 'compile()') takes precedence over either.
    """
    # Remove any earlier definition/undefinition of the same name so that
    # this call takes precedence.
    existing = self._find_macro(name)
    if existing is not None:
        del self.macros[existing]

    # A 1-tuple marks an undefinition.
    self.macros.append((name,))
216
+
217
def add_include_dir(self, dir):
    """Append 'dir' to the header search path.  Directories are searched
    in the order of successive 'add_include_dir()' calls."""
    self.include_dirs.append(dir)

def set_include_dirs(self, dirs):
    """Replace the header search path with a copy of 'dirs' (a list of
    strings), discarding any preceding 'add_include_dir()' calls; later
    'add_include_dir()' calls extend this list.  The compiler's default
    include path is unaffected."""
    self.include_dirs = list(dirs)

def add_library(self, libname):
    """Append library 'libname' to the list linked into every link.

    'libname' is the bare library name (eg. "foo", *not* "libfoo.a");
    the actual filename is inferred by the linker, the compiler, or the
    compiler class, depending on the platform.  Libraries are linked in
    the order supplied to 'add_library()'/'set_libraries()', and
    duplicates are legal: the library is then linked that many times."""
    self.libraries.append(libname)

def set_libraries(self, libnames):
    """Replace the list of libraries linked into every link with a copy
    of 'libnames' (a list of strings).  Standard system libraries the
    linker includes by default are unaffected."""
    self.libraries = list(libnames)

def add_library_dir(self, dir):
    """Append 'dir' to the directories searched for libraries named via
    'add_library()'/'set_libraries()'.  Directories are searched in the
    order supplied to 'add_library_dir()'/'set_library_dirs()'."""
    self.library_dirs.append(dir)

def set_library_dirs(self, dirs):
    """Replace the library search path with a copy of 'dirs' (a list of
    strings).  The linker's default search path is unaffected."""
    self.library_dirs = list(dirs)

def add_runtime_library_dir(self, dir):
    """Append 'dir' to the directories searched for shared libraries at
    runtime."""
    self.runtime_library_dirs.append(dir)

def set_runtime_library_dirs(self, dirs):
    """Replace the runtime shared-library search path with a copy of
    'dirs' (a list of strings).  The runtime linker's default search
    path is unaffected."""
    self.runtime_library_dirs = list(dirs)

def add_link_object(self, object):
    """Append 'object' to the object files (or analogues, such as
    explicitly named library files or "resource compiler" output)
    included in every link driven by this compiler object."""
    self.objects.append(object)

def set_link_objects(self, objects):
    """Replace the list of extra objects included in every link with a
    copy of 'objects'.  Standard objects the linker includes by default
    (such as system libraries) are unaffected."""
    self.objects = list(objects)
303
+
304
+
305
+ # -- Private utility methods --------------------------------------
306
+ # (here for the convenience of subclasses)
307
+
308
+ # Helper method to prep compiler in subclass compile() methods
309
+
310
def _setup_compile(self, outdir, macros, incdirs, sources, depends,
                   extra):
    """Process arguments and decide which source files to compile.

    Returns (macros, objects, extra, pp_opts, build) where 'objects' is
    one expected object filename per source, 'pp_opts' are the
    preprocessor options, and 'build' maps each object filename to its
    (source, extension) pair.
    """
    if outdir is None:
        outdir = self.output_dir
    elif not isinstance(outdir, str):
        raise TypeError("'output_dir' must be a string or None")

    # Per-call macros come first so they take precedence over the
    # instance-wide ones.  NOTE: only a real list is accepted here.
    if macros is None:
        macros = self.macros
    elif isinstance(macros, list):
        macros = macros + (self.macros or [])
    else:
        raise TypeError("'macros' (if supplied) must be a list of tuples")

    if incdirs is None:
        incdirs = self.include_dirs
    elif isinstance(incdirs, (list, tuple)):
        incdirs = list(incdirs) + (self.include_dirs or [])
    else:
        raise TypeError(
              "'include_dirs' (if supplied) must be a list of strings")

    if extra is None:
        extra = []

    # Expected output (object) files, one per source.
    objects = self.object_filenames(sources, strip_dir=0,
                                    output_dir=outdir)
    assert len(objects) == len(sources)

    pp_opts = gen_preprocess_options(macros, incdirs)

    build = {}
    for src, obj in zip(sources, objects):
        ext = os.path.splitext(src)[1]
        self.mkpath(os.path.dirname(obj))
        build[obj] = (src, ext)

    return macros, objects, extra, pp_opts, build
352
+
353
+ def _get_cc_args(self, pp_opts, debug, before):
354
+ # works for unixccompiler, cygwinccompiler
355
+ cc_args = pp_opts + ['-c']
356
+ if debug:
357
+ cc_args[:0] = ['-g']
358
+ if before:
359
+ cc_args[:0] = before
360
+ return cc_args
361
+
362
+ def _fix_compile_args(self, output_dir, macros, include_dirs):
363
+ """Typecheck and fix-up some of the arguments to the 'compile()'
364
+ method, and return fixed-up values. Specifically: if 'output_dir'
365
+ is None, replaces it with 'self.output_dir'; ensures that 'macros'
366
+ is a list, and augments it with 'self.macros'; ensures that
367
+ 'include_dirs' is a list, and augments it with 'self.include_dirs'.
368
+ Guarantees that the returned values are of the correct type,
369
+ i.e. for 'output_dir' either string or None, and for 'macros' and
370
+ 'include_dirs' either list or None.
371
+ """
372
+ if output_dir is None:
373
+ output_dir = self.output_dir
374
+ elif not isinstance(output_dir, str):
375
+ raise TypeError("'output_dir' must be a string or None")
376
+
377
+ if macros is None:
378
+ macros = self.macros
379
+ elif isinstance(macros, list):
380
+ macros = macros + (self.macros or [])
381
+ else:
382
+ raise TypeError("'macros' (if supplied) must be a list of tuples")
383
+
384
+ if include_dirs is None:
385
+ include_dirs = self.include_dirs
386
+ elif isinstance(include_dirs, (list, tuple)):
387
+ include_dirs = list(include_dirs) + (self.include_dirs or [])
388
+ else:
389
+ raise TypeError(
390
+ "'include_dirs' (if supplied) must be a list of strings")
391
+
392
+ return output_dir, macros, include_dirs
393
+
394
+ def _prep_compile(self, sources, output_dir, depends=None):
395
+ """Decide which source files must be recompiled.
396
+
397
+ Determine the list of object files corresponding to 'sources',
398
+ and figure out which ones really need to be recompiled.
399
+ Return a list of all object files and a dictionary telling
400
+ which source files can be skipped.
401
+ """
402
+ # Get the list of expected output (object) files
403
+ objects = self.object_filenames(sources, output_dir=output_dir)
404
+ assert len(objects) == len(sources)
405
+
406
+ # Return an empty dict for the "which source files can be skipped"
407
+ # return value to preserve API compatibility.
408
+ return objects, {}
409
+
410
+ def _fix_object_args(self, objects, output_dir):
411
+ """Typecheck and fix up some arguments supplied to various methods.
412
+ Specifically: ensure that 'objects' is a list; if output_dir is
413
+ None, replace with self.output_dir. Return fixed versions of
414
+ 'objects' and 'output_dir'.
415
+ """
416
+ if not isinstance(objects, (list, tuple)):
417
+ raise TypeError("'objects' must be a list or tuple of strings")
418
+ objects = list(objects)
419
+
420
+ if output_dir is None:
421
+ output_dir = self.output_dir
422
+ elif not isinstance(output_dir, str):
423
+ raise TypeError("'output_dir' must be a string or None")
424
+
425
+ return (objects, output_dir)
426
+
427
+ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
428
+ """Typecheck and fix up some of the arguments supplied to the
429
+ 'link_*' methods. Specifically: ensure that all arguments are
430
+ lists, and augment them with their permanent versions
431
+ (eg. 'self.libraries' augments 'libraries'). Return a tuple with
432
+ fixed versions of all arguments.
433
+ """
434
+ if libraries is None:
435
+ libraries = self.libraries
436
+ elif isinstance(libraries, (list, tuple)):
437
+ libraries = list (libraries) + (self.libraries or [])
438
+ else:
439
+ raise TypeError(
440
+ "'libraries' (if supplied) must be a list of strings")
441
+
442
+ if library_dirs is None:
443
+ library_dirs = self.library_dirs
444
+ elif isinstance(library_dirs, (list, tuple)):
445
+ library_dirs = list (library_dirs) + (self.library_dirs or [])
446
+ else:
447
+ raise TypeError(
448
+ "'library_dirs' (if supplied) must be a list of strings")
449
+
450
+ if runtime_library_dirs is None:
451
+ runtime_library_dirs = self.runtime_library_dirs
452
+ elif isinstance(runtime_library_dirs, (list, tuple)):
453
+ runtime_library_dirs = (list(runtime_library_dirs) +
454
+ (self.runtime_library_dirs or []))
455
+ else:
456
+ raise TypeError("'runtime_library_dirs' (if supplied) "
457
+ "must be a list of strings")
458
+
459
+ return (libraries, library_dirs, runtime_library_dirs)
460
+
461
+ def _need_link(self, objects, output_file):
462
+ """Return true if we need to relink the files listed in 'objects'
463
+ to recreate 'output_file'.
464
+ """
465
+ if self.force:
466
+ return True
467
+ else:
468
+ if self.dry_run:
469
+ newer = newer_group (objects, output_file, missing='newer')
470
+ else:
471
+ newer = newer_group (objects, output_file)
472
+ return newer
473
+
474
def detect_language(self, sources):
    """Detect the language of a file (or of a list of files), using
    'language_map' to classify each extension and 'language_order' to
    pick the highest-precedence language among them.  Returns None when
    no source maps to a known language.
    """
    if not isinstance(sources, list):
        sources = [sources]

    best = None
    best_rank = len(self.language_order)
    for source in sources:
        ext = os.path.splitext(source)[1]
        lang = self.language_map.get(ext)
        try:
            rank = self.language_order.index(lang)
        except ValueError:
            continue  # unknown extension/language: ignore this source
        if rank < best_rank:
            best, best_rank = lang, rank
    return best
493
+
494
+
495
+ # -- Worker methods ------------------------------------------------
496
+ # (must be implemented by subclasses)
497
+
498
def preprocess(self, source, output_file=None, macros=None,
               include_dirs=None, extra_preargs=None, extra_postargs=None):
    """Preprocess a single C/C++ source file named in 'source'.

    Output goes to 'output_file', or to stdout when it is not supplied.
    'macros' is a list of macro definitions as for 'compile()',
    augmenting those set with 'define_macro()'/'undefine_macro()';
    'include_dirs' is a list of directory names added to the default
    search path.

    Raises PreprocessError on failure.  The base class implementation
    is a no-op; concrete compilers supply the real behaviour.
    """
    pass
510
+
511
def compile(self, sources, output_dir=None, macros=None,
            include_dirs=None, debug=0, extra_preargs=None,
            extra_postargs=None, depends=None):
    """Compile one or more source files.

    'sources' is a list of filenames -- usually C/C++, but anything the
    concrete compiler class handles (eg. MSVCCompiler accepts resource
    files).  Returns one object filename per source; depending on the
    implementation not every source is necessarily recompiled, but every
    corresponding object filename is returned.

    'output_dir', when given, receives the object files while keeping
    each source's original path component ("foo/bar.c" -> 
    "build/foo/bar.o" for 'output_dir' "build" on Unix).

    'macros' is a list of macro definitions: (name, value) 2-tuples
    define (value None means "no explicit value"), (name,) 1-tuples
    undefine; later entries take precedence.

    'include_dirs' lists extra directories prepended to the default
    include search path for this compilation only.

    'debug' (boolean) requests debug symbols in/alongside the objects.

    'extra_preargs'/'extra_postargs' are implementation-dependent escape
    hatches; on command-line platforms they are lists of strings
    prepended/appended to the compiler command line.

    'depends' is a coarse-grained dependency list: sources older than
    any file in it are recompiled.

    Raises CompileError on failure.
    """
    # A concrete compiler class can either override this method entirely
    # or just implement _compile().
    (macros, objects, extra_postargs, pp_opts, build) = \
        self._setup_compile(output_dir, macros, include_dirs, sources,
                            depends, extra_postargs)
    cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)

    for obj in objects:
        entry = build.get(obj)
        if entry is None:
            # Not scheduled for (re)compilation.
            continue
        src, ext = entry
        self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)

    # Return *all* object filenames, not just the ones just built.
    return objects
578
+
579
    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile the single source file 'src' (whose extension is 'ext')
        into the object file 'obj'.

        'cc_args', 'extra_postargs' and 'pp_opts' are the argument lists
        prepared by 'compile()' / '_setup_compile()'.  The default
        implementation is a no-op.
        """
        # A concrete compiler class that does not override compile()
        # should implement _compile().
        pass
584
+
585
    def create_static_lib(self, objects, output_libname, output_dir=None,
                          debug=0, target_lang=None):
        """Link a bunch of stuff together to create a static library file.
        The "bunch of stuff" consists of the list of object files supplied
        as 'objects', the extra object files supplied to
        'add_link_object()' and/or 'set_link_objects()', the libraries
        supplied to 'add_library()' and/or 'set_libraries()', and the
        libraries supplied as 'libraries' (if any).

        'output_libname' should be a library name, not a filename; the
        filename will be inferred from the library name. 'output_dir' is
        the directory where the library file will be put.

        'debug' is a boolean; if true, debugging information will be
        included in the library (note that on most platforms, it is the
        compile step where this matters: the 'debug' flag is included here
        just for consistency).

        'target_lang' is the target language for which the given objects
        are being compiled. This allows specific linkage time treatment of
        certain languages.

        Raises LibError on failure.
        """
        # Default implementation does nothing; concrete compiler classes
        # are expected to override this.
        pass
610
+
611
+
612
    # values for target_desc parameter in link()
    SHARED_OBJECT = "shared_object"
    SHARED_LIBRARY = "shared_library"
    EXECUTABLE = "executable"

    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir=None,
             libraries=None,
             library_dirs=None,
             runtime_library_dirs=None,
             export_symbols=None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):
        """Link a bunch of stuff together to create an executable or
        shared library file.

        'target_desc' is one of the SHARED_OBJECT / SHARED_LIBRARY /
        EXECUTABLE class constants defined above.

        The "bunch of stuff" consists of the list of object files supplied
        as 'objects'. 'output_filename' should be a filename. If
        'output_dir' is supplied, 'output_filename' is relative to it
        (i.e. 'output_filename' can provide directory components if
        needed).

        'libraries' is a list of libraries to link against. These are
        library names, not filenames, since they're translated into
        filenames in a platform-specific way (eg. "foo" becomes "libfoo.a"
        on Unix and "foo.lib" on DOS/Windows). However, they can include a
        directory component, which means the linker will look in that
        specific directory rather than searching all the normal locations.

        'library_dirs', if supplied, should be a list of directories to
        search for libraries that were specified as bare library names
        (ie. no directory component). These are on top of the system
        default and those supplied to 'add_library_dir()' and/or
        'set_library_dirs()'. 'runtime_library_dirs' is a list of
        directories that will be embedded into the shared library and used
        to search for other shared libraries that *it* depends on at
        run-time. (This may only be relevant on Unix.)

        'export_symbols' is a list of symbols that the shared library will
        export. (This appears to be relevant only on Windows.)

        'debug' is as for 'compile()' and 'create_static_lib()', with the
        slight distinction that it actually matters on most platforms (as
        opposed to 'create_static_lib()', which includes a 'debug' flag
        mostly for form's sake).

        'extra_preargs' and 'extra_postargs' are as for 'compile()' (except
        of course that they supply command-line arguments for the
        particular linker being used).

        'target_lang' is the target language for which the given objects
        are being compiled. This allows specific linkage time treatment of
        certain languages.

        Raises LinkError on failure.
        """
        # Abstract: every concrete compiler class must override this.
        raise NotImplementedError
675
+
676
+
677
+ # Old 'link_*()' methods, rewritten to use the new 'link()' method.
678
+
679
+ def link_shared_lib(self,
680
+ objects,
681
+ output_libname,
682
+ output_dir=None,
683
+ libraries=None,
684
+ library_dirs=None,
685
+ runtime_library_dirs=None,
686
+ export_symbols=None,
687
+ debug=0,
688
+ extra_preargs=None,
689
+ extra_postargs=None,
690
+ build_temp=None,
691
+ target_lang=None):
692
+ self.link(CCompiler.SHARED_LIBRARY, objects,
693
+ self.library_filename(output_libname, lib_type='shared'),
694
+ output_dir,
695
+ libraries, library_dirs, runtime_library_dirs,
696
+ export_symbols, debug,
697
+ extra_preargs, extra_postargs, build_temp, target_lang)
698
+
699
+
700
+ def link_shared_object(self,
701
+ objects,
702
+ output_filename,
703
+ output_dir=None,
704
+ libraries=None,
705
+ library_dirs=None,
706
+ runtime_library_dirs=None,
707
+ export_symbols=None,
708
+ debug=0,
709
+ extra_preargs=None,
710
+ extra_postargs=None,
711
+ build_temp=None,
712
+ target_lang=None):
713
+ self.link(CCompiler.SHARED_OBJECT, objects,
714
+ output_filename, output_dir,
715
+ libraries, library_dirs, runtime_library_dirs,
716
+ export_symbols, debug,
717
+ extra_preargs, extra_postargs, build_temp, target_lang)
718
+
719
+
720
+ def link_executable(self,
721
+ objects,
722
+ output_progname,
723
+ output_dir=None,
724
+ libraries=None,
725
+ library_dirs=None,
726
+ runtime_library_dirs=None,
727
+ debug=0,
728
+ extra_preargs=None,
729
+ extra_postargs=None,
730
+ target_lang=None):
731
+ self.link(CCompiler.EXECUTABLE, objects,
732
+ self.executable_filename(output_progname), output_dir,
733
+ libraries, library_dirs, runtime_library_dirs, None,
734
+ debug, extra_preargs, extra_postargs, None, target_lang)
735
+
736
+
737
+ # -- Miscellaneous methods -----------------------------------------
738
+ # These are all used by the 'gen_lib_options() function; there is
739
+ # no appropriate default implementation so subclasses should
740
+ # implement all of these.
741
+
742
    def library_dir_option(self, dir):
        """Return the compiler option to add 'dir' to the list of
        directories searched for libraries.

        Abstract; concrete compiler classes must override this.
        """
        raise NotImplementedError
747
+
748
    def runtime_library_dir_option(self, dir):
        """Return the compiler option to add 'dir' to the list of
        directories searched for runtime libraries.

        Abstract; concrete compiler classes must override this.  Note that
        callers (gen_lib_options) accept either a single option string or
        a list of strings from overrides.
        """
        raise NotImplementedError
753
+
754
    def library_option(self, lib):
        """Return the compiler option to add 'lib' to the list of libraries
        linked into the shared library or executable.

        Abstract; concrete compiler classes must override this.
        """
        raise NotImplementedError
759
+
760
+ def has_function(self, funcname, includes=None, include_dirs=None,
761
+ libraries=None, library_dirs=None):
762
+ """Return a boolean indicating whether funcname is supported on
763
+ the current platform. The optional arguments can be used to
764
+ augment the compilation environment.
765
+ """
766
+ # this can't be included at module scope because it tries to
767
+ # import math which might not be available at that point - maybe
768
+ # the necessary logic should just be inlined?
769
+ import tempfile
770
+ if includes is None:
771
+ includes = []
772
+ if include_dirs is None:
773
+ include_dirs = []
774
+ if libraries is None:
775
+ libraries = []
776
+ if library_dirs is None:
777
+ library_dirs = []
778
+ fd, fname = tempfile.mkstemp(".c", funcname, text=True)
779
+ f = os.fdopen(fd, "w")
780
+ try:
781
+ for incl in includes:
782
+ f.write("""#include "%s"\n""" % incl)
783
+ f.write("""\
784
+ int main (int argc, char **argv) {
785
+ %s();
786
+ return 0;
787
+ }
788
+ """ % funcname)
789
+ finally:
790
+ f.close()
791
+ try:
792
+ objects = self.compile([fname], include_dirs=include_dirs)
793
+ except CompileError:
794
+ return False
795
+
796
+ try:
797
+ self.link_executable(objects, "a.out",
798
+ libraries=libraries,
799
+ library_dirs=library_dirs)
800
+ except (LinkError, TypeError):
801
+ return False
802
+ return True
803
+
804
    def find_library_file (self, dirs, lib, debug=0):
        """Search the specified list of directories for a static or shared
        library file 'lib' and return the full path to that file. If
        'debug' true, look for a debugging version (if that makes sense on
        the current platform). Return None if 'lib' wasn't found in any of
        the specified directories.

        Abstract; concrete compiler classes must override this.
        """
        raise NotImplementedError
812
+
813
+ # -- Filename generation methods -----------------------------------
814
+
815
+ # The default implementation of the filename generating methods are
816
+ # prejudiced towards the Unix/DOS/Windows view of the world:
817
+ # * object files are named by replacing the source file extension
818
+ # (eg. .c/.cpp -> .o/.obj)
819
+ # * library files (shared or static) are named by plugging the
820
+ # library name and extension into a format string, eg.
821
+ # "lib%s.%s" % (lib_name, ".a") for Unix static libraries
822
+ # * executables are named by appending an extension (possibly
823
+ # empty) to the program name: eg. progname + ".exe" for
824
+ # Windows
825
+ #
826
+ # To reduce redundant code, these methods expect to find
827
+ # several attributes in the current object (presumably defined
828
+ # as class attributes):
829
+ # * src_extensions -
830
+ # list of C/C++ source file extensions, eg. ['.c', '.cpp']
831
+ # * obj_extension -
832
+ # object file extension, eg. '.o' or '.obj'
833
+ # * static_lib_extension -
834
+ # extension for static library files, eg. '.a' or '.lib'
835
+ # * shared_lib_extension -
836
+ # extension for shared library/object files, eg. '.so', '.dll'
837
+ # * static_lib_format -
838
+ # format string for generating static library filenames,
839
+ # eg. 'lib%s.%s' or '%s.%s'
840
+ # * shared_lib_format
841
+ # format string for generating shared library filenames
842
+ # (probably same as static_lib_format, since the extension
843
+ # is one of the intended parameters to the format string)
844
+ # * exe_extension -
845
+ # extension for executable files, eg. '' or '.exe'
846
+
847
+ def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
848
+ if output_dir is None:
849
+ output_dir = ''
850
+ obj_names = []
851
+ for src_name in source_filenames:
852
+ base, ext = os.path.splitext(src_name)
853
+ base = os.path.splitdrive(base)[1] # Chop off the drive
854
+ base = base[os.path.isabs(base):] # If abs, chop off leading /
855
+ if ext not in self.src_extensions:
856
+ raise UnknownFileError(
857
+ "unknown file type '%s' (from '%s')" % (ext, src_name))
858
+ if strip_dir:
859
+ base = os.path.basename(base)
860
+ obj_names.append(os.path.join(output_dir,
861
+ base + self.obj_extension))
862
+ return obj_names
863
+
864
+ def shared_object_filename(self, basename, strip_dir=0, output_dir=''):
865
+ assert output_dir is not None
866
+ if strip_dir:
867
+ basename = os.path.basename(basename)
868
+ return os.path.join(output_dir, basename + self.shared_lib_extension)
869
+
870
+ def executable_filename(self, basename, strip_dir=0, output_dir=''):
871
+ assert output_dir is not None
872
+ if strip_dir:
873
+ basename = os.path.basename(basename)
874
+ return os.path.join(output_dir, basename + (self.exe_extension or ''))
875
+
876
+ def library_filename(self, libname, lib_type='static', # or 'shared'
877
+ strip_dir=0, output_dir=''):
878
+ assert output_dir is not None
879
+ if lib_type not in ("static", "shared", "dylib", "xcode_stub"):
880
+ raise ValueError(
881
+ "'lib_type' must be \"static\", \"shared\", \"dylib\", or \"xcode_stub\"")
882
+ fmt = getattr(self, lib_type + "_lib_format")
883
+ ext = getattr(self, lib_type + "_lib_extension")
884
+
885
+ dir, base = os.path.split(libname)
886
+ filename = fmt % (base, ext)
887
+ if strip_dir:
888
+ dir = ''
889
+
890
+ return os.path.join(output_dir, dir, filename)
891
+
892
+
893
+ # -- Utility methods -----------------------------------------------
894
+
895
    def announce(self, msg, level=1):
        """Log 'msg' at debug verbosity.  'level' is accepted for
        interface compatibility but not used here."""
        log.debug(msg)
897
+
898
    def debug_print(self, msg):
        """Print 'msg' to stdout, but only when the DISTUTILS_DEBUG
        flag is set."""
        from distutils.debug import DEBUG
        if DEBUG:
            print(msg)
902
+
903
    def warn(self, msg):
        # Write the warning straight to stderr rather than through the
        # distutils logging machinery.
        sys.stderr.write("warning: %s\n" % msg)
905
+
906
    def execute(self, func, args, msg=None, level=1):
        # Delegate to the module-level 'execute', honouring dry-run mode.
        execute(func, args, msg, self.dry_run)
908
+
909
    def spawn(self, cmd):
        # Run an external command, honouring dry-run mode.
        spawn(cmd, dry_run=self.dry_run)
911
+
912
    def move_file(self, src, dst):
        # Move 'src' to 'dst', honouring dry-run mode; returns the new path.
        return move_file(src, dst, dry_run=self.dry_run)
914
+
915
    def mkpath (self, name, mode=0o777):
        # Create directory 'name' (and missing ancestors), honouring
        # dry-run mode.
        mkpath(name, mode, dry_run=self.dry_run)
917
+
918
+
919
# Map a sys.platform/os.name ('posix', 'nt') to the default compiler
# type for that platform.  Keys are interpreted as re match
# patterns.  Order is important; platform mappings are preferred over
# OS names.
_default_compilers = (

    # Platform string mappings

    # on a cygwin built python we can use gcc like an ordinary UNIXish
    # compiler
    ('cygwin.*', 'unix'),

    # OS name mappings
    ('posix', 'unix'),
    ('nt', 'msvc'),

    )

def get_default_compiler(osname=None, platform=None):
    """Determine the default compiler to use for the given platform.

    osname should be one of the standard Python OS names (i.e. the
    ones returned by os.name) and platform the common value
    returned by sys.platform for the platform in question.

    The default values are os.name and sys.platform in case the
    parameters are not given.
    """
    osname = os.name if osname is None else osname
    platform = sys.platform if platform is None else platform
    # First pattern matching either the platform string or the OS name
    # wins; the table orders platform patterns before OS-name patterns.
    for pattern, compiler in _default_compilers:
        if re.match(pattern, platform) or re.match(pattern, osname):
            return compiler
    # Default to Unix compiler
    return 'unix'
957
+
958
# Map compiler types to (module_name, class_name) pairs -- ie. where to
# find the code that implements an interface to this compiler. (The module
# is assumed to be in the 'distutils' package.)  The third element is the
# human-readable description shown by show_compilers().
compiler_class = { 'unix':    ('unixccompiler', 'UnixCCompiler',
                               "standard UNIX-style compiler"),
                   'msvc':    ('_msvccompiler', 'MSVCCompiler',
                               "Microsoft Visual C++"),
                   'cygwin':  ('cygwinccompiler', 'CygwinCCompiler',
                               "Cygwin port of GNU C Compiler for Win32"),
                   'mingw32': ('cygwinccompiler', 'Mingw32CCompiler',
                               "Mingw32 port of GNU C Compiler for Win32"),
                   'bcpp':    ('bcppcompiler', 'BCPPCompiler',
                               "Borland C++ Compiler"),
                 }
972
+
973
def show_compilers():
    """Print list of available compilers (used by the "--help-compiler"
    options to "build", "build_ext", "build_clib").
    """
    # XXX this "knows" that the compiler option it's describing is
    # "--compiler", which just happens to be the case for the three
    # commands that use it.
    from distutils.fancy_getopt import FancyGetopt
    # Build (option, short-option, description) triples, sorted by name,
    # and let FancyGetopt render them like a normal help screen.
    compilers = []
    for compiler in compiler_class.keys():
        compilers.append(("compiler="+compiler, None,
                          compiler_class[compiler][2]))
    compilers.sort()
    pretty_printer = FancyGetopt(compilers)
    pretty_printer.print_help("List of available compilers:")
988
+
989
+
990
def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
    """Generate an instance of some CCompiler subclass for the supplied
    platform/compiler combination. 'plat' defaults to 'os.name'
    (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
    for that platform. Currently only 'posix' and 'nt' are supported, and
    the default compilers are "traditional Unix interface" (UnixCCompiler
    class) and Visual C++ (MSVCCompiler class). Note that it's perfectly
    possible to ask for a Unix compiler object under Windows, and a
    Microsoft compiler object under Unix -- if you supply a value for
    'compiler', 'plat' is ignored.

    Raises DistutilsPlatformError for an unknown platform/compiler and
    DistutilsModuleError when the implementing module or class cannot be
    loaded.
    """
    if plat is None:
        plat = os.name

    try:
        if compiler is None:
            compiler = get_default_compiler(plat)

        # Look up which distutils module/class implements this compiler.
        (module_name, class_name, long_description) = compiler_class[compiler]
    except KeyError:
        msg = "don't know how to compile C/C++ code on platform '%s'" % plat
        if compiler is not None:
            msg = msg + " with '%s' compiler" % compiler
        raise DistutilsPlatformError(msg)

    try:
        module_name = "distutils." + module_name
        __import__ (module_name)
        module = sys.modules[module_name]
        klass = vars(module)[class_name]
    except ImportError:
        raise DistutilsModuleError(
              "can't compile C/C++ code: unable to load module '%s'" % \
              module_name)
    except KeyError:
        raise DistutilsModuleError(
               "can't compile C/C++ code: unable to find class '%s' "
               "in module '%s'" % (class_name, module_name))

    # XXX The None is necessary to preserve backwards compatibility
    # with classes that expect verbose to be the first positional
    # argument.
    return klass(None, dry_run, force)
1033
+
1034
+
1035
def gen_preprocess_options(macros, include_dirs):
    """Generate C pre-processor options (-D, -U, -I) as used by at least
    two types of compilers: the typical Unix compiler and Visual C++.
    'macros' is the usual thing, a list of 1- or 2-tuples, where (name,)
    means undefine (-U) macro 'name', and (name,value) means define (-D)
    macro 'name' to 'value'. 'include_dirs' is just a list of directory
    names to be added to the header file search path (-I). Returns a list
    of command-line options suitable for either Unix compilers or Visual
    C++.
    """
    # XXX it would be nice (mainly aesthetic, and so we don't generate
    # stupid-looking command lines) to go over 'macros' and eliminate
    # redundant definitions/undefinitions (ie. ensure that only the
    # latest mention of a particular macro winds up on the command
    # line).  I don't think it's essential, though, since most (all?)
    # Unix C compilers only pay attention to the latest -D or -U
    # mention of a macro on their command line.  Similar situation for
    # 'include_dirs'.  I'm punting on both for now.
    pp_opts = []
    for macro in macros:
        if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2):
            raise TypeError(
                "bad macro definition '%s': "
                "each element of 'macros' list must be a 1- or 2-tuple"
                % macro)

        if len(macro) == 1:
            # 1-tuple: undefine this macro
            pp_opts.append("-U%s" % macro[0])
        else:
            name, value = macro
            if value is None:
                # define with no explicit value
                pp_opts.append("-D%s" % name)
            else:
                # XXX *don't* need to be clever about quoting the
                # macro value here, because we're going to avoid the
                # shell at all costs when we spawn the command!
                pp_opts.append("-D%s=%s" % (name, value))

    pp_opts.extend("-I%s" % incdir for incdir in include_dirs)
    return pp_opts
1078
+
1079
+
1080
def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
    """Generate linker options for searching library directories and
    linking with specific libraries. 'libraries' and 'library_dirs' are,
    respectively, lists of library names (not filenames!) and search
    directories. Returns a list of command-line options suitable for use
    with some compiler (depending on the two format strings passed in).
    """
    lib_opts = [compiler.library_dir_option(libdir)
                for libdir in library_dirs]

    for libdir in runtime_library_dirs:
        opt = compiler.runtime_library_dir_option(libdir)
        # Some compilers return several options for one directory.
        if isinstance(opt, list):
            lib_opts.extend(opt)
        else:
            lib_opts.append(opt)

    # XXX it's important that we *not* remove redundant library mentions!
    # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to
    # resolve all symbols.  I just hope we never have to say "-lfoo obj.o
    # -lbar" to get things to work -- that's certainly a possibility, but a
    # pretty nasty way to arrange your C code.

    for lib in libraries:
        lib_dir, lib_name = os.path.split(lib)
        if not lib_dir:
            # bare library name: let the linker search for it
            lib_opts.append(compiler.library_option (lib))
            continue
        # library given with a directory component: resolve it to a file
        lib_file = compiler.find_library_file([lib_dir], lib_name)
        if lib_file:
            lib_opts.append(lib_file)
        else:
            compiler.warn("no library file corresponding to "
                          "'%s' found (skipping)" % lib)
    return lib_opts
deepseek/lib/python3.10/distutils/config.py ADDED
@@ -0,0 +1,130 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.pypirc
2
+
3
+ Provides the PyPIRCCommand class, the base class for the command classes
4
+ that uses .pypirc in the distutils.command package.
5
+ """
6
+ import os
7
+ from configparser import RawConfigParser
8
+
9
+ from distutils.cmd import Command
10
+
11
+ DEFAULT_PYPIRC = """\
12
+ [distutils]
13
+ index-servers =
14
+ pypi
15
+
16
+ [pypi]
17
+ username:%s
18
+ password:%s
19
+ """
20
+
21
class PyPIRCCommand(Command):
    """Base command that knows how to handle the .pypirc file
    """
    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
    DEFAULT_REALM = 'pypi'
    # Option defaults; real values are set in initialize/finalize_options.
    repository = None
    realm = None

    user_options = [
        ('repository=', 'r',
         "url of repository [default: %s]" % \
            DEFAULT_REPOSITORY),
        ('show-response', None,
         'display full response text from server')]

    boolean_options = ['show-response']

    def _get_rc_file(self):
        """Returns rc file path (~/.pypirc)."""
        return os.path.join(os.path.expanduser('~'), '.pypirc')

    def _store_pypirc(self, username, password):
        """Creates a default .pypirc file.

        The file is created with mode 0o600 since it holds credentials.
        """
        rc = self._get_rc_file()
        with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f:
            f.write(DEFAULT_PYPIRC % (username, password))

    def _read_pypirc(self):
        """Reads the .pypirc file.

        Returns a dict with 'username', 'password', 'repository', 'server'
        and 'realm' keys for the server matching self.repository, or an
        empty dict when no usable configuration is found.  Supports both
        the modern multi-server format ('distutils' section listing
        index-servers) and the legacy 'server-login' section.
        """
        rc = self._get_rc_file()
        if os.path.exists(rc):
            self.announce('Using PyPI login from %s' % rc)
            repository = self.repository or self.DEFAULT_REPOSITORY

            config = RawConfigParser()
            config.read(rc)
            sections = config.sections()
            if 'distutils' in sections:
                # let's get the list of servers
                index_servers = config.get('distutils', 'index-servers')
                _servers = [server.strip() for server in
                            index_servers.split('\n')
                            if server.strip() != '']
                if _servers == []:
                    # nothing set, let's try to get the default pypi
                    if 'pypi' in sections:
                        _servers = ['pypi']
                    else:
                        # the file is not properly defined, returning
                        # an empty dict
                        return {}
                for server in _servers:
                    current = {'server': server}
                    current['username'] = config.get(server, 'username')

                    # optional params
                    for key, default in (('repository',
                                          self.DEFAULT_REPOSITORY),
                                         ('realm', self.DEFAULT_REALM),
                                         ('password', None)):
                        if config.has_option(server, key):
                            current[key] = config.get(server, key)
                        else:
                            current[key] = default

                    # work around people having "repository" for the "pypi"
                    # section of their config set to the HTTP (rather than
                    # HTTPS) URL
                    if (server == 'pypi' and
                        repository in (self.DEFAULT_REPOSITORY, 'pypi')):
                        current['repository'] = self.DEFAULT_REPOSITORY
                        return current

                    if (current['server'] == repository or
                        current['repository'] == repository):
                        return current
            elif 'server-login' in sections:
                # old format: a single 'server-login' section
                server = 'server-login'
                if config.has_option(server, 'repository'):
                    repository = config.get(server, 'repository')
                else:
                    repository = self.DEFAULT_REPOSITORY
                return {'username': config.get(server, 'username'),
                        'password': config.get(server, 'password'),
                        'repository': repository,
                        'server': server,
                        'realm': self.DEFAULT_REALM}

        return {}

    def _read_pypi_response(self, response):
        """Read and decode a PyPI HTTP response.

        The charset is taken from the Content-Type header, defaulting to
        ASCII.  NOTE(review): 'cgi' is deprecated in newer Pythons; this
        mirrors the stdlib distutils implementation.
        """
        import cgi
        content_type = response.getheader('content-type', 'text/plain')
        encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii')
        return response.read().decode(encoding)

    def initialize_options(self):
        """Initialize options."""
        self.repository = None
        self.realm = None
        self.show_response = 0

    def finalize_options(self):
        """Finalizes options."""
        if self.repository is None:
            self.repository = self.DEFAULT_REPOSITORY
        if self.realm is None:
            self.realm = self.DEFAULT_REALM
deepseek/lib/python3.10/distutils/dep_util.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.dep_util
2
+
3
+ Utility functions for simple, timestamp-based dependency of files
4
+ and groups of files; also, function based entirely on such
5
+ timestamp dependency analysis."""
6
+
7
+ import os
8
+ from distutils.errors import DistutilsFileError
9
+
10
+
11
def newer (source, target):
    """Return true if 'source' exists and is more recently modified than
    'target', or if 'source' exists and 'target' doesn't. Return false if
    both exist and 'target' is the same age or younger than 'source'.
    Raise DistutilsFileError if 'source' does not exist.
    """
    if not os.path.exists(source):
        raise DistutilsFileError("file '%s' does not exist" %
                                 os.path.abspath(source))
    if not os.path.exists(target):
        # a missing target is always out of date
        return 1

    from stat import ST_MTIME
    return os.stat(source)[ST_MTIME] > os.stat(target)[ST_MTIME]

# newer ()
30
+
31
+
32
def newer_pairwise (sources, targets):
    """Walk two filename lists in parallel, testing if each source is newer
    than its corresponding target. Return a pair of lists (sources,
    targets) where source is newer than target, according to the semantics
    of 'newer()'.
    """
    if len(sources) != len(targets):
        raise ValueError("'sources' and 'targets' must be same length")

    # keep only the (source, target) pairs where the source is newer
    n_sources = []
    n_targets = []
    for src, tgt in zip(sources, targets):
        if newer(src, tgt):
            n_sources.append(src)
            n_targets.append(tgt)

    return (n_sources, n_targets)

# newer_pairwise ()
52
+
53
+
54
def newer_group (sources, target, missing='error'):
    """Return true if 'target' is out-of-date with respect to any file
    listed in 'sources'. In other words, if 'target' exists and is newer
    than every file in 'sources', return false; otherwise return true.
    'missing' controls what we do when a source file is missing; the
    default ("error") is to blow up with an OSError from inside 'stat()';
    if it is "ignore", we silently drop any missing source files; if it is
    "newer", any missing source files make us assume that 'target' is
    out-of-date (this is handy in "dry-run" mode: it'll make you pretend to
    carry out commands that wouldn't work because inputs are missing, but
    that doesn't matter because you're not actually going to run the
    commands).
    """
    # A target that doesn't exist is always out-of-date.
    if not os.path.exists(target):
        return 1

    # Compare every source against the target; the first source that is
    # more recent makes the target out-of-date.
    from stat import ST_MTIME
    target_mtime = os.stat(target)[ST_MTIME]
    for source in sources:
        if not os.path.exists(source):
            if missing == 'error':
                # fall through and let the os.stat() below raise
                pass
            elif missing == 'ignore':
                # drop this source from the dependency list
                continue
            elif missing == 'newer':
                # a missing source means the target is out-of-date
                return 1

        if os.stat(source)[ST_MTIME] > target_mtime:
            return 1

    return 0

# newer_group ()
deepseek/lib/python3.10/distutils/dist.py ADDED
@@ -0,0 +1,1256 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.dist
2
+
3
+ Provides the Distribution class, which represents the module distribution
4
+ being built/installed/distributed.
5
+ """
6
+
7
+ import sys
8
+ import os
9
+ import re
10
+ from email import message_from_file
11
+
12
+ try:
13
+ import warnings
14
+ except ImportError:
15
+ warnings = None
16
+
17
+ from distutils.errors import *
18
+ from distutils.fancy_getopt import FancyGetopt, translate_longopt
19
+ from distutils.util import check_environ, strtobool, rfc822_escape
20
+ from distutils import log
21
+ from distutils.debug import DEBUG
22
+
23
# Pattern for acceptable Distutils command names.  Almost a Python
# identifier, except that a leading underscore is forbidden; this is
# deliberate, since the default scheme resolves a command by importing a
# module of the same name.
command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
28
+
29
+
30
+ def _ensure_list(value, fieldname):
31
+ if isinstance(value, str):
32
+ # a string containing comma separated values is okay. It will
33
+ # be converted to a list by Distribution.finalize_options().
34
+ pass
35
+ elif not isinstance(value, list):
36
+ # passing a tuple or an iterator perhaps, warn and convert
37
+ typename = type(value).__name__
38
+ msg = f"Warning: '{fieldname}' should be a list, got type '{typename}'"
39
+ log.log(log.WARN, msg)
40
+ value = list(value)
41
+ return value
42
+
43
+
44
class Distribution:
    """The core of the Distutils.  Most of the work hiding behind 'setup'
    is really done within a Distribution instance, which farms the work out
    to the Distutils commands specified on the command line.

    Setup scripts will almost never instantiate Distribution directly,
    unless the 'setup()' function is totally inadequate to their needs.
    However, it is conceivable that a setup script might wish to subclass
    Distribution for some specialized purpose, and then pass the subclass
    to 'setup()' as the 'distclass' keyword argument.  If so, it is
    necessary to respect the expectations that 'setup' has of Distribution.
    See the code for 'setup()', in core.py, for details.
    """

    # Command-line options accepted before any command name
    # (e.g. "./setup.py -n" or "./setup.py --quiet").  Kept to a bare
    # minimum, because every global option is also valid as a command
    # option.  A fourth tuple element (as on 'verbose') marks an option
    # that may be repeated.
    global_options = [
        ('verbose', 'v', "run verbosely (default)", 1),
        ('quiet', 'q', "run quietly (turns verbosity off)"),
        ('dry-run', 'n', "don't actually do anything"),
        ('help', 'h', "show detailed help message"),
        ('no-user-cfg', None,
         'ignore pydistutils.cfg in your home directory'),
    ]

    # Short usage blurb printed at the top of '--help' output.
    common_usage = """\
Common commands: (see '--help-commands' for more)

  setup.py build      will build the package underneath 'build/'
  setup.py install    will install the package
"""

    # "Display-only" options: these print a piece of metadata and are
    # never propagated to the commands.
    display_options = [
        ('help-commands', None,
         "list all available commands"),
        ('name', None,
         "print package name"),
        ('version', 'V',
         "print package version"),
        ('fullname', None,
         "print <package name>-<version>"),
        ('author', None,
         "print the author's name"),
        ('author-email', None,
         "print the author's email address"),
        ('maintainer', None,
         "print the maintainer's name"),
        ('maintainer-email', None,
         "print the maintainer's email address"),
        ('contact', None,
         "print the maintainer's name if known, else the author's"),
        ('contact-email', None,
         "print the maintainer's email address if known, else the author's"),
        ('url', None,
         "print the URL for this package"),
        ('license', None,
         "print the license of the package"),
        ('licence', None,
         "alias for --license"),
        ('description', None,
         "print the package description"),
        ('long-description', None,
         "print the long package description"),
        ('platforms', None,
         "print the list of platforms"),
        ('classifiers', None,
         "print the list of classifiers"),
        ('keywords', None,
         "print the list of keywords"),
        ('provides', None,
         "print the list of packages/modules provided"),
        ('requires', None,
         "print the list of packages/modules required"),
        ('obsoletes', None,
         "print the list of packages/modules made obsolete")
    ]
    # Attribute names corresponding to the display options above
    # (dashes translated to underscores).
    display_option_names = [translate_longopt(x[0]) for x in display_options]

    # Options that negate other options ('--quiet' turns '--verbose' off).
    negative_opt = {'quiet': 'verbose'}
133
+
134
+ # -- Creation/initialization methods -------------------------------
135
+
136
+ def __init__(self, attrs=None):
137
+ """Construct a new Distribution instance: initialize all the
138
+ attributes of a Distribution, and then use 'attrs' (a dictionary
139
+ mapping attribute names to values) to assign some of those
140
+ attributes their "real" values. (Any attributes not mentioned in
141
+ 'attrs' will be assigned to some null value: 0, None, an empty list
142
+ or dictionary, etc.) Most importantly, initialize the
143
+ 'command_obj' attribute to the empty dictionary; this will be
144
+ filled in with real command objects by 'parse_command_line()'.
145
+ """
146
+
147
+ # Default values for our command-line options
148
+ self.verbose = 1
149
+ self.dry_run = 0
150
+ self.help = 0
151
+ for attr in self.display_option_names:
152
+ setattr(self, attr, 0)
153
+
154
+ # Store the distribution meta-data (name, version, author, and so
155
+ # forth) in a separate object -- we're getting to have enough
156
+ # information here (and enough command-line options) that it's
157
+ # worth it. Also delegate 'get_XXX()' methods to the 'metadata'
158
+ # object in a sneaky and underhanded (but efficient!) way.
159
+ self.metadata = DistributionMetadata()
160
+ for basename in self.metadata._METHOD_BASENAMES:
161
+ method_name = "get_" + basename
162
+ setattr(self, method_name, getattr(self.metadata, method_name))
163
+
164
+ # 'cmdclass' maps command names to class objects, so we
165
+ # can 1) quickly figure out which class to instantiate when
166
+ # we need to create a new command object, and 2) have a way
167
+ # for the setup script to override command classes
168
+ self.cmdclass = {}
169
+
170
+ # 'command_packages' is a list of packages in which commands
171
+ # are searched for. The factory for command 'foo' is expected
172
+ # to be named 'foo' in the module 'foo' in one of the packages
173
+ # named here. This list is searched from the left; an error
174
+ # is raised if no named package provides the command being
175
+ # searched for. (Always access using get_command_packages().)
176
+ self.command_packages = None
177
+
178
+ # 'script_name' and 'script_args' are usually set to sys.argv[0]
179
+ # and sys.argv[1:], but they can be overridden when the caller is
180
+ # not necessarily a setup script run from the command-line.
181
+ self.script_name = None
182
+ self.script_args = None
183
+
184
+ # 'command_options' is where we store command options between
185
+ # parsing them (from config files, the command-line, etc.) and when
186
+ # they are actually needed -- ie. when the command in question is
187
+ # instantiated. It is a dictionary of dictionaries of 2-tuples:
188
+ # command_options = { command_name : { option : (source, value) } }
189
+ self.command_options = {}
190
+
191
+ # 'dist_files' is the list of (command, pyversion, file) that
192
+ # have been created by any dist commands run so far. This is
193
+ # filled regardless of whether the run is dry or not. pyversion
194
+ # gives sysconfig.get_python_version() if the dist file is
195
+ # specific to a Python version, 'any' if it is good for all
196
+ # Python versions on the target platform, and '' for a source
197
+ # file. pyversion should not be used to specify minimum or
198
+ # maximum required Python versions; use the metainfo for that
199
+ # instead.
200
+ self.dist_files = []
201
+
202
+ # These options are really the business of various commands, rather
203
+ # than of the Distribution itself. We provide aliases for them in
204
+ # Distribution as a convenience to the developer.
205
+ self.packages = None
206
+ self.package_data = {}
207
+ self.package_dir = None
208
+ self.py_modules = None
209
+ self.libraries = None
210
+ self.headers = None
211
+ self.ext_modules = None
212
+ self.ext_package = None
213
+ self.include_dirs = None
214
+ self.extra_path = None
215
+ self.scripts = None
216
+ self.data_files = None
217
+ self.password = ''
218
+
219
+ # And now initialize bookkeeping stuff that can't be supplied by
220
+ # the caller at all. 'command_obj' maps command names to
221
+ # Command instances -- that's how we enforce that every command
222
+ # class is a singleton.
223
+ self.command_obj = {}
224
+
225
+ # 'have_run' maps command names to boolean values; it keeps track
226
+ # of whether we have actually run a particular command, to make it
227
+ # cheap to "run" a command whenever we think we might need to -- if
228
+ # it's already been done, no need for expensive filesystem
229
+ # operations, we just check the 'have_run' dictionary and carry on.
230
+ # It's only safe to query 'have_run' for a command class that has
231
+ # been instantiated -- a false value will be inserted when the
232
+ # command object is created, and replaced with a true value when
233
+ # the command is successfully run. Thus it's probably best to use
234
+ # '.get()' rather than a straight lookup.
235
+ self.have_run = {}
236
+
237
+ # Now we'll use the attrs dictionary (ultimately, keyword args from
238
+ # the setup script) to possibly override any or all of these
239
+ # distribution options.
240
+
241
+ if attrs:
242
+ # Pull out the set of command options and work on them
243
+ # specifically. Note that this order guarantees that aliased
244
+ # command options will override any supplied redundantly
245
+ # through the general options dictionary.
246
+ options = attrs.get('options')
247
+ if options is not None:
248
+ del attrs['options']
249
+ for (command, cmd_options) in options.items():
250
+ opt_dict = self.get_option_dict(command)
251
+ for (opt, val) in cmd_options.items():
252
+ opt_dict[opt] = ("setup script", val)
253
+
254
+ if 'licence' in attrs:
255
+ attrs['license'] = attrs['licence']
256
+ del attrs['licence']
257
+ msg = "'licence' distribution option is deprecated; use 'license'"
258
+ if warnings is not None:
259
+ warnings.warn(msg)
260
+ else:
261
+ sys.stderr.write(msg + "\n")
262
+
263
+ # Now work on the rest of the attributes. Any attribute that's
264
+ # not already defined is invalid!
265
+ for (key, val) in attrs.items():
266
+ if hasattr(self.metadata, "set_" + key):
267
+ getattr(self.metadata, "set_" + key)(val)
268
+ elif hasattr(self.metadata, key):
269
+ setattr(self.metadata, key, val)
270
+ elif hasattr(self, key):
271
+ setattr(self, key, val)
272
+ else:
273
+ msg = "Unknown distribution option: %s" % repr(key)
274
+ warnings.warn(msg)
275
+
276
+ # no-user-cfg is handled before other command line args
277
+ # because other args override the config files, and this
278
+ # one is needed before we can load the config files.
279
+ # If attrs['script_args'] wasn't passed, assume false.
280
+ #
281
+ # This also make sure we just look at the global options
282
+ self.want_user_cfg = True
283
+
284
+ if self.script_args is not None:
285
+ for arg in self.script_args:
286
+ if not arg.startswith('-'):
287
+ break
288
+ if arg == '--no-user-cfg':
289
+ self.want_user_cfg = False
290
+ break
291
+
292
+ self.finalize_options()
293
+
294
+ def get_option_dict(self, command):
295
+ """Get the option dictionary for a given command. If that
296
+ command's option dictionary hasn't been created yet, then create it
297
+ and return the new dictionary; otherwise, return the existing
298
+ option dictionary.
299
+ """
300
+ dict = self.command_options.get(command)
301
+ if dict is None:
302
+ dict = self.command_options[command] = {}
303
+ return dict
304
+
305
+ def dump_option_dicts(self, header=None, commands=None, indent=""):
306
+ from pprint import pformat
307
+
308
+ if commands is None: # dump all command option dicts
309
+ commands = sorted(self.command_options.keys())
310
+
311
+ if header is not None:
312
+ self.announce(indent + header)
313
+ indent = indent + " "
314
+
315
+ if not commands:
316
+ self.announce(indent + "no commands known yet")
317
+ return
318
+
319
+ for cmd_name in commands:
320
+ opt_dict = self.command_options.get(cmd_name)
321
+ if opt_dict is None:
322
+ self.announce(indent +
323
+ "no option dict for '%s' command" % cmd_name)
324
+ else:
325
+ self.announce(indent +
326
+ "option dict for '%s' command:" % cmd_name)
327
+ out = pformat(opt_dict)
328
+ for line in out.split('\n'):
329
+ self.announce(indent + " " + line)
330
+
331
+ # -- Config file finding/parsing methods ---------------------------
332
+
333
+ def find_config_files(self):
334
+ """Find as many configuration files as should be processed for this
335
+ platform, and return a list of filenames in the order in which they
336
+ should be parsed. The filenames returned are guaranteed to exist
337
+ (modulo nasty race conditions).
338
+
339
+ There are three possible config files: distutils.cfg in the
340
+ Distutils installation directory (ie. where the top-level
341
+ Distutils __inst__.py file lives), a file in the user's home
342
+ directory named .pydistutils.cfg on Unix and pydistutils.cfg
343
+ on Windows/Mac; and setup.cfg in the current directory.
344
+
345
+ The file in the user's home directory can be disabled with the
346
+ --no-user-cfg option.
347
+ """
348
+ files = []
349
+ check_environ()
350
+
351
+ # Where to look for the system-wide Distutils config file
352
+ sys_dir = os.path.dirname(sys.modules['distutils'].__file__)
353
+
354
+ # Look for the system config file
355
+ sys_file = os.path.join(sys_dir, "distutils.cfg")
356
+ if os.path.isfile(sys_file):
357
+ files.append(sys_file)
358
+
359
+ # What to call the per-user config file
360
+ if os.name == 'posix':
361
+ user_filename = ".pydistutils.cfg"
362
+ else:
363
+ user_filename = "pydistutils.cfg"
364
+
365
+ # And look for the user config file
366
+ if self.want_user_cfg:
367
+ user_file = os.path.join(os.path.expanduser('~'), user_filename)
368
+ if os.path.isfile(user_file):
369
+ files.append(user_file)
370
+
371
+ # All platforms support local setup.cfg
372
+ local_file = "setup.cfg"
373
+ if os.path.isfile(local_file):
374
+ files.append(local_file)
375
+
376
+ if DEBUG:
377
+ self.announce("using config files: %s" % ', '.join(files))
378
+
379
+ return files
380
+
381
+ def parse_config_files(self, filenames=None):
382
+ from configparser import ConfigParser
383
+
384
+ # Ignore install directory options if we have a venv
385
+ if sys.prefix != sys.base_prefix:
386
+ ignore_options = [
387
+ 'install-base', 'install-platbase', 'install-lib',
388
+ 'install-platlib', 'install-purelib', 'install-headers',
389
+ 'install-scripts', 'install-data', 'prefix', 'exec-prefix',
390
+ 'home', 'user', 'root']
391
+ else:
392
+ ignore_options = []
393
+
394
+ ignore_options = frozenset(ignore_options)
395
+
396
+ if filenames is None:
397
+ filenames = self.find_config_files()
398
+
399
+ if DEBUG:
400
+ self.announce("Distribution.parse_config_files():")
401
+
402
+ parser = ConfigParser()
403
+ for filename in filenames:
404
+ if DEBUG:
405
+ self.announce(" reading %s" % filename)
406
+ parser.read(filename)
407
+ for section in parser.sections():
408
+ options = parser.options(section)
409
+ opt_dict = self.get_option_dict(section)
410
+
411
+ for opt in options:
412
+ if opt != '__name__' and opt not in ignore_options:
413
+ val = parser.get(section,opt)
414
+ opt = opt.replace('-', '_')
415
+ opt_dict[opt] = (filename, val)
416
+
417
+ # Make the ConfigParser forget everything (so we retain
418
+ # the original filenames that options come from)
419
+ parser.__init__()
420
+
421
+ # If there was a "global" section in the config file, use it
422
+ # to set Distribution options.
423
+
424
+ if 'global' in self.command_options:
425
+ for (opt, (src, val)) in self.command_options['global'].items():
426
+ alias = self.negative_opt.get(opt)
427
+ try:
428
+ if alias:
429
+ setattr(self, alias, not strtobool(val))
430
+ elif opt in ('verbose', 'dry_run'): # ugh!
431
+ setattr(self, opt, strtobool(val))
432
+ else:
433
+ setattr(self, opt, val)
434
+ except ValueError as msg:
435
+ raise DistutilsOptionError(msg)
436
+
437
+ # -- Command-line parsing methods ----------------------------------
438
+
439
+ def parse_command_line(self):
440
+ """Parse the setup script's command line, taken from the
441
+ 'script_args' instance attribute (which defaults to 'sys.argv[1:]'
442
+ -- see 'setup()' in core.py). This list is first processed for
443
+ "global options" -- options that set attributes of the Distribution
444
+ instance. Then, it is alternately scanned for Distutils commands
445
+ and options for that command. Each new command terminates the
446
+ options for the previous command. The allowed options for a
447
+ command are determined by the 'user_options' attribute of the
448
+ command class -- thus, we have to be able to load command classes
449
+ in order to parse the command line. Any error in that 'options'
450
+ attribute raises DistutilsGetoptError; any error on the
451
+ command-line raises DistutilsArgError. If no Distutils commands
452
+ were found on the command line, raises DistutilsArgError. Return
453
+ true if command-line was successfully parsed and we should carry
454
+ on with executing commands; false if no errors but we shouldn't
455
+ execute commands (currently, this only happens if user asks for
456
+ help).
457
+ """
458
+ #
459
+ # We now have enough information to show the Macintosh dialog
460
+ # that allows the user to interactively specify the "command line".
461
+ #
462
+ toplevel_options = self._get_toplevel_options()
463
+
464
+ # We have to parse the command line a bit at a time -- global
465
+ # options, then the first command, then its options, and so on --
466
+ # because each command will be handled by a different class, and
467
+ # the options that are valid for a particular class aren't known
468
+ # until we have loaded the command class, which doesn't happen
469
+ # until we know what the command is.
470
+
471
+ self.commands = []
472
+ parser = FancyGetopt(toplevel_options + self.display_options)
473
+ parser.set_negative_aliases(self.negative_opt)
474
+ parser.set_aliases({'licence': 'license'})
475
+ args = parser.getopt(args=self.script_args, object=self)
476
+ option_order = parser.get_option_order()
477
+ log.set_verbosity(self.verbose)
478
+
479
+ # for display options we return immediately
480
+ if self.handle_display_options(option_order):
481
+ return
482
+ while args:
483
+ args = self._parse_command_opts(parser, args)
484
+ if args is None: # user asked for help (and got it)
485
+ return
486
+
487
+ # Handle the cases of --help as a "global" option, ie.
488
+ # "setup.py --help" and "setup.py --help command ...". For the
489
+ # former, we show global options (--verbose, --dry-run, etc.)
490
+ # and display-only options (--name, --version, etc.); for the
491
+ # latter, we omit the display-only options and show help for
492
+ # each command listed on the command line.
493
+ if self.help:
494
+ self._show_help(parser,
495
+ display_options=len(self.commands) == 0,
496
+ commands=self.commands)
497
+ return
498
+
499
+ # Oops, no commands found -- an end-user error
500
+ if not self.commands:
501
+ raise DistutilsArgError("no commands supplied")
502
+
503
+ # All is well: return true
504
+ return True
505
+
506
+ def _get_toplevel_options(self):
507
+ """Return the non-display options recognized at the top level.
508
+
509
+ This includes options that are recognized *only* at the top
510
+ level as well as options recognized for commands.
511
+ """
512
+ return self.global_options + [
513
+ ("command-packages=", None,
514
+ "list of packages that provide distutils commands"),
515
+ ]
516
+
517
+ def _parse_command_opts(self, parser, args):
518
+ """Parse the command-line options for a single command.
519
+ 'parser' must be a FancyGetopt instance; 'args' must be the list
520
+ of arguments, starting with the current command (whose options
521
+ we are about to parse). Returns a new version of 'args' with
522
+ the next command at the front of the list; will be the empty
523
+ list if there are no more commands on the command line. Returns
524
+ None if the user asked for help on this command.
525
+ """
526
+ # late import because of mutual dependence between these modules
527
+ from distutils.cmd import Command
528
+
529
+ # Pull the current command from the head of the command line
530
+ command = args[0]
531
+ if not command_re.match(command):
532
+ raise SystemExit("invalid command name '%s'" % command)
533
+ self.commands.append(command)
534
+
535
+ # Dig up the command class that implements this command, so we
536
+ # 1) know that it's a valid command, and 2) know which options
537
+ # it takes.
538
+ try:
539
+ cmd_class = self.get_command_class(command)
540
+ except DistutilsModuleError as msg:
541
+ raise DistutilsArgError(msg)
542
+
543
+ # Require that the command class be derived from Command -- want
544
+ # to be sure that the basic "command" interface is implemented.
545
+ if not issubclass(cmd_class, Command):
546
+ raise DistutilsClassError(
547
+ "command class %s must subclass Command" % cmd_class)
548
+
549
+ # Also make sure that the command object provides a list of its
550
+ # known options.
551
+ if not (hasattr(cmd_class, 'user_options') and
552
+ isinstance(cmd_class.user_options, list)):
553
+ msg = ("command class %s must provide "
554
+ "'user_options' attribute (a list of tuples)")
555
+ raise DistutilsClassError(msg % cmd_class)
556
+
557
+ # If the command class has a list of negative alias options,
558
+ # merge it in with the global negative aliases.
559
+ negative_opt = self.negative_opt
560
+ if hasattr(cmd_class, 'negative_opt'):
561
+ negative_opt = negative_opt.copy()
562
+ negative_opt.update(cmd_class.negative_opt)
563
+
564
+ # Check for help_options in command class. They have a different
565
+ # format (tuple of four) so we need to preprocess them here.
566
+ if (hasattr(cmd_class, 'help_options') and
567
+ isinstance(cmd_class.help_options, list)):
568
+ help_options = fix_help_options(cmd_class.help_options)
569
+ else:
570
+ help_options = []
571
+
572
+ # All commands support the global options too, just by adding
573
+ # in 'global_options'.
574
+ parser.set_option_table(self.global_options +
575
+ cmd_class.user_options +
576
+ help_options)
577
+ parser.set_negative_aliases(negative_opt)
578
+ (args, opts) = parser.getopt(args[1:])
579
+ if hasattr(opts, 'help') and opts.help:
580
+ self._show_help(parser, display_options=0, commands=[cmd_class])
581
+ return
582
+
583
+ if (hasattr(cmd_class, 'help_options') and
584
+ isinstance(cmd_class.help_options, list)):
585
+ help_option_found=0
586
+ for (help_option, short, desc, func) in cmd_class.help_options:
587
+ if hasattr(opts, parser.get_attr_name(help_option)):
588
+ help_option_found=1
589
+ if callable(func):
590
+ func()
591
+ else:
592
+ raise DistutilsClassError(
593
+ "invalid help function %r for help option '%s': "
594
+ "must be a callable object (function, etc.)"
595
+ % (func, help_option))
596
+
597
+ if help_option_found:
598
+ return
599
+
600
+ # Put the options from the command-line into their official
601
+ # holding pen, the 'command_options' dictionary.
602
+ opt_dict = self.get_option_dict(command)
603
+ for (name, value) in vars(opts).items():
604
+ opt_dict[name] = ("command line", value)
605
+
606
+ return args
607
+
608
+ def finalize_options(self):
609
+ """Set final values for all the options on the Distribution
610
+ instance, analogous to the .finalize_options() method of Command
611
+ objects.
612
+ """
613
+ for attr in ('keywords', 'platforms'):
614
+ value = getattr(self.metadata, attr)
615
+ if value is None:
616
+ continue
617
+ if isinstance(value, str):
618
+ value = [elm.strip() for elm in value.split(',')]
619
+ setattr(self.metadata, attr, value)
620
+
621
+ def _show_help(self, parser, global_options=1, display_options=1,
622
+ commands=[]):
623
+ """Show help for the setup script command-line in the form of
624
+ several lists of command-line options. 'parser' should be a
625
+ FancyGetopt instance; do not expect it to be returned in the
626
+ same state, as its option table will be reset to make it
627
+ generate the correct help text.
628
+
629
+ If 'global_options' is true, lists the global options:
630
+ --verbose, --dry-run, etc. If 'display_options' is true, lists
631
+ the "display-only" options: --name, --version, etc. Finally,
632
+ lists per-command help for every command name or command class
633
+ in 'commands'.
634
+ """
635
+ # late import because of mutual dependence between these modules
636
+ from distutils.core import gen_usage
637
+ from distutils.cmd import Command
638
+
639
+ if global_options:
640
+ if display_options:
641
+ options = self._get_toplevel_options()
642
+ else:
643
+ options = self.global_options
644
+ parser.set_option_table(options)
645
+ parser.print_help(self.common_usage + "\nGlobal options:")
646
+ print('')
647
+
648
+ if display_options:
649
+ parser.set_option_table(self.display_options)
650
+ parser.print_help(
651
+ "Information display options (just display " +
652
+ "information, ignore any commands)")
653
+ print('')
654
+
655
+ for command in self.commands:
656
+ if isinstance(command, type) and issubclass(command, Command):
657
+ klass = command
658
+ else:
659
+ klass = self.get_command_class(command)
660
+ if (hasattr(klass, 'help_options') and
661
+ isinstance(klass.help_options, list)):
662
+ parser.set_option_table(klass.user_options +
663
+ fix_help_options(klass.help_options))
664
+ else:
665
+ parser.set_option_table(klass.user_options)
666
+ parser.print_help("Options for '%s' command:" % klass.__name__)
667
+ print('')
668
+
669
+ print(gen_usage(self.script_name))
670
+
671
+ def handle_display_options(self, option_order):
672
+ """If there were any non-global "display-only" options
673
+ (--help-commands or the metadata display options) on the command
674
+ line, display the requested info and return true; else return
675
+ false.
676
+ """
677
+ from distutils.core import gen_usage
678
+
679
+ # User just wants a list of commands -- we'll print it out and stop
680
+ # processing now (ie. if they ran "setup --help-commands foo bar",
681
+ # we ignore "foo bar").
682
+ if self.help_commands:
683
+ self.print_commands()
684
+ print('')
685
+ print(gen_usage(self.script_name))
686
+ return 1
687
+
688
+ # If user supplied any of the "display metadata" options, then
689
+ # display that metadata in the order in which the user supplied the
690
+ # metadata options.
691
+ any_display_options = 0
692
+ is_display_option = {}
693
+ for option in self.display_options:
694
+ is_display_option[option[0]] = 1
695
+
696
+ for (opt, val) in option_order:
697
+ if val and is_display_option.get(opt):
698
+ opt = translate_longopt(opt)
699
+ value = getattr(self.metadata, "get_"+opt)()
700
+ if opt in ['keywords', 'platforms']:
701
+ print(','.join(value))
702
+ elif opt in ('classifiers', 'provides', 'requires',
703
+ 'obsoletes'):
704
+ print('\n'.join(value))
705
+ else:
706
+ print(value)
707
+ any_display_options = 1
708
+
709
+ return any_display_options
710
+
711
+ def print_command_list(self, commands, header, max_length):
712
+ """Print a subset of the list of all commands -- used by
713
+ 'print_commands()'.
714
+ """
715
+ print(header + ":")
716
+
717
+ for cmd in commands:
718
+ klass = self.cmdclass.get(cmd)
719
+ if not klass:
720
+ klass = self.get_command_class(cmd)
721
+ try:
722
+ description = klass.description
723
+ except AttributeError:
724
+ description = "(no description available)"
725
+
726
+ print(" %-*s %s" % (max_length, cmd, description))
727
+
728
+ def print_commands(self):
729
+ """Print out a help message listing all available commands with a
730
+ description of each. The list is divided into "standard commands"
731
+ (listed in distutils.command.__all__) and "extra commands"
732
+ (mentioned in self.cmdclass, but not a standard command). The
733
+ descriptions come from the command class attribute
734
+ 'description'.
735
+ """
736
+ import distutils.command
737
+ std_commands = distutils.command.__all__
738
+ is_std = {}
739
+ for cmd in std_commands:
740
+ is_std[cmd] = 1
741
+
742
+ extra_commands = []
743
+ for cmd in self.cmdclass.keys():
744
+ if not is_std.get(cmd):
745
+ extra_commands.append(cmd)
746
+
747
+ max_length = 0
748
+ for cmd in (std_commands + extra_commands):
749
+ if len(cmd) > max_length:
750
+ max_length = len(cmd)
751
+
752
+ self.print_command_list(std_commands,
753
+ "Standard commands",
754
+ max_length)
755
+ if extra_commands:
756
+ print()
757
+ self.print_command_list(extra_commands,
758
+ "Extra commands",
759
+ max_length)
760
+
761
+ def get_command_list(self):
762
+ """Get a list of (command, description) tuples.
763
+ The list is divided into "standard commands" (listed in
764
+ distutils.command.__all__) and "extra commands" (mentioned in
765
+ self.cmdclass, but not a standard command). The descriptions come
766
+ from the command class attribute 'description'.
767
+ """
768
+ # Currently this is only used on Mac OS, for the Mac-only GUI
769
+ # Distutils interface (by Jack Jansen)
770
+ import distutils.command
771
+ std_commands = distutils.command.__all__
772
+ is_std = {}
773
+ for cmd in std_commands:
774
+ is_std[cmd] = 1
775
+
776
+ extra_commands = []
777
+ for cmd in self.cmdclass.keys():
778
+ if not is_std.get(cmd):
779
+ extra_commands.append(cmd)
780
+
781
+ rv = []
782
+ for cmd in (std_commands + extra_commands):
783
+ klass = self.cmdclass.get(cmd)
784
+ if not klass:
785
+ klass = self.get_command_class(cmd)
786
+ try:
787
+ description = klass.description
788
+ except AttributeError:
789
+ description = "(no description available)"
790
+ rv.append((cmd, description))
791
+ return rv
792
+
793
+ # -- Command class/object methods ----------------------------------
794
+
795
+ def get_command_packages(self):
796
+ """Return a list of packages from which commands are loaded."""
797
+ pkgs = self.command_packages
798
+ if not isinstance(pkgs, list):
799
+ if pkgs is None:
800
+ pkgs = ''
801
+ pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != '']
802
+ if "distutils.command" not in pkgs:
803
+ pkgs.insert(0, "distutils.command")
804
+ self.command_packages = pkgs
805
+ return pkgs
806
+
807
def get_command_class(self, command):
    """Return the class that implements the named Distutils command.

    The 'cmdclass' cache is consulted first.  Otherwise each package
    returned by 'get_command_packages()' is searched for a module named
    after the command, and the identically-named class is pulled out of
    it.  A successful lookup is cached in 'cmdclass' to speed up
    future calls.

    Raises DistutilsModuleError if no package provides the command
    module, or if a module is found but lacks the expected class.
    """
    cached = self.cmdclass.get(command)
    if cached:
        return cached

    for pkgname in self.get_command_packages():
        module_name = "{}.{}".format(pkgname, command)
        try:
            __import__(module_name)
        except ImportError:
            # Not provided by this package; try the next one.
            continue
        module = sys.modules[module_name]

        try:
            klass = getattr(module, command)
        except AttributeError:
            raise DistutilsModuleError(
                "invalid command '%s' (no class '%s' in module '%s')"
                % (command, command, module_name))

        self.cmdclass[command] = klass
        return klass

    raise DistutilsModuleError("invalid command '%s'" % command)
844
+
845
def get_command_obj(self, command, create=1):
    """Return the (cached) command object for 'command'.

    If no object exists yet and 'create' is true, instantiate the
    command class, record the instance in 'command_obj', mark it as
    not-yet-run, and push any options gathered from config files or
    the command line onto it.  Returns None when the object is absent
    and 'create' is false.
    """
    cmd_obj = self.command_obj.get(command)
    if cmd_obj or not create:
        return cmd_obj

    if DEBUG:
        self.announce("Distribution.get_command_obj(): "
                      "creating '%s' command object" % command)

    klass = self.get_command_class(command)
    cmd_obj = self.command_obj[command] = klass(self)
    self.have_run[command] = 0

    # Apply options supplied in config files or on the command line.
    # (NB. error reporting is lame here: problems only surface once
    # 'finalize_options()' runs, so the source of the error is lost.)
    options = self.command_options.get(command)
    if options:
        self._set_command_options(cmd_obj, options)

    return cmd_obj
871
+
872
def _set_command_options(self, command_obj, option_dict=None):
    """Set the options for 'command_obj' from 'option_dict'.

    Basically this means copying elements of a dictionary
    ('option_dict') to attributes of an instance ('command').

    'command_obj' must be a Command instance.  If 'option_dict' is not
    supplied, uses the standard option dictionary for this command
    (from 'self.command_options').

    Raises DistutilsOptionError for an unknown option name or a value
    that cannot be coerced (e.g. a bad boolean string).
    """
    command_name = command_obj.get_command_name()
    if option_dict is None:
        option_dict = self.get_option_dict(command_name)

    if DEBUG:
        self.announce("  setting options for '%s' command:" % command_name)

    # Hoisted out of the per-option loop (previously recomputed for
    # every option): these depend only on the command object itself.
    try:
        bool_opts = [translate_longopt(o)
                     for o in command_obj.boolean_options]
    except AttributeError:
        bool_opts = []
    try:
        neg_opt = command_obj.negative_opt
    except AttributeError:
        neg_opt = {}

    for (option, (source, value)) in option_dict.items():
        if DEBUG:
            self.announce("    %s = %s (from %s)" % (option, value,
                                                     source))
        try:
            is_string = isinstance(value, str)
            # Config-file values arrive as strings, so boolean options
            # get coerced through strtobool(); negative aliases store
            # the inverted value on the aliased attribute.
            if option in neg_opt and is_string:
                setattr(command_obj, neg_opt[option], not strtobool(value))
            elif option in bool_opts and is_string:
                setattr(command_obj, option, strtobool(value))
            elif hasattr(command_obj, option):
                setattr(command_obj, option, value)
            else:
                raise DistutilsOptionError(
                    "error in %s: command '%s' has no such option '%s'"
                    % (source, command_name, option))
        except ValueError as msg:
            raise DistutilsOptionError(msg)
915
+
916
def reinitialize_command(self, command, reinit_subcommands=0):
    """Reinitializes a command to the state it was in when first
    returned by 'get_command_obj()': ie., initialized but not yet
    finalized.  This provides the opportunity to sneak option
    values in programmatically, overriding or supplementing
    user-supplied values from the config files and command line.
    You'll have to re-finalize the command object (by calling
    'finalize_options()' or 'ensure_finalized()') before using it for
    real.

    'command' should be a command name (string) or command object.  If
    'reinit_subcommands' is true, also reinitializes the command's
    sub-commands, as declared by the 'sub_commands' class attribute (if
    it has one).  See the "install" command for an example.  Only
    reinitializes the sub-commands that actually matter, ie. those
    whose test predicates return true.

    Returns the reinitialized command object.
    """
    from distutils.cmd import Command

    if isinstance(command, Command):
        command_name = command.get_command_name()
    else:
        command_name = command
        command = self.get_command_obj(command_name)

    # A command that was never finalized is already in its pristine
    # state; there is nothing to reset.
    if not command.finalized:
        return command

    command.initialize_options()
    command.finalized = 0
    self.have_run[command_name] = 0
    self._set_command_options(command)

    if reinit_subcommands:
        for sub in command.get_sub_commands():
            self.reinitialize_command(sub, reinit_subcommands)

    return command
954
+
955
+ # -- Methods that operate on the Distribution ----------------------
956
+
957
def announce(self, msg, level=log.INFO):
    """Emit 'msg' through the distutils log at the given severity."""
    log.log(level, msg)
959
+
960
def run_commands(self):
    """Run every command that was seen on the setup script command
    line, in order.

    Relies on the 'self.commands' list produced by command-line parsing
    and on 'run_command()' (which creates or reuses the per-command
    objects cached by 'get_command_obj()').
    """
    for command_name in self.commands:
        self.run_command(command_name)
967
+
968
+ # -- Methods that operate on its Commands --------------------------
969
+
970
def run_command(self, command):
    """Do whatever it takes to run a command (including nothing at all,
    if the command has already been run).  Specifically: if we have
    already created and run the command named by 'command', return
    silently without doing anything.  If the command named by 'command'
    doesn't even have a command object yet, create one.  Then invoke
    'run()' on that command object (or an existing one).
    """
    # Already been here, done that? then return silently.
    if self.have_run.get(command):
        return

    log.info("running %s", command)
    cmd_obj = self.get_command_obj(command)
    cmd_obj.ensure_finalized()
    cmd_obj.run()
    # Record completion so repeated invocations become no-ops.
    self.have_run[command] = 1
987
+
988
+ # -- Distribution query methods ------------------------------------
989
+
990
def has_pure_modules(self):
    # True if any pure-Python packages or stand-alone modules are listed.
    return len(self.packages or self.py_modules or []) > 0

def has_ext_modules(self):
    # NOTE: when no extensions are listed this returns the falsy
    # placeholder itself (None or []) rather than False; callers are
    # expected to test truthiness only.
    return self.ext_modules and len(self.ext_modules) > 0

def has_c_libraries(self):
    # Same falsy-object convention as has_ext_modules().
    return self.libraries and len(self.libraries) > 0

def has_modules(self):
    # Any Python code at all, pure or compiled?
    return self.has_pure_modules() or self.has_ext_modules()

def has_headers(self):
    return self.headers and len(self.headers) > 0

def has_scripts(self):
    return self.scripts and len(self.scripts) > 0

def has_data_files(self):
    return self.data_files and len(self.data_files) > 0

def is_pure(self):
    # A "pure" distribution contains only Python modules -- no
    # extensions, no C libraries -- and is thus platform-independent.
    return (self.has_pure_modules() and
            not self.has_ext_modules() and
            not self.has_c_libraries())
1015
+
1016
+ # -- Metadata query methods ----------------------------------------
1017
+
1018
+ # If you're looking for 'get_name()', 'get_version()', and so forth,
1019
+ # they are defined in a sneaky way: the constructor binds self.get_XXX
1020
+ # to self.metadata.get_XXX. The actual code is in the
1021
+ # DistributionMetadata class, below.
1022
+
1023
class DistributionMetadata:
    """Dummy class to hold the distribution meta-data: name, version,
    author, and so forth, with PKG-INFO read/write support.
    """

    # Basenames of the metadata accessors re-exported on Distribution
    # (the constructor there binds get_XXX to self.metadata.get_XXX).
    _METHOD_BASENAMES = ("name", "version", "author", "author_email",
                         "maintainer", "maintainer_email", "url",
                         "license", "description", "long_description",
                         "keywords", "platforms", "fullname", "contact",
                         "contact_email", "classifiers", "download_url",
                         # PEP 314
                         "provides", "requires", "obsoletes",
                         )

    def __init__(self, path=None):
        """Initialize every field to None, or populate the fields from
        the PKG-INFO file at 'path' if one is given.
        """
        if path is not None:
            # Fix: open the file in a 'with' block so the descriptor is
            # closed deterministically (it previously leaked), and read
            # it as UTF-8 to match what write_pkg_info() emits instead
            # of relying on the locale's default encoding.
            with open(path, encoding='UTF-8') as pkg_info:
                self.read_pkg_file(pkg_info)
        else:
            self.name = None
            self.version = None
            self.author = None
            self.author_email = None
            self.maintainer = None
            self.maintainer_email = None
            self.url = None
            self.license = None
            self.description = None
            self.long_description = None
            self.keywords = None
            self.platforms = None
            self.classifiers = None
            self.download_url = None
            # PEP 314
            self.provides = None
            self.requires = None
            self.obsoletes = None

    def read_pkg_file(self, file):
        """Reads the metadata values from a file object."""
        msg = message_from_file(file)

        def _read_field(name):
            # Older tools wrote a literal 'UNKNOWN' placeholder; treat
            # it the same as a missing field.
            value = msg[name]
            if value == 'UNKNOWN':
                return None
            return value

        def _read_list(name):
            values = msg.get_all(name, None)
            if values == []:
                return None
            return values

        metadata_version = msg['metadata-version']
        self.name = _read_field('name')
        self.version = _read_field('version')
        self.description = _read_field('summary')
        # we are filling author only.
        self.author = _read_field('author')
        self.maintainer = None
        self.author_email = _read_field('author-email')
        self.maintainer_email = None
        self.url = _read_field('home-page')
        self.license = _read_field('license')

        if 'download-url' in msg:
            self.download_url = _read_field('download-url')
        else:
            self.download_url = None

        self.long_description = _read_field('description')
        self.description = _read_field('summary')

        if 'keywords' in msg:
            keywords = _read_field('keywords')
            # Fix: a literal 'Keywords: UNKNOWN' header used to crash
            # with AttributeError (None.split); skip it instead.
            if keywords is not None:
                self.keywords = keywords.split(',')

        self.platforms = _read_list('platform')
        self.classifiers = _read_list('classifier')

        # PEP 314 - these fields only exist in 1.1
        if metadata_version == '1.1':
            self.requires = _read_list('requires')
            self.provides = _read_list('provides')
            self.obsoletes = _read_list('obsoletes')
        else:
            self.requires = None
            self.provides = None
            self.obsoletes = None

    def write_pkg_info(self, base_dir):
        """Write the PKG-INFO file into the release tree."""
        with open(os.path.join(base_dir, 'PKG-INFO'), 'w',
                  encoding='UTF-8') as pkg_info:
            self.write_pkg_file(pkg_info)

    def write_pkg_file(self, file):
        """Write the PKG-INFO format data to a file object."""
        version = '1.0'
        # Any PEP 314 field, classifier, or download URL bumps the
        # metadata format to 1.1.
        if (self.provides or self.requires or self.obsoletes or
                self.classifiers or self.download_url):
            version = '1.1'

        file.write('Metadata-Version: %s\n' % version)
        file.write('Name: %s\n' % self.get_name())
        file.write('Version: %s\n' % self.get_version())
        file.write('Summary: %s\n' % self.get_description())
        file.write('Home-page: %s\n' % self.get_url())
        file.write('Author: %s\n' % self.get_contact())
        file.write('Author-email: %s\n' % self.get_contact_email())
        file.write('License: %s\n' % self.get_license())
        if self.download_url:
            file.write('Download-URL: %s\n' % self.download_url)

        long_desc = rfc822_escape(self.get_long_description())
        file.write('Description: %s\n' % long_desc)

        keywords = ','.join(self.get_keywords())
        if keywords:
            file.write('Keywords: %s\n' % keywords)

        self._write_list(file, 'Platform', self.get_platforms())
        self._write_list(file, 'Classifier', self.get_classifiers())

        # PEP 314
        self._write_list(file, 'Requires', self.get_requires())
        self._write_list(file, 'Provides', self.get_provides())
        self._write_list(file, 'Obsoletes', self.get_obsoletes())

    def _write_list(self, file, name, values):
        # Emit one 'Name: value' header line per list element.
        for value in values:
            file.write('%s: %s\n' % (name, value))

    # -- Metadata query methods ----------------------------------------
    # Getters substitute 'UNKNOWN' (or an empty list) for unset fields.

    def get_name(self):
        return self.name or "UNKNOWN"

    def get_version(self):
        return self.version or "0.0.0"

    def get_fullname(self):
        return "%s-%s" % (self.get_name(), self.get_version())

    def get_author(self):
        return self.author or "UNKNOWN"

    def get_author_email(self):
        return self.author_email or "UNKNOWN"

    def get_maintainer(self):
        return self.maintainer or "UNKNOWN"

    def get_maintainer_email(self):
        return self.maintainer_email or "UNKNOWN"

    def get_contact(self):
        # The maintainer, when set, takes precedence over the author.
        return self.maintainer or self.author or "UNKNOWN"

    def get_contact_email(self):
        return self.maintainer_email or self.author_email or "UNKNOWN"

    def get_url(self):
        return self.url or "UNKNOWN"

    def get_license(self):
        return self.license or "UNKNOWN"
    # British-spelling alias kept for backward compatibility.
    get_licence = get_license

    def get_description(self):
        return self.description or "UNKNOWN"

    def get_long_description(self):
        return self.long_description or "UNKNOWN"

    def get_keywords(self):
        return self.keywords or []

    def set_keywords(self, value):
        self.keywords = _ensure_list(value, 'keywords')

    def get_platforms(self):
        return self.platforms or ["UNKNOWN"]

    def set_platforms(self, value):
        self.platforms = _ensure_list(value, 'platforms')

    def get_classifiers(self):
        return self.classifiers or []

    def set_classifiers(self, value):
        self.classifiers = _ensure_list(value, 'classifiers')

    def get_download_url(self):
        return self.download_url or "UNKNOWN"

    # PEP 314
    def get_requires(self):
        return self.requires or []

    def set_requires(self, value):
        # Validate each entry as a version predicate before accepting.
        import distutils.versionpredicate
        for v in value:
            distutils.versionpredicate.VersionPredicate(v)
        self.requires = list(value)

    def get_provides(self):
        return self.provides or []

    def set_provides(self, value):
        value = [v.strip() for v in value]
        for v in value:
            import distutils.versionpredicate
            distutils.versionpredicate.split_provision(v)
        self.provides = value

    def get_obsoletes(self):
        return self.obsoletes or []

    def set_obsoletes(self, value):
        import distutils.versionpredicate
        for v in value:
            distutils.versionpredicate.VersionPredicate(v)
        self.obsoletes = list(value)
1248
+
1249
def fix_help_options(options):
    """Convert a 4-tuple 'help_options' list as found in various command
    classes to the 3-tuple form required by FancyGetopt.
    """
    # Slicing keeps 3-tuples intact and drops the callable from 4-tuples.
    return [help_tuple[0:3] for help_tuple in options]
deepseek/lib/python3.10/distutils/extension.py ADDED
@@ -0,0 +1,241 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.extension
2
+
3
+ Provides the Extension class, used to describe C/C++ extension
4
+ modules in setup scripts."""
5
+
6
+ import os
7
+ import re
8
+ import warnings
9
+
10
+ # This class is really only used by the "build_ext" command, so it might
11
+ # make sense to put it in distutils.command.build_ext. However, that
12
+ # module is already big enough, and I want to make this class a bit more
13
+ # complex to simplify some common cases ("foo" module in "foo.c") and do
14
+ # better error-checking ("foo.c" actually exists).
15
+ #
16
+ # Also, putting this in build_ext.py means every setup script would have to
17
+ # import that large-ish module (indirectly, through distutils.core) in
18
+ # order to do anything.
19
+
20
class Extension:
    """Describes a C/C++ extension module: its dotted Python name plus
    everything the "build_ext" command needs to compile and link it
    (hopefully portably, with hooks for unportable cases).

    Instance attributes (every list defaults to empty):
      name                  -- full dotted name of the extension; *not*
                               a filename or pathname
      sources               -- source filenames, relative to the
                               distribution root, in Unix (slash) form;
                               C, C++, SWIG (.i), resource files, etc.
      include_dirs          -- C/C++ header search directories (Unix form)
      define_macros         -- (name, value-or-None) macro definitions;
                               None means "#define FOO" with no value
      undef_macros          -- macros to undefine explicitly
      library_dirs          -- library search directories at link time
      libraries             -- library names (not filenames) to link with
      runtime_library_dirs  -- library search directories at run time
      extra_objects         -- extra files to link with (object files,
                               static libs, binary resources, ...)
      extra_compile_args    -- extra compiler-specific arguments
      extra_link_args       -- extra linker-specific arguments
      export_symbols        -- symbols exported from a shared extension
      swig_opts             -- extra options passed to SWIG
      depends               -- files the extension depends on
      language              -- "c", "c++", "objc"; detected from the
                               source extensions if None
      optional              -- if true, a build failure does not abort
                               the build; the extension is just skipped
    """

    # When adding arguments to this constructor, be sure to update
    # setup_keywords in core.py.
    def __init__(self, name, sources,
                 include_dirs=None,
                 define_macros=None,
                 undef_macros=None,
                 library_dirs=None,
                 libraries=None,
                 runtime_library_dirs=None,
                 extra_objects=None,
                 extra_compile_args=None,
                 extra_link_args=None,
                 export_symbols=None,
                 swig_opts = None,
                 depends=None,
                 language=None,
                 optional=None,
                 **kw # To catch unknown keywords
                 ):
        if not isinstance(name, str):
            raise AssertionError("'name' must be a string")
        if not (isinstance(sources, list) and
                all(isinstance(v, str) for v in sources)):
            raise AssertionError("'sources' must be a list of strings")

        self.name = name
        self.sources = sources

        # Every optional list-valued argument falls back to a fresh,
        # independent empty list.
        list_valued = (('include_dirs', include_dirs),
                       ('define_macros', define_macros),
                       ('undef_macros', undef_macros),
                       ('library_dirs', library_dirs),
                       ('libraries', libraries),
                       ('runtime_library_dirs', runtime_library_dirs),
                       ('extra_objects', extra_objects),
                       ('extra_compile_args', extra_compile_args),
                       ('extra_link_args', extra_link_args),
                       ('export_symbols', export_symbols),
                       ('swig_opts', swig_opts),
                       ('depends', depends))
        for attr, supplied in list_valued:
            setattr(self, attr, supplied or [])

        self.language = language
        self.optional = optional

        # If there are unknown keyword options, warn about them
        if kw:
            unknown = ', '.join(sorted(repr(option) for option in kw))
            warnings.warn("Unknown Extension options: %s" % unknown)

    def __repr__(self):
        cls = self.__class__
        return '<%s.%s(%r) at %#x>' % (
            cls.__module__,
            cls.__qualname__,
            self.name,
            id(self))
140
+
141
+
142
def read_setup_file(filename):
    """Reads a Setup file and returns Extension instances."""
    from distutils.sysconfig import (parse_makefile, expand_makefile_vars,
                                     _variable_rx)

    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # First pass over the file to gather "VAR = VALUE" assignments.
    vars = parse_makefile(filename)

    # Second pass to gobble up the real content: lines of the form
    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    file = TextFile(filename,
                    strip_comments=1, skip_blanks=1, join_lines=1,
                    lstrip_ws=1, rstrip_ws=1)
    try:
        extensions = []

        while True:
            line = file.readline()
            if line is None:                # eof
                break
            if re.match(_variable_rx, line): # VAR=VALUE, handled in first pass
                continue

            # Directive lines like "*shared*" are recognized but ignored.
            if line[0] == line[-1] == "*":
                file.warn("'%s' lines not handled yet" % line)
                continue

            line = expand_makefile_vars(line, vars)
            words = split_quoted(line)

            # NB. this parses a slightly different syntax than the old
            # makesetup script: here, there must be exactly one extension per
            # line, and it must be the first word of the line.  I have no idea
            # why the old syntax supported multiple extensions per line, as
            # they all wind up being the same.

            module = words[0]
            ext = Extension(module, [])
            append_next_word = None

            for word in words[1:]:
                # A previous flag (-rpath/-Xlinker/-Xcompiler/bare -u)
                # claimed this word as its argument.
                if append_next_word is not None:
                    append_next_word.append(word)
                    append_next_word = None
                    continue

                suffix = os.path.splitext(word)[1]
                switch = word[0:2] ; value = word[2:]

                if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"):
                    # hmm, should we do something about C vs. C++ sources?
                    # or leave it up to the CCompiler implementation to
                    # worry about?
                    ext.sources.append(word)
                elif switch == "-I":
                    ext.include_dirs.append(value)
                elif switch == "-D":
                    equals = value.find("=")
                    if equals == -1:        # bare "-DFOO" -- no value
                        ext.define_macros.append((value, None))
                    else:                   # "-DFOO=blah"
                        # NOTE(review): 'equals+2' also skips the first
                        # character *after* '='; looks like a long-standing
                        # off-by-one kept for compatibility -- confirm
                        # against upstream before changing.
                        ext.define_macros.append((value[0:equals],
                                                  value[equals+2:]))
                elif switch == "-U":
                    ext.undef_macros.append(value)
                elif switch == "-C":        # only here 'cause makesetup has it!
                    ext.extra_compile_args.append(word)
                elif switch == "-l":
                    ext.libraries.append(value)
                elif switch == "-L":
                    ext.library_dirs.append(value)
                elif switch == "-R":
                    ext.runtime_library_dirs.append(value)
                elif word == "-rpath":
                    append_next_word = ext.runtime_library_dirs
                elif word == "-Xlinker":
                    append_next_word = ext.extra_link_args
                elif word == "-Xcompiler":
                    append_next_word = ext.extra_compile_args
                elif switch == "-u":
                    ext.extra_link_args.append(word)
                    if not value:
                        # '-u' and its symbol arrived as separate words;
                        # the symbol is picked up on the next iteration.
                        append_next_word = ext.extra_link_args
                elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
                    # NB. a really faithful emulation of makesetup would
                    # append a .o file to extra_objects only if it
                    # had a slash in it; otherwise, it would s/.o/.c/
                    # and append it to sources.  Hmmmm.
                    ext.extra_objects.append(word)
                else:
                    file.warn("unrecognized argument '%s'" % word)

            extensions.append(ext)
    finally:
        file.close()

    return extensions
deepseek/lib/python3.10/distutils/fancy_getopt.py ADDED
@@ -0,0 +1,457 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.fancy_getopt
2
+
3
+ Wrapper around the standard getopt module that provides the following
4
+ additional features:
5
+ * short and long options are tied together
6
+ * options have help strings, so fancy_getopt could potentially
7
+ create a complete usage summary
8
+ * options set attributes of a passed-in object
9
+ """
10
+
11
+ import sys, string, re
12
+ import getopt
13
+ from distutils.errors import *
14
+
15
# Much like command_re in distutils.core, this is close to but not quite
# the same as a Python NAME -- except, in the spirit of most GNU
# utilities, we use '-' in place of '_'.  (The spirit of LISP lives on!)
# The similarities to NAME are again not a coincidence...
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
longopt_re = re.compile(r'^%s$' % longopt_pat)

# For recognizing "negative alias" options, eg. "quiet=!verbose"
neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat))

# This is used to translate long options to legitimate Python identifiers
# (for use as attributes of some object): hyphens become underscores.
longopt_xlate = str.maketrans('-', '_')
28
+
29
+ class FancyGetopt:
30
+ """Wrapper around the standard 'getopt()' module that provides some
31
+ handy extra functionality:
32
+ * short and long options are tied together
33
+ * options have help strings, and help text can be assembled
34
+ from them
35
+ * options set attributes of a passed-in object
36
+ * boolean options can have "negative aliases" -- eg. if
37
+ --quiet is the "negative alias" of --verbose, then "--quiet"
38
+ on the command line sets 'verbose' to false
39
+ """
40
+
41
def __init__(self, option_table=None):
    """Set up the parser from an optional option table.

    'option_table' is a list of 3- or 4-tuples:
        (long_option, short_option, help_string [, repeatable])
    If an option takes an argument, its long_option should have '='
    appended; short_option should be a single character with no ':'
    in any case.  If a long_option has no corresponding short option,
    short_option should be None.  All tuples must have long options.
    """
    self.option_table = option_table

    # 'option_index' maps long option names to entries in the option
    # table (ie. those 3-tuples).
    self.option_index = {}
    if self.option_table:
        self._build_index()

    # 'alias' records (duh) alias options; {'foo': 'bar'} means
    # --foo is an alias for --bar
    self.alias = {}

    # 'negative_alias' keeps track of options that are the boolean
    # opposite of some other option
    self.negative_alias = {}

    # These keep track of the information in the option table.  We
    # don't actually populate these structures until we're ready to
    # parse the command-line, since the 'option_table' passed in here
    # isn't necessarily the final word.
    self.short_opts = []
    self.long_opts = []
    self.short2long = {}
    self.attr_name = {}
    self.takes_arg = {}

    # And 'option_order' is filled up in 'getopt()'; it records the
    # original order of options (and their values) on the command-line,
    # but expands short options, converts aliases, etc.
    self.option_order = []
80
+
81
def _build_index(self):
    """Rebuild 'option_index' so each long option name maps to its
    full entry in the option table."""
    self.option_index.clear()
    self.option_index.update(
        (entry[0], entry) for entry in self.option_table)
85
+
86
def set_option_table(self, option_table):
    """Replace the whole option table and rebuild the lookup index."""
    self.option_table = option_table
    self._build_index()
89
+
90
def add_option(self, long_option, short_option=None, help_string=None):
    """Append a single option to the table and index, rejecting a
    long option name that is already present."""
    if long_option in self.option_index:
        raise DistutilsGetoptError(
            "option conflict: already an option '%s'" % long_option)
    entry = (long_option, short_option, help_string)
    self.option_table.append(entry)
    self.option_index[long_option] = entry
98
+
99
def has_option(self, long_option):
    """Return true if the option table for this parser has an
    option with long name 'long_option'."""
    return long_option in self.option_index
103
+
104
def get_attr_name(self, long_option):
    """Translate long option name 'long_option' to the form it
    has as an attribute of some object: ie., translate hyphens
    to underscores (via the module-level 'longopt_xlate' table)."""
    return long_option.translate(longopt_xlate)
109
+
110
def _check_alias_dict(self, aliases, what):
    """Verify that every key and value of the alias mapping names an
    option already defined in the option table; 'what' ("alias" or
    "negative alias") is interpolated into the error messages."""
    assert isinstance(aliases, dict)
    for (alias, opt) in aliases.items():
        if alias not in self.option_index:
            raise DistutilsGetoptError(("invalid %s '%s': "
                "option '%s' not defined") % (what, alias, alias))
        if opt not in self.option_index:
            raise DistutilsGetoptError(("invalid %s '%s': "
                "aliased option '%s' not defined") % (what, alias, opt))
119
+
120
def set_aliases(self, alias):
    """Set the aliases for this option parser.

    'alias' maps option names to option names; both sides must already
    be defined in the option table."""
    self._check_alias_dict(alias, "alias")
    self.alias = alias
124
+
125
def set_negative_aliases(self, negative_alias):
    """Set the negative aliases for this option parser.
    'negative_alias' should be a dictionary mapping option names to
    option names, both the key and value must already be defined
    in the option table."""
    self._check_alias_dict(negative_alias, "negative alias")
    self.negative_alias = negative_alias
132
+
133
+ def _grok_option_table(self):
134
+ """Populate the various data structures that keep tabs on the
135
+ option table. Called by 'getopt()' before it can do anything
136
+ worthwhile.
137
+ """
138
+ self.long_opts = []
139
+ self.short_opts = []
140
+ self.short2long.clear()
141
+ self.repeat = {}
142
+
143
+ for option in self.option_table:
144
+ if len(option) == 3:
145
+ long, short, help = option
146
+ repeat = 0
147
+ elif len(option) == 4:
148
+ long, short, help, repeat = option
149
+ else:
150
+ # the option table is part of the code, so simply
151
+ # assert that it is correct
152
+ raise ValueError("invalid option tuple: %r" % (option,))
153
+
154
+ # Type- and value-check the option names
155
+ if not isinstance(long, str) or len(long) < 2:
156
+ raise DistutilsGetoptError(("invalid long option '%s': "
157
+ "must be a string of length >= 2") % long)
158
+
159
+ if (not ((short is None) or
160
+ (isinstance(short, str) and len(short) == 1))):
161
+ raise DistutilsGetoptError("invalid short option '%s': "
162
+ "must a single character or None" % short)
163
+
164
+ self.repeat[long] = repeat
165
+ self.long_opts.append(long)
166
+
167
+ if long[-1] == '=': # option takes an argument?
168
+ if short: short = short + ':'
169
+ long = long[0:-1]
170
+ self.takes_arg[long] = 1
171
+ else:
172
+ # Is option is a "negative alias" for some other option (eg.
173
+ # "quiet" == "!verbose")?
174
+ alias_to = self.negative_alias.get(long)
175
+ if alias_to is not None:
176
+ if self.takes_arg[alias_to]:
177
+ raise DistutilsGetoptError(
178
+ "invalid negative alias '%s': "
179
+ "aliased option '%s' takes a value"
180
+ % (long, alias_to))
181
+
182
+ self.long_opts[-1] = long # XXX redundant?!
183
+ self.takes_arg[long] = 0
184
+
185
+ # If this is an alias option, make sure its "takes arg" flag is
186
+ # the same as the option it's aliased to.
187
+ alias_to = self.alias.get(long)
188
+ if alias_to is not None:
189
+ if self.takes_arg[long] != self.takes_arg[alias_to]:
190
+ raise DistutilsGetoptError(
191
+ "invalid alias '%s': inconsistent with "
192
+ "aliased option '%s' (one of them takes a value, "
193
+ "the other doesn't"
194
+ % (long, alias_to))
195
+
196
+ # Now enforce some bondage on the long option name, so we can
197
+ # later translate it to an attribute name on some object. Have
198
+ # to do this a bit late to make sure we've removed any trailing
199
+ # '='.
200
+ if not longopt_re.match(long):
201
+ raise DistutilsGetoptError(
202
+ "invalid long option name '%s' "
203
+ "(must be letters, numbers, hyphens only" % long)
204
+
205
+ self.attr_name[long] = self.get_attr_name(long)
206
+ if short:
207
+ self.short_opts.append(short)
208
+ self.short2long[short[0]] = long
209
+
210
+ def getopt(self, args=None, object=None):
211
+ """Parse command-line options in args. Store as attributes on object.
212
+
213
+ If 'args' is None or not supplied, uses 'sys.argv[1:]'. If
214
+ 'object' is None or not supplied, creates a new OptionDummy
215
+ object, stores option values there, and returns a tuple (args,
216
+ object). If 'object' is supplied, it is modified in place and
217
+ 'getopt()' just returns 'args'; in both cases, the returned
218
+ 'args' is a modified copy of the passed-in 'args' list, which
219
+ is left untouched.
220
+ """
221
+ if args is None:
222
+ args = sys.argv[1:]
223
+ if object is None:
224
+ object = OptionDummy()
225
+ created_object = True
226
+ else:
227
+ created_object = False
228
+
229
+ self._grok_option_table()
230
+
231
+ short_opts = ' '.join(self.short_opts)
232
+ try:
233
+ opts, args = getopt.getopt(args, short_opts, self.long_opts)
234
+ except getopt.error as msg:
235
+ raise DistutilsArgError(msg)
236
+
237
+ for opt, val in opts:
238
+ if len(opt) == 2 and opt[0] == '-': # it's a short option
239
+ opt = self.short2long[opt[1]]
240
+ else:
241
+ assert len(opt) > 2 and opt[:2] == '--'
242
+ opt = opt[2:]
243
+
244
+ alias = self.alias.get(opt)
245
+ if alias:
246
+ opt = alias
247
+
248
+ if not self.takes_arg[opt]: # boolean option?
249
+ assert val == '', "boolean option can't have value"
250
+ alias = self.negative_alias.get(opt)
251
+ if alias:
252
+ opt = alias
253
+ val = 0
254
+ else:
255
+ val = 1
256
+
257
+ attr = self.attr_name[opt]
258
+ # The only repeating option at the moment is 'verbose'.
259
+ # It has a negative option -q quiet, which should set verbose = 0.
260
+ if val and self.repeat.get(attr) is not None:
261
+ val = getattr(object, attr, 0) + 1
262
+ setattr(object, attr, val)
263
+ self.option_order.append((opt, val))
264
+
265
+ # for opts
266
+ if created_object:
267
+ return args, object
268
+ else:
269
+ return args
270
+
271
+ def get_option_order(self):
272
+ """Returns the list of (option, value) tuples processed by the
273
+ previous run of 'getopt()'. Raises RuntimeError if
274
+ 'getopt()' hasn't been called yet.
275
+ """
276
+ if self.option_order is None:
277
+ raise RuntimeError("'getopt()' hasn't been called yet")
278
+ else:
279
+ return self.option_order
280
+
281
    def generate_help(self, header=None):
        """Generate help text (a list of strings, one per suggested line of
        output) from the option table for this FancyGetopt object.

        'header', if given, becomes the first output line; otherwise a
        generic "Option summary:" heading is used.
        """
        # Blithely assume the option table is good: probably wouldn't call
        # 'generate_help()' unless you've already called 'getopt()'.

        # First pass: determine maximum length of long option names
        max_opt = 0
        for option in self.option_table:
            long = option[0]
            short = option[1]
            l = len(long)
            if long[-1] == '=':
                # trailing '=' marks a value-taking option; it isn't printed
                l = l - 1
            if short is not None:
                l = l + 5   # " (-x)" where short == 'x'
            if l > max_opt:
                max_opt = l

        opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter

        # Typical help block looks like this:
        #   --foo       controls foonabulation
        # Help block for longest option looks like this:
        #   --flimflam  set the flim-flam level
        # and with wrapped text:
        #   --flimflam  set the flim-flam level (must be between
        #               0 and 100, except on Tuesdays)
        # Options with short names will have the short name shown (but
        # it doesn't contribute to max_opt):
        #   --foo (-f)  controls foonabulation
        # If adding the short option would make the left column too wide,
        # we push the explanation off to the next line
        #   --flimflam (-l)
        #               set the flim-flam level
        # Important parameters:
        #   - 2 spaces before option block start lines
        #   - 2 dashes for each long option name
        #   - min. 2 spaces between option and explanation (gutter)
        #   - 5 characters (incl. space) for short option name

        # Now generate lines of help text.  (If 80 columns were good enough
        # for Jesus, then 78 columns are good enough for me!)
        line_width = 78
        text_width = line_width - opt_width
        big_indent = ' ' * opt_width
        if header:
            lines = [header]
        else:
            lines = ['Option summary:']

        for option in self.option_table:
            long, short, help = option[:3]
            # wrap_text() returns [] for help=None, so 'text' may be empty
            text = wrap_text(help, text_width)
            if long[-1] == '=':
                long = long[0:-1]

            # Case 1: no short option at all (makes life easy)
            if short is None:
                if text:
                    lines.append("  --%-*s  %s" % (max_opt, long, text[0]))
                else:
                    lines.append("  --%-*s  " % (max_opt, long))

            # Case 2: we have a short option, so we have to include it
            # just after the long option
            else:
                opt_names = "%s (-%s)" % (long, short)
                if text:
                    lines.append("  --%-*s  %s" %
                                 (max_opt, opt_names, text[0]))
                else:
                    lines.append("  --%-*s" % opt_names)

            # continuation lines of wrapped help text align under column two
            for l in text[1:]:
                lines.append(big_indent + l)
        return lines
359
+
360
+ def print_help(self, header=None, file=None):
361
+ if file is None:
362
+ file = sys.stdout
363
+ for line in self.generate_help(header):
364
+ file.write(line + "\n")
365
+
366
+
367
def fancy_getopt(options, negative_opt, object, args):
    """Convenience wrapper: build a FancyGetopt parser over 'options',
    register 'negative_opt' as its negative aliases, and parse 'args'
    into 'object'."""
    getopt_parser = FancyGetopt(options)
    getopt_parser.set_negative_aliases(negative_opt)
    return getopt_parser.getopt(args, object)
371
+
372
+
373
# Translation table mapping every whitespace character to a plain space,
# used by wrap_text() to normalize input before splitting into chunks.
WS_TRANS = str.maketrans(dict.fromkeys(string.whitespace, ' '))
374
+
375
def wrap_text(text, width):
    """wrap_text(text : string, width : int) -> [string]

    Split 'text' into multiple lines of no more than 'width' characters
    each, and return the list of strings that results.  Returns [] when
    'text' is None, and the single-element list [text] when it already
    fits in 'width'.
    """
    if text is None:
        return []
    if len(text) <= width:
        return [text]

    # Normalize: expand tabs, then collapse every whitespace character
    # to a plain space so the split below only has to deal with ' '.
    text = text.expandtabs()
    text = text.translate(WS_TRANS)
    # Chunks alternate between words and runs of spaces/hyphens; keeping
    # the separators lets hyphenated words break at the hyphen.
    chunks = re.split(r'( +|-+)', text)
    chunks = [ch for ch in chunks if ch]  # ' - ' results in empty strings
    lines = []

    while chunks:
        cur_line = []                   # list of chunks (to-be-joined)
        cur_len = 0                     # length of current line

        while chunks:
            l = len(chunks[0])
            if cur_len + l <= width:    # can squeeze (at least) this chunk in
                cur_line.append(chunks[0])
                del chunks[0]
                cur_len = cur_len + l
            else:                       # this line is full
                # drop last chunk if all space
                if cur_line and cur_line[-1][0] == ' ':
                    del cur_line[-1]
                break

        if chunks:                      # any chunks left to process?
            # if the current line is still empty, then we had a single
            # chunk that's too big too fit on a line -- so we break
            # down and break it up at the line width
            if cur_len == 0:
                cur_line.append(chunks[0][0:width])
                chunks[0] = chunks[0][width:]

            # all-whitespace chunks at the end of a line can be discarded
            # (and we know from the re.split above that if a chunk has
            # *any* whitespace, it is *all* whitespace)
            if chunks[0][0] == ' ':
                del chunks[0]

        # and store this line in the list-of-all-lines -- as a single
        # string, of course!
        lines.append(''.join(cur_line))

    return lines
427
+
428
+
429
def translate_longopt(opt):
    """Turn a long option name into a valid Python identifier by mapping
    '-' to '_' (module-level twin of FancyGetopt.get_attr_name)."""
    identifier = opt.translate(longopt_xlate)
    return identifier
434
+
435
+
436
class OptionDummy:
    """Dummy class just used as a place to hold command-line option
    values as instance attributes."""

    def __init__(self, options=()):
        """Create a new OptionDummy instance.  The attributes listed in
        'options' (any iterable of attribute-name strings) will be
        initialized to None.

        The default is an immutable empty tuple rather than the old
        mutable '[]', avoiding the shared-mutable-default pitfall.
        """
        for opt in options:
            setattr(self, opt, None)
445
+
446
+
447
if __name__ == "__main__":
    # Manual smoke test: show how wrap_text() breaks a sample paragraph
    # at several widths.  Not run when the module is imported.
    text = """\
Tra-la-la, supercalifragilisticexpialidocious.
How *do* you spell that odd word, anyways?
(Someone ask Mary -- she'll know [or she'll
say, "How should I know?"].)"""

    for w in (10, 20, 30, 40):
        print("width: %d" % w)
        print("\n".join(wrap_text(text, w)))
        print()
deepseek/lib/python3.10/distutils/file_util.py ADDED
@@ -0,0 +1,238 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.file_util
2
+
3
+ Utility functions for operating on single files.
4
+ """
5
+
6
+ import os
7
+ from distutils.errors import DistutilsFileError
8
+ from distutils import log
9
+
10
+ # for generating verbose output in 'copy_file()'
11
+ _copy_action = { None: 'copying',
12
+ 'hard': 'hard linking',
13
+ 'sym': 'symbolically linking' }
14
+
15
+
16
def _copy_file_contents(src, dst, buffer_size=16*1024):
    """Copy the file 'src' to 'dst'; both must be filenames.  Any error
    opening either file, reading from 'src', or writing to 'dst', raises
    DistutilsFileError.  Data is read/written in chunks of 'buffer_size'
    bytes (default 16k).  No attempt is made to handle anything apart from
    regular files.
    """
    # Stolen from shutil module in the standard library, but with
    # custom error-handling added.  Each individual OS operation is
    # wrapped separately so the DistutilsFileError message can say
    # exactly which step (open/delete/create/read/write) failed.
    fsrc = None
    fdst = None
    try:
        try:
            fsrc = open(src, 'rb')
        except OSError as e:
            raise DistutilsFileError("could not open '%s': %s" % (src, e.strerror))

        # Remove any existing destination first so a fresh file is created
        # (also replaces a dst that is e.g. a symlink).
        if os.path.exists(dst):
            try:
                os.unlink(dst)
            except OSError as e:
                raise DistutilsFileError(
                      "could not delete '%s': %s" % (dst, e.strerror))

        try:
            fdst = open(dst, 'wb')
        except OSError as e:
            raise DistutilsFileError(
                  "could not create '%s': %s" % (dst, e.strerror))

        while True:
            try:
                buf = fsrc.read(buffer_size)
            except OSError as e:
                raise DistutilsFileError(
                      "could not read from '%s': %s" % (src, e.strerror))

            if not buf:
                break

            try:
                fdst.write(buf)
            except OSError as e:
                raise DistutilsFileError(
                      "could not write to '%s': %s" % (dst, e.strerror))
    finally:
        # Close whichever handles were successfully opened, even on error.
        if fdst:
            fdst.close()
        if fsrc:
            fsrc.close()
66
+
67
def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0,
              link=None, verbose=1, dry_run=0):
    """Copy a file 'src' to 'dst'.  If 'dst' is a directory, then 'src' is
    copied there with the same name; otherwise, it must be a filename.  (If
    the file exists, it will be ruthlessly clobbered.)  If 'preserve_mode'
    is true (the default), the file's mode (type and permission bits, or
    whatever is analogous on the current platform) is copied.  If
    'preserve_times' is true (the default), the last-modified and
    last-access times are copied as well.  If 'update' is true, 'src' will
    only be copied if 'dst' does not exist, or if 'dst' does exist but is
    older than 'src'.

    'link' allows you to make hard links (os.link) or symbolic links
    (os.symlink) instead of copying: set it to "hard" or "sym"; if it is
    None (the default), files are copied.  Don't set 'link' on systems that
    don't support it: 'copy_file()' doesn't check if hard or symbolic
    linking is available.  If hardlink fails, falls back to
    _copy_file_contents().

    Under Mac OS, uses the native file copy function in macostools; on
    other systems, uses '_copy_file_contents()' to copy file contents.

    Return a tuple (dest_name, copied): 'dest_name' is the actual name of
    the output file, and 'copied' is true if the file was copied (or would
    have been copied, if 'dry_run' true).
    """
    # XXX if the destination file already exists, we clobber it if
    # copying, but blow up if linking.  Hmmm.  And I don't know what
    # macostools.copyfile() does.  Should definitely be consistent, and
    # should probably blow up if destination exists and we would be
    # changing it (ie. it's not already a hard/soft link to src OR
    # (not update) and (src newer than dst).

    from distutils.dep_util import newer
    from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE

    if not os.path.isfile(src):
        raise DistutilsFileError(
              "can't copy '%s': doesn't exist or not a regular file" % src)

    if os.path.isdir(dst):
        dir = dst
        dst = os.path.join(dst, os.path.basename(src))
    else:
        dir = os.path.dirname(dst)

    if update and not newer(src, dst):
        if verbose >= 1:
            log.debug("not copying %s (output up-to-date)", src)
        return (dst, 0)

    try:
        action = _copy_action[link]
    except KeyError:
        raise ValueError("invalid value '%s' for 'link' argument" % link)

    if verbose >= 1:
        # show just the target directory when the name is unchanged
        if os.path.basename(dst) == os.path.basename(src):
            log.info("%s %s -> %s", action, src, dir)
        else:
            log.info("%s %s -> %s", action, src, dst)

    if dry_run:
        return (dst, 1)

    # NOTE: blank lines and comments do not end an 'if' statement, so the
    # following 'elif' clauses legally continue the 'if dry_run:' above.
    # If linking (hard or symbolic), use the appropriate system call
    # (Unix only, of course, but that's the caller's responsibility)
    elif link == 'hard':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            try:
                os.link(src, dst)
                return (dst, 1)
            except OSError:
                # If hard linking fails, fall back on copying file
                # (some special filesystems don't support hard linking
                #  even under Unix, see issue #8876).
                pass
    elif link == 'sym':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.symlink(src, dst)
            return (dst, 1)

    # Otherwise (non-Mac, not linking), copy the file contents and
    # (optionally) copy the times and mode.
    _copy_file_contents(src, dst)
    if preserve_mode or preserve_times:
        st = os.stat(src)

        # According to David Ascher <da@ski.org>, utime() should be done
        # before chmod() (at least under NT).
        if preserve_times:
            os.utime(dst, (st[ST_ATIME], st[ST_MTIME]))
        if preserve_mode:
            os.chmod(dst, S_IMODE(st[ST_MODE]))

    return (dst, 1)
163
+
164
+
165
+ # XXX I suspect this is Unix-specific -- need porting help!
166
# XXX I suspect this is Unix-specific -- need porting help!
def move_file (src, dst,
               verbose=1,
               dry_run=0):

    """Move a file 'src' to 'dst'.  If 'dst' is a directory, the file will
    be moved into it with the same name; otherwise, 'src' is just renamed
    to 'dst'.  Return the new full name of the file.

    Handles cross-device moves on Unix using 'copy_file()'.  What about
    other systems???

    Raises DistutilsFileError if 'src' is not a regular file, if 'dst'
    already exists (as a non-directory), or if the rename/copy fails.
    """
    from os.path import exists, isfile, isdir, basename, dirname
    import errno

    if verbose >= 1:
        log.info("moving %s -> %s", src, dst)

    if dry_run:
        return dst

    if not isfile(src):
        raise DistutilsFileError("can't move '%s': not a regular file" % src)

    if isdir(dst):
        dst = os.path.join(dst, basename(src))
    elif exists(dst):
        raise DistutilsFileError(
              "can't move '%s': destination '%s' already exists" %
              (src, dst))

    if not isdir(dirname(dst)):
        raise DistutilsFileError(
              "can't move '%s': destination '%s' not a valid path" %
              (src, dst))

    # First try the cheap atomic rename; fall back to copy+delete only
    # when the OS reports a cross-device link (EXDEV).
    copy_it = False
    try:
        os.rename(src, dst)
    except OSError as e:
        (num, msg) = e.args
        if num == errno.EXDEV:
            copy_it = True
        else:
            raise DistutilsFileError(
                  "couldn't move '%s' to '%s': %s" % (src, dst, msg))

    if copy_it:
        copy_file(src, dst, verbose=verbose)
        try:
            os.unlink(src)
        except OSError as e:
            (num, msg) = e.args
            # deleting the source failed: undo the copy so we don't leave
            # two files behind, then report the failure
            try:
                os.unlink(dst)
            except OSError:
                pass
            raise DistutilsFileError(
                  "couldn't move '%s' to '%s' by copy/delete: "
                  "delete '%s' failed: %s"
                  % (src, dst, src, msg))
    return dst
227
+
228
+
229
def write_file (filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it, appending a
    newline after each string.  Overwrites any existing file.
    """
    # 'with' closes the file even if a write fails -- equivalent to the
    # old explicit try/finally, but idiomatic.
    with open(filename, "w") as f:
        for line in contents:
            f.write(line + "\n")
deepseek/lib/python3.10/distutils/filelist.py ADDED
@@ -0,0 +1,327 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """distutils.filelist
2
+
3
+ Provides the FileList class, used for poking about the filesystem
4
+ and building lists of files.
5
+ """
6
+
7
+ import os, re
8
+ import fnmatch
9
+ import functools
10
+ from distutils.util import convert_path
11
+ from distutils.errors import DistutilsTemplateError, DistutilsInternalError
12
+ from distutils import log
13
+
14
class FileList:
    """A list of files built by on exploring the filesystem and filtered by
    applying various patterns to what we find there.

    Instance attributes:
      dir
        directory from which files will be taken -- only used if
        'allfiles' not supplied to constructor
      files
        list of filenames currently being built/filtered/manipulated
      allfiles
        complete list of files under consideration (ie. without any
        filtering applied)
    """

    def __init__(self, warn=None, debug_print=None):
        # ignore argument to FileList, but keep them for backwards
        # compatibility
        self.allfiles = None   # filled lazily by findall()/set_allfiles()
        self.files = []

    def set_allfiles(self, allfiles):
        """Install a precomputed master file list, bypassing findall()."""
        self.allfiles = allfiles

    def findall(self, dir=os.curdir):
        """Populate 'allfiles' with every file found under 'dir'."""
        self.allfiles = findall(dir)

    def debug_print(self, msg):
        """Print 'msg' to stdout if the global DEBUG (taken from the
        DISTUTILS_DEBUG environment variable) flag is true.
        """
        from distutils.debug import DEBUG
        if DEBUG:
            print(msg)

    # -- List-like methods ---------------------------------------------

    def append(self, item):
        self.files.append(item)

    def extend(self, items):
        self.files.extend(items)

    def sort(self):
        # Not a strict lexical sort!
        # Sorting on (dirname, basename) pairs groups files by directory.
        sortable_files = sorted(map(os.path.split, self.files))
        self.files = []
        for sort_tuple in sortable_files:
            self.files.append(os.path.join(*sort_tuple))


    # -- Other miscellaneous utility methods ---------------------------

    def remove_duplicates(self):
        # Assumes list has been sorted!
        # Iterate backwards so deleting an entry doesn't shift the
        # indices still to be visited.
        for i in range(len(self.files) - 1, 0, -1):
            if self.files[i] == self.files[i - 1]:
                del self.files[i]


    # -- "File template" methods ---------------------------------------

    def _parse_template_line(self, line):
        """Parse one MANIFEST.in-style line into
        (action, patterns, dir, dir_pattern); which of the last three are
        non-None depends on 'action'.  Raises DistutilsTemplateError for
        an unknown action or wrong number of words."""
        words = line.split()
        action = words[0]

        patterns = dir = dir_pattern = None

        if action in ('include', 'exclude',
                      'global-include', 'global-exclude'):
            if len(words) < 2:
                raise DistutilsTemplateError(
                      "'%s' expects <pattern1> <pattern2> ..." % action)
            patterns = [convert_path(w) for w in words[1:]]
        elif action in ('recursive-include', 'recursive-exclude'):
            if len(words) < 3:
                raise DistutilsTemplateError(
                      "'%s' expects <dir> <pattern1> <pattern2> ..." % action)
            dir = convert_path(words[1])
            patterns = [convert_path(w) for w in words[2:]]
        elif action in ('graft', 'prune'):
            if len(words) != 2:
                raise DistutilsTemplateError(
                      "'%s' expects a single <dir_pattern>" % action)
            dir_pattern = convert_path(words[1])
        else:
            raise DistutilsTemplateError("unknown action '%s'" % action)

        return (action, patterns, dir, dir_pattern)

    def process_template_line(self, line):
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words.  'action' is always
        # defined: it's the first word of the line.  Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            self.debug_print("include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=1):
                    log.warn("warning: no files found matching '%s'",
                             pattern)

        elif action == 'exclude':
            self.debug_print("exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=1):
                    log.warn(("warning: no previously-included files "
                              "found matching '%s'"), pattern)

        elif action == 'global-include':
            self.debug_print("global-include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=0):
                    log.warn(("warning: no files found matching '%s' "
                              "anywhere in distribution"), pattern)

        elif action == 'global-exclude':
            self.debug_print("global-exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=0):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found anywhere in distribution"),
                             pattern)

        elif action == 'recursive-include':
            self.debug_print("recursive-include %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.include_pattern(pattern, prefix=dir):
                    log.warn(("warning: no files found matching '%s' "
                              "under directory '%s'"),
                             pattern, dir)

        elif action == 'recursive-exclude':
            self.debug_print("recursive-exclude %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, prefix=dir):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found under directory '%s'"),
                             pattern, dir)

        elif action == 'graft':
            self.debug_print("graft " + dir_pattern)
            # pattern=None + prefix selects everything under the directory
            if not self.include_pattern(None, prefix=dir_pattern):
                log.warn("warning: no directories found matching '%s'",
                         dir_pattern)

        elif action == 'prune':
            self.debug_print("prune " + dir_pattern)
            if not self.exclude_pattern(None, prefix=dir_pattern):
                log.warn(("no previously-included directories found "
                          "matching '%s'"), dir_pattern)
        else:
            # _parse_template_line() already rejected unknown actions,
            # so reaching here indicates an internal inconsistency.
            raise DistutilsInternalError(
                  "this cannot happen: invalid action '%s'" % action)


    # -- Filtering/selection methods -----------------------------------

    def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
        """Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.  Patterns
        are not quite the same as implemented by the 'fnmatch' module: '*'
        and '?'  match non-special characters, where "special" is platform-
        dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match.  'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found, False otherwise.
        """
        # XXX docstring lying about what the special chars are?
        files_found = False
        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
        self.debug_print("include_pattern: applying regex r'%s'" %
                         pattern_re.pattern)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.debug_print(" adding " + name)
                self.files.append(name)
                files_found = True
        return files_found


    def exclude_pattern (self, pattern,
                         anchor=1, prefix=None, is_regex=0):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.  Other parameters are the same as for
        'include_pattern()', above.
        The list 'self.files' is modified in place.
        Return True if files are found, False otherwise.
        """
        files_found = False
        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
        self.debug_print("exclude_pattern: applying regex r'%s'" %
                         pattern_re.pattern)
        # Walk backwards so in-place deletion doesn't skip entries.
        for i in range(len(self.files)-1, -1, -1):
            if pattern_re.search(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                files_found = True
        return files_found
241
+
242
+
243
+ # ----------------------------------------------------------------------
244
+ # Utility functions
245
+
246
def _find_all_simple(path):
    """
    Find all files under 'path'
    """
    candidates = (
        os.path.join(dirpath, filename)
        for dirpath, _dirnames, filenames in os.walk(path, followlinks=True)
        for filename in filenames
    )
    # filter() keeps this lazy: callers get an iterator of regular files.
    return filter(os.path.isfile, candidates)
256
+
257
+
258
def findall(dir=os.curdir):
    """
    Find all files under 'dir' and return the list of full filenames.
    Unless dir is '.', return full filenames with dir prepended.
    """
    found = _find_all_simple(dir)
    if dir == os.curdir:
        # Re-express paths relative to '.' so they don't carry a './'.
        make_rel = functools.partial(os.path.relpath, start=dir)
        found = map(make_rel, found)
    return list(found)
268
+
269
+
270
def glob_to_re(pattern):
    """Translate a shell-like glob pattern to a regular expression; return
    a string containing the regex.  Differs from 'fnmatch.translate()' in
    that '*' does not match "special characters" (which are
    platform-specific).
    """
    translated = fnmatch.translate(pattern)

    # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
    # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
    # and by extension they shouldn't match such "special characters" under
    # any OS.  So change all non-escaped dots in the RE to match any
    # character except the special characters (currently: just os.sep).
    # On Windows the separator is itself a regex escape, so it needs a
    # doubled escape inside this regex-on-a-regex substitution.
    sep_class = r'\\\\' if os.sep == '\\' else os.sep
    return re.sub(r'((?<!\\)(\\\\)*)\.', r'\1[^%s]' % sep_class, translated)
291
+
292
+
293
def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0):
    """Translate a shell-like wildcard pattern to a compiled regular
    expression.  Return the compiled regex.  If 'is_regex' true,
    then 'pattern' is directly compiled to a regex (if it's a string)
    or just returned as-is (assumes it's a regex object).
    """
    if is_regex:
        if isinstance(pattern, str):
            return re.compile(pattern)
        else:
            return pattern

    # ditch start and end characters
    # Translating a one-character dummy pattern and splitting on it
    # recovers whatever fixed prefix/suffix fnmatch.translate() wraps
    # around every pattern (e.g. '(?s:' and ')\Z'), without hard-coding
    # a particular Python version's wrapping.
    start, _, end = glob_to_re('_').partition('_')

    if pattern:
        pattern_re = glob_to_re(pattern)
        assert pattern_re.startswith(start) and pattern_re.endswith(end)
    else:
        pattern_re = ''

    if prefix is not None:
        prefix_re = glob_to_re(prefix)
        assert prefix_re.startswith(start) and prefix_re.endswith(end)
        # strip the wrappers so prefix and pattern can be glued together
        prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
        sep = os.sep
        if os.sep == '\\':
            sep = r'\\'
        pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
        # final form: <wrap> \A prefix sep .* pattern <wrap>
        pattern_re = r'%s\A%s%s.*%s%s' % (start, prefix_re, sep, pattern_re, end)
    else:               # no prefix -- respect anchor flag
        if anchor:
            pattern_re = r'%s\A%s' % (start, pattern_re[len(start):])

    return re.compile(pattern_re)
deepseek/lib/python3.10/distutils/tests/__init__.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Test suite for distutils.
2
+
3
+ This test suite consists of a collection of test modules in the
4
+ distutils.tests package. Each test module has a name starting with
5
+ 'test' and contains a function test_suite(). The function is expected
6
+ to return an initialized unittest.TestSuite instance.
7
+
8
+ Tests for the command classes in the distutils.command package are
9
+ included in distutils.tests as well, instead of using a separate
10
+ distutils.command.tests package, since command identification is done
11
+ by import rather than matching pre-defined names.
12
+
13
+ """
14
+
15
+ import os
16
+ import sys
17
+ import unittest
18
+ from test.support import run_unittest
19
+ from test.support.warnings_helper import save_restore_warnings_filters
20
+
21
+
22
here = os.path.dirname(__file__) or os.curdir


def test_suite():
    """Collect the suites of every test_*.py module in this directory."""
    suite = unittest.TestSuite()
    for entry in os.listdir(here):
        if not (entry.startswith("test") and entry.endswith(".py")):
            continue
        modname = "distutils.tests." + entry[:-3]
        # bpo-40055: importing a test module can pull in docutils, which
        # imports pkg_resources, which installs a warnings filter; save and
        # restore the filters so they stay unchanged.
        with save_restore_warnings_filters():
            __import__(modname)
        suite.addTest(sys.modules[modname].test_suite())
    return suite


if __name__ == "__main__":
    run_unittest(test_suite())
deepseek/lib/python3.10/distutils/tests/support.py ADDED
@@ -0,0 +1,209 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Support code for distutils test cases."""
2
+ import os
3
+ import sys
4
+ import shutil
5
+ import tempfile
6
+ import unittest
7
+ import sysconfig
8
+ from copy import deepcopy
9
+ from test.support import os_helper
10
+
11
+ from distutils import log
12
+ from distutils.log import DEBUG, INFO, WARN, ERROR, FATAL
13
+ from distutils.core import Distribution
14
+
15
+
16
class LoggingSilencer(object):
    """Mix-in that silences distutils.log output and records every call.

    Records are kept as (level, msg, args) tuples in self.logs so tests
    can assert on what would have been printed.
    """

    def setUp(self):
        super().setUp()
        # raise the threshold so nothing actually reaches the console
        self.threshold = log.set_threshold(log.FATAL)
        # distutils.log predates the logging module; until it is replaced
        # the only way to capture output is to monkey-patch Log._log
        self._old_log = log.Log._log
        log.Log._log = self._log
        self.logs = []

    def tearDown(self):
        log.set_threshold(self.threshold)
        log.Log._log = self._old_log
        super().tearDown()

    def _log(self, level, msg, args):
        # replacement for Log._log: validate and record instead of printing
        valid_levels = (DEBUG, INFO, WARN, ERROR, FATAL)
        if level not in valid_levels:
            raise ValueError('%s wrong log level' % str(level))
        if not isinstance(msg, str):
            raise TypeError("msg should be str, not '%.200s'"
                            % (type(msg).__name__))
        self.logs.append((level, msg, args))

    def get_logs(self, *levels):
        """Return the formatted messages recorded at any of *levels*."""
        matched = []
        for level, msg, args in self.logs:
            if level in levels:
                matched.append(msg % args)
        return matched

    def clear_logs(self):
        """Forget every record captured so far."""
        self.logs = []
47
+
48
+
49
class TempdirManager(object):
    """Mix-in class that handles temporary directories for test cases.

    This is intended to be used with unittest.TestCase.
    """

    def setUp(self):
        super().setUp()
        # remember the cwd so tearDown can restore it before removing dirs
        self.old_cwd = os.getcwd()
        self.tempdirs = []

    def tearDown(self):
        # Restore working dir, for Solaris and derivatives, where rmdir()
        # on the current directory fails.
        os.chdir(self.old_cwd)
        super().tearDown()
        while self.tempdirs:
            tmpdir = self.tempdirs.pop()
            os_helper.rmtree(tmpdir)

    def mkdtemp(self):
        """Create a temporary directory that will be cleaned up.

        Returns the path of the directory.
        """
        d = tempfile.mkdtemp()
        self.tempdirs.append(d)
        return d

    def write_file(self, path, content='xxx'):
        """Write *content* to a file at *path*.

        path can be a string or a sequence of path components, which are
        joined with os.path.join().
        """
        if isinstance(path, (list, tuple)):
            path = os.path.join(*path)
        # use a context manager instead of the original try/finally so the
        # file is closed even if write() raises
        with open(path, 'w') as f:
            f.write(content)

    def create_dist(self, pkg_name='foo', **kw):
        """Will generate a test environment.

        This function creates:
         - a Distribution instance using keywords
         - a temporary directory with a package structure

        It returns the package directory and the distribution
        instance.
        """
        tmp_dir = self.mkdtemp()
        pkg_dir = os.path.join(tmp_dir, pkg_name)
        os.mkdir(pkg_dir)
        dist = Distribution(attrs=kw)

        return pkg_dir, dist
108
+
109
+
110
class DummyCommand:
    """Class to store options for retrieval via set_undefined_options()."""

    def __init__(self, **kwargs):
        # stash every keyword directly as an instance attribute
        self.__dict__.update(kwargs)

    def ensure_finalized(self):
        # nothing to finalize on a dummy command
        pass
119
+
120
+
121
class EnvironGuard(object):
    """Mix-in that snapshots os.environ and restores it on tearDown."""

    def setUp(self):
        super(EnvironGuard, self).setUp()
        self.old_environ = deepcopy(os.environ)

    def tearDown(self):
        snapshot = self.old_environ
        # put back every variable whose value changed (or was removed)
        for key, value in snapshot.items():
            if os.environ.get(key) != value:
                os.environ[key] = value

        # drop variables that appeared during the test
        added = [key for key in os.environ if key not in snapshot]
        for key in added:
            del os.environ[key]

        super(EnvironGuard, self).tearDown()
137
+
138
+
139
def copy_xxmodule_c(directory):
    """Helper for tests that need the xxmodule.c source file.

    Example use:

        def test_compile(self):
            copy_xxmodule_c(self.tmpdir)
            self.assertIn('xxmodule.c', os.listdir(self.tmpdir))

    If the source file can be found, it will be copied to *directory*.  If
    not, the test will be skipped.  Errors during copy are not caught.
    """
    source = _get_xxmodule_path()
    if source is not None:
        shutil.copy(source, directory)
        return
    # no copy of xxmodule.c could be located -> skip the calling test
    raise unittest.SkipTest('cannot find xxmodule.c (test must run in '
                            'the python build dir)')
156
+
157
+
158
def _get_xxmodule_path():
    """Return the first existing candidate path to xxmodule.c, or None."""
    srcdir = sysconfig.get_config_var('srcdir')
    candidates = (
        # use installed copy if available
        os.path.join(os.path.dirname(__file__), 'xxmodule.c'),
        # otherwise try using copy from build directory
        os.path.join(srcdir, 'Modules', 'xxmodule.c'),
        # srcdir mysteriously can be $srcdir/Lib/distutils/tests when
        # this file is run from its parent directory, so walk up the
        # tree to find the real srcdir
        os.path.join(srcdir, '..', '..', '..', 'Modules', 'xxmodule.c'),
    )
    # next() with a default mirrors the original loop's implicit None
    return next((path for path in candidates if os.path.exists(path)), None)
173
+
174
+
175
def fixup_build_ext(cmd):
    """Function needed to make build_ext tests pass.

    When Python was built with --enable-shared on Unix, -L. is not enough to
    find libpython<blah>.so, because regrtest runs in a tempdir, not in the
    source directory where the .so lives.

    When Python was built with in debug mode on Windows, build_ext commands
    need their debug attribute set, and it is not done automatically for
    some reason.

    This function handles both of these things.  Example use:

        cmd = build_ext(dist)
        support.fixup_build_ext(cmd)
        cmd.ensure_finalized()

    Unlike most other Unix platforms, Mac OS X embeds absolute paths
    to shared libraries into executables, so the fixup is not needed there.
    """
    if os.name == 'nt':
        # debug build executables carry a _d suffix on Windows
        cmd.debug = sys.executable.endswith('_d.exe')
        return

    if not sysconfig.get_config_var('Py_ENABLE_SHARED'):
        return

    # To further add to the shared builds fun on Unix, we can't just add
    # library_dirs to the Extension() instance because that doesn't get
    # plumbed through to the final compiler command.
    runshared = sysconfig.get_config_var('RUNSHARED')
    if runshared is None:
        cmd.library_dirs = ['.']
    elif sys.platform == 'darwin':
        cmd.library_dirs = []
    else:
        # RUNSHARED looks like NAME=dir1:dir2:...; keep the non-empty dirs
        name, equals, value = runshared.partition('=')
        cmd.library_dirs = [d for d in value.split(os.pathsep) if d]
deepseek/lib/python3.10/distutils/tests/test_filelist.py ADDED
@@ -0,0 +1,340 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Tests for distutils.filelist."""
2
+ import os
3
+ import re
4
+ import unittest
5
+ from distutils import debug
6
+ from distutils.log import WARN
7
+ from distutils.errors import DistutilsTemplateError
8
+ from distutils.filelist import glob_to_re, translate_pattern, FileList
9
+ from distutils import filelist
10
+
11
+ from test.support import os_helper
12
+ from test.support import captured_stdout, run_unittest
13
+ from distutils.tests import support
14
+
15
# Template exercising every MANIFEST.in directive that
# FileList.process_template_line() handles (include/exclude, global-*,
# recursive-*, graft and prune); consumed by test_process_template_line.
MANIFEST_IN = """\
include ok
include xo
exclude xo
include foo.tmp
include buildout.cfg
global-include *.x
global-include *.txt
global-exclude *.tmp
recursive-include f *.oo
recursive-exclude global *.x
graft dir
prune dir3
"""
29
+
30
+
31
def make_local_path(s):
    """Converts '/' in a string to os.sep"""
    return os.sep.join(s.split('/'))
34
+
35
+
36
class FileListTestCase(support.LoggingSilencer,
                       unittest.TestCase):
    # LoggingSilencer captures distutils.log records in self.logs so the
    # two helpers below can assert on warnings FileList would have printed.

    def assertNoWarnings(self):
        # nothing was logged at WARN level; reset the captured log
        self.assertEqual(self.get_logs(WARN), [])
        self.clear_logs()

    def assertWarnings(self):
        # at least one WARN record was logged; reset the captured log
        self.assertGreater(len(self.get_logs(WARN)), 0)
        self.clear_logs()

    def test_glob_to_re(self):
        # on Windows os.sep is '\\' and must be regex-escaped before being
        # interpolated into the expected pattern strings
        sep = os.sep
        if os.sep == '\\':
            sep = re.escape(os.sep)

        for glob, regex in (
            # simple cases
            ('foo*', r'(?s:foo[^%(sep)s]*)\Z'),
            ('foo?', r'(?s:foo[^%(sep)s])\Z'),
            ('foo??', r'(?s:foo[^%(sep)s][^%(sep)s])\Z'),
            # special cases
            (r'foo\\*', r'(?s:foo\\\\[^%(sep)s]*)\Z'),
            (r'foo\\\*', r'(?s:foo\\\\\\[^%(sep)s]*)\Z'),
            ('foo????', r'(?s:foo[^%(sep)s][^%(sep)s][^%(sep)s][^%(sep)s])\Z'),
            (r'foo\\??', r'(?s:foo\\\\[^%(sep)s][^%(sep)s])\Z')):
            regex = regex % {'sep': sep}
            self.assertEqual(glob_to_re(glob), regex)

    def test_process_template_line(self):
        # testing all MANIFEST.in template patterns
        file_list = FileList()
        l = make_local_path

        # simulated file list
        file_list.allfiles = ['foo.tmp', 'ok', 'xo', 'four.txt',
                              'buildout.cfg',
                              # filelist does not filter out VCS directories,
                              # it's sdist that does
                              l('.hg/last-message.txt'),
                              l('global/one.txt'),
                              l('global/two.txt'),
                              l('global/files.x'),
                              l('global/here.tmp'),
                              l('f/o/f.oo'),
                              l('dir/graft-one'),
                              l('dir/dir2/graft2'),
                              l('dir3/ok'),
                              l('dir3/sub/ok.txt'),
                              ]

        for line in MANIFEST_IN.split('\n'):
            if line.strip() == '':
                continue
            file_list.process_template_line(line)

        # expected survivors after applying the whole MANIFEST_IN template
        wanted = ['ok',
                  'buildout.cfg',
                  'four.txt',
                  l('.hg/last-message.txt'),
                  l('global/one.txt'),
                  l('global/two.txt'),
                  l('f/o/f.oo'),
                  l('dir/graft-one'),
                  l('dir/dir2/graft2'),
                  ]

        self.assertEqual(file_list.files, wanted)

    def test_debug_print(self):
        # debug_print is silent unless distutils.debug.DEBUG is set
        file_list = FileList()
        with captured_stdout() as stdout:
            file_list.debug_print('xxx')
        self.assertEqual(stdout.getvalue(), '')

        debug.DEBUG = True
        try:
            with captured_stdout() as stdout:
                file_list.debug_print('xxx')
            self.assertEqual(stdout.getvalue(), 'xxx\n')
        finally:
            debug.DEBUG = False

    def test_set_allfiles(self):
        file_list = FileList()
        files = ['a', 'b', 'c']
        file_list.set_allfiles(files)
        self.assertEqual(file_list.allfiles, files)

    def test_remove_duplicates(self):
        file_list = FileList()
        file_list.files = ['a', 'b', 'a', 'g', 'c', 'g']
        # files must be sorted beforehand (sdist does it)
        file_list.sort()
        file_list.remove_duplicates()
        self.assertEqual(file_list.files, ['a', 'b', 'c', 'g'])

    def test_translate_pattern(self):
        # not regex
        self.assertTrue(hasattr(
            translate_pattern('a', anchor=True, is_regex=False),
            'search'))

        # is a regex
        regex = re.compile('a')
        self.assertEqual(
            translate_pattern(regex, anchor=True, is_regex=True),
            regex)

        # plain string flagged as regex
        self.assertTrue(hasattr(
            translate_pattern('a', anchor=True, is_regex=True),
            'search'))

        # glob support
        self.assertTrue(translate_pattern(
            '*.py', anchor=True, is_regex=False).search('filelist.py'))

    def test_exclude_pattern(self):
        # return False if no match
        file_list = FileList()
        self.assertFalse(file_list.exclude_pattern('*.py'))

        # return True if files match
        file_list = FileList()
        file_list.files = ['a.py', 'b.py']
        self.assertTrue(file_list.exclude_pattern('*.py'))

        # test excludes
        file_list = FileList()
        file_list.files = ['a.py', 'a.txt']
        file_list.exclude_pattern('*.py')
        self.assertEqual(file_list.files, ['a.txt'])

    def test_include_pattern(self):
        # return False if no match
        file_list = FileList()
        file_list.set_allfiles([])
        self.assertFalse(file_list.include_pattern('*.py'))

        # return True if files match
        file_list = FileList()
        file_list.set_allfiles(['a.py', 'b.txt'])
        self.assertTrue(file_list.include_pattern('*.py'))

        # test * matches all files
        file_list = FileList()
        self.assertIsNone(file_list.allfiles)
        file_list.set_allfiles(['a.py', 'b.txt'])
        file_list.include_pattern('*')
        self.assertEqual(file_list.allfiles, ['a.py', 'b.txt'])

    def test_process_template(self):
        # exercises each directive individually, checking both the matching
        # and the non-matching (warning) case
        l = make_local_path
        # invalid lines
        file_list = FileList()
        for action in ('include', 'exclude', 'global-include',
                       'global-exclude', 'recursive-include',
                       'recursive-exclude', 'graft', 'prune', 'blarg'):
            self.assertRaises(DistutilsTemplateError,
                              file_list.process_template_line, action)

        # include
        file_list = FileList()
        file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')])

        file_list.process_template_line('include *.py')
        self.assertEqual(file_list.files, ['a.py'])
        self.assertNoWarnings()

        file_list.process_template_line('include *.rb')
        self.assertEqual(file_list.files, ['a.py'])
        self.assertWarnings()

        # exclude
        file_list = FileList()
        file_list.files = ['a.py', 'b.txt', l('d/c.py')]

        file_list.process_template_line('exclude *.py')
        self.assertEqual(file_list.files, ['b.txt', l('d/c.py')])
        self.assertNoWarnings()

        file_list.process_template_line('exclude *.rb')
        self.assertEqual(file_list.files, ['b.txt', l('d/c.py')])
        self.assertWarnings()

        # global-include
        file_list = FileList()
        file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')])

        file_list.process_template_line('global-include *.py')
        self.assertEqual(file_list.files, ['a.py', l('d/c.py')])
        self.assertNoWarnings()

        file_list.process_template_line('global-include *.rb')
        self.assertEqual(file_list.files, ['a.py', l('d/c.py')])
        self.assertWarnings()

        # global-exclude
        file_list = FileList()
        file_list.files = ['a.py', 'b.txt', l('d/c.py')]

        file_list.process_template_line('global-exclude *.py')
        self.assertEqual(file_list.files, ['b.txt'])
        self.assertNoWarnings()

        file_list.process_template_line('global-exclude *.rb')
        self.assertEqual(file_list.files, ['b.txt'])
        self.assertWarnings()

        # recursive-include
        file_list = FileList()
        file_list.set_allfiles(['a.py', l('d/b.py'), l('d/c.txt'),
                                l('d/d/e.py')])

        file_list.process_template_line('recursive-include d *.py')
        self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
        self.assertNoWarnings()

        file_list.process_template_line('recursive-include e *.py')
        self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
        self.assertWarnings()

        # recursive-exclude
        file_list = FileList()
        file_list.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')]

        file_list.process_template_line('recursive-exclude d *.py')
        self.assertEqual(file_list.files, ['a.py', l('d/c.txt')])
        self.assertNoWarnings()

        file_list.process_template_line('recursive-exclude e *.py')
        self.assertEqual(file_list.files, ['a.py', l('d/c.txt')])
        self.assertWarnings()

        # graft
        file_list = FileList()
        file_list.set_allfiles(['a.py', l('d/b.py'), l('d/d/e.py'),
                                l('f/f.py')])

        file_list.process_template_line('graft d')
        self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
        self.assertNoWarnings()

        file_list.process_template_line('graft e')
        self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
        self.assertWarnings()

        # prune
        file_list = FileList()
        file_list.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')]

        file_list.process_template_line('prune d')
        self.assertEqual(file_list.files, ['a.py', l('f/f.py')])
        self.assertNoWarnings()

        file_list.process_template_line('prune e')
        self.assertEqual(file_list.files, ['a.py', l('f/f.py')])
        self.assertWarnings()
+ self.assertWarnings()
295
+
296
+
297
class FindAllTestCase(unittest.TestCase):
    @os_helper.skip_unless_symlink
    def test_missing_symlink(self):
        # a dangling symlink must be skipped by findall(), not raise
        with os_helper.temp_cwd():
            os.symlink('foo', 'bar')
            self.assertEqual(filelist.findall(), [])

    def test_basic_discovery(self):
        """
        When findall is called with no parameters or with
        '.' as the parameter, the dot should be omitted from
        the results.
        """
        with os_helper.temp_cwd():
            os.mkdir('foo')
            file1 = os.path.join('foo', 'file1.txt')
            os_helper.create_empty_file(file1)
            os.mkdir('bar')
            file2 = os.path.join('bar', 'file2.txt')
            os_helper.create_empty_file(file2)
            # 'bar/...' sorts before 'foo/...', matching sorted(findall())
            expected = [file2, file1]
            self.assertEqual(sorted(filelist.findall()), expected)

    def test_non_local_discovery(self):
        """
        When findall is called with another path, the full
        path name should be returned.
        """
        with os_helper.temp_dir() as temp_dir:
            file1 = os.path.join(temp_dir, 'file1.txt')
            os_helper.create_empty_file(file1)
            expected = [file1]
            self.assertEqual(filelist.findall(temp_dir), expected)
+ self.assertEqual(filelist.findall(temp_dir), expected)
330
+
331
+
332
def test_suite():
    """Bundle both test cases of this module into one suite."""
    cases = (FileListTestCase, FindAllTestCase)
    return unittest.TestSuite(unittest.makeSuite(case) for case in cases)


if __name__ == "__main__":
    run_unittest(test_suite())
+ run_unittest(test_suite())
deepseek/lib/python3.10/distutils/tests/test_install_data.py ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Tests for distutils.command.install_data."""
2
+ import os
3
+ import unittest
4
+
5
+ from distutils.command.install_data import install_data
6
+ from distutils.tests import support
7
+ from test.support import run_unittest
8
+
9
class InstallDataTestCase(support.TempdirManager,
                          support.LoggingSilencer,
                          support.EnvironGuard,
                          unittest.TestCase):
    # End-to-end check of the install_data command against a temp dir.

    def test_simple_run(self):
        pkg_dir, dist = self.create_dist()
        cmd = install_data(dist)
        cmd.install_dir = inst = os.path.join(pkg_dir, 'inst')

        # data_files can contain
        #  - simple files
        #  - a tuple with a path, and a list of file
        one = os.path.join(pkg_dir, 'one')
        self.write_file(one, 'xxx')
        inst2 = os.path.join(pkg_dir, 'inst2')
        two = os.path.join(pkg_dir, 'two')
        self.write_file(two, 'xxx')

        cmd.data_files = [one, (inst2, [two])]
        self.assertEqual(cmd.get_inputs(), [one, (inst2, [two])])

        # let's run the command
        cmd.ensure_finalized()
        cmd.run()

        # let's check the result: 'one' lands in install_dir, 'two' in inst2
        self.assertEqual(len(cmd.get_outputs()), 2)
        rtwo = os.path.split(two)[-1]
        self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
        rone = os.path.split(one)[-1]
        self.assertTrue(os.path.exists(os.path.join(inst, rone)))
        cmd.outfiles = []

        # let's try with warn_dir one
        cmd.warn_dir = 1
        cmd.ensure_finalized()
        cmd.run()

        # let's check the result
        self.assertEqual(len(cmd.get_outputs()), 2)
        self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
        self.assertTrue(os.path.exists(os.path.join(inst, rone)))
        cmd.outfiles = []

        # now using root and empty dir
        cmd.root = os.path.join(pkg_dir, 'root')
        # NOTE(review): inst3 is computed but never used below -- the
        # directory actually passed is the relative literal 'inst3'; verify
        # intent against install_data's directory handling.
        inst3 = os.path.join(cmd.install_dir, 'inst3')
        inst4 = os.path.join(pkg_dir, 'inst4')
        three = os.path.join(cmd.install_dir, 'three')
        self.write_file(three, 'xx')
        cmd.data_files = [one, (inst2, [two]),
                          ('inst3', [three]),
                          (inst4, [])]
        cmd.ensure_finalized()
        cmd.run()

        # let's check the result
        self.assertEqual(len(cmd.get_outputs()), 4)
        self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
        self.assertTrue(os.path.exists(os.path.join(inst, rone)))
+
71
def test_suite():
    """Return a suite holding every test in InstallDataTestCase."""
    suite = unittest.makeSuite(InstallDataTestCase)
    return suite


if __name__ == "__main__":
    run_unittest(test_suite())
deepseek/lib/python3.10/distutils/tests/test_msvccompiler.py ADDED
@@ -0,0 +1,81 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Tests for distutils._msvccompiler."""
2
+ import sys
3
+ import unittest
4
+ import os
5
+
6
+ from distutils.errors import DistutilsPlatformError
7
+ from distutils.tests import support
8
+ from test.support import run_unittest
9
+
10
+
11
# None means "do not skip"; any other value is the reason shown by skipUnless.
if sys.platform == "win32":
    SKIP_MESSAGE = None
else:
    SKIP_MESSAGE = "These tests are only for win32"
13
+
14
@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE)
class msvccompilerTestCase(support.TempdirManager,
                           unittest.TestCase):
    # Windows-only checks for distutils._msvccompiler's Visual C++ lookup.

    def test_no_compiler(self):
        import distutils._msvccompiler as _msvccompiler
        # makes sure query_vcvarsall raises
        # a DistutilsPlatformError if the compiler
        # is not found
        def _find_vcvarsall(plat_spec):
            # stand-in that simulates "no compiler installed"
            return None, None

        old_find_vcvarsall = _msvccompiler._find_vcvarsall
        _msvccompiler._find_vcvarsall = _find_vcvarsall
        try:
            self.assertRaises(DistutilsPlatformError,
                              _msvccompiler._get_vc_env,
                              'wont find this version')
        finally:
            # always restore the real lookup, even if the assertion fails
            _msvccompiler._find_vcvarsall = old_find_vcvarsall

    def test_get_vc_env_unicode(self):
        import distutils._msvccompiler as _msvccompiler

        # non-ASCII name/value exercise the env round-trip through vcvarsall
        test_var = 'ṰḖṤṪ┅ṼẨṜ'
        test_value = '₃⁴₅'

        # Ensure we don't early exit from _get_vc_env
        old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None)
        os.environ[test_var] = test_value
        try:
            env = _msvccompiler._get_vc_env('x86')
            # keys come back lower-cased from _get_vc_env
            self.assertIn(test_var.lower(), env)
            self.assertEqual(test_value, env[test_var.lower()])
        finally:
            os.environ.pop(test_var)
            if old_distutils_use_sdk:
                os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk

    def test_get_vc2017(self):
        import distutils._msvccompiler as _msvccompiler

        # This function cannot be mocked, so pass it if we find VS 2017
        # and mark it skipped if we do not.
        version, path = _msvccompiler._find_vc2017()
        if version:
            self.assertGreaterEqual(version, 15)
            self.assertTrue(os.path.isdir(path))
        else:
            raise unittest.SkipTest("VS 2017 is not installed")

    def test_get_vc2015(self):
        import distutils._msvccompiler as _msvccompiler

        # This function cannot be mocked, so pass it if we find VS 2015
        # and mark it skipped if we do not.
        version, path = _msvccompiler._find_vc2015()
        if version:
            self.assertGreaterEqual(version, 14)
            self.assertTrue(os.path.isdir(path))
        else:
            raise unittest.SkipTest("VS 2015 is not installed")
+ raise unittest.SkipTest("VS 2015 is not installed")
76
+
77
def test_suite():
    """Return a suite holding every test in msvccompilerTestCase."""
    suite = unittest.makeSuite(msvccompilerTestCase)
    return suite


if __name__ == "__main__":
    run_unittest(test_suite())
deepseek/lib/python3.10/distutils/text_file.py ADDED
@@ -0,0 +1,286 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """text_file
2
+
3
+ provides the TextFile class, which gives an interface to text files
4
+ that (optionally) takes care of stripping comments, ignoring blank
5
+ lines, and joining lines with backslashes."""
6
+
7
+ import sys, io
8
+
9
+
10
+ class TextFile:
11
+ """Provides a file-like object that takes care of all the things you
12
+ commonly want to do when processing a text file that has some
13
+ line-by-line syntax: strip comments (as long as "#" is your
14
+ comment character), skip blank lines, join adjacent lines by
15
+ escaping the newline (ie. backslash at end of line), strip
16
+ leading and/or trailing whitespace. All of these are optional
17
+ and independently controllable.
18
+
19
+ Provides a 'warn()' method so you can generate warning messages that
20
+ report physical line number, even if the logical line in question
21
+ spans multiple physical lines. Also provides 'unreadline()' for
22
+ implementing line-at-a-time lookahead.
23
+
24
+ Constructor is called as:
25
+
26
+ TextFile (filename=None, file=None, **options)
27
+
28
+ It bombs (RuntimeError) if both 'filename' and 'file' are None;
29
+ 'filename' should be a string, and 'file' a file object (or
30
+ something that provides 'readline()' and 'close()' methods). It is
31
+ recommended that you supply at least 'filename', so that TextFile
32
+ can include it in warning messages. If 'file' is not supplied,
33
+ TextFile creates its own using 'io.open()'.
34
+
35
+ The options are all boolean, and affect the value returned by
36
+ 'readline()':
37
+ strip_comments [default: true]
38
+ strip from "#" to end-of-line, as well as any whitespace
39
+ leading up to the "#" -- unless it is escaped by a backslash
40
+ lstrip_ws [default: false]
41
+ strip leading whitespace from each line before returning it
42
+ rstrip_ws [default: true]
43
+ strip trailing whitespace (including line terminator!) from
44
+ each line before returning it
45
+ skip_blanks [default: true}
46
+ skip lines that are empty *after* stripping comments and
47
+ whitespace. (If both lstrip_ws and rstrip_ws are false,
48
+ then some lines may consist of solely whitespace: these will
49
+ *not* be skipped, even if 'skip_blanks' is true.)
50
+ join_lines [default: false]
51
+ if a backslash is the last non-newline character on a line
52
+ after stripping comments and whitespace, join the following line
53
+ to it to form one "logical line"; if N consecutive lines end
54
+ with a backslash, then N+1 physical lines will be joined to
55
+ form one logical line.
56
+ collapse_join [default: false]
57
+ strip leading whitespace from lines that are joined to their
58
+ predecessor; only matters if (join_lines and not lstrip_ws)
59
+ errors [default: 'strict']
60
+ error handler used to decode the file content
61
+
62
+ Note that since 'rstrip_ws' can strip the trailing newline, the
63
+ semantics of 'readline()' must differ from those of the builtin file
64
+ object's 'readline()' method! In particular, 'readline()' returns
65
+ None for end-of-file: an empty string might just be a blank line (or
66
+ an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is
67
+ not."""
68
+
69
    # Per-instance option defaults (documented in the class docstring);
    # __init__ turns each key into an attribute, overridable via a
    # constructor keyword argument of the same name.
    default_options = { 'strip_comments': 1,
                        'skip_blanks': 1,
                        'lstrip_ws': 0,
                        'rstrip_ws': 1,
                        'join_lines': 0,
                        'collapse_join': 0,
                        'errors': 'strict',
                      }
77
+
78
+ def __init__(self, filename=None, file=None, **options):
79
+ """Construct a new TextFile object. At least one of 'filename'
80
+ (a string) and 'file' (a file-like object) must be supplied.
81
+ They keyword argument options are described above and affect
82
+ the values returned by 'readline()'."""
83
+ if filename is None and file is None:
84
+ raise RuntimeError("you must supply either or both of 'filename' and 'file'")
85
+
86
+ # set values for all options -- either from client option hash
87
+ # or fallback to default_options
88
+ for opt in self.default_options.keys():
89
+ if opt in options:
90
+ setattr(self, opt, options[opt])
91
+ else:
92
+ setattr(self, opt, self.default_options[opt])
93
+
94
+ # sanity check client option hash
95
+ for opt in options.keys():
96
+ if opt not in self.default_options:
97
+ raise KeyError("invalid TextFile option '%s'" % opt)
98
+
99
+ if file is None:
100
+ self.open(filename)
101
+ else:
102
+ self.filename = filename
103
+ self.file = file
104
+ self.current_line = 0 # assuming that file is at BOF!
105
+
106
+ # 'linebuf' is a stack of lines that will be emptied before we
107
+ # actually read from the file; it's only populated by an
108
+ # 'unreadline()' operation
109
+ self.linebuf = []
110
+
111
+ def open(self, filename):
112
+ """Open a new file named 'filename'. This overrides both the
113
+ 'filename' and 'file' arguments to the constructor."""
114
+ self.filename = filename
115
+ self.file = io.open(self.filename, 'r', errors=self.errors)
116
+ self.current_line = 0
117
+
118
+ def close(self):
119
+ """Close the current file and forget everything we know about it
120
+ (filename, current line number)."""
121
+ file = self.file
122
+ self.file = None
123
+ self.filename = None
124
+ self.current_line = None
125
+ file.close()
126
+
127
+ def gen_error(self, msg, line=None):
128
+ outmsg = []
129
+ if line is None:
130
+ line = self.current_line
131
+ outmsg.append(self.filename + ", ")
132
+ if isinstance(line, (list, tuple)):
133
+ outmsg.append("lines %d-%d: " % tuple(line))
134
+ else:
135
+ outmsg.append("line %d: " % line)
136
+ outmsg.append(str(msg))
137
+ return "".join(outmsg)
138
+
139
    def error(self, msg, line=None):
        """Raise ValueError with a message tagged with filename and line
        number (see 'gen_error()'); 'line' overrides the current line."""
        raise ValueError("error: " + self.gen_error(msg, line))
141
+
142
    def warn(self, msg, line=None):
        """Print (to stderr) a warning message tied to the current logical
        line in the current file.  If the current logical line in the
        file spans multiple physical lines, the warning refers to the
        whole range, eg. "lines 3-5".  If 'line' supplied, it overrides
        the current line number; it may be a list or tuple to indicate a
        range of physical lines, or an integer for a single physical
        line."""
        sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n")
151
+
152
+ def readline(self):
153
+ """Read and return a single logical line from the current file (or
154
+ from an internal buffer if lines have previously been "unread"
155
+ with 'unreadline()'). If the 'join_lines' option is true, this
156
+ may involve reading multiple physical lines concatenated into a
157
+ single string. Updates the current line number, so calling
158
+ 'warn()' after 'readline()' emits a warning about the physical
159
+ line(s) just read. Returns None on end-of-file, since the empty
160
+ string can occur if 'rstrip_ws' is true but 'strip_blanks' is
161
+ not."""
162
+ # If any "unread" lines waiting in 'linebuf', return the top
163
+ # one. (We don't actually buffer read-ahead data -- lines only
164
+ # get put in 'linebuf' if the client explicitly does an
165
+ # 'unreadline()'.
166
+ if self.linebuf:
167
+ line = self.linebuf[-1]
168
+ del self.linebuf[-1]
169
+ return line
170
+
171
+ buildup_line = ''
172
+
173
+ while True:
174
+ # read the line, make it None if EOF
175
+ line = self.file.readline()
176
+ if line == '':
177
+ line = None
178
+
179
+ if self.strip_comments and line:
180
+
181
+ # Look for the first "#" in the line. If none, never
182
+ # mind. If we find one and it's the first character, or
183
+ # is not preceded by "\", then it starts a comment --
184
+ # strip the comment, strip whitespace before it, and
185
+ # carry on. Otherwise, it's just an escaped "#", so
186
+ # unescape it (and any other escaped "#"'s that might be
187
+ # lurking in there) and otherwise leave the line alone.
188
+
189
+ pos = line.find("#")
190
+ if pos == -1: # no "#" -- no comments
191
+ pass
192
+
193
+ # It's definitely a comment -- either "#" is the first
194
+ # character, or it's elsewhere and unescaped.
195
+ elif pos == 0 or line[pos-1] != "\\":
196
+ # Have to preserve the trailing newline, because it's
197
+ # the job of a later step (rstrip_ws) to remove it --
198
+ # and if rstrip_ws is false, we'd better preserve it!
199
+ # (NB. this means that if the final line is all comment
200
+ # and has no trailing newline, we will think that it's
201
+ # EOF; I think that's OK.)
202
+ eol = (line[-1] == '\n') and '\n' or ''
203
+ line = line[0:pos] + eol
204
+
205
+ # If all that's left is whitespace, then skip line
206
+ # *now*, before we try to join it to 'buildup_line' --
207
+ # that way constructs like
208
+ # hello \\
209
+ # # comment that should be ignored
210
+ # there
211
+ # result in "hello there".
212
+ if line.strip() == "":
213
+ continue
214
+ else: # it's an escaped "#"
215
+ line = line.replace("\\#", "#")
216
+
217
+ # did previous line end with a backslash? then accumulate
218
+ if self.join_lines and buildup_line:
219
+ # oops: end of file
220
+ if line is None:
221
+ self.warn("continuation line immediately precedes "
222
+ "end-of-file")
223
+ return buildup_line
224
+
225
+ if self.collapse_join:
226
+ line = line.lstrip()
227
+ line = buildup_line + line
228
+
229
+ # careful: pay attention to line number when incrementing it
230
+ if isinstance(self.current_line, list):
231
+ self.current_line[1] = self.current_line[1] + 1
232
+ else:
233
+ self.current_line = [self.current_line,
234
+ self.current_line + 1]
235
+ # just an ordinary line, read it as usual
236
+ else:
237
+ if line is None: # eof
238
+ return None
239
+
240
+ # still have to be careful about incrementing the line number!
241
+ if isinstance(self.current_line, list):
242
+ self.current_line = self.current_line[1] + 1
243
+ else:
244
+ self.current_line = self.current_line + 1
245
+
246
+ # strip whitespace however the client wants (leading and
247
+ # trailing, or one or the other, or neither)
248
+ if self.lstrip_ws and self.rstrip_ws:
249
+ line = line.strip()
250
+ elif self.lstrip_ws:
251
+ line = line.lstrip()
252
+ elif self.rstrip_ws:
253
+ line = line.rstrip()
254
+
255
+ # blank line (whether we rstrip'ed or not)? skip to next line
256
+ # if appropriate
257
+ if (line == '' or line == '\n') and self.skip_blanks:
258
+ continue
259
+
260
+ if self.join_lines:
261
+ if line[-1] == '\\':
262
+ buildup_line = line[:-1]
263
+ continue
264
+
265
+ if line[-2:] == '\\\n':
266
+ buildup_line = line[0:-2] + '\n'
267
+ continue
268
+
269
+ # well, I guess there's some actual content there: return it
270
+ return line
271
+
272
+ def readlines(self):
273
+ """Read and return the list of all logical lines remaining in the
274
+ current file."""
275
+ lines = []
276
+ while True:
277
+ line = self.readline()
278
+ if line is None:
279
+ return lines
280
+ lines.append(line)
281
+
282
+ def unreadline(self, line):
283
+ """Push 'line' (a string) onto an internal buffer that will be
284
+ checked by future 'readline()' calls. Handy for implementing
285
+ a parser with line-at-a-time lookahead."""
286
+ self.linebuf.append(line)
deepseek/lib/python3.10/distutils/versionpredicate.py ADDED
@@ -0,0 +1,166 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Module for parsing and testing package version predicate strings.
2
+ """
3
+ import re
4
+ import distutils.version
5
+ import operator
6
+
7
+
8
# Leading dotted package name, with everything after it captured for
# further parsing.
re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)",
                             re.ASCII)

# The contents of a parenthesized restriction list.
re_paren = re.compile(r"^\s*\((.*)\)\s*$")

# A single comparison, e.g. ">= 1.0": operator plus version string.
re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")


def splitUp(pred):
    """Parse a single version comparison such as ">= 1.0".

    Return a (comparison-operator string, StrictVersion) pair; raise
    ValueError if 'pred' is not a valid restriction.
    """
    m = re_splitComparison.match(pred)
    if not m:
        raise ValueError("bad package restriction syntax: %r" % pred)
    comp, verStr = m.groups()
    return (comp, distutils.version.StrictVersion(verStr))


# Map each comparison operator string to its rich-comparison function.
compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq,
           ">": operator.gt, ">=": operator.ge, "!=": operator.ne}
30
+
31
class VersionPredicate:
    """Parse and test package version predicates.

    >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)')

    The `name` attribute provides the full dotted name that is given::

        >>> v.name
        'pyepat.abc'

    The str() of a `VersionPredicate` provides a normalized
    human-readable version of the expression::

        >>> print(v)
        pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3)

    The `satisfied_by()` method tells whether a given version number is
    inside the set described by the restrictions::

        >>> v.satisfied_by('1.1')
        True
        >>> v.satisfied_by('1555.1b3')
        False

    Extra whitespace is tolerated everywhere.  A version string that
    StrictVersion rejects, a malformed package name, or a missing
    parenthesized list all raise ValueError.
    """

    def __init__(self, versionPredicateStr):
        """Parse a version predicate string.

        Raises:
            ValueError: on an empty string, a bad package name, a
                malformed restriction list, or an invalid version number.
        """
        # Fields:
        #   name: package name
        #   pred: list of (comparison string, StrictVersion)
        versionPredicateStr = versionPredicateStr.strip()
        if not versionPredicateStr:
            raise ValueError("empty package restriction")
        match = re_validPackage.match(versionPredicateStr)
        if not match:
            raise ValueError("bad package name in %r" % versionPredicateStr)
        self.name, paren = match.groups()
        paren = paren.strip()
        if paren:
            match = re_paren.match(paren)
            if not match:
                raise ValueError("expected parenthesized list: %r" % paren)
            # Fix: the original bound this to a local named 'str',
            # shadowing the builtin inside this method.
            predlist = match.groups()[0]
            self.pred = [splitUp(aPred) for aPred in predlist.split(",")]
            if not self.pred:
                raise ValueError("empty parenthesized list in %r"
                                 % versionPredicateStr)
        else:
            self.pred = []

    def __str__(self):
        if self.pred:
            seq = [cond + " " + str(ver) for cond, ver in self.pred]
            return self.name + " (" + ", ".join(seq) + ")"
        else:
            return self.name

    def satisfied_by(self, version):
        """True if version is compatible with all the predicates in self.

        The parameter version must be acceptable to the StrictVersion
        constructor.  It may be either a string or StrictVersion.
        """
        for cond, ver in self.pred:
            if not compmap[cond](version, ver):
                return False
        return True
139
+
140
+
141
# Lazily compiled pattern used by split_provision() below.
_provision_rx = None


def split_provision(value):
    """Return the name and optional version number of a provision.

    The version number, if given, will be returned as a `StrictVersion`
    instance, otherwise it will be `None`.

    >>> split_provision('mypkg')
    ('mypkg', None)
    >>> split_provision(' mypkg( 1.2 ) ')
    ('mypkg', StrictVersion ('1.2'))
    """
    global _provision_rx
    if _provision_rx is None:
        # Compile on first use -- most runs never call this function.
        _provision_rx = re.compile(
            r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$",
            re.ASCII)
    value = value.strip()
    m = _provision_rx.match(value)
    if not m:
        raise ValueError("illegal provides specification: %r" % value)
    ver = m.group(2) or None
    if ver:
        ver = distutils.version.StrictVersion(ver)
    return m.group(1), ver
deepseek/lib/python3.10/ipaddress.py ADDED
@@ -0,0 +1,2361 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2007 Google Inc.
2
+ # Licensed to PSF under a Contributor Agreement.
3
+
4
+ """A fast, lightweight IPv4/IPv6 manipulation library in Python.
5
+
6
+ This library is used to create/poke/manipulate IPv4 and IPv6 addresses
7
+ and networks.
8
+
9
+ """
10
+
11
+ __version__ = '1.0'
12
+
13
+
14
+ import functools
15
+
16
IPV4LENGTH = 32     # bits in an IPv4 address
IPV6LENGTH = 128    # bits in an IPv6 address
18
+
19
+
20
class AddressValueError(ValueError):
    """A Value Error related to the address."""
22
+
23
+
24
class NetmaskValueError(ValueError):
    """A Value Error related to the netmask."""
26
+
27
+
28
def ip_address(address):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address.  Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Address or IPv6Address object.

    Raises:
        ValueError: if the *address* passed isn't either a v4 or a v6
          address.

    """
    # Try v4 first so small integers default to IPv4.
    for klass in (IPv4Address, IPv6Address):
        try:
            return klass(address)
        except (AddressValueError, NetmaskValueError):
            pass

    raise ValueError(f'{address!r} does not appear to be an IPv4 or IPv6 address')
55
+
56
+
57
def ip_network(address, strict=True):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP network.  Either IPv4 or
          IPv6 networks may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Network or IPv6Network object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address.  Or if the network has host bits set.

    """
    # Try v4 first so small integers default to IPv4.
    for klass in (IPv4Network, IPv6Network):
        try:
            return klass(address, strict)
        except (AddressValueError, NetmaskValueError):
            pass

    raise ValueError(f'{address!r} does not appear to be an IPv4 or IPv6 network')
84
+
85
+
86
def ip_interface(address):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address.  Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Interface or IPv6Interface object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address.

    Notes:
        The IPv?Interface classes describe an Address on a particular
        Network, so they're basically a combination of both the Address
        and Network classes.

    """
    # Try v4 first so small integers default to IPv4.
    for klass in (IPv4Interface, IPv6Interface):
        try:
            return klass(address)
        except (AddressValueError, NetmaskValueError):
            pass

    raise ValueError(f'{address!r} does not appear to be an IPv4 or IPv6 interface')
118
+
119
+
120
def v4_int_to_packed(address):
    """Represent an address as 4 packed bytes in network (big-endian) order.

    Args:
        address: An integer representation of an IPv4 IP address.

    Returns:
        The integer address packed as 4 bytes in network (big-endian) order.

    Raises:
        ValueError: If the integer is negative or too large to be an
          IPv4 IP address.

    """
    try:
        return address.to_bytes(4, 'big')
    except OverflowError:
        # int.to_bytes raises OverflowError both for negatives and for
        # values that don't fit in 32 bits; normalise to ValueError.
        raise ValueError("Address negative or too large for IPv4")
138
+
139
+
140
def v6_int_to_packed(address):
    """Represent an address as 16 packed bytes in network (big-endian) order.

    Args:
        address: An integer representation of an IPv6 IP address.

    Returns:
        The integer address packed as 16 bytes in network (big-endian) order.

    """
    try:
        return address.to_bytes(16, 'big')
    except OverflowError:
        # Negative or wider than 128 bits.
        raise ValueError("Address negative or too large for IPv6")
154
+
155
+
156
+ def _split_optional_netmask(address):
157
+ """Helper to split the netmask and raise AddressValueError if needed"""
158
+ addr = str(address).split('/')
159
+ if len(addr) > 2:
160
+ raise AddressValueError(f"Only one '/' permitted in {address!r}")
161
+ return addr
162
+
163
+
164
+ def _find_address_range(addresses):
165
+ """Find a sequence of sorted deduplicated IPv#Address.
166
+
167
+ Args:
168
+ addresses: a list of IPv#Address objects.
169
+
170
+ Yields:
171
+ A tuple containing the first and last IP addresses in the sequence.
172
+
173
+ """
174
+ it = iter(addresses)
175
+ first = last = next(it)
176
+ for ip in it:
177
+ if ip._ip != last._ip + 1:
178
+ yield first, last
179
+ first = ip
180
+ last = ip
181
+ yield first, last
182
+
183
+
184
+ def _count_righthand_zero_bits(number, bits):
185
+ """Count the number of zero bits on the right hand side.
186
+
187
+ Args:
188
+ number: an integer.
189
+ bits: maximum number of bits to count.
190
+
191
+ Returns:
192
+ The number of zero bits on the right hand side of the number.
193
+
194
+ """
195
+ if number == 0:
196
+ return bits
197
+ return min(bits, (~number & (number-1)).bit_length())
198
+
199
+
200
def summarize_address_range(first, last):
    """Summarize a network range given the first and last IP addresses.

    Example:
        >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
        ...                              IPv4Address('192.0.2.130')))
        ... #doctest: +NORMALIZE_WHITESPACE
        [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
         IPv4Network('192.0.2.130/32')]

    Args:
        first: the first IPv4Address or IPv6Address in the range.
        last: the last IPv4Address or IPv6Address in the range.

    Returns:
        An iterator of the summarized IPv(4|6) network objects.

    Raise:
        TypeError:
            If the first and last objects are not IP addresses.
            If the first and last objects are not the same version.
        ValueError:
            If the last object is not greater than the first.
            If the version of the first address is not 4 or 6.

    """
    if (not (isinstance(first, _BaseAddress) and
             isinstance(last, _BaseAddress))):
        raise TypeError('first and last must be IP addresses, not networks')
    if first.version != last.version:
        raise TypeError("%s and %s are not of the same version" % (
                         first, last))
    if first > last:
        raise ValueError('last IP address must be greater than first')

    if first.version == 4:
        ip = IPv4Network
    elif first.version == 6:
        ip = IPv6Network
    else:
        raise ValueError('unknown IP version')

    ip_bits = first._max_prefixlen
    first_int = first._ip
    last_int = last._ip
    while first_int <= last_int:
        # The widest block aligned at first_int that does not overshoot
        # last_int.
        nbits = min(_count_righthand_zero_bits(first_int, ip_bits),
                    (last_int - first_int + 1).bit_length() - 1)
        yield ip((first_int, ip_bits - nbits))
        first_int += 1 << nbits
        # Stop before wrapping past the top of the address space.
        if first_int - 1 == ip._ALL_ONES:
            break
253
+
254
+
255
def _collapse_addresses_internal(addresses):
    """Loops through the addresses, collapsing concurrent netblocks.

    Example:

        ip1 = IPv4Network('192.0.2.0/26')
        ip2 = IPv4Network('192.0.2.64/26')
        ip3 = IPv4Network('192.0.2.128/26')
        ip4 = IPv4Network('192.0.2.192/26')

        _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
          [IPv4Network('192.0.2.0/24')]

    This shouldn't be called directly; it is called via
      collapse_addresses([]).

    Args:
        addresses: A list of IPv4Network's or IPv6Network's

    Returns:
        A list of IPv4Network's or IPv6Network's depending on what we
        were passed.

    """
    # Phase 1: pair sibling subnets up into their common supernet,
    # retrying one level up whenever both halves are present.
    to_merge = list(addresses)
    subnets = {}
    while to_merge:
        net = to_merge.pop()
        supernet = net.supernet()
        existing = subnets.get(supernet)
        if existing is None:
            subnets[supernet] = net
        elif existing != net:
            # Both halves of 'supernet' exist: merge them.
            del subnets[supernet]
            to_merge.append(supernet)
    # Phase 2: emit in sorted order, skipping any network already
    # covered by a previously emitted one.
    last = None
    for net in sorted(subnets.values()):
        if last is not None:
            # Sorting guarantees last.network_address <= net.network_address.
            if last.broadcast_address >= net.broadcast_address:
                continue
        yield net
        last = net
302
+
303
+
304
def collapse_addresses(addresses):
    """Collapse a list of IP objects.

    Example:
        collapse_addresses([IPv4Network('192.0.2.0/25'),
                            IPv4Network('192.0.2.128/25')]) ->
          [IPv4Network('192.0.2.0/24')]

    Args:
        addresses: An iterable of IPv4Network or IPv6Network objects.

    Returns:
        An iterator of the collapsed IPv(4|6)Network objects.

    Raises:
        TypeError: If passed a list of mixed version objects.

    """
    summarized = []
    singletons = []     # bare addresses, plus /32 (or /128) networks
    networks = []       # everything that still has host bits

    # Split the input into loose addresses and real networks, checking
    # that versions are never mixed within either list.
    for ip in addresses:
        if isinstance(ip, _BaseAddress):
            if singletons and singletons[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                                 ip, singletons[-1]))
            singletons.append(ip)
        elif ip._prefixlen == ip._max_prefixlen:
            # A single-address network is treated like a plain address.
            if singletons and singletons[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                                 ip, singletons[-1]))
            try:
                singletons.append(ip.ip)
            except AttributeError:
                singletons.append(ip.network_address)
        else:
            if networks and networks[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                                 ip, networks[-1]))
            networks.append(ip)

    # Sort and dedup the loose addresses.
    singletons = sorted(set(singletons))

    # Turn each consecutive run of addresses into its minimal network
    # cover.
    if singletons:
        for first, last in _find_address_range(singletons):
            summarized.extend(summarize_address_range(first, last))

    return _collapse_addresses_internal(summarized + networks)
356
+
357
+
358
def get_mixed_type_key(obj):
    """Return a key suitable for sorting between networks and addresses.

    Address and Network objects are not sortable by default; they're
    fundamentally different, so the expression

        IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')

    doesn't make any sense.  There are some times however, where you may
    wish to have ipaddress sort these for you anyway.  If you need to do
    this, you can use this function as the key= argument to sorted().

    Args:
        obj: either a Network or Address object.
    Returns:
        appropriate key.

    """
    if isinstance(obj, _BaseNetwork):
        return obj._get_networks_key()
    if isinstance(obj, _BaseAddress):
        return obj._get_address_key()
    return NotImplemented
381
+
382
+
383
+ class _IPAddressBase:
384
+
385
+ """The mother class."""
386
+
387
+ __slots__ = ()
388
+
389
+ @property
390
+ def exploded(self):
391
+ """Return the longhand version of the IP address as a string."""
392
+ return self._explode_shorthand_ip_string()
393
+
394
+ @property
395
+ def compressed(self):
396
+ """Return the shorthand version of the IP address as a string."""
397
+ return str(self)
398
+
399
+ @property
400
+ def reverse_pointer(self):
401
+ """The name of the reverse DNS pointer for the IP address, e.g.:
402
+ >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
403
+ '1.0.0.127.in-addr.arpa'
404
+ >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
405
+ '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'
406
+
407
+ """
408
+ return self._reverse_pointer()
409
+
410
+ @property
411
+ def version(self):
412
+ msg = '%200s has no version specified' % (type(self),)
413
+ raise NotImplementedError(msg)
414
+
415
+ def _check_int_address(self, address):
416
+ if address < 0:
417
+ msg = "%d (< 0) is not permitted as an IPv%d address"
418
+ raise AddressValueError(msg % (address, self._version))
419
+ if address > self._ALL_ONES:
420
+ msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
421
+ raise AddressValueError(msg % (address, self._max_prefixlen,
422
+ self._version))
423
+
424
+ def _check_packed_address(self, address, expected_len):
425
+ address_len = len(address)
426
+ if address_len != expected_len:
427
+ msg = "%r (len %d != %d) is not permitted as an IPv%d address"
428
+ raise AddressValueError(msg % (address, address_len,
429
+ expected_len, self._version))
430
+
431
+ @classmethod
432
+ def _ip_int_from_prefix(cls, prefixlen):
433
+ """Turn the prefix length into a bitwise netmask
434
+
435
+ Args:
436
+ prefixlen: An integer, the prefix length.
437
+
438
+ Returns:
439
+ An integer.
440
+
441
+ """
442
+ return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)
443
+
444
+ @classmethod
445
+ def _prefix_from_ip_int(cls, ip_int):
446
+ """Return prefix length from the bitwise netmask.
447
+
448
+ Args:
449
+ ip_int: An integer, the netmask in expanded bitwise format
450
+
451
+ Returns:
452
+ An integer, the prefix length.
453
+
454
+ Raises:
455
+ ValueError: If the input intermingles zeroes & ones
456
+ """
457
+ trailing_zeroes = _count_righthand_zero_bits(ip_int,
458
+ cls._max_prefixlen)
459
+ prefixlen = cls._max_prefixlen - trailing_zeroes
460
+ leading_ones = ip_int >> trailing_zeroes
461
+ all_ones = (1 << prefixlen) - 1
462
+ if leading_ones != all_ones:
463
+ byteslen = cls._max_prefixlen // 8
464
+ details = ip_int.to_bytes(byteslen, 'big')
465
+ msg = 'Netmask pattern %r mixes zeroes & ones'
466
+ raise ValueError(msg % details)
467
+ return prefixlen
468
+
469
+ @classmethod
470
+ def _report_invalid_netmask(cls, netmask_str):
471
+ msg = '%r is not a valid netmask' % netmask_str
472
+ raise NetmaskValueError(msg) from None
473
+
474
+ @classmethod
475
+ def _prefix_from_prefix_string(cls, prefixlen_str):
476
+ """Return prefix length from a numeric string
477
+
478
+ Args:
479
+ prefixlen_str: The string to be converted
480
+
481
+ Returns:
482
+ An integer, the prefix length.
483
+
484
+ Raises:
485
+ NetmaskValueError: If the input is not a valid netmask
486
+ """
487
+ # int allows a leading +/- as well as surrounding whitespace,
488
+ # so we ensure that isn't the case
489
+ if not (prefixlen_str.isascii() and prefixlen_str.isdigit()):
490
+ cls._report_invalid_netmask(prefixlen_str)
491
+ try:
492
+ prefixlen = int(prefixlen_str)
493
+ except ValueError:
494
+ cls._report_invalid_netmask(prefixlen_str)
495
+ if not (0 <= prefixlen <= cls._max_prefixlen):
496
+ cls._report_invalid_netmask(prefixlen_str)
497
+ return prefixlen
498
+
499
+ @classmethod
500
+ def _prefix_from_ip_string(cls, ip_str):
501
+ """Turn a netmask/hostmask string into a prefix length
502
+
503
+ Args:
504
+ ip_str: The netmask/hostmask to be converted
505
+
506
+ Returns:
507
+ An integer, the prefix length.
508
+
509
+ Raises:
510
+ NetmaskValueError: If the input is not a valid netmask/hostmask
511
+ """
512
+ # Parse the netmask/hostmask like an IP address.
513
+ try:
514
+ ip_int = cls._ip_int_from_string(ip_str)
515
+ except AddressValueError:
516
+ cls._report_invalid_netmask(ip_str)
517
+
518
+ # Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
519
+ # Note that the two ambiguous cases (all-ones and all-zeroes) are
520
+ # treated as netmasks.
521
+ try:
522
+ return cls._prefix_from_ip_int(ip_int)
523
+ except ValueError:
524
+ pass
525
+
526
+ # Invert the bits, and try matching a /0+1+/ hostmask instead.
527
+ ip_int ^= cls._ALL_ONES
528
+ try:
529
+ return cls._prefix_from_ip_int(ip_int)
530
+ except ValueError:
531
+ cls._report_invalid_netmask(ip_str)
532
+
533
+ @classmethod
534
+ def _split_addr_prefix(cls, address):
535
+ """Helper function to parse address of Network/Interface.
536
+
537
+ Arg:
538
+ address: Argument of Network/Interface.
539
+
540
+ Returns:
541
+ (addr, prefix) tuple.
542
+ """
543
+ # a packed address or integer
544
+ if isinstance(address, (bytes, int)):
545
+ return address, cls._max_prefixlen
546
+
547
+ if not isinstance(address, tuple):
548
+ # Assume input argument to be string or any object representation
549
+ # which converts into a formatted IP prefix string.
550
+ address = _split_optional_netmask(address)
551
+
552
+ # Constructing from a tuple (addr, [mask])
553
+ if len(address) > 1:
554
+ return address
555
+ return address[0], cls._max_prefixlen
556
+
557
+ def __reduce__(self):
558
+ return self.__class__, (str(self),)
559
+
560
+
561
# Cached compiled pattern for the 'b'/'x'/'X'/'n' presentation types used by
# _BaseAddress.__format__; lazily compiled on first use to avoid importing
# re at module load time.
_address_fmt_re = None
562
+
563
@functools.total_ordering
class _BaseAddress(_IPAddressBase):

    """A generic IP object.

    This IP class contains the version independent methods which are
    used by single IP addresses.
    """

    __slots__ = ()

    def __int__(self):
        return self._ip

    def __eq__(self, other):
        try:
            return (self._ip == other._ip
                    and self._version == other._version)
        except AttributeError:
            return NotImplemented

    def __lt__(self, other):
        if not isinstance(other, _BaseAddress):
            return NotImplemented
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                             self, other))
        if self._ip != other._ip:
            return self._ip < other._ip
        return False

    # Shorthand for Integer addition and subtraction. This is not
    # meant to ever support addition/subtraction of addresses.
    def __add__(self, other):
        if not isinstance(other, int):
            return NotImplemented
        return self.__class__(int(self) + other)

    def __sub__(self, other):
        if not isinstance(other, int):
            return NotImplemented
        return self.__class__(int(self) - other)

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, str(self))

    def __str__(self):
        return str(self._string_from_ip_int(self._ip))

    def __hash__(self):
        return hash(hex(int(self._ip)))

    def _get_address_key(self):
        # Sort key: version first so v4 and v6 addresses order consistently.
        return (self._version, self)

    def __reduce__(self):
        return self.__class__, (self._ip,)

    def __format__(self, fmt):
        """Returns an IP address as a formatted string.

        Supported presentation types are:
        's': returns the IP address as a string (default)
        'b': converts to binary and returns a zero-padded string
        'X' or 'x': converts to upper- or lower-case hex and returns a zero-padded string
        'n': the same as 'b' for IPv4 and 'x' for IPv6

        For binary and hex presentation types, the alternate form specifier
        '#' and the grouping option '_' are supported.
        """

        # Support string formatting
        if not fmt or fmt[-1] == 's':
            return format(str(self), fmt)

        # From here on down, support for 'bnXx'
        global _address_fmt_re
        if _address_fmt_re is None:
            import re
            _address_fmt_re = re.compile('(#?)(_?)([xbnX])')

        m = _address_fmt_re.fullmatch(fmt)
        if not m:
            return super().__format__(fmt)

        alternate, grouping, fmt_base = m.groups()

        # Set some defaults
        if fmt_base == 'n':
            if self._version == 4:
                fmt_base = 'b'  # Binary is default for ipv4
            else:
                fmt_base = 'x'  # Hex is default for ipv6

        if fmt_base == 'b':
            padlen = self._max_prefixlen
        else:
            padlen = self._max_prefixlen // 4

        if grouping:
            padlen += padlen // 4 - 1

        if alternate:
            padlen += 2  # 0b or 0x

        return format(int(self), f'{alternate}0{padlen}{grouping}{fmt_base}')
669
+
670
+
671
@functools.total_ordering
class _BaseNetwork(_IPAddressBase):
    """A generic IP network object.

    This IP class contains the version independent methods which are
    used by networks.
    """

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, str(self))

    def __str__(self):
        return '%s/%d' % (self.network_address, self.prefixlen)

    def hosts(self):
        """Generate Iterator over usable hosts in a network.

        This is like __iter__ except it doesn't return the network
        or broadcast addresses.

        """
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        for x in range(network + 1, broadcast):
            yield self._address_class(x)

    def __iter__(self):
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        for x in range(network, broadcast + 1):
            yield self._address_class(x)

    def __getitem__(self, n):
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        if n >= 0:
            if network + n > broadcast:
                raise IndexError('address out of range')
            return self._address_class(network + n)
        else:
            # Negative index: count back from the broadcast address.
            n += 1
            if broadcast + n < network:
                raise IndexError('address out of range')
            return self._address_class(broadcast + n)

    def __lt__(self, other):
        if not isinstance(other, _BaseNetwork):
            return NotImplemented
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                             self, other))
        if self.network_address != other.network_address:
            return self.network_address < other.network_address
        if self.netmask != other.netmask:
            return self.netmask < other.netmask
        return False

    def __eq__(self, other):
        try:
            return (self._version == other._version and
                    self.network_address == other.network_address and
                    int(self.netmask) == int(other.netmask))
        except AttributeError:
            return NotImplemented

    def __hash__(self):
        return hash(int(self.network_address) ^ int(self.netmask))

    def __contains__(self, other):
        # always false if one is v4 and the other is v6.
        if self._version != other._version:
            return False
        # dealing with another network.
        if isinstance(other, _BaseNetwork):
            return False
        # dealing with another address
        else:
            # address
            return other._ip & self.netmask._ip == self.network_address._ip

    def overlaps(self, other):
        """Tell if self is partly contained in other."""
        return self.network_address in other or (
            self.broadcast_address in other or (
                other.network_address in self or (
                    other.broadcast_address in self)))

    @functools.cached_property
    def broadcast_address(self):
        return self._address_class(int(self.network_address) |
                                   int(self.hostmask))

    @functools.cached_property
    def hostmask(self):
        return self._address_class(int(self.netmask) ^ self._ALL_ONES)

    @property
    def with_prefixlen(self):
        return '%s/%d' % (self.network_address, self._prefixlen)

    @property
    def with_netmask(self):
        return '%s/%s' % (self.network_address, self.netmask)

    @property
    def with_hostmask(self):
        return '%s/%s' % (self.network_address, self.hostmask)

    @property
    def num_addresses(self):
        """Number of hosts in the current subnet."""
        return int(self.broadcast_address) - int(self.network_address) + 1

    @property
    def _address_class(self):
        # Returning bare address objects (rather than interfaces) allows for
        # more consistent behaviour across the network address, broadcast
        # address and individual host addresses.
        msg = '%200s has no associated address class' % (type(self),)
        raise NotImplementedError(msg)

    @property
    def prefixlen(self):
        return self._prefixlen

    def address_exclude(self, other):
        """Remove an address from a larger block.

        For example:

            addr1 = ip_network('192.0.2.0/28')
            addr2 = ip_network('192.0.2.1/32')
            list(addr1.address_exclude(addr2)) =
                [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
                 IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]

        or IPv6:

            addr1 = ip_network('2001:db8::1/32')
            addr2 = ip_network('2001:db8::1/128')
            list(addr1.address_exclude(addr2)) =
                [ip_network('2001:db8::1/128'),
                 ip_network('2001:db8::2/127'),
                 ip_network('2001:db8::4/126'),
                 ip_network('2001:db8::8/125'),
                 ...
                 ip_network('2001:db8:8000::/33')]

        Args:
            other: An IPv4Network or IPv6Network object of the same type.

        Returns:
            An iterator of the IPv(4|6)Network objects which is self
            minus other.

        Raises:
            TypeError: If self and other are of differing address
              versions, or if other is not a network object.
            ValueError: If other is not completely contained by self.

        """
        if not self._version == other._version:
            raise TypeError("%s and %s are not of the same version" % (
                             self, other))

        if not isinstance(other, _BaseNetwork):
            raise TypeError("%s is not a network object" % other)

        if not other.subnet_of(self):
            raise ValueError('%s not contained in %s' % (other, self))
        if other == self:
            return

        # Make sure we're comparing the network of other.
        other = other.__class__('%s/%s' % (other.network_address,
                                           other.prefixlen))

        # Repeatedly halve self; yield the half that does not contain
        # other and descend into the half that does.
        s1, s2 = self.subnets()
        while s1 != other and s2 != other:
            if other.subnet_of(s1):
                yield s2
                s1, s2 = s1.subnets()
            elif other.subnet_of(s2):
                yield s1
                s1, s2 = s2.subnets()
            else:
                # If we got here, there's a bug somewhere.
                raise AssertionError('Error performing exclusion: '
                                     's1: %s s2: %s other: %s' %
                                     (s1, s2, other))
        if s1 == other:
            yield s2
        elif s2 == other:
            yield s1
        else:
            # If we got here, there's a bug somewhere.
            raise AssertionError('Error performing exclusion: '
                                 's1: %s s2: %s other: %s' %
                                 (s1, s2, other))

    def compare_networks(self, other):
        """Compare two IP objects.

        This is only concerned about the comparison of the integer
        representation of the network addresses.  This means that the
        host bits aren't considered at all in this method.  If you want
        to compare host bits, you can easily enough do a
        'HostA._ip < HostB._ip'

        Args:
            other: An IP object.

        Returns:
            If the IP versions of self and other are the same, returns:

            -1 if self < other:
              eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
              IPv6Network('2001:db8::1000/124') <
                  IPv6Network('2001:db8::2000/124')
            0 if self == other
              eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
              IPv6Network('2001:db8::1000/124') ==
                  IPv6Network('2001:db8::1000/124')
            1 if self > other
              eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
                  IPv6Network('2001:db8::2000/124') >
                      IPv6Network('2001:db8::1000/124')

        Raises:
            TypeError if the IP versions are different.

        """
        # does this need to raise a ValueError?
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same type' % (
                             self, other))
        # self._version == other._version below here:
        if self.network_address < other.network_address:
            return -1
        if self.network_address > other.network_address:
            return 1
        # self.network_address == other.network_address below here:
        if self.netmask < other.netmask:
            return -1
        if self.netmask > other.netmask:
            return 1
        return 0

    def _get_networks_key(self):
        """Network-only key function.

        Returns an object that identifies this address' network and
        netmask. This function is a suitable "key" argument for sorted()
        and list.sort().

        """
        return (self._version, self.network_address, self.netmask)

    def subnets(self, prefixlen_diff=1, new_prefix=None):
        """The subnets which join to make the current subnet.

        In the case that self contains only one IP
        (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
        for IPv6), yield an iterator with just ourself.

        Args:
            prefixlen_diff: An integer, the amount the prefix length
              should be increased by. This should not be set if
              new_prefix is also set.
            new_prefix: The desired new prefix length. This must be a
              larger number (smaller prefix) than the existing prefix.
              This should not be set if prefixlen_diff is also set.

        Returns:
            An iterator of IPv(4|6) objects.

        Raises:
            ValueError: The prefixlen_diff is too small or too large.
                OR
            prefixlen_diff and new_prefix are both set or new_prefix
              is a smaller number than the current prefix (smaller
              number means a larger network)

        """
        if self._prefixlen == self._max_prefixlen:
            yield self
            return

        if new_prefix is not None:
            if new_prefix < self._prefixlen:
                raise ValueError('new prefix must be longer')
            if prefixlen_diff != 1:
                raise ValueError('cannot set prefixlen_diff and new_prefix')
            prefixlen_diff = new_prefix - self._prefixlen

        if prefixlen_diff < 0:
            raise ValueError('prefix length diff must be > 0')
        new_prefixlen = self._prefixlen + prefixlen_diff

        if new_prefixlen > self._max_prefixlen:
            raise ValueError(
                'prefix length diff %d is invalid for netblock %s' % (
                    new_prefixlen, self))

        start = int(self.network_address)
        end = int(self.broadcast_address) + 1
        step = (int(self.hostmask) + 1) >> prefixlen_diff
        for new_addr in range(start, end, step):
            current = self.__class__((new_addr, new_prefixlen))
            yield current

    def supernet(self, prefixlen_diff=1, new_prefix=None):
        """The supernet containing the current network.

        Args:
            prefixlen_diff: An integer, the amount the prefix length of
              the network should be decreased by.  For example, given a
              /24 network and a prefixlen_diff of 3, a supernet with a
              /21 netmask is returned.

        Returns:
            An IPv4 network object.

        Raises:
            ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
              a negative prefix length.
                OR
            If prefixlen_diff and new_prefix are both set or new_prefix is a
              larger number than the current prefix (larger number means a
              smaller network)

        """
        if self._prefixlen == 0:
            return self

        if new_prefix is not None:
            if new_prefix > self._prefixlen:
                raise ValueError('new prefix must be shorter')
            if prefixlen_diff != 1:
                raise ValueError('cannot set prefixlen_diff and new_prefix')
            prefixlen_diff = self._prefixlen - new_prefix

        new_prefixlen = self.prefixlen - prefixlen_diff
        if new_prefixlen < 0:
            raise ValueError(
                'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
                (self.prefixlen, prefixlen_diff))
        return self.__class__((
            int(self.network_address) & (int(self.netmask) << prefixlen_diff),
            new_prefixlen
            ))

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.

        """
        return (self.network_address.is_multicast and
                self.broadcast_address.is_multicast)

    @staticmethod
    def _is_subnet_of(a, b):
        try:
            # Always false if one is v4 and the other is v6.
            if a._version != b._version:
                raise TypeError(f"{a} and {b} are not of the same version")
            return (b.network_address <= a.network_address and
                    b.broadcast_address >= a.broadcast_address)
        except AttributeError:
            raise TypeError(f"Unable to test subnet containment "
                            f"between {a} and {b}")

    def subnet_of(self, other):
        """Return True if this network is a subnet of other."""
        return self._is_subnet_of(self, other)

    def supernet_of(self, other):
        """Return True if this network is a supernet of other."""
        return self._is_subnet_of(other, self)

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.

        """
        return (self.network_address.is_reserved and
                self.broadcast_address.is_reserved)

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.

        """
        return (self.network_address.is_link_local and
                self.broadcast_address.is_link_local)

    @property
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.

        """
        return (self.network_address.is_private and
                self.broadcast_address.is_private)

    @property
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.

        """
        return not self.is_private

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.

        """
        return (self.network_address.is_unspecified and
                self.broadcast_address.is_unspecified)

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.

        """
        return (self.network_address.is_loopback and
                self.broadcast_address.is_loopback)
1124
+
1125
class _BaseV4:

    """Base IPv4 object.

    The following methods are used by IPv4 objects in both single IP
    addresses and networks.

    """

    __slots__ = ()
    _version = 4
    # Equivalent to 255.255.255.255 or 32 bits of 1's.
    _ALL_ONES = (2**IPV4LENGTH) - 1

    _max_prefixlen = IPV4LENGTH
    # There are only a handful of valid v4 netmasks, so we cache them all
    # when constructed (see _make_netmask()).
    _netmask_cache = {}

    def _explode_shorthand_ip_string(self):
        # IPv4 has no shorthand form, so the exploded form is just str(self).
        return str(self)

    @classmethod
    def _make_netmask(cls, arg):
        """Make a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        """
        if arg not in cls._netmask_cache:
            if isinstance(arg, int):
                prefixlen = arg
                if not (0 <= prefixlen <= cls._max_prefixlen):
                    cls._report_invalid_netmask(prefixlen)
            else:
                try:
                    # Check for a netmask in prefix length form
                    prefixlen = cls._prefix_from_prefix_string(arg)
                except NetmaskValueError:
                    # Check for a netmask or hostmask in dotted-quad form.
                    # This may raise NetmaskValueError.
                    prefixlen = cls._prefix_from_ip_string(arg)
            netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
            cls._netmask_cache[arg] = netmask, prefixlen
        return cls._netmask_cache[arg]

    @classmethod
    def _ip_int_from_string(cls, ip_str):
        """Turn the given IP string into an integer for comparison.

        Args:
            ip_str: A string, the IP ip_str.

        Returns:
            The IP ip_str as an integer.

        Raises:
            AddressValueError: if ip_str isn't a valid IPv4 Address.

        """
        if not ip_str:
            raise AddressValueError('Address cannot be empty')

        octets = ip_str.split('.')
        if len(octets) != 4:
            raise AddressValueError("Expected 4 octets in %r" % ip_str)

        try:
            return int.from_bytes(map(cls._parse_octet, octets), 'big')
        except ValueError as exc:
            raise AddressValueError("%s in %r" % (exc, ip_str)) from None

    @classmethod
    def _parse_octet(cls, octet_str):
        """Convert a decimal octet into an integer.

        Args:
            octet_str: A string, the number to parse.

        Returns:
            The octet as an integer.

        Raises:
            ValueError: if the octet isn't strictly a decimal from [0..255].

        """
        if not octet_str:
            raise ValueError("Empty octet not permitted")
        # Reject non-ASCII digits.
        if not (octet_str.isascii() and octet_str.isdigit()):
            msg = "Only decimal digits permitted in %r"
            raise ValueError(msg % octet_str)
        # We do the length check second, since the invalid character error
        # is likely to be more informative for the user
        if len(octet_str) > 3:
            msg = "At most 3 characters permitted in %r"
            raise ValueError(msg % octet_str)
        # Handle leading zeros as strict as glibc's inet_pton()
        # See security bug bpo-36384
        if octet_str != '0' and octet_str[0] == '0':
            msg = "Leading zeros are not permitted in %r"
            raise ValueError(msg % octet_str)
        # Convert to integer (we know digits are legal)
        octet_int = int(octet_str, 10)
        if octet_int > 255:
            raise ValueError("Octet %d (> 255) not permitted" % octet_int)
        return octet_int

    @classmethod
    def _string_from_ip_int(cls, ip_int):
        """Turns a 32-bit integer into dotted decimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            The IP address as a string in dotted decimal notation.

        """
        return '.'.join(map(str, ip_int.to_bytes(4, 'big')))

    def _reverse_pointer(self):
        """Return the reverse DNS pointer name for the IPv4 address.

        This implements the method described in RFC1035 3.5.

        """
        reverse_octets = str(self).split('.')[::-1]
        return '.'.join(reverse_octets) + '.in-addr.arpa'

    @property
    def max_prefixlen(self):
        return self._max_prefixlen

    @property
    def version(self):
        return self._version
1264
+
1265
+
1266
class IPv4Address(_BaseV4, _BaseAddress):

    """Represent and manipulate single IPv4 Addresses."""

    __slots__ = ('_ip', '__weakref__')

    def __init__(self, address):

        """
        Args:
            address: A string or integer representing the IP

              Additionally, an integer can be passed, so
              IPv4Address('192.0.2.1') == IPv4Address(3221225985).
              or, more generally
              IPv4Address(int(IPv4Address('192.0.2.1'))) ==
                IPv4Address('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.

        """
        # Efficient constructor from integer.
        if isinstance(address, int):
            self._check_int_address(address)
            self._ip = address
            return

        # Constructing from a packed address
        if isinstance(address, bytes):
            self._check_packed_address(address, 4)
            self._ip = int.from_bytes(address, 'big')
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP string.
        addr_str = str(address)
        if '/' in addr_str:
            raise AddressValueError(f"Unexpected '/' in {address!r}")
        self._ip = self._ip_int_from_string(addr_str)

    @property
    def packed(self):
        """The binary representation of this address."""
        return v4_int_to_packed(self._ip)

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

         Returns:
             A boolean, True if the address is within the
             reserved IPv4 Network range.

        """
        return self in self._constants._reserved_network

    @property
    @functools.lru_cache()
    def is_private(self):
        """``True`` if the address is defined as not globally reachable by
        iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_
        (for IPv6) with the following exceptions:

        * ``is_private`` is ``False`` for ``100.64.0.0/10``
        * For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the
            semantics of the underlying IPv4 addresses and the following condition holds
            (see :attr:`IPv6Address.ipv4_mapped`)::

                address.is_private == address.ipv4_mapped.is_private

        ``is_private`` has value opposite to :attr:`is_global`, except for the ``100.64.0.0/10``
        IPv4 range where they are both ``False``.
        """
        return (
            any(self in net for net in self._constants._private_networks)
            and all(self not in net for net in self._constants._private_networks_exceptions)
        )

    @property
    @functools.lru_cache()
    def is_global(self):
        """``True`` if the address is defined as globally reachable by
        iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_
        (for IPv6) with the following exception:

        For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the
        semantics of the underlying IPv4 addresses and the following condition holds
        (see :attr:`IPv6Address.ipv4_mapped`)::

            address.is_global == address.ipv4_mapped.is_global

        ``is_global`` has value opposite to :attr:`is_private`, except for the ``100.64.0.0/10``
        IPv4 range where they are both ``False``.
        """
        return self not in self._constants._public_network and not self.is_private

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is multicast.
            See RFC 3171 for details.

        """
        return self in self._constants._multicast_network

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 5735 3.

        """
        return self == self._constants._unspecified_address

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback per RFC 3330.

        """
        return self in self._constants._loopback_network

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is link-local per RFC 3927.

        """
        return self in self._constants._linklocal_network
1404
+
1405
+
1406
class IPv4Interface(IPv4Address):

    def __init__(self, address):
        addr, mask = self._split_addr_prefix(address)

        IPv4Address.__init__(self, addr)
        self.network = IPv4Network((addr, mask), strict=False)
        self.netmask = self.network.netmask
        self._prefixlen = self.network._prefixlen

    @functools.cached_property
    def hostmask(self):
        return self.network.hostmask

    def __str__(self):
        return '%s/%d' % (self._string_from_ip_int(self._ip),
                          self._prefixlen)

    def __eq__(self, other):
        address_equal = IPv4Address.__eq__(self, other)
        if address_equal is NotImplemented or not address_equal:
            return address_equal
        try:
            return self.network == other.network
        except AttributeError:
            # An interface with an associated network is NOT the
            # same as an unassociated address. That's why the hash
            # takes the extra info into account.
            return False

    def __lt__(self, other):
        address_less = IPv4Address.__lt__(self, other)
        if address_less is NotImplemented:
            return NotImplemented
        try:
            return (self.network < other.network or
                    self.network == other.network and address_less)
        except AttributeError:
            # We *do* allow addresses and interfaces to be sorted. The
            # unassociated address is considered less than all interfaces.
            return False

    def __hash__(self):
        return hash((self._ip, self._prefixlen, int(self.network.network_address)))

    __reduce__ = _IPAddressBase.__reduce__

    @property
    def ip(self):
        return IPv4Address(self._ip)

    @property
    def with_prefixlen(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self._prefixlen)

    @property
    def with_netmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.netmask)

    @property
    def with_hostmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.hostmask)
1471
+
1472
+
1473
class IPv4Network(_BaseV4, _BaseNetwork):

    """This class represents and manipulates 32-bit IPv4 network + addresses..

    Attributes: [examples for IPv4Network('192.0.2.0/27')]
        .network_address: IPv4Address('192.0.2.0')
        .hostmask: IPv4Address('0.0.0.31')
        .broadcast_address: IPv4Address('192.0.2.32')
        .netmask: IPv4Address('255.255.255.224')
        .prefixlen: 27

    """
    # Class to use when creating address objects
    _address_class = IPv4Address

    def __init__(self, address, strict=True):
        """Instantiate a new IPv4 network object.

        Args:
            address: A string or integer representing the IP [& network].
              '192.0.2.0/24'
              '192.0.2.0/255.255.255.0'
              '192.0.2.0/0.0.0.255'
              are all functionally the same in IPv4. Similarly,
              '192.0.2.1'
              '192.0.2.1/255.255.255.255'
              '192.0.2.1/32'
              are also functionally equivalent. That is to say, failing to
              provide a subnetmask will create an object with a mask of /32.

              If the mask (portion after the / in the argument) is given in
              dotted quad form, it is treated as a netmask if it starts with a
              non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
              starts with a zero field (e.g. 0.255.255.255 == /8), with the
              single exception of an all-zero mask which is treated as a
              netmask == /0. If no mask is given, a default of /32 is used.

              Additionally, an integer can be passed, so
              IPv4Network('192.0.2.1') == IPv4Network(3221225985)
              or, more generally
              IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
                IPv4Interface('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv4 address.
            ValueError: If strict is True and a network address is not
              supplied.
        """
        addr, mask = self._split_addr_prefix(address)

        self.network_address = IPv4Address(addr)
        self.netmask, self._prefixlen = self._make_netmask(mask)
        packed = int(self.network_address)
        if packed & int(self.netmask) != packed:
            if strict:
                raise ValueError('%s has host bits set' % self)
            else:
                # Non-strict mode: silently mask off the host bits.
                self.network_address = IPv4Address(packed &
                                                   int(self.netmask))

        if self._prefixlen == (self._max_prefixlen - 1):
            # /31 networks have no network/broadcast distinction (RFC 3021),
            # so every address is a usable host.
            self.hosts = self.__iter__
        elif self._prefixlen == (self._max_prefixlen):
            self.hosts = lambda: [IPv4Address(addr)]

    @property
    @functools.lru_cache()
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry.

        """
        return (not (self.network_address in IPv4Network('100.64.0.0/10') and
                     self.broadcast_address in IPv4Network('100.64.0.0/10')) and
                not self.is_private)
1553
+
1554
+
1555
class _IPv4Constants:
    # Well-known IPv4 ranges consulted by the IPv4Address classification
    # properties (is_private, is_global, is_reserved, is_loopback, ...).

    _linklocal_network = IPv4Network('169.254.0.0/16')

    _loopback_network = IPv4Network('127.0.0.0/8')

    _multicast_network = IPv4Network('224.0.0.0/4')

    _public_network = IPv4Network('100.64.0.0/10')

    # Not globally reachable address blocks listed on
    # https://www.iana.org/assignments/iana-ipv4-special-registry/iana-ipv4-special-registry.xhtml
    _private_networks = [
        IPv4Network('0.0.0.0/8'),
        IPv4Network('10.0.0.0/8'),
        IPv4Network('127.0.0.0/8'),
        IPv4Network('169.254.0.0/16'),
        IPv4Network('172.16.0.0/12'),
        IPv4Network('192.0.0.0/24'),
        IPv4Network('192.0.0.170/31'),
        IPv4Network('192.0.2.0/24'),
        IPv4Network('192.168.0.0/16'),
        IPv4Network('198.18.0.0/15'),
        IPv4Network('198.51.100.0/24'),
        IPv4Network('203.0.113.0/24'),
        IPv4Network('240.0.0.0/4'),
        IPv4Network('255.255.255.255/32'),
    ]

    # Sub-blocks of _private_networks that ARE globally reachable per the
    # IANA registry and must be carved back out by is_private/is_global.
    _private_networks_exceptions = [
        IPv4Network('192.0.0.9/32'),
        IPv4Network('192.0.0.10/32'),
    ]

    _reserved_network = IPv4Network('240.0.0.0/4')

    _unspecified_address = IPv4Address('0.0.0.0')


# Bound after both classes exist: IPv4Address properties reach these
# ranges via self._constants.
IPv4Address._constants = _IPv4Constants
1594
+
1595
+
1596
class _BaseV6:

    """Base IPv6 object.

    The following methods are used by IPv6 objects in both single IP
    addresses and networks.

    """

    __slots__ = ()
    _version = 6
    # 128 bits of all ones: the largest representable IPv6 value.
    _ALL_ONES = (2**IPV6LENGTH) - 1
    _HEXTET_COUNT = 8
    _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
    _max_prefixlen = IPV6LENGTH

    # There are only a bunch of valid v6 netmasks, so we cache them all
    # when constructed (see _make_netmask()).
    _netmask_cache = {}

    @classmethod
    def _make_netmask(cls, arg):
        """Make a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        """
        if arg not in cls._netmask_cache:
            if isinstance(arg, int):
                prefixlen = arg
                if not (0 <= prefixlen <= cls._max_prefixlen):
                    cls._report_invalid_netmask(prefixlen)
            else:
                prefixlen = cls._prefix_from_prefix_string(arg)
            netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
            cls._netmask_cache[arg] = netmask, prefixlen
        return cls._netmask_cache[arg]

    @classmethod
    def _ip_int_from_string(cls, ip_str):
        """Turn an IPv6 ip_str into an integer.

        Args:
            ip_str: A string, the IPv6 ip_str.

        Returns:
            An int, the IPv6 address

        Raises:
            AddressValueError: if ip_str isn't a valid IPv6 Address.

        """
        if not ip_str:
            raise AddressValueError('Address cannot be empty')

        parts = ip_str.split(':')

        # An IPv6 address needs at least 2 colons (3 parts).
        _min_parts = 3
        if len(parts) < _min_parts:
            msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
            raise AddressValueError(msg)

        # If the address has an IPv4-style suffix, convert it to hexadecimal.
        if '.' in parts[-1]:
            try:
                ipv4_int = IPv4Address(parts.pop())._ip
            except AddressValueError as exc:
                raise AddressValueError("%s in %r" % (exc, ip_str)) from None
            # Replace the dotted-quad tail with its two 16-bit hextets.
            parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
            parts.append('%x' % (ipv4_int & 0xFFFF))

        # An IPv6 address can't have more than 8 colons (9 parts).
        # The extra colon comes from using the "::" notation for a single
        # leading or trailing zero part.
        _max_parts = cls._HEXTET_COUNT + 1
        if len(parts) > _max_parts:
            msg = "At most %d colons permitted in %r" % (_max_parts-1, ip_str)
            raise AddressValueError(msg)

        # Disregarding the endpoints, find '::' with nothing in between.
        # This indicates that a run of zeroes has been skipped.
        skip_index = None
        for i in range(1, len(parts) - 1):
            if not parts[i]:
                if skip_index is not None:
                    # Can't have more than one '::'
                    msg = "At most one '::' permitted in %r" % ip_str
                    raise AddressValueError(msg)
                skip_index = i

        # parts_hi is the number of parts to copy from above/before the '::'
        # parts_lo is the number of parts to copy from below/after the '::'
        if skip_index is not None:
            # If we found a '::', then check if it also covers the endpoints.
            parts_hi = skip_index
            parts_lo = len(parts) - skip_index - 1
            if not parts[0]:
                parts_hi -= 1
                if parts_hi:
                    msg = "Leading ':' only permitted as part of '::' in %r"
                    raise AddressValueError(msg % ip_str)  # ^: requires ^::
            if not parts[-1]:
                parts_lo -= 1
                if parts_lo:
                    msg = "Trailing ':' only permitted as part of '::' in %r"
                    raise AddressValueError(msg % ip_str)  # :$ requires ::$
            parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)
            if parts_skipped < 1:
                msg = "Expected at most %d other parts with '::' in %r"
                raise AddressValueError(msg % (cls._HEXTET_COUNT-1, ip_str))
        else:
            # Otherwise, allocate the entire address to parts_hi. The
            # endpoints could still be empty, but _parse_hextet() will check
            # for that.
            if len(parts) != cls._HEXTET_COUNT:
                msg = "Exactly %d parts expected without '::' in %r"
                raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))
            if not parts[0]:
                msg = "Leading ':' only permitted as part of '::' in %r"
                raise AddressValueError(msg % ip_str)  # ^: requires ^::
            if not parts[-1]:
                msg = "Trailing ':' only permitted as part of '::' in %r"
                raise AddressValueError(msg % ip_str)  # :$ requires ::$
            parts_hi = len(parts)
            parts_lo = 0
            parts_skipped = 0

        try:
            # Now, parse the hextets into a 128-bit integer.
            ip_int = 0
            for i in range(parts_hi):
                ip_int <<= 16
                ip_int |= cls._parse_hextet(parts[i])
            # Account for the zero hextets elided by '::'.
            ip_int <<= 16 * parts_skipped
            for i in range(-parts_lo, 0):
                ip_int <<= 16
                ip_int |= cls._parse_hextet(parts[i])
            return ip_int
        except ValueError as exc:
            raise AddressValueError("%s in %r" % (exc, ip_str)) from None

    @classmethod
    def _parse_hextet(cls, hextet_str):
        """Convert an IPv6 hextet string into an integer.

        Args:
            hextet_str: A string, the number to parse.

        Returns:
            The hextet as an integer.

        Raises:
            ValueError: if the input isn't strictly a hex number from
              [0..FFFF].

        """
        # Reject non-ASCII digits.
        if not cls._HEX_DIGITS.issuperset(hextet_str):
            raise ValueError("Only hex digits permitted in %r" % hextet_str)
        # We do the length check second, since the invalid character error
        # is likely to be more informative for the user
        if len(hextet_str) > 4:
            msg = "At most 4 characters permitted in %r"
            raise ValueError(msg % hextet_str)
        # Length check means we can skip checking the integer value
        return int(hextet_str, 16)

    @classmethod
    def _compress_hextets(cls, hextets):
        """Compresses a list of hextets.

        Compresses a list of strings, replacing the longest continuous
        sequence of "0" in the list with "" and adding empty strings at
        the beginning or at the end of the string such that subsequently
        calling ":".join(hextets) will produce the compressed version of
        the IPv6 address.

        Args:
            hextets: A list of strings, the hextets to compress.

        Returns:
            A list of strings.

        """
        best_doublecolon_start = -1
        best_doublecolon_len = 0
        doublecolon_start = -1
        doublecolon_len = 0
        for index, hextet in enumerate(hextets):
            if hextet == '0':
                doublecolon_len += 1
                if doublecolon_start == -1:
                    # Start of a sequence of zeros.
                    doublecolon_start = index
                if doublecolon_len > best_doublecolon_len:
                    # This is the longest sequence of zeros so far.
                    best_doublecolon_len = doublecolon_len
                    best_doublecolon_start = doublecolon_start
            else:
                doublecolon_len = 0
                doublecolon_start = -1

        # Only runs of two or more zero hextets are worth compressing.
        if best_doublecolon_len > 1:
            best_doublecolon_end = (best_doublecolon_start +
                                    best_doublecolon_len)
            # For zeros at the end of the address.
            if best_doublecolon_end == len(hextets):
                hextets += ['']
            hextets[best_doublecolon_start:best_doublecolon_end] = ['']
            # For zeros at the beginning of the address.
            if best_doublecolon_start == 0:
                hextets = [''] + hextets

        return hextets

    @classmethod
    def _string_from_ip_int(cls, ip_int=None):
        """Turns a 128-bit integer into hexadecimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            A string, the hexadecimal representation of the address.

        Raises:
            ValueError: The address is bigger than 128 bits of all ones.

        """
        if ip_int is None:
            # NOTE(review): cls._ip only exists on instances, not on the
            # class; this default path appears callable only via an
            # instance-bound usage — confirm before relying on it.
            ip_int = int(cls._ip)

        if ip_int > cls._ALL_ONES:
            raise ValueError('IPv6 address is too large')

        hex_str = '%032x' % ip_int
        hextets = ['%x' % int(hex_str[x:x+4], 16) for x in range(0, 32, 4)]

        hextets = cls._compress_hextets(hextets)
        return ':'.join(hextets)

    def _explode_shorthand_ip_string(self):
        """Expand a shortened IPv6 address.

        Returns:
            A string, the expanded IPv6 address.

        """
        if isinstance(self, IPv6Network):
            ip_str = str(self.network_address)
        elif isinstance(self, IPv6Interface):
            ip_str = str(self.ip)
        else:
            ip_str = str(self)

        ip_int = self._ip_int_from_string(ip_str)
        hex_str = '%032x' % ip_int
        parts = [hex_str[x:x+4] for x in range(0, 32, 4)]
        if isinstance(self, (_BaseNetwork, IPv6Interface)):
            # Networks/interfaces carry their prefix in the exploded form.
            return '%s/%d' % (':'.join(parts), self._prefixlen)
        return ':'.join(parts)

    def _reverse_pointer(self):
        """Return the reverse DNS pointer name for the IPv6 address.

        This implements the method described in RFC3596 2.5.

        """
        # One label per nibble of the exploded form, least significant first.
        reverse_chars = self.exploded[::-1].replace(':', '')
        return '.'.join(reverse_chars) + '.ip6.arpa'

    @staticmethod
    def _split_scope_id(ip_str):
        """Helper function to parse IPv6 string address with scope id.

        See RFC 4007 for details.

        Args:
            ip_str: A string, the IPv6 address.

        Returns:
            (addr, scope_id) tuple.

        """
        addr, sep, scope_id = ip_str.partition('%')
        if not sep:
            scope_id = None
        elif not scope_id or '%' in scope_id:
            raise AddressValueError('Invalid IPv6 address: "%r"' % ip_str)
        return addr, scope_id

    @property
    def max_prefixlen(self):
        # Always 128 for IPv6.
        return self._max_prefixlen

    @property
    def version(self):
        # Always 6 for this family.
        return self._version
1897
+
1898
+
1899
class IPv6Address(_BaseV6, _BaseAddress):

    """Represent and manipulate single IPv6 Addresses."""

    __slots__ = ('_ip', '_scope_id', '__weakref__')

    def __init__(self, address):
        """Instantiate a new IPv6 address object.

        Args:
            address: A string or integer representing the IP

              Additionally, an integer can be passed, so
              IPv6Address('2001:db8::') ==
                IPv6Address(42540766411282592856903984951653826560)
              or, more generally
              IPv6Address(int(IPv6Address('2001:db8::'))) ==
                IPv6Address('2001:db8::')

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.

        """
        # Efficient constructor from integer.
        if isinstance(address, int):
            self._check_int_address(address)
            self._ip = address
            self._scope_id = None
            return

        # Constructing from a packed address
        if isinstance(address, bytes):
            self._check_packed_address(address, 16)
            self._ip = int.from_bytes(address, 'big')
            self._scope_id = None
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP string.
        addr_str = str(address)
        if '/' in addr_str:
            raise AddressValueError(f"Unexpected '/' in {address!r}")
        # Split off an optional '%zone' suffix (RFC 4007) before parsing.
        addr_str, self._scope_id = self._split_scope_id(addr_str)

        self._ip = self._ip_int_from_string(addr_str)

    def __str__(self):
        ip_str = super().__str__()
        # Re-attach the zone suffix, if one was supplied.
        return ip_str + '%' + self._scope_id if self._scope_id else ip_str

    def __hash__(self):
        # The scope id participates in both equality and hashing.
        return hash((self._ip, self._scope_id))

    def __eq__(self, other):
        address_equal = super().__eq__(other)
        if address_equal is NotImplemented:
            return NotImplemented
        if not address_equal:
            return False
        return self._scope_id == getattr(other, '_scope_id', None)

    @property
    def scope_id(self):
        """Identifier of a particular zone of the address's scope.

        See RFC 4007 for details.

        Returns:
            A string identifying the zone of the address if specified, else None.

        """
        return self._scope_id

    @property
    def packed(self):
        """The binary representation of this address."""
        return v6_int_to_packed(self._ip)

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.

        """
        # IPv4-mapped addresses delegate classification to the embedded
        # IPv4 address (same pattern in the properties below).
        ipv4_mapped = self.ipv4_mapped
        if ipv4_mapped is not None:
            return ipv4_mapped.is_multicast
        return self in self._constants._multicast_network

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.

        """
        ipv4_mapped = self.ipv4_mapped
        if ipv4_mapped is not None:
            return ipv4_mapped.is_reserved
        return any(self in x for x in self._constants._reserved_networks)

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.

        """
        ipv4_mapped = self.ipv4_mapped
        if ipv4_mapped is not None:
            return ipv4_mapped.is_link_local
        return self in self._constants._linklocal_network

    @property
    def is_site_local(self):
        """Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.

        """
        return self in self._constants._sitelocal_network

    @property
    @functools.lru_cache()
    def is_private(self):
        """``True`` if the address is defined as not globally reachable by
        iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_
        (for IPv6) with the following exceptions:

        * ``is_private`` is ``False`` for ``100.64.0.0/10``
        * For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the
          semantics of the underlying IPv4 addresses and the following condition holds
          (see :attr:`IPv6Address.ipv4_mapped`)::

             address.is_private == address.ipv4_mapped.is_private

        ``is_private`` has value opposite to :attr:`is_global`, except for the ``100.64.0.0/10``
        IPv4 range where they are both ``False``.
        """
        ipv4_mapped = self.ipv4_mapped
        if ipv4_mapped is not None:
            return ipv4_mapped.is_private
        # Private iff inside some registry block and not carved out by an
        # explicit globally-reachable exception.
        return (
            any(self in net for net in self._constants._private_networks)
            and all(self not in net for net in self._constants._private_networks_exceptions)
        )

    @property
    def is_global(self):
        """``True`` if the address is defined as globally reachable by
        iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_
        (for IPv6) with the following exception:

        For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the
        semantics of the underlying IPv4 addresses and the following condition holds
        (see :attr:`IPv6Address.ipv4_mapped`)::

            address.is_global == address.ipv4_mapped.is_global

        ``is_global`` has value opposite to :attr:`is_private`, except for the ``100.64.0.0/10``
        IPv4 range where they are both ``False``.
        """
        ipv4_mapped = self.ipv4_mapped
        if ipv4_mapped is not None:
            return ipv4_mapped.is_global
        return not self.is_private

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.

        """
        ipv4_mapped = self.ipv4_mapped
        if ipv4_mapped is not None:
            return ipv4_mapped.is_unspecified
        return self._ip == 0

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.

        """
        ipv4_mapped = self.ipv4_mapped
        if ipv4_mapped is not None:
            return ipv4_mapped.is_loopback
        return self._ip == 1

    @property
    def ipv4_mapped(self):
        """Return the IPv4 mapped address.

        Returns:
            If the IPv6 address is a v4 mapped address, return the
            IPv4 mapped address. Return None otherwise.

        """
        # ::ffff:a.b.c.d — upper 96 bits must be exactly 0x...ffff.
        if (self._ip >> 32) != 0xFFFF:
            return None
        return IPv4Address(self._ip & 0xFFFFFFFF)

    @property
    def teredo(self):
        """Tuple of embedded teredo IPs.

        Returns:
            Tuple of the (server, client) IPs or None if the address
            doesn't appear to be a teredo address (doesn't start with
            2001::/32)

        """
        if (self._ip >> 96) != 0x20010000:
            return None
        # Client address bits are stored inverted in the Teredo format.
        return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
                IPv4Address(~self._ip & 0xFFFFFFFF))

    @property
    def sixtofour(self):
        """Return the IPv4 6to4 embedded address.

        Returns:
            The IPv4 6to4-embedded address if present or None if the
            address doesn't appear to contain a 6to4 embedded address.

        """
        if (self._ip >> 112) != 0x2002:
            return None
        return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
2145
+
2146
+
2147
class IPv6Interface(IPv6Address):
    """An IPv6 address paired with the network it lives on.

    Behaves like an IPv6Address, plus netmask/prefix attributes and a
    ``.network`` built with strict=False so host bits are allowed.
    """

    def __init__(self, address):
        addr, mask = self._split_addr_prefix(address)

        IPv6Address.__init__(self, addr)
        self.network = IPv6Network((addr, mask), strict=False)
        self.netmask = self.network.netmask
        self._prefixlen = self.network._prefixlen

    @functools.cached_property
    def hostmask(self):
        # Delegated to the associated network; cached on first access.
        return self.network.hostmask

    def __str__(self):
        return '%s/%d' % (super().__str__(),
                          self._prefixlen)

    def __eq__(self, other):
        address_equal = IPv6Address.__eq__(self, other)
        if address_equal is NotImplemented or not address_equal:
            return address_equal
        try:
            return self.network == other.network
        except AttributeError:
            # An interface with an associated network is NOT the
            # same as an unassociated address. That's why the hash
            # takes the extra info into account.
            return False

    def __lt__(self, other):
        address_less = IPv6Address.__lt__(self, other)
        if address_less is NotImplemented:
            return address_less
        try:
            return (self.network < other.network or
                    self.network == other.network and address_less)
        except AttributeError:
            # We *do* allow addresses and interfaces to be sorted. The
            # unassociated address is considered less than all interfaces.
            return False

    def __hash__(self):
        # Must stay consistent with __eq__, which compares the network too.
        return hash((self._ip, self._prefixlen, int(self.network.network_address)))

    __reduce__ = _IPAddressBase.__reduce__

    @property
    def ip(self):
        # The bare address, stripped of its network association.
        return IPv6Address(self._ip)

    @property
    def with_prefixlen(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self._prefixlen)

    @property
    def with_netmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.netmask)

    @property
    def with_hostmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.hostmask)

    @property
    def is_unspecified(self):
        # Both the address and its network must be unspecified.
        return self._ip == 0 and self.network.is_unspecified

    @property
    def is_loopback(self):
        return super().is_loopback and self.network.is_loopback
2220
+
2221
+
2222
class IPv6Network(_BaseV6, _BaseNetwork):

    """This class represents and manipulates 128-bit IPv6 networks.

    Attributes: [examples for IPv6Network('2001:db8::1000/124')]
        .network_address: IPv6Address('2001:db8::1000')
        .hostmask: IPv6Address('::f')
        .broadcast_address: IPv6Address('2001:db8::100f')
        .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
        .prefixlen: 124

    """

    # Class to use when creating address objects
    _address_class = IPv6Address

    def __init__(self, address, strict=True):
        """Instantiate a new IPv6 Network object.

        Args:
            address: A string or integer representing the IPv6 network or the
              IP and prefix/netmask.
              '2001:db8::/128'
              '2001:db8:0000:0000:0000:0000:0000:0000/128'
              '2001:db8::'
              are all functionally the same in IPv6.  That is to say,
              failing to provide a subnetmask will create an object with
              a mask of /128.

              Additionally, an integer can be passed, so
              IPv6Network('2001:db8::') ==
                IPv6Network(42540766411282592856903984951653826560)
              or, more generally
              IPv6Network(int(IPv6Network('2001:db8::'))) ==
                IPv6Network('2001:db8::')

            strict: A boolean. If true, ensure that we have been passed
              A true network address, eg, 2001:db8::1000/124 and not an
              IP address on a network, eg, 2001:db8::1/124.

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv6 address.
            ValueError: If strict was True and a network address was not
              supplied.
        """
        addr, mask = self._split_addr_prefix(address)

        self.network_address = IPv6Address(addr)
        self.netmask, self._prefixlen = self._make_netmask(mask)
        packed = int(self.network_address)
        # Host bits set below the netmask: an error under strict mode,
        # otherwise silently masked down to the true network address.
        if packed & int(self.netmask) != packed:
            if strict:
                raise ValueError('%s has host bits set' % self)
            else:
                self.network_address = IPv6Address(packed &
                                                   int(self.netmask))

        # Per-instance override of hosts() for the degenerate prefixes:
        # /127 has no anycast address to exclude, /128 is a single host.
        if self._prefixlen == (self._max_prefixlen - 1):
            self.hosts = self.__iter__
        elif self._prefixlen == self._max_prefixlen:
            self.hosts = lambda: [IPv6Address(addr)]

    def hosts(self):
        """Generate Iterator over usable hosts in a network.

          This is like __iter__ except it doesn't return the
          Subnet-Router anycast address.

        """
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        # Skip the network (Subnet-Router anycast) address; the last
        # address is included.
        for x in range(network + 1, broadcast + 1):
            yield self._address_class(x)

    @property
    def is_site_local(self):
        """Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.

        """
        return (self.network_address.is_site_local and
                self.broadcast_address.is_site_local)
2312
+
2313
+
2314
class _IPv6Constants:
    # Well-known IPv6 ranges consulted by the IPv6Address classification
    # properties (is_private, is_reserved, is_link_local, ...).

    _linklocal_network = IPv6Network('fe80::/10')

    _multicast_network = IPv6Network('ff00::/8')

    # Not globally reachable address blocks listed on
    # https://www.iana.org/assignments/iana-ipv6-special-registry/iana-ipv6-special-registry.xhtml
    _private_networks = [
        IPv6Network('::1/128'),
        IPv6Network('::/128'),
        IPv6Network('::ffff:0:0/96'),
        IPv6Network('64:ff9b:1::/48'),
        IPv6Network('100::/64'),
        IPv6Network('2001::/23'),
        IPv6Network('2001:db8::/32'),
        # IANA says N/A, let's consider it not globally reachable to be safe
        IPv6Network('2002::/16'),
        # RFC 9637: https://www.rfc-editor.org/rfc/rfc9637.html#section-6-2.2
        IPv6Network('3fff::/20'),
        IPv6Network('fc00::/7'),
        IPv6Network('fe80::/10'),
    ]

    # Sub-blocks of _private_networks that ARE globally reachable per the
    # IANA registry and must be carved back out by is_private/is_global.
    _private_networks_exceptions = [
        IPv6Network('2001:1::1/128'),
        IPv6Network('2001:1::2/128'),
        IPv6Network('2001:3::/32'),
        IPv6Network('2001:4:112::/48'),
        IPv6Network('2001:20::/28'),
        IPv6Network('2001:30::/28'),
    ]

    _reserved_networks = [
        IPv6Network('::/8'), IPv6Network('100::/8'),
        IPv6Network('200::/7'), IPv6Network('400::/6'),
        IPv6Network('800::/5'), IPv6Network('1000::/4'),
        IPv6Network('4000::/3'), IPv6Network('6000::/3'),
        IPv6Network('8000::/3'), IPv6Network('A000::/3'),
        IPv6Network('C000::/3'), IPv6Network('E000::/4'),
        IPv6Network('F000::/5'), IPv6Network('F800::/6'),
        IPv6Network('FE00::/9'),
    ]

    _sitelocal_network = IPv6Network('fec0::/10')


# Bound after both classes exist: IPv6Address properties reach these
# ranges via self._constants.
IPv6Address._constants = _IPv6Constants
deepseek/lib/python3.10/numbers.py ADDED
@@ -0,0 +1,393 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2007 Google, Inc. All Rights Reserved.
2
+ # Licensed to PSF under a Contributor Agreement.
3
+
4
+ """Abstract Base Classes (ABCs) for numbers, according to PEP 3141.
5
+
6
+ TODO: Fill out more detailed documentation on the operators."""
7
+
8
+ from abc import ABCMeta, abstractmethod
9
+
10
+ __all__ = ["Number", "Complex", "Real", "Rational", "Integral"]
11
+
12
class Number(metaclass=ABCMeta):
    """Root of the numeric ABC tower (PEP 3141).

    Use ``isinstance(x, Number)`` to check whether *x* is any kind of
    number at all, without caring which concrete numeric type it is.
    """

    # No per-instance state anywhere in the ABC tower.
    __slots__ = ()

    # Disable the inherited object.__hash__: concrete numeric types are
    # required to provide their own hash implementation.
    __hash__ = None
22
+
23
+
24
+ ## Notes on Decimal
25
+ ## ----------------
26
+ ## Decimal has all of the methods specified by the Real abc, but it should
27
+ ## not be registered as a Real because decimals do not interoperate with
28
+ ## binary floats (i.e. Decimal('3.14') + 2.71828 is undefined). But,
29
+ ## abstract reals are expected to interoperate (i.e. R1 + R2 should be
30
+ ## expected to work if R1 and R2 are both Reals).
31
+
32
class Complex(Number):
    """Complex defines the operations that work on the builtin complex type.

    In short, those are: a conversion to complex, .real, .imag, +, -,
    *, /, **, abs(), .conjugate, ==, and !=.

    If it is given heterogeneous arguments, and doesn't have special
    knowledge about them, it should fall back to the builtin complex
    type as described below.
    """

    __slots__ = ()

    @abstractmethod
    def __complex__(self):
        """Return a builtin complex instance. Called for complex(self)."""

    def __bool__(self):
        """True if self != 0. Called for bool(self)."""
        return self != 0

    @property
    @abstractmethod
    def real(self):
        """Retrieve the real component of this number.

        This should subclass Real.
        """
        raise NotImplementedError

    @property
    @abstractmethod
    def imag(self):
        """Retrieve the imaginary component of this number.

        This should subclass Real.
        """
        raise NotImplementedError

    @abstractmethod
    def __add__(self, other):
        """self + other"""
        raise NotImplementedError

    @abstractmethod
    def __radd__(self, other):
        """other + self"""
        raise NotImplementedError

    @abstractmethod
    def __neg__(self):
        """-self"""
        raise NotImplementedError

    @abstractmethod
    def __pos__(self):
        """+self"""
        raise NotImplementedError

    # Subtraction gets a concrete default in terms of the abstract
    # addition and negation operations.
    def __sub__(self, other):
        """self - other"""
        return self + -other

    def __rsub__(self, other):
        """other - self"""
        return -self + other

    @abstractmethod
    def __mul__(self, other):
        """self * other"""
        raise NotImplementedError

    @abstractmethod
    def __rmul__(self, other):
        """other * self"""
        raise NotImplementedError

    @abstractmethod
    def __truediv__(self, other):
        """self / other: Should promote to float when necessary."""
        raise NotImplementedError

    @abstractmethod
    def __rtruediv__(self, other):
        """other / self"""
        raise NotImplementedError

    @abstractmethod
    def __pow__(self, exponent):
        """self**exponent; should promote to float or complex when necessary."""
        raise NotImplementedError

    @abstractmethod
    def __rpow__(self, base):
        """base ** self"""
        raise NotImplementedError

    @abstractmethod
    def __abs__(self):
        """Returns the Real distance from 0. Called for abs(self)."""
        raise NotImplementedError

    @abstractmethod
    def conjugate(self):
        """(x+y*i).conjugate() returns (x-y*i)."""
        raise NotImplementedError

    @abstractmethod
    def __eq__(self, other):
        """self == other"""
        raise NotImplementedError

# The builtin complex type satisfies this interface; register it as a
# virtual subclass so isinstance(1j, Complex) is True.
Complex.register(complex)
145
+
146
+
147
class Real(Complex):
    """To Complex, Real adds the operations that work on real numbers.

    In short, those are: a conversion to float, trunc(), divmod,
    %, <, <=, >, and >=.

    Real also provides defaults for the derived operations.
    """

    __slots__ = ()

    @abstractmethod
    def __float__(self):
        """Any Real can be converted to a native float object.

        Called for float(self)."""
        raise NotImplementedError

    @abstractmethod
    def __trunc__(self):
        """trunc(self): Truncates self to an Integral.

        Returns an Integral i such that:
          * i>0 iff self>0;
          * abs(i) <= abs(self);
          * for any Integral j satisfying the first two conditions,
            abs(i) >= abs(j) [i.e. i has "maximal" abs among those].
        i.e. "truncate towards 0".
        """
        raise NotImplementedError

    @abstractmethod
    def __floor__(self):
        """Finds the greatest Integral <= self."""
        raise NotImplementedError

    @abstractmethod
    def __ceil__(self):
        """Finds the least Integral >= self."""
        raise NotImplementedError

    @abstractmethod
    def __round__(self, ndigits=None):
        """Rounds self to ndigits decimal places, defaulting to 0.

        If ndigits is omitted or None, returns an Integral, otherwise
        returns a Real. Rounds half toward even.
        """
        raise NotImplementedError

    # Concrete default: built from the abstract // and % operations.
    def __divmod__(self, other):
        """divmod(self, other): The pair (self // other, self % other).

        Sometimes this can be computed faster than the pair of
        operations.
        """
        return (self // other, self % other)

    def __rdivmod__(self, other):
        """divmod(other, self): The pair (other // self, other % self).

        Sometimes this can be computed faster than the pair of
        operations.
        """
        return (other // self, other % self)

    @abstractmethod
    def __floordiv__(self, other):
        """self // other: The floor() of self/other."""
        raise NotImplementedError

    @abstractmethod
    def __rfloordiv__(self, other):
        """other // self: The floor() of other/self."""
        raise NotImplementedError

    @abstractmethod
    def __mod__(self, other):
        """self % other"""
        raise NotImplementedError

    @abstractmethod
    def __rmod__(self, other):
        """other % self"""
        raise NotImplementedError

    @abstractmethod
    def __lt__(self, other):
        """self < other

        < on Reals defines a total ordering, except perhaps for NaN."""
        raise NotImplementedError

    @abstractmethod
    def __le__(self, other):
        """self <= other"""
        raise NotImplementedError

    # Concrete implementations of Complex abstract methods.
    def __complex__(self):
        """complex(self) == complex(float(self), 0)"""
        return complex(float(self))

    @property
    def real(self):
        """Real numbers are their real component."""
        # Unary + coerces to the canonical numeric value without copying
        # semantics from the concrete subclass.
        return +self

    @property
    def imag(self):
        """Real numbers have no imaginary component."""
        return 0

    def conjugate(self):
        """Conjugate is a no-op for Reals."""
        return +self

Real.register(float)
265
+
266
+
267
class Rational(Real):
    """.numerator and .denominator should be in lowest terms."""

    __slots__ = ()

    @property
    @abstractmethod
    def numerator(self):
        raise NotImplementedError

    @property
    @abstractmethod
    def denominator(self):
        raise NotImplementedError

    # Concrete implementation of Real's conversion to float.
    def __float__(self):
        """float(self) = self.numerator / self.denominator

        It's important that this conversion use the integer's "true"
        division rather than casting one side to float before dividing
        so that ratios of huge integers convert without overflowing.

        """
        return int(self.numerator) / int(self.denominator)
292
+
293
+
294
class Integral(Rational):
    """Integral adds methods that work on integral numbers.

    In short, these are conversion to int, pow with modulus, and the
    bit-string operations.
    """

    __slots__ = ()

    @abstractmethod
    def __int__(self):
        """int(self)"""
        raise NotImplementedError

    def __index__(self):
        """Called whenever an index is needed, such as in slicing"""
        # Concrete default: any Integral can act as an index via int().
        return int(self)

    @abstractmethod
    def __pow__(self, exponent, modulus=None):
        """self ** exponent % modulus, but maybe faster.

        Accept the modulus argument if you want to support the
        3-argument version of pow(). Raise a TypeError if exponent < 0
        or any argument isn't Integral. Otherwise, just implement the
        2-argument version described in Complex.
        """
        raise NotImplementedError

    @abstractmethod
    def __lshift__(self, other):
        """self << other"""
        raise NotImplementedError

    @abstractmethod
    def __rlshift__(self, other):
        """other << self"""
        raise NotImplementedError

    @abstractmethod
    def __rshift__(self, other):
        """self >> other"""
        raise NotImplementedError

    @abstractmethod
    def __rrshift__(self, other):
        """other >> self"""
        raise NotImplementedError

    @abstractmethod
    def __and__(self, other):
        """self & other"""
        raise NotImplementedError

    @abstractmethod
    def __rand__(self, other):
        """other & self"""
        raise NotImplementedError

    @abstractmethod
    def __xor__(self, other):
        """self ^ other"""
        raise NotImplementedError

    @abstractmethod
    def __rxor__(self, other):
        """other ^ self"""
        raise NotImplementedError

    @abstractmethod
    def __or__(self, other):
        """self | other"""
        raise NotImplementedError

    @abstractmethod
    def __ror__(self, other):
        """other | self"""
        raise NotImplementedError

    @abstractmethod
    def __invert__(self):
        """~self"""
        raise NotImplementedError

    # Concrete implementations of Rational and Real abstract methods.
    def __float__(self):
        """float(self) == float(int(self))"""
        return float(int(self))

    @property
    def numerator(self):
        """Integers are their own numerators."""
        return +self

    @property
    def denominator(self):
        """Integers have a denominator of 1."""
        return 1

Integral.register(int)
deepseek/lib/python3.10/wave.py ADDED
@@ -0,0 +1,513 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Stuff to parse WAVE files.
2
+
3
+ Usage.
4
+
5
+ Reading WAVE files:
6
+ f = wave.open(file, 'r')
7
+ where file is either the name of a file or an open file pointer.
8
+ The open file pointer must have methods read(), seek(), and close().
9
+ When the setpos() and rewind() methods are not used, the seek()
10
+ method is not necessary.
11
+
12
+ This returns an instance of a class with the following public methods:
13
+ getnchannels() -- returns number of audio channels (1 for
14
+ mono, 2 for stereo)
15
+ getsampwidth() -- returns sample width in bytes
16
+ getframerate() -- returns sampling frequency
17
+ getnframes() -- returns number of audio frames
18
+ getcomptype() -- returns compression type ('NONE' for linear samples)
19
+ getcompname() -- returns human-readable version of
20
+ compression type ('not compressed' linear samples)
21
+ getparams() -- returns a namedtuple consisting of all of the
22
+ above in the above order
23
+ getmarkers() -- returns None (for compatibility with the
24
+ aifc module)
25
+ getmark(id) -- raises an error since the mark does not
26
+ exist (for compatibility with the aifc module)
27
+ readframes(n) -- returns at most n frames of audio
28
+ rewind() -- rewind to the beginning of the audio stream
29
+ setpos(pos) -- seek to the specified position
30
+ tell() -- return the current position
31
+ close() -- close the instance (make it unusable)
32
+ The position returned by tell() and the position given to setpos()
33
+ are compatible and have nothing to do with the actual position in the
34
+ file.
35
+ The close() method is called automatically when the class instance
36
+ is destroyed.
37
+
38
+ Writing WAVE files:
39
+ f = wave.open(file, 'w')
40
+ where file is either the name of a file or an open file pointer.
41
+ The open file pointer must have methods write(), tell(), seek(), and
42
+ close().
43
+
44
+ This returns an instance of a class with the following public methods:
45
+ setnchannels(n) -- set the number of channels
46
+ setsampwidth(n) -- set the sample width
47
+ setframerate(n) -- set the frame rate
48
+ setnframes(n) -- set the number of frames
49
+ setcomptype(type, name)
50
+ -- set the compression type and the
51
+ human-readable compression type
52
+ setparams(tuple)
53
+ -- set all parameters at once
54
+ tell() -- return current position in output file
55
+ writeframesraw(data)
56
+ -- write audio frames without patching up the
57
+ file header
58
+ writeframes(data)
59
+ -- write audio frames and patch up the file header
60
+ close() -- patch up the file header and close the
61
+ output file
62
+ You should set the parameters before the first writeframesraw or
63
+ writeframes. The total number of frames does not need to be set,
64
+ but when it is set to the correct value, the header does not have to
65
+ be patched up.
66
+ It is best to first set all parameters, perhaps possibly the
67
+ compression type, and then write audio frames using writeframesraw.
68
+ When all frames have been written, either call writeframes(b'') or
69
+ close() to patch up the sizes in the header.
70
+ The close() method is called automatically when the class instance
71
+ is destroyed.
72
+ """
73
+
74
+ from chunk import Chunk
75
+ from collections import namedtuple
76
+ import audioop
77
+ import builtins
78
+ import struct
79
+ import sys
80
+
81
+
82
+ __all__ = ["open", "Error", "Wave_read", "Wave_write"]
83
+
84
class Error(Exception):
    """Exception raised for all wave-module specific errors."""
    pass

# RIFF format tag for linear PCM -- the only compression this module reads
# or writes (see _read_fmt_chunk / setcomptype below).
WAVE_FORMAT_PCM = 0x0001

# array-module type codes indexed by sample width in bytes; widths 3
# (index None) have no single array code.
_array_fmts = None, 'b', 'h', None, 'i'

# Result type of Wave_read.getparams() / argument of Wave_write.setparams().
_wave_params = namedtuple('_wave_params',
                          'nchannels sampwidth framerate nframes comptype compname')
93
+
94
class Wave_read:
    """Variables used in this class:

    These variables are available to the user through appropriate
    methods of this class:
    _file -- the open file with methods read(), close(), and seek()
              set through the __init__() method
    _nchannels -- the number of audio channels
              available through the getnchannels() method
    _nframes -- the number of audio frames
              available through the getnframes() method
    _sampwidth -- the number of bytes per audio sample
              available through the getsampwidth() method
    _framerate -- the sampling frequency
              available through the getframerate() method
    _comptype -- the AIFF-C compression type ('NONE' if AIFF)
              available through the getcomptype() method
    _compname -- the human-readable AIFF-C compression type
              available through the getcomptype() method
    _soundpos -- the position in the audio stream
              available through the tell() method, set through the
              setpos() method

    These variables are used internally only:
    _fmt_chunk_read -- 1 iff the FMT chunk has been read
    _data_seek_needed -- 1 iff positioned correctly in audio
              file for readframes()
    _data_chunk -- instantiation of a chunk class for the DATA chunk
    _framesize -- size of one frame in the file
    """

    def initfp(self, file):
        """Parse the RIFF/WAVE container headers from an open file object."""
        self._convert = None
        self._soundpos = 0
        # WAVE is a little-endian RIFF container, hence bigendian = 0.
        self._file = Chunk(file, bigendian = 0)
        if self._file.getname() != b'RIFF':
            raise Error('file does not start with RIFF id')
        if self._file.read(4) != b'WAVE':
            raise Error('not a WAVE file')
        self._fmt_chunk_read = 0
        self._data_chunk = None
        while 1:
            self._data_seek_needed = 1
            try:
                chunk = Chunk(self._file, bigendian = 0)
            except EOFError:
                break
            chunkname = chunk.getname()
            if chunkname == b'fmt ':
                self._read_fmt_chunk(chunk)
                self._fmt_chunk_read = 1
            elif chunkname == b'data':
                # 'fmt ' must precede 'data' so _framesize is known here.
                if not self._fmt_chunk_read:
                    raise Error('data chunk before fmt chunk')
                self._data_chunk = chunk
                self._nframes = chunk.chunksize // self._framesize
                self._data_seek_needed = 0
                break
            # Any other chunk type (e.g. 'LIST') is skipped.
            chunk.skip()
        if not self._fmt_chunk_read or not self._data_chunk:
            raise Error('fmt chunk and/or data chunk missing')

    def __init__(self, f):
        # f may be a filename or an already-open binary file object; only
        # files we opened ourselves are closed by close().
        self._i_opened_the_file = None
        if isinstance(f, str):
            f = builtins.open(f, 'rb')
            self._i_opened_the_file = f
        # else, assume it is an open file object already
        try:
            self.initfp(f)
        except:
            # Don't leak the file descriptor if header parsing fails.
            if self._i_opened_the_file:
                f.close()
            raise

    def __del__(self):
        self.close()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    #
    # User visible methods.
    #
    def getfp(self):
        return self._file

    def rewind(self):
        # Defer the actual seek until the next readframes() call.
        self._data_seek_needed = 1
        self._soundpos = 0

    def close(self):
        self._file = None
        file = self._i_opened_the_file
        if file:
            self._i_opened_the_file = None
            file.close()

    def tell(self):
        return self._soundpos

    def getnchannels(self):
        return self._nchannels

    def getnframes(self):
        return self._nframes

    def getsampwidth(self):
        return self._sampwidth

    def getframerate(self):
        return self._framerate

    def getcomptype(self):
        return self._comptype

    def getcompname(self):
        return self._compname

    def getparams(self):
        """Return all stream parameters as a _wave_params namedtuple."""
        return _wave_params(self.getnchannels(), self.getsampwidth(),
                            self.getframerate(), self.getnframes(),
                            self.getcomptype(), self.getcompname())

    def getmarkers(self):
        # For compatibility with the aifc module; WAVE has no markers here.
        return None

    def getmark(self, id):
        # For compatibility with the aifc module.
        raise Error('no marks')

    def setpos(self, pos):
        """Seek to frame position pos; actual file seek is deferred."""
        if pos < 0 or pos > self._nframes:
            raise Error('position not in range')
        self._soundpos = pos
        self._data_seek_needed = 1

    def readframes(self, nframes):
        """Return at most nframes frames of audio as bytes."""
        if self._data_seek_needed:
            # Position within the data chunk according to _soundpos.
            self._data_chunk.seek(0, 0)
            pos = self._soundpos * self._framesize
            if pos:
                self._data_chunk.seek(pos, 0)
            self._data_seek_needed = 0
        if nframes == 0:
            return b''
        data = self._data_chunk.read(nframes * self._framesize)
        # Samples are stored little-endian; swap on big-endian hosts.
        if self._sampwidth != 1 and sys.byteorder == 'big':
            data = audioop.byteswap(data, self._sampwidth)
        if self._convert and data:
            data = self._convert(data)
        self._soundpos = self._soundpos + len(data) // (self._nchannels * self._sampwidth)
        return data

    #
    # Internal methods.
    #

    def _read_fmt_chunk(self, chunk):
        """Parse the 'fmt ' chunk and populate the format attributes."""
        try:
            # Common 14-byte prefix of the fmt chunk (little-endian).
            wFormatTag, self._nchannels, self._framerate, dwAvgBytesPerSec, wBlockAlign = struct.unpack_from('<HHLLH', chunk.read(14))
        except struct.error:
            raise EOFError from None
        if wFormatTag == WAVE_FORMAT_PCM:
            try:
                sampwidth = struct.unpack_from('<H', chunk.read(2))[0]
            except struct.error:
                raise EOFError from None
            # sampwidth is in bits; round up to whole bytes.
            self._sampwidth = (sampwidth + 7) // 8
            if not self._sampwidth:
                raise Error('bad sample width')
        else:
            # Only uncompressed PCM is supported.
            raise Error('unknown format: %r' % (wFormatTag,))
        if not self._nchannels:
            raise Error('bad # of channels')
        self._framesize = self._nchannels * self._sampwidth
        self._comptype = 'NONE'
        self._compname = 'not compressed'
274
+
275
class Wave_write:
    """Variables used in this class:

    These variables are user settable through appropriate methods
    of this class:
    _file -- the open file with methods write(), close(), tell(), seek()
              set through the __init__() method
    _comptype -- the AIFF-C compression type ('NONE' in AIFF)
              set through the setcomptype() or setparams() method
    _compname -- the human-readable AIFF-C compression type
              set through the setcomptype() or setparams() method
    _nchannels -- the number of audio channels
              set through the setnchannels() or setparams() method
    _sampwidth -- the number of bytes per audio sample
              set through the setsampwidth() or setparams() method
    _framerate -- the sampling frequency
              set through the setframerate() or setparams() method
    _nframes -- the number of audio frames written to the header
              set through the setnframes() or setparams() method

    These variables are used internally only:
    _datalength -- the size of the audio samples written to the header
    _nframeswritten -- the number of frames actually written
    _datawritten -- the size of the audio samples actually written
    """

    def __init__(self, f):
        # f may be a filename or an already-open binary file object; only
        # files we opened ourselves are closed by close().
        self._i_opened_the_file = None
        if isinstance(f, str):
            f = builtins.open(f, 'wb')
            self._i_opened_the_file = f
        try:
            self.initfp(f)
        except:
            # Don't leak the file descriptor if initialization fails.
            if self._i_opened_the_file:
                f.close()
            raise

    def initfp(self, file):
        """Reset all state for writing to an open file object."""
        self._file = file
        self._convert = None
        self._nchannels = 0
        self._sampwidth = 0
        self._framerate = 0
        self._nframes = 0
        self._nframeswritten = 0
        self._datawritten = 0
        self._datalength = 0
        self._headerwritten = False

    def __del__(self):
        self.close()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    #
    # User visible methods.
    #
    def setnchannels(self, nchannels):
        if self._datawritten:
            raise Error('cannot change parameters after starting to write')
        if nchannels < 1:
            raise Error('bad # of channels')
        self._nchannels = nchannels

    def getnchannels(self):
        if not self._nchannels:
            raise Error('number of channels not set')
        return self._nchannels

    def setsampwidth(self, sampwidth):
        if self._datawritten:
            raise Error('cannot change parameters after starting to write')
        # Only 1-4 byte PCM samples are representable in the header we write.
        if sampwidth < 1 or sampwidth > 4:
            raise Error('bad sample width')
        self._sampwidth = sampwidth

    def getsampwidth(self):
        if not self._sampwidth:
            raise Error('sample width not set')
        return self._sampwidth

    def setframerate(self, framerate):
        if self._datawritten:
            raise Error('cannot change parameters after starting to write')
        if framerate <= 0:
            raise Error('bad frame rate')
        # The header stores an integral rate; round non-integer input.
        self._framerate = int(round(framerate))

    def getframerate(self):
        if not self._framerate:
            raise Error('frame rate not set')
        return self._framerate

    def setnframes(self, nframes):
        if self._datawritten:
            raise Error('cannot change parameters after starting to write')
        self._nframes = nframes

    def getnframes(self):
        return self._nframeswritten

    def setcomptype(self, comptype, compname):
        if self._datawritten:
            raise Error('cannot change parameters after starting to write')
        # Only uncompressed PCM can be written.
        if comptype not in ('NONE',):
            raise Error('unsupported compression type')
        self._comptype = comptype
        self._compname = compname

    def getcomptype(self):
        return self._comptype

    def getcompname(self):
        return self._compname

    def setparams(self, params):
        """Set all parameters at once from a _wave_params-style tuple."""
        nchannels, sampwidth, framerate, nframes, comptype, compname = params
        if self._datawritten:
            raise Error('cannot change parameters after starting to write')
        self.setnchannels(nchannels)
        self.setsampwidth(sampwidth)
        self.setframerate(framerate)
        self.setnframes(nframes)
        self.setcomptype(comptype, compname)

    def getparams(self):
        if not self._nchannels or not self._sampwidth or not self._framerate:
            raise Error('not all parameters set')
        return _wave_params(self._nchannels, self._sampwidth, self._framerate,
                            self._nframes, self._comptype, self._compname)

    def setmark(self, id, pos, name):
        # For compatibility with the aifc module; WAVE has no marker support.
        raise Error('setmark() not supported')

    def getmark(self, id):
        raise Error('no marks')

    def getmarkers(self):
        return None

    def tell(self):
        return self._nframeswritten

    def writeframesraw(self, data):
        """Write audio frames without updating the header lengths."""
        if not isinstance(data, (bytes, bytearray)):
            # Accept any buffer-protocol object as raw bytes.
            data = memoryview(data).cast('B')
        self._ensure_header_written(len(data))
        nframes = len(data) // (self._sampwidth * self._nchannels)
        if self._convert:
            data = self._convert(data)
        # On-disk samples are little-endian; swap on big-endian hosts.
        if self._sampwidth != 1 and sys.byteorder == 'big':
            data = audioop.byteswap(data, self._sampwidth)
        self._file.write(data)
        self._datawritten += len(data)
        self._nframeswritten = self._nframeswritten + nframes

    def writeframes(self, data):
        """Write audio frames and patch up the header if sizes changed."""
        self.writeframesraw(data)
        if self._datalength != self._datawritten:
            self._patchheader()

    def close(self):
        """Patch up the file header and close the output file."""
        try:
            if self._file:
                self._ensure_header_written(0)
                if self._datalength != self._datawritten:
                    self._patchheader()
                self._file.flush()
        finally:
            # Always drop references, even if flushing/patching failed.
            self._file = None
            file = self._i_opened_the_file
            if file:
                self._i_opened_the_file = None
                file.close()

    #
    # Internal methods.
    #

    def _ensure_header_written(self, datasize):
        """Write the RIFF/fmt/data headers once, before the first samples."""
        if not self._headerwritten:
            if not self._nchannels:
                raise Error('# channels not specified')
            if not self._sampwidth:
                raise Error('sample width not specified')
            if not self._framerate:
                raise Error('sampling rate not specified')
            self._write_header(datasize)

    def _write_header(self, initlength):
        """Write the full WAVE header, sized for initlength bytes of data."""
        assert not self._headerwritten
        self._file.write(b'RIFF')
        if not self._nframes:
            self._nframes = initlength // (self._nchannels * self._sampwidth)
        self._datalength = self._nframes * self._nchannels * self._sampwidth
        try:
            # Remember where the RIFF length lives so _patchheader can fix it;
            # unseekable streams (no tell()) leave this as None.
            self._form_length_pos = self._file.tell()
        except (AttributeError, OSError):
            self._form_length_pos = None
        self._file.write(struct.pack('<L4s4sLHHLLHH4s',
            36 + self._datalength, b'WAVE', b'fmt ', 16,
            WAVE_FORMAT_PCM, self._nchannels, self._framerate,
            self._nchannels * self._framerate * self._sampwidth,
            self._nchannels * self._sampwidth,
            self._sampwidth * 8, b'data'))
        if self._form_length_pos is not None:
            self._data_length_pos = self._file.tell()
        self._file.write(struct.pack('<L', self._datalength))
        self._headerwritten = True

    def _patchheader(self):
        """Rewrite the RIFF and data chunk lengths to match what was written."""
        assert self._headerwritten
        if self._datawritten == self._datalength:
            return
        curpos = self._file.tell()
        self._file.seek(self._form_length_pos, 0)
        self._file.write(struct.pack('<L', 36 + self._datawritten))
        self._file.seek(self._data_length_pos, 0)
        self._file.write(struct.pack('<L', self._datawritten))
        self._file.seek(curpos, 0)
        self._datalength = self._datawritten
501
+
502
def open(f, mode=None):
    """Open a WAVE file for reading ('r'/'rb') or writing ('w'/'wb').

    f may be a filename or an open file object.  When mode is None it is
    taken from f.mode if present, defaulting to 'rb'.
    """
    if mode is None:
        mode = getattr(f, 'mode', 'rb')
    if mode == 'r' or mode == 'rb':
        return Wave_read(f)
    if mode == 'w' or mode == 'wb':
        return Wave_write(f)
    raise Error("mode must be 'r', 'rb', 'w', or 'wb'")
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/colorpicker/Example.svelte ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
<!-- Example renderer for the ColorPicker component: shows the value as a
     solid color swatch inside a gallery or table cell. -->
<script lang="ts">
	// Color string to display; a null value falls back to black below.
	export let value: string | null;
	// Rendering context: "gallery" or "table" picks the matching CSS class.
	export let type: "gallery" | "table";
	// Whether this example is the currently selected one.
	export let selected = false;
</script>

<div
	style="background-color: {value ? value : 'black'}"
	class:table={type === "table"}
	class:gallery={type === "gallery"}
	class:selected
/>

<style>
	div {
		width: var(--size-10);
		height: var(--size-10);
	}
	.table {
		/* Center the swatch within its table cell. */
		margin: 0 auto;
	}
</style>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/colorpicker/Index.svelte ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
<!-- Gradio ColorPicker wrapper: hosts the shared Colorpicker inside a Block
     with a status tracker and re-dispatches its events through gradio. -->
<svelte:options accessors={true} />

<script context="module" lang="ts">
	// Re-export the underlying pieces for consumers that embed them directly.
	export { default as BaseColorPicker } from "./shared/Colorpicker.svelte";
	export { default as BaseExample } from "./Example.svelte";
</script>

<script lang="ts">
	import type { Gradio } from "@gradio/utils";
	import Colorpicker from "./shared/Colorpicker.svelte";
	import { Block } from "@gradio/atoms";
	import { StatusTracker } from "@gradio/statustracker";
	import type { LoadingStatus } from "@gradio/statustracker";

	export let label = "ColorPicker";
	export let info: string | undefined = undefined;
	export let elem_id = "";
	export let elem_classes: string[] = [];
	export let visible = true;
	// Current color value (two-way bound to the inner Colorpicker).
	export let value: string;
	export let value_is_output = false;
	export let show_label: boolean;
	export let container = true;
	export let scale: number | null = null;
	export let min_width: number | undefined = undefined;
	export let loading_status: LoadingStatus;
	export let root: string;
	// Event bridge back to the Gradio runtime.
	export let gradio: Gradio<{
		change: never;
		input: never;
		submit: never;
		blur: never;
		focus: never;
		clear_status: LoadingStatus;
	}>;
	export let interactive: boolean;
	export let disabled = false;
</script>

<Block {visible} {elem_id} {elem_classes} {container} {scale} {min_width}>
	<StatusTracker
		autoscroll={gradio.autoscroll}
		i18n={gradio.i18n}
		{...loading_status}
		on:clear_status={() => gradio.dispatch("clear_status", loading_status)}
	/>

	<!-- The picker is read-only when the component is non-interactive OR
	     explicitly disabled. -->
	<Colorpicker
		bind:value
		bind:value_is_output
		{root}
		{label}
		{info}
		{show_label}
		disabled={!interactive || disabled}
		on:change={() => gradio.dispatch("change")}
		on:input={() => gradio.dispatch("input")}
		on:submit={() => gradio.dispatch("submit")}
		on:blur={() => gradio.dispatch("blur")}
		on:focus={() => gradio.dispatch("focus")}
	/>
</Block>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/colorpicker/package.json ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "@gradio/colorpicker",
3
+ "version": "0.4.1",
4
+ "description": "Gradio UI packages",
5
+ "type": "module",
6
+ "author": "",
7
+ "license": "ISC",
8
+ "private": false,
9
+ "main_changeset": true,
10
+ "main": "./Index.svelte",
11
+ "exports": {
12
+ ".": {
13
+ "gradio": "./Index.svelte",
14
+ "svelte": "./dist/Index.svelte",
15
+ "types": "./dist/Index.svelte.d.ts"
16
+ },
17
+ "./example": {
18
+ "gradio": "./Example.svelte",
19
+ "svelte": "./dist/Example.svelte",
20
+ "types": "./dist/Example.svelte.d.ts"
21
+ },
22
+ "./package.json": "./package.json"
23
+ },
24
+ "dependencies": {
25
+ "@gradio/atoms": "workspace:^",
26
+ "@gradio/statustracker": "workspace:^",
27
+ "@gradio/utils": "workspace:^",
28
+ "@gradio/icons": "workspace:^",
29
+ "tinycolor2": "^1.6.0",
30
+ "@types/tinycolor2": "^1.4.6"
31
+ },
32
+ "devDependencies": {
33
+ "@gradio/preview": "workspace:^"
34
+ },
35
+ "peerDependencies": {
36
+ "svelte": "^4.0.0"
37
+ },
38
+ "repository": {
39
+ "type": "git",
40
+ "url": "git+https://github.com/gradio-app/gradio.git",
41
+ "directory": "js/colorpicker"
42
+ }
43
+ }
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/colorpicker/shared/Colorpicker.svelte ADDED
@@ -0,0 +1,416 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <script lang="ts">
2
+ import { createEventDispatcher, afterUpdate, onMount, tick } from "svelte";
3
+ import tinycolor from "tinycolor2";
4
+ import { BlockTitle } from "@gradio/atoms";
5
+ import { click_outside } from "./events";
6
+ import { Eyedropper } from "@gradio/icons";
7
+ import { hsva_to_rgba, format_color } from "./utils";
8
+
9
+ export let value = "#000000";
10
+ export let value_is_output = false;
11
+ export let label: string;
12
+ export let info: string | undefined = undefined;
13
+ export let disabled = false;
14
+ export let show_label = true;
15
+ export let root: string;
16
+
17
+ export let current_mode: "hex" | "rgb" | "hsl" = "hex";
18
+ export let dialog_open = false;
19
+
20
+ let eyedropper_supported = false;
21
+
22
+ let sl_wrap: HTMLDivElement;
23
+ let hue_wrap: HTMLDivElement;
24
+
25
+ const dispatch = createEventDispatcher<{
26
+ change: string;
27
+ click_outside: void;
28
+ input: undefined;
29
+ submit: undefined;
30
+ blur: undefined;
31
+ focus: undefined;
32
+ selected: string;
33
+ close: void;
34
+ }>();
35
+
36
+ let sl_marker_pos = [0, 0];
37
+ let sl_rect: DOMRect | null = null;
38
+ let sl_moving = false;
39
+ let sl = [0, 0];
40
+
41
+ let hue = 0;
42
+ let hue_marker_pos = 0;
43
+ let hue_rect: DOMRect | null = null;
44
+ let hue_moving = false;
45
+
46
+ function handle_hue_down(
47
+ event: MouseEvent & { currentTarget: HTMLDivElement }
48
+ ): void {
49
+ hue_rect = event.currentTarget.getBoundingClientRect();
50
+ hue_moving = true;
51
+ update_hue_from_mouse(event.clientX);
52
+ }
53
+
54
+ function update_hue_from_mouse(x: number): void {
55
+ if (!hue_rect) return;
56
+ const _x = Math.max(0, Math.min(x - hue_rect.left, hue_rect.width)); // Get the x-coordinate relative to the box
57
+ hue_marker_pos = _x;
58
+ const _hue = (_x / hue_rect.width) * 360; // Scale the x position to a hue value (0-360)
59
+
60
+ hue = _hue;
61
+
62
+ value = hsva_to_rgba({ h: _hue, s: sl[0], v: sl[1], a: 1 });
63
+ }
64
+
65
+ function update_color_from_mouse(x: number, y: number): void {
66
+ if (!sl_rect) return;
67
+ const _x = Math.max(0, Math.min(x - sl_rect.left, sl_rect.width));
68
+ const _y = Math.max(0, Math.min(y - sl_rect.top, sl_rect.height));
69
+ sl_marker_pos = [_x, _y];
70
+ const _hsva = {
71
+ h: hue * 1,
72
+ s: _x / sl_rect.width,
73
+ v: 1 - _y / sl_rect.height,
74
+ a: 1
75
+ };
76
+
77
+ sl = [_hsva.s, _hsva.v];
78
+
79
+ value = hsva_to_rgba(_hsva);
80
+ }
81
+
82
+ function handle_sl_down(
83
+ event: MouseEvent & { currentTarget: HTMLDivElement }
84
+ ): void {
85
+ sl_moving = true;
86
+ sl_rect = event.currentTarget.getBoundingClientRect();
87
+ update_color_from_mouse(event.clientX, event.clientY);
88
+ }
89
+
90
+ function handle_move(event: MouseEvent): void {
91
+ if (sl_moving) update_color_from_mouse(event.clientX, event.clientY);
92
+ if (hue_moving) update_hue_from_mouse(event.clientX);
93
+ }
94
+
95
+ function handle_end(): void {
96
+ sl_moving = false;
97
+ hue_moving = false;
98
+ }
99
+
100
+ async function update_mouse_from_color(color: string): Promise<void> {
101
+ if (sl_moving || hue_moving) return;
102
+ await tick();
103
+ if (!color) return;
104
+
105
+ if (!sl_rect && sl_wrap) {
106
+ sl_rect = sl_wrap.getBoundingClientRect();
107
+ }
108
+
109
+ if (!hue_rect && hue_wrap) {
110
+ hue_rect = hue_wrap.getBoundingClientRect();
111
+ }
112
+
113
+ // Exit if we still don't have valid rectangles
114
+ if (!sl_rect || !hue_rect) return;
115
+
116
+ const hsva = tinycolor(color).toHsv();
117
+ const _x = hsva.s * sl_rect.width;
118
+ const _y = (1 - hsva.v) * sl_rect.height;
119
+ sl_marker_pos = [_x, _y];
120
+ sl = [hsva.s, hsva.v];
121
+ hue = hsva.h;
122
+ hue_marker_pos = (hsva.h / 360) * hue_rect.width;
123
+ }
124
+
125
+ function request_eyedropper(): void {
126
+ // @ts-ignore
127
+ const eyeDropper = new EyeDropper();
128
+
129
+ eyeDropper.open().then((result: { sRGBHex: string }) => {
130
+ value = result.sRGBHex;
131
+ });
132
+ }
133
+
134
+ const modes = [
135
+ ["Hex", "hex"],
136
+ ["RGB", "rgb"],
137
+ ["HSL", "hsl"]
138
+ ] as const;
139
+
140
+ $: color_string = format_color(value, current_mode);
141
+ $: color_string && dispatch("selected", color_string);
142
+
143
+ onMount(async () => {
144
+ // @ts-ignore
145
+ eyedropper_supported = window !== undefined && !!window.EyeDropper;
146
+ });
147
+
148
+ function handle_click_outside(): void {
149
+ dialog_open = false;
150
+ }
151
+
152
+ function handle_change(): void {
153
+ dispatch("change", value);
154
+ if (!value_is_output) {
155
+ dispatch("input");
156
+ }
157
+ }
158
+
159
+ afterUpdate(() => {
160
+ value_is_output = false;
161
+ });
162
+
163
+ $: update_mouse_from_color(value);
164
+ $: value, handle_change();
165
+
166
+ function handle_click(): void {
167
+ dispatch("selected", color_string);
168
+ dispatch("close");
169
+ }
170
+ </script>
171
+
172
+ <BlockTitle {root} {show_label} {info}>{label}</BlockTitle>
173
+ <button
174
+ class="dialog-button"
175
+ style:background={value}
176
+ {disabled}
177
+ on:click={() => {
178
+ update_mouse_from_color(value);
179
+ dialog_open = !dialog_open;
180
+ }}
181
+ />
182
+
183
+ <svelte:window on:mousemove={handle_move} on:mouseup={handle_end} />
184
+
185
+ {#if dialog_open}
186
+ <div
187
+ class="color-picker"
188
+ on:focus
189
+ on:blur
190
+ use:click_outside={handle_click_outside}
191
+ >
192
+ <!-- svelte-ignore a11y-no-static-element-interactions -->
193
+ <div
194
+ class="color-gradient"
195
+ on:mousedown={handle_sl_down}
196
+ style="--hue:{hue}"
197
+ bind:this={sl_wrap}
198
+ >
199
+ <div
200
+ class="marker"
201
+ style:transform="translate({sl_marker_pos[0]}px,{sl_marker_pos[1]}px)"
202
+ style:background={value}
203
+ />
204
+ </div>
205
+ <!-- svelte-ignore a11y-no-static-element-interactions -->
206
+ <div class="hue-slider" on:mousedown={handle_hue_down} bind:this={hue_wrap}>
207
+ <div
208
+ class="marker"
209
+ style:background={"hsl(" + hue + ", 100%, 50%)"}
210
+ style:transform="translateX({hue_marker_pos}px)"
211
+ />
212
+ </div>
213
+
214
+ <div class="input">
215
+ <button class="swatch" style:background={value} on:click={handle_click}
216
+ ></button>
217
+ <div>
218
+ <div class="input-wrap">
219
+ <input
220
+ type="text"
221
+ bind:value={color_string}
222
+ on:change={(e) => (value = e.currentTarget.value)}
223
+ />
224
+ <button class="eyedropper" on:click={request_eyedropper}>
225
+ {#if eyedropper_supported}
226
+ <Eyedropper />
227
+ {/if}
228
+ </button>
229
+ </div>
230
+
231
+ <div class="buttons">
232
+ {#each modes as [label, value]}
233
+ <button
234
+ class="button"
235
+ class:active={current_mode === value}
236
+ on:click={() => (current_mode = value)}>{label}</button
237
+ >
238
+ {/each}
239
+ </div>
240
+ </div>
241
+ </div>
242
+ </div>
243
+ {/if}
244
+
245
+ <style>
246
+ .dialog-button {
247
+ display: block;
248
+ width: var(--size-10);
249
+ height: var(--size-5);
250
+ border: var(--block-border-width) solid var(--block-border-color);
251
+ }
252
+
253
+ .dialog-button:disabled {
254
+ cursor: not-allowed;
255
+ }
256
+
257
+ .input {
258
+ display: flex;
259
+ align-items: center;
260
+ padding: 0 10px 15px;
261
+ }
262
+
263
+ .input input {
264
+ height: 30px;
265
+ width: 100%;
266
+ flex-shrink: 1;
267
+ border-bottom-left-radius: 0;
268
+ border: 1px solid var(--block-border-color);
269
+ letter-spacing: -0.05rem;
270
+ border-left: none;
271
+ border-right: none;
272
+ font-family: var(--font-mono);
273
+ font-size: var(--scale-000);
274
+ padding-left: 15px;
275
+ padding-right: 0;
276
+ background-color: var(--background-fill-secondary);
277
+ color: var(--block-label-text-color);
278
+ }
279
+
280
+ .swatch {
281
+ width: 50px;
282
+ height: 50px;
283
+ border-top-left-radius: 15px;
284
+ border-bottom-left-radius: 15px;
285
+ flex-shrink: 0;
286
+ border: 1px solid var(--block-border-color);
287
+ }
288
+
289
+ .color-picker {
290
+ width: 230px;
291
+ background: var(--background-fill-secondary);
292
+ border: 1px solid var(--block-border-color);
293
+ border-radius: var(--block-radius);
294
+ margin: var(--spacing-sm) 0;
295
+ }
296
+
297
+ .buttons {
298
+ height: 20px;
299
+ display: flex;
300
+ justify-content: stretch;
301
+ gap: 0px;
302
+ }
303
+
304
+ .buttons button {
305
+ display: flex;
306
+ align-items: center;
307
+ justify-content: center;
308
+ border: 1px solid var(--block-border-color);
309
+ background: var(--background-fill-secondary);
310
+ padding: 3px 6px;
311
+ font-size: var(--scale-000);
312
+ cursor: pointer;
313
+ border-right: none;
314
+ width: 100%;
315
+ border-top: none;
316
+ }
317
+
318
+ .buttons button:first-child {
319
+ border-left: none;
320
+ }
321
+
322
+ .buttons button:last-child {
323
+ border-bottom-right-radius: 15px;
324
+ border-right: 1px solid var(--block-border-color);
325
+ }
326
+
327
+ .buttons button:hover {
328
+ background: var(--background-fill-secondary-hover);
329
+ font-weight: var(--weight-bold);
330
+ }
331
+
332
+ .buttons button.active {
333
+ background: var(--background-fill-secondary);
334
+ font-weight: var(--weight-bold);
335
+ }
336
+
337
+ .input-wrap {
338
+ display: flex;
339
+ }
340
+
341
+ .color-gradient {
342
+ position: relative;
343
+ --hue: white;
344
+ background: linear-gradient(rgba(0, 0, 0, 0), #000),
345
+ linear-gradient(90deg, #fff, hsl(var(--hue), 100%, 50%));
346
+ width: 100%;
347
+ height: 150px;
348
+ border-radius: var(--radius-sm) var(--radius-sm) 0 0;
349
+ }
350
+
351
+ .hue-slider {
352
+ position: relative;
353
+ width: 90%;
354
+ margin: 10px auto;
355
+ height: 10px;
356
+ border-radius: 5px;
357
+ background: linear-gradient(
358
+ to right,
359
+ hsl(0, 100%, 50%) 0%,
360
+ #ff0 17%,
361
+ lime 33%,
362
+ cyan 50%,
363
+ blue 67%,
364
+ magenta 83%,
365
+ red 100%
366
+ );
367
+ }
368
+
369
+ .swatch {
370
+ width: 50px;
371
+ height: 50px;
372
+ border-top-left-radius: 15px;
373
+ border-bottom-left-radius: 15px;
374
+ flex-shrink: 0;
375
+ border: 1px solid var(--block-border-color);
376
+ }
377
+
378
+ .eyedropper {
379
+ display: flex;
380
+ align-items: center;
381
+ justify-content: center;
382
+ width: 25px;
383
+ height: 30px;
384
+ border-top-right-radius: 15px;
385
+ border: 1px solid var(--block-border-color);
386
+ border-left: none;
387
+ background: var(--background-fill-secondary);
388
+ height: 30px;
389
+ padding: 7px 7px 5px 0px;
390
+ cursor: pointer;
391
+ }
392
+
393
+ .marker {
394
+ position: absolute;
395
+ width: 14px;
396
+ height: 14px;
397
+ border-radius: 50%;
398
+ border: 2px solid white;
399
+ top: -2px;
400
+ left: -7px;
401
+ box-shadow: 0 1px 5px rgba(0, 0, 0, 0.1);
402
+ pointer-events: none;
403
+ }
404
+
405
+ input {
406
+ width: 100%;
407
+ height: 30px;
408
+ border: 1px solid var(--block-border-color);
409
+ border-radius: var(--radius-sm);
410
+ padding: 0 var(--size-2);
411
+ font-family: var(--font-mono);
412
+ font-size: var(--scale-000);
413
+ color: var(--block-label-text-color);
414
+ background-color: var(--background-fill-primary);
415
+ }
416
+ </style>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/colorpicker/shared/events.ts ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /**
2
+ * Svelte action to handle clicks outside of a DOM node
3
+ * @param node DOM node to check the click is outside of
4
+ * @param callback callback function to call if click is outside
5
+ * @returns svelte action return object with destroy method to remove event listener
6
+ */
7
+ export function click_outside(
8
+ node: Node,
9
+ callback: (arg: MouseEvent) => void
10
+ ): any {
11
+ const handle_click = (event: MouseEvent): void => {
12
+ if (
13
+ node &&
14
+ !node.contains(event.target as Node) &&
15
+ !event.defaultPrevented
16
+ ) {
17
+ callback(event);
18
+ }
19
+ };
20
+
21
+ document.addEventListener("mousedown", handle_click, true);
22
+
23
+ return {
24
+ destroy() {
25
+ document.removeEventListener("mousedown", handle_click, true);
26
+ }
27
+ };
28
+ }
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Back.svelte ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ width="12px"
4
+ height="24px"
5
+ fill="currentColor"
6
+ stroke-width="1.5"
7
+ viewBox="0 0 12 24"
8
+ >
9
+ <path
10
+ d="M9 6L3 12L9 18"
11
+ stroke="currentColor"
12
+ stroke-width="2"
13
+ stroke-linecap="round"
14
+ stroke-linejoin="round"
15
+ fill="none"
16
+ />
17
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Backward.svelte ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ width="24px"
4
+ height="24px"
5
+ fill="currentColor"
6
+ stroke-width="1.5"
7
+ viewBox="0 0 24 24"
8
+ color="currentColor"
9
+ ><path
10
+ stroke="currentColor"
11
+ stroke-width="1.5"
12
+ stroke-linecap="round"
13
+ stroke-linejoin="round"
14
+ d="M21.044 5.704a.6.6 0 0 1 .956.483v11.626a.6.6 0 0 1-.956.483l-7.889-5.813a.6.6 0 0 1 0-.966l7.89-5.813ZM10.044 5.704a.6.6 0 0 1 .956.483v11.626a.6.6 0 0 1-.956.483l-7.888-5.813a.6.6 0 0 1 0-.966l7.888-5.813Z"
15
+ /></svg
16
+ >
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Brush.svelte ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg width="100%" height="100%" viewBox="0 0 32 32">
2
+ <path
3
+ d="M28.828 3.172a4.094 4.094 0 0 0-5.656 0L4.05 22.292A6.954 6.954 0 0 0 2 27.242V30h2.756a6.952 6.952 0 0 0 4.95-2.05L28.828 8.829a3.999 3.999 0 0 0 0-5.657zM10.91 18.26l2.829 2.829l-2.122 2.121l-2.828-2.828zm-2.619 8.276A4.966 4.966 0 0 1 4.756 28H4v-.759a4.967 4.967 0 0 1 1.464-3.535l1.91-1.91l2.829 2.828zM27.415 7.414l-12.261 12.26l-2.829-2.828l12.262-12.26a2.047 2.047 0 0 1 2.828 0a2 2 0 0 1 0 2.828z"
4
+ fill="currentColor"
5
+ />
6
+ <path
7
+ d="M6.5 15a3.5 3.5 0 0 1-2.475-5.974l3.5-3.5a1.502 1.502 0 0 0 0-2.121a1.537 1.537 0 0 0-2.121 0L3.415 5.394L2 3.98l1.99-1.988a3.585 3.585 0 0 1 4.95 0a3.504 3.504 0 0 1 0 4.949L5.439 10.44a1.502 1.502 0 0 0 0 2.121a1.537 1.537 0 0 0 2.122 0l4.024-4.024L13 9.95l-4.025 4.024A3.475 3.475 0 0 1 6.5 15z"
8
+ fill="currentColor"
9
+ />
10
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/BrushSize.svelte ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ width="100%"
4
+ height="100%"
5
+ viewBox="0 0 24 24"
6
+ ><path
7
+ fill="currentColor"
8
+ d="M2.753 2.933a.75.75 0 0 1 .814-.68l3.043.272c2.157.205 4.224.452 5.922.732c1.66.273 3.073.594 3.844.983c.197.1.412.233.578.415c.176.192.352.506.28.9c-.067.356-.304.59-.487.729a3.001 3.001 0 0 1-.695.369c-1.02.404-2.952.79-5.984 1.169c-1.442.18-2.489.357-3.214.522c.205.045.43.089.674.132c.992.174 2.241.323 3.568.437a31.21 31.21 0 0 1 3.016.398c.46.087.893.186 1.261.296c.352.105.707.236.971.412c.13.086.304.225.42.437a.988.988 0 0 1 .063.141A1.75 1.75 0 0 0 14.5 12.25v.158c-.758.154-1.743.302-2.986.444c-2.124.243-3.409.55-4.117.859c-.296.128-.442.236-.508.3c.026.037.073.094.156.17c.15.138.369.29.65.45c.56.316 1.282.61 1.979.838l2.637.814a.75.75 0 1 1-.443 1.433l-2.655-.819c-.754-.247-1.58-.578-2.257-.96a5.082 5.082 0 0 1-.924-.65c-.255-.233-.513-.544-.62-.935c-.12-.441-.016-.88.274-1.244c.261-.328.656-.574 1.113-.773c.92-.4 2.387-.727 4.545-.974c1.366-.156 2.354-.313 3.041-.462a16.007 16.007 0 0 0-.552-.114a29.716 29.716 0 0 0-2.865-.378c-1.352-.116-2.649-.27-3.7-.454c-.524-.092-1-.194-1.395-.307c-.376-.106-.75-.241-1.021-.426a1.186 1.186 0 0 1-.43-.49a.934.934 0 0 1 .059-.873c.13-.213.32-.352.472-.442a3.23 3.23 0 0 1 .559-.251c.807-.287 2.222-.562 4.37-.83c2.695-.338 4.377-.666 5.295-.962c-.638-.21-1.623-.427-2.89-.635c-1.65-.273-3.679-.515-5.816-.718l-3.038-.272a.75.75 0 0 1-.68-.814M17 12.25a.75.75 0 0 0-1.5 0v4.19l-.72-.72a.75.75 0 1 0-1.06 1.06l2 2a.75.75 0 0 0 1.06 0l2-2a.75.75 0 1 0-1.06-1.06l-.72.72z"
9
+ /></svg
10
+ >
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Calendar.svelte ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ width="24px"
4
+ height="24px"
5
+ viewBox="0 0 24 24"
6
+ >
7
+ <rect
8
+ x="2"
9
+ y="4"
10
+ width="20"
11
+ height="18"
12
+ stroke="currentColor"
13
+ stroke-width="2"
14
+ stroke-linecap="round"
15
+ stroke-linejoin="round"
16
+ fill="none"
17
+ />
18
+ <line
19
+ x1="2"
20
+ y1="9"
21
+ x2="22"
22
+ y2="9"
23
+ stroke="currentColor"
24
+ stroke-width="2"
25
+ stroke-linecap="round"
26
+ stroke-linejoin="round"
27
+ fill="none"
28
+ />
29
+ <line
30
+ x1="7"
31
+ y1="2"
32
+ x2="7"
33
+ y2="6"
34
+ stroke="currentColor"
35
+ stroke-width="2"
36
+ stroke-linecap="round"
37
+ stroke-linejoin="round"
38
+ fill="none"
39
+ />
40
+ <line
41
+ x1="17"
42
+ y1="2"
43
+ x2="17"
44
+ y2="6"
45
+ stroke="currentColor"
46
+ stroke-width="2"
47
+ stroke-linecap="round"
48
+ stroke-linejoin="round"
49
+ fill="none"
50
+ />
51
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Camera.svelte ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ width="100%"
4
+ height="100%"
5
+ viewBox="0 0 24 24"
6
+ fill="none"
7
+ stroke="currentColor"
8
+ stroke-width="1.5"
9
+ stroke-linecap="round"
10
+ stroke-linejoin="round"
11
+ class="feather feather-camera"
12
+ >
13
+ <path
14
+ d="M23 19a2 2 0 0 1-2 2H3a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h4l2-3h6l2 3h4a2 2 0 0 1 2 2z"
15
+ />
16
+ <circle cx="12" cy="13" r="4" />
17
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Chart.svelte ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg viewBox="0 0 32 32">
2
+ <path
3
+ d="M28.828 3.172a4.094 4.094 0 0 0-5.656 0L4.05 22.292A6.954 6.954 0 0 0 2 27.242V30h2.756a6.952 6.952 0 0 0 4.95-2.05L28.828 8.829a3.999 3.999 0 0 0 0-5.657zM10.91 18.26l2.829 2.829l-2.122 2.121l-2.828-2.828zm-2.619 8.276A4.966 4.966 0 0 1 4.756 28H4v-.759a4.967 4.967 0 0 1 1.464-3.535l1.91-1.91l2.829 2.828zM27.415 7.414l-12.261 12.26l-2.829-2.828l12.262-12.26a2.047 2.047 0 0 1 2.828 0a2 2 0 0 1 0 2.828z"
4
+ fill="currentColor"
5
+ />
6
+ <path
7
+ d="M6.5 15a3.5 3.5 0 0 1-2.475-5.974l3.5-3.5a1.502 1.502 0 0 0 0-2.121a1.537 1.537 0 0 0-2.121 0L3.415 5.394L2 3.98l1.99-1.988a3.585 3.585 0 0 1 4.95 0a3.504 3.504 0 0 1 0 4.949L5.439 10.44a1.502 1.502 0 0 0 0 2.121a1.537 1.537 0 0 0 2.122 0l4.024-4.024L13 9.95l-4.025 4.024A3.475 3.475 0 0 1 6.5 15z"
8
+ fill="currentColor"
9
+ />
10
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Chat.svelte ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ xmlns:xlink="http://www.w3.org/1999/xlink"
4
+ aria-hidden="true"
5
+ role="img"
6
+ class="iconify iconify--carbon"
7
+ width="100%"
8
+ height="100%"
9
+ preserveAspectRatio="xMidYMid meet"
10
+ viewBox="0 0 32 32"
11
+ >
12
+ <path
13
+ fill="currentColor"
14
+ d="M17.74 30L16 29l4-7h6a2 2 0 0 0 2-2V8a2 2 0 0 0-2-2H6a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h9v2H6a4 4 0 0 1-4-4V8a4 4 0 0 1 4-4h20a4 4 0 0 1 4 4v12a4 4 0 0 1-4 4h-4.84Z"
15
+ />
16
+ <path fill="currentColor" d="M8 10h16v2H8zm0 6h10v2H8z" />
17
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Check.svelte ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ viewBox="2 0 20 20"
4
+ fill="none"
5
+ stroke="currentColor"
6
+ aria-hidden="true"
7
+ stroke-width="1.5"
8
+ stroke-linecap="round"
9
+ stroke-linejoin="round"><polyline points="20 6 9 17 4 12" /></svg
10
+ >
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Clear.svelte ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ width="100%"
3
+ height="100%"
4
+ viewBox="0 0 24 24"
5
+ version="1.1"
6
+ xmlns="http://www.w3.org/2000/svg"
7
+ xmlns:xlink="http://www.w3.org/1999/xlink"
8
+ xml:space="preserve"
9
+ stroke="currentColor"
10
+ style="fill-rule:evenodd;clip-rule:evenodd;stroke-linecap:round;stroke-linejoin:round;"
11
+ >
12
+ <g
13
+ transform="matrix(1.14096,-0.140958,-0.140958,1.14096,-0.0559523,0.0559523)"
14
+ >
15
+ <path
16
+ d="M18,6L6.087,17.913"
17
+ style="fill:none;fill-rule:nonzero;stroke-width:2px;"
18
+ />
19
+ </g>
20
+ <path
21
+ d="M4.364,4.364L19.636,19.636"
22
+ style="fill:none;fill-rule:nonzero;stroke-width:2px;"
23
+ />
24
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Color.svelte ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg width="100%" height="100%" viewBox="0 0 32 32">
2
+ <circle cx="10" cy="12" r="2" fill="currentColor" />
3
+ <circle cx="16" cy="9" r="2" fill="currentColor" />
4
+ <circle cx="22" cy="12" r="2" fill="currentColor" />
5
+ <circle cx="23" cy="18" r="2" fill="currentColor" />
6
+ <circle cx="19" cy="23" r="2" fill="currentColor" />
7
+ <path
8
+ fill="currentColor"
9
+ d="M16.54 2A14 14 0 0 0 2 16a4.82 4.82 0 0 0 6.09 4.65l1.12-.31a3 3 0 0 1 3.79 2.9V27a3 3 0 0 0 3 3a14 14 0 0 0 14-14.54A14.05 14.05 0 0 0 16.54 2Zm8.11 22.31A11.93 11.93 0 0 1 16 28a1 1 0 0 1-1-1v-3.76a5 5 0 0 0-5-5a5.07 5.07 0 0 0-1.33.18l-1.12.31A2.82 2.82 0 0 1 4 16A12 12 0 0 1 16.47 4A12.18 12.18 0 0 1 28 15.53a11.89 11.89 0 0 1-3.35 8.79Z"
10
+ />
11
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Community.svelte ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ <svg id="icon" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 32 32"
2
+ ><path
3
+ d="M23,20a5,5,0,0,0-3.89,1.89L11.8,17.32a4.46,4.46,0,0,0,0-2.64l7.31-4.57A5,5,0,1,0,18,7a4.79,4.79,0,0,0,.2,1.32l-7.31,4.57a5,5,0,1,0,0,6.22l7.31,4.57A4.79,4.79,0,0,0,18,25a5,5,0,1,0,5-5ZM23,4a3,3,0,1,1-3,3A3,3,0,0,1,23,4ZM7,19a3,3,0,1,1,3-3A3,3,0,0,1,7,19Zm16,9a3,3,0,1,1,3-3A3,3,0,0,1,23,28Z"
4
+ fill="currentColor"
5
+ /></svg
6
+ >
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Download.svelte ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ width="100%"
4
+ height="100%"
5
+ viewBox="0 0 32 32"
6
+ ><path
7
+ fill="currentColor"
8
+ d="M26 24v4H6v-4H4v4a2 2 0 0 0 2 2h20a2 2 0 0 0 2-2v-4zm0-10l-1.41-1.41L17 20.17V2h-2v18.17l-7.59-7.58L6 14l10 10l10-10z"
9
+ /></svg
10
+ >
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Edit.svelte ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ width="100%"
4
+ height="100%"
5
+ viewBox="0 0 24 24"
6
+ fill="none"
7
+ stroke="currentColor"
8
+ stroke-width="1.5"
9
+ stroke-linecap="round"
10
+ stroke-linejoin="round"
11
+ class="feather feather-edit-2"
12
+ >
13
+ <path d="M17 3a2.828 2.828 0 1 1 4 4L7.5 20.5 2 22l1.5-5.5L17 3z" />
14
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Erase.svelte ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ width="100%"
4
+ height="100%"
5
+ viewBox="0 0 24 24"
6
+ ><g fill="none"
7
+ ><path
8
+ fill="currentColor"
9
+ d="m5.505 11.41l.53.53l-.53-.53ZM3 14.952h-.75H3ZM9.048 21v.75V21ZM11.41 5.505l-.53-.53l.53.53Zm1.831 12.34a.75.75 0 0 0 1.06-1.061l-1.06 1.06ZM7.216 9.697a.75.75 0 1 0-1.06 1.061l1.06-1.06Zm10.749 2.362l-5.905 5.905l1.06 1.06l5.905-5.904l-1.06-1.06Zm-11.93-.12l5.905-5.905l-1.06-1.06l-5.905 5.904l1.06 1.06Zm0 6.025c-.85-.85-1.433-1.436-1.812-1.933c-.367-.481-.473-.79-.473-1.08h-1.5c0 .749.312 1.375.78 1.99c.455.596 1.125 1.263 1.945 2.083l1.06-1.06Zm-1.06-7.086c-.82.82-1.49 1.488-1.945 2.084c-.468.614-.78 1.24-.78 1.99h1.5c0-.29.106-.6.473-1.08c.38-.498.962-1.083 1.812-1.933l-1.06-1.06Zm7.085 7.086c-.85.85-1.435 1.433-1.933 1.813c-.48.366-.79.472-1.08.472v1.5c.75 0 1.376-.312 1.99-.78c.596-.455 1.264-1.125 2.084-1.945l-1.06-1.06Zm-7.085 1.06c.82.82 1.487 1.49 2.084 1.945c.614.468 1.24.78 1.989.78v-1.5c-.29 0-.599-.106-1.08-.473c-.497-.38-1.083-.962-1.933-1.812l-1.06 1.06Zm12.99-12.99c.85.85 1.433 1.436 1.813 1.933c.366.481.472.79.472 1.08h1.5c0-.749-.312-1.375-.78-1.99c-.455-.596-1.125-1.263-1.945-2.083l-1.06 1.06Zm1.06 7.086c.82-.82 1.49-1.488 1.945-2.084c.468-.614.78-1.24.78-1.99h-1.5c0 .29-.106.6-.473 1.08c-.38.498-.962 1.083-1.812 1.933l1.06 1.06Zm0-8.146c-.82-.82-1.487-1.49-2.084-1.945c-.614-.468-1.24-.78-1.989-.78v1.5c.29 0 .599.106 1.08.473c.497.38 1.083.962 1.933 1.812l1.06-1.06Zm-7.085 1.06c.85-.85 1.435-1.433 1.933-1.812c.48-.367.79-.473 1.08-.473v-1.5c-.75 0-1.376.312-1.99.78c-.596.455-1.264 1.125-2.084 1.945l1.06 1.06Zm2.362 10.749L7.216 9.698l-1.06 1.061l7.085 7.085l1.06-1.06Z"
10
+ /><path
11
+ stroke="currentColor"
12
+ stroke-linecap="round"
13
+ stroke-width="1.5"
14
+ d="M9 21h12"
15
+ /></g
16
+ ></svg
17
+ >
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Eyedropper.svelte ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ width="100%"
4
+ height="100%"
5
+ viewBox="0 0 24 24"
6
+ ><path
7
+ fill="currentColor"
8
+ d="M21.03 2.97a3.578 3.578 0 0 0-5.06 0L14 4.94l-.013-.013a1.75 1.75 0 0 0-2.475 0l-.585.586a1.75 1.75 0 0 0 0 2.475l.012.012l-6.78 6.78a2.25 2.25 0 0 0-.659 1.592v.687l-1.28 2.347c-.836 1.533.841 3.21 2.374 2.375l2.347-1.28h.688a2.25 2.25 0 0 0 1.59-.66L16 13.061l.012.012a1.75 1.75 0 0 0 2.475 0l.586-.585a1.75 1.75 0 0 0 0-2.475L19.061 10l1.97-1.97a3.578 3.578 0 0 0 0-5.06ZM12 9.061l2.94 2.94l-6.78 6.78a.75.75 0 0 1-.531.22H6.75a.75.75 0 0 0-.359.09l-2.515 1.373a.234.234 0 0 1-.159.032a.264.264 0 0 1-.138-.075a.264.264 0 0 1-.075-.138a.234.234 0 0 1 .033-.158l1.372-2.515A.75.75 0 0 0 5 17.25v-.878a.75.75 0 0 1 .22-.53L12 9.06Z"
9
+ /></svg
10
+ >
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Image.svelte ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ width="100%"
4
+ height="100%"
5
+ viewBox="0 0 24 24"
6
+ fill="none"
7
+ stroke="currentColor"
8
+ stroke-width="1.5"
9
+ stroke-linecap="round"
10
+ stroke-linejoin="round"
11
+ class="feather feather-image"
12
+ >
13
+ <rect x="3" y="3" width="18" height="18" rx="2" ry="2" />
14
+ <circle cx="8.5" cy="8.5" r="1.5" />
15
+ <polyline points="21 15 16 10 5 21" />
16
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/ImagePaste.svelte ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"
2
+ ><path
3
+ fill="currentColor"
4
+ d="M13.75 2a2.25 2.25 0 0 1 2.236 2.002V4h1.764A2.25 2.25 0 0 1 20 6.25V11h-1.5V6.25a.75.75 0 0 0-.75-.75h-2.129c-.404.603-1.091 1-1.871 1h-3.5c-.78 0-1.467-.397-1.871-1H6.25a.75.75 0 0 0-.75.75v13.5c0 .414.336.75.75.75h4.78a4 4 0 0 0 .505 1.5H6.25A2.25 2.25 0 0 1 4 19.75V6.25A2.25 2.25 0 0 1 6.25 4h1.764a2.25 2.25 0 0 1 2.236-2zm2.245 2.096L16 4.25q0-.078-.005-.154M13.75 3.5h-3.5a.75.75 0 0 0 0 1.5h3.5a.75.75 0 0 0 0-1.5M15 12a3 3 0 0 0-3 3v5c0 .556.151 1.077.415 1.524l3.494-3.494a2.25 2.25 0 0 1 3.182 0l3.494 3.494c.264-.447.415-.968.415-1.524v-5a3 3 0 0 0-3-3zm0 11a3 3 0 0 1-1.524-.415l3.494-3.494a.75.75 0 0 1 1.06 0l3.494 3.494A3 3 0 0 1 20 23zm5-7a1 1 0 1 1 0-2 1 1 0 0 1 0 2"
5
+ /></svg
6
+ >
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/JSON.svelte ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ xmlns:xlink="http://www.w3.org/1999/xlink"
4
+ aria-hidden="true"
5
+ role="img"
6
+ class="iconify iconify--mdi"
7
+ width="100%"
8
+ height="100%"
9
+ preserveAspectRatio="xMidYMid meet"
10
+ viewBox="0 0 24 24"
11
+ >
12
+ <path
13
+ fill="currentColor"
14
+ d="M5 3h2v2H5v5a2 2 0 0 1-2 2a2 2 0 0 1 2 2v5h2v2H5c-1.07-.27-2-.9-2-2v-4a2 2 0 0 0-2-2H0v-2h1a2 2 0 0 0 2-2V5a2 2 0 0 1 2-2m14 0a2 2 0 0 1 2 2v4a2 2 0 0 0 2 2h1v2h-1a2 2 0 0 0-2 2v4a2 2 0 0 1-2 2h-2v-2h2v-5a2 2 0 0 1 2-2a2 2 0 0 1-2-2V5h-2V3h2m-7 12a1 1 0 0 1 1 1a1 1 0 0 1-1 1a1 1 0 0 1-1-1a1 1 0 0 1 1-1m-4 0a1 1 0 0 1 1 1a1 1 0 0 1-1 1a1 1 0 0 1-1-1a1 1 0 0 1 1-1m8 0a1 1 0 0 1 1 1a1 1 0 0 1-1 1a1 1 0 0 1-1-1a1 1 0 0 1 1-1Z"
15
+ />
16
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Layers.svelte ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ width="100%"
3
+ height="100%"
4
+ viewBox="0 0 17 17"
5
+ fill="none"
6
+ xmlns="http://www.w3.org/2000/svg"
7
+ >
8
+ <path
9
+ d="M1.35327 10.9495L6.77663 15.158C7.12221 15.4229 7.50051 15.5553 7.91154 15.5553C8.32258 15.5553 8.70126 15.4229 9.0476 15.158L14.471 10.9495"
10
+ stroke="currentColor"
11
+ stroke-width="1.5"
12
+ stroke-linecap="round"
13
+ />
14
+ <path
15
+ d="M7.23461 11.4324C7.23406 11.432 7.2335 11.4316 7.23295 11.4312L1.81496 7.2268C1.81471 7.22661 1.81446 7.22641 1.8142 7.22621C1.52269 6.99826 1.39429 6.73321 1.39429 6.37014C1.39429 6.00782 1.52236 5.74301 1.81325 5.51507C1.8136 5.5148 1.81394 5.51453 1.81428 5.51426L7.2331 1.30812C7.45645 1.13785 7.67632 1.06653 7.91159 1.06653C8.14692 1.06653 8.36622 1.13787 8.58861 1.30787C8.58915 1.30828 8.58969 1.30869 8.59023 1.30911L14.0082 5.51462C14.0085 5.51485 14.0088 5.51507 14.0091 5.51529C14.3008 5.74345 14.4289 6.00823 14.4289 6.37014C14.4289 6.73356 14.3006 6.99862 14.01 7.22634C14.0096 7.22662 14.0093 7.22689 14.0089 7.22717L8.59007 11.4322C8.36672 11.6024 8.14686 11.6738 7.91159 11.6738C7.67628 11.6738 7.45699 11.6024 7.23461 11.4324Z"
16
+ stroke="currentColor"
17
+ stroke-width="1.5"
18
+ />
19
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/LineChart.svelte ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ xmlns:xlink="http://www.w3.org/1999/xlink"
4
+ aria-hidden="true"
5
+ role="img"
6
+ class="iconify iconify--carbon"
7
+ width="100%"
8
+ height="100%"
9
+ preserveAspectRatio="xMidYMid meet"
10
+ viewBox="0 0 32 32"
11
+ >
12
+ <path
13
+ fill="currentColor"
14
+ d="M4 2H2v26a2 2 0 0 0 2 2h26v-2H4v-3h22v-8H4v-4h14V5H4Zm20 17v4H4v-4ZM16 7v4H4V7Z"
15
+ />
16
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Maximise.svelte ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ width="100%"
4
+ height="100%"
5
+ viewBox="0 0 24 24"
6
+ fill="none"
7
+ stroke="currentColor"
8
+ stroke-width="1.5"
9
+ stroke-linecap="round"
10
+ stroke-linejoin="round"
11
+ >
12
+ <path
13
+ d="M8 3H5a2 2 0 0 0-2 2v3m18 0V5a2 2 0 0 0-2-2h-3m0 18h3a2 2 0 0 0 2-2v-3M3 16v3a2 2 0 0 0 2 2h3"
14
+ />
15
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Maximize.svelte ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ viewBox="0 0 24 24"
4
+ fill="none"
5
+ stroke="currentColor"
6
+ stroke-width="2"
7
+ stroke-linecap="round"
8
+ stroke-linejoin="round"
9
+ class="feather feather-maximize"
10
+ >
11
+ <path
12
+ d="M8 3H5a2 2 0 0 0-2 2v3m18 0V5a2 2 0 0 0-2-2h-3m0 18h3a2 2 0 0 0 2-2v-3M3 16v3a2 2 0 0 0 2 2h3"
13
+ >
14
+ </path>
15
+ </svg>
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Microphone.svelte ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ width="100%"
4
+ height="100%"
5
+ viewBox="0 0 24 24"
6
+ fill="none"
7
+ stroke="currentColor"
8
+ stroke-width="2"
9
+ stroke-linecap="round"
10
+ stroke-linejoin="round"
11
+ class="feather feather-mic"
12
+ ><path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z" /><path
13
+ d="M19 10v2a7 7 0 0 1-14 0v-2"
14
+ /><line x1="12" y1="19" x2="12" y2="23" /><line
15
+ x1="8"
16
+ y1="23"
17
+ x2="16"
18
+ y2="23"
19
+ /></svg
20
+ >
evalkit_tf446/lib/python3.10/site-packages/gradio/_frontend_code/icons/src/Minimize.svelte ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <svg
2
+ xmlns="http://www.w3.org/2000/svg"
3
+ viewBox="0 0 24 24"
4
+ fill="none"
5
+ stroke="currentColor"
6
+ stroke-width="2"
7
+ stroke-linecap="round"
8
+ stroke-linejoin="round"
9
+ class="feather feather-minimize"
10
+ ><path
11
+ d="M8 3v3a2 2 0 0 1-2 2H3m18 0h-3a2 2 0 0 1-2-2V3m0 18v-3a2 2 0 0 1 2-2h3M3 16h3a2 2 0 0 1 2 2v3"
12
+ ></path></svg
13
+ >