ZTWHHH committed on
Commit
75bebda
·
verified ·
1 Parent(s): d66b080

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +4 -0
  2. vllm/lib/python3.10/site-packages/multiprocess/__init__.py +66 -0
  3. vllm/lib/python3.10/site-packages/multiprocess/context.py +376 -0
  4. vllm/lib/python3.10/site-packages/multiprocess/managers.py +1378 -0
  5. vllm/lib/python3.10/site-packages/multiprocess/popen_forkserver.py +74 -0
  6. vllm/lib/python3.10/site-packages/multiprocess/popen_spawn_posix.py +72 -0
  7. vllm/lib/python3.10/site-packages/py/__pycache__/__init__.cpython-310.pyc +0 -0
  8. vllm/lib/python3.10/site-packages/py/__pycache__/__metainfo.cpython-310.pyc +0 -0
  9. vllm/lib/python3.10/site-packages/py/__pycache__/_error.cpython-310.pyc +0 -0
  10. vllm/lib/python3.10/site-packages/py/__pycache__/_std.cpython-310.pyc +0 -0
  11. vllm/lib/python3.10/site-packages/py/__pycache__/test.cpython-310.pyc +0 -0
  12. vllm/lib/python3.10/site-packages/py/_log/__init__.py +2 -0
  13. vllm/lib/python3.10/site-packages/py/_log/__pycache__/__init__.cpython-310.pyc +0 -0
  14. vllm/lib/python3.10/site-packages/py/_log/__pycache__/log.cpython-310.pyc +0 -0
  15. vllm/lib/python3.10/site-packages/py/_log/__pycache__/warning.cpython-310.pyc +0 -0
  16. vllm/lib/python3.10/site-packages/py/_log/log.py +206 -0
  17. vllm/lib/python3.10/site-packages/py/_log/warning.py +79 -0
  18. vllm/lib/python3.10/site-packages/py/_path/__init__.py +1 -0
  19. vllm/lib/python3.10/site-packages/py/_path/__pycache__/__init__.cpython-310.pyc +0 -0
  20. vllm/lib/python3.10/site-packages/py/_path/__pycache__/cacheutil.cpython-310.pyc +0 -0
  21. vllm/lib/python3.10/site-packages/py/_path/__pycache__/common.cpython-310.pyc +0 -0
  22. vllm/lib/python3.10/site-packages/py/_path/__pycache__/local.cpython-310.pyc +0 -0
  23. vllm/lib/python3.10/site-packages/py/_path/__pycache__/svnurl.cpython-310.pyc +0 -0
  24. vllm/lib/python3.10/site-packages/py/_path/__pycache__/svnwc.cpython-310.pyc +0 -0
  25. vllm/lib/python3.10/site-packages/py/_path/cacheutil.py +114 -0
  26. vllm/lib/python3.10/site-packages/py/_path/local.py +1030 -0
  27. vllm/lib/python3.10/site-packages/py/_path/svnurl.py +380 -0
  28. vllm/lib/python3.10/site-packages/py/_path/svnwc.py +1240 -0
  29. vllm/lib/python3.10/site-packages/py/_process/__init__.py +1 -0
  30. vllm/lib/python3.10/site-packages/py/_process/__pycache__/__init__.cpython-310.pyc +0 -0
  31. vllm/lib/python3.10/site-packages/py/_process/__pycache__/cmdexec.cpython-310.pyc +0 -0
  32. vllm/lib/python3.10/site-packages/py/_process/__pycache__/forkedfunc.cpython-310.pyc +0 -0
  33. vllm/lib/python3.10/site-packages/py/_process/__pycache__/killproc.cpython-310.pyc +0 -0
  34. vllm/lib/python3.10/site-packages/py/_process/cmdexec.py +49 -0
  35. vllm/lib/python3.10/site-packages/py/_process/forkedfunc.py +120 -0
  36. vllm/lib/python3.10/site-packages/py/_process/killproc.py +23 -0
  37. vllm/lib/python3.10/site-packages/py/_vendored_packages/__pycache__/__init__.cpython-310.pyc +0 -0
  38. vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/INSTALLER +1 -0
  39. vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/LICENSE +18 -0
  40. vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/METADATA +125 -0
  41. vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/RECORD +11 -0
  42. vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/REQUESTED +0 -0
  43. vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/WHEEL +6 -0
  44. vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/top_level.txt +1 -0
  45. vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg/__init__.py +217 -0
  46. vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg/__pycache__/__init__.cpython-310.pyc +0 -0
  47. vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg/__pycache__/version.cpython-310.pyc +0 -0
  48. vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg/version.py +5 -0
  49. vllm/lib/python3.10/site-packages/py/_vendored_packages/iniconfig-1.1.1.dist-info/INSTALLER +1 -0
  50. vllm/lib/python3.10/site-packages/py/_vendored_packages/iniconfig-1.1.1.dist-info/RECORD +11 -0
.gitattributes CHANGED
@@ -1578,3 +1578,7 @@ vllm/lib/python3.10/site-packages/multidict/_multidict.cpython-310-x86_64-linux-
1578
  vllm/lib/python3.10/site-packages/pydantic_core/_pydantic_core.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
1579
  vllm/lib/python3.10/site-packages/torio/lib/_torio_ffmpeg6.so filter=lfs diff=lfs merge=lfs -text
1580
  vllm/lib/python3.10/site-packages/propcache/_helpers_c.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
1578
  vllm/lib/python3.10/site-packages/pydantic_core/_pydantic_core.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
1579
  vllm/lib/python3.10/site-packages/torio/lib/_torio_ffmpeg6.so filter=lfs diff=lfs merge=lfs -text
1580
  vllm/lib/python3.10/site-packages/propcache/_helpers_c.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
1581
+ vllm/lib/python3.10/site-packages/virtualenv/seed/wheels/embed/setuptools-75.8.0-py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
1582
+ vllm/lib/python3.10/site-packages/pydantic_core/__pycache__/core_schema.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1583
+ vllm/lib/python3.10/site-packages/virtualenv/seed/wheels/embed/pip-24.3.1-py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
1584
+ vllm/lib/python3.10/site-packages/virtualenv/seed/wheels/embed/setuptools-75.3.0-py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
vllm/lib/python3.10/site-packages/multiprocess/__init__.py ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#
# Package analogous to 'threading.py' but using processes
#
# multiprocessing/__init__.py
#
# This package is intended to duplicate the functionality (and much of
# the API) of threading.py but uses processes instead of threads. A
# subpackage 'multiprocessing.dummy' has the same API but is a simple
# wrapper for 'threading'.
#
# Original: Copyright (c) 2006-2008, R Oudkerk
# Original: Licensed to PSF under a Contributor Agreement.
# Forked by Mike McKerns, to support enhanced serialization.

# author, version, license, and long description
try: # the package is installed
    from .__info__ import __version__, __author__, __doc__, __license__
except: # pragma: no cover
    # Fallback for running from an uninstalled source checkout: pull the
    # metadata from the repository-root version module instead.
    import os
    import sys
    root = os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
    sys.path.append(root)
    # get distribution meta info
    from version import (__version__, __author__,
                         get_license_text, get_readme_as_rst)
    __license__ = get_license_text(os.path.join(root, 'LICENSE'))
    __license__ = "\n%s" % __license__
    __doc__ = get_readme_as_rst(os.path.join(root, 'README.md'))
    del os, sys, root, get_license_text, get_readme_as_rst


import sys
from . import context

#
# Copy stuff from default context
#

# Re-export every public attribute of the default context (Process, Queue,
# Lock, get_context, ...) at package level.
__all__ = [x for x in dir(context._default_context) if not x.startswith('_')]
globals().update((name, getattr(context._default_context, name)) for name in __all__)

#
# XXX These should not really be documented or public.
#

SUBDEBUG = 5
SUBWARNING = 25

#
# Alias for main module -- will be reset by bootstrapping child processes
#

if '__main__' in sys.modules:
    sys.modules['__mp_main__'] = sys.modules['__main__']


def license():
    """print license"""
    print (__license__)
    return

def citation():
    """print citation"""
    # Slices the citation paragraph out of the README-derived __doc__.
    print (__doc__[-491:-118])
    return
vllm/lib/python3.10/site-packages/multiprocess/context.py ADDED
@@ -0,0 +1,376 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ import threading
4
+
5
+ from . import process
6
+ from . import reduction
7
+
8
+ __all__ = ()
9
+
10
+ #
11
+ # Exceptions
12
+ #
13
+
14
class ProcessError(Exception):
    # Root of this package's exception hierarchy.
    pass

class BufferTooShort(ProcessError):
    # Presumably raised when a caller-supplied receive buffer is too small
    # for an incoming message -- name-based; confirm against connection.py.
    pass

class TimeoutError(ProcessError):
    # Raised when a blocking call times out -- NOTE(review): distinct from
    # the builtin TimeoutError.
    pass

class AuthenticationError(ProcessError):
    # Raised on an authentication (authkey) failure.
    pass
25
+
26
+ #
27
+ # Base type for contexts. Bound methods of an instance of this type are included in __all__ of __init__.py
28
+ #
29
+
30
class BaseContext(object):
    # Container of the public multiprocessing API.  Concrete subclasses pin
    # a particular start method; bound methods of an instance of this type
    # are re-exported via __all__ of __init__.py.

    ProcessError = ProcessError
    BufferTooShort = BufferTooShort
    TimeoutError = TimeoutError
    AuthenticationError = AuthenticationError

    current_process = staticmethod(process.current_process)
    parent_process = staticmethod(process.parent_process)
    active_children = staticmethod(process.active_children)

    def cpu_count(self):
        '''Returns the number of CPUs in the system'''
        num = os.cpu_count()
        if num is None:
            raise NotImplementedError('cannot determine number of cpus')
        else:
            return num

    def Manager(self):
        '''Returns a manager associated with a running server process

        The managers methods such as `Lock()`, `Condition()` and `Queue()`
        can be used to create shared objects.
        '''
        from .managers import SyncManager
        m = SyncManager(ctx=self.get_context())
        m.start()
        return m

    def Pipe(self, duplex=True):
        '''Returns two connection objects connected by a pipe'''
        from .connection import Pipe
        return Pipe(duplex)

    def Lock(self):
        '''Returns a non-recursive lock object'''
        from .synchronize import Lock
        return Lock(ctx=self.get_context())

    def RLock(self):
        '''Returns a recursive lock object'''
        from .synchronize import RLock
        return RLock(ctx=self.get_context())

    def Condition(self, lock=None):
        '''Returns a condition object'''
        from .synchronize import Condition
        return Condition(lock, ctx=self.get_context())

    def Semaphore(self, value=1):
        '''Returns a semaphore object'''
        from .synchronize import Semaphore
        return Semaphore(value, ctx=self.get_context())

    def BoundedSemaphore(self, value=1):
        '''Returns a bounded semaphore object'''
        from .synchronize import BoundedSemaphore
        return BoundedSemaphore(value, ctx=self.get_context())

    def Event(self):
        '''Returns an event object'''
        from .synchronize import Event
        return Event(ctx=self.get_context())

    def Barrier(self, parties, action=None, timeout=None):
        '''Returns a barrier object'''
        from .synchronize import Barrier
        return Barrier(parties, action, timeout, ctx=self.get_context())

    def Queue(self, maxsize=0):
        '''Returns a queue object'''
        from .queues import Queue
        return Queue(maxsize, ctx=self.get_context())

    def JoinableQueue(self, maxsize=0):
        '''Returns a queue object'''
        from .queues import JoinableQueue
        return JoinableQueue(maxsize, ctx=self.get_context())

    def SimpleQueue(self):
        '''Returns a queue object'''
        from .queues import SimpleQueue
        return SimpleQueue(ctx=self.get_context())

    def Pool(self, processes=None, initializer=None, initargs=(),
             maxtasksperchild=None):
        '''Returns a process pool object'''
        from .pool import Pool
        return Pool(processes, initializer, initargs, maxtasksperchild,
                    context=self.get_context())

    def RawValue(self, typecode_or_type, *args):
        '''Returns a shared object'''
        from .sharedctypes import RawValue
        return RawValue(typecode_or_type, *args)

    def RawArray(self, typecode_or_type, size_or_initializer):
        '''Returns a shared array'''
        from .sharedctypes import RawArray
        return RawArray(typecode_or_type, size_or_initializer)

    def Value(self, typecode_or_type, *args, lock=True):
        '''Returns a synchronized shared object'''
        from .sharedctypes import Value
        return Value(typecode_or_type, *args, lock=lock,
                     ctx=self.get_context())

    def Array(self, typecode_or_type, size_or_initializer, *, lock=True):
        '''Returns a synchronized shared array'''
        from .sharedctypes import Array
        return Array(typecode_or_type, size_or_initializer, lock=lock,
                     ctx=self.get_context())

    def freeze_support(self):
        '''Check whether this is a fake forked process in a frozen executable.
        If so then run code specified by commandline and exit.
        '''
        if sys.platform == 'win32' and getattr(sys, 'frozen', False):
            from .spawn import freeze_support
            freeze_support()

    def get_logger(self):
        '''Return package logger -- if it does not already exist then
        it is created.
        '''
        from .util import get_logger
        return get_logger()

    def log_to_stderr(self, level=None):
        '''Turn on logging and add a handler which prints to stderr'''
        from .util import log_to_stderr
        return log_to_stderr(level)

    def allow_connection_pickling(self):
        '''Install support for sending connections and sockets
        between processes
        '''
        # This is undocumented.  In previous versions of multiprocessing
        # its only effect was to make socket objects inheritable on Windows.
        from . import connection

    def set_executable(self, executable):
        '''Sets the path to a python.exe or pythonw.exe binary used to run
        child processes instead of sys.executable when using the 'spawn'
        start method.  Useful for people embedding Python.
        '''
        from .spawn import set_executable
        set_executable(executable)

    def set_forkserver_preload(self, module_names):
        '''Set list of module names to try to load in forkserver process.
        This is really just a hint.
        '''
        from .forkserver import set_forkserver_preload
        set_forkserver_preload(module_names)

    def get_context(self, method=None):
        # No method named: this context IS the context.  Otherwise look up
        # the concrete context and check it is usable on this platform.
        if method is None:
            return self
        try:
            ctx = _concrete_contexts[method]
        except KeyError:
            raise ValueError('cannot find context for %r' % method) from None
        ctx._check_available()
        return ctx

    def get_start_method(self, allow_none=False):
        return self._name

    def set_start_method(self, method, force=False):
        # Concrete contexts are immutable; only DefaultContext allows this.
        raise ValueError('cannot set start method of concrete context')

    @property
    def reducer(self):
        '''Controls how objects will be reduced to a form that can be
        shared with other processes.'''
        return globals().get('reduction')

    @reducer.setter
    def reducer(self, reduction):
        globals()['reduction'] = reduction

    def _check_available(self):
        # Overridden by contexts that are not always available (forkserver).
        pass
215
+
216
+ #
217
+ # Type of default context -- underlying context can be set at most once
218
+ #
219
+
220
class Process(process.BaseProcess):
    # Process type used by the default context: the actual start method is
    # resolved lazily, at start()/after-fork time, via _default_context.
    _start_method = None
    @staticmethod
    def _Popen(process_obj):
        return _default_context.get_context().Process._Popen(process_obj)

    @staticmethod
    def _after_fork():
        return _default_context.get_context().Process._after_fork()
229
+
230
class DefaultContext(BaseContext):
    # Context whose underlying concrete context is chosen lazily and can be
    # set at most once (unless force=True is passed to set_start_method).
    Process = Process

    def __init__(self, context):
        self._default_context = context
        self._actual_context = None

    def get_context(self, method=None):
        if method is None:
            # First use pins the default concrete context.
            if self._actual_context is None:
                self._actual_context = self._default_context
            return self._actual_context
        else:
            return super().get_context(method)

    def set_start_method(self, method, force=False):
        if self._actual_context is not None and not force:
            raise RuntimeError('context has already been set')
        if method is None and force:
            # Reset to "not yet chosen".
            self._actual_context = None
            return
        self._actual_context = self.get_context(method)

    def get_start_method(self, allow_none=False):
        if self._actual_context is None:
            if allow_none:
                return None
            self._actual_context = self._default_context
        return self._actual_context._name

    def get_all_start_methods(self):
        # Windows only supports 'spawn'; macOS lists 'spawn' first;
        # 'forkserver' requires fd-passing support in the reducer.
        if sys.platform == 'win32':
            return ['spawn']
        else:
            methods = ['spawn', 'fork'] if sys.platform == 'darwin' else ['fork', 'spawn']
            if reduction.HAVE_SEND_HANDLE:
                methods.append('forkserver')
            return methods
268
+
269
+
270
#
# Context types for fixed start method
#

if sys.platform != 'win32':

    class ForkProcess(process.BaseProcess):
        _start_method = 'fork'
        @staticmethod
        def _Popen(process_obj):
            from .popen_fork import Popen
            return Popen(process_obj)

    class SpawnProcess(process.BaseProcess):
        _start_method = 'spawn'
        @staticmethod
        def _Popen(process_obj):
            from .popen_spawn_posix import Popen
            return Popen(process_obj)

        @staticmethod
        def _after_fork():
            # process is spawned, nothing to do
            pass

    class ForkServerProcess(process.BaseProcess):
        _start_method = 'forkserver'
        @staticmethod
        def _Popen(process_obj):
            from .popen_forkserver import Popen
            return Popen(process_obj)

    class ForkContext(BaseContext):
        _name = 'fork'
        Process = ForkProcess

    class SpawnContext(BaseContext):
        _name = 'spawn'
        Process = SpawnProcess

    class ForkServerContext(BaseContext):
        _name = 'forkserver'
        Process = ForkServerProcess
        def _check_available(self):
            if not reduction.HAVE_SEND_HANDLE:
                raise ValueError('forkserver start method not available')

    _concrete_contexts = {
        'fork': ForkContext(),
        'spawn': SpawnContext(),
        'forkserver': ForkServerContext(),
    }
    if sys.platform == 'darwin':
        # bpo-33725: running arbitrary code after fork() is no longer reliable
        # on macOS since macOS 10.14 (Mojave). Use spawn by default instead.
        # NOTE(review): this fork deliberately keeps 'fork' here (see FIXME),
        # diverging from upstream CPython's darwin default of 'spawn'.
        _default_context = DefaultContext(_concrete_contexts['fork']) #FIXME: spawn
    else:
        _default_context = DefaultContext(_concrete_contexts['fork'])

else:

    class SpawnProcess(process.BaseProcess):
        _start_method = 'spawn'
        @staticmethod
        def _Popen(process_obj):
            from .popen_spawn_win32 import Popen
            return Popen(process_obj)

        @staticmethod
        def _after_fork():
            # process is spawned, nothing to do
            pass

    class SpawnContext(BaseContext):
        _name = 'spawn'
        Process = SpawnProcess

    _concrete_contexts = {
        'spawn': SpawnContext(),
    }
    _default_context = DefaultContext(_concrete_contexts['spawn'])
351
+
352
#
# Force the start method
#

def _force_start_method(method):
    # Pin the start method unconditionally -- used by child bootstrap code.
    _default_context._actual_context = _concrete_contexts[method]

#
# Check that the current thread is spawning a child process
#

# Thread-local slot holding the Popen object while this thread is in the
# middle of spawning a child process.
_tls = threading.local()

def get_spawning_popen():
    return getattr(_tls, 'spawning_popen', None)

def set_spawning_popen(popen):
    _tls.spawning_popen = popen

def assert_spawning(obj):
    # Guard used by picklers of process-private objects: they may only be
    # serialized while a child process is being spawned.
    if get_spawning_popen() is None:
        raise RuntimeError(
            '%s objects should only be shared between processes'
            ' through inheritance' % type(obj).__name__
        )
vllm/lib/python3.10/site-packages/multiprocess/managers.py ADDED
@@ -0,0 +1,1378 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #
2
+ # Module providing manager classes for dealing
3
+ # with shared objects
4
+ #
5
+ # multiprocessing/managers.py
6
+ #
7
+ # Copyright (c) 2006-2008, R Oudkerk
8
+ # Licensed to PSF under a Contributor Agreement.
9
+ #
10
+
11
+ __all__ = [ 'BaseManager', 'SyncManager', 'BaseProxy', 'Token' ]
12
+
13
+ #
14
+ # Imports
15
+ #
16
+
17
+ import sys
18
+ import threading
19
+ import signal
20
+ import array
21
+ import queue
22
+ import time
23
+ import types
24
+ import os
25
+ from os import getpid
26
+
27
+ from traceback import format_exc
28
+
29
+ from . import connection
30
+ from .context import reduction, get_spawning_popen, ProcessError
31
+ from . import pool
32
+ from . import process
33
+ from . import util
34
+ from . import get_context
35
+ try:
36
+ from . import shared_memory
37
+ except ImportError:
38
+ HAS_SHMEM = False
39
+ else:
40
+ HAS_SHMEM = True
41
+ __all__.append('SharedMemoryManager')
42
+
43
+ #
44
+ # Register some things for pickling
45
+ #
46
+
47
def reduce_array(a):
    # Reduce an array.array to (constructor, args) so the manager's
    # serializer can rebuild it on the other side of a connection.
    contents = (a.typecode, a.tobytes())
    return array.array, contents
49
reduction.register(array.array, reduce_array)

# Register dict view types (items/keys/values) to be pickled as plain lists.
view_types = [type(getattr({}, name)()) for name in ('items','keys','values')]
if view_types[0] is not list: # only needed in Py3.0
    def rebuild_as_list(obj):
        return list, (list(obj),)
    for view_type in view_types:
        reduction.register(view_type, rebuild_as_list)
57
+
58
+ #
59
+ # Type for identifying shared objects
60
+ #
61
+
62
class Token(object):
    '''
    Type to uniquely identify a shared object
    '''
    # A token is (typeid, server address, object id); __slots__ keeps the
    # many small instances lightweight.
    __slots__ = ('typeid', 'address', 'id')

    def __init__(self, typeid, address, id):
        self.typeid = typeid
        self.address = address
        self.id = id

    def __getstate__(self):
        # Needed because __slots__ removes the default __dict__ pickling.
        return (self.typeid, self.address, self.id)

    def __setstate__(self, state):
        self.typeid, self.address, self.id = state

    def __repr__(self):
        return '%s(typeid=%r, address=%r, id=%r)' % \
               (self.__class__.__name__, self.typeid, self.address, self.id)
80
+
81
+ #
82
+ # Function for communication with a manager's server process
83
+ #
84
+
85
def dispatch(c, id, methodname, args=(), kwds={}):
    '''
    Send a message to manager using connection `c` and return response
    '''
    # Note: kwds={} is safe here because the dict is never mutated.
    request = (id, methodname, args, kwds)
    c.send(request)
    kind, result = c.recv()
    if kind != '#RETURN':
        raise convert_to_error(kind, result)
    return result
94
+
95
def convert_to_error(kind, result):
    # Turn a (kind, result) server reply into an exception *instance*
    # (returned, not raised -- the caller decides whether to raise it).
    if kind == '#ERROR':
        # Server shipped a real exception object; hand it back as-is.
        return result
    if kind not in ('#TRACEBACK', '#UNSERIALIZABLE'):
        return ValueError('Unrecognized message type {!r}'.format(kind))
    # Both remaining kinds carry a formatted-traceback string payload.
    if not isinstance(result, str):
        raise TypeError(
            "Result {0!r} (kind '{1}') type is {2}, not str".format(
                result, kind, type(result)))
    if kind == '#UNSERIALIZABLE':
        return RemoteError('Unserializable message: %s\n' % result)
    return RemoteError(result)
109
+
110
class RemoteError(Exception):
    # Carries a traceback string captured in the manager's server process.
    def __str__(self):
        bar = '-' * 75
        return '\n{0}\n{1}{0}'.format(bar, str(self.args[0]))
113
+
114
+ #
115
+ # Functions for finding the method names of an object
116
+ #
117
+
118
def all_methods(obj):
    '''
    Return a list of names of methods of `obj`
    '''
    return [name for name in dir(obj) if callable(getattr(obj, name))]

def public_methods(obj):
    '''
    Return a list of names of methods of `obj` which do not start with '_'
    '''
    return [name for name in all_methods(obj) if not name.startswith('_')]
134
+
135
+ #
136
+ # Server which is run in a process controlled by a manager
137
+ #
138
+
139
+ class Server(object):
140
+ '''
141
+ Server class which runs in a process controlled by a manager object
142
+ '''
143
+ public = ['shutdown', 'create', 'accept_connection', 'get_methods',
144
+ 'debug_info', 'number_of_objects', 'dummy', 'incref', 'decref']
145
+
146
+ def __init__(self, registry, address, authkey, serializer):
147
+ if not isinstance(authkey, bytes):
148
+ raise TypeError(
149
+ "Authkey {0!r} is type {1!s}, not bytes".format(
150
+ authkey, type(authkey)))
151
+ self.registry = registry
152
+ self.authkey = process.AuthenticationString(authkey)
153
+ Listener, Client = listener_client[serializer]
154
+
155
+ # do authentication later
156
+ self.listener = Listener(address=address, backlog=16)
157
+ self.address = self.listener.address
158
+
159
+ self.id_to_obj = {'0': (None, ())}
160
+ self.id_to_refcount = {}
161
+ self.id_to_local_proxy_obj = {}
162
+ self.mutex = threading.Lock()
163
+
164
+ def serve_forever(self):
165
+ '''
166
+ Run the server forever
167
+ '''
168
+ self.stop_event = threading.Event()
169
+ process.current_process()._manager_server = self
170
+ try:
171
+ accepter = threading.Thread(target=self.accepter)
172
+ accepter.daemon = True
173
+ accepter.start()
174
+ try:
175
+ while not self.stop_event.is_set():
176
+ self.stop_event.wait(1)
177
+ except (KeyboardInterrupt, SystemExit):
178
+ pass
179
+ finally:
180
+ if sys.stdout != sys.__stdout__: # what about stderr?
181
+ util.debug('resetting stdout, stderr')
182
+ sys.stdout = sys.__stdout__
183
+ sys.stderr = sys.__stderr__
184
+ sys.exit(0)
185
+
186
+ def accepter(self):
187
+ while True:
188
+ try:
189
+ c = self.listener.accept()
190
+ except OSError:
191
+ continue
192
+ t = threading.Thread(target=self.handle_request, args=(c,))
193
+ t.daemon = True
194
+ t.start()
195
+
196
+ def _handle_request(self, c):
197
+ request = None
198
+ try:
199
+ connection.deliver_challenge(c, self.authkey)
200
+ connection.answer_challenge(c, self.authkey)
201
+ request = c.recv()
202
+ ignore, funcname, args, kwds = request
203
+ assert funcname in self.public, '%r unrecognized' % funcname
204
+ func = getattr(self, funcname)
205
+ except Exception:
206
+ msg = ('#TRACEBACK', format_exc())
207
+ else:
208
+ try:
209
+ result = func(c, *args, **kwds)
210
+ except Exception:
211
+ msg = ('#TRACEBACK', format_exc())
212
+ else:
213
+ msg = ('#RETURN', result)
214
+
215
+ try:
216
+ c.send(msg)
217
+ except Exception as e:
218
+ try:
219
+ c.send(('#TRACEBACK', format_exc()))
220
+ except Exception:
221
+ pass
222
+ util.info('Failure to send message: %r', msg)
223
+ util.info(' ... request was %r', request)
224
+ util.info(' ... exception was %r', e)
225
+
226
+ def handle_request(self, conn):
227
+ '''
228
+ Handle a new connection
229
+ '''
230
+ try:
231
+ self._handle_request(conn)
232
+ except SystemExit:
233
+ # Server.serve_client() calls sys.exit(0) on EOF
234
+ pass
235
+ finally:
236
+ conn.close()
237
+
238
    def serve_client(self, conn):
        '''
        Handle requests from the proxies in a particular process/thread.

        Runs the request/response loop for one client connection until the
        server's stop_event is set or the client disconnects (EOFError).
        Each request is a 4-tuple ``(ident, methodname, args, kwds)`` and
        each reply is a 2-tuple ``(tag, payload)`` where tag is one of
        '#RETURN', '#PROXY', '#ERROR', '#TRACEBACK' or '#UNSERIALIZABLE'.
        '''
        util.debug('starting server thread to service %r',
                   threading.current_thread().name)

        # Hoist hot-path lookups to locals; this loop runs once per request.
        recv = conn.recv
        send = conn.send
        id_to_obj = self.id_to_obj

        while not self.stop_event.is_set():

            try:
                methodname = obj = None
                request = recv()
                ident, methodname, args, kwds = request
                try:
                    obj, exposed, gettypeid = id_to_obj[ident]
                except KeyError as ke:
                    # Fall back to objects only referenced from inside the
                    # manager itself (nested proxies); re-raise original
                    # KeyError if it is unknown there too.
                    try:
                        obj, exposed, gettypeid = \
                            self.id_to_local_proxy_obj[ident]
                    except KeyError:
                        raise ke

                if methodname not in exposed:
                    # Handled below by the AttributeError fallback path.
                    raise AttributeError(
                        'method %r of %r object is not in exposed=%r' %
                        (methodname, type(obj), exposed)
                        )

                function = getattr(obj, methodname)

                try:
                    res = function(*args, **kwds)
                except Exception as e:
                    # Exception raised by the referent's method: ship the
                    # exception object itself back to the caller.
                    msg = ('#ERROR', e)
                else:
                    # If this method's result must itself be shared, create
                    # a server-side object and return a proxy token instead
                    # of a copy of the value.
                    typeid = gettypeid and gettypeid.get(methodname, None)
                    if typeid:
                        rident, rexposed = self.create(conn, typeid, res)
                        token = Token(typeid, self.address, rident)
                        msg = ('#PROXY', (rexposed, token))
                    else:
                        msg = ('#RETURN', res)

            except AttributeError:
                if methodname is None:
                    # Failure happened before the request was unpacked.
                    msg = ('#TRACEBACK', format_exc())
                else:
                    # Unexposed method: try the generic server-side
                    # fallbacks ('__str__', '__repr__', '#GETVALUE').
                    try:
                        fallback_func = self.fallback_mapping[methodname]
                        result = fallback_func(
                            self, conn, ident, obj, *args, **kwds
                            )
                        msg = ('#RETURN', result)
                    except Exception:
                        msg = ('#TRACEBACK', format_exc())

            except EOFError:
                # Client went away; exit this serving thread cleanly.
                util.debug('got EOF -- exiting thread serving %r',
                           threading.current_thread().name)
                sys.exit(0)

            except Exception:
                msg = ('#TRACEBACK', format_exc())

            try:
                try:
                    send(msg)
                except Exception:
                    # Result could not be pickled; tell the client why.
                    send(('#UNSERIALIZABLE', format_exc()))
            except Exception as e:
                # Even the error report failed; give up on this connection.
                util.info('exception in thread serving %r',
                        threading.current_thread().name)
                util.info(' ... message was %r', msg)
                util.info(' ... exception was %r', e)
                conn.close()
                sys.exit(1)
318
+
319
+ def fallback_getvalue(self, conn, ident, obj):
320
+ return obj
321
+
322
+ def fallback_str(self, conn, ident, obj):
323
+ return str(obj)
324
+
325
+ def fallback_repr(self, conn, ident, obj):
326
+ return repr(obj)
327
+
328
    # Server-side fallbacks consulted by serve_client() when a requested
    # method name is not in the referent's exposed set: '__str__' and
    # '__repr__' are answered generically, and '#GETVALUE' is the pseudo-
    # method proxies use to fetch a copy of the referent itself.
    fallback_mapping = {
        '__str__':fallback_str,
        '__repr__':fallback_repr,
        '#GETVALUE':fallback_getvalue
        }
333
+
334
+ def dummy(self, c):
335
+ pass
336
+
337
    def debug_info(self, c):
        '''
        Return some info --- useful to spot problems with refcounting.

        Produces one entry per tracked ident (skipping the reserved '0'
        ident) showing its refcount and a truncated str() of the referent.
        '''
        # Perhaps include debug info about 'c'?
        with self.mutex:
            result = []
            keys = list(self.id_to_refcount.keys())
            keys.sort()
            for ident in keys:
                if ident != '0':
                    # str() of the referent is capped at 75 chars to keep
                    # the report readable for large objects.
                    result.append('  %s: refcount=%s\n  %s' %
                                  (ident, self.id_to_refcount[ident],
                                   str(self.id_to_obj[ident][0])[:75]))
            return '\n'.join(result)
352
+
353
+ def number_of_objects(self, c):
354
+ '''
355
+ Number of shared objects
356
+ '''
357
+ # Doesn't use (len(self.id_to_obj) - 1) as we shouldn't count ident='0'
358
+ return len(self.id_to_refcount)
359
+
360
+ def shutdown(self, c):
361
+ '''
362
+ Shutdown this process
363
+ '''
364
+ try:
365
+ util.debug('manager received shutdown message')
366
+ c.send(('#RETURN', None))
367
+ except:
368
+ import traceback
369
+ traceback.print_exc()
370
+ finally:
371
+ self.stop_event.set()
372
+
373
    def create(self, c, typeid, /, *args, **kwds):
        '''
        Create a new shared object and return its id.

        Looks up `typeid` in the registry, instantiates the referent (or
        adopts the single positional argument when no callable was
        registered), records it in id_to_obj, and increments its refcount
        on behalf of the requesting connection `c`.
        '''
        with self.mutex:
            callable, exposed, method_to_typeid, proxytype = \
                self.registry[typeid]

            if callable is None:
                # Registered without a factory: the caller supplies the
                # ready-made object as the sole positional argument.
                if kwds or (len(args) != 1):
                    raise ValueError(
                        "Without callable, must have one non-keyword argument")
                obj = args[0]
            else:
                obj = callable(*args, **kwds)

            if exposed is None:
                # No explicit exposed list: expose all public methods.
                exposed = public_methods(obj)
            if method_to_typeid is not None:
                if not isinstance(method_to_typeid, dict):
                    raise TypeError(
                        "Method_to_typeid {0!r}: type {1!s}, not dict".format(
                            method_to_typeid, type(method_to_typeid)))
                # Methods returning proxies must be callable too.
                exposed = list(exposed) + list(method_to_typeid)

            ident = '%x' % id(obj)  # convert to string because xmlrpclib
                                    # only has 32 bit signed integers
            util.debug('%r callable returned object with id %r', typeid, ident)

            self.id_to_obj[ident] = (obj, set(exposed), method_to_typeid)
            if ident not in self.id_to_refcount:
                self.id_to_refcount[ident] = 0

        # Count the creating client's reference (outside the mutex;
        # incref takes it again).
        self.incref(c, ident)
        return ident, tuple(exposed)
408
+
409
+ def get_methods(self, c, token):
410
+ '''
411
+ Return the methods of the shared object indicated by token
412
+ '''
413
+ return tuple(self.id_to_obj[token.id][1])
414
+
415
    def accept_connection(self, c, name):
        '''
        Spawn a new thread to serve this connection.

        Despite the docstring inherited from upstream, this runs
        serve_client() on the *current* thread (the caller already runs
        in a per-connection thread); `name` labels that thread.
        '''
        threading.current_thread().name = name
        # Acknowledge before entering the request/response loop.
        c.send(('#RETURN', None))
        self.serve_client(c)
422
+
423
    def incref(self, c, ident):
        # Increment the external reference count for `ident`, resurrecting
        # the server's tracking entry from id_to_local_proxy_obj when only
        # an internal (manager-side) reference remained.
        with self.mutex:
            try:
                self.id_to_refcount[ident] += 1
            except KeyError as ke:
                # If no external references exist but an internal (to the
                # manager) still does and a new external reference is created
                # from it, restore the manager's tracking of it from the
                # previously stashed internal ref.
                if ident in self.id_to_local_proxy_obj:
                    self.id_to_refcount[ident] = 1
                    self.id_to_obj[ident] = \
                        self.id_to_local_proxy_obj[ident]
                    obj, exposed, gettypeid = self.id_to_obj[ident]
                    util.debug('Server re-enabled tracking & INCREF %r', ident)
                else:
                    # Truly unknown ident: propagate the original KeyError.
                    raise ke
440
+
441
    def decref(self, c, ident):
        # Decrement the external reference count for `ident`, disposing of
        # the referent once no external references remain.
        if ident not in self.id_to_refcount and \
            ident in self.id_to_local_proxy_obj:
            # Only an internal (manager-side) reference exists; nothing to
            # decrement externally.
            util.debug('Server DECREF skipping %r', ident)
            return

        with self.mutex:
            if self.id_to_refcount[ident] <= 0:
                raise AssertionError(
                    "Id {0!s} ({1!r}) has refcount {2:n}, not 1+".format(
                        ident, self.id_to_obj[ident],
                        self.id_to_refcount[ident]))
            self.id_to_refcount[ident] -= 1
            if self.id_to_refcount[ident] == 0:
                del self.id_to_refcount[ident]

        if ident not in self.id_to_refcount:
            # Two-step process in case the object turns out to contain other
            # proxy objects (e.g. a managed list of managed lists).
            # Otherwise, deleting self.id_to_obj[ident] would trigger the
            # deleting of the stored value (another managed object) which would
            # in turn attempt to acquire the mutex that is already held here.
            self.id_to_obj[ident] = (None, (), None)  # thread-safe
            util.debug('disposing of obj with id %r', ident)
            with self.mutex:
                del self.id_to_obj[ident]
467
+
468
+
469
+ #
470
+ # Class to represent state of a manager
471
+ #
472
+
473
class State(object):
    """Mutable holder for a manager's lifecycle state.

    ``value`` holds one of the three class-level phase constants below;
    ``__slots__`` keeps instances tiny and rejects stray attributes.
    """
    __slots__ = ['value']

    # Lifecycle phases, in order.
    INITIAL = 0
    STARTED = 1
    SHUTDOWN = 2
478
+
479
+ #
480
+ # Mapping from serializer name to Listener and Client types
481
+ #
482
+
483
# Maps each supported serializer name to the (Listener, Client) connection
# classes that implement it; BaseManager and BaseProxy index this table by
# their `serializer` argument.
listener_client = { #XXX: register dill?
    'pickle' : (connection.Listener, connection.Client),
    'xmlrpclib' : (connection.XmlListener, connection.XmlClient)
    }
487
+
488
+ #
489
+ # Definition of BaseManager
490
+ #
491
+
492
class BaseManager(object):
    '''
    Base class for managers.

    A manager owns (or connects to) a Server process that hosts shared
    objects; typeids registered via `register()` become factory methods
    returning proxies for those objects.
    '''
    _registry = {}
    _Server = Server

    def __init__(self, address=None, authkey=None, serializer='pickle',
                 ctx=None):
        # `serializer` must be a key of `listener_client`; `ctx` defaults
        # to the current multiprocessing context.
        if authkey is None:
            authkey = process.current_process().authkey
        self._address = address     # XXX not final address if eg ('', 0)
        self._authkey = process.AuthenticationString(authkey)
        self._state = State()
        self._state.value = State.INITIAL
        self._serializer = serializer
        self._Listener, self._Client = listener_client[serializer]
        self._ctx = ctx or get_context()

    def get_server(self):
        '''
        Return server object with serve_forever() method and address attribute
        '''
        # Only valid before start()/connect(); raises ProcessError otherwise.
        if self._state.value != State.INITIAL:
            if self._state.value == State.STARTED:
                raise ProcessError("Already started server")
            elif self._state.value == State.SHUTDOWN:
                raise ProcessError("Manager has shut down")
            else:
                raise ProcessError(
                    "Unknown state {!r}".format(self._state.value))
        return Server(self._registry, self._address,
                      self._authkey, self._serializer)

    def connect(self):
        '''
        Connect manager object to the server process
        '''
        Listener, Client = listener_client[self._serializer]
        conn = Client(self._address, authkey=self._authkey)
        # 'dummy' is a no-op used purely to verify the server is reachable.
        dispatch(conn, None, 'dummy')
        self._state.value = State.STARTED

    def start(self, initializer=None, initargs=()):
        '''
        Spawn a server process for this manager object
        '''
        if self._state.value != State.INITIAL:
            if self._state.value == State.STARTED:
                raise ProcessError("Already started server")
            elif self._state.value == State.SHUTDOWN:
                raise ProcessError("Manager has shut down")
            else:
                raise ProcessError(
                    "Unknown state {!r}".format(self._state.value))

        if initializer is not None and not callable(initializer):
            raise TypeError('initializer must be a callable')

        # pipe over which we will retrieve address of server
        reader, writer = connection.Pipe(duplex=False)

        # spawn process which runs a server
        self._process = self._ctx.Process(
            target=type(self)._run_server,
            args=(self._registry, self._address, self._authkey,
                  self._serializer, writer, initializer, initargs),
            )
        ident = ':'.join(str(i) for i in self._process._identity)
        self._process.name = type(self).__name__ + '-' + ident
        self._process.start()

        # get address of server (the bound address may differ from the
        # requested one, e.g. when ('', 0) asked for an ephemeral port)
        writer.close()
        self._address = reader.recv()
        reader.close()

        # register a finalizer so the server is shut down at exit
        self._state.value = State.STARTED
        self.shutdown = util.Finalize(
            self, type(self)._finalize_manager,
            args=(self._process, self._address, self._authkey,
                  self._state, self._Client),
            exitpriority=0
            )

    @classmethod
    def _run_server(cls, registry, address, authkey, serializer, writer,
                    initializer=None, initargs=()):
        '''
        Create a server, report its address and run it
        '''
        # bpo-36368: protect server process from KeyboardInterrupt signals
        signal.signal(signal.SIGINT, signal.SIG_IGN)

        if initializer is not None:
            initializer(*initargs)

        # create server
        server = cls._Server(registry, address, authkey, serializer)

        # inform parent process of the server's address
        writer.send(server.address)
        writer.close()

        # run the manager (blocks until shutdown)
        util.info('manager serving at %r', server.address)
        server.serve_forever()

    def _create(self, typeid, /, *args, **kwds):
        '''
        Create a new shared object; return the token and exposed tuple
        '''
        assert self._state.value == State.STARTED, 'server not yet started'
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            id, exposed = dispatch(conn, None, 'create', (typeid,)+args, kwds)
        finally:
            conn.close()
        return Token(typeid, self._address, id), exposed

    def join(self, timeout=None):
        '''
        Join the manager process (if it has been spawned)
        '''
        if self._process is not None:
            self._process.join(timeout)
            if not self._process.is_alive():
                self._process = None

    def _debug_info(self):
        '''
        Return some info about the servers shared objects and connections
        '''
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            return dispatch(conn, None, 'debug_info')
        finally:
            conn.close()

    def _number_of_objects(self):
        '''
        Return the number of shared objects
        '''
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            return dispatch(conn, None, 'number_of_objects')
        finally:
            conn.close()

    def __enter__(self):
        # Context-manager entry auto-starts a not-yet-started manager.
        if self._state.value == State.INITIAL:
            self.start()
        if self._state.value != State.STARTED:
            if self._state.value == State.INITIAL:
                raise ProcessError("Unable to start server")
            elif self._state.value == State.SHUTDOWN:
                raise ProcessError("Manager has shut down")
            else:
                raise ProcessError(
                    "Unknown state {!r}".format(self._state.value))
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.shutdown()

    @staticmethod
    def _finalize_manager(process, address, authkey, state, _Client):
        '''
        Shutdown the manager process; will be registered as a finalizer.

        First asks the server to shut down cleanly; escalates to
        terminate() if it is still alive one second later.
        '''
        if process.is_alive():
            util.info('sending shutdown message to manager')
            try:
                conn = _Client(address, authkey=authkey)
                try:
                    dispatch(conn, None, 'shutdown')
                finally:
                    conn.close()
            except Exception:
                pass

            process.join(timeout=1.0)
            if process.is_alive():
                util.info('manager still alive')
                if hasattr(process, 'terminate'):
                    util.info('trying to `terminate()` manager process')
                    process.terminate()
                    process.join(timeout=1.0)
                    if process.is_alive():
                        util.info('manager still alive after terminate')

        state.value = State.SHUTDOWN
        # Drop the proxy-side cache keyed by this server's address.
        try:
            del BaseProxy._address_to_local[address]
        except KeyError:
            pass

    @property
    def address(self):
        return self._address

    @classmethod
    def register(cls, typeid, callable=None, proxytype=None, exposed=None,
                 method_to_typeid=None, create_method=True):
        '''
        Register a typeid with the manager type.

        When `create_method` is true, a factory method named `typeid` is
        added to the class; calling it on a started manager creates the
        shared object server-side and returns a proxy for it.
        '''
        # Give each subclass its own registry dict on first registration
        # so registrations do not leak onto the base class.
        if '_registry' not in cls.__dict__:
            cls._registry = cls._registry.copy()

        if proxytype is None:
            proxytype = AutoProxy

        exposed = exposed or getattr(proxytype, '_exposed_', None)

        method_to_typeid = method_to_typeid or \
                           getattr(proxytype, '_method_to_typeid_', None)

        if method_to_typeid:
            for key, value in list(method_to_typeid.items()): # isinstance?
                assert type(key) is str, '%r is not a string' % key
                assert type(value) is str, '%r is not a string' % value

        cls._registry[typeid] = (
            callable, exposed, method_to_typeid, proxytype
            )

        if create_method:
            def temp(self, /, *args, **kwds):
                util.debug('requesting creation of a shared %r object', typeid)
                token, exp = self._create(typeid, *args, **kwds)
                proxy = proxytype(
                    token, self._serializer, manager=self,
                    authkey=self._authkey, exposed=exp
                    )
                # Drop the extra server-side reference added by create();
                # the proxy's own incref keeps the object alive.
                conn = self._Client(token.address, authkey=self._authkey)
                dispatch(conn, None, 'decref', (token.id,))
                return proxy
            temp.__name__ = typeid
            setattr(cls, typeid, temp)
733
+
734
+ #
735
+ # Subclass of set which get cleared after a fork
736
+ #
737
+
738
class ProcessLocalSet(set):
    # A set that empties itself in the child after a fork and pickles as
    # an empty set, so proxy-reference bookkeeping never leaks across
    # process boundaries.
    def __init__(self):
        util.register_after_fork(self, lambda obj: obj.clear())
    def __reduce__(self):
        # Unpickle as a fresh empty instance.
        return type(self), ()
743
+
744
+ #
745
+ # Definition of BaseProxy
746
+ #
747
+
748
class BaseProxy(object):
    '''
    A base for proxies of shared objects.

    Each proxy holds a Token identifying a referent living in a manager's
    Server process and forwards method calls over a per-thread connection
    via _callmethod(). Reference counting on the server is maintained via
    incref/decref dispatches.
    '''
    _address_to_local = {}
    _mutex = util.ForkAwareThreadLock()

    def __init__(self, token, serializer, manager=None,
                 authkey=None, exposed=None, incref=True, manager_owned=False):
        with BaseProxy._mutex:
            tls_idset = BaseProxy._address_to_local.get(token.address, None)
            if tls_idset is None:
                tls_idset = util.ForkAwareLocal(), ProcessLocalSet()
                BaseProxy._address_to_local[token.address] = tls_idset

        # self._tls is used to record the connection used by this
        # thread to communicate with the manager at token.address
        self._tls = tls_idset[0]

        # self._idset is used to record the identities of all shared
        # objects for which the current process owns references and
        # which are in the manager at token.address
        self._idset = tls_idset[1]

        self._token = token
        self._id = self._token.id
        self._manager = manager
        self._serializer = serializer
        self._Client = listener_client[serializer][1]

        # Should be set to True only when a proxy object is being created
        # on the manager server; primary use case: nested proxy objects.
        # RebuildProxy detects when a proxy is being created on the manager
        # and sets this value appropriately.
        self._owned_by_manager = manager_owned

        if authkey is not None:
            self._authkey = process.AuthenticationString(authkey)
        elif self._manager is not None:
            self._authkey = self._manager._authkey
        else:
            self._authkey = process.current_process().authkey

        if incref:
            self._incref()

        util.register_after_fork(self, BaseProxy._after_fork)

    def _connect(self):
        # Open (and cache in thread-local storage) this thread's
        # connection to the manager server.
        util.debug('making connection to manager')
        name = process.current_process().name
        if threading.current_thread().name != 'MainThread':
            name += '|' + threading.current_thread().name
        conn = self._Client(self._token.address, authkey=self._authkey)
        dispatch(conn, None, 'accept_connection', (name,))
        self._tls.connection = conn

    def _callmethod(self, methodname, args=(), kwds={}):
        '''
        Try to call a method of the referent and return a copy of the result
        '''
        try:
            conn = self._tls.connection
        except AttributeError:
            # First call from this thread: establish the connection lazily.
            util.debug('thread %r does not own a connection',
                       threading.current_thread().name)
            self._connect()
            conn = self._tls.connection

        conn.send((self._id, methodname, args, kwds))
        kind, result = conn.recv()

        if kind == '#RETURN':
            return result
        elif kind == '#PROXY':
            # Server created a nested shared object; wrap its token in the
            # registered proxy type and drop the server's temporary ref.
            exposed, token = result
            proxytype = self._manager._registry[token.typeid][-1]
            token.address = self._token.address
            proxy = proxytype(
                token, self._serializer, manager=self._manager,
                authkey=self._authkey, exposed=exposed
                )
            conn = self._Client(token.address, authkey=self._authkey)
            dispatch(conn, None, 'decref', (token.id,))
            return proxy
        # '#ERROR'/'#TRACEBACK'/'#UNSERIALIZABLE': re-raise client-side.
        raise convert_to_error(kind, result)

    def _getvalue(self):
        '''
        Get a copy of the value of the referent
        '''
        return self._callmethod('#GETVALUE')

    def _incref(self):
        # Tell the server this process holds a reference, and register a
        # finalizer that will decref on garbage collection / exit.
        if self._owned_by_manager:
            util.debug('owned_by_manager skipped INCREF of %r', self._token.id)
            return

        conn = self._Client(self._token.address, authkey=self._authkey)
        dispatch(conn, None, 'incref', (self._id,))
        util.debug('INCREF %r', self._token.id)

        self._idset.add(self._id)

        state = self._manager and self._manager._state

        self._close = util.Finalize(
            self, BaseProxy._decref,
            args=(self._token, self._authkey, state,
                  self._tls, self._idset, self._Client),
            exitpriority=10
            )

    @staticmethod
    def _decref(token, authkey, state, tls, idset, _Client):
        # Finalizer body: release this process's server-side reference.
        idset.discard(token.id)

        # check whether manager is still alive
        if state is None or state.value == State.STARTED:
            # tell manager this process no longer cares about referent
            try:
                util.debug('DECREF %r', token.id)
                conn = _Client(token.address, authkey=authkey)
                dispatch(conn, None, 'decref', (token.id,))
            except Exception as e:
                util.debug('... decref failed %s', e)

        else:
            util.debug('DECREF %r -- manager already shutdown', token.id)

        # check whether we can close this thread's connection because
        # the process owns no more references to objects for this manager
        if not idset and hasattr(tls, 'connection'):
            util.debug('thread %r has no more proxies so closing conn',
                       threading.current_thread().name)
            tls.connection.close()
            del tls.connection

    def _after_fork(self):
        # In the child, the inherited manager reference is invalid; re-count
        # this proxy's reference with the server directly.
        self._manager = None
        try:
            self._incref()
        except Exception as e:
            # the proxy may just be for a manager which has shutdown
            util.info('incref failed: %s' % e)

    def __reduce__(self):
        # Pickle as a RebuildProxy call; the authkey travels only when we
        # are pickling for process spawning (never onto the wire).
        kwds = {}
        if get_spawning_popen() is not None:
            kwds['authkey'] = self._authkey

        if getattr(self, '_isauto', False):
            kwds['exposed'] = self._exposed_
            return (RebuildProxy,
                    (AutoProxy, self._token, self._serializer, kwds))
        else:
            return (RebuildProxy,
                    (type(self), self._token, self._serializer, kwds))

    def __deepcopy__(self, memo):
        # Deep copy materializes a local copy of the referent.
        return self._getvalue()

    def __repr__(self):
        return '<%s object, typeid %r at %#x>' % \
               (type(self).__name__, self._token.typeid, id(self))

    def __str__(self):
        '''
        Return representation of the referent (or a fall-back if that fails)
        '''
        try:
            return self._callmethod('__repr__')
        except Exception:
            return repr(self)[:-1] + "; '__str__()' failed>"
922
+
923
+ #
924
+ # Function used for unpickling
925
+ #
926
+
927
def RebuildProxy(func, token, serializer, kwds):
    '''
    Function used for unpickling proxy objects.

    When unpickling happens inside the manager server that owns the
    referent, the proxy is marked manager-owned and the referent is
    stashed in id_to_local_proxy_obj so it survives external decrefs.
    '''
    server = getattr(process.current_process(), '_manager_server', None)
    if server and server.address == token.address:
        util.debug('Rebuild a proxy owned by manager, token=%r', token)
        kwds['manager_owned'] = True
        if token.id not in server.id_to_local_proxy_obj:
            server.id_to_local_proxy_obj[token.id] = \
                server.id_to_obj[token.id]
    # Skip incref while inheriting state during process startup: the parent
    # already holds the reference being transferred.
    incref = (
        kwds.pop('incref', True) and
        not getattr(process.current_process(), '_inheriting', False)
        )
    return func(token, serializer, incref=incref, **kwds)
943
+
944
+ #
945
+ # Functions to create proxies and proxy types
946
+ #
947
+
948
def MakeProxyType(name, exposed, _cache={}):
    '''
    Return a proxy type whose methods are given by `exposed`.

    Each generated method simply forwards to _callmethod(). Types are
    memoized in `_cache` (keyed by name and exposed tuple) so repeated
    calls return the same class.
    '''
    exposed = tuple(exposed)
    try:
        return _cache[(name, exposed)]
    except KeyError:
        pass

    dic = {}

    # Generate one forwarding method per exposed name via exec so each
    # method carries its proper name and positional-only `self`.
    for meth in exposed:
        exec('''def %s(self, /, *args, **kwds):
        return self._callmethod(%r, args, kwds)''' % (meth, meth), dic)

    ProxyType = type(name, (BaseProxy,), dic)
    ProxyType._exposed_ = exposed
    _cache[(name, exposed)] = ProxyType
    return ProxyType
968
+
969
+
970
def AutoProxy(token, serializer, manager=None, authkey=None,
              exposed=None, incref=True, manager_owned=False):
    '''
    Return an auto-proxy for `token`.

    If `exposed` is not given it is fetched from the server via
    'get_methods'; a proxy class is then generated with MakeProxyType and
    instantiated. The result is tagged `_isauto` so pickling round-trips
    through AutoProxy again (see BaseProxy.__reduce__).
    '''
    _Client = listener_client[serializer][1]

    if exposed is None:
        conn = _Client(token.address, authkey=authkey)
        try:
            exposed = dispatch(conn, None, 'get_methods', (token,))
        finally:
            conn.close()

    # Resolve the authkey: explicit arg, then manager's, then this
    # process's default.
    if authkey is None and manager is not None:
        authkey = manager._authkey
    if authkey is None:
        authkey = process.current_process().authkey

    ProxyType = MakeProxyType('AutoProxy[%s]' % token.typeid, exposed)
    proxy = ProxyType(token, serializer, manager=manager, authkey=authkey,
                      incref=incref, manager_owned=manager_owned)
    proxy._isauto = True
    return proxy
994
+
995
+ #
996
+ # Types/callables which we will register with SyncManager
997
+ #
998
+
999
class Namespace(object):
    """A simple attribute bag shared via SyncManager.

    Attributes whose names start with '_' are treated as private and are
    omitted from the repr.
    """
    def __init__(self, /, **kwds):
        self.__dict__.update(kwds)

    def __repr__(self):
        shown = sorted(
            '%s=%r' % (attr, val)
            for attr, val in self.__dict__.items()
            if not attr.startswith('_')
            )
        return '%s(%s)' % (self.__class__.__name__, ', '.join(shown))
1010
+
1011
class Value(object):
    """Single shared value with get/set access, served by SyncManager.

    `typecode` is kept only for repr/compatibility; `lock` is accepted for
    API compatibility with multiprocessing.Value and ignored (the manager
    server serializes access).
    """
    def __init__(self, typecode, value, lock=True):
        self._typecode = typecode
        self._value = value

    def get(self):
        """Return the current value."""
        return self._value

    def set(self, value):
        """Replace the current value."""
        self._value = value

    def __repr__(self):
        return '%s(%r, %r)' % (type(self).__name__,
                               self._typecode, self._value)

    value = property(get, set)
1022
+
1023
def Array(typecode, sequence, lock=True):
    """Build the referent for a managed Array: a plain array.array.

    `lock` is accepted for API compatibility with multiprocessing.Array
    and ignored (the manager server serializes access).
    """
    return array.array(typecode, sequence)
1025
+
1026
+ #
1027
+ # Proxy types used by SyncManager
1028
+ #
1029
+
1030
class IteratorProxy(BaseProxy):
    # Proxy for server-side iterators/generators; forwards the full
    # iterator/generator protocol over the connection.
    _exposed_ = ('__next__', 'send', 'throw', 'close')
    def __iter__(self):
        # The proxy itself is the local iterator.
        return self
    def __next__(self, *args):
        return self._callmethod('__next__', args)
    def send(self, *args):
        return self._callmethod('send', args)
    def throw(self, *args):
        return self._callmethod('throw', args)
    def close(self, *args):
        return self._callmethod('close', args)
1042
+
1043
+
1044
class AcquirerProxy(BaseProxy):
    # Proxy for lock-like referents (Lock, RLock, Semaphore, ...);
    # supports the context-manager protocol.
    _exposed_ = ('acquire', 'release')
    def acquire(self, blocking=True, timeout=None):
        # Only pass `timeout` through when given, matching the referent's
        # optional-argument signature.
        args = (blocking,) if timeout is None else (blocking, timeout)
        return self._callmethod('acquire', args)
    def release(self):
        return self._callmethod('release')
    def __enter__(self):
        return self._callmethod('acquire')
    def __exit__(self, exc_type, exc_val, exc_tb):
        return self._callmethod('release')
1055
+
1056
+
1057
class ConditionProxy(AcquirerProxy):
    # Proxy for threading.Condition; wait_for() is implemented locally on
    # top of the remote wait()/predicate.
    _exposed_ = ('acquire', 'release', 'wait', 'notify', 'notify_all')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
    def notify(self, n=1):
        return self._callmethod('notify', (n,))
    def notify_all(self):
        return self._callmethod('notify_all')
    def wait_for(self, predicate, timeout=None):
        # Mirrors threading.Condition.wait_for: re-test `predicate` after
        # each wait, giving up once the deadline passes.
        result = predicate()
        if result:
            return result
        if timeout is not None:
            # getattr fallback keeps compatibility where time.monotonic
            # is unavailable.
            endtime = getattr(time,'monotonic',time.time)() + timeout
        else:
            endtime = None
            waittime = None
        while not result:
            if endtime is not None:
                waittime = endtime - getattr(time,'monotonic',time.time)()
                if waittime <= 0:
                    break
            self.wait(waittime)
            result = predicate()
        return result
1082
+
1083
+
1084
class EventProxy(BaseProxy):
    # Proxy for threading.Event.
    _exposed_ = ('is_set', 'set', 'clear', 'wait')
    def is_set(self):
        return self._callmethod('is_set')
    def set(self):
        return self._callmethod('set')
    def clear(self):
        return self._callmethod('clear')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
1094
+
1095
+
1096
class BarrierProxy(BaseProxy):
    # Proxy for threading.Barrier; read-only attributes are fetched via a
    # remote '__getattribute__' call.
    _exposed_ = ('__getattribute__', 'wait', 'abort', 'reset')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
    def abort(self):
        return self._callmethod('abort')
    def reset(self):
        return self._callmethod('reset')
    @property
    def parties(self):
        return self._callmethod('__getattribute__', ('parties',))
    @property
    def n_waiting(self):
        return self._callmethod('__getattribute__', ('n_waiting',))
    @property
    def broken(self):
        return self._callmethod('__getattribute__', ('broken',))
1113
+
1114
+
1115
class NamespaceProxy(BaseProxy):
    # Proxy for Namespace: forwards attribute access for public names;
    # names beginning with '_' stay local (they belong to the proxy).
    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__')
    def __getattr__(self, key):
        if key[0] == '_':
            return object.__getattribute__(self, key)
        # Fetch _callmethod via object.__getattribute__ to avoid recursing
        # through this __getattr__.
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('__getattribute__', (key,))
    def __setattr__(self, key, value):
        if key[0] == '_':
            return object.__setattr__(self, key, value)
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('__setattr__', (key, value))
    def __delattr__(self, key):
        if key[0] == '_':
            return object.__delattr__(self, key)
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('__delattr__', (key,))
1132
+
1133
+
1134
class ValueProxy(BaseProxy):
    # Proxy for the Value referent; exposes get/set and a `value` property.
    _exposed_ = ('get', 'set')
    def get(self):
        return self._callmethod('get')
    def set(self, value):
        return self._callmethod('set', (value,))
    value = property(get, set)

    __class_getitem__ = classmethod(types.GenericAlias)
1143
+
1144
+
1145
# Generated proxy for managed lists; ListProxy adds the in-place operators,
# which must return the proxy itself (the remote call returns a copy).
BaseListProxy = MakeProxyType('BaseListProxy', (
    '__add__', '__contains__', '__delitem__', '__getitem__', '__len__',
    '__mul__', '__reversed__', '__rmul__', '__setitem__',
    'append', 'count', 'extend', 'index', 'insert', 'pop', 'remove',
    'reverse', 'sort', '__imul__'
    ))
class ListProxy(BaseListProxy):
    def __iadd__(self, value):
        # Mutate remotely, then return self so `p += x` rebinds to the proxy.
        self._callmethod('extend', (value,))
        return self
    def __imul__(self, value):
        self._callmethod('__imul__', (value,))
        return self
1158
+
1159
+
1160
# Generated proxy for managed dicts; iteration returns a proxied iterator
# rather than copying all keys at once.
DictProxy = MakeProxyType('DictProxy', (
    '__contains__', '__delitem__', '__getitem__', '__iter__', '__len__',
    '__setitem__', 'clear', 'copy', 'get', 'items',
    'keys', 'pop', 'popitem', 'setdefault', 'update', 'values'
    ))
DictProxy._method_to_typeid_ = {
    '__iter__': 'Iterator',
    }
1168
+
1169
+
1170
# Generated proxy for managed array.array objects (see Array above).
ArrayProxy = MakeProxyType('ArrayProxy', (
    '__len__', '__getitem__', '__setitem__'
    ))
1173
+
1174
+
1175
# Generated proxy for managed Pool objects; async/iterator-returning
# methods yield proxies (AsyncResult/Iterator) instead of copies.
BasePoolProxy = MakeProxyType('PoolProxy', (
    'apply', 'apply_async', 'close', 'imap', 'imap_unordered', 'join',
    'map', 'map_async', 'starmap', 'starmap_async', 'terminate',
    ))
BasePoolProxy._method_to_typeid_ = {
    'apply_async': 'AsyncResult',
    'map_async': 'AsyncResult',
    'starmap_async': 'AsyncResult',
    'imap': 'Iterator',
    'imap_unordered': 'Iterator'
    }
class PoolProxy(BasePoolProxy):
    # Context-manager support mirroring multiprocessing.Pool: exiting the
    # `with` block terminates the remote pool.
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.terminate()
1191
+
1192
+ #
1193
+ # Definition of SyncManager
1194
+ #
1195
+
1196
class SyncManager(BaseManager):
    '''
    Subclass of `BaseManager` which supports a number of shared object types.

    The types registered are those intended for the synchronization
    of threads, plus `dict`, `list` and `Namespace`.

    The `multiprocess.Manager()` function creates started instances of
    this class.
    '''

# Register the standard shared types. Note both Queue and JoinableQueue
# map to queue.Queue (it already supports task_done/join), and the
# synchronization types are the *threading* versions since they live in a
# single server process.
SyncManager.register('Queue', queue.Queue)
SyncManager.register('JoinableQueue', queue.Queue)
SyncManager.register('Event', threading.Event, EventProxy)
SyncManager.register('Lock', threading.Lock, AcquirerProxy)
SyncManager.register('RLock', threading.RLock, AcquirerProxy)
SyncManager.register('Semaphore', threading.Semaphore, AcquirerProxy)
SyncManager.register('BoundedSemaphore', threading.BoundedSemaphore,
                     AcquirerProxy)
SyncManager.register('Condition', threading.Condition, ConditionProxy)
SyncManager.register('Barrier', threading.Barrier, BarrierProxy)
SyncManager.register('Pool', pool.Pool, PoolProxy)
SyncManager.register('list', list, ListProxy)
SyncManager.register('dict', dict, DictProxy)
SyncManager.register('Value', Value, ValueProxy)
SyncManager.register('Array', Array, ArrayProxy)
SyncManager.register('Namespace', Namespace, NamespaceProxy)

# types returned by methods of PoolProxy; create_method=False because
# clients never create these directly.
SyncManager.register('Iterator', proxytype=IteratorProxy, create_method=False)
SyncManager.register('AsyncResult', create_method=False)
1227
+
1228
+ #
1229
+ # Definition of SharedMemoryManager and SharedMemoryServer
1230
+ #
1231
+
1232
+ if HAS_SHMEM:
1233
    class _SharedMemoryTracker:
        "Manages one or more shared memory segments."

        def __init__(self, name, segment_names=[]):
            # NOTE(review): mutable default is shared across instances that
            # omit `segment_names`; kept as-is to match upstream behavior.
            self.shared_memory_context_name = name
            self.segment_names = segment_names

        def register_segment(self, segment_name):
            "Adds the supplied shared memory block name to tracker."
            util.debug(f"Register segment {segment_name!r} in pid {getpid()}")
            self.segment_names.append(segment_name)

        def destroy_segment(self, segment_name):
            """Calls unlink() on the shared memory block with the supplied name
            and removes it from the list of blocks being tracked."""
            util.debug(f"Destroy segment {segment_name!r} in pid {getpid()}")
            self.segment_names.remove(segment_name)
            segment = shared_memory.SharedMemory(segment_name)
            segment.close()
            segment.unlink()

        def unlink(self):
            "Calls destroy_segment() on all tracked shared memory blocks."
            # Iterate over a copy: destroy_segment() mutates the list.
            for segment_name in self.segment_names[:]:
                self.destroy_segment(segment_name)

        def __del__(self):
            util.debug(f"Call {self.__class__.__name__}.__del__ in {getpid()}")
            self.unlink()

        def __getstate__(self):
            # Pickle only the name and segment list.
            return (self.shared_memory_context_name, self.segment_names)

        def __setstate__(self, state):
            self.__init__(*state)
1268
+
1269
+
1270
+ class SharedMemoryServer(Server):
1271
+
1272
+ public = Server.public + \
1273
+ ['track_segment', 'release_segment', 'list_segments']
1274
+
1275
+ def __init__(self, *args, **kwargs):
1276
+ Server.__init__(self, *args, **kwargs)
1277
+ address = self.address
1278
+ # The address of Linux abstract namespaces can be bytes
1279
+ if isinstance(address, bytes):
1280
+ address = os.fsdecode(address)
1281
+ self.shared_memory_context = \
1282
+ _SharedMemoryTracker(f"shm_{address}_{getpid()}")
1283
+ util.debug(f"SharedMemoryServer started by pid {getpid()}")
1284
+
1285
+ def create(self, c, typeid, /, *args, **kwargs):
1286
+ """Create a new distributed-shared object (not backed by a shared
1287
+ memory block) and return its id to be used in a Proxy Object."""
1288
+ # Unless set up as a shared proxy, don't make shared_memory_context
1289
+ # a standard part of kwargs. This makes things easier for supplying
1290
+ # simple functions.
1291
+ if hasattr(self.registry[typeid][-1], "_shared_memory_proxy"):
1292
+ kwargs['shared_memory_context'] = self.shared_memory_context
1293
+ return Server.create(self, c, typeid, *args, **kwargs)
1294
+
1295
+ def shutdown(self, c):
1296
+ "Call unlink() on all tracked shared memory, terminate the Server."
1297
+ self.shared_memory_context.unlink()
1298
+ return Server.shutdown(self, c)
1299
+
1300
+ def track_segment(self, c, segment_name):
1301
+ "Adds the supplied shared memory block name to Server's tracker."
1302
+ self.shared_memory_context.register_segment(segment_name)
1303
+
1304
+ def release_segment(self, c, segment_name):
1305
+ """Calls unlink() on the shared memory block with the supplied name
1306
+ and removes it from the tracker instance inside the Server."""
1307
+ self.shared_memory_context.destroy_segment(segment_name)
1308
+
1309
+ def list_segments(self, c):
1310
+ """Returns a list of names of shared memory blocks that the Server
1311
+ is currently tracking."""
1312
+ return self.shared_memory_context.segment_names
1313
+
1314
+
1315
+ class SharedMemoryManager(BaseManager):
1316
+ """Like SyncManager but uses SharedMemoryServer instead of Server.
1317
+
1318
+ It provides methods for creating and returning SharedMemory instances
1319
+ and for creating a list-like object (ShareableList) backed by shared
1320
+ memory. It also provides methods that create and return Proxy Objects
1321
+ that support synchronization across processes (i.e. multi-process-safe
1322
+ locks and semaphores).
1323
+ """
1324
+
1325
+ _Server = SharedMemoryServer
1326
+
1327
+ def __init__(self, *args, **kwargs):
1328
+ if os.name == "posix":
1329
+ # bpo-36867: Ensure the resource_tracker is running before
1330
+ # launching the manager process, so that concurrent
1331
+ # shared_memory manipulation both in the manager and in the
1332
+ # current process does not create two resource_tracker
1333
+ # processes.
1334
+ from . import resource_tracker
1335
+ resource_tracker.ensure_running()
1336
+ BaseManager.__init__(self, *args, **kwargs)
1337
+ util.debug(f"{self.__class__.__name__} created by pid {getpid()}")
1338
+
1339
+ def __del__(self):
1340
+ util.debug(f"{self.__class__.__name__}.__del__ by pid {getpid()}")
1341
+ pass
1342
+
1343
+ def get_server(self):
1344
+ 'Better than monkeypatching for now; merge into Server ultimately'
1345
+ if self._state.value != State.INITIAL:
1346
+ if self._state.value == State.STARTED:
1347
+ raise ProcessError("Already started SharedMemoryServer")
1348
+ elif self._state.value == State.SHUTDOWN:
1349
+ raise ProcessError("SharedMemoryManager has shut down")
1350
+ else:
1351
+ raise ProcessError(
1352
+ "Unknown state {!r}".format(self._state.value))
1353
+ return self._Server(self._registry, self._address,
1354
+ self._authkey, self._serializer)
1355
+
1356
+ def SharedMemory(self, size):
1357
+ """Returns a new SharedMemory instance with the specified size in
1358
+ bytes, to be tracked by the manager."""
1359
+ with self._Client(self._address, authkey=self._authkey) as conn:
1360
+ sms = shared_memory.SharedMemory(None, create=True, size=size)
1361
+ try:
1362
+ dispatch(conn, None, 'track_segment', (sms.name,))
1363
+ except BaseException as e:
1364
+ sms.unlink()
1365
+ raise e
1366
+ return sms
1367
+
1368
+ def ShareableList(self, sequence):
1369
+ """Returns a new ShareableList instance populated with the values
1370
+ from the input sequence, to be tracked by the manager."""
1371
+ with self._Client(self._address, authkey=self._authkey) as conn:
1372
+ sl = shared_memory.ShareableList(sequence)
1373
+ try:
1374
+ dispatch(conn, None, 'track_segment', (sl.shm.name,))
1375
+ except BaseException as e:
1376
+ sl.shm.unlink()
1377
+ raise e
1378
+ return sl
vllm/lib/python3.10/site-packages/multiprocess/popen_forkserver.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import io
2
+ import os
3
+
4
+ from .context import reduction, set_spawning_popen
5
+ if not reduction.HAVE_SEND_HANDLE:
6
+ raise ImportError('No support for sending fds between processes')
7
+ from . import forkserver
8
+ from . import popen_fork
9
+ from . import spawn
10
+ from . import util
11
+
12
+
13
+ __all__ = ['Popen']
14
+
15
+ #
16
+ # Wrapper for an fd used while launching a process
17
+ #
18
+
19
+ class _DupFd(object):
20
+ def __init__(self, ind):
21
+ self.ind = ind
22
+ def detach(self):
23
+ return forkserver.get_inherited_fds()[self.ind]
24
+
25
+ #
26
+ # Start child process using a server process
27
+ #
28
+
29
+ class Popen(popen_fork.Popen):
30
+ method = 'forkserver'
31
+ DupFd = _DupFd
32
+
33
+ def __init__(self, process_obj):
34
+ self._fds = []
35
+ super().__init__(process_obj)
36
+
37
+ def duplicate_for_child(self, fd):
38
+ self._fds.append(fd)
39
+ return len(self._fds) - 1
40
+
41
+ def _launch(self, process_obj):
42
+ prep_data = spawn.get_preparation_data(process_obj._name)
43
+ buf = io.BytesIO()
44
+ set_spawning_popen(self)
45
+ try:
46
+ reduction.dump(prep_data, buf)
47
+ reduction.dump(process_obj, buf)
48
+ finally:
49
+ set_spawning_popen(None)
50
+
51
+ self.sentinel, w = forkserver.connect_to_new_process(self._fds)
52
+ # Keep a duplicate of the data pipe's write end as a sentinel of the
53
+ # parent process used by the child process.
54
+ _parent_w = os.dup(w)
55
+ self.finalizer = util.Finalize(self, util.close_fds,
56
+ (_parent_w, self.sentinel))
57
+ with open(w, 'wb', closefd=True) as f:
58
+ f.write(buf.getbuffer())
59
+ self.pid = forkserver.read_signed(self.sentinel)
60
+
61
+ def poll(self, flag=os.WNOHANG):
62
+ if self.returncode is None:
63
+ from multiprocess.connection import wait
64
+ timeout = 0 if flag == os.WNOHANG else None
65
+ if not wait([self.sentinel], timeout):
66
+ return None
67
+ try:
68
+ self.returncode = forkserver.read_signed(self.sentinel)
69
+ except (OSError, EOFError):
70
+ # This should not happen usually, but perhaps the forkserver
71
+ # process itself got killed
72
+ self.returncode = 255
73
+
74
+ return self.returncode
vllm/lib/python3.10/site-packages/multiprocess/popen_spawn_posix.py ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import io
2
+ import os
3
+
4
+ from .context import reduction, set_spawning_popen
5
+ from . import popen_fork
6
+ from . import spawn
7
+ from . import util
8
+
9
+ __all__ = ['Popen']
10
+
11
+
12
+ #
13
+ # Wrapper for an fd used while launching a process
14
+ #
15
+
16
+ class _DupFd(object):
17
+ def __init__(self, fd):
18
+ self.fd = fd
19
+ def detach(self):
20
+ return self.fd
21
+
22
+ #
23
+ # Start child process using a fresh interpreter
24
+ #
25
+
26
+ class Popen(popen_fork.Popen):
27
+ method = 'spawn'
28
+ DupFd = _DupFd
29
+
30
+ def __init__(self, process_obj):
31
+ self._fds = []
32
+ super().__init__(process_obj)
33
+
34
+ def duplicate_for_child(self, fd):
35
+ self._fds.append(fd)
36
+ return fd
37
+
38
+ def _launch(self, process_obj):
39
+ from . import resource_tracker
40
+ tracker_fd = resource_tracker.getfd()
41
+ self._fds.append(tracker_fd)
42
+ prep_data = spawn.get_preparation_data(process_obj._name)
43
+ fp = io.BytesIO()
44
+ set_spawning_popen(self)
45
+ try:
46
+ reduction.dump(prep_data, fp)
47
+ reduction.dump(process_obj, fp)
48
+ finally:
49
+ set_spawning_popen(None)
50
+
51
+ parent_r = child_w = child_r = parent_w = None
52
+ try:
53
+ parent_r, child_w = os.pipe()
54
+ child_r, parent_w = os.pipe()
55
+ cmd = spawn.get_command_line(tracker_fd=tracker_fd,
56
+ pipe_handle=child_r)
57
+ self._fds.extend([child_r, child_w])
58
+ self.pid = util.spawnv_passfds(spawn.get_executable(),
59
+ cmd, self._fds)
60
+ self.sentinel = parent_r
61
+ with open(parent_w, 'wb', closefd=False) as f:
62
+ f.write(fp.getbuffer())
63
+ finally:
64
+ fds_to_close = []
65
+ for fd in (parent_r, parent_w):
66
+ if fd is not None:
67
+ fds_to_close.append(fd)
68
+ self.finalizer = util.Finalize(self, util.close_fds, fds_to_close)
69
+
70
+ for fd in (child_r, child_w):
71
+ if fd is not None:
72
+ os.close(fd)
vllm/lib/python3.10/site-packages/py/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (4.31 kB). View file
 
vllm/lib/python3.10/site-packages/py/__pycache__/__metainfo.cpython-310.pyc ADDED
Binary file (227 Bytes). View file
 
vllm/lib/python3.10/site-packages/py/__pycache__/_error.cpython-310.pyc ADDED
Binary file (2.69 kB). View file
 
vllm/lib/python3.10/site-packages/py/__pycache__/_std.cpython-310.pyc ADDED
Binary file (1.16 kB). View file
 
vllm/lib/python3.10/site-packages/py/__pycache__/test.cpython-310.pyc ADDED
Binary file (291 Bytes). View file
 
vllm/lib/python3.10/site-packages/py/_log/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ """ logging API ('producers' and 'consumers' connected via keywords) """
2
+
vllm/lib/python3.10/site-packages/py/_log/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (235 Bytes). View file
 
vllm/lib/python3.10/site-packages/py/_log/__pycache__/log.cpython-310.pyc ADDED
Binary file (7.39 kB). View file
 
vllm/lib/python3.10/site-packages/py/_log/__pycache__/warning.cpython-310.pyc ADDED
Binary file (2.27 kB). View file
 
vllm/lib/python3.10/site-packages/py/_log/log.py ADDED
@@ -0,0 +1,206 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ basic logging functionality based on a producer/consumer scheme.
3
+
4
+ XXX implement this API: (maybe put it into slogger.py?)
5
+
6
+ log = Logger(
7
+ info=py.log.STDOUT,
8
+ debug=py.log.STDOUT,
9
+ command=None)
10
+ log.info("hello", "world")
11
+ log.command("hello", "world")
12
+
13
+ log = Logger(info=Logger(something=...),
14
+ debug=py.log.STDOUT,
15
+ command=None)
16
+ """
17
+ import py
18
+ import sys
19
+
20
+
21
+ class Message(object):
22
+ def __init__(self, keywords, args):
23
+ self.keywords = keywords
24
+ self.args = args
25
+
26
+ def content(self):
27
+ return " ".join(map(str, self.args))
28
+
29
+ def prefix(self):
30
+ return "[%s] " % (":".join(self.keywords))
31
+
32
+ def __str__(self):
33
+ return self.prefix() + self.content()
34
+
35
+
36
+ class Producer(object):
37
+ """ (deprecated) Log producer API which sends messages to be logged
38
+ to a 'consumer' object, which then prints them to stdout,
39
+ stderr, files, etc. Used extensively by PyPy-1.1.
40
+ """
41
+
42
+ Message = Message # to allow later customization
43
+ keywords2consumer = {}
44
+
45
+ def __init__(self, keywords, keywordmapper=None, **kw):
46
+ if hasattr(keywords, 'split'):
47
+ keywords = tuple(keywords.split())
48
+ self._keywords = keywords
49
+ if keywordmapper is None:
50
+ keywordmapper = default_keywordmapper
51
+ self._keywordmapper = keywordmapper
52
+
53
+ def __repr__(self):
54
+ return "<py.log.Producer %s>" % ":".join(self._keywords)
55
+
56
+ def __getattr__(self, name):
57
+ if '_' in name:
58
+ raise AttributeError(name)
59
+ producer = self.__class__(self._keywords + (name,))
60
+ setattr(self, name, producer)
61
+ return producer
62
+
63
+ def __call__(self, *args):
64
+ """ write a message to the appropriate consumer(s) """
65
+ func = self._keywordmapper.getconsumer(self._keywords)
66
+ if func is not None:
67
+ func(self.Message(self._keywords, args))
68
+
69
+ class KeywordMapper:
70
+ def __init__(self):
71
+ self.keywords2consumer = {}
72
+
73
+ def getstate(self):
74
+ return self.keywords2consumer.copy()
75
+
76
+ def setstate(self, state):
77
+ self.keywords2consumer.clear()
78
+ self.keywords2consumer.update(state)
79
+
80
+ def getconsumer(self, keywords):
81
+ """ return a consumer matching the given keywords.
82
+
83
+ tries to find the most suitable consumer by walking, starting from
84
+ the back, the list of keywords, the first consumer matching a
85
+ keyword is returned (falling back to py.log.default)
86
+ """
87
+ for i in range(len(keywords), 0, -1):
88
+ try:
89
+ return self.keywords2consumer[keywords[:i]]
90
+ except KeyError:
91
+ continue
92
+ return self.keywords2consumer.get('default', default_consumer)
93
+
94
+ def setconsumer(self, keywords, consumer):
95
+ """ set a consumer for a set of keywords. """
96
+ # normalize to tuples
97
+ if isinstance(keywords, str):
98
+ keywords = tuple(filter(None, keywords.split()))
99
+ elif hasattr(keywords, '_keywords'):
100
+ keywords = keywords._keywords
101
+ elif not isinstance(keywords, tuple):
102
+ raise TypeError("key %r is not a string or tuple" % (keywords,))
103
+ if consumer is not None and not py.builtin.callable(consumer):
104
+ if not hasattr(consumer, 'write'):
105
+ raise TypeError(
106
+ "%r should be None, callable or file-like" % (consumer,))
107
+ consumer = File(consumer)
108
+ self.keywords2consumer[keywords] = consumer
109
+
110
+
111
+ def default_consumer(msg):
112
+ """ the default consumer, prints the message to stdout (using 'print') """
113
+ sys.stderr.write(str(msg)+"\n")
114
+
115
+ default_keywordmapper = KeywordMapper()
116
+
117
+
118
+ def setconsumer(keywords, consumer):
119
+ default_keywordmapper.setconsumer(keywords, consumer)
120
+
121
+
122
+ def setstate(state):
123
+ default_keywordmapper.setstate(state)
124
+
125
+
126
+ def getstate():
127
+ return default_keywordmapper.getstate()
128
+
129
+ #
130
+ # Consumers
131
+ #
132
+
133
+
134
+ class File(object):
135
+ """ log consumer wrapping a file(-like) object """
136
+ def __init__(self, f):
137
+ assert hasattr(f, 'write')
138
+ # assert isinstance(f, file) or not hasattr(f, 'open')
139
+ self._file = f
140
+
141
+ def __call__(self, msg):
142
+ """ write a message to the log """
143
+ self._file.write(str(msg) + "\n")
144
+ if hasattr(self._file, 'flush'):
145
+ self._file.flush()
146
+
147
+
148
+ class Path(object):
149
+ """ log consumer that opens and writes to a Path """
150
+ def __init__(self, filename, append=False,
151
+ delayed_create=False, buffering=False):
152
+ self._append = append
153
+ self._filename = str(filename)
154
+ self._buffering = buffering
155
+ if not delayed_create:
156
+ self._openfile()
157
+
158
+ def _openfile(self):
159
+ mode = self._append and 'a' or 'w'
160
+ f = open(self._filename, mode)
161
+ self._file = f
162
+
163
+ def __call__(self, msg):
164
+ """ write a message to the log """
165
+ if not hasattr(self, "_file"):
166
+ self._openfile()
167
+ self._file.write(str(msg) + "\n")
168
+ if not self._buffering:
169
+ self._file.flush()
170
+
171
+
172
+ def STDOUT(msg):
173
+ """ consumer that writes to sys.stdout """
174
+ sys.stdout.write(str(msg)+"\n")
175
+
176
+
177
+ def STDERR(msg):
178
+ """ consumer that writes to sys.stderr """
179
+ sys.stderr.write(str(msg)+"\n")
180
+
181
+
182
+ class Syslog:
183
+ """ consumer that writes to the syslog daemon """
184
+
185
+ def __init__(self, priority=None):
186
+ if priority is None:
187
+ priority = self.LOG_INFO
188
+ self.priority = priority
189
+
190
+ def __call__(self, msg):
191
+ """ write a message to the log """
192
+ import syslog
193
+ syslog.syslog(self.priority, str(msg))
194
+
195
+
196
+ try:
197
+ import syslog
198
+ except ImportError:
199
+ pass
200
+ else:
201
+ for _prio in "EMERG ALERT CRIT ERR WARNING NOTICE INFO DEBUG".split():
202
+ _prio = "LOG_" + _prio
203
+ try:
204
+ setattr(Syslog, _prio, getattr(syslog, _prio))
205
+ except AttributeError:
206
+ pass
vllm/lib/python3.10/site-packages/py/_log/warning.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import py, sys
2
+
3
+ class DeprecationWarning(DeprecationWarning):
4
+ def __init__(self, msg, path, lineno):
5
+ self.msg = msg
6
+ self.path = path
7
+ self.lineno = lineno
8
+ def __repr__(self):
9
+ return "%s:%d: %s" %(self.path, self.lineno+1, self.msg)
10
+ def __str__(self):
11
+ return self.msg
12
+
13
+ def _apiwarn(startversion, msg, stacklevel=2, function=None):
14
+ # below is mostly COPIED from python2.4/warnings.py's def warn()
15
+ # Get context information
16
+ if isinstance(stacklevel, str):
17
+ frame = sys._getframe(1)
18
+ level = 1
19
+ found = frame.f_code.co_filename.find(stacklevel) != -1
20
+ while frame:
21
+ co = frame.f_code
22
+ if co.co_filename.find(stacklevel) == -1:
23
+ if found:
24
+ stacklevel = level
25
+ break
26
+ else:
27
+ found = True
28
+ level += 1
29
+ frame = frame.f_back
30
+ else:
31
+ stacklevel = 1
32
+ msg = "%s (since version %s)" %(msg, startversion)
33
+ warn(msg, stacklevel=stacklevel+1, function=function)
34
+
35
+
36
+ def warn(msg, stacklevel=1, function=None):
37
+ if function is not None:
38
+ import inspect
39
+ filename = inspect.getfile(function)
40
+ lineno = py.code.getrawcode(function).co_firstlineno
41
+ else:
42
+ try:
43
+ caller = sys._getframe(stacklevel)
44
+ except ValueError:
45
+ globals = sys.__dict__
46
+ lineno = 1
47
+ else:
48
+ globals = caller.f_globals
49
+ lineno = caller.f_lineno
50
+ if '__name__' in globals:
51
+ module = globals['__name__']
52
+ else:
53
+ module = "<string>"
54
+ filename = globals.get('__file__')
55
+ if filename:
56
+ fnl = filename.lower()
57
+ if fnl.endswith(".pyc") or fnl.endswith(".pyo"):
58
+ filename = filename[:-1]
59
+ elif fnl.endswith("$py.class"):
60
+ filename = filename.replace('$py.class', '.py')
61
+ else:
62
+ if module == "__main__":
63
+ try:
64
+ filename = sys.argv[0]
65
+ except AttributeError:
66
+ # embedded interpreters don't have sys.argv, see bug #839151
67
+ filename = '__main__'
68
+ if not filename:
69
+ filename = module
70
+ path = py.path.local(filename)
71
+ warning = DeprecationWarning(msg, path, lineno)
72
+ import warnings
73
+ warnings.warn_explicit(warning, category=Warning,
74
+ filename=str(warning.path),
75
+ lineno=warning.lineno,
76
+ registry=warnings.__dict__.setdefault(
77
+ "__warningsregistry__", {})
78
+ )
79
+
vllm/lib/python3.10/site-packages/py/_path/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ """ unified file system api """
vllm/lib/python3.10/site-packages/py/_path/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (195 Bytes). View file
 
vllm/lib/python3.10/site-packages/py/_path/__pycache__/cacheutil.cpython-310.pyc ADDED
Binary file (4.76 kB). View file
 
vllm/lib/python3.10/site-packages/py/_path/__pycache__/common.cpython-310.pyc ADDED
Binary file (14.9 kB). View file
 
vllm/lib/python3.10/site-packages/py/_path/__pycache__/local.cpython-310.pyc ADDED
Binary file (31.2 kB). View file
 
vllm/lib/python3.10/site-packages/py/_path/__pycache__/svnurl.cpython-310.pyc ADDED
Binary file (13.3 kB). View file
 
vllm/lib/python3.10/site-packages/py/_path/__pycache__/svnwc.cpython-310.pyc ADDED
Binary file (35.9 kB). View file
 
vllm/lib/python3.10/site-packages/py/_path/cacheutil.py ADDED
@@ -0,0 +1,114 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ This module contains multithread-safe cache implementations.
3
+
4
+ All Caches have
5
+
6
+ getorbuild(key, builder)
7
+ delentry(key)
8
+
9
+ methods and allow configuration when instantiating the cache class.
10
+ """
11
+ from time import time as gettime
12
+
13
+ class BasicCache(object):
14
+ def __init__(self, maxentries=128):
15
+ self.maxentries = maxentries
16
+ self.prunenum = int(maxentries - maxentries/8)
17
+ self._dict = {}
18
+
19
+ def clear(self):
20
+ self._dict.clear()
21
+
22
+ def _getentry(self, key):
23
+ return self._dict[key]
24
+
25
+ def _putentry(self, key, entry):
26
+ self._prunelowestweight()
27
+ self._dict[key] = entry
28
+
29
+ def delentry(self, key, raising=False):
30
+ try:
31
+ del self._dict[key]
32
+ except KeyError:
33
+ if raising:
34
+ raise
35
+
36
+ def getorbuild(self, key, builder):
37
+ try:
38
+ entry = self._getentry(key)
39
+ except KeyError:
40
+ entry = self._build(key, builder)
41
+ self._putentry(key, entry)
42
+ return entry.value
43
+
44
+ def _prunelowestweight(self):
45
+ """ prune out entries with lowest weight. """
46
+ numentries = len(self._dict)
47
+ if numentries >= self.maxentries:
48
+ # evict according to entry's weight
49
+ items = [(entry.weight, key)
50
+ for key, entry in self._dict.items()]
51
+ items.sort()
52
+ index = numentries - self.prunenum
53
+ if index > 0:
54
+ for weight, key in items[:index]:
55
+ # in MT situations the element might be gone
56
+ self.delentry(key, raising=False)
57
+
58
+ class BuildcostAccessCache(BasicCache):
59
+ """ A BuildTime/Access-counting cache implementation.
60
+ the weight of a value is computed as the product of
61
+
62
+ num-accesses-of-a-value * time-to-build-the-value
63
+
64
+ The values with the least such weights are evicted
65
+ if the cache maxentries threshold is superceded.
66
+ For implementation flexibility more than one object
67
+ might be evicted at a time.
68
+ """
69
+ # time function to use for measuring build-times
70
+
71
+ def _build(self, key, builder):
72
+ start = gettime()
73
+ val = builder()
74
+ end = gettime()
75
+ return WeightedCountingEntry(val, end-start)
76
+
77
+
78
+ class WeightedCountingEntry(object):
79
+ def __init__(self, value, oneweight):
80
+ self._value = value
81
+ self.weight = self._oneweight = oneweight
82
+
83
+ def value(self):
84
+ self.weight += self._oneweight
85
+ return self._value
86
+ value = property(value)
87
+
88
+ class AgingCache(BasicCache):
89
+ """ This cache prunes out cache entries that are too old.
90
+ """
91
+ def __init__(self, maxentries=128, maxseconds=10.0):
92
+ super(AgingCache, self).__init__(maxentries)
93
+ self.maxseconds = maxseconds
94
+
95
+ def _getentry(self, key):
96
+ entry = self._dict[key]
97
+ if entry.isexpired():
98
+ self.delentry(key)
99
+ raise KeyError(key)
100
+ return entry
101
+
102
+ def _build(self, key, builder):
103
+ val = builder()
104
+ entry = AgingEntry(val, gettime() + self.maxseconds)
105
+ return entry
106
+
107
+ class AgingEntry(object):
108
+ def __init__(self, value, expirationtime):
109
+ self.value = value
110
+ self.weight = expirationtime
111
+
112
+ def isexpired(self):
113
+ t = gettime()
114
+ return t >= self.weight
vllm/lib/python3.10/site-packages/py/_path/local.py ADDED
@@ -0,0 +1,1030 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ local path implementation.
3
+ """
4
+ from __future__ import with_statement
5
+
6
+ from contextlib import contextmanager
7
+ import sys, os, atexit, io, uuid
8
+ import py
9
+ from py._path import common
10
+ from py._path.common import iswin32, fspath
11
+ from stat import S_ISLNK, S_ISDIR, S_ISREG
12
+
13
+ from os.path import abspath, normpath, isabs, exists, isdir, isfile, islink, dirname
14
+
15
+ if sys.version_info > (3,0):
16
+ def map_as_list(func, iter):
17
+ return list(map(func, iter))
18
+ else:
19
+ map_as_list = map
20
+
21
+ ALLOW_IMPORTLIB_MODE = sys.version_info > (3,5)
22
+ if ALLOW_IMPORTLIB_MODE:
23
+ import importlib
24
+
25
+
26
+ class Stat(object):
27
+ def __getattr__(self, name):
28
+ return getattr(self._osstatresult, "st_" + name)
29
+
30
+ def __init__(self, path, osstatresult):
31
+ self.path = path
32
+ self._osstatresult = osstatresult
33
+
34
+ @property
35
+ def owner(self):
36
+ if iswin32:
37
+ raise NotImplementedError("XXX win32")
38
+ import pwd
39
+ entry = py.error.checked_call(pwd.getpwuid, self.uid)
40
+ return entry[0]
41
+
42
+ @property
43
+ def group(self):
44
+ """ return group name of file. """
45
+ if iswin32:
46
+ raise NotImplementedError("XXX win32")
47
+ import grp
48
+ entry = py.error.checked_call(grp.getgrgid, self.gid)
49
+ return entry[0]
50
+
51
+ def isdir(self):
52
+ return S_ISDIR(self._osstatresult.st_mode)
53
+
54
+ def isfile(self):
55
+ return S_ISREG(self._osstatresult.st_mode)
56
+
57
+ def islink(self):
58
+ st = self.path.lstat()
59
+ return S_ISLNK(self._osstatresult.st_mode)
60
+
61
+ class PosixPath(common.PathBase):
62
+ def chown(self, user, group, rec=0):
63
+ """ change ownership to the given user and group.
64
+ user and group may be specified by a number or
65
+ by a name. if rec is True change ownership
66
+ recursively.
67
+ """
68
+ uid = getuserid(user)
69
+ gid = getgroupid(group)
70
+ if rec:
71
+ for x in self.visit(rec=lambda x: x.check(link=0)):
72
+ if x.check(link=0):
73
+ py.error.checked_call(os.chown, str(x), uid, gid)
74
+ py.error.checked_call(os.chown, str(self), uid, gid)
75
+
76
+ def readlink(self):
77
+ """ return value of a symbolic link. """
78
+ return py.error.checked_call(os.readlink, self.strpath)
79
+
80
+ def mklinkto(self, oldname):
81
+ """ posix style hard link to another name. """
82
+ py.error.checked_call(os.link, str(oldname), str(self))
83
+
84
+ def mksymlinkto(self, value, absolute=1):
85
+ """ create a symbolic link with the given value (pointing to another name). """
86
+ if absolute:
87
+ py.error.checked_call(os.symlink, str(value), self.strpath)
88
+ else:
89
+ base = self.common(value)
90
+ # with posix local paths '/' is always a common base
91
+ relsource = self.__class__(value).relto(base)
92
+ reldest = self.relto(base)
93
+ n = reldest.count(self.sep)
94
+ target = self.sep.join(('..', )*n + (relsource, ))
95
+ py.error.checked_call(os.symlink, target, self.strpath)
96
+
97
+ def getuserid(user):
98
+ import pwd
99
+ if not isinstance(user, int):
100
+ user = pwd.getpwnam(user)[2]
101
+ return user
102
+
103
+ def getgroupid(group):
104
+ import grp
105
+ if not isinstance(group, int):
106
+ group = grp.getgrnam(group)[2]
107
+ return group
108
+
109
+ FSBase = not iswin32 and PosixPath or common.PathBase
110
+
111
+ class LocalPath(FSBase):
112
+ """ object oriented interface to os.path and other local filesystem
113
+ related information.
114
+ """
115
+ class ImportMismatchError(ImportError):
116
+ """ raised on pyimport() if there is a mismatch of __file__'s"""
117
+
118
+ sep = os.sep
119
+ class Checkers(common.Checkers):
120
+ def _stat(self):
121
+ try:
122
+ return self._statcache
123
+ except AttributeError:
124
+ try:
125
+ self._statcache = self.path.stat()
126
+ except py.error.ELOOP:
127
+ self._statcache = self.path.lstat()
128
+ return self._statcache
129
+
130
+ def dir(self):
131
+ return S_ISDIR(self._stat().mode)
132
+
133
+ def file(self):
134
+ return S_ISREG(self._stat().mode)
135
+
136
+ def exists(self):
137
+ return self._stat()
138
+
139
+ def link(self):
140
+ st = self.path.lstat()
141
+ return S_ISLNK(st.mode)
142
+
143
+ def __init__(self, path=None, expanduser=False):
144
+ """ Initialize and return a local Path instance.
145
+
146
+ Path can be relative to the current directory.
147
+ If path is None it defaults to the current working directory.
148
+ If expanduser is True, tilde-expansion is performed.
149
+ Note that Path instances always carry an absolute path.
150
+ Note also that passing in a local path object will simply return
151
+ the exact same path object. Use new() to get a new copy.
152
+ """
153
+ if path is None:
154
+ self.strpath = py.error.checked_call(os.getcwd)
155
+ else:
156
+ try:
157
+ path = fspath(path)
158
+ except TypeError:
159
+ raise ValueError("can only pass None, Path instances "
160
+ "or non-empty strings to LocalPath")
161
+ if expanduser:
162
+ path = os.path.expanduser(path)
163
+ self.strpath = abspath(path)
164
+
165
+ def __hash__(self):
166
+ s = self.strpath
167
+ if iswin32:
168
+ s = s.lower()
169
+ return hash(s)
170
+
171
+ def __eq__(self, other):
172
+ s1 = fspath(self)
173
+ try:
174
+ s2 = fspath(other)
175
+ except TypeError:
176
+ return False
177
+ if iswin32:
178
+ s1 = s1.lower()
179
+ try:
180
+ s2 = s2.lower()
181
+ except AttributeError:
182
+ return False
183
+ return s1 == s2
184
+
185
+ def __ne__(self, other):
186
+ return not (self == other)
187
+
188
+ def __lt__(self, other):
189
+ return fspath(self) < fspath(other)
190
+
191
+ def __gt__(self, other):
192
+ return fspath(self) > fspath(other)
193
+
194
+ def samefile(self, other):
195
+ """ return True if 'other' references the same file as 'self'.
196
+ """
197
+ other = fspath(other)
198
+ if not isabs(other):
199
+ other = abspath(other)
200
+ if self == other:
201
+ return True
202
+ if not hasattr(os.path, "samefile"):
203
+ return False
204
+ return py.error.checked_call(
205
+ os.path.samefile, self.strpath, other)
206
+
207
def remove(self, rec=1, ignore_errors=False):
    """ remove a file or directory (or a directory tree if rec=1).
    if ignore_errors is True, errors while removing directories will
    be ignored.
    """
    # dir=1, link=0: a real directory (not a symlink to one)
    if self.check(dir=1, link=0):
        if rec:
            # force remove of readonly files on windows
            if iswin32:
                self.chmod(0o700, rec=1)
            import shutil
            py.error.checked_call(
                shutil.rmtree, self.strpath,
                ignore_errors=ignore_errors)
        else:
            py.error.checked_call(os.rmdir, self.strpath)
    else:
        # plain file or symlink; make writable first on windows
        if iswin32:
            self.chmod(0o700)
        py.error.checked_call(os.remove, self.strpath)
227
+
228
def computehash(self, hashtype="md5", chunksize=524288):
    """ return hexdigest of hashvalue for this file. """
    try:
        try:
            import hashlib as mod
        except ImportError:
            # very old pythons: fall back to the per-algorithm modules
            if hashtype == "sha1":
                hashtype = "sha"
            mod = __import__(hashtype)
        digest = getattr(mod, hashtype)()
    except (AttributeError, ImportError):
        raise ValueError("Don't know how to compute %r hash" %(hashtype,))
    f = self.open('rb')
    try:
        # feed the file to the digest in fixed-size chunks
        while 1:
            chunk = f.read(chunksize)
            if not chunk:
                break
            digest.update(chunk)
    finally:
        f.close()
    return digest.hexdigest()
249
+
250
def new(self, **kw):
    """ create a modified version of this path.
    the following keyword arguments modify various path parts::

        a:/some/path/to/a/file.ext
        xx                           drive
        xxxxxxxxxxxxxxxxx            dirname
                xxxxxxxx             basename
                xxxx                 purebasename
                     xxx             ext
    """
    obj = object.__new__(self.__class__)
    if not kw:
        # no modification requested: plain copy of this path
        obj.strpath = self.strpath
        return obj
    drive, dirname, basename, purebasename,ext = self._getbyspec(
        "drive,dirname,basename,purebasename,ext")
    if 'basename' in kw:
        # basename is mutually exclusive with purebasename/ext
        if 'purebasename' in kw or 'ext' in kw:
            raise ValueError("invalid specification %r" % kw)
    else:
        # recombine basename from (possibly overridden) pieces
        pb = kw.setdefault('purebasename', purebasename)
        try:
            ext = kw['ext']
        except KeyError:
            pass
        else:
            # allow passing "txt" as well as ".txt"
            if ext and not ext.startswith('.'):
                ext = '.' + ext
        kw['basename'] = pb + ext

    if ('dirname' in kw and not kw['dirname']):
        # empty dirname means "directly under the drive root"
        kw['dirname'] = drive
    else:
        kw.setdefault('dirname', dirname)
    kw.setdefault('sep', self.sep)
    obj.strpath = normpath(
        "%(dirname)s%(sep)s%(basename)s" % kw)
    return obj
289
+
290
def _getbyspec(self, spec):
    """ see new for what 'spec' can be.

    Returns a list with one entry per comma-separated name in *spec*,
    in the same order.
    """
    res = []
    parts = self.strpath.split(self.sep)

    args = filter(None, spec.split(',') )
    append = res.append
    for name in args:
        if name == 'drive':
            # first component ("c:" on windows, "" on posix)
            append(parts[0])
        elif name == 'dirname':
            append(self.sep.join(parts[:-1]))
        else:
            basename = parts[-1]
            if name == 'basename':
                append(basename)
            else:
                # split basename at the last dot into stem/extension
                i = basename.rfind('.')
                if i == -1:
                    purebasename, ext = basename, ''
                else:
                    purebasename, ext = basename[:i], basename[i:]
                if name == 'purebasename':
                    append(purebasename)
                elif name == 'ext':
                    append(ext)
                else:
                    raise ValueError("invalid part specification %r" % name)
    return res
319
+
320
def dirpath(self, *args, **kwargs):
    """ return the directory path joined with any given path arguments. """
    if kwargs:
        # keyword arguments (e.g. abs=...) need the generic base logic
        return super(LocalPath, self).dirpath(*args, **kwargs)
    # fast path: build the parent directly, skipping normalization
    parent = object.__new__(self.__class__)
    parent.strpath = dirname(self.strpath)
    if args:
        parent = parent.join(*args)
    return parent
329
+
330
def join(self, *args, **kwargs):
    """ return a new path by appending all 'args' as path
    components. if abs=1 is used restart from root if any
    of the args is an absolute path.
    """
    sep = self.sep
    strargs = [fspath(arg) for arg in args]
    strpath = self.strpath
    if kwargs.get('abs'):
        # scan from the right: the last absolute arg becomes the new root
        # and everything before it is discarded
        newargs = []
        for arg in reversed(strargs):
            if isabs(arg):
                strpath = arg
                strargs = newargs
                break
            newargs.insert(0, arg)
    # special case for when we have e.g. strpath == "/"
    actual_sep = "" if strpath.endswith(sep) else sep
    for arg in strargs:
        arg = arg.strip(sep)
        if iswin32:
            # allow unix style paths even on windows.
            arg = arg.strip('/')
            arg = arg.replace('/', sep)
        strpath = strpath + actual_sep + arg
        actual_sep = sep
    obj = object.__new__(self.__class__)
    obj.strpath = normpath(strpath)
    return obj
359
+
360
def open(self, mode='r', ensure=False, encoding=None):
    """ return an opened file with the given mode.

    If ensure is True, create parent directories if needed.
    """
    if ensure:
        self.dirpath().ensure(dir=1)
    # io.open is only needed when an explicit encoding was requested
    opener = open
    extra = {}
    if encoding:
        opener = io.open
        extra['encoding'] = encoding
    return py.error.checked_call(opener, self.strpath, mode, **extra)
370
+
371
def _fastjoin(self, name):
    # cheap join for a plain child name: skips normalization entirely
    child = object.__new__(self.__class__)
    child.strpath = "".join((self.strpath, self.sep, name))
    return child
375
+
376
def islink(self):
    # True if this path is a symbolic link (delegates to os.path.islink)
    return islink(self.strpath)
378
+
379
def check(self, **kw):
    # no keywords: plain existence test
    if not kw:
        return exists(self.strpath)
    if len(kw) == 1:
        # fast paths for the two most common single-keyword checks.
        # `not` binds looser than `^`, so this reads as
        # `not (flag ^ result)` i.e. "flag and result agree":
        # check(dir=1) -> is a dir, check(dir=0) -> is not a dir.
        if "dir" in kw:
            return not kw["dir"] ^ isdir(self.strpath)
        if "file" in kw:
            return not kw["file"] ^ isfile(self.strpath)
    # anything more complex is handled generically by the base class
    return super(LocalPath, self).check(**kw)
388
+
389
# characters that mark a string filter as an fnmatch pattern
# (rather than a literal child name)
_patternchars = set("*?[" + os.path.sep)
def listdir(self, fil=None, sort=None):
    """ list directory contents, possibly filter by the given fil func
    and possibly sorted.
    """
    if fil is None and sort is None:
        # fast path: no filtering and no sorting requested
        names = py.error.checked_call(os.listdir, self.strpath)
        return map_as_list(self._fastjoin, names)
    if isinstance(fil, py.builtin._basestring):
        if not self._patternchars.intersection(fil):
            # literal name, no glob characters: stat it directly
            child = self._fastjoin(fil)
            if exists(child.strpath):
                return [child]
            return []
        fil = common.FNMatcher(fil)
    names = py.error.checked_call(os.listdir, self.strpath)
    res = []
    for name in names:
        child = self._fastjoin(name)
        if fil is None or fil(child):
            res.append(child)
    self._sortlist(res, sort)
    return res
412
+
413
def size(self):
    """ return size of the underlying file object """
    return self.stat().size
416
+
417
def mtime(self):
    """ return last modification time of the path. """
    return self.stat().mtime
420
+
421
def copy(self, target, mode=False, stat=False):
    """ copy path to target.

    If mode is True, will copy permission from path to target.
    If stat is True, copy permission, last modification
    time, last access time, and flags from path to target.
    """
    if self.check(file=1):
        # single-file copy; copying into a directory uses our basename
        if target.check(dir=1):
            target = target.join(self.basename)
        assert self!=target
        copychunked(self, target)
        if mode:
            copymode(self.strpath, target.strpath)
        if stat:
            copystat(self, target)
    else:
        # recursive directory copy; do not descend into symlinked dirs
        def rec(p):
            return p.check(link=0)
        for x in self.visit(rec=rec):
            relpath = x.relto(self)
            newx = target.join(relpath)
            newx.dirpath().ensure(dir=1)
            if x.check(link=1):
                # recreate symlinks instead of copying their targets
                newx.mksymlinkto(x.readlink())
                continue
            elif x.check(file=1):
                copychunked(x, newx)
            elif x.check(dir=1):
                newx.ensure(dir=1)
            if mode:
                copymode(x.strpath, newx.strpath)
            if stat:
                copystat(x, newx)
455
+
456
def rename(self, target):
    """ rename this path to target. """
    return py.error.checked_call(
        os.rename, self.strpath, fspath(target))
460
+
461
def dump(self, obj, bin=1):
    """ pickle object into path location"""
    import pickle
    # the context manager guarantees the file handle is closed
    # even when pickling fails
    with self.open('wb') as f:
        py.error.checked_call(pickle.dump, obj, f, bin)
469
+
470
def mkdir(self, *args):
    """ create & return the directory joined with args. """
    p = self.join(*args)
    py.error.checked_call(os.mkdir, fspath(p))
    return p
475
+
476
def write_binary(self, data, ensure=False):
    """ write binary data into path. If ensure is True create
    missing parent directories.
    """
    if ensure:
        self.dirpath().ensure(dir=1)
    f = self.open('wb')
    try:
        f.write(data)
    finally:
        f.close()
484
+
485
def write_text(self, data, encoding, ensure=False):
    """ write text data into path using the specified encoding.
    If ensure is True create missing parent directories.
    """
    if ensure:
        self.dirpath().ensure(dir=1)
    f = self.open('w', encoding=encoding)
    try:
        f.write(data)
    finally:
        f.close()
493
+
494
def write(self, data, mode='w', ensure=False):
    """ write data into path. If ensure is True create
    missing parent directories.
    """
    if ensure:
        self.dirpath().ensure(dir=1)
    if 'b' in mode:
        # binary mode: only raw bytes are acceptable
        if not py.builtin._isbytes(data):
            raise ValueError("can only process bytes")
    else:
        # text mode: coerce non-text input to text
        if not py.builtin._istext(data):
            if not py.builtin._isbytes(data):
                data = str(data)
            else:
                # decode bytes using the interpreter default encoding
                data = py.builtin._totext(data, sys.getdefaultencoding())
    f = self.open(mode)
    try:
        f.write(data)
    finally:
        f.close()
514
+
515
def _ensuredirs(self):
    # recursively create this directory and all missing parents
    parent = self.dirpath()
    if parent == self:
        # reached the filesystem root; nothing more to create
        return self
    if parent.check(dir=0):
        parent._ensuredirs()
    if self.check(dir=0):
        try:
            self.mkdir()
        except py.error.EEXIST:
            # race condition: file/dir created by another thread/process.
            # complain if it is not a dir
            if self.check(dir=0):
                raise
    return self
530
+
531
def ensure(self, *args, **kwargs):
    """ ensure that an args-joined path exists (by default as
    a file). if you specify a keyword argument 'dir=True'
    then the path is forced to be a directory path.
    """
    p = self.join(*args)
    if kwargs.get('dir', 0):
        return p._ensuredirs()
    else:
        # file mode: make the parents, then touch the file if absent
        p.dirpath()._ensuredirs()
        if not p.check(file=1):
            p.open('w').close()
        return p
544
+
545
def stat(self, raising=True):
    """ Return an os.stat() tuple (wrapped as a Stat object).

    If *raising* is true (the default), stat failures are raised as
    py.error exceptions; otherwise ``None`` is returned on failure.
    """
    # Fix: the original compared ``raising == True`` (PEP8 E712),
    # which silently took the non-raising branch for any truthy
    # value other than the exact True singleton.  Treat every truthy
    # value as a request to raise; behavior for boolean callers is
    # unchanged.
    if raising:
        return Stat(self, py.error.checked_call(os.stat, self.strpath))
    try:
        return Stat(self, os.stat(self.strpath))
    except KeyboardInterrupt:
        # never swallow a user interrupt
        raise
    except Exception:
        return None
555
+
556
def lstat(self):
    """ Return an os.lstat() tuple (does not follow symlinks). """
    return Stat(self, py.error.checked_call(os.lstat, self.strpath))
559
+
560
def setmtime(self, mtime=None):
    """ set modification time for the given path.  if 'mtime' is None
    (the default) then the file's mtime is set to current time.

    Note that the resolution for 'mtime' is platform dependent.
    """
    if mtime is None:
        # utime(path, None) sets both atime and mtime to "now"
        return py.error.checked_call(os.utime, self.strpath, mtime)
    try:
        # -1 for atime means "leave unchanged" on platforms supporting it
        return py.error.checked_call(os.utime, self.strpath, (-1, mtime))
    except py.error.EINVAL:
        # fall back to explicitly re-passing the current access time
        return py.error.checked_call(os.utime, self.strpath, (self.atime(), mtime))
572
+
573
def chdir(self):
    """ change directory to self and return old current directory """
    try:
        # Path() with no argument captures the current working directory
        old = self.__class__()
    except py.error.ENOENT:
        # the old cwd was deleted underneath us; report None instead
        old = None
    py.error.checked_call(os.chdir, self.strpath)
    return old
581
+
582
+
583
@contextmanager
def as_cwd(self):
    """
    Return a context manager, which changes to the path's dir during the
    managed "with" context.
    On __enter__ it returns the old dir, which might be ``None``.
    """
    old = self.chdir()
    try:
        yield old
    finally:
        # restore the previous cwd unless it no longer existed on entry
        if old is not None:
            old.chdir()
596
+
597
def realpath(self):
    """ return a new path which contains no symbolic links."""
    resolved = os.path.realpath(self.strpath)
    return self.__class__(resolved)
600
+
601
def atime(self):
    """ return last access time of the path. """
    return self.stat().atime
604
+
605
def __repr__(self):
    # debugging representation, e.g. local('/tmp/x')
    return 'local(%r)' % self.strpath
607
+
608
def __str__(self):
    """ return string representation of the Path. """
    return self.strpath
611
+
612
def chmod(self, mode, rec=0):
    """ change permissions to the given mode. If mode is an
    integer it directly encodes the os-specific modes.
    if rec is True perform recursively.
    """
    if not isinstance(mode, int):
        raise TypeError("mode %r must be an integer" % (mode,))
    if rec:
        # apply to the whole subtree first, then to self below
        for x in self.visit(rec=rec):
            py.error.checked_call(os.chmod, str(x), mode)
    py.error.checked_call(os.chmod, self.strpath, mode)
623
+
624
def pypkgpath(self):
    """ return the Python package path by looking for the last
    directory upwards which still contains an __init__.py.
    Return None if a pkgpath can not be determined.
    """
    pkgpath = None
    # walk ancestors bottom-up; stop at the first dir that is not
    # an importable package level
    for parent in self.parts(reverse=True):
        if parent.isdir():
            if not parent.join('__init__.py').exists():
                break
            if not isimportable(parent.basename):
                break
            pkgpath = parent
    return pkgpath
638
+
639
+ def _ensuresyspath(self, ensuremode, path):
640
+ if ensuremode:
641
+ s = str(path)
642
+ if ensuremode == "append":
643
+ if s not in sys.path:
644
+ sys.path.append(s)
645
+ else:
646
+ if s != sys.path[0]:
647
+ sys.path.insert(0, s)
648
+
649
def pyimport(self, modname=None, ensuresyspath=True):
    """ return path as an imported python module.

    If modname is None, look for the containing package
    and construct an according module name.
    The module will be put/looked up in sys.modules.
    if ensuresyspath is True then the root dir for importing
    the file (taking __init__.py files into account) will
    be prepended to sys.path if it isn't there already.
    If ensuresyspath=="append" the root dir will be appended
    if it isn't already contained in sys.path.
    if ensuresyspath is False no modification of syspath happens.

    Special value of ensuresyspath=="importlib" is intended
    purely for using in pytest, it is capable only of importing
    separate .py files outside packages, e.g. for test suite
    without any __init__.py file. It effectively allows having
    same-named test modules in different places and offers
    mild opt-in via this option. Note that it works only in
    recent versions of python.
    """
    if not self.check():
        raise py.error.ENOENT(self)

    if ensuresyspath == 'importlib':
        # isolated importlib-based import: no sys.path manipulation,
        # no package resolution
        if modname is None:
            modname = self.purebasename
        if not ALLOW_IMPORTLIB_MODE:
            raise ImportError(
                "Can't use importlib due to old version of Python")
        spec = importlib.util.spec_from_file_location(
            modname, str(self))
        if spec is None:
            raise ImportError(
                "Can't find module %s at location %s" %
                (modname, str(self))
            )
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        return mod

    pkgpath = None
    if modname is None:
        # derive the dotted module name from the package layout
        pkgpath = self.pypkgpath()
        if pkgpath is not None:
            pkgroot = pkgpath.dirpath()
            names = self.new(ext="").relto(pkgroot).split(self.sep)
            if names[-1] == "__init__":
                names.pop()
            modname = ".".join(names)
        else:
            pkgroot = self.dirpath()
            modname = self.purebasename

        self._ensuresyspath(ensuresyspath, pkgroot)
        __import__(modname)
        mod = sys.modules[modname]
        if self.basename == "__init__.py":
            return mod # we don't check anything as we might
               # be in a namespace package ... too icky to check
        modfile = mod.__file__
        # normalize compiled-file suffixes back to the source file
        if modfile[-4:] in ('.pyc', '.pyo'):
            modfile = modfile[:-1]
        elif modfile.endswith('$py.class'):
            modfile = modfile[:-9] + '.py'
        if modfile.endswith(os.path.sep + "__init__.py"):
            if self.basename != "__init__.py":
                modfile = modfile[:-12]
        try:
            issame = self.samefile(modfile)
        except py.error.ENOENT:
            issame = False
        if not issame:
            # a same-named module from a different location was
            # already imported: refuse unless explicitly ignored
            ignore = os.getenv('PY_IGNORE_IMPORTMISMATCH')
            if ignore != '1':
                raise self.ImportMismatchError(modname, modfile, self)
        return mod
    else:
        try:
            return sys.modules[modname]
        except KeyError:
            # we have a custom modname, do a pseudo-import
            import types
            mod = types.ModuleType(modname)
            mod.__file__ = str(self)
            sys.modules[modname] = mod
            try:
                py.builtin.execfile(str(self), mod.__dict__)
            except:
                # roll back the sys.modules registration on failure
                del sys.modules[modname]
                raise
            return mod
741
+
742
def sysexec(self, *argv, **popen_opts):
    """ return stdout text from executing a system child process,
    where the 'self' path points to executable.
    The process is directly invoked and not through a system shell.
    """
    from subprocess import Popen, PIPE
    argv = map_as_list(str, argv)
    # capture both streams; stderr is only surfaced on failure
    popen_opts['stdout'] = popen_opts['stderr'] = PIPE
    proc = Popen([str(self)] + argv, **popen_opts)
    stdout, stderr = proc.communicate()
    ret = proc.wait()
    if py.builtin._isbytes(stdout):
        stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
    if ret != 0:
        if py.builtin._isbytes(stderr):
            stderr = py.builtin._totext(stderr, sys.getdefaultencoding())
        raise py.process.cmdexec.Error(ret, ret, str(self),
                                       stdout, stderr,)
    return stdout
761
+
762
def sysfind(cls, name, checker=None, paths=None):
    """ return a path object found by looking at the systems
    underlying PATH specification. If the checker is not None
    it will be invoked to filter matching paths.  If a binary
    cannot be found, None is returned
    Note: This is probably not working on plain win32 systems
    but may work on cygwin.
    """
    if isabs(name):
        # absolute name: no PATH search needed
        p = py.path.local(name)
        if p.check(file=1):
            return p
    else:
        if paths is None:
            if iswin32:
                paths = os.environ['Path'].split(';')
                # windows implicitly searches the current directory
                if '' not in paths and '.' not in paths:
                    paths.append('.')
                try:
                    systemroot = os.environ['SYSTEMROOT']
                except KeyError:
                    pass
                else:
                    # expand literal %SystemRoot% occurrences in PATH
                    paths = [path.replace('%SystemRoot%', systemroot)
                             for path in paths]
            else:
                paths = os.environ['PATH'].split(':')
        tryadd = []
        if iswin32:
            # also try every executable extension (.exe, .bat, ...)
            tryadd += os.environ['PATHEXT'].split(os.pathsep)
        tryadd.append("")

        for x in paths:
            for addext in tryadd:
                p = py.path.local(x).join(name, abs=True) + addext
                try:
                    if p.check(file=1):
                        if checker:
                            if not checker(p):
                                continue
                        return p
                except py.error.EACCES:
                    # unreadable PATH entry: skip it
                    pass
    return None
sysfind = classmethod(sysfind)
807
+
808
def _gethomedir(cls):
    """ return the user's home directory as a path object,
    or None when no home can be determined from the environment. """
    environ = os.environ
    try:
        home = environ['HOME']
    except KeyError:
        # windows fallback: drive + path pair
        try:
            home = environ["HOMEDRIVE"] + environ['HOMEPATH']
        except KeyError:
            return None
    return cls(home)
_gethomedir = classmethod(_gethomedir)
818
+
819
+ # """
820
+ # special class constructors for local filesystem paths
821
+ # """
822
@classmethod
def get_temproot(cls):
    """ return the system's temporary directory
    (where tempfiles are usually created in)
    """
    import tempfile
    return py.path.local(tempfile.gettempdir())
829
+
830
@classmethod
def mkdtemp(cls, rootdir=None):
    """ return a Path object pointing to a fresh new temporary directory
    (which we created ourself).
    """
    import tempfile
    if rootdir is None:
        rootdir = cls.get_temproot()
    return cls(py.error.checked_call(tempfile.mkdtemp, dir=str(rootdir)))
839
+
840
def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3,
                      lock_timeout=172800):   # two days
    """ return unique directory with a number greater than the current
    maximum one.  The number is assumed to start directly after prefix.
    if keep is true directories with a number less than (maxnum-keep)
    will be removed. If .lock files are used (lock_timeout non-zero),
    algorithm is multi-process safe.
    """
    if rootdir is None:
        rootdir = cls.get_temproot()

    nprefix = prefix.lower()
    def parse_num(path):
        """ parse the number out of a path (if it matches the prefix) """
        nbasename = path.basename.lower()
        if nbasename.startswith(nprefix):
            try:
                return int(nbasename[len(nprefix):])
            except ValueError:
                pass

    def create_lockfile(path):
        """ exclusively create lockfile. Throws when failed """
        mypid = os.getpid()
        lockfile = path.join('.lock')
        if hasattr(lockfile, 'mksymlinkto'):
            # symlink creation is atomic and stores the pid as target
            lockfile.mksymlinkto(str(mypid))
        else:
            # O_EXCL guarantees exclusive creation on platforms
            # without symlinks
            fd = py.error.checked_call(os.open, str(lockfile), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644)
            with os.fdopen(fd, 'w') as f:
                f.write(str(mypid))
        return lockfile

    def atexit_remove_lockfile(lockfile):
        """ ensure lockfile is removed at process exit """
        mypid = os.getpid()
        def try_remove_lockfile():
            # in a fork() situation, only the last process should
            # remove the .lock, otherwise the other processes run the
            # risk of seeing their temporary dir disappear.  For now
            # we remove the .lock in the parent only (i.e. we assume
            # that the children finish before the parent).
            if os.getpid() != mypid:
                return
            try:
                lockfile.remove()
            except py.error.Error:
                pass
        atexit.register(try_remove_lockfile)

    # compute the maximum number currently in use with the prefix
    lastmax = None
    while True:
        maxnum = -1
        for path in rootdir.listdir():
            num = parse_num(path)
            if num is not None:
                maxnum = max(maxnum, num)

        # make the new directory
        try:
            udir = rootdir.mkdir(prefix + str(maxnum+1))
            if lock_timeout:
                lockfile = create_lockfile(udir)
                atexit_remove_lockfile(lockfile)
        except (py.error.EEXIST, py.error.ENOENT, py.error.EBUSY):
            # race condition (1): another thread/process created the dir
            #                     in the meantime - try again
            # race condition (2): another thread/process spuriously acquired
            #                     lock treating empty directory as candidate
            #                     for removal - try again
            # race condition (3): another thread/process tried to create the lock at
            #                     the same time (happened in Python 3.3 on Windows)
            # https://ci.appveyor.com/project/pytestbot/py/build/1.0.21/job/ffi85j4c0lqwsfwa
            if lastmax == maxnum:
                # no progress since last attempt: give up instead of looping
                raise
            lastmax = maxnum
            continue
        break

    def get_mtime(path):
        """ read file modification time """
        try:
            return path.lstat().mtime
        except py.error.Error:
            pass

    garbage_prefix = prefix + 'garbage-'

    def is_garbage(path):
        """ check if path denotes directory scheduled for removal """
        bn = path.basename
        return bn.startswith(garbage_prefix)

    # prune old directories
    udir_time = get_mtime(udir)
    if keep and udir_time:
        for path in rootdir.listdir():
            num = parse_num(path)
            if num is not None and num <= (maxnum - keep):
                try:
                    # try acquiring lock to remove directory as exclusive user
                    if lock_timeout:
                        create_lockfile(path)
                except (py.error.EEXIST, py.error.ENOENT, py.error.EBUSY):
                    path_time = get_mtime(path)
                    if not path_time:
                        # assume directory doesn't exist now
                        continue
                    if abs(udir_time - path_time) < lock_timeout:
                        # assume directory with lockfile exists
                        # and lock timeout hasn't expired yet
                        continue

                # path dir locked for exclusive use
                # and scheduled for removal to avoid another thread/process
                # treating it as a new directory or removal candidate
                garbage_path = rootdir.join(garbage_prefix + str(uuid.uuid4()))
                try:
                    path.rename(garbage_path)
                    garbage_path.remove(rec=1)
                except KeyboardInterrupt:
                    raise
                except: # this might be py.error.Error, WindowsError ...
                    pass
            if is_garbage(path):
                # a leftover rename target from an interrupted prior run
                try:
                    path.remove(rec=1)
                except KeyboardInterrupt:
                    raise
                except: # this might be py.error.Error, WindowsError ...
                    pass

    # make link...
    try:
        username = os.environ['USER']           #linux, et al
    except KeyError:
        try:
            username = os.environ['USERNAME']   #windows
        except KeyError:
            username = 'current'

    src = str(udir)
    dest = src[:src.rfind('-')] + '-' + username
    try:
        os.unlink(dest)
    except OSError:
        pass
    try:
        os.symlink(src, dest)
    except (OSError, AttributeError, NotImplementedError):
        # best-effort convenience link only; ignore platforms without symlinks
        pass

    return udir
make_numbered_dir = classmethod(make_numbered_dir)
995
+
996
+
997
def copymode(src, dest):
    """ copy permission from src to dst. """
    import shutil
    # shutil.copymode copies the permission bits only
    shutil.copymode(src, dest)
1001
+
1002
+
1003
def copystat(src, dest):
    """ copy permission, last modification time,
    last access time, and flags from src to dst."""
    import shutil
    # str() accepts both plain strings and path objects
    shutil.copystat(str(src), str(dest))
1008
+
1009
+
1010
def copychunked(src, dest):
    """ copy the file content of *src* to *dest* in fixed-size chunks
    so arbitrarily large files never need to fit in memory. """
    chunksize = 524288  # half a meg of bytes
    fsrc = src.open('rb')
    try:
        fdest = dest.open('wb')
        try:
            for chunk in iter(lambda: fsrc.read(chunksize), b''):
                fdest.write(chunk)
        finally:
            fdest.close()
    finally:
        fsrc.close()
1025
+
1026
+
1027
def isimportable(name):
    """ return whether *name* looks like a plain importable module
    name (letter or underscore first, alphanumerics/underscores after).
    Falsy result (None/False) means not importable. """
    if not name:
        return
    first = name[0]
    if not (first.isalpha() or first == '_'):
        return
    stripped = name.replace("_", '')
    return not stripped or stripped.isalnum()
vllm/lib/python3.10/site-packages/py/_path/svnurl.py ADDED
@@ -0,0 +1,380 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ module defining a subversion path object based on the external
3
+ command 'svn'. This modules aims to work with svn 1.3 and higher
4
+ but might also interact well with earlier versions.
5
+ """
6
+
7
+ import os, sys, time, re
8
+ import py
9
+ from py import path, process
10
+ from py._path import common
11
+ from py._path import svnwc as svncommon
12
+ from py._path.cacheutil import BuildcostAccessCache, AgingCache
13
+
14
+ DEBUG=False
15
+
16
+ class SvnCommandPath(svncommon.SvnPathBase):
17
+ """ path implementation that offers access to (possibly remote) subversion
18
+ repositories. """
19
+
20
+ _lsrevcache = BuildcostAccessCache(maxentries=128)
21
+ _lsnorevcache = AgingCache(maxentries=1000, maxseconds=60.0)
22
+
23
def __new__(cls, path, rev=None, auth=None):
    # construction happens in __new__ (not __init__) so that passing an
    # existing instance can transparently reuse its rev/auth settings
    self = object.__new__(cls)
    if isinstance(path, cls):
        rev = path.rev
        auth = path.auth
        path = path.strpath
    svncommon.checkbadchars(path)
    path = path.rstrip('/')  # normalize: no trailing slash
    self.strpath = path
    self.rev = rev
    self.auth = auth
    return self
35
+
36
def __repr__(self):
    # include the pinned revision only when one is set
    if self.rev == -1:
        return 'svnurl(%r)' % self.strpath
    else:
        return 'svnurl(%r, %r)' % (self.strpath, self.rev)
41
+
42
def _svnwithrev(self, cmd, *args):
    """ execute an svn command, append our own url and revision """
    if self.rev is not None:
        # pin the command to this path's revision
        args = ['-r', self.rev] + list(args)
    return self._svnwrite(cmd, *args)
49
+
50
def _svnwrite(self, cmd, *args):
    """ execute an svn command, append our own url """
    l = ['svn %s' % cmd]
    # quote and escape every argument for the shell command line
    args = ['"%s"' % self._escape(item) for item in args]
    l.extend(args)
    l.append('"%s"' % self._encodedurl())
    # fixing the locale because we can't otherwise parse
    string = " ".join(l)
    if DEBUG:
        print("execing %s" % string)
    out = self._svncmdexecauth(string)
    return out
62
+
63
def _svncmdexecauth(self, cmd):
    """ execute an svn command 'as is' """
    cmd = svncommon.fixlocale() + cmd
    if self.auth is not None:
        # append authentication options (username/password etc.)
        cmd += ' ' + self.auth.makecmdoptions()
    return self._cmdexec(cmd)
69
+
70
def _cmdexec(self, cmd):
    # run the command, translating svn's "already exists" failures
    # into the corresponding py.error exception
    try:
        out = process.cmdexec(cmd)
    except py.process.cmdexec.Error:
        e = sys.exc_info()[1]
        if (e.err.find('File Exists') != -1 or
                e.err.find('File already exists') != -1):
            raise py.error.EEXIST(self)
        raise
    return out
80
+
81
def _svnpopenauth(self, cmd):
    """ execute an svn command, return a pipe for reading stdin """
    cmd = svncommon.fixlocale() + cmd
    if self.auth is not None:
        cmd += ' ' + self.auth.makecmdoptions()
    return self._popen(cmd)
87
+
88
def _popen(self, cmd):
    # separate hook so tests/subclasses can intercept process creation
    return os.popen(cmd)
90
+
91
def _encodedurl(self):
    # URL-escape our path for safe use on an svn command line
    return self._escape(self.strpath)
93
+
94
def _norev_delentry(self, path):
    """ invalidate the no-revision listing cache entry for *path*. """
    # cache keys include the auth options so different credentials
    # never share cached listings
    auth = None
    if self.auth:
        auth = self.auth.makecmdoptions() or None
    self._lsnorevcache.delentry((str(path), auth))
97
+
98
def open(self, mode='r'):
    """ return an opened file with the given mode. """
    if mode not in ("r", "rU",):
        raise ValueError("mode %r not supported" % (mode,))
    assert self.check(file=1) # svn cat returns an empty file otherwise
    if self.rev is None:
        return self._svnpopenauth('svn cat "%s"' % (
                                  self._escape(self.strpath), ))
    else:
        # read the file content as of our pinned revision
        return self._svnpopenauth('svn cat -r %s "%s"' % (
                                  self.rev, self._escape(self.strpath)))
109
+
110
def dirpath(self, *args, **kwargs):
    """ return the directory path of the current path joined
    with any given path arguments.
    """
    # an svn url has at least 4 '/'-separated parts
    # (e.g. http: + '' + host + repo-root)
    l = self.strpath.split(self.sep)
    if len(l) < 4:
        raise py.error.EINVAL(self, "base is not valid")
    elif len(l) == 4:
        # already at the repository root: cannot go further up
        return self.join(*args, **kwargs)
    else:
        return self.new(basename='').join(*args, **kwargs)
121
+
122
+ # modifying methods (cache must be invalidated)
123
def mkdir(self, *args, **kwargs):
    """ create & return the directory joined with args.
    pass a 'msg' keyword argument to set the commit message.
    """
    commit_msg = kwargs.get('msg', "mkdir by py lib invocation")
    createpath = self.join(*args)
    createpath._svnwrite('mkdir', '-m', commit_msg)
    # the parent's listing just changed: drop its cache entry
    self._norev_delentry(createpath.dirpath())
    return createpath
132
+
133
def copy(self, target, msg='copied by py lib invocation'):
    """ copy path to target with checkin message msg."""
    if getattr(target, 'rev', None) is not None:
        # a target pinned to a revision is read-only by definition
        raise py.error.EINVAL(target, "revisions are immutable")
    self._svncmdexecauth('svn copy -m "%s" "%s" "%s"' %(msg,
                         self._escape(self), self._escape(target)))
    self._norev_delentry(target.dirpath())
140
+
141
def rename(self, target, msg="renamed by py lib invocation"):
    """ rename this path to target with checkin message msg. """
    if getattr(self, 'rev', None) is not None:
        raise py.error.EINVAL(self, "revisions are immutable")
    self._svncmdexecauth('svn move -m "%s" --force "%s" "%s"' %(
                         msg, self._escape(self), self._escape(target)))
    # both the old parent's listing and this entry are now stale
    self._norev_delentry(self.dirpath())
    self._norev_delentry(self)
149
+
150
def remove(self, rec=1, msg='removed by py lib invocation'):
    """ remove a file or directory (or a directory tree if rec=1) with
    checkin message msg."""
    if self.rev is not None:
        raise py.error.EINVAL(self, "revisions are immutable")
    # note: svn rm is inherently recursive; 'rec' is accepted for
    # interface compatibility with other path classes
    self._svncmdexecauth('svn rm -m "%s" "%s"' %(msg, self._escape(self)))
    self._norev_delentry(self.dirpath())
157
+
158
+ def export(self, topath):
159
+ """ export to a local path
160
+
161
+ topath should not exist prior to calling this, returns a
162
+ py.path.local instance
163
+ """
164
+ topath = py.path.local(topath)
165
+ args = ['"%s"' % (self._escape(self),),
166
+ '"%s"' % (self._escape(topath),)]
167
+ if self.rev is not None:
168
+ args = ['-r', str(self.rev)] + args
169
+ self._svncmdexecauth('svn export %s' % (' '.join(args),))
170
+ return topath
171
+
172
    def ensure(self, *args, **kwargs):
        """ ensure that an args-joined path exists (by default as
        a file). If you specify a keyword argument 'dir=True'
        then the path is forced to be a directory path.

        Implementation: walks up from the target until an existing
        base is found, builds the missing subtree in a local temporary
        directory, and imports it onto the server in a single
        'svn import' commit.
        """
        if getattr(self, 'rev', None) is not None:
            raise py.error.EINVAL(self, "revisions are immutable")
        target = self.join(*args)
        dir = kwargs.get('dir', 0)
        # find the deepest already-existing ancestor of the target
        for x in target.parts(reverse=True):
            if x.check():
                break
        else:
            raise py.error.ENOENT(target, "has not any valid base!")
        if x == target:
            # already exists; only verify that its kind (file/dir) matches
            if not x.check(dir=dir):
                raise dir and py.error.ENOTDIR(x) or py.error.EISDIR(x)
            return x
        tocreate = target.relto(x)
        basename = tocreate.split(self.sep, 1)[0]
        tempdir = py.path.local.mkdtemp()
        try:
            # materialize the missing subtree locally, then push it in one go
            tempdir.ensure(tocreate, dir=dir)
            cmd = 'svn import -m "%s" "%s" "%s"' % (
                "ensure %s" % self._escape(tocreate),
                self._escape(tempdir.join(basename)),
                x.join(basename)._encodedurl())
            self._svncmdexecauth(cmd)
            # the base's server-side listing changed -> drop its cache entry
            self._norev_delentry(x)
        finally:
            tempdir.remove()
        return target
204
+
205
+ # end of modifying methods
206
    def _propget(self, name):
        """ run 'svn propget <name>' for this url/revision and return
        the raw property value. """
        res = self._svnwithrev('propget', name)
        return res[:-1] # strip trailing newline
209
+
210
    def _proplist(self):
        """ run 'svn proplist' and return a lazy PropListDict mapping
        property names to (lazily fetched) values. """
        res = self._svnwithrev('proplist')
        lines = res.split('\n')
        # the first line is the "Properties on ..." header -> skip it
        lines = [x.strip() for x in lines[1:]]
        return svncommon.PropListDict(self, lines)
215
+
216
+ def info(self):
217
+ """ return an Info structure with svn-provided information. """
218
+ parent = self.dirpath()
219
+ nameinfo_seq = parent._listdir_nameinfo()
220
+ bn = self.basename
221
+ for name, info in nameinfo_seq:
222
+ if name == bn:
223
+ return info
224
+ raise py.error.ENOENT(self)
225
+
226
+
227
    def _listdir_nameinfo(self):
        """ return sequence of name-info directory entries of self """
        def builder():
            # run 'svn ls -v' and parse every output line into an
            # InfoSvnCommand record; only invoked on a cache miss below
            try:
                res = self._svnwithrev('ls', '-v')
            except process.cmdexec.Error:
                e = sys.exc_info()[1]
                # map well-known svn error texts onto py.error exceptions;
                # the texts differ across svn client versions, hence the list
                if e.err.find('non-existent in that revision') != -1:
                    raise py.error.ENOENT(self, e.err)
                elif e.err.find("E200009:") != -1:
                    raise py.error.ENOENT(self, e.err)
                elif e.err.find('File not found') != -1:
                    raise py.error.ENOENT(self, e.err)
                elif e.err.find('not part of a repository')!=-1:
                    raise py.error.ENOENT(self, e.err)
                elif e.err.find('Unable to open')!=-1:
                    raise py.error.ENOENT(self, e.err)
                elif e.err.lower().find('method not allowed')!=-1:
                    raise py.error.EACCES(self, e.err)
                raise py.error.Error(e.err)
            lines = res.split('\n')
            nameinfo_seq = []
            for lsline in lines:
                if lsline:
                    info = InfoSvnCommand(lsline)
                    if info._name != '.': # svn 1.5 produces '.' dirs,
                        nameinfo_seq.append((info._name, info))
            nameinfo_seq.sort()
            return nameinfo_seq
        # the cache key includes the auth options so different credentials
        # never share a cached listing
        auth = self.auth and self.auth.makecmdoptions() or None
        if self.rev is not None:
            return self._lsrevcache.getorbuild((self.strpath, self.rev, auth),
                                               builder)
        else:
            return self._lsnorevcache.getorbuild((self.strpath, auth),
                                                 builder)
263
+
264
    def listdir(self, fil=None, sort=None):
        """ list directory contents, possibly filter by the given fil func
        and possibly sorted.

        A string filter is interpreted as an fnmatch pattern.  Raises
        ENOTDIR when self is actually a file: 'svn ls' on a file yields
        exactly one entry naming the file itself.
        """
        if isinstance(fil, str):
            fil = common.FNMatcher(fil)
        nameinfo_seq = self._listdir_nameinfo()
        if len(nameinfo_seq) == 1:
            name, info = nameinfo_seq[0]
            if name == self.basename and info.kind == 'file':
                #if not self.check(dir=1):
                raise py.error.ENOTDIR(self)
        paths = [self.join(name) for (name, info) in nameinfo_seq]
        if fil:
            paths = [x for x in paths if fil(x)]
        self._sortlist(paths, sort)
        return paths
281
+
282
+
283
    def log(self, rev_start=None, rev_end=1, verbose=False):
        """ return a list of LogEntry instances for this path.
        rev_start is the starting revision (defaulting to the first one).
        rev_end is the last revision (defaulting to HEAD).
        if verbose is True, then the LogEntry instances also know which files changed.
        """
        assert self.check() #make it simpler for the pipe
        # None means "repository default": HEAD for the start bound
        rev_start = rev_start is None and "HEAD" or rev_start
        rev_end = rev_end is None and "HEAD" or rev_end

        if rev_start == "HEAD" and rev_end == 1:
            # full history -> no -r option needed
            rev_opt = ""
        else:
            rev_opt = "-r %s:%s" % (rev_start, rev_end)
        verbose_opt = verbose and "-v" or ""
        xmlpipe = self._svnpopenauth('svn log --xml %s %s "%s"' %
                                     (rev_opt, verbose_opt, self.strpath))
        # parse svn's xml output; element nodes are individual log entries
        from xml.dom import minidom
        tree = minidom.parse(xmlpipe)
        result = []
        for logentry in filter(None, tree.firstChild.childNodes):
            if logentry.nodeType == logentry.ELEMENT_NODE:
                result.append(svncommon.LogEntry(logentry))
        return result
307
+
308
+ #01234567890123456789012345678901234567890123467
309
+ # 2256 hpk 165 Nov 24 17:55 __init__.py
310
+ # XXX spotted by Guido, SVN 1.3.0 has different aligning, breaks the code!!!
311
+ # 1312 johnny 1627 May 05 14:32 test_decorators.py
312
+ #
313
class InfoSvnCommand:
    """ one parsed line of 'svn ls -v' output: revision, author,
    optional size, date and filename. """
    # the '0?' part in the middle is an indication of whether the resource is
    # locked, see 'svn help ls'
    lspattern = re.compile(
        r'^ *(?P<rev>\d+) +(?P<author>.+?) +(0? *(?P<size>\d+))? '
        r'*(?P<date>\w+ +\d{2} +[\d:]+) +(?P<file>.*)$')
    def __init__(self, line):
        # this is a typical line from 'svn ls http://...'
        #_ 1127 jum 0 Jul 13 15:28 branch/
        match = self.lspattern.match(line)
        data = match.groupdict()
        self._name = data['file']
        if self._name[-1] == '/':
            # a trailing slash marks a directory entry
            self._name = self._name[:-1]
            self.kind = 'dir'
        else:
            self.kind = 'file'
        #self.has_props = l.pop(0) == 'P'
        self.created_rev = int(data['rev'])
        self.last_author = data['author']
        # directories are listed without a size -> default to 0
        self.size = data['size'] and int(data['size']) or 0
        self.mtime = parse_time_with_missing_year(data['date'])
        # microsecond-scaled variant kept for api compatibility
        self.time = self.mtime * 1000000

    def __eq__(self, other):
        return self.__dict__ == other.__dict__
339
+
340
+
341
+ #____________________________________________________
342
+ #
343
+ # helper functions
344
+ #____________________________________________________
345
def parse_time_with_missing_year(timestr):
    """ analyze the time part from a single line of "svn ls -v";
    the svn output omits the year for recent entries, which makes
    'timestr' ambiguous.
    """
    import calendar
    now = time.gmtime()

    fields = timestr.split()
    month = time.strptime(fields.pop(0), '%b')[1]
    day = time.strptime(fields.pop(0), '%d')[2]
    tail = fields.pop(0)  # either a year or an hour:minute pair
    try:
        if ":" in tail:
            raise ValueError()
        year = time.strptime(tail, '%Y')[0]
        hour = minute = 0
    except ValueError:
        # recent entries carry hh:mm instead of a year -> assume this year
        hour, minute = time.strptime(tail, '%H:%M')[3:5]
        year = now[0]

    candidate = (year, month, day, hour, minute, 0, 0, 0, 0)
    if candidate > now:
        # a guessed date lying in the future must be from last year
        year -= 1
        candidate = (year, month, day, hour, minute, 0, 0, 0, 0)
    return calendar.timegm(candidate)
371
+
372
+ class PathEntry:
373
+ def __init__(self, ppart):
374
+ self.strpath = ppart.firstChild.nodeValue.encode('UTF-8')
375
+ self.action = ppart.getAttribute('action').encode('UTF-8')
376
+ if self.action == 'A':
377
+ self.copyfrom_path = ppart.getAttribute('copyfrom-path').encode('UTF-8')
378
+ if self.copyfrom_path:
379
+ self.copyfrom_rev = int(ppart.getAttribute('copyfrom-rev'))
380
+
vllm/lib/python3.10/site-packages/py/_path/svnwc.py ADDED
@@ -0,0 +1,1240 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ svn-Command based Implementation of a Subversion WorkingCopy Path.
3
+
4
+ SvnWCCommandPath is the main class.
5
+
6
+ """
7
+
8
+ import os, sys, time, re, calendar
9
+ import py
10
+ import subprocess
11
+ from py._path import common
12
+
13
+ #-----------------------------------------------------------
14
+ # Caching latest repository revision and repo-paths
15
+ # (getting them is slow with the current implementations)
16
+ #
17
+ # XXX make mt-safe
18
+ #-----------------------------------------------------------
19
+
20
class cache:
    """ namespace holding process-wide caches of svn metadata
    (results of proplist/info/entries/propget), keyed by path. """
    proplist = {}
    info = {}
    entries = {}
    prop = {}
25
+
26
class RepoEntry:
    """ one cached (url, revision, timestamp) record of a repository. """
    def __init__(self, url, rev, timestamp):
        self.url = url
        self.rev = rev
        self.timestamp = timestamp

    def __str__(self):
        return "repo: %s;%s  %s" .replace("  ", " ") %(self.url, self.rev, self.timestamp) if False else "repo: %s;%s %s" %(self.url, self.rev, self.timestamp)

class RepoCache:
    """ The Repocache manages discovered repository paths
    and their revisions.  If inside a timeout the cache
    will even return the revision of the root.
    """
    timeout = 20 # seconds after which we forget that we know the last revision

    def __init__(self):
        self.repos = []

    def clear(self):
        self.repos = []

    def put(self, url, rev, timestamp=None):
        """ record (or refresh) the known revision for url. """
        if rev is None:
            return
        if timestamp is None:
            timestamp = time.time()

        for known in self.repos:
            if known.url == url:
                # refresh the existing entry in place
                known.timestamp = timestamp
                known.rev = rev
                return
        self.repos.append(RepoEntry(url, rev, timestamp))

    def get(self, url):
        """ return (repo_url, rev) for url; rev is -1 when unknown/stale. """
        now = time.time()
        for known in self.repos:
            if not url.startswith(known.url):
                continue
            if now < known.timestamp + self.timeout:
                return known.url, known.rev
            return known.url, -1
        return url, -1
74
+
75
+ repositories = RepoCache()
76
+
77
+
78
+ # svn support code
79
+
80
+ ALLOWED_CHARS = "_ -/\\=$.~+%" #add characters as necessary when tested
81
+ if sys.platform == "win32":
82
+ ALLOWED_CHARS += ":"
83
+ ALLOWED_CHARS_HOST = ALLOWED_CHARS + '@:'
84
+
85
def _getsvnversion(ver=[]):
    """ return the 'major.minor' version string of the installed svn
    client, e.g. '1.8'.

    The (intentionally mutable) default argument caches the result so
    'svn --version' is executed at most once per process.
    """
    try:
        return ver[0]
    except IndexError:
        out = py.process.cmdexec("svn -q --version")
        # BUG FIX: the original discarded the result of .strip(), so a
        # two-component version like "1.3\n" kept its newline and never
        # compared equal to the "1.3" literals used elsewhere here
        out = out.strip()
        v = '.'.join(out.split('.')[:2])
        ver.append(v)
        return v
94
+
95
def _escape_helper(text):
    """ render text as a shell-safe string: on POSIX platforms the
    '$' character is backslash-escaped to survive shell expansion. """
    rendered = str(text)
    if sys.platform != 'win32':
        rendered = rendered.replace('$', '\\$')
    return rendered
100
+
101
def _check_for_bad_chars(text, allowed_chars=ALLOWED_CHARS):
    """ return True when text contains a character that is neither
    alphanumeric nor a member of allowed_chars. """
    return any(not ch.isalnum() and ch not in allowed_chars
               for ch in str(text))
109
+
110
def checkbadchars(url):
    """ raise ValueError when the host or path part of url contains a
    character outside the allowed sets (file:// urls are exempt). """
    # (hpk) not quite sure about the exact purpose, guido w.?
    proto, rest = url.split("://", 1)
    if proto == "file":
        return
    host, uripath = rest.split('/', 1)
    # only check for bad chars in the non-protocol parts
    if _check_for_bad_chars(host, ALLOWED_CHARS_HOST):
        raise ValueError("bad char in %r" % (url, ))
    if _check_for_bad_chars(uripath, ALLOWED_CHARS):
        raise ValueError("bad char in %r" % (url, ))
119
+
120
+
121
+ #_______________________________________________________________
122
+
123
+ class SvnPathBase(common.PathBase):
124
+ """ Base implementation for SvnPath implementations. """
125
+ sep = '/'
126
+
127
+ def _geturl(self):
128
+ return self.strpath
129
+ url = property(_geturl, None, None, "url of this svn-path.")
130
+
131
+ def __str__(self):
132
+ """ return a string representation (including rev-number) """
133
+ return self.strpath
134
+
135
+ def __hash__(self):
136
+ return hash(self.strpath)
137
+
138
+ def new(self, **kw):
139
+ """ create a modified version of this path. A 'rev' argument
140
+ indicates a new revision.
141
+ the following keyword arguments modify various path parts::
142
+
143
+ http://host.com/repo/path/file.ext
144
+ |-----------------------| dirname
145
+ |------| basename
146
+ |--| purebasename
147
+ |--| ext
148
+ """
149
+ obj = object.__new__(self.__class__)
150
+ obj.rev = kw.get('rev', self.rev)
151
+ obj.auth = kw.get('auth', self.auth)
152
+ dirname, basename, purebasename, ext = self._getbyspec(
153
+ "dirname,basename,purebasename,ext")
154
+ if 'basename' in kw:
155
+ if 'purebasename' in kw or 'ext' in kw:
156
+ raise ValueError("invalid specification %r" % kw)
157
+ else:
158
+ pb = kw.setdefault('purebasename', purebasename)
159
+ ext = kw.setdefault('ext', ext)
160
+ if ext and not ext.startswith('.'):
161
+ ext = '.' + ext
162
+ kw['basename'] = pb + ext
163
+
164
+ kw.setdefault('dirname', dirname)
165
+ kw.setdefault('sep', self.sep)
166
+ if kw['basename']:
167
+ obj.strpath = "%(dirname)s%(sep)s%(basename)s" % kw
168
+ else:
169
+ obj.strpath = "%(dirname)s" % kw
170
+ return obj
171
+
172
+ def _getbyspec(self, spec):
173
+ """ get specified parts of the path. 'arg' is a string
174
+ with comma separated path parts. The parts are returned
175
+ in exactly the order of the specification.
176
+
177
+ you may specify the following parts:
178
+
179
+ http://host.com/repo/path/file.ext
180
+ |-----------------------| dirname
181
+ |------| basename
182
+ |--| purebasename
183
+ |--| ext
184
+ """
185
+ res = []
186
+ parts = self.strpath.split(self.sep)
187
+ for name in spec.split(','):
188
+ name = name.strip()
189
+ if name == 'dirname':
190
+ res.append(self.sep.join(parts[:-1]))
191
+ elif name == 'basename':
192
+ res.append(parts[-1])
193
+ else:
194
+ basename = parts[-1]
195
+ i = basename.rfind('.')
196
+ if i == -1:
197
+ purebasename, ext = basename, ''
198
+ else:
199
+ purebasename, ext = basename[:i], basename[i:]
200
+ if name == 'purebasename':
201
+ res.append(purebasename)
202
+ elif name == 'ext':
203
+ res.append(ext)
204
+ else:
205
+ raise NameError("Don't know part %r" % name)
206
+ return res
207
+
208
+ def __eq__(self, other):
209
+ """ return true if path and rev attributes each match """
210
+ return (str(self) == str(other) and
211
+ (self.rev == other.rev or self.rev == other.rev))
212
+
213
+ def __ne__(self, other):
214
+ return not self == other
215
+
216
+ def join(self, *args):
217
+ """ return a new Path (with the same revision) which is composed
218
+ of the self Path followed by 'args' path components.
219
+ """
220
+ if not args:
221
+ return self
222
+
223
+ args = tuple([arg.strip(self.sep) for arg in args])
224
+ parts = (self.strpath, ) + args
225
+ newpath = self.__class__(self.sep.join(parts), self.rev, self.auth)
226
+ return newpath
227
+
228
+ def propget(self, name):
229
+ """ return the content of the given property. """
230
+ value = self._propget(name)
231
+ return value
232
+
233
+ def proplist(self):
234
+ """ list all property names. """
235
+ content = self._proplist()
236
+ return content
237
+
238
+ def size(self):
239
+ """ Return the size of the file content of the Path. """
240
+ return self.info().size
241
+
242
+ def mtime(self):
243
+ """ Return the last modification time of the file. """
244
+ return self.info().mtime
245
+
246
+ # shared help methods
247
+
248
+ def _escape(self, cmd):
249
+ return _escape_helper(cmd)
250
+
251
+
252
+ #def _childmaxrev(self):
253
+ # """ return maximum revision number of childs (or self.rev if no childs) """
254
+ # rev = self.rev
255
+ # for name, info in self._listdir_nameinfo():
256
+ # rev = max(rev, info.created_rev)
257
+ # return rev
258
+
259
+ #def _getlatestrevision(self):
260
+ # """ return latest repo-revision for this path. """
261
+ # url = self.strpath
262
+ # path = self.__class__(url, None)
263
+ #
264
+ # # we need a long walk to find the root-repo and revision
265
+ # while 1:
266
+ # try:
267
+ # rev = max(rev, path._childmaxrev())
268
+ # previous = path
269
+ # path = path.dirpath()
270
+ # except (IOError, process.cmdexec.Error):
271
+ # break
272
+ # if rev is None:
273
+ # raise IOError, "could not determine newest repo revision for %s" % self
274
+ # return rev
275
+
276
+ class Checkers(common.Checkers):
277
+ def dir(self):
278
+ try:
279
+ return self.path.info().kind == 'dir'
280
+ except py.error.Error:
281
+ return self._listdirworks()
282
+
283
+ def _listdirworks(self):
284
+ try:
285
+ self.path.listdir()
286
+ except py.error.ENOENT:
287
+ return False
288
+ else:
289
+ return True
290
+
291
+ def file(self):
292
+ try:
293
+ return self.path.info().kind == 'file'
294
+ except py.error.ENOENT:
295
+ return False
296
+
297
+ def exists(self):
298
+ try:
299
+ return self.path.info()
300
+ except py.error.ENOENT:
301
+ return self._listdirworks()
302
+
303
def parse_apr_time(timestr):
    """ convert an apr-style timestamp ("%Y-%m-%dT%H:%M:%S.<frac>") into
    a local epoch value; raises ValueError when the fractional part
    (and thus the recognizable apr format) is missing. """
    dot = timestr.rfind('.')
    if dot == -1:
        raise ValueError("could not parse %s" % timestr)
    parsed = time.strptime(timestr[:dot], "%Y-%m-%dT%H:%M:%S")
    return time.mktime(parsed)
310
+
311
class PropListDict(dict):
    """ a Dictionary which fetches values (InfoSvnCommand instances) lazily"""
    def __init__(self, path, keynames):
        # every known property name starts out unfetched (None)
        dict.__init__(self, [(x, None) for x in keynames])
        self.path = path

    def __getitem__(self, key):
        cached = dict.__getitem__(self, key)
        if cached is not None:
            return cached
        # first access: fetch via svn propget and memoize
        fetched = self.path.propget(key)
        dict.__setitem__(self, key, fetched)
        return fetched
323
+
324
def fixlocale():
    """ return a command prefix forcing the C locale on POSIX so svn
    output can be parsed reliably; empty string on windows. """
    if sys.platform == 'win32':
        return ''
    return 'LC_ALL=C '
328
+
329
# some nasty chunk of code to solve path and url conversion and quoting issues
ILLEGAL_CHARS = '* | \\ / : < > ? \t \n \x0b \x0c \r'.split(' ')
if os.sep in ILLEGAL_CHARS:
    ILLEGAL_CHARS.remove(os.sep)
ISWINDOWS = sys.platform == 'win32'
_reg_allow_disk = re.compile(r'^([a-z]\:\\)?[^:]+$', re.I)

def _check_path(path):
    """ raise ValueError when path contains a character that cannot be
    used in an svn working-copy path (non-printable or in ILLEGAL_CHARS;
    a leading drive-letter colon is tolerated on windows). """
    # BUG FIX: 'string' was never imported at module level (only os, sys,
    # time, re, calendar are), so string.printable raised NameError here
    import string
    illegal = ILLEGAL_CHARS[:]
    sp = path.strpath
    if ISWINDOWS:
        illegal.remove(':')
        if not _reg_allow_disk.match(sp):
            raise ValueError('path may not contain a colon (:)')
    for char in sp:
        if char not in string.printable or char in illegal:
            raise ValueError('illegal character %r in path' % (char,))
345
+
346
def path_to_fspath(path, addat=True):
    """ return the filesystem string of path, optionally appending a
    peg-revision marker (@REV, or @HEAD when no revision is pinned). """
    _check_path(path)
    sp = path.strpath
    if not addat:
        return sp
    if path.rev != -1:
        return '%s@%s' % (sp, path.rev)
    return '%s@HEAD' % (sp,)
354
+
355
def url_from_path(path):
    """ build a file:// url for working-copy path, quoting unsafe
    characters and appending the peg revision (@REV or @HEAD). """
    fspath = path_to_fspath(path, False)
    # BUG FIX: 'from urllib import quote' is Python-2-only; on Python 3
    # the function lives in urllib.parse, so fall back accordingly
    try:
        from urllib.parse import quote
    except ImportError:
        from urllib import quote
    if ISWINDOWS:
        match = _reg_allow_disk.match(fspath)
        fspath = fspath.replace('\\', '/')
        if match.group(1):
            # keep the drive prefix unquoted, quote the rest
            fspath = '/%s%s' % (match.group(1).replace('\\', '/'),
                                quote(fspath[len(match.group(1)):]))
        else:
            fspath = quote(fspath)
    else:
        fspath = quote(fspath)
    if path.rev != -1:
        fspath = '%s@%s' % (fspath, path.rev)
    else:
        fspath = '%s@HEAD' % (fspath,)
    return 'file://%s' % (fspath,)
373
+
374
class SvnAuth(object):
    """ container for auth information for Subversion """
    def __init__(self, username, password, cache_auth=True, interactive=True):
        self.username = username
        self.password = password
        self.cache_auth = cache_auth
        self.interactive = interactive

    def makecmdoptions(self):
        """ render the stored credentials as svn command-line options. """
        quoted_user = self.username.replace('"', '\\"')
        quoted_pass = self.password.replace('"', '\\"')
        options = []
        if quoted_user:
            options.append('--username="%s"' % (quoted_user,))
        if quoted_pass:
            options.append('--password="%s"' % (quoted_pass,))
        if not self.cache_auth:
            options.append('--no-auth-cache')
        if not self.interactive:
            options.append('--non-interactive')
        return ' '.join(options)

    def __str__(self):
        return "<SvnAuth username=%s ...>" %(self.username,)
398
+
399
+ rex_blame = re.compile(r'\s*(\d+)\s+(\S+) (.*)')
400
+
401
+ class SvnWCCommandPath(common.PathBase):
402
+ """ path implementation offering access/modification to svn working copies.
403
+ It has methods similar to the functions in os.path and similar to the
404
+ commands of the svn client.
405
+ """
406
+ sep = os.sep
407
+
408
    def __new__(cls, wcpath=None, auth=None):
        """ create (or pass through) a working-copy path object.

        An instance of exactly this class is returned unchanged;
        a subclass instance is re-wrapped around its localpath.
        Paths containing characters outside ALLOWED_CHARS are rejected.
        """
        self = object.__new__(cls)
        if isinstance(wcpath, cls):
            if wcpath.__class__ == cls:
                # already exactly the right type -> reuse the instance
                return wcpath
            wcpath = wcpath.localpath
        if _check_for_bad_chars(str(wcpath),
                                ALLOWED_CHARS):
            raise ValueError("bad char in wcpath %s" % (wcpath, ))
        self.localpath = py.path.local(wcpath)
        self.auth = auth
        return self
420
+
421
+ strpath = property(lambda x: str(x.localpath), None, None, "string path")
422
+ rev = property(lambda x: x.info(usecache=0).rev, None, None, "revision")
423
+
424
+ def __eq__(self, other):
425
+ return self.localpath == getattr(other, 'localpath', None)
426
+
427
+ def _geturl(self):
428
+ if getattr(self, '_url', None) is None:
429
+ info = self.info()
430
+ self._url = info.url #SvnPath(info.url, info.rev)
431
+ assert isinstance(self._url, py.builtin._basestring)
432
+ return self._url
433
+
434
+ url = property(_geturl, None, None, "url of this WC item")
435
+
436
+ def _escape(self, cmd):
437
+ return _escape_helper(cmd)
438
+
439
+ def dump(self, obj):
440
+ """ pickle object into path location"""
441
+ return self.localpath.dump(obj)
442
+
443
+ def svnurl(self):
444
+ """ return current SvnPath for this WC-item. """
445
+ info = self.info()
446
+ return py.path.svnurl(info.url)
447
+
448
+ def __repr__(self):
449
+ return "svnwc(%r)" % (self.strpath) # , self._url)
450
+
451
+ def __str__(self):
452
+ return str(self.localpath)
453
+
454
+ def _makeauthoptions(self):
455
+ if self.auth is None:
456
+ return ''
457
+ return self.auth.makecmdoptions()
458
+
459
+ def _authsvn(self, cmd, args=None):
460
+ args = args and list(args) or []
461
+ args.append(self._makeauthoptions())
462
+ return self._svn(cmd, *args)
463
+
464
+ def _svn(self, cmd, *args):
465
+ l = ['svn %s' % cmd]
466
+ args = [self._escape(item) for item in args]
467
+ l.extend(args)
468
+ l.append('"%s"' % self._escape(self.strpath))
469
+ # try fixing the locale because we can't otherwise parse
470
+ string = fixlocale() + " ".join(l)
471
+ try:
472
+ try:
473
+ key = 'LC_MESSAGES'
474
+ hold = os.environ.get(key)
475
+ os.environ[key] = 'C'
476
+ out = py.process.cmdexec(string)
477
+ finally:
478
+ if hold:
479
+ os.environ[key] = hold
480
+ else:
481
+ del os.environ[key]
482
+ except py.process.cmdexec.Error:
483
+ e = sys.exc_info()[1]
484
+ strerr = e.err.lower()
485
+ if strerr.find('not found') != -1:
486
+ raise py.error.ENOENT(self)
487
+ elif strerr.find("E200009:") != -1:
488
+ raise py.error.ENOENT(self)
489
+ if (strerr.find('file exists') != -1 or
490
+ strerr.find('file already exists') != -1 or
491
+ strerr.find('w150002:') != -1 or
492
+ strerr.find("can't create directory") != -1):
493
+ raise py.error.EEXIST(strerr) #self)
494
+ raise
495
+ return out
496
+
497
+ def switch(self, url):
498
+ """ switch to given URL. """
499
+ self._authsvn('switch', [url])
500
+
501
+ def checkout(self, url=None, rev=None):
502
+ """ checkout from url to local wcpath. """
503
+ args = []
504
+ if url is None:
505
+ url = self.url
506
+ if rev is None or rev == -1:
507
+ if (sys.platform != 'win32' and
508
+ _getsvnversion() == '1.3'):
509
+ url += "@HEAD"
510
+ else:
511
+ if _getsvnversion() == '1.3':
512
+ url += "@%d" % rev
513
+ else:
514
+ args.append('-r' + str(rev))
515
+ args.append(url)
516
+ self._authsvn('co', args)
517
+
518
+ def update(self, rev='HEAD', interactive=True):
519
+ """ update working copy item to given revision. (None -> HEAD). """
520
+ opts = ['-r', rev]
521
+ if not interactive:
522
+ opts.append("--non-interactive")
523
+ self._authsvn('up', opts)
524
+
525
+ def write(self, content, mode='w'):
526
+ """ write content into local filesystem wc. """
527
+ self.localpath.write(content, mode)
528
+
529
+ def dirpath(self, *args):
530
+ """ return the directory Path of the current Path. """
531
+ return self.__class__(self.localpath.dirpath(*args), auth=self.auth)
532
+
533
+ def _ensuredirs(self):
534
+ parent = self.dirpath()
535
+ if parent.check(dir=0):
536
+ parent._ensuredirs()
537
+ if self.check(dir=0):
538
+ self.mkdir()
539
+ return self
540
+
541
+ def ensure(self, *args, **kwargs):
542
+ """ ensure that an args-joined path exists (by default as
543
+ a file). if you specify a keyword argument 'directory=True'
544
+ then the path is forced to be a directory path.
545
+ """
546
+ p = self.join(*args)
547
+ if p.check():
548
+ if p.check(versioned=False):
549
+ p.add()
550
+ return p
551
+ if kwargs.get('dir', 0):
552
+ return p._ensuredirs()
553
+ parent = p.dirpath()
554
+ parent._ensuredirs()
555
+ p.write("")
556
+ p.add()
557
+ return p
558
+
559
+ def mkdir(self, *args):
560
+ """ create & return the directory joined with args. """
561
+ if args:
562
+ return self.join(*args).mkdir()
563
+ else:
564
+ self._svn('mkdir')
565
+ return self
566
+
567
+ def add(self):
568
+ """ add ourself to svn """
569
+ self._svn('add')
570
+
571
+ def remove(self, rec=1, force=1):
572
+ """ remove a file or a directory tree. 'rec'ursive is
573
+ ignored and considered always true (because of
574
+ underlying svn semantics.
575
+ """
576
+ assert rec, "svn cannot remove non-recursively"
577
+ if not self.check(versioned=True):
578
+ # not added to svn (anymore?), just remove
579
+ py.path.local(self).remove()
580
+ return
581
+ flags = []
582
+ if force:
583
+ flags.append('--force')
584
+ self._svn('remove', *flags)
585
+
586
+ def copy(self, target):
587
+ """ copy path to target."""
588
+ py.process.cmdexec("svn copy %s %s" %(str(self), str(target)))
589
+
590
+ def rename(self, target):
591
+ """ rename this path to target. """
592
+ py.process.cmdexec("svn move --force %s %s" %(str(self), str(target)))
593
+
594
+ def lock(self):
595
+ """ set a lock (exclusive) on the resource """
596
+ out = self._authsvn('lock').strip()
597
+ if not out:
598
+ # warning or error, raise exception
599
+ raise ValueError("unknown error in svn lock command")
600
+
601
+ def unlock(self):
602
+ """ unset a previously set lock """
603
+ out = self._authsvn('unlock').strip()
604
+ if out.startswith('svn:'):
605
+ # warning or error, raise exception
606
+ raise Exception(out[4:])
607
+
608
+ def cleanup(self):
609
+ """ remove any locks from the resource """
610
+ # XXX should be fixed properly!!!
611
+ try:
612
+ self.unlock()
613
+ except:
614
+ pass
615
+
616
+ def status(self, updates=0, rec=0, externals=0):
617
+ """ return (collective) Status object for this file. """
618
+ # http://svnbook.red-bean.com/book.html#svn-ch-3-sect-4.3.1
619
+ # 2201 2192 jum test
620
+ # XXX
621
+ if externals:
622
+ raise ValueError("XXX cannot perform status() "
623
+ "on external items yet")
624
+ else:
625
+ #1.2 supports: externals = '--ignore-externals'
626
+ externals = ''
627
+ if rec:
628
+ rec= ''
629
+ else:
630
+ rec = '--non-recursive'
631
+
632
+ # XXX does not work on all subversion versions
633
+ #if not externals:
634
+ # externals = '--ignore-externals'
635
+
636
+ if updates:
637
+ updates = '-u'
638
+ else:
639
+ updates = ''
640
+
641
+ try:
642
+ cmd = 'status -v --xml --no-ignore %s %s %s' % (
643
+ updates, rec, externals)
644
+ out = self._authsvn(cmd)
645
+ except py.process.cmdexec.Error:
646
+ cmd = 'status -v --no-ignore %s %s %s' % (
647
+ updates, rec, externals)
648
+ out = self._authsvn(cmd)
649
+ rootstatus = WCStatus(self).fromstring(out, self)
650
+ else:
651
+ rootstatus = XMLWCStatus(self).fromstring(out, self)
652
+ return rootstatus
653
+
654
+ def diff(self, rev=None):
655
+ """ return a diff of the current path against revision rev (defaulting
656
+ to the last one).
657
+ """
658
+ args = []
659
+ if rev is not None:
660
+ args.append("-r %d" % rev)
661
+ out = self._authsvn('diff', args)
662
+ return out
663
+
664
+ def blame(self):
665
+ """ return a list of tuples of three elements:
666
+ (revision, commiter, line)
667
+ """
668
+ out = self._svn('blame')
669
+ result = []
670
+ blamelines = out.splitlines()
671
+ reallines = py.path.svnurl(self.url).readlines()
672
+ for i, (blameline, line) in enumerate(
673
+ zip(blamelines, reallines)):
674
+ m = rex_blame.match(blameline)
675
+ if not m:
676
+ raise ValueError("output line %r of svn blame does not match "
677
+ "expected format" % (line, ))
678
+ rev, name, _ = m.groups()
679
+ result.append((int(rev), name, line))
680
+ return result
681
+
682
+ _rex_commit = re.compile(r'.*Committed revision (\d+)\.$', re.DOTALL)
683
    def commit(self, msg='', rec=1):
        """ commit with support for non-recursive commits

        Returns the newly committed revision number, or None when
        svn reported nothing to commit.
        """
        # XXX i guess escaping should be done better here?!?
        cmd = 'commit -m "%s" --force-log' % (msg.replace('"', '\\"'),)
        if not rec:
            cmd += ' -N'
        out = self._authsvn(cmd)
        try:
            # cached info for this path is stale after a commit
            del cache.info[self]
        except KeyError:
            pass
        if out:
            m = self._rex_commit.match(out)
            return int(m.group(1))
697
+
698
+ def propset(self, name, value, *args):
699
+ """ set property name to value on this path. """
700
+ d = py.path.local.mkdtemp()
701
+ try:
702
+ p = d.join('value')
703
+ p.write(value)
704
+ self._svn('propset', name, '--file', str(p), *args)
705
+ finally:
706
+ d.remove()
707
+
708
+ def propget(self, name):
709
+ """ get property name on this path. """
710
+ res = self._svn('propget', name)
711
+ return res[:-1] # strip trailing newline
712
+
713
+ def propdel(self, name):
714
+ """ delete property name on this path. """
715
+ res = self._svn('propdel', name)
716
+ return res[:-1] # strip trailing newline
717
+
718
+ def proplist(self, rec=0):
719
+ """ return a mapping of property names to property values.
720
+ If rec is True, then return a dictionary mapping sub-paths to such mappings.
721
+ """
722
+ if rec:
723
+ res = self._svn('proplist -R')
724
+ return make_recursive_propdict(self, res)
725
+ else:
726
+ res = self._svn('proplist')
727
+ lines = res.split('\n')
728
+ lines = [x.strip() for x in lines[1:]]
729
+ return PropListDict(self, lines)
730
+
731
+ def revert(self, rec=0):
732
+ """ revert the local changes of this path. if rec is True, do so
733
+ recursively. """
734
+ if rec:
735
+ result = self._svn('revert -R')
736
+ else:
737
+ result = self._svn('revert')
738
+ return result
739
+
740
+ def new(self, **kw):
741
+ """ create a modified version of this path. A 'rev' argument
742
+ indicates a new revision.
743
+ the following keyword arguments modify various path parts:
744
+
745
+ http://host.com/repo/path/file.ext
746
+ |-----------------------| dirname
747
+ |------| basename
748
+ |--| purebasename
749
+ |--| ext
750
+ """
751
+ if kw:
752
+ localpath = self.localpath.new(**kw)
753
+ else:
754
+ localpath = self.localpath
755
+ return self.__class__(localpath, auth=self.auth)
756
+
757
+ def join(self, *args, **kwargs):
758
+ """ return a new Path (with the same revision) which is composed
759
+ of the self Path followed by 'args' path components.
760
+ """
761
+ if not args:
762
+ return self
763
+ localpath = self.localpath.join(*args, **kwargs)
764
+ return self.__class__(localpath, auth=self.auth)
765
+
766
    def info(self, usecache=1):
        """ return an Info structure with svn-provided information.

        When *usecache* is true, a previously fetched Info for this path
        is returned from the module-level cache instead of invoking
        ``svn info`` again.

        Raises py.error.ENOENT when svn reports the path as not being
        under version control.
        """
        info = usecache and cache.info.get(self)
        if not info:
            try:
                output = self._svn('info')
            except py.process.cmdexec.Error:
                e = sys.exc_info()[1]
                # map svn's two "not versioned" error texts onto ENOENT
                if e.err.find('Path is not a working copy directory') != -1:
                    raise py.error.ENOENT(self, e.err)
                elif e.err.find("is not under version control") != -1:
                    raise py.error.ENOENT(self, e.err)
                raise
            # XXX SVN 1.3 has output on stderr instead of stdout (while it does
            # return 0!), so a bit nasty, but we assume no output is output
            # to stderr...
            if (output.strip() == '' or
                    output.lower().find('not a versioned resource') != -1):
                raise py.error.ENOENT(self, output)
            info = InfoSvnWCCommand(output)

            # Can't reliably compare on Windows without access to win32api
            if sys.platform != 'win32':
                if info.path != self.localpath:
                    raise py.error.ENOENT(self, "not a versioned resource:" +
                            " %s != %s" % (info.path, self.localpath))
            cache.info[self] = info
        return info
794
+
795
+ def listdir(self, fil=None, sort=None):
796
+ """ return a sequence of Paths.
797
+
798
+ listdir will return either a tuple or a list of paths
799
+ depending on implementation choices.
800
+ """
801
+ if isinstance(fil, str):
802
+ fil = common.FNMatcher(fil)
803
+ # XXX unify argument naming with LocalPath.listdir
804
+ def notsvn(path):
805
+ return path.basename != '.svn'
806
+
807
+ paths = []
808
+ for localpath in self.localpath.listdir(notsvn):
809
+ p = self.__class__(localpath, auth=self.auth)
810
+ if notsvn(p) and (not fil or fil(p)):
811
+ paths.append(p)
812
+ self._sortlist(paths, sort)
813
+ return paths
814
+
815
    def open(self, mode='r'):
        """ return an opened file with the given mode.

        Opens the working-copy file directly via the builtin ``open``
        on ``self.strpath``.
        """
        return open(self.strpath, mode)
818
+
819
    def _getbyspec(self, spec):
        # delegate spec-based part retrieval (e.g. "basename,ext")
        # to the underlying local path
        return self.localpath._getbyspec(spec)
821
+
822
    class Checkers(py.path.local.Checkers):
        """ check() helpers for working-copy paths; adds 'versioned'
        on top of the local-path checkers. """
        def __init__(self, path):
            self.svnwcpath = path
            self.path = path.localpath
        def versioned(self):
            # a path counts as versioned when 'svn info' succeeds on it;
            # the known "not versioned" failure texts map to False, any
            # other command failure propagates
            try:
                s = self.svnwcpath.info()
            except (py.error.ENOENT, py.error.EEXIST):
                return False
            except py.process.cmdexec.Error:
                e = sys.exc_info()[1]
                if e.err.find('is not a working copy')!=-1:
                    return False
                if e.err.lower().find('not a versioned resource') != -1:
                    return False
                raise
            else:
                return True
840
+
841
    def log(self, rev_start=None, rev_end=1, verbose=False):
        """ return a list of LogEntry instances for this path.
        rev_start is the starting revision (defaulting to the first one).
        rev_end is the last revision (defaulting to HEAD).
        if verbose is True, then the LogEntry instances also know which files changed.
        """
        assert self.check()   # make it simpler for the pipe
        # None means HEAD for either bound
        rev_start = rev_start is None and "HEAD" or rev_start
        rev_end = rev_end is None and "HEAD" or rev_end
        if rev_start == "HEAD" and rev_end == 1:
            # full history: let svn use its default range
            rev_opt = ""
        else:
            rev_opt = "-r %s:%s" % (rev_start, rev_end)
        verbose_opt = verbose and "-v" or ""
        locale_env = fixlocale()
        # some blather on stderr
        auth_opt = self._makeauthoptions()
        #stdin, stdout, stderr  = os.popen3(locale_env +
        #                      'svn log --xml %s %s %s "%s"' % (
        #                        rev_opt, verbose_opt, auth_opt,
        #                       self.strpath))
        cmd = locale_env + 'svn log --xml %s %s %s "%s"' % (
            rev_opt, verbose_opt, auth_opt, self.strpath)

        popen = subprocess.Popen(cmd,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    shell=True,
                )
        stdout, stderr = popen.communicate()
        stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
        minidom,ExpatError = importxml()
        try:
            tree = minidom.parseString(stdout)
        except ExpatError:
            # non-XML output means svn itself reported an error,
            # typically an unknown revision
            raise ValueError('no such revision')
        result = []
        for logentry in filter(None, tree.firstChild.childNodes):
            if logentry.nodeType == logentry.ELEMENT_NODE:
                result.append(LogEntry(logentry))
        return result
882
+
883
    def size(self):
        """ Return the size of the file content of the Path.

        Delegates to the svn-provided Info structure (see info()).
        """
        return self.info().size
886
+
887
    def mtime(self):
        """ Return the last modification time of the file.

        Delegates to the svn-provided Info structure (see info()).
        """
        return self.info().mtime
890
+
891
    def __hash__(self):
        # hash on path string, concrete class and auth so that paths of
        # different subclasses or credentials do not collide
        return hash((self.strpath, self.__class__, self.auth))
893
+
894
+
895
class WCStatus:
    """ Result of parsing plain-text ``svn status`` output.

    For each status category listed in *attrnames* an attribute holding
    a list of affected working-copy paths is kept; ``rev``, ``modrev``
    and ``author`` describe the root path itself.
    """
    attrnames = ('modified','added', 'conflict', 'unchanged', 'external',
                'deleted', 'prop_modified', 'unknown', 'update_available',
                'incomplete', 'kindmismatch', 'ignored', 'locked', 'replaced'
    )

    def __init__(self, wcpath, rev=None, modrev=None, author=None):
        self.wcpath = wcpath
        self.rev = rev
        self.modrev = modrev
        self.author = author

        # one empty list per status category
        for name in self.attrnames:
            setattr(self, name, [])

    def allpath(self, sort=True, **kw):
        """ Return every path appearing in any status category.

        Category names passed as keyword arguments with a false value
        are excluded.  Each path appears once; the result is sorted
        unless sort=False.
        """
        d = {}
        for name in self.attrnames:
            if name not in kw or kw[name]:
                for path in getattr(self, name):
                    d[path] = 1
        # dict.keys() is a non-sortable view on Python 3 -- make a real
        # list before sorting (previously: l = d.keys(); l.sort())
        l = list(d.keys())
        if sort:
            l.sort()
        return l

    # XXX a bit scary to assume there's always 2 spaces between username and
    # path, however with win32 allowing spaces in user names there doesn't
    # seem to be a more solid approach :(
    _rex_status = re.compile(r'\s+(\d+|-)\s+(\S+)\s+(.+?)\s{2,}(.*)')

    def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
        """ return a new WCStatus object from data 's'
        """
        rootstatus = WCStatus(rootwcpath, rev, modrev, author)
        update_rev = None
        for line in data.split('\n'):
            if not line.strip():
                continue
            # first 8 characters are the status flag columns
            flags, rest = line[:8], line[8:]
            c0,c1,c2,c3,c4,c5,x6,c7 = flags

            if c0 in '?XI':
                # unversioned / external / ignored items carry no
                # revision columns; the filename follows the flags
                fn = line.split(None, 1)[1]
                if c0 == '?':
                    wcpath = rootwcpath.join(fn, abs=1)
                    rootstatus.unknown.append(wcpath)
                elif c0 == 'X':
                    wcpath = rootwcpath.__class__(
                        rootwcpath.localpath.join(fn, abs=1),
                        auth=rootwcpath.auth)
                    rootstatus.external.append(wcpath)
                elif c0 == 'I':
                    wcpath = rootwcpath.join(fn, abs=1)
                    rootstatus.ignored.append(wcpath)

                continue

            #elif c0 in '~!' or c4 == 'S':
            #    raise NotImplementedError("received flag %r" % c0)

            m = WCStatus._rex_status.match(rest)
            if not m:
                if c7 == '*':
                    fn = rest.strip()
                    wcpath = rootwcpath.join(fn, abs=1)
                    rootstatus.update_available.append(wcpath)
                    continue
                if line.lower().find('against revision:')!=-1:
                    update_rev = int(rest.split(':')[1].strip())
                    continue
                if line.lower().find('status on external') > -1:
                    # XXX not sure what to do here... perhaps we want to
                    # store some state instead of just continuing, as right
                    # now it makes the top-level external get added twice
                    # (once as external, once as 'normal' unchanged item)
                    # because of the way SVN presents external items
                    continue
                # keep trying
                raise ValueError("could not parse line %r" % line)
            else:
                rev, modrev, author, fn = m.groups()
                wcpath = rootwcpath.join(fn, abs=1)
                #assert wcpath.check()
                if c0 == 'M':
                    assert wcpath.check(file=1), "didn't expect a directory with changed content here"
                    rootstatus.modified.append(wcpath)
                elif c0 == 'A' or c3 == '+' :
                    rootstatus.added.append(wcpath)
                elif c0 == 'D':
                    rootstatus.deleted.append(wcpath)
                elif c0 == 'C':
                    rootstatus.conflict.append(wcpath)
                elif c0 == '~':
                    rootstatus.kindmismatch.append(wcpath)
                elif c0 == '!':
                    rootstatus.incomplete.append(wcpath)
                elif c0 == 'R':
                    rootstatus.replaced.append(wcpath)
                elif not c0.strip():
                    rootstatus.unchanged.append(wcpath)
                else:
                    raise NotImplementedError("received flag %r" % c0)

                if c1 == 'M':
                    rootstatus.prop_modified.append(wcpath)
                # XXX do we cover all client versions here?
                if c2 == 'L' or c5 == 'K':
                    rootstatus.locked.append(wcpath)
                if c7 == '*':
                    rootstatus.update_available.append(wcpath)

                if wcpath == rootwcpath:
                    rootstatus.rev = rev
                    rootstatus.modrev = modrev
                    rootstatus.author = author
                    if update_rev:
                        rootstatus.update_rev = update_rev
                    continue
        return rootstatus
    fromstring = staticmethod(fromstring)
1020
+
1021
class XMLWCStatus(WCStatus):
    """ WCStatus variant built from ``svn status --xml`` output. """
    def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
        """ parse 'data' (XML string as outputted by svn st) into a status obj
        """
        # XXX for externals, the path is shown twice: once
        # with external information, and once with full info as if
        # the item was a normal non-external... the current way of
        # dealing with this issue is by ignoring it - this does make
        # externals appear as external items as well as 'normal',
        # unchanged ones in the status object so this is far from ideal
        rootstatus = WCStatus(rootwcpath, rev, modrev, author)
        update_rev = None
        minidom, ExpatError = importxml()
        try:
            doc = minidom.parseString(data)
        except ExpatError:
            e = sys.exc_info()[1]
            raise ValueError(str(e))
        urevels = doc.getElementsByTagName('against')
        if urevels:
            rootstatus.update_rev = urevels[-1].getAttribute('revision')
        for entryel in doc.getElementsByTagName('entry'):
            path = entryel.getAttribute('path')
            statusel = entryel.getElementsByTagName('wc-status')[0]
            itemstatus = statusel.getAttribute('item')

            # item states without revision information are handled first
            if itemstatus == 'unversioned':
                wcpath = rootwcpath.join(path, abs=1)
                rootstatus.unknown.append(wcpath)
                continue
            elif itemstatus == 'external':
                wcpath = rootwcpath.__class__(
                    rootwcpath.localpath.join(path, abs=1),
                    auth=rootwcpath.auth)
                rootstatus.external.append(wcpath)
                continue
            elif itemstatus == 'ignored':
                wcpath = rootwcpath.join(path, abs=1)
                rootstatus.ignored.append(wcpath)
                continue
            elif itemstatus == 'incomplete':
                wcpath = rootwcpath.join(path, abs=1)
                rootstatus.incomplete.append(wcpath)
                continue

            rev = statusel.getAttribute('revision')
            if itemstatus == 'added' or itemstatus == 'none':
                # not yet committed: no meaningful commit info available
                rev = '0'
                modrev = '?'
                author = '?'
                date = ''
            elif itemstatus == "replaced":
                pass
            else:
                #print entryel.toxml()
                commitel = entryel.getElementsByTagName('commit')[0]
                # NOTE(review): a minidom Element is always truthy, so this
                # branch is effectively unconditional here -- were it ever
                # falsy, modrev/author/date from a previous iteration would
                # leak through; confirm before restructuring
                if commitel:
                    modrev = commitel.getAttribute('revision')
                    author = ''
                    author_els = commitel.getElementsByTagName('author')
                    if author_els:
                        for c in author_els[0].childNodes:
                            author += c.nodeValue
                    date = ''
                    for c in commitel.getElementsByTagName('date')[0]\
                            .childNodes:
                        date += c.nodeValue

            wcpath = rootwcpath.join(path, abs=1)

            assert itemstatus != 'modified' or wcpath.check(file=1), (
                'did\'t expect a directory with changed content here')

            # map svn's xml item names onto WCStatus category names
            itemattrname = {
                'normal': 'unchanged',
                'unversioned': 'unknown',
                'conflicted': 'conflict',
                'none': 'added',
            }.get(itemstatus, itemstatus)

            attr = getattr(rootstatus, itemattrname)
            attr.append(wcpath)

            propsstatus = statusel.getAttribute('props')
            if propsstatus not in ('none', 'normal'):
                rootstatus.prop_modified.append(wcpath)

            if wcpath == rootwcpath:
                rootstatus.rev = rev
                rootstatus.modrev = modrev
                rootstatus.author = author
                rootstatus.date = date

            # handle repos-status element (remote info)
            rstatusels = entryel.getElementsByTagName('repos-status')
            if rstatusels:
                rstatusel = rstatusels[0]
                ritemstatus = rstatusel.getAttribute('item')
                if ritemstatus in ('added', 'modified'):
                    rootstatus.update_available.append(wcpath)

            lockels = entryel.getElementsByTagName('lock')
            if len(lockels):
                rootstatus.locked.append(wcpath)

        return rootstatus
    fromstring = staticmethod(fromstring)
1128
+
1129
class InfoSvnWCCommand:
    """ Parsed result of a plain-text ``svn info`` invocation on a
    working-copy path (url, kind, rev, path, size, and -- when svn
    reports them -- created_rev, last_author, mtime/time). """
    def __init__(self, output):
        # Expected input format:
        # Path: test
        # URL: http://codespeak.net/svn/std.path/trunk/dist/std.path/test
        # Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
        # Revision: 2151
        # Node Kind: directory
        # Schedule: normal
        # Last Changed Author: hpk
        # Last Changed Rev: 2100
        # Last Changed Date: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
        # Properties Last Updated: 2003-11-03 14:47:48 +0100 (Mon, 03 Nov 2003)

        # normalize "Last Changed Rev" -> d['lastchangedrev'] etc.
        d = {}
        for line in output.split('\n'):
            if not line.strip():
                continue
            key, value = line.split(':', 1)
            key = key.lower().replace(' ', '')
            value = value.strip()
            d[key] = value
        try:
            self.url = d['url']
        except KeyError:
            raise ValueError("Not a versioned resource")
            #raise ValueError, "Not a versioned resource %r" % path
        self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
        try:
            self.rev = int(d['revision'])
        except KeyError:
            self.rev = None

        self.path = py.path.local(d['path'])
        self.size = self.path.size()
        if 'lastchangedrev' in d:
            self.created_rev = int(d['lastchangedrev'])
        if 'lastchangedauthor' in d:
            self.last_author = d['lastchangedauthor']
        if 'lastchangeddate' in d:
            self.mtime = parse_wcinfotime(d['lastchangeddate'])
            # microsecond-resolution variant of mtime
            self.time = self.mtime * 1000000

    def __eq__(self, other):
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable on Python 3 -- confirm nothing uses them as dict keys
        return self.__dict__ == other.__dict__
1173
+
1174
def parse_wcinfotime(timestr):
    """ Parse an svn working-copy timestamp and return seconds since
    the epoch, UTC.

    Example input: '2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)'

    Raises ValueError when *timestr* does not match the expected format.
    """
    matched = re.match(r'(\d+-\d+-\d+ \d+:\d+:\d+) ([+-]\d+) .*', timestr)
    if matched is None:
        raise ValueError("timestring %r does not match" % timestr)
    stamp, _offset = matched.groups()
    # the timezone offset is deliberately not applied -- the return
    # value should be UTC (matches the original behaviour)
    parsed = time.strptime(stamp, "%Y-%m-%d %H:%M:%S")
    return calendar.timegm(parsed)
1184
+
1185
def make_recursive_propdict(wcroot,
                            output,
                            rex = re.compile("Properties on '(.*)':")):
    """ Parse 'svn proplist -R' output and return a dictionary of
    path -> PropListDict mappings.

    Raises ValueError when a non-indented line is not a
    "Properties on '...'" header.
    """
    remaining = [x for x in output.split('\n') if x]
    pdict = {}
    while remaining:
        header = remaining.pop(0)
        matched = rex.match(header)
        if matched is None:
            raise ValueError("could not parse propget-line: %r" % header)
        wcpath = wcroot.join(matched.groups()[0], abs=1)
        # indented lines following a header are that path's property names
        propnames = []
        while remaining and remaining[0].startswith(' '):
            propnames.append(remaining.pop(0).strip())
        assert propnames, "must have found properties!"
        pdict[wcpath] = PropListDict(wcpath, propnames)
    return pdict
1205
+
1206
+
1207
def importxml(cache=[]):
    """ Lazily import and return [minidom, ExpatError].

    The mutable default argument is intentional: it acts as a
    module-lifetime memoization cache so the imports happen only once.
    """
    if not cache:
        from xml.dom import minidom
        from xml.parsers.expat import ExpatError
        cache.extend([minidom, ExpatError])
    return cache
1214
+
1215
class LogEntry:
    """ One <logentry> element of 'svn log --xml' output: revision,
    author, msg, date and (with verbose logs) the changed paths. """
    def __init__(self, logentry):
        self.rev = int(logentry.getAttribute('revision'))
        for lpart in filter(None, logentry.childNodes):
            if lpart.nodeType == lpart.ELEMENT_NODE:
                if lpart.nodeName == 'author':
                    self.author = lpart.firstChild.nodeValue
                elif lpart.nodeName == 'msg':
                    # an empty commit message yields an element with no child
                    if lpart.firstChild:
                        self.msg = lpart.firstChild.nodeValue
                    else:
                        self.msg = ''
                elif lpart.nodeName == 'date':
                    #2003-07-29T20:05:11.598637Z
                    timestr = lpart.firstChild.nodeValue
                    self.date = parse_apr_time(timestr)
                elif lpart.nodeName == 'paths':
                    # only present for verbose logs; wraps each changed path
                    self.strpaths = []
                    for ppart in filter(None, lpart.childNodes):
                        if ppart.nodeType == ppart.ELEMENT_NODE:
                            self.strpaths.append(PathEntry(ppart))
    def __repr__(self):
        # NOTE(review): assumes author/date elements were present in the
        # XML; repr would raise AttributeError otherwise -- confirm
        return '<Logentry rev=%d author=%s date=%s>' % (
            self.rev, self.author, self.date)
1239
+
1240
+
vllm/lib/python3.10/site-packages/py/_process/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ """ high-level sub-process handling """
vllm/lib/python3.10/site-packages/py/_process/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (206 Bytes). View file
 
vllm/lib/python3.10/site-packages/py/_process/__pycache__/cmdexec.cpython-310.pyc ADDED
Binary file (1.89 kB). View file
 
vllm/lib/python3.10/site-packages/py/_process/__pycache__/forkedfunc.cpython-310.pyc ADDED
Binary file (3.72 kB). View file
 
vllm/lib/python3.10/site-packages/py/_process/__pycache__/killproc.cpython-310.pyc ADDED
Binary file (992 Bytes). View file
 
vllm/lib/python3.10/site-packages/py/_process/cmdexec.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ import subprocess
3
+ import py
4
+ from subprocess import Popen, PIPE
5
+
6
def cmdexec(cmd):
    """ return unicode output of executing 'cmd' in a separate process.

    raise cmdexec.Error exception if the command failed.
    the exception will provide an 'err' attribute containing
    the error-output from the command.
    if the subprocess module does not provide a proper encoding/unicode strings
    sys.getdefaultencoding() will be used, if that does not exist, 'UTF-8'.
    """
    proc = subprocess.Popen(cmd, shell=True,
                            universal_newlines=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if sys.version_info[0] < 3:  # on py3 we get unicode strings, on py2 not
        try:
            fallback_encoding = sys.getdefaultencoding()  # jython may not have it
        except AttributeError:
            fallback_encoding = sys.stdout.encoding or 'UTF-8'
        out = unicode(out, proc.stdout.encoding or fallback_encoding)
        err = unicode(err, proc.stderr.encoding or fallback_encoding)
    status = proc.poll()
    if status:
        raise ExecutionFailed(status, status, cmd, out, err)
    return out
30
+
31
class ExecutionFailed(py.error.Error):
    """ Raised by cmdexec() when the command exits with a non-zero
    status; exposed publicly as ``py.process.cmdexec.Error``. """
    def __init__(self, status, systemstatus, cmd, out, err):
        Exception.__init__(self)
        self.status = status              # exit status of the command
        self.systemstatus = systemstatus  # same value as status here
        self.cmd = cmd                    # the command line that was run
        self.err = err                    # captured stderr text
        self.out = out                    # captured stdout text

    def __str__(self):
        return "ExecutionFailed: %d %s\n%s" %(self.status, self.cmd, self.err)

# export the exception under the name 'py.process.cmdexec.Error'
cmdexec.Error = ExecutionFailed
try:
    # rebrand the class so tracebacks show the public dotted name
    ExecutionFailed.__module__ = 'py.process.cmdexec'
    ExecutionFailed.__name__ = 'Error'
except (AttributeError, TypeError):
    pass
vllm/lib/python3.10/site-packages/py/_process/forkedfunc.py ADDED
@@ -0,0 +1,120 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ """
3
+ ForkedFunc provides a way to run a function in a forked process
4
+ and get at its return value, stdout and stderr output as well
5
+ as signals and exitstatusus.
6
+ """
7
+
8
+ import py
9
+ import os
10
+ import sys
11
+ import marshal
12
+
13
+
14
def get_unbuffered_io(fd, filename):
    """ Open *filename* for writing, point OS-level descriptor *fd* at
    it, and return a file-like wrapper that flushes after every write.
    """
    target = open(str(filename), "w")
    if fd != target.fileno():
        # redirect the given descriptor to the freshly opened file
        os.dup2(target.fileno(), fd)

    class _AutoFlush:
        def write(self, data):
            target.write(data)
            target.flush()

        def __getattr__(self, attrname):
            return getattr(target, attrname)

    return _AutoFlush()
25
+
26
+
27
class ForkedFunc:
    """ Run *fun(*args, **kwargs)* in a forked child process.

    The child's return value (marshalled), stdout and stderr are
    written to files in a private temp directory; waitfinish() collects
    them into a Result together with exit status and signal.
    """
    # exit status used by the child when *fun* raised an exception
    EXITSTATUS_EXCEPTION = 3


    def __init__(self, fun, args=None, kwargs=None, nice_level=0,
                 child_on_start=None, child_on_exit=None):
        if args is None:
            args = []
        if kwargs is None:
            kwargs = {}
        self.fun = fun
        self.args = args
        self.kwargs = kwargs
        # per-run temp files for the child's return value and output
        self.tempdir = tempdir = py.path.local.mkdtemp()
        self.RETVAL = tempdir.ensure('retval')
        self.STDOUT = tempdir.ensure('stdout')
        self.STDERR = tempdir.ensure('stderr')

        pid = os.fork()
        if pid:  # in parent process
            self.pid = pid
        else:  # in child process
            # pid None marks "we are the child" for __del__/cleanup
            self.pid = None
            self._child(nice_level, child_on_start, child_on_exit)

    def _child(self, nice_level, child_on_start, child_on_exit):
        # right now we need to call a function, but first we need to
        # map all IO that might happen
        sys.stdout = stdout = get_unbuffered_io(1, self.STDOUT)
        sys.stderr = stderr = get_unbuffered_io(2, self.STDERR)
        retvalf = self.RETVAL.open("wb")
        EXITSTATUS = 0
        try:
            if nice_level:
                os.nice(nice_level)
            try:
                if child_on_start is not None:
                    child_on_start()
                retval = self.fun(*self.args, **self.kwargs)
                retvalf.write(marshal.dumps(retval))
                if child_on_exit is not None:
                    child_on_exit()
            except:
                # any exception maps to a crash report on stderr plus a
                # distinctive exit status
                excinfo = py.code.ExceptionInfo()
                stderr.write(str(excinfo._getreprcrash()))
                EXITSTATUS = self.EXITSTATUS_EXCEPTION
        finally:
            stdout.close()
            stderr.close()
            retvalf.close()
        os.close(1)
        os.close(2)
        # _exit: never run parent-side cleanup/atexit in the child
        os._exit(EXITSTATUS)

    def waitfinish(self, waiter=os.waitpid):
        """ Wait for the child and return a Result with its exit
        status, signal, return value and captured output. """
        pid, systemstatus = waiter(self.pid, 0)
        if systemstatus:
            if os.WIFSIGNALED(systemstatus):
                # follow the shell convention: 128 + signal number
                exitstatus = os.WTERMSIG(systemstatus) + 128
            else:
                exitstatus = os.WEXITSTATUS(systemstatus)
        else:
            exitstatus = 0
        signal = systemstatus & 0x7f
        if not exitstatus and not signal:
            # clean exit: the marshalled return value is available
            retval = self.RETVAL.open('rb')
            try:
                retval_data = retval.read()
            finally:
                retval.close()
            retval = marshal.loads(retval_data)
        else:
            retval = None
        stdout = self.STDOUT.read()
        stderr = self.STDERR.read()
        self._removetemp()
        return Result(exitstatus, signal, retval, stdout, stderr)

    def _removetemp(self):
        if self.tempdir.check():
            self.tempdir.remove()

    def __del__(self):
        if self.pid is not None:  # only clean up in main process
            self._removetemp()
112
+
113
+
114
class Result(object):
    """ Outcome of a ForkedFunc run: exit status, terminating signal,
    marshalled return value, and the captured stdout/stderr text
    (exposed under the short names 'out' and 'err'). """

    def __init__(self, exitstatus, signal, retval, stdout, stderr):
        (self.exitstatus, self.signal, self.retval,
         self.out, self.err) = (exitstatus, signal, retval, stdout, stderr)
vllm/lib/python3.10/site-packages/py/_process/killproc.py ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import py
import os, sys

# Choose a platform-appropriate dokill() implementation at import time.
# On Windows (including Jython's os._name == 'nt') prefer a ctypes-based
# TerminateProcess call, falling back to the 'taskkill' command-line
# tool when ctypes is unavailable; on POSIX, send SIGTERM (15).
if sys.platform == "win32" or getattr(os, '_name', '') == 'nt':
    try:
        import ctypes
    except ImportError:
        def dokill(pid):
            py.process.cmdexec("taskkill /F /PID %d" %(pid,))
    else:
        def dokill(pid):
            PROCESS_TERMINATE = 1
            handle = ctypes.windll.kernel32.OpenProcess(
                        PROCESS_TERMINATE, False, pid)
            ctypes.windll.kernel32.TerminateProcess(handle, -1)
            ctypes.windll.kernel32.CloseHandle(handle)
else:
    def dokill(pid):
        os.kill(pid, 15)

def kill(pid):
    """ kill process by id. """
    dokill(pid)
vllm/lib/python3.10/site-packages/py/_vendored_packages/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (171 Bytes). View file
 
vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/LICENSE ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ Permission is hereby granted, free of charge, to any person obtaining a copy
3
+ of this software and associated documentation files (the "Software"), to deal
4
+ in the Software without restriction, including without limitation the rights
5
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
6
+ copies of the Software, and to permit persons to whom the Software is
7
+ furnished to do so, subject to the following conditions:
8
+
9
+ The above copyright notice and this permission notice shall be included in all
10
+ copies or substantial portions of the Software.
11
+
12
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
13
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
14
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
15
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
16
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
17
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
18
+ SOFTWARE.
vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/METADATA ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: apipkg
3
+ Version: 2.0.0
4
+ Summary: apipkg: namespace control and lazy-import mechanism
5
+ Home-page: https://github.com/pytest-dev/apipkg
6
+ Author: holger krekel
7
+ Maintainer: Ronny Pfannschmidt
8
+ Maintainer-email: opensource@ronnypfannschmidt.de
9
+ License: MIT
10
+ Platform: unix
11
+ Platform: linux
12
+ Platform: osx
13
+ Platform: cygwin
14
+ Platform: win32
15
+ Classifier: Development Status :: 4 - Beta
16
+ Classifier: Intended Audience :: Developers
17
+ Classifier: License :: OSI Approved :: MIT License
18
+ Classifier: Operating System :: MacOS :: MacOS X
19
+ Classifier: Operating System :: Microsoft :: Windows
20
+ Classifier: Operating System :: POSIX
21
+ Classifier: Programming Language :: Python
22
+ Classifier: Programming Language :: Python :: 2
23
+ Classifier: Programming Language :: Python :: 2.7
24
+ Classifier: Programming Language :: Python :: 3
25
+ Classifier: Programming Language :: Python :: 3.4
26
+ Classifier: Programming Language :: Python :: 3.5
27
+ Classifier: Programming Language :: Python :: 3.6
28
+ Classifier: Programming Language :: Python :: 3.7
29
+ Classifier: Programming Language :: Python :: 3.8
30
+ Classifier: Programming Language :: Python :: 3.9
31
+ Classifier: Programming Language :: Python :: Implementation :: CPython
32
+ Classifier: Topic :: Software Development :: Libraries
33
+ Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7
34
+ Description-Content-Type: text/x-rst
35
+ License-File: LICENSE
36
+
37
+ Welcome to apipkg !
38
+ -------------------
39
+
40
+ With apipkg you can control the exported namespace of a Python package and
41
+ greatly reduce the number of imports for your users.
42
+ It is a `small pure Python module`_ that works on CPython 2.7 and 3.4+,
43
+ Jython and PyPy. It cooperates well with Python's ``help()`` system,
44
+ custom importers (PEP302) and common command-line completion tools.
45
+
46
+ Usage is very simple: you can require 'apipkg' as a dependency or you
47
+ can copy paste the ~200 lines of code into your project.
48
+
49
+
50
+ Tutorial example
51
+ -------------------
52
+
53
+ Here is a simple ``mypkg`` package that specifies one namespace
54
+ and exports two objects imported from different modules::
55
+
56
+
57
+ # mypkg/__init__.py
58
+ import apipkg
59
+ apipkg.initpkg(__name__, {
60
+ 'path': {
61
+ 'Class1': "_mypkg.somemodule:Class1",
62
+ 'clsattr': "_mypkg.othermodule:Class2.attr",
63
+ }
64
+ }
65
+
66
+ The package is initialized with a dictionary as namespace.
67
+
68
+ You need to create a ``_mypkg`` package with a ``somemodule.py``
69
+ and ``othermodule.py`` containing the respective classes.
70
+ The ``_mypkg`` is not special - it's a completely
71
+ regular Python package.
72
+
73
+ Namespace dictionaries contain ``name: value`` mappings
74
+ where the value may be another namespace dictionary or
75
+ a string specifying an import location. On accessing
76
 + a namespace attribute an import will be performed::
77
+
78
+ >>> import mypkg
79
+ >>> mypkg.path
80
+ <ApiModule 'mypkg.path'>
81
+ >>> mypkg.path.Class1 # '_mypkg.somemodule' gets imported now
82
+ <class _mypkg.somemodule.Class1 at 0xb7d428fc>
83
+ >>> mypkg.path.clsattr # '_mypkg.othermodule' gets imported now
84
+ 4 # the value of _mypkg.othermodule.Class2.attr
85
+
86
+ The ``mypkg.path`` namespace and its two entries are
87
+ loaded when they are accessed. This means:
88
+
89
+ * lazy loading - only what is actually needed is ever loaded
90
+
91
+ * only the root "mypkg" ever needs to be imported to get
92
+ access to the complete functionality
93
+
94
+ * the underlying modules are also accessible, for example::
95
+
96
+ from mypkg.sub import Class1
97
+
98
+
99
+ Including apipkg in your package
100
+ --------------------------------------
101
+
102
+ If you don't want to add an ``apipkg`` dependency to your package you
103
+ can copy the `apipkg.py`_ file somewhere to your own package,
104
+ for example ``_mypkg/apipkg.py`` in the above example. You
105
+ then import the ``initpkg`` function from that new place and
106
+ are good to go.
107
+
108
+ .. _`small pure Python module`:
109
+ .. _`apipkg.py`: https://github.com/pytest-dev/apipkg/blob/master/src/apipkg/__init__.py
110
+
111
+ Feedback?
112
+ -----------------------
113
+
114
+ If you have questions you are welcome to
115
+
116
+ * join the **#pytest** channel on irc.libera.chat_
117
+ (using an IRC client, via webchat_, or via Matrix_).
118
+ * create an issue on the bugtracker_
119
+
120
+ .. _irc.libera.chat: ircs://irc.libera.chat:6697/#pytest
121
+ .. _webchat: https://web.libera.chat/#pytest
122
+ .. _matrix: https://matrix.to/#/%23pytest:libera.chat
123
+ .. _bugtracker: https://github.com/pytest-dev/apipkg/issues
124
+
125
+
vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/RECORD ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ apipkg-2.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2
+ apipkg-2.0.0.dist-info/LICENSE,sha256=6J7tEHTTqUMZi6E5uAhE9bRFuGC7p0qK6twGEFZhZOo,1054
3
+ apipkg-2.0.0.dist-info/METADATA,sha256=GqNwkxraK5UTxObLVXTLc2UqktOPwZnKqdk2ThzHX0A,4292
4
+ apipkg-2.0.0.dist-info/RECORD,,
5
+ apipkg-2.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ apipkg-2.0.0.dist-info/WHEEL,sha256=WzZ8cwjh8l0jtULNjYq1Hpr-WCqCRgPr--TX4P5I1Wo,110
7
+ apipkg-2.0.0.dist-info/top_level.txt,sha256=3TGS6nmN7kjxhUK4LpPCB3QkQI34QYGrT0ZQGWajoZ8,7
8
+ apipkg/__init__.py,sha256=gpbD3O57S9f-LsO2e-XwI6IGISayicfnCq3B5y_8frg,6978
9
+ apipkg/__pycache__/__init__.cpython-39.pyc,,
10
+ apipkg/__pycache__/version.cpython-39.pyc,,
11
+ apipkg/version.py,sha256=bgZFg-f3UKhgE-z2w8RoFrwqRBzJBZkM4_jKFiYB9eU,142
vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/REQUESTED ADDED
File without changes
vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/WHEEL ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: bdist_wheel (0.37.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py2-none-any
5
+ Tag: py3-none-any
6
+
vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg-2.0.0.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ apipkg
vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg/__init__.py ADDED
@@ -0,0 +1,217 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ apipkg: control the exported namespace of a Python package.
3
+
4
+ see https://pypi.python.org/pypi/apipkg
5
+
6
+ (c) holger krekel, 2009 - MIT license
7
+ """
8
+ import os
9
+ import sys
10
+ from types import ModuleType
11
+
12
+ from .version import version as __version__ # NOQA:F401
13
+
14
+
15
+ def _py_abspath(path):
16
+ """
17
+ special version of abspath
18
+ that will leave paths from jython jars alone
19
+ """
20
+ if path.startswith("__pyclasspath__"):
21
+
22
+ return path
23
+ else:
24
+ return os.path.abspath(path)
25
+
26
+
27
+ def distribution_version(name):
28
+ """try to get the version of the named distribution,
29
+ returs None on failure"""
30
+ from pkg_resources import get_distribution, DistributionNotFound
31
+
32
+ try:
33
+ dist = get_distribution(name)
34
+ except DistributionNotFound:
35
+ pass
36
+ else:
37
+ return dist.version
38
+
39
+
40
+ def initpkg(pkgname, exportdefs, attr=None, eager=False):
41
+ """ initialize given package from the export definitions. """
42
+ attr = attr or {}
43
+ oldmod = sys.modules.get(pkgname)
44
+ d = {}
45
+ f = getattr(oldmod, "__file__", None)
46
+ if f:
47
+ f = _py_abspath(f)
48
+ d["__file__"] = f
49
+ if hasattr(oldmod, "__version__"):
50
+ d["__version__"] = oldmod.__version__
51
+ if hasattr(oldmod, "__loader__"):
52
+ d["__loader__"] = oldmod.__loader__
53
+ if hasattr(oldmod, "__path__"):
54
+ d["__path__"] = [_py_abspath(p) for p in oldmod.__path__]
55
+ if hasattr(oldmod, "__package__"):
56
+ d["__package__"] = oldmod.__package__
57
+ if "__doc__" not in exportdefs and getattr(oldmod, "__doc__", None):
58
+ d["__doc__"] = oldmod.__doc__
59
+ d["__spec__"] = getattr(oldmod, "__spec__", None)
60
+ d.update(attr)
61
+ if hasattr(oldmod, "__dict__"):
62
+ oldmod.__dict__.update(d)
63
+ mod = ApiModule(pkgname, exportdefs, implprefix=pkgname, attr=d)
64
+ sys.modules[pkgname] = mod
65
+ # eagerload in bypthon to avoid their monkeypatching breaking packages
66
+ if "bpython" in sys.modules or eager:
67
+ for module in list(sys.modules.values()):
68
+ if isinstance(module, ApiModule):
69
+ module.__dict__
70
+ return mod
71
+
72
+
73
+ def importobj(modpath, attrname):
74
+ """imports a module, then resolves the attrname on it"""
75
+ module = __import__(modpath, None, None, ["__doc__"])
76
+ if not attrname:
77
+ return module
78
+
79
+ retval = module
80
+ names = attrname.split(".")
81
+ for x in names:
82
+ retval = getattr(retval, x)
83
+ return retval
84
+
85
+
86
+ class ApiModule(ModuleType):
87
+ """the magical lazy-loading module standing"""
88
+
89
+ def __docget(self):
90
+ try:
91
+ return self.__doc
92
+ except AttributeError:
93
+ if "__doc__" in self.__map__:
94
+ return self.__makeattr("__doc__")
95
+
96
+ def __docset(self, value):
97
+ self.__doc = value
98
+
99
+ __doc__ = property(__docget, __docset)
100
+
101
+ def __init__(self, name, importspec, implprefix=None, attr=None):
102
+ self.__name__ = name
103
+ self.__all__ = [x for x in importspec if x != "__onfirstaccess__"]
104
+ self.__map__ = {}
105
+ self.__implprefix__ = implprefix or name
106
+ if attr:
107
+ for name, val in attr.items():
108
+ # print "setting", self.__name__, name, val
109
+ setattr(self, name, val)
110
+ for name, importspec in importspec.items():
111
+ if isinstance(importspec, dict):
112
+ subname = "{}.{}".format(self.__name__, name)
113
+ apimod = ApiModule(subname, importspec, implprefix)
114
+ sys.modules[subname] = apimod
115
+ setattr(self, name, apimod)
116
+ else:
117
+ parts = importspec.split(":")
118
+ modpath = parts.pop(0)
119
+ attrname = parts and parts[0] or ""
120
+ if modpath[0] == ".":
121
+ modpath = implprefix + modpath
122
+
123
+ if not attrname:
124
+ subname = "{}.{}".format(self.__name__, name)
125
+ apimod = AliasModule(subname, modpath)
126
+ sys.modules[subname] = apimod
127
+ if "." not in name:
128
+ setattr(self, name, apimod)
129
+ else:
130
+ self.__map__[name] = (modpath, attrname)
131
+
132
+ def __repr__(self):
133
+ repr_list = []
134
+ if hasattr(self, "__version__"):
135
+ repr_list.append("version=" + repr(self.__version__))
136
+ if hasattr(self, "__file__"):
137
+ repr_list.append("from " + repr(self.__file__))
138
+ if repr_list:
139
+ return "<ApiModule {!r} {}>".format(self.__name__, " ".join(repr_list))
140
+ return "<ApiModule {!r}>".format(self.__name__)
141
+
142
+ def __makeattr(self, name):
143
+ """lazily compute value for name or raise AttributeError if unknown."""
144
+ # print "makeattr", self.__name__, name
145
+ target = None
146
+ if "__onfirstaccess__" in self.__map__:
147
+ target = self.__map__.pop("__onfirstaccess__")
148
+ importobj(*target)()
149
+ try:
150
+ modpath, attrname = self.__map__[name]
151
+ except KeyError:
152
+ if target is not None and name != "__onfirstaccess__":
153
+ # retry, onfirstaccess might have set attrs
154
+ return getattr(self, name)
155
+ raise AttributeError(name)
156
+ else:
157
+ result = importobj(modpath, attrname)
158
+ setattr(self, name, result)
159
+ try:
160
+ del self.__map__[name]
161
+ except KeyError:
162
+ pass # in a recursive-import situation a double-del can happen
163
+ return result
164
+
165
+ __getattr__ = __makeattr
166
+
167
+ @property
168
+ def __dict__(self):
169
+ # force all the content of the module
170
+ # to be loaded when __dict__ is read
171
+ dictdescr = ModuleType.__dict__["__dict__"]
172
+ dict = dictdescr.__get__(self)
173
+ if dict is not None:
174
+ hasattr(self, "some")
175
+ for name in self.__all__:
176
+ try:
177
+ self.__makeattr(name)
178
+ except AttributeError:
179
+ pass
180
+ return dict
181
+
182
+
183
+ def AliasModule(modname, modpath, attrname=None):
184
+ mod = []
185
+
186
+ def getmod():
187
+ if not mod:
188
+ x = importobj(modpath, None)
189
+ if attrname is not None:
190
+ x = getattr(x, attrname)
191
+ mod.append(x)
192
+ return mod[0]
193
+
194
+ x = modpath + ("." + attrname if attrname else "")
195
+ repr_result = "<AliasModule {!r} for {!r}>".format(modname, x)
196
+
197
+ class AliasModule(ModuleType):
198
+ def __repr__(self):
199
+ return repr_result
200
+
201
+ def __getattribute__(self, name):
202
+ try:
203
+ return getattr(getmod(), name)
204
+ except ImportError:
205
+ if modpath == "pytest" and attrname is None:
206
+ # hack for pylibs py.test
207
+ return None
208
+ else:
209
+ raise
210
+
211
+ def __setattr__(self, name, value):
212
+ setattr(getmod(), name, value)
213
+
214
+ def __delattr__(self, name):
215
+ delattr(getmod(), name)
216
+
217
+ return AliasModule(str(modname))
vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (6.31 kB). View file
 
vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg/__pycache__/version.cpython-310.pyc ADDED
Binary file (232 Bytes). View file
 
vllm/lib/python3.10/site-packages/py/_vendored_packages/apipkg/version.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ # coding: utf-8
2
+ # file generated by setuptools_scm
3
+ # don't change, don't track in version control
4
+ version = '2.0.0'
5
+ version_tuple = (2, 0, 0)
vllm/lib/python3.10/site-packages/py/_vendored_packages/iniconfig-1.1.1.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
vllm/lib/python3.10/site-packages/py/_vendored_packages/iniconfig-1.1.1.dist-info/RECORD ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ iniconfig-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2
+ iniconfig-1.1.1.dist-info/LICENSE,sha256=KvaAw570k_uCgwNW0dPfGstaBgM8ui3sehniHKp3qGY,1061
3
+ iniconfig-1.1.1.dist-info/METADATA,sha256=_4-oFKpRXuZv5rzepScpXRwhq6DzqsgbnA5ZpgMUMcs,2405
4
+ iniconfig-1.1.1.dist-info/RECORD,,
5
+ iniconfig-1.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ iniconfig-1.1.1.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
7
+ iniconfig-1.1.1.dist-info/top_level.txt,sha256=7KfM0fugdlToj9UW7enKXk2HYALQD8qHiyKtjhSzgN8,10
8
+ iniconfig/__init__.py,sha256=-pBe5AF_6aAwo1CxJQ8i_zJq6ejc6IxHta7qk2tNJhY,5208
9
+ iniconfig/__init__.pyi,sha256=-4KOctzq28ohRmTZsqlH6aylyFqsNKxYqtk1dteypi4,1205
10
+ iniconfig/__pycache__/__init__.cpython-39.pyc,,
11
+ iniconfig/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0