arcticlatent commited on
Commit
256be03
·
verified ·
1 Parent(s): 2bbce8d

Upload folder using huggingface_hub

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +17 -0
  2. .venv/.gitignore +1 -0
  3. .venv/.lock +0 -0
  4. .venv/CACHEDIR.TAG +1 -0
  5. .venv/Lib/site-packages/__pycache__/_virtualenv.cpython-312.pyc +0 -0
  6. .venv/Lib/site-packages/__pycache__/typing_extensions.cpython-312.pyc +3 -0
  7. .venv/Lib/site-packages/_virtualenv.pth +3 -0
  8. .venv/Lib/site-packages/_virtualenv.py +101 -0
  9. .venv/Lib/site-packages/_yaml/__init__.py +33 -0
  10. .venv/Lib/site-packages/anyio-4.12.1.dist-info/INSTALLER +1 -0
  11. .venv/Lib/site-packages/anyio-4.12.1.dist-info/METADATA +96 -0
  12. .venv/Lib/site-packages/anyio-4.12.1.dist-info/RECORD +51 -0
  13. .venv/Lib/site-packages/anyio-4.12.1.dist-info/REQUESTED +0 -0
  14. .venv/Lib/site-packages/anyio-4.12.1.dist-info/WHEEL +5 -0
  15. .venv/Lib/site-packages/anyio-4.12.1.dist-info/entry_points.txt +2 -0
  16. .venv/Lib/site-packages/anyio-4.12.1.dist-info/licenses/LICENSE +20 -0
  17. .venv/Lib/site-packages/anyio-4.12.1.dist-info/top_level.txt +1 -0
  18. .venv/Lib/site-packages/anyio/__init__.py +111 -0
  19. .venv/Lib/site-packages/anyio/__pycache__/__init__.cpython-312.pyc +0 -0
  20. .venv/Lib/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc +0 -0
  21. .venv/Lib/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc +0 -0
  22. .venv/Lib/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc +0 -0
  23. .venv/Lib/site-packages/anyio/_backends/__init__.py +0 -0
  24. .venv/Lib/site-packages/anyio/_backends/_asyncio.py +0 -0
  25. .venv/Lib/site-packages/anyio/_backends/_trio.py +1346 -0
  26. .venv/Lib/site-packages/anyio/_core/__init__.py +0 -0
  27. .venv/Lib/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc +0 -0
  28. .venv/Lib/site-packages/anyio/_core/__pycache__/_contextmanagers.cpython-312.pyc +0 -0
  29. .venv/Lib/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc +0 -0
  30. .venv/Lib/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc +0 -0
  31. .venv/Lib/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc +0 -0
  32. .venv/Lib/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc +0 -0
  33. .venv/Lib/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc +0 -0
  34. .venv/Lib/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc +0 -0
  35. .venv/Lib/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc +0 -0
  36. .venv/Lib/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc +0 -0
  37. .venv/Lib/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc +0 -0
  38. .venv/Lib/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc +0 -0
  39. .venv/Lib/site-packages/anyio/_core/__pycache__/_tempfile.cpython-312.pyc +0 -0
  40. .venv/Lib/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc +0 -0
  41. .venv/Lib/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc +0 -0
  42. .venv/Lib/site-packages/anyio/_core/_asyncio_selector_thread.py +167 -0
  43. .venv/Lib/site-packages/anyio/_core/_contextmanagers.py +200 -0
  44. .venv/Lib/site-packages/anyio/_core/_eventloop.py +234 -0
  45. .venv/Lib/site-packages/anyio/_core/_exceptions.py +156 -0
  46. .venv/Lib/site-packages/anyio/_core/_fileio.py +797 -0
  47. .venv/Lib/site-packages/anyio/_core/_resources.py +18 -0
  48. .venv/Lib/site-packages/anyio/_core/_signals.py +29 -0
  49. .venv/Lib/site-packages/anyio/_core/_sockets.py +1003 -0
  50. .venv/Lib/site-packages/anyio/_core/_streams.py +52 -0
.gitattributes CHANGED
@@ -33,3 +33,20 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ .venv/Lib/site-packages/__pycache__/typing_extensions.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
37
+ .venv/Lib/site-packages/click/__pycache__/core.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
38
+ .venv/Lib/site-packages/hf_xet/hf_xet.pyd filter=lfs diff=lfs merge=lfs -text
39
+ .venv/Lib/site-packages/huggingface_hub/__pycache__/hf_api.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
40
+ .venv/Lib/site-packages/idna/__pycache__/idnadata.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
41
+ .venv/Lib/site-packages/yaml/_yaml.cp312-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text
42
+ .venv/Scripts/python.exe filter=lfs diff=lfs merge=lfs -text
43
+ .venv/Scripts/pythonw.exe filter=lfs diff=lfs merge=lfs -text
44
+ FlashAttention/flash_attn-2.8.3+cu128torch2.7.0cxx11abiFALSE-cp312-cp312-win_amd64.whl filter=lfs diff=lfs merge=lfs -text
45
+ FlashAttention/flash_attn-2.8.3+cu128torch2.8.0cxx11abiFALSE-cp312-cp312-win_amd64.whl filter=lfs diff=lfs merge=lfs -text
46
+ FlashAttention/flash_attn-2.8.3+cu130torch2.9.1cxx11abiTRUE-cp312-cp312-win_amd64.whl filter=lfs diff=lfs merge=lfs -text
47
+ SageAttention/sageattention-2.2.0+cu128torch2.7.1.post3-cp39-abi3-win_amd64.whl filter=lfs diff=lfs merge=lfs -text
48
+ SageAttention/sageattention-2.2.0+cu128torch2.8.0.post3-cp39-abi3-win_amd64.whl filter=lfs diff=lfs merge=lfs -text
49
+ SageAttention/sageattention-2.2.0+cu130torch2.9.0andhigher.post4-cp39-abi3-win_amd64.whl filter=lfs diff=lfs merge=lfs -text
50
+ SageAttention3/sageattn3-1.0.0+cu128torch271-cp312-cp312-win_amd64.whl filter=lfs diff=lfs merge=lfs -text
51
+ SageAttention3/sageattn3-1.0.0+cu128torch280-cp312-cp312-win_amd64.whl filter=lfs diff=lfs merge=lfs -text
52
+ SageAttention3/sageattn3-1.0.0+cu130torch291-cp312-cp312-win_amd64.whl filter=lfs diff=lfs merge=lfs -text
.venv/.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ *
.venv/.lock ADDED
File without changes
.venv/CACHEDIR.TAG ADDED
@@ -0,0 +1 @@
 
 
1
+ Signature: 8a477f597d28d172789f06886806bc55
.venv/Lib/site-packages/__pycache__/_virtualenv.cpython-312.pyc ADDED
Binary file (4.1 kB). View file
 
.venv/Lib/site-packages/__pycache__/typing_extensions.cpython-312.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:93c45dfd5dd226cb78f3273990262229c812510c28a8d6c2353b03b8aaee1e72
3
+ size 163631
.venv/Lib/site-packages/_virtualenv.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:69ac3d8f27e679c81b94ab30b3b56e9cd138219b1ba94a1fa3606d5a76a1433d
3
+ size 18
.venv/Lib/site-packages/_virtualenv.py ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Patches that are applied at runtime to the virtual environment."""
2
+
3
+ import os
4
+ import sys
5
+
6
+ VIRTUALENV_PATCH_FILE = os.path.join(__file__)
7
+
8
+
9
+ def patch_dist(dist):
10
+ """
11
+ Distutils allows user to configure some arguments via a configuration file:
12
+ https://docs.python.org/3.11/install/index.html#distutils-configuration-files.
13
+
14
+ Some of this arguments though don't make sense in context of the virtual environment files, let's fix them up.
15
+ """ # noqa: D205
16
+ # we cannot allow some install config as that would get packages installed outside of the virtual environment
17
+ old_parse_config_files = dist.Distribution.parse_config_files
18
+
19
+ def parse_config_files(self, *args, **kwargs):
20
+ result = old_parse_config_files(self, *args, **kwargs)
21
+ install = self.get_option_dict("install")
22
+
23
+ if "prefix" in install: # the prefix governs where to install the libraries
24
+ install["prefix"] = VIRTUALENV_PATCH_FILE, os.path.abspath(sys.prefix)
25
+ for base in ("purelib", "platlib", "headers", "scripts", "data"):
26
+ key = f"install_{base}"
27
+ if key in install: # do not allow global configs to hijack venv paths
28
+ install.pop(key, None)
29
+ return result
30
+
31
+ dist.Distribution.parse_config_files = parse_config_files
32
+
33
+
34
+ # Import hook that patches some modules to ignore configuration values that break package installation in case
35
+ # of virtual environments.
36
+ _DISTUTILS_PATCH = "distutils.dist", "setuptools.dist"
37
+ # https://docs.python.org/3/library/importlib.html#setting-up-an-importer
38
+
39
+
40
+ class _Finder:
41
+ """A meta path finder that allows patching the imported distutils modules."""
42
+
43
+ fullname = None
44
+
45
+ # lock[0] is threading.Lock(), but initialized lazily to avoid importing threading very early at startup,
46
+ # because there are gevent-based applications that need to be first to import threading by themselves.
47
+ # See https://github.com/pypa/virtualenv/issues/1895 for details.
48
+ lock = [] # noqa: RUF012
49
+
50
+ def find_spec(self, fullname, path, target=None): # noqa: ARG002
51
+ if fullname in _DISTUTILS_PATCH and self.fullname is None:
52
+ # initialize lock[0] lazily
53
+ if len(self.lock) == 0:
54
+ import threading
55
+
56
+ lock = threading.Lock()
57
+ # there is possibility that two threads T1 and T2 are simultaneously running into find_spec,
58
+ # observing .lock as empty, and further going into hereby initialization. However due to the GIL,
59
+ # list.append() operation is atomic and this way only one of the threads will "win" to put the lock
60
+ # - that every thread will use - into .lock[0].
61
+ # https://docs.python.org/3/faq/library.html#what-kinds-of-global-value-mutation-are-thread-safe
62
+ self.lock.append(lock)
63
+
64
+ from functools import partial
65
+ from importlib.util import find_spec
66
+
67
+ with self.lock[0]:
68
+ self.fullname = fullname
69
+ try:
70
+ spec = find_spec(fullname, path)
71
+ if spec is not None:
72
+ # https://www.python.org/dev/peps/pep-0451/#how-loading-will-work
73
+ is_new_api = hasattr(spec.loader, "exec_module")
74
+ func_name = "exec_module" if is_new_api else "load_module"
75
+ old = getattr(spec.loader, func_name)
76
+ func = self.exec_module if is_new_api else self.load_module
77
+ if old is not func:
78
+ try: # noqa: SIM105
79
+ setattr(spec.loader, func_name, partial(func, old))
80
+ except AttributeError:
81
+ pass # C-Extension loaders are r/o such as zipimporter with <3.7
82
+ return spec
83
+ finally:
84
+ self.fullname = None
85
+ return None
86
+
87
+ @staticmethod
88
+ def exec_module(old, module):
89
+ old(module)
90
+ if module.__name__ in _DISTUTILS_PATCH:
91
+ patch_dist(module)
92
+
93
+ @staticmethod
94
+ def load_module(old, name):
95
+ module = old(name)
96
+ if module.__name__ in _DISTUTILS_PATCH:
97
+ patch_dist(module)
98
+ return module
99
+
100
+
101
+ sys.meta_path.insert(0, _Finder())
.venv/Lib/site-packages/_yaml/__init__.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This is a stub package designed to roughly emulate the _yaml
2
+ # extension module, which previously existed as a standalone module
3
+ # and has been moved into the `yaml` package namespace.
4
+ # It does not perfectly mimic its old counterpart, but should get
5
+ # close enough for anyone who's relying on it even when they shouldn't.
6
+ import yaml
7
+
8
+ # in some circumstances, the yaml module we imoprted may be from a different version, so we need
9
+ # to tread carefully when poking at it here (it may not have the attributes we expect)
10
+ if not getattr(yaml, '__with_libyaml__', False):
11
+ from sys import version_info
12
+
13
+ exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
14
+ raise exc("No module named '_yaml'")
15
+ else:
16
+ from yaml._yaml import *
17
+ import warnings
18
+ warnings.warn(
19
+ 'The _yaml extension module is now located at yaml._yaml'
20
+ ' and its location is subject to change. To use the'
21
+ ' LibYAML-based parser and emitter, import from `yaml`:'
22
+ ' `from yaml import CLoader as Loader, CDumper as Dumper`.',
23
+ DeprecationWarning
24
+ )
25
+ del warnings
26
+ # Don't `del yaml` here because yaml is actually an existing
27
+ # namespace member of _yaml.
28
+
29
+ __name__ = '_yaml'
30
+ # If the module is top-level (i.e. not a part of any specific package)
31
+ # then the attribute should be set to ''.
32
+ # https://docs.python.org/3.8/library/types.html
33
+ __package__ = ''
.venv/Lib/site-packages/anyio-4.12.1.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ uv
.venv/Lib/site-packages/anyio-4.12.1.dist-info/METADATA ADDED
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.4
2
+ Name: anyio
3
+ Version: 4.12.1
4
+ Summary: High-level concurrency and networking framework on top of asyncio or Trio
5
+ Author-email: Alex Grönholm <alex.gronholm@nextday.fi>
6
+ License-Expression: MIT
7
+ Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/
8
+ Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html
9
+ Project-URL: Source code, https://github.com/agronholm/anyio
10
+ Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues
11
+ Classifier: Development Status :: 5 - Production/Stable
12
+ Classifier: Intended Audience :: Developers
13
+ Classifier: Framework :: AnyIO
14
+ Classifier: Typing :: Typed
15
+ Classifier: Programming Language :: Python
16
+ Classifier: Programming Language :: Python :: 3
17
+ Classifier: Programming Language :: Python :: 3.9
18
+ Classifier: Programming Language :: Python :: 3.10
19
+ Classifier: Programming Language :: Python :: 3.11
20
+ Classifier: Programming Language :: Python :: 3.12
21
+ Classifier: Programming Language :: Python :: 3.13
22
+ Classifier: Programming Language :: Python :: 3.14
23
+ Requires-Python: >=3.9
24
+ Description-Content-Type: text/x-rst
25
+ License-File: LICENSE
26
+ Requires-Dist: exceptiongroup>=1.0.2; python_version < "3.11"
27
+ Requires-Dist: idna>=2.8
28
+ Requires-Dist: typing_extensions>=4.5; python_version < "3.13"
29
+ Provides-Extra: trio
30
+ Requires-Dist: trio>=0.32.0; python_version >= "3.10" and extra == "trio"
31
+ Requires-Dist: trio>=0.31.0; python_version < "3.10" and extra == "trio"
32
+ Dynamic: license-file
33
+
34
+ .. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg
35
+ :target: https://github.com/agronholm/anyio/actions/workflows/test.yml
36
+ :alt: Build Status
37
+ .. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master
38
+ :target: https://coveralls.io/github/agronholm/anyio?branch=master
39
+ :alt: Code Coverage
40
+ .. image:: https://readthedocs.org/projects/anyio/badge/?version=latest
41
+ :target: https://anyio.readthedocs.io/en/latest/?badge=latest
42
+ :alt: Documentation
43
+ .. image:: https://badges.gitter.im/gitterHQ/gitter.svg
44
+ :target: https://gitter.im/python-trio/AnyIO
45
+ :alt: Gitter chat
46
+
47
+ AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or
48
+ Trio_. It implements Trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony
49
+ with the native SC of Trio itself.
50
+
51
+ Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or
52
+ Trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full
53
+ refactoring necessary. It will blend in with the native libraries of your chosen backend.
54
+
55
+ To find out why you might want to use AnyIO's APIs instead of asyncio's, you can read about it
56
+ `here <https://anyio.readthedocs.io/en/stable/why.html>`_.
57
+
58
+ Documentation
59
+ -------------
60
+
61
+ View full documentation at: https://anyio.readthedocs.io/
62
+
63
+ Features
64
+ --------
65
+
66
+ AnyIO offers the following functionality:
67
+
68
+ * Task groups (nurseries_ in trio terminology)
69
+ * High-level networking (TCP, UDP and UNIX sockets)
70
+
71
+ * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python
72
+ 3.8)
73
+ * async/await style UDP sockets (unlike asyncio where you still have to use Transports and
74
+ Protocols)
75
+
76
+ * A versatile API for byte streams and object streams
77
+ * Inter-task synchronization and communication (locks, conditions, events, semaphores, object
78
+ streams)
79
+ * Worker threads
80
+ * Subprocesses
81
+ * Subinterpreter support for code parallelization (on Python 3.13 and later)
82
+ * Asynchronous file I/O (using worker threads)
83
+ * Signal handling
84
+ * Asynchronous version of the functools_ module
85
+
86
+ AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures.
87
+ It even works with the popular Hypothesis_ library.
88
+
89
+ .. _asyncio: https://docs.python.org/3/library/asyncio.html
90
+ .. _Trio: https://github.com/python-trio/trio
91
+ .. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
92
+ .. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning
93
+ .. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs
94
+ .. _pytest: https://docs.pytest.org/en/latest/
95
+ .. _functools: https://docs.python.org/3/library/functools.html
96
+ .. _Hypothesis: https://hypothesis.works/
.venv/Lib/site-packages/anyio-4.12.1.dist-info/RECORD ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ anyio-4.12.1.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
2
+ anyio-4.12.1.dist-info/METADATA,sha256=DfiDab9Tmmcfy802lOLTMEHJQShkOSbopCwqCYbLuJk,4277
3
+ anyio-4.12.1.dist-info/RECORD,,
4
+ anyio-4.12.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
+ anyio-4.12.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
6
+ anyio-4.12.1.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39
7
+ anyio-4.12.1.dist-info/licenses/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081
8
+ anyio-4.12.1.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6
9
+ anyio/__init__.py,sha256=7iDVqMUprUuKNY91FuoKqayAhR-OY136YDPI6P78HHk,6170
10
+ anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
+ anyio/_backends/_asyncio.py,sha256=xG6qv60mgGnL0mK82dxjH2b8hlkMlJ-x2BqIq3qv70Y,98863
12
+ anyio/_backends/_trio.py,sha256=30Rctb7lm8g63ZHljVPVnj5aH-uK6oQvphjwUBoAzuI,41456
13
+ anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
14
+ anyio/_core/_asyncio_selector_thread.py,sha256=2PdxFM3cs02Kp6BSppbvmRT7q7asreTW5FgBxEsflBo,5626
15
+ anyio/_core/_contextmanagers.py,sha256=YInBCabiEeS-UaP_Jdxa1CaFC71ETPW8HZTHIM8Rsc8,7215
16
+ anyio/_core/_eventloop.py,sha256=c2EdcBX-xnKwxPcC4Pjn3_qG9I-x4IWFO2R9RqCGjM4,6448
17
+ anyio/_core/_exceptions.py,sha256=Y3aq-Wxd7Q2HqwSg7nZPvRsHEuGazv_qeet6gqEBdPk,4407
18
+ anyio/_core/_fileio.py,sha256=uc7t10Vb-If7GbdWM_zFf-ajUe6uek63fSt7IBLlZW0,25731
19
+ anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435
20
+ anyio/_core/_signals.py,sha256=mjTBB2hTKNPRlU0IhnijeQedpWOGERDiMjSlJQsFrug,1016
21
+ anyio/_core/_sockets.py,sha256=RBXHcUqZt5gg_-OOfgHVv8uq2FSKk1uVUzTdpjBoI1o,34977
22
+ anyio/_core/_streams.py,sha256=FczFwIgDpnkK0bODWJXMpsUJYdvAD04kaUaGzJU8DK0,1806
23
+ anyio/_core/_subprocesses.py,sha256=EXm5igL7dj55iYkPlbYVAqtbqxJxjU-6OndSTIx9SRg,8047
24
+ anyio/_core/_synchronization.py,sha256=MgVVqFzvt580tHC31LiOcq1G6aryut--xRG4Ff8KwxQ,20869
25
+ anyio/_core/_tasks.py,sha256=pVB7K6AAulzUM8YgXAeqNZG44nSyZ1bYJjH8GznC00I,5435
26
+ anyio/_core/_tempfile.py,sha256=lHb7CW4FyIlpkf5ADAf4VmLHCKwEHF9nxqNyBCFFUiA,19697
27
+ anyio/_core/_testing.py,sha256=u7MPqGXwpTxqI7hclSdNA30z2GH1Nw258uwKvy_RfBg,2340
28
+ anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508
29
+ anyio/abc/__init__.py,sha256=6mWhcl_pGXhrgZVHP_TCfMvIXIOp9mroEFM90fYCU_U,2869
30
+ anyio/abc/_eventloop.py,sha256=GlzgB3UJGgG6Kr7olpjOZ-o00PghecXuofVDQ_5611Q,10749
31
+ anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783
32
+ anyio/abc/_sockets.py,sha256=ECTY0jLEF18gryANHR3vFzXzGdZ-xPwELq1QdgOb0Jo,13258
33
+ anyio/abc/_streams.py,sha256=005GKSCXGprxnhucILboSqc2JFovECZk9m3p-qqxXVc,7640
34
+ anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067
35
+ anyio/abc/_tasks.py,sha256=KC7wrciE48AINOI-AhPutnFhe1ewfP7QnamFlDzqesQ,3721
36
+ anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821
37
+ anyio/from_thread.py,sha256=L-0w1HxJ6BSb-KuVi57k5Tkc3yzQrx3QK5tAxMPcY-0,19141
38
+ anyio/functools.py,sha256=HWj7GBEmc0Z-mZg3uok7Z7ZJn0rEC_0Pzbt0nYUDaTQ,10973
39
+ anyio/lowlevel.py,sha256=AyKLVK3LaWSoK39LkCKxE4_GDMLKZBNqTrLUgk63y80,5158
40
+ anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
41
+ anyio/pytest_plugin.py,sha256=3jAFQn0jv_pyoWE2GBBlHaj9sqXj4e8vob0_hgrsXE8,10244
42
+ anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
43
+ anyio/streams/buffered.py,sha256=2R3PeJhe4EXrdYqz44Y6-Eg9R6DrmlsYrP36Ir43-po,6263
44
+ anyio/streams/file.py,sha256=4WZ7XGz5WNu39FQHvqbe__TQ0HDP9OOhgO1mk9iVpVU,4470
45
+ anyio/streams/memory.py,sha256=F0zwzvFJKAhX_LRZGoKzzqDC2oMM-f-yyTBrEYEGOaU,10740
46
+ anyio/streams/stapled.py,sha256=T8Xqwf8K6EgURPxbt1N4i7A8BAk-gScv-GRhjLXIf_o,4390
47
+ anyio/streams/text.py,sha256=BcVAGJw1VRvtIqnv-o0Rb0pwH7p8vwlvl21xHq522ag,5765
48
+ anyio/streams/tls.py,sha256=Jpxy0Mfbcp1BxHCwE-YjSSFaLnIBbnnwur-excYThs4,15368
49
+ anyio/to_interpreter.py,sha256=_mLngrMy97TMR6VbW4Y6YzDUk9ZuPcQMPlkuyRh3C9k,7100
50
+ anyio/to_process.py,sha256=J7gAA_YOuoHqnpDAf5fm1Qu6kOmTzdFbiDNvnV755vk,9798
51
+ anyio/to_thread.py,sha256=menEgXYmUV7Fjg_9WqCV95P9MAtQS8BzPGGcWB_QnfQ,2687
.venv/Lib/site-packages/anyio-4.12.1.dist-info/REQUESTED ADDED
File without changes
.venv/Lib/site-packages/anyio-4.12.1.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
.venv/Lib/site-packages/anyio-4.12.1.dist-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ [pytest11]
2
+ anyio = anyio.pytest_plugin
.venv/Lib/site-packages/anyio-4.12.1.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ The MIT License (MIT)
2
+
3
+ Copyright (c) 2018 Alex Grönholm
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
6
+ this software and associated documentation files (the "Software"), to deal in
7
+ the Software without restriction, including without limitation the rights to
8
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9
+ the Software, and to permit persons to whom the Software is furnished to do so,
10
+ subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
17
+ FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
18
+ COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
19
+ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20
+ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
.venv/Lib/site-packages/anyio-4.12.1.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ anyio
.venv/Lib/site-packages/anyio/__init__.py ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from ._core._contextmanagers import AsyncContextManagerMixin as AsyncContextManagerMixin
4
+ from ._core._contextmanagers import ContextManagerMixin as ContextManagerMixin
5
+ from ._core._eventloop import current_time as current_time
6
+ from ._core._eventloop import get_all_backends as get_all_backends
7
+ from ._core._eventloop import get_available_backends as get_available_backends
8
+ from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class
9
+ from ._core._eventloop import run as run
10
+ from ._core._eventloop import sleep as sleep
11
+ from ._core._eventloop import sleep_forever as sleep_forever
12
+ from ._core._eventloop import sleep_until as sleep_until
13
+ from ._core._exceptions import BrokenResourceError as BrokenResourceError
14
+ from ._core._exceptions import BrokenWorkerInterpreter as BrokenWorkerInterpreter
15
+ from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess
16
+ from ._core._exceptions import BusyResourceError as BusyResourceError
17
+ from ._core._exceptions import ClosedResourceError as ClosedResourceError
18
+ from ._core._exceptions import ConnectionFailed as ConnectionFailed
19
+ from ._core._exceptions import DelimiterNotFound as DelimiterNotFound
20
+ from ._core._exceptions import EndOfStream as EndOfStream
21
+ from ._core._exceptions import IncompleteRead as IncompleteRead
22
+ from ._core._exceptions import NoEventLoopError as NoEventLoopError
23
+ from ._core._exceptions import RunFinishedError as RunFinishedError
24
+ from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError
25
+ from ._core._exceptions import WouldBlock as WouldBlock
26
+ from ._core._fileio import AsyncFile as AsyncFile
27
+ from ._core._fileio import Path as Path
28
+ from ._core._fileio import open_file as open_file
29
+ from ._core._fileio import wrap_file as wrap_file
30
+ from ._core._resources import aclose_forcefully as aclose_forcefully
31
+ from ._core._signals import open_signal_receiver as open_signal_receiver
32
+ from ._core._sockets import TCPConnectable as TCPConnectable
33
+ from ._core._sockets import UNIXConnectable as UNIXConnectable
34
+ from ._core._sockets import as_connectable as as_connectable
35
+ from ._core._sockets import connect_tcp as connect_tcp
36
+ from ._core._sockets import connect_unix as connect_unix
37
+ from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket
38
+ from ._core._sockets import (
39
+ create_connected_unix_datagram_socket as create_connected_unix_datagram_socket,
40
+ )
41
+ from ._core._sockets import create_tcp_listener as create_tcp_listener
42
+ from ._core._sockets import create_udp_socket as create_udp_socket
43
+ from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket
44
+ from ._core._sockets import create_unix_listener as create_unix_listener
45
+ from ._core._sockets import getaddrinfo as getaddrinfo
46
+ from ._core._sockets import getnameinfo as getnameinfo
47
+ from ._core._sockets import notify_closing as notify_closing
48
+ from ._core._sockets import wait_readable as wait_readable
49
+ from ._core._sockets import wait_socket_readable as wait_socket_readable
50
+ from ._core._sockets import wait_socket_writable as wait_socket_writable
51
+ from ._core._sockets import wait_writable as wait_writable
52
+ from ._core._streams import create_memory_object_stream as create_memory_object_stream
53
+ from ._core._subprocesses import open_process as open_process
54
+ from ._core._subprocesses import run_process as run_process
55
+ from ._core._synchronization import CapacityLimiter as CapacityLimiter
56
+ from ._core._synchronization import (
57
+ CapacityLimiterStatistics as CapacityLimiterStatistics,
58
+ )
59
+ from ._core._synchronization import Condition as Condition
60
+ from ._core._synchronization import ConditionStatistics as ConditionStatistics
61
+ from ._core._synchronization import Event as Event
62
+ from ._core._synchronization import EventStatistics as EventStatistics
63
+ from ._core._synchronization import Lock as Lock
64
+ from ._core._synchronization import LockStatistics as LockStatistics
65
+ from ._core._synchronization import ResourceGuard as ResourceGuard
66
+ from ._core._synchronization import Semaphore as Semaphore
67
+ from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics
68
+ from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED
69
+ from ._core._tasks import CancelScope as CancelScope
70
+ from ._core._tasks import create_task_group as create_task_group
71
+ from ._core._tasks import current_effective_deadline as current_effective_deadline
72
+ from ._core._tasks import fail_after as fail_after
73
+ from ._core._tasks import move_on_after as move_on_after
74
+ from ._core._tempfile import NamedTemporaryFile as NamedTemporaryFile
75
+ from ._core._tempfile import SpooledTemporaryFile as SpooledTemporaryFile
76
+ from ._core._tempfile import TemporaryDirectory as TemporaryDirectory
77
+ from ._core._tempfile import TemporaryFile as TemporaryFile
78
+ from ._core._tempfile import gettempdir as gettempdir
79
+ from ._core._tempfile import gettempdirb as gettempdirb
80
+ from ._core._tempfile import mkdtemp as mkdtemp
81
+ from ._core._tempfile import mkstemp as mkstemp
82
+ from ._core._testing import TaskInfo as TaskInfo
83
+ from ._core._testing import get_current_task as get_current_task
84
+ from ._core._testing import get_running_tasks as get_running_tasks
85
+ from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked
86
+ from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider
87
+ from ._core._typedattr import TypedAttributeSet as TypedAttributeSet
88
+ from ._core._typedattr import typed_attribute as typed_attribute
89
+
90
+ # Re-export imports so they look like they live directly in this package
91
+ for __value in list(locals().values()):
92
+ if getattr(__value, "__module__", "").startswith("anyio."):
93
+ __value.__module__ = __name__
94
+
95
+
96
+ del __value
97
+
98
+
99
+ def __getattr__(attr: str) -> type[BrokenWorkerInterpreter]:
100
+ """Support deprecated aliases."""
101
+ if attr == "BrokenWorkerIntepreter":
102
+ import warnings
103
+
104
+ warnings.warn(
105
+ "The 'BrokenWorkerIntepreter' alias is deprecated, use 'BrokenWorkerInterpreter' instead.",
106
+ DeprecationWarning,
107
+ stacklevel=2,
108
+ )
109
+ return BrokenWorkerInterpreter
110
+
111
+ raise AttributeError(f"module {__name__!r} has no attribute {attr!r}")
.venv/Lib/site-packages/anyio/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (4.61 kB). View file
 
.venv/Lib/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc ADDED
Binary file (25.8 kB). View file
 
.venv/Lib/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc ADDED
Binary file (7.99 kB). View file
 
.venv/Lib/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc ADDED
Binary file (3.19 kB). View file
 
.venv/Lib/site-packages/anyio/_backends/__init__.py ADDED
File without changes
.venv/Lib/site-packages/anyio/_backends/_asyncio.py ADDED
The diff for this file is too large to render. See raw diff
 
.venv/Lib/site-packages/anyio/_backends/_trio.py ADDED
@@ -0,0 +1,1346 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import array
4
+ import math
5
+ import os
6
+ import socket
7
+ import sys
8
+ import types
9
+ import weakref
10
+ from collections.abc import (
11
+ AsyncGenerator,
12
+ AsyncIterator,
13
+ Awaitable,
14
+ Callable,
15
+ Collection,
16
+ Coroutine,
17
+ Iterable,
18
+ Sequence,
19
+ )
20
+ from contextlib import AbstractContextManager
21
+ from dataclasses import dataclass
22
+ from io import IOBase
23
+ from os import PathLike
24
+ from signal import Signals
25
+ from socket import AddressFamily, SocketKind
26
+ from types import TracebackType
27
+ from typing import (
28
+ IO,
29
+ TYPE_CHECKING,
30
+ Any,
31
+ Generic,
32
+ NoReturn,
33
+ TypeVar,
34
+ cast,
35
+ overload,
36
+ )
37
+
38
+ import trio.from_thread
39
+ import trio.lowlevel
40
+ from outcome import Error, Outcome, Value
41
+ from trio.lowlevel import (
42
+ current_root_task,
43
+ current_task,
44
+ notify_closing,
45
+ wait_readable,
46
+ wait_writable,
47
+ )
48
+ from trio.socket import SocketType as TrioSocketType
49
+ from trio.to_thread import run_sync
50
+
51
+ from .. import (
52
+ CapacityLimiterStatistics,
53
+ EventStatistics,
54
+ LockStatistics,
55
+ RunFinishedError,
56
+ TaskInfo,
57
+ WouldBlock,
58
+ abc,
59
+ )
60
+ from .._core._eventloop import claim_worker_thread
61
+ from .._core._exceptions import (
62
+ BrokenResourceError,
63
+ BusyResourceError,
64
+ ClosedResourceError,
65
+ EndOfStream,
66
+ )
67
+ from .._core._sockets import convert_ipv6_sockaddr
68
+ from .._core._streams import create_memory_object_stream
69
+ from .._core._synchronization import (
70
+ CapacityLimiter as BaseCapacityLimiter,
71
+ )
72
+ from .._core._synchronization import Event as BaseEvent
73
+ from .._core._synchronization import Lock as BaseLock
74
+ from .._core._synchronization import (
75
+ ResourceGuard,
76
+ SemaphoreStatistics,
77
+ )
78
+ from .._core._synchronization import Semaphore as BaseSemaphore
79
+ from .._core._tasks import CancelScope as BaseCancelScope
80
+ from ..abc import IPSockAddrType, UDPPacketType, UNIXDatagramPacketType
81
+ from ..abc._eventloop import AsyncBackend, StrOrBytesPath
82
+ from ..streams.memory import MemoryObjectSendStream
83
+
84
+ if TYPE_CHECKING:
85
+ from _typeshed import FileDescriptorLike
86
+
87
+ if sys.version_info >= (3, 10):
88
+ from typing import ParamSpec
89
+ else:
90
+ from typing_extensions import ParamSpec
91
+
92
+ if sys.version_info >= (3, 11):
93
+ from typing import TypeVarTuple, Unpack
94
+ else:
95
+ from exceptiongroup import BaseExceptionGroup
96
+ from typing_extensions import TypeVarTuple, Unpack
97
+
98
+ T = TypeVar("T")
99
+ T_Retval = TypeVar("T_Retval")
100
+ T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType)
101
+ PosArgsT = TypeVarTuple("PosArgsT")
102
+ P = ParamSpec("P")
103
+
104
+
105
+ #
106
+ # Event loop
107
+ #
108
+
109
+ RunVar = trio.lowlevel.RunVar
110
+
111
+
112
+ #
113
+ # Timeouts and cancellation
114
+ #
115
+
116
+
117
+ class CancelScope(BaseCancelScope):
118
+ def __new__(
119
+ cls, original: trio.CancelScope | None = None, **kwargs: object
120
+ ) -> CancelScope:
121
+ return object.__new__(cls)
122
+
123
+ def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None:
124
+ self.__original = original or trio.CancelScope(**kwargs)
125
+
126
+ def __enter__(self) -> CancelScope:
127
+ self.__original.__enter__()
128
+ return self
129
+
130
+ def __exit__(
131
+ self,
132
+ exc_type: type[BaseException] | None,
133
+ exc_val: BaseException | None,
134
+ exc_tb: TracebackType | None,
135
+ ) -> bool:
136
+ return self.__original.__exit__(exc_type, exc_val, exc_tb)
137
+
138
+ def cancel(self, reason: str | None = None) -> None:
139
+ self.__original.cancel(reason)
140
+
141
+ @property
142
+ def deadline(self) -> float:
143
+ return self.__original.deadline
144
+
145
+ @deadline.setter
146
+ def deadline(self, value: float) -> None:
147
+ self.__original.deadline = value
148
+
149
+ @property
150
+ def cancel_called(self) -> bool:
151
+ return self.__original.cancel_called
152
+
153
+ @property
154
+ def cancelled_caught(self) -> bool:
155
+ return self.__original.cancelled_caught
156
+
157
+ @property
158
+ def shield(self) -> bool:
159
+ return self.__original.shield
160
+
161
+ @shield.setter
162
+ def shield(self, value: bool) -> None:
163
+ self.__original.shield = value
164
+
165
+
166
+ #
167
+ # Task groups
168
+ #
169
+
170
+
171
+ class TaskGroup(abc.TaskGroup):
172
+ def __init__(self) -> None:
173
+ self._active = False
174
+ self._nursery_manager = trio.open_nursery(strict_exception_groups=True)
175
+ self.cancel_scope = None # type: ignore[assignment]
176
+
177
+ async def __aenter__(self) -> TaskGroup:
178
+ self._active = True
179
+ self._nursery = await self._nursery_manager.__aenter__()
180
+ self.cancel_scope = CancelScope(self._nursery.cancel_scope)
181
+ return self
182
+
183
+ async def __aexit__(
184
+ self,
185
+ exc_type: type[BaseException] | None,
186
+ exc_val: BaseException | None,
187
+ exc_tb: TracebackType | None,
188
+ ) -> bool:
189
+ try:
190
+ # trio.Nursery.__exit__ returns bool; .open_nursery has wrong type
191
+ return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) # type: ignore[return-value]
192
+ except BaseExceptionGroup as exc:
193
+ if not exc.split(trio.Cancelled)[1]:
194
+ raise trio.Cancelled._create() from exc
195
+
196
+ raise
197
+ finally:
198
+ del exc_val, exc_tb
199
+ self._active = False
200
+
201
+ def start_soon(
202
+ self,
203
+ func: Callable[[Unpack[PosArgsT]], Awaitable[Any]],
204
+ *args: Unpack[PosArgsT],
205
+ name: object = None,
206
+ ) -> None:
207
+ if not self._active:
208
+ raise RuntimeError(
209
+ "This task group is not active; no new tasks can be started."
210
+ )
211
+
212
+ self._nursery.start_soon(func, *args, name=name)
213
+
214
+ async def start(
215
+ self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None
216
+ ) -> Any:
217
+ if not self._active:
218
+ raise RuntimeError(
219
+ "This task group is not active; no new tasks can be started."
220
+ )
221
+
222
+ return await self._nursery.start(func, *args, name=name)
223
+
224
+
225
+ #
226
+ # Subprocesses
227
+ #
228
+
229
+
230
+ @dataclass(eq=False)
231
+ class ReceiveStreamWrapper(abc.ByteReceiveStream):
232
+ _stream: trio.abc.ReceiveStream
233
+
234
+ async def receive(self, max_bytes: int | None = None) -> bytes:
235
+ try:
236
+ data = await self._stream.receive_some(max_bytes)
237
+ except trio.ClosedResourceError as exc:
238
+ raise ClosedResourceError from exc.__cause__
239
+ except trio.BrokenResourceError as exc:
240
+ raise BrokenResourceError from exc.__cause__
241
+
242
+ if data:
243
+ return bytes(data)
244
+ else:
245
+ raise EndOfStream
246
+
247
+ async def aclose(self) -> None:
248
+ await self._stream.aclose()
249
+
250
+
251
+ @dataclass(eq=False)
252
+ class SendStreamWrapper(abc.ByteSendStream):
253
+ _stream: trio.abc.SendStream
254
+
255
+ async def send(self, item: bytes) -> None:
256
+ try:
257
+ await self._stream.send_all(item)
258
+ except trio.ClosedResourceError as exc:
259
+ raise ClosedResourceError from exc.__cause__
260
+ except trio.BrokenResourceError as exc:
261
+ raise BrokenResourceError from exc.__cause__
262
+
263
+ async def aclose(self) -> None:
264
+ await self._stream.aclose()
265
+
266
+
267
+ @dataclass(eq=False)
268
+ class Process(abc.Process):
269
+ _process: trio.Process
270
+ _stdin: abc.ByteSendStream | None
271
+ _stdout: abc.ByteReceiveStream | None
272
+ _stderr: abc.ByteReceiveStream | None
273
+
274
+ async def aclose(self) -> None:
275
+ with CancelScope(shield=True):
276
+ if self._stdin:
277
+ await self._stdin.aclose()
278
+ if self._stdout:
279
+ await self._stdout.aclose()
280
+ if self._stderr:
281
+ await self._stderr.aclose()
282
+
283
+ try:
284
+ await self.wait()
285
+ except BaseException:
286
+ self.kill()
287
+ with CancelScope(shield=True):
288
+ await self.wait()
289
+ raise
290
+
291
+ async def wait(self) -> int:
292
+ return await self._process.wait()
293
+
294
+ def terminate(self) -> None:
295
+ self._process.terminate()
296
+
297
+ def kill(self) -> None:
298
+ self._process.kill()
299
+
300
+ def send_signal(self, signal: Signals) -> None:
301
+ self._process.send_signal(signal)
302
+
303
+ @property
304
+ def pid(self) -> int:
305
+ return self._process.pid
306
+
307
+ @property
308
+ def returncode(self) -> int | None:
309
+ return self._process.returncode
310
+
311
+ @property
312
+ def stdin(self) -> abc.ByteSendStream | None:
313
+ return self._stdin
314
+
315
+ @property
316
+ def stdout(self) -> abc.ByteReceiveStream | None:
317
+ return self._stdout
318
+
319
+ @property
320
+ def stderr(self) -> abc.ByteReceiveStream | None:
321
+ return self._stderr
322
+
323
+
324
+ class _ProcessPoolShutdownInstrument(trio.abc.Instrument):
325
+ def after_run(self) -> None:
326
+ super().after_run()
327
+
328
+
329
+ current_default_worker_process_limiter: trio.lowlevel.RunVar = RunVar(
330
+ "current_default_worker_process_limiter"
331
+ )
332
+
333
+
334
+ async def _shutdown_process_pool(workers: set[abc.Process]) -> None:
335
+ try:
336
+ await trio.sleep(math.inf)
337
+ except trio.Cancelled:
338
+ for process in workers:
339
+ if process.returncode is None:
340
+ process.kill()
341
+
342
+ with CancelScope(shield=True):
343
+ for process in workers:
344
+ await process.aclose()
345
+
346
+
347
+ #
348
+ # Sockets and networking
349
+ #
350
+
351
+
352
+ class _TrioSocketMixin(Generic[T_SockAddr]):
353
+ def __init__(self, trio_socket: TrioSocketType) -> None:
354
+ self._trio_socket = trio_socket
355
+ self._closed = False
356
+
357
+ def _check_closed(self) -> None:
358
+ if self._closed:
359
+ raise ClosedResourceError
360
+ if self._trio_socket.fileno() < 0:
361
+ raise BrokenResourceError
362
+
363
+ @property
364
+ def _raw_socket(self) -> socket.socket:
365
+ return self._trio_socket._sock # type: ignore[attr-defined]
366
+
367
+ async def aclose(self) -> None:
368
+ if self._trio_socket.fileno() >= 0:
369
+ self._closed = True
370
+ self._trio_socket.close()
371
+
372
+ def _convert_socket_error(self, exc: BaseException) -> NoReturn:
373
+ if isinstance(exc, trio.ClosedResourceError):
374
+ raise ClosedResourceError from exc
375
+ elif self._trio_socket.fileno() < 0 and self._closed:
376
+ raise ClosedResourceError from None
377
+ elif isinstance(exc, OSError):
378
+ raise BrokenResourceError from exc
379
+ else:
380
+ raise exc
381
+
382
+
383
+ class SocketStream(_TrioSocketMixin, abc.SocketStream):
384
+ def __init__(self, trio_socket: TrioSocketType) -> None:
385
+ super().__init__(trio_socket)
386
+ self._receive_guard = ResourceGuard("reading from")
387
+ self._send_guard = ResourceGuard("writing to")
388
+
389
+ async def receive(self, max_bytes: int = 65536) -> bytes:
390
+ with self._receive_guard:
391
+ try:
392
+ data = await self._trio_socket.recv(max_bytes)
393
+ except BaseException as exc:
394
+ self._convert_socket_error(exc)
395
+
396
+ if data:
397
+ return data
398
+ else:
399
+ raise EndOfStream
400
+
401
+ async def send(self, item: bytes) -> None:
402
+ with self._send_guard:
403
+ view = memoryview(item)
404
+ while view:
405
+ try:
406
+ bytes_sent = await self._trio_socket.send(view)
407
+ except BaseException as exc:
408
+ self._convert_socket_error(exc)
409
+
410
+ view = view[bytes_sent:]
411
+
412
+ async def send_eof(self) -> None:
413
+ self._trio_socket.shutdown(socket.SHUT_WR)
414
+
415
+
416
+ class UNIXSocketStream(SocketStream, abc.UNIXSocketStream):
417
+ async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]:
418
+ if not isinstance(msglen, int) or msglen < 0:
419
+ raise ValueError("msglen must be a non-negative integer")
420
+ if not isinstance(maxfds, int) or maxfds < 1:
421
+ raise ValueError("maxfds must be a positive integer")
422
+
423
+ fds = array.array("i")
424
+ await trio.lowlevel.checkpoint()
425
+ with self._receive_guard:
426
+ while True:
427
+ try:
428
+ message, ancdata, flags, addr = await self._trio_socket.recvmsg(
429
+ msglen, socket.CMSG_LEN(maxfds * fds.itemsize)
430
+ )
431
+ except BaseException as exc:
432
+ self._convert_socket_error(exc)
433
+ else:
434
+ if not message and not ancdata:
435
+ raise EndOfStream
436
+
437
+ break
438
+
439
+ for cmsg_level, cmsg_type, cmsg_data in ancdata:
440
+ if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS:
441
+ raise RuntimeError(
442
+ f"Received unexpected ancillary data; message = {message!r}, "
443
+ f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}"
444
+ )
445
+
446
+ fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
447
+
448
+ return message, list(fds)
449
+
450
+ async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None:
451
+ if not message:
452
+ raise ValueError("message must not be empty")
453
+ if not fds:
454
+ raise ValueError("fds must not be empty")
455
+
456
+ filenos: list[int] = []
457
+ for fd in fds:
458
+ if isinstance(fd, int):
459
+ filenos.append(fd)
460
+ elif isinstance(fd, IOBase):
461
+ filenos.append(fd.fileno())
462
+
463
+ fdarray = array.array("i", filenos)
464
+ await trio.lowlevel.checkpoint()
465
+ with self._send_guard:
466
+ while True:
467
+ try:
468
+ await self._trio_socket.sendmsg(
469
+ [message],
470
+ [
471
+ (
472
+ socket.SOL_SOCKET,
473
+ socket.SCM_RIGHTS,
474
+ fdarray,
475
+ )
476
+ ],
477
+ )
478
+ break
479
+ except BaseException as exc:
480
+ self._convert_socket_error(exc)
481
+
482
+
483
+ class TCPSocketListener(_TrioSocketMixin, abc.SocketListener):
484
+ def __init__(self, raw_socket: socket.socket):
485
+ super().__init__(trio.socket.from_stdlib_socket(raw_socket))
486
+ self._accept_guard = ResourceGuard("accepting connections from")
487
+
488
+ async def accept(self) -> SocketStream:
489
+ with self._accept_guard:
490
+ try:
491
+ trio_socket, _addr = await self._trio_socket.accept()
492
+ except BaseException as exc:
493
+ self._convert_socket_error(exc)
494
+
495
+ trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
496
+ return SocketStream(trio_socket)
497
+
498
+
499
+ class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener):
500
+ def __init__(self, raw_socket: socket.socket):
501
+ super().__init__(trio.socket.from_stdlib_socket(raw_socket))
502
+ self._accept_guard = ResourceGuard("accepting connections from")
503
+
504
+ async def accept(self) -> UNIXSocketStream:
505
+ with self._accept_guard:
506
+ try:
507
+ trio_socket, _addr = await self._trio_socket.accept()
508
+ except BaseException as exc:
509
+ self._convert_socket_error(exc)
510
+
511
+ return UNIXSocketStream(trio_socket)
512
+
513
+
514
+ class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket):
515
+ def __init__(self, trio_socket: TrioSocketType) -> None:
516
+ super().__init__(trio_socket)
517
+ self._receive_guard = ResourceGuard("reading from")
518
+ self._send_guard = ResourceGuard("writing to")
519
+
520
+ async def receive(self) -> tuple[bytes, IPSockAddrType]:
521
+ with self._receive_guard:
522
+ try:
523
+ data, addr = await self._trio_socket.recvfrom(65536)
524
+ return data, convert_ipv6_sockaddr(addr)
525
+ except BaseException as exc:
526
+ self._convert_socket_error(exc)
527
+
528
+ async def send(self, item: UDPPacketType) -> None:
529
+ with self._send_guard:
530
+ try:
531
+ await self._trio_socket.sendto(*item)
532
+ except BaseException as exc:
533
+ self._convert_socket_error(exc)
534
+
535
+
536
+ class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket):
537
+ def __init__(self, trio_socket: TrioSocketType) -> None:
538
+ super().__init__(trio_socket)
539
+ self._receive_guard = ResourceGuard("reading from")
540
+ self._send_guard = ResourceGuard("writing to")
541
+
542
+ async def receive(self) -> bytes:
543
+ with self._receive_guard:
544
+ try:
545
+ return await self._trio_socket.recv(65536)
546
+ except BaseException as exc:
547
+ self._convert_socket_error(exc)
548
+
549
+ async def send(self, item: bytes) -> None:
550
+ with self._send_guard:
551
+ try:
552
+ await self._trio_socket.send(item)
553
+ except BaseException as exc:
554
+ self._convert_socket_error(exc)
555
+
556
+
557
+ class UNIXDatagramSocket(_TrioSocketMixin[str], abc.UNIXDatagramSocket):
558
+ def __init__(self, trio_socket: TrioSocketType) -> None:
559
+ super().__init__(trio_socket)
560
+ self._receive_guard = ResourceGuard("reading from")
561
+ self._send_guard = ResourceGuard("writing to")
562
+
563
+ async def receive(self) -> UNIXDatagramPacketType:
564
+ with self._receive_guard:
565
+ try:
566
+ data, addr = await self._trio_socket.recvfrom(65536)
567
+ return data, addr
568
+ except BaseException as exc:
569
+ self._convert_socket_error(exc)
570
+
571
+ async def send(self, item: UNIXDatagramPacketType) -> None:
572
+ with self._send_guard:
573
+ try:
574
+ await self._trio_socket.sendto(*item)
575
+ except BaseException as exc:
576
+ self._convert_socket_error(exc)
577
+
578
+
579
+ class ConnectedUNIXDatagramSocket(
580
+ _TrioSocketMixin[str], abc.ConnectedUNIXDatagramSocket
581
+ ):
582
+ def __init__(self, trio_socket: TrioSocketType) -> None:
583
+ super().__init__(trio_socket)
584
+ self._receive_guard = ResourceGuard("reading from")
585
+ self._send_guard = ResourceGuard("writing to")
586
+
587
+ async def receive(self) -> bytes:
588
+ with self._receive_guard:
589
+ try:
590
+ return await self._trio_socket.recv(65536)
591
+ except BaseException as exc:
592
+ self._convert_socket_error(exc)
593
+
594
+ async def send(self, item: bytes) -> None:
595
+ with self._send_guard:
596
+ try:
597
+ await self._trio_socket.send(item)
598
+ except BaseException as exc:
599
+ self._convert_socket_error(exc)
600
+
601
+
602
+ #
603
+ # Synchronization
604
+ #
605
+
606
+
607
+ class Event(BaseEvent):
608
+ def __new__(cls) -> Event:
609
+ return object.__new__(cls)
610
+
611
+ def __init__(self) -> None:
612
+ self.__original = trio.Event()
613
+
614
+ def is_set(self) -> bool:
615
+ return self.__original.is_set()
616
+
617
+ async def wait(self) -> None:
618
+ return await self.__original.wait()
619
+
620
+ def statistics(self) -> EventStatistics:
621
+ orig_statistics = self.__original.statistics()
622
+ return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting)
623
+
624
+ def set(self) -> None:
625
+ self.__original.set()
626
+
627
+
628
+ class Lock(BaseLock):
629
+ def __new__(cls, *, fast_acquire: bool = False) -> Lock:
630
+ return object.__new__(cls)
631
+
632
+ def __init__(self, *, fast_acquire: bool = False) -> None:
633
+ self._fast_acquire = fast_acquire
634
+ self.__original = trio.Lock()
635
+
636
+ @staticmethod
637
+ def _convert_runtime_error_msg(exc: RuntimeError) -> None:
638
+ if exc.args == ("attempt to re-acquire an already held Lock",):
639
+ exc.args = ("Attempted to acquire an already held Lock",)
640
+
641
+ async def acquire(self) -> None:
642
+ if not self._fast_acquire:
643
+ try:
644
+ await self.__original.acquire()
645
+ except RuntimeError as exc:
646
+ self._convert_runtime_error_msg(exc)
647
+ raise
648
+
649
+ return
650
+
651
+ # This is the "fast path" where we don't let other tasks run
652
+ await trio.lowlevel.checkpoint_if_cancelled()
653
+ try:
654
+ self.__original.acquire_nowait()
655
+ except trio.WouldBlock:
656
+ await self.__original._lot.park()
657
+ except RuntimeError as exc:
658
+ self._convert_runtime_error_msg(exc)
659
+ raise
660
+
661
+ def acquire_nowait(self) -> None:
662
+ try:
663
+ self.__original.acquire_nowait()
664
+ except trio.WouldBlock:
665
+ raise WouldBlock from None
666
+ except RuntimeError as exc:
667
+ self._convert_runtime_error_msg(exc)
668
+ raise
669
+
670
+ def locked(self) -> bool:
671
+ return self.__original.locked()
672
+
673
+ def release(self) -> None:
674
+ self.__original.release()
675
+
676
+ def statistics(self) -> LockStatistics:
677
+ orig_statistics = self.__original.statistics()
678
+ owner = TrioTaskInfo(orig_statistics.owner) if orig_statistics.owner else None
679
+ return LockStatistics(
680
+ orig_statistics.locked, owner, orig_statistics.tasks_waiting
681
+ )
682
+
683
+
684
+ class Semaphore(BaseSemaphore):
685
+ def __new__(
686
+ cls,
687
+ initial_value: int,
688
+ *,
689
+ max_value: int | None = None,
690
+ fast_acquire: bool = False,
691
+ ) -> Semaphore:
692
+ return object.__new__(cls)
693
+
694
+ def __init__(
695
+ self,
696
+ initial_value: int,
697
+ *,
698
+ max_value: int | None = None,
699
+ fast_acquire: bool = False,
700
+ ) -> None:
701
+ super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire)
702
+ self.__original = trio.Semaphore(initial_value, max_value=max_value)
703
+
704
+ async def acquire(self) -> None:
705
+ if not self._fast_acquire:
706
+ await self.__original.acquire()
707
+ return
708
+
709
+ # This is the "fast path" where we don't let other tasks run
710
+ await trio.lowlevel.checkpoint_if_cancelled()
711
+ try:
712
+ self.__original.acquire_nowait()
713
+ except trio.WouldBlock:
714
+ await self.__original._lot.park()
715
+
716
+ def acquire_nowait(self) -> None:
717
+ try:
718
+ self.__original.acquire_nowait()
719
+ except trio.WouldBlock:
720
+ raise WouldBlock from None
721
+
722
+ @property
723
+ def max_value(self) -> int | None:
724
+ return self.__original.max_value
725
+
726
+ @property
727
+ def value(self) -> int:
728
+ return self.__original.value
729
+
730
+ def release(self) -> None:
731
+ self.__original.release()
732
+
733
+ def statistics(self) -> SemaphoreStatistics:
734
+ orig_statistics = self.__original.statistics()
735
+ return SemaphoreStatistics(orig_statistics.tasks_waiting)
736
+
737
+
738
+ class CapacityLimiter(BaseCapacityLimiter):
739
+ def __new__(
740
+ cls,
741
+ total_tokens: float | None = None,
742
+ *,
743
+ original: trio.CapacityLimiter | None = None,
744
+ ) -> CapacityLimiter:
745
+ return object.__new__(cls)
746
+
747
+ def __init__(
748
+ self,
749
+ total_tokens: float | None = None,
750
+ *,
751
+ original: trio.CapacityLimiter | None = None,
752
+ ) -> None:
753
+ if original is not None:
754
+ self.__original = original
755
+ else:
756
+ assert total_tokens is not None
757
+ self.__original = trio.CapacityLimiter(total_tokens)
758
+
759
+ async def __aenter__(self) -> None:
760
+ return await self.__original.__aenter__()
761
+
762
+ async def __aexit__(
763
+ self,
764
+ exc_type: type[BaseException] | None,
765
+ exc_val: BaseException | None,
766
+ exc_tb: TracebackType | None,
767
+ ) -> None:
768
+ await self.__original.__aexit__(exc_type, exc_val, exc_tb)
769
+
770
+ @property
771
+ def total_tokens(self) -> float:
772
+ return self.__original.total_tokens
773
+
774
+ @total_tokens.setter
775
+ def total_tokens(self, value: float) -> None:
776
+ self.__original.total_tokens = value
777
+
778
+ @property
779
+ def borrowed_tokens(self) -> int:
780
+ return self.__original.borrowed_tokens
781
+
782
+ @property
783
+ def available_tokens(self) -> float:
784
+ return self.__original.available_tokens
785
+
786
+ def acquire_nowait(self) -> None:
787
+ self.__original.acquire_nowait()
788
+
789
+ def acquire_on_behalf_of_nowait(self, borrower: object) -> None:
790
+ self.__original.acquire_on_behalf_of_nowait(borrower)
791
+
792
+ async def acquire(self) -> None:
793
+ await self.__original.acquire()
794
+
795
+ async def acquire_on_behalf_of(self, borrower: object) -> None:
796
+ await self.__original.acquire_on_behalf_of(borrower)
797
+
798
+ def release(self) -> None:
799
+ return self.__original.release()
800
+
801
+ def release_on_behalf_of(self, borrower: object) -> None:
802
+ return self.__original.release_on_behalf_of(borrower)
803
+
804
+ def statistics(self) -> CapacityLimiterStatistics:
805
+ orig = self.__original.statistics()
806
+ return CapacityLimiterStatistics(
807
+ borrowed_tokens=orig.borrowed_tokens,
808
+ total_tokens=orig.total_tokens,
809
+ borrowers=tuple(orig.borrowers),
810
+ tasks_waiting=orig.tasks_waiting,
811
+ )
812
+
813
+
814
+ _capacity_limiter_wrapper: trio.lowlevel.RunVar = RunVar("_capacity_limiter_wrapper")
815
+
816
+
817
+ #
818
+ # Signal handling
819
+ #
820
+
821
+
822
+ class _SignalReceiver:
823
+ _iterator: AsyncIterator[int]
824
+
825
+ def __init__(self, signals: tuple[Signals, ...]):
826
+ self._signals = signals
827
+
828
+ def __enter__(self) -> _SignalReceiver:
829
+ self._cm = trio.open_signal_receiver(*self._signals)
830
+ self._iterator = self._cm.__enter__()
831
+ return self
832
+
833
+ def __exit__(
834
+ self,
835
+ exc_type: type[BaseException] | None,
836
+ exc_val: BaseException | None,
837
+ exc_tb: TracebackType | None,
838
+ ) -> bool | None:
839
+ return self._cm.__exit__(exc_type, exc_val, exc_tb)
840
+
841
+ def __aiter__(self) -> _SignalReceiver:
842
+ return self
843
+
844
+ async def __anext__(self) -> Signals:
845
+ signum = await self._iterator.__anext__()
846
+ return Signals(signum)
847
+
848
+
849
+ #
850
+ # Testing and debugging
851
+ #
852
+
853
+
854
+ class TestRunner(abc.TestRunner):
855
+ def __init__(self, **options: Any) -> None:
856
+ from queue import Queue
857
+
858
+ self._call_queue: Queue[Callable[[], object]] = Queue()
859
+ self._send_stream: MemoryObjectSendStream | None = None
860
+ self._options = options
861
+
862
+ def __exit__(
863
+ self,
864
+ exc_type: type[BaseException] | None,
865
+ exc_val: BaseException | None,
866
+ exc_tb: types.TracebackType | None,
867
+ ) -> None:
868
+ if self._send_stream:
869
+ self._send_stream.close()
870
+ while self._send_stream is not None:
871
+ self._call_queue.get()()
872
+
873
+ async def _run_tests_and_fixtures(self) -> None:
874
+ self._send_stream, receive_stream = create_memory_object_stream(1)
875
+ with receive_stream:
876
+ async for coro, outcome_holder in receive_stream:
877
+ try:
878
+ retval = await coro
879
+ except BaseException as exc:
880
+ outcome_holder.append(Error(exc))
881
+ else:
882
+ outcome_holder.append(Value(retval))
883
+
884
+ def _main_task_finished(self, outcome: object) -> None:
885
+ self._send_stream = None
886
+
887
+ def _call_in_runner_task(
888
+ self,
889
+ func: Callable[P, Awaitable[T_Retval]],
890
+ *args: P.args,
891
+ **kwargs: P.kwargs,
892
+ ) -> T_Retval:
893
+ if self._send_stream is None:
894
+ trio.lowlevel.start_guest_run(
895
+ self._run_tests_and_fixtures,
896
+ run_sync_soon_threadsafe=self._call_queue.put,
897
+ done_callback=self._main_task_finished,
898
+ **self._options,
899
+ )
900
+ while self._send_stream is None:
901
+ self._call_queue.get()()
902
+
903
+ outcome_holder: list[Outcome] = []
904
+ self._send_stream.send_nowait((func(*args, **kwargs), outcome_holder))
905
+ while not outcome_holder:
906
+ self._call_queue.get()()
907
+
908
+ return outcome_holder[0].unwrap()
909
+
910
+ def run_asyncgen_fixture(
911
+ self,
912
+ fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]],
913
+ kwargs: dict[str, Any],
914
+ ) -> Iterable[T_Retval]:
915
+ asyncgen = fixture_func(**kwargs)
916
+ fixturevalue: T_Retval = self._call_in_runner_task(asyncgen.asend, None)
917
+
918
+ yield fixturevalue
919
+
920
+ try:
921
+ self._call_in_runner_task(asyncgen.asend, None)
922
+ except StopAsyncIteration:
923
+ pass
924
+ else:
925
+ self._call_in_runner_task(asyncgen.aclose)
926
+ raise RuntimeError("Async generator fixture did not stop")
927
+
928
+ def run_fixture(
929
+ self,
930
+ fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]],
931
+ kwargs: dict[str, Any],
932
+ ) -> T_Retval:
933
+ return self._call_in_runner_task(fixture_func, **kwargs)
934
+
935
+ def run_test(
936
+ self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any]
937
+ ) -> None:
938
+ self._call_in_runner_task(test_func, **kwargs)
939
+
940
+
941
+ class TrioTaskInfo(TaskInfo):
942
+ def __init__(self, task: trio.lowlevel.Task):
943
+ parent_id = None
944
+ if task.parent_nursery and task.parent_nursery.parent_task:
945
+ parent_id = id(task.parent_nursery.parent_task)
946
+
947
+ super().__init__(id(task), parent_id, task.name, task.coro)
948
+ self._task = weakref.proxy(task)
949
+
950
+ def has_pending_cancellation(self) -> bool:
951
+ try:
952
+ return self._task._cancel_status.effectively_cancelled
953
+ except ReferenceError:
954
+ # If the task is no longer around, it surely doesn't have a cancellation
955
+ # pending
956
+ return False
957
+
958
+
959
+ class TrioBackend(AsyncBackend):
960
+ @classmethod
961
+ def run(
962
+ cls,
963
+ func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
964
+ args: tuple[Unpack[PosArgsT]],
965
+ kwargs: dict[str, Any],
966
+ options: dict[str, Any],
967
+ ) -> T_Retval:
968
+ return trio.run(func, *args)
969
+
970
+ @classmethod
971
+ def current_token(cls) -> object:
972
+ return trio.lowlevel.current_trio_token()
973
+
974
+ @classmethod
975
+ def current_time(cls) -> float:
976
+ return trio.current_time()
977
+
978
+ @classmethod
979
+ def cancelled_exception_class(cls) -> type[BaseException]:
980
+ return trio.Cancelled
981
+
982
+ @classmethod
983
+ async def checkpoint(cls) -> None:
984
+ await trio.lowlevel.checkpoint()
985
+
986
+ @classmethod
987
+ async def checkpoint_if_cancelled(cls) -> None:
988
+ await trio.lowlevel.checkpoint_if_cancelled()
989
+
990
+ @classmethod
991
+ async def cancel_shielded_checkpoint(cls) -> None:
992
+ await trio.lowlevel.cancel_shielded_checkpoint()
993
+
994
+ @classmethod
995
+ async def sleep(cls, delay: float) -> None:
996
+ await trio.sleep(delay)
997
+
998
+ @classmethod
999
+ def create_cancel_scope(
1000
+ cls, *, deadline: float = math.inf, shield: bool = False
1001
+ ) -> abc.CancelScope:
1002
+ return CancelScope(deadline=deadline, shield=shield)
1003
+
1004
+ @classmethod
1005
+ def current_effective_deadline(cls) -> float:
1006
+ return trio.current_effective_deadline()
1007
+
1008
+ @classmethod
1009
+ def create_task_group(cls) -> abc.TaskGroup:
1010
+ return TaskGroup()
1011
+
1012
+ @classmethod
1013
+ def create_event(cls) -> abc.Event:
1014
+ return Event()
1015
+
1016
+ @classmethod
1017
+ def create_lock(cls, *, fast_acquire: bool) -> Lock:
1018
+ return Lock(fast_acquire=fast_acquire)
1019
+
1020
+ @classmethod
1021
+ def create_semaphore(
1022
+ cls,
1023
+ initial_value: int,
1024
+ *,
1025
+ max_value: int | None = None,
1026
+ fast_acquire: bool = False,
1027
+ ) -> abc.Semaphore:
1028
+ return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire)
1029
+
1030
+ @classmethod
1031
+ def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter:
1032
+ return CapacityLimiter(total_tokens)
1033
+
1034
+ @classmethod
1035
+ async def run_sync_in_worker_thread(
1036
+ cls,
1037
+ func: Callable[[Unpack[PosArgsT]], T_Retval],
1038
+ args: tuple[Unpack[PosArgsT]],
1039
+ abandon_on_cancel: bool = False,
1040
+ limiter: abc.CapacityLimiter | None = None,
1041
+ ) -> T_Retval:
1042
+ def wrapper() -> T_Retval:
1043
+ with claim_worker_thread(TrioBackend, token):
1044
+ return func(*args)
1045
+
1046
+ token = TrioBackend.current_token()
1047
+ return await run_sync(
1048
+ wrapper,
1049
+ abandon_on_cancel=abandon_on_cancel,
1050
+ limiter=cast(trio.CapacityLimiter, limiter),
1051
+ )
1052
+
1053
+ @classmethod
1054
+ def check_cancelled(cls) -> None:
1055
+ trio.from_thread.check_cancelled()
1056
+
1057
+ @classmethod
1058
+ def run_async_from_thread(
1059
+ cls,
1060
+ func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
1061
+ args: tuple[Unpack[PosArgsT]],
1062
+ token: object,
1063
+ ) -> T_Retval:
1064
+ trio_token = cast("trio.lowlevel.TrioToken | None", token)
1065
+ try:
1066
+ return trio.from_thread.run(func, *args, trio_token=trio_token)
1067
+ except trio.RunFinishedError:
1068
+ raise RunFinishedError from None
1069
+
1070
+ @classmethod
1071
+ def run_sync_from_thread(
1072
+ cls,
1073
+ func: Callable[[Unpack[PosArgsT]], T_Retval],
1074
+ args: tuple[Unpack[PosArgsT]],
1075
+ token: object,
1076
+ ) -> T_Retval:
1077
+ trio_token = cast("trio.lowlevel.TrioToken | None", token)
1078
+ try:
1079
+ return trio.from_thread.run_sync(func, *args, trio_token=trio_token)
1080
+ except trio.RunFinishedError:
1081
+ raise RunFinishedError from None
1082
+
1083
+ @classmethod
1084
+ async def open_process(
1085
+ cls,
1086
+ command: StrOrBytesPath | Sequence[StrOrBytesPath],
1087
+ *,
1088
+ stdin: int | IO[Any] | None,
1089
+ stdout: int | IO[Any] | None,
1090
+ stderr: int | IO[Any] | None,
1091
+ **kwargs: Any,
1092
+ ) -> Process:
1093
+ def convert_item(item: StrOrBytesPath) -> str:
1094
+ str_or_bytes = os.fspath(item)
1095
+ if isinstance(str_or_bytes, str):
1096
+ return str_or_bytes
1097
+ else:
1098
+ return os.fsdecode(str_or_bytes)
1099
+
1100
+ if isinstance(command, (str, bytes, PathLike)):
1101
+ process = await trio.lowlevel.open_process(
1102
+ convert_item(command),
1103
+ stdin=stdin,
1104
+ stdout=stdout,
1105
+ stderr=stderr,
1106
+ shell=True,
1107
+ **kwargs,
1108
+ )
1109
+ else:
1110
+ process = await trio.lowlevel.open_process(
1111
+ [convert_item(item) for item in command],
1112
+ stdin=stdin,
1113
+ stdout=stdout,
1114
+ stderr=stderr,
1115
+ shell=False,
1116
+ **kwargs,
1117
+ )
1118
+
1119
+ stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None
1120
+ stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None
1121
+ stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None
1122
+ return Process(process, stdin_stream, stdout_stream, stderr_stream)
1123
+
1124
+ @classmethod
1125
+ def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None:
1126
+ trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers)
1127
+
1128
+ @classmethod
1129
+ async def connect_tcp(
1130
+ cls, host: str, port: int, local_address: IPSockAddrType | None = None
1131
+ ) -> SocketStream:
1132
+ family = socket.AF_INET6 if ":" in host else socket.AF_INET
1133
+ trio_socket = trio.socket.socket(family)
1134
+ trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
1135
+ if local_address:
1136
+ await trio_socket.bind(local_address)
1137
+
1138
+ try:
1139
+ await trio_socket.connect((host, port))
1140
+ except BaseException:
1141
+ trio_socket.close()
1142
+ raise
1143
+
1144
+ return SocketStream(trio_socket)
1145
+
1146
+ @classmethod
1147
+ async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream:
1148
+ trio_socket = trio.socket.socket(socket.AF_UNIX)
1149
+ try:
1150
+ await trio_socket.connect(path)
1151
+ except BaseException:
1152
+ trio_socket.close()
1153
+ raise
1154
+
1155
+ return UNIXSocketStream(trio_socket)
1156
+
1157
+ @classmethod
1158
+ def create_tcp_listener(cls, sock: socket.socket) -> abc.SocketListener:
1159
+ return TCPSocketListener(sock)
1160
+
1161
+ @classmethod
1162
+ def create_unix_listener(cls, sock: socket.socket) -> abc.SocketListener:
1163
+ return UNIXSocketListener(sock)
1164
+
1165
+ @classmethod
1166
+ async def create_udp_socket(
1167
+ cls,
1168
+ family: socket.AddressFamily,
1169
+ local_address: IPSockAddrType | None,
1170
+ remote_address: IPSockAddrType | None,
1171
+ reuse_port: bool,
1172
+ ) -> UDPSocket | ConnectedUDPSocket:
1173
+ trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM)
1174
+
1175
+ if reuse_port:
1176
+ trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
1177
+
1178
+ if local_address:
1179
+ await trio_socket.bind(local_address)
1180
+
1181
+ if remote_address:
1182
+ await trio_socket.connect(remote_address)
1183
+ return ConnectedUDPSocket(trio_socket)
1184
+ else:
1185
+ return UDPSocket(trio_socket)
1186
+
1187
+ @classmethod
1188
+ @overload
1189
+ async def create_unix_datagram_socket(
1190
+ cls, raw_socket: socket.socket, remote_path: None
1191
+ ) -> abc.UNIXDatagramSocket: ...
1192
+
1193
+ @classmethod
1194
+ @overload
1195
+ async def create_unix_datagram_socket(
1196
+ cls, raw_socket: socket.socket, remote_path: str | bytes
1197
+ ) -> abc.ConnectedUNIXDatagramSocket: ...
1198
+
1199
+ @classmethod
1200
+ async def create_unix_datagram_socket(
1201
+ cls, raw_socket: socket.socket, remote_path: str | bytes | None
1202
+ ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket:
1203
+ trio_socket = trio.socket.from_stdlib_socket(raw_socket)
1204
+
1205
+ if remote_path:
1206
+ await trio_socket.connect(remote_path)
1207
+ return ConnectedUNIXDatagramSocket(trio_socket)
1208
+ else:
1209
+ return UNIXDatagramSocket(trio_socket)
1210
+
1211
+ @classmethod
1212
+ async def getaddrinfo(
1213
+ cls,
1214
+ host: bytes | str | None,
1215
+ port: str | int | None,
1216
+ *,
1217
+ family: int | AddressFamily = 0,
1218
+ type: int | SocketKind = 0,
1219
+ proto: int = 0,
1220
+ flags: int = 0,
1221
+ ) -> Sequence[
1222
+ tuple[
1223
+ AddressFamily,
1224
+ SocketKind,
1225
+ int,
1226
+ str,
1227
+ tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes],
1228
+ ]
1229
+ ]:
1230
+ return await trio.socket.getaddrinfo(host, port, family, type, proto, flags)
1231
+
1232
+ @classmethod
1233
+ async def getnameinfo(
1234
+ cls, sockaddr: IPSockAddrType, flags: int = 0
1235
+ ) -> tuple[str, str]:
1236
+ return await trio.socket.getnameinfo(sockaddr, flags)
1237
+
1238
+ @classmethod
1239
+ async def wait_readable(cls, obj: FileDescriptorLike) -> None:
1240
+ try:
1241
+ await wait_readable(obj)
1242
+ except trio.ClosedResourceError as exc:
1243
+ raise ClosedResourceError().with_traceback(exc.__traceback__) from None
1244
+ except trio.BusyResourceError:
1245
+ raise BusyResourceError("reading from") from None
1246
+
1247
+ @classmethod
1248
+ async def wait_writable(cls, obj: FileDescriptorLike) -> None:
1249
+ try:
1250
+ await wait_writable(obj)
1251
+ except trio.ClosedResourceError as exc:
1252
+ raise ClosedResourceError().with_traceback(exc.__traceback__) from None
1253
+ except trio.BusyResourceError:
1254
+ raise BusyResourceError("writing to") from None
1255
+
1256
+ @classmethod
1257
+ def notify_closing(cls, obj: FileDescriptorLike) -> None:
1258
+ notify_closing(obj)
1259
+
1260
+ @classmethod
1261
+ async def wrap_listener_socket(cls, sock: socket.socket) -> abc.SocketListener:
1262
+ return TCPSocketListener(sock)
1263
+
1264
+ @classmethod
1265
+ async def wrap_stream_socket(cls, sock: socket.socket) -> SocketStream:
1266
+ trio_sock = trio.socket.from_stdlib_socket(sock)
1267
+ return SocketStream(trio_sock)
1268
+
1269
+ @classmethod
1270
+ async def wrap_unix_stream_socket(cls, sock: socket.socket) -> UNIXSocketStream:
1271
+ trio_sock = trio.socket.from_stdlib_socket(sock)
1272
+ return UNIXSocketStream(trio_sock)
1273
+
1274
+ @classmethod
1275
+ async def wrap_udp_socket(cls, sock: socket.socket) -> UDPSocket:
1276
+ trio_sock = trio.socket.from_stdlib_socket(sock)
1277
+ return UDPSocket(trio_sock)
1278
+
1279
+ @classmethod
1280
+ async def wrap_connected_udp_socket(cls, sock: socket.socket) -> ConnectedUDPSocket:
1281
+ trio_sock = trio.socket.from_stdlib_socket(sock)
1282
+ return ConnectedUDPSocket(trio_sock)
1283
+
1284
+ @classmethod
1285
+ async def wrap_unix_datagram_socket(cls, sock: socket.socket) -> UNIXDatagramSocket:
1286
+ trio_sock = trio.socket.from_stdlib_socket(sock)
1287
+ return UNIXDatagramSocket(trio_sock)
1288
+
1289
+ @classmethod
1290
+ async def wrap_connected_unix_datagram_socket(
1291
+ cls, sock: socket.socket
1292
+ ) -> ConnectedUNIXDatagramSocket:
1293
+ trio_sock = trio.socket.from_stdlib_socket(sock)
1294
+ return ConnectedUNIXDatagramSocket(trio_sock)
1295
+
1296
+ @classmethod
1297
+ def current_default_thread_limiter(cls) -> CapacityLimiter:
1298
+ try:
1299
+ return _capacity_limiter_wrapper.get()
1300
+ except LookupError:
1301
+ limiter = CapacityLimiter(
1302
+ original=trio.to_thread.current_default_thread_limiter()
1303
+ )
1304
+ _capacity_limiter_wrapper.set(limiter)
1305
+ return limiter
1306
+
1307
+ @classmethod
1308
+ def open_signal_receiver(
1309
+ cls, *signals: Signals
1310
+ ) -> AbstractContextManager[AsyncIterator[Signals]]:
1311
+ return _SignalReceiver(signals)
1312
+
1313
+ @classmethod
1314
+ def get_current_task(cls) -> TaskInfo:
1315
+ task = current_task()
1316
+ return TrioTaskInfo(task)
1317
+
1318
+ @classmethod
1319
+ def get_running_tasks(cls) -> Sequence[TaskInfo]:
1320
+ root_task = current_root_task()
1321
+ assert root_task
1322
+ task_infos = [TrioTaskInfo(root_task)]
1323
+ nurseries = root_task.child_nurseries
1324
+ while nurseries:
1325
+ new_nurseries: list[trio.Nursery] = []
1326
+ for nursery in nurseries:
1327
+ for task in nursery.child_tasks:
1328
+ task_infos.append(TrioTaskInfo(task))
1329
+ new_nurseries.extend(task.child_nurseries)
1330
+
1331
+ nurseries = new_nurseries
1332
+
1333
+ return task_infos
1334
+
1335
+ @classmethod
1336
+ async def wait_all_tasks_blocked(cls) -> None:
1337
+ from trio.testing import wait_all_tasks_blocked
1338
+
1339
+ await wait_all_tasks_blocked()
1340
+
1341
+ @classmethod
1342
+ def create_test_runner(cls, options: dict[str, Any]) -> TestRunner:
1343
+ return TestRunner(**options)
1344
+
1345
+
1346
+ backend_class = TrioBackend
.venv/Lib/site-packages/anyio/_core/__init__.py ADDED
File without changes
.venv/Lib/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (176 Bytes). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_contextmanagers.cpython-312.pyc ADDED
Binary file (8.99 kB). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc ADDED
Binary file (8.17 kB). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc ADDED
Binary file (7.41 kB). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc ADDED
Binary file (43.4 kB). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc ADDED
Binary file (918 Bytes). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc ADDED
Binary file (1.37 kB). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc ADDED
Binary file (40.6 kB). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc ADDED
Binary file (2.33 kB). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc ADDED
Binary file (9.62 kB). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc ADDED
Binary file (32.9 kB). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc ADDED
Binary file (7.68 kB). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_tempfile.cpython-312.pyc ADDED
Binary file (28.1 kB). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc ADDED
Binary file (3.78 kB). View file
 
.venv/Lib/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc ADDED
Binary file (3.82 kB). View file
 
.venv/Lib/site-packages/anyio/_core/_asyncio_selector_thread.py ADDED
@@ -0,0 +1,167 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ import socket
5
+ import threading
6
+ from collections.abc import Callable
7
+ from selectors import EVENT_READ, EVENT_WRITE, DefaultSelector
8
+ from typing import TYPE_CHECKING, Any
9
+
10
+ if TYPE_CHECKING:
11
+ from _typeshed import FileDescriptorLike
12
+
13
+ _selector_lock = threading.Lock()
14
+ _selector: Selector | None = None
15
+
16
+
17
+ class Selector:
18
+ def __init__(self) -> None:
19
+ self._thread = threading.Thread(target=self.run, name="AnyIO socket selector")
20
+ self._selector = DefaultSelector()
21
+ self._send, self._receive = socket.socketpair()
22
+ self._send.setblocking(False)
23
+ self._receive.setblocking(False)
24
+ # This somewhat reduces the amount of memory wasted queueing up data
25
+ # for wakeups. With these settings, maximum number of 1-byte sends
26
+ # before getting BlockingIOError:
27
+ # Linux 4.8: 6
28
+ # macOS (darwin 15.5): 1
29
+ # Windows 10: 525347
30
+ # Windows you're weird. (And on Windows setting SNDBUF to 0 makes send
31
+ # blocking, even on non-blocking sockets, so don't do that.)
32
+ self._receive.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1)
33
+ self._send.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1)
34
+ # On Windows this is a TCP socket so this might matter. On other
35
+ # platforms this fails b/c AF_UNIX sockets aren't actually TCP.
36
+ try:
37
+ self._send.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
38
+ except OSError:
39
+ pass
40
+
41
+ self._selector.register(self._receive, EVENT_READ)
42
+ self._closed = False
43
+
44
+ def start(self) -> None:
45
+ self._thread.start()
46
+ threading._register_atexit(self._stop) # type: ignore[attr-defined]
47
+
48
+ def _stop(self) -> None:
49
+ global _selector
50
+ self._closed = True
51
+ self._notify_self()
52
+ self._send.close()
53
+ self._thread.join()
54
+ self._selector.unregister(self._receive)
55
+ self._receive.close()
56
+ self._selector.close()
57
+ _selector = None
58
+ assert not self._selector.get_map(), (
59
+ "selector still has registered file descriptors after shutdown"
60
+ )
61
+
62
+ def _notify_self(self) -> None:
63
+ try:
64
+ self._send.send(b"\x00")
65
+ except BlockingIOError:
66
+ pass
67
+
68
+ def add_reader(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None:
69
+ loop = asyncio.get_running_loop()
70
+ try:
71
+ key = self._selector.get_key(fd)
72
+ except KeyError:
73
+ self._selector.register(fd, EVENT_READ, {EVENT_READ: (loop, callback)})
74
+ else:
75
+ if EVENT_READ in key.data:
76
+ raise ValueError(
77
+ "this file descriptor is already registered for reading"
78
+ )
79
+
80
+ key.data[EVENT_READ] = loop, callback
81
+ self._selector.modify(fd, key.events | EVENT_READ, key.data)
82
+
83
+ self._notify_self()
84
+
85
+ def add_writer(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None:
86
+ loop = asyncio.get_running_loop()
87
+ try:
88
+ key = self._selector.get_key(fd)
89
+ except KeyError:
90
+ self._selector.register(fd, EVENT_WRITE, {EVENT_WRITE: (loop, callback)})
91
+ else:
92
+ if EVENT_WRITE in key.data:
93
+ raise ValueError(
94
+ "this file descriptor is already registered for writing"
95
+ )
96
+
97
+ key.data[EVENT_WRITE] = loop, callback
98
+ self._selector.modify(fd, key.events | EVENT_WRITE, key.data)
99
+
100
+ self._notify_self()
101
+
102
+ def remove_reader(self, fd: FileDescriptorLike) -> bool:
103
+ try:
104
+ key = self._selector.get_key(fd)
105
+ except KeyError:
106
+ return False
107
+
108
+ if new_events := key.events ^ EVENT_READ:
109
+ del key.data[EVENT_READ]
110
+ self._selector.modify(fd, new_events, key.data)
111
+ else:
112
+ self._selector.unregister(fd)
113
+
114
+ return True
115
+
116
+ def remove_writer(self, fd: FileDescriptorLike) -> bool:
117
+ try:
118
+ key = self._selector.get_key(fd)
119
+ except KeyError:
120
+ return False
121
+
122
+ if new_events := key.events ^ EVENT_WRITE:
123
+ del key.data[EVENT_WRITE]
124
+ self._selector.modify(fd, new_events, key.data)
125
+ else:
126
+ self._selector.unregister(fd)
127
+
128
+ return True
129
+
130
+ def run(self) -> None:
131
+ while not self._closed:
132
+ for key, events in self._selector.select():
133
+ if key.fileobj is self._receive:
134
+ try:
135
+ while self._receive.recv(4096):
136
+ pass
137
+ except BlockingIOError:
138
+ pass
139
+
140
+ continue
141
+
142
+ if events & EVENT_READ:
143
+ loop, callback = key.data[EVENT_READ]
144
+ self.remove_reader(key.fd)
145
+ try:
146
+ loop.call_soon_threadsafe(callback)
147
+ except RuntimeError:
148
+ pass # the loop was already closed
149
+
150
+ if events & EVENT_WRITE:
151
+ loop, callback = key.data[EVENT_WRITE]
152
+ self.remove_writer(key.fd)
153
+ try:
154
+ loop.call_soon_threadsafe(callback)
155
+ except RuntimeError:
156
+ pass # the loop was already closed
157
+
158
+
159
+ def get_selector() -> Selector:
160
+ global _selector
161
+
162
+ with _selector_lock:
163
+ if _selector is None:
164
+ _selector = Selector()
165
+ _selector.start()
166
+
167
+ return _selector
.venv/Lib/site-packages/anyio/_core/_contextmanagers.py ADDED
@@ -0,0 +1,200 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from abc import abstractmethod
4
+ from contextlib import AbstractAsyncContextManager, AbstractContextManager
5
+ from inspect import isasyncgen, iscoroutine, isgenerator
6
+ from types import TracebackType
7
+ from typing import Protocol, TypeVar, cast, final
8
+
9
+ _T_co = TypeVar("_T_co", covariant=True)
10
+ _ExitT_co = TypeVar("_ExitT_co", covariant=True, bound="bool | None")
11
+
12
+
13
+ class _SupportsCtxMgr(Protocol[_T_co, _ExitT_co]):
14
+ def __contextmanager__(self) -> AbstractContextManager[_T_co, _ExitT_co]: ...
15
+
16
+
17
+ class _SupportsAsyncCtxMgr(Protocol[_T_co, _ExitT_co]):
18
+ def __asynccontextmanager__(
19
+ self,
20
+ ) -> AbstractAsyncContextManager[_T_co, _ExitT_co]: ...
21
+
22
+
23
+ class ContextManagerMixin:
24
+ """
25
+ Mixin class providing context manager functionality via a generator-based
26
+ implementation.
27
+
28
+ This class allows you to implement a context manager via :meth:`__contextmanager__`
29
+ which should return a generator. The mechanics are meant to mirror those of
30
+ :func:`@contextmanager <contextlib.contextmanager>`.
31
+
32
+ .. note:: Classes using this mix-in are not reentrant as context managers, meaning
33
+ that once you enter it, you can't re-enter before first exiting it.
34
+
35
+ .. seealso:: :doc:`contextmanagers`
36
+ """
37
+
38
+ __cm: AbstractContextManager[object, bool | None] | None = None
39
+
40
+ @final
41
+ def __enter__(self: _SupportsCtxMgr[_T_co, bool | None]) -> _T_co:
42
+ # Needed for mypy to assume self still has the __cm member
43
+ assert isinstance(self, ContextManagerMixin)
44
+ if self.__cm is not None:
45
+ raise RuntimeError(
46
+ f"this {self.__class__.__qualname__} has already been entered"
47
+ )
48
+
49
+ cm = self.__contextmanager__()
50
+ if not isinstance(cm, AbstractContextManager):
51
+ if isgenerator(cm):
52
+ raise TypeError(
53
+ "__contextmanager__() returned a generator object instead of "
54
+ "a context manager. Did you forget to add the @contextmanager "
55
+ "decorator?"
56
+ )
57
+
58
+ raise TypeError(
59
+ f"__contextmanager__() did not return a context manager object, "
60
+ f"but {cm.__class__!r}"
61
+ )
62
+
63
+ if cm is self:
64
+ raise TypeError(
65
+ f"{self.__class__.__qualname__}.__contextmanager__() returned "
66
+ f"self. Did you forget to add the @contextmanager decorator and a "
67
+ f"'yield' statement?"
68
+ )
69
+
70
+ value = cm.__enter__()
71
+ self.__cm = cm
72
+ return value
73
+
74
+ @final
75
+ def __exit__(
76
+ self: _SupportsCtxMgr[object, _ExitT_co],
77
+ exc_type: type[BaseException] | None,
78
+ exc_val: BaseException | None,
79
+ exc_tb: TracebackType | None,
80
+ ) -> _ExitT_co:
81
+ # Needed for mypy to assume self still has the __cm member
82
+ assert isinstance(self, ContextManagerMixin)
83
+ if self.__cm is None:
84
+ raise RuntimeError(
85
+ f"this {self.__class__.__qualname__} has not been entered yet"
86
+ )
87
+
88
+ # Prevent circular references
89
+ cm = self.__cm
90
+ del self.__cm
91
+
92
+ return cast(_ExitT_co, cm.__exit__(exc_type, exc_val, exc_tb))
93
+
94
+ @abstractmethod
95
+ def __contextmanager__(self) -> AbstractContextManager[object, bool | None]:
96
+ """
97
+ Implement your context manager logic here.
98
+
99
+ This method **must** be decorated with
100
+ :func:`@contextmanager <contextlib.contextmanager>`.
101
+
102
+ .. note:: Remember that the ``yield`` will raise any exception raised in the
103
+ enclosed context block, so use a ``finally:`` block to clean up resources!
104
+
105
+ :return: a context manager object
106
+ """
107
+
108
+
109
+ class AsyncContextManagerMixin:
110
+ """
111
+ Mixin class providing async context manager functionality via a generator-based
112
+ implementation.
113
+
114
+ This class allows you to implement a context manager via
115
+ :meth:`__asynccontextmanager__`. The mechanics are meant to mirror those of
116
+ :func:`@asynccontextmanager <contextlib.asynccontextmanager>`.
117
+
118
+ .. note:: Classes using this mix-in are not reentrant as context managers, meaning
119
+ that once you enter it, you can't re-enter before first exiting it.
120
+
121
+ .. seealso:: :doc:`contextmanagers`
122
+ """
123
+
124
+ __cm: AbstractAsyncContextManager[object, bool | None] | None = None
125
+
126
+ @final
127
+ async def __aenter__(self: _SupportsAsyncCtxMgr[_T_co, bool | None]) -> _T_co:
128
+ # Needed for mypy to assume self still has the __cm member
129
+ assert isinstance(self, AsyncContextManagerMixin)
130
+ if self.__cm is not None:
131
+ raise RuntimeError(
132
+ f"this {self.__class__.__qualname__} has already been entered"
133
+ )
134
+
135
+ cm = self.__asynccontextmanager__()
136
+ if not isinstance(cm, AbstractAsyncContextManager):
137
+ if isasyncgen(cm):
138
+ raise TypeError(
139
+ "__asynccontextmanager__() returned an async generator instead of "
140
+ "an async context manager. Did you forget to add the "
141
+ "@asynccontextmanager decorator?"
142
+ )
143
+ elif iscoroutine(cm):
144
+ cm.close()
145
+ raise TypeError(
146
+ "__asynccontextmanager__() returned a coroutine object instead of "
147
+ "an async context manager. Did you forget to add the "
148
+ "@asynccontextmanager decorator and a 'yield' statement?"
149
+ )
150
+
151
+ raise TypeError(
152
+ f"__asynccontextmanager__() did not return an async context manager, "
153
+ f"but {cm.__class__!r}"
154
+ )
155
+
156
+ if cm is self:
157
+ raise TypeError(
158
+ f"{self.__class__.__qualname__}.__asynccontextmanager__() returned "
159
+ f"self. Did you forget to add the @asynccontextmanager decorator and a "
160
+ f"'yield' statement?"
161
+ )
162
+
163
+ value = await cm.__aenter__()
164
+ self.__cm = cm
165
+ return value
166
+
167
+ @final
168
+ async def __aexit__(
169
+ self: _SupportsAsyncCtxMgr[object, _ExitT_co],
170
+ exc_type: type[BaseException] | None,
171
+ exc_val: BaseException | None,
172
+ exc_tb: TracebackType | None,
173
+ ) -> _ExitT_co:
174
+ assert isinstance(self, AsyncContextManagerMixin)
175
+ if self.__cm is None:
176
+ raise RuntimeError(
177
+ f"this {self.__class__.__qualname__} has not been entered yet"
178
+ )
179
+
180
+ # Prevent circular references
181
+ cm = self.__cm
182
+ del self.__cm
183
+
184
+ return cast(_ExitT_co, await cm.__aexit__(exc_type, exc_val, exc_tb))
185
+
186
+ @abstractmethod
187
+ def __asynccontextmanager__(
188
+ self,
189
+ ) -> AbstractAsyncContextManager[object, bool | None]:
190
+ """
191
+ Implement your async context manager logic here.
192
+
193
+ This method **must** be decorated with
194
+ :func:`@asynccontextmanager <contextlib.asynccontextmanager>`.
195
+
196
+ .. note:: Remember that the ``yield`` will raise any exception raised in the
197
+ enclosed context block, so use a ``finally:`` block to clean up resources!
198
+
199
+ :return: an async context manager object
200
+ """
.venv/Lib/site-packages/anyio/_core/_eventloop.py ADDED
@@ -0,0 +1,234 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import math
4
+ import sys
5
+ import threading
6
+ from collections.abc import Awaitable, Callable, Generator
7
+ from contextlib import contextmanager
8
+ from contextvars import Token
9
+ from importlib import import_module
10
+ from typing import TYPE_CHECKING, Any, TypeVar
11
+
12
+ from ._exceptions import NoEventLoopError
13
+
14
+ if sys.version_info >= (3, 11):
15
+ from typing import TypeVarTuple, Unpack
16
+ else:
17
+ from typing_extensions import TypeVarTuple, Unpack
18
+
19
+ sniffio: Any
20
+ try:
21
+ import sniffio
22
+ except ModuleNotFoundError:
23
+ sniffio = None
24
+
25
+ if TYPE_CHECKING:
26
+ from ..abc import AsyncBackend
27
+
28
+ # This must be updated when new backends are introduced
29
+ BACKENDS = "asyncio", "trio"
30
+
31
+ T_Retval = TypeVar("T_Retval")
32
+ PosArgsT = TypeVarTuple("PosArgsT")
33
+
34
+ threadlocals = threading.local()
35
+ loaded_backends: dict[str, type[AsyncBackend]] = {}
36
+
37
+
38
+ def run(
39
+ func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
40
+ *args: Unpack[PosArgsT],
41
+ backend: str = "asyncio",
42
+ backend_options: dict[str, Any] | None = None,
43
+ ) -> T_Retval:
44
+ """
45
+ Run the given coroutine function in an asynchronous event loop.
46
+
47
+ The current thread must not be already running an event loop.
48
+
49
+ :param func: a coroutine function
50
+ :param args: positional arguments to ``func``
51
+ :param backend: name of the asynchronous event loop implementation – currently
52
+ either ``asyncio`` or ``trio``
53
+ :param backend_options: keyword arguments to call the backend ``run()``
54
+ implementation with (documented :ref:`here <backend options>`)
55
+ :return: the return value of the coroutine function
56
+ :raises RuntimeError: if an asynchronous event loop is already running in this
57
+ thread
58
+ :raises LookupError: if the named backend is not found
59
+
60
+ """
61
+ if asynclib_name := current_async_library():
62
+ raise RuntimeError(f"Already running {asynclib_name} in this thread")
63
+
64
+ try:
65
+ async_backend = get_async_backend(backend)
66
+ except ImportError as exc:
67
+ raise LookupError(f"No such backend: {backend}") from exc
68
+
69
+ token = None
70
+ if asynclib_name is None:
71
+ # Since we're in control of the event loop, we can cache the name of the async
72
+ # library
73
+ token = set_current_async_library(backend)
74
+
75
+ try:
76
+ backend_options = backend_options or {}
77
+ return async_backend.run(func, args, {}, backend_options)
78
+ finally:
79
+ reset_current_async_library(token)
80
+
81
+
82
+ async def sleep(delay: float) -> None:
83
+ """
84
+ Pause the current task for the specified duration.
85
+
86
+ :param delay: the duration, in seconds
87
+
88
+ """
89
+ return await get_async_backend().sleep(delay)
90
+
91
+
92
+ async def sleep_forever() -> None:
93
+ """
94
+ Pause the current task until it's cancelled.
95
+
96
+ This is a shortcut for ``sleep(math.inf)``.
97
+
98
+ .. versionadded:: 3.1
99
+
100
+ """
101
+ await sleep(math.inf)
102
+
103
+
104
+ async def sleep_until(deadline: float) -> None:
105
+ """
106
+ Pause the current task until the given time.
107
+
108
+ :param deadline: the absolute time to wake up at (according to the internal
109
+ monotonic clock of the event loop)
110
+
111
+ .. versionadded:: 3.1
112
+
113
+ """
114
+ now = current_time()
115
+ await sleep(max(deadline - now, 0))
116
+
117
+
118
+ def current_time() -> float:
119
+ """
120
+ Return the current value of the event loop's internal clock.
121
+
122
+ :return: the clock value (seconds)
123
+ :raises NoEventLoopError: if no supported asynchronous event loop is running in the
124
+ current thread
125
+
126
+ """
127
+ return get_async_backend().current_time()
128
+
129
+
130
+ def get_all_backends() -> tuple[str, ...]:
131
+ """Return a tuple of the names of all built-in backends."""
132
+ return BACKENDS
133
+
134
+
135
+ def get_available_backends() -> tuple[str, ...]:
136
+ """
137
+ Test for the availability of built-in backends.
138
+
139
+ :return a tuple of the built-in backend names that were successfully imported
140
+
141
+ .. versionadded:: 4.12
142
+
143
+ """
144
+ available_backends: list[str] = []
145
+ for backend_name in get_all_backends():
146
+ try:
147
+ get_async_backend(backend_name)
148
+ except ImportError:
149
+ continue
150
+
151
+ available_backends.append(backend_name)
152
+
153
+ return tuple(available_backends)
154
+
155
+
156
+ def get_cancelled_exc_class() -> type[BaseException]:
157
+ """
158
+ Return the current async library's cancellation exception class.
159
+
160
+ :raises NoEventLoopError: if no supported asynchronous event loop is running in the
161
+ current thread
162
+
163
+ """
164
+ return get_async_backend().cancelled_exception_class()
165
+
166
+
167
+ #
168
+ # Private API
169
+ #
170
+
171
+
172
+ @contextmanager
173
+ def claim_worker_thread(
174
+ backend_class: type[AsyncBackend], token: object
175
+ ) -> Generator[Any, None, None]:
176
+ from ..lowlevel import EventLoopToken
177
+
178
+ threadlocals.current_token = EventLoopToken(backend_class, token)
179
+ try:
180
+ yield
181
+ finally:
182
+ del threadlocals.current_token
183
+
184
+
185
+ def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]:
186
+ if asynclib_name is None:
187
+ asynclib_name = current_async_library()
188
+ if not asynclib_name:
189
+ raise NoEventLoopError(
190
+ f"Not currently running on any asynchronous event loop. "
191
+ f"Available async backends: {', '.join(get_all_backends())}"
192
+ )
193
+
194
+ # We use our own dict instead of sys.modules to get the already imported back-end
195
+ # class because the appropriate modules in sys.modules could potentially be only
196
+ # partially initialized
197
+ try:
198
+ return loaded_backends[asynclib_name]
199
+ except KeyError:
200
+ module = import_module(f"anyio._backends._{asynclib_name}")
201
+ loaded_backends[asynclib_name] = module.backend_class
202
+ return module.backend_class
203
+
204
+
205
+ def current_async_library() -> str | None:
206
+ if sniffio is None:
207
+ # If sniffio is not installed, we assume we're either running asyncio or nothing
208
+ import asyncio
209
+
210
+ try:
211
+ asyncio.get_running_loop()
212
+ return "asyncio"
213
+ except RuntimeError:
214
+ pass
215
+ else:
216
+ try:
217
+ return sniffio.current_async_library()
218
+ except sniffio.AsyncLibraryNotFoundError:
219
+ pass
220
+
221
+ return None
222
+
223
+
224
+ def set_current_async_library(asynclib_name: str | None) -> Token | None:
225
+ # no-op if sniffio is not installed
226
+ if sniffio is None:
227
+ return None
228
+
229
+ return sniffio.current_async_library_cvar.set(asynclib_name)
230
+
231
+
232
+ def reset_current_async_library(token: Token | None) -> None:
233
+ if token is not None:
234
+ sniffio.current_async_library_cvar.reset(token)
.venv/Lib/site-packages/anyio/_core/_exceptions.py ADDED
@@ -0,0 +1,156 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import sys
4
+ from collections.abc import Generator
5
+ from textwrap import dedent
6
+ from typing import Any
7
+
8
+ if sys.version_info < (3, 11):
9
+ from exceptiongroup import BaseExceptionGroup
10
+
11
+
12
+ class BrokenResourceError(Exception):
13
+ """
14
+ Raised when trying to use a resource that has been rendered unusable due to external
15
+ causes (e.g. a send stream whose peer has disconnected).
16
+ """
17
+
18
+
19
+ class BrokenWorkerProcess(Exception):
20
+ """
21
+ Raised by :meth:`~anyio.to_process.run_sync` if the worker process terminates abruptly or
22
+ otherwise misbehaves.
23
+ """
24
+
25
+
26
+ class BrokenWorkerInterpreter(Exception):
27
+ """
28
+ Raised by :meth:`~anyio.to_interpreter.run_sync` if an unexpected exception is
29
+ raised in the subinterpreter.
30
+ """
31
+
32
+ def __init__(self, excinfo: Any):
33
+ # This was adapted from concurrent.futures.interpreter.ExecutionFailed
34
+ msg = excinfo.formatted
35
+ if not msg:
36
+ if excinfo.type and excinfo.msg:
37
+ msg = f"{excinfo.type.__name__}: {excinfo.msg}"
38
+ else:
39
+ msg = excinfo.type.__name__ or excinfo.msg
40
+
41
+ super().__init__(msg)
42
+ self.excinfo = excinfo
43
+
44
+ def __str__(self) -> str:
45
+ try:
46
+ formatted = self.excinfo.errdisplay
47
+ except Exception:
48
+ return super().__str__()
49
+ else:
50
+ return dedent(
51
+ f"""
52
+ {super().__str__()}
53
+
54
+ Uncaught in the interpreter:
55
+
56
+ {formatted}
57
+ """.strip()
58
+ )
59
+
60
+
61
+ class BusyResourceError(Exception):
62
+ """
63
+ Raised when two tasks are trying to read from or write to the same resource
64
+ concurrently.
65
+ """
66
+
67
+ def __init__(self, action: str):
68
+ super().__init__(f"Another task is already {action} this resource")
69
+
70
+
71
+ class ClosedResourceError(Exception):
72
+ """Raised when trying to use a resource that has been closed."""
73
+
74
+
75
+ class ConnectionFailed(OSError):
76
+ """
77
+ Raised when a connection attempt fails.
78
+
79
+ .. note:: This class inherits from :exc:`OSError` for backwards compatibility.
80
+ """
81
+
82
+
83
+ def iterate_exceptions(
84
+ exception: BaseException,
85
+ ) -> Generator[BaseException, None, None]:
86
+ if isinstance(exception, BaseExceptionGroup):
87
+ for exc in exception.exceptions:
88
+ yield from iterate_exceptions(exc)
89
+ else:
90
+ yield exception
91
+
92
+
93
+ class DelimiterNotFound(Exception):
94
+ """
95
+ Raised during
96
+ :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
97
+ maximum number of bytes has been read without the delimiter being found.
98
+ """
99
+
100
+ def __init__(self, max_bytes: int) -> None:
101
+ super().__init__(
102
+ f"The delimiter was not found among the first {max_bytes} bytes"
103
+ )
104
+
105
+
106
+ class EndOfStream(Exception):
107
+ """
108
+ Raised when trying to read from a stream that has been closed from the other end.
109
+ """
110
+
111
+
112
+ class IncompleteRead(Exception):
113
+ """
114
+ Raised during
115
+ :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or
116
+ :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
117
+ connection is closed before the requested amount of bytes has been read.
118
+ """
119
+
120
+ def __init__(self) -> None:
121
+ super().__init__(
122
+ "The stream was closed before the read operation could be completed"
123
+ )
124
+
125
+
126
+ class TypedAttributeLookupError(LookupError):
127
+ """
128
+ Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute
129
+ is not found and no default value has been given.
130
+ """
131
+
132
+
133
+ class WouldBlock(Exception):
134
+ """Raised by ``X_nowait`` functions if ``X()`` would block."""
135
+
136
+
137
+ class NoEventLoopError(RuntimeError):
138
+ """
139
+ Raised by several functions that require an event loop to be running in the current
140
+ thread when there is no running event loop.
141
+
142
+ This is also raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync`
143
+ if not calling from an AnyIO worker thread, and no ``token`` was passed.
144
+ """
145
+
146
+
147
+ class RunFinishedError(RuntimeError):
148
+ """
149
+ Raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync` if the event
150
+ loop associated with the explicitly passed token has already finished.
151
+ """
152
+
153
+ def __init__(self) -> None:
154
+ super().__init__(
155
+ "The event loop associated with the given token has already finished"
156
+ )
.venv/Lib/site-packages/anyio/_core/_fileio.py ADDED
@@ -0,0 +1,797 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ import pathlib
5
+ import sys
6
+ from collections.abc import (
7
+ AsyncIterator,
8
+ Callable,
9
+ Iterable,
10
+ Iterator,
11
+ Sequence,
12
+ )
13
+ from dataclasses import dataclass
14
+ from functools import partial
15
+ from os import PathLike
16
+ from typing import (
17
+ IO,
18
+ TYPE_CHECKING,
19
+ Any,
20
+ AnyStr,
21
+ ClassVar,
22
+ Final,
23
+ Generic,
24
+ overload,
25
+ )
26
+
27
+ from .. import to_thread
28
+ from ..abc import AsyncResource
29
+
30
+ if TYPE_CHECKING:
31
+ from types import ModuleType
32
+
33
+ from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer
34
+ else:
35
+ ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object
36
+
37
+
38
+ class AsyncFile(AsyncResource, Generic[AnyStr]):
39
+ """
40
+ An asynchronous file object.
41
+
42
+ This class wraps a standard file object and provides async friendly versions of the
43
+ following blocking methods (where available on the original file object):
44
+
45
+ * read
46
+ * read1
47
+ * readline
48
+ * readlines
49
+ * readinto
50
+ * readinto1
51
+ * write
52
+ * writelines
53
+ * truncate
54
+ * seek
55
+ * tell
56
+ * flush
57
+
58
+ All other methods are directly passed through.
59
+
60
+ This class supports the asynchronous context manager protocol which closes the
61
+ underlying file at the end of the context block.
62
+
63
+ This class also supports asynchronous iteration::
64
+
65
+ async with await open_file(...) as f:
66
+ async for line in f:
67
+ print(line)
68
+ """
69
+
70
+ def __init__(self, fp: IO[AnyStr]) -> None:
71
+ self._fp: Any = fp
72
+
73
+ def __getattr__(self, name: str) -> object:
74
+ return getattr(self._fp, name)
75
+
76
+ @property
77
+ def wrapped(self) -> IO[AnyStr]:
78
+ """The wrapped file object."""
79
+ return self._fp
80
+
81
+ async def __aiter__(self) -> AsyncIterator[AnyStr]:
82
+ while True:
83
+ line = await self.readline()
84
+ if line:
85
+ yield line
86
+ else:
87
+ break
88
+
89
+ async def aclose(self) -> None:
90
+ return await to_thread.run_sync(self._fp.close)
91
+
92
+ async def read(self, size: int = -1) -> AnyStr:
93
+ return await to_thread.run_sync(self._fp.read, size)
94
+
95
+ async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes:
96
+ return await to_thread.run_sync(self._fp.read1, size)
97
+
98
+ async def readline(self) -> AnyStr:
99
+ return await to_thread.run_sync(self._fp.readline)
100
+
101
+ async def readlines(self) -> list[AnyStr]:
102
+ return await to_thread.run_sync(self._fp.readlines)
103
+
104
+ async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> int:
105
+ return await to_thread.run_sync(self._fp.readinto, b)
106
+
107
+ async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> int:
108
+ return await to_thread.run_sync(self._fp.readinto1, b)
109
+
110
+ @overload
111
+ async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: ...
112
+
113
+ @overload
114
+ async def write(self: AsyncFile[str], b: str) -> int: ...
115
+
116
+ async def write(self, b: ReadableBuffer | str) -> int:
117
+ return await to_thread.run_sync(self._fp.write, b)
118
+
119
+ @overload
120
+ async def writelines(
121
+ self: AsyncFile[bytes], lines: Iterable[ReadableBuffer]
122
+ ) -> None: ...
123
+
124
+ @overload
125
+ async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: ...
126
+
127
+ async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None:
128
+ return await to_thread.run_sync(self._fp.writelines, lines)
129
+
130
+ async def truncate(self, size: int | None = None) -> int:
131
+ return await to_thread.run_sync(self._fp.truncate, size)
132
+
133
+ async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int:
134
+ return await to_thread.run_sync(self._fp.seek, offset, whence)
135
+
136
+ async def tell(self) -> int:
137
+ return await to_thread.run_sync(self._fp.tell)
138
+
139
+ async def flush(self) -> None:
140
+ return await to_thread.run_sync(self._fp.flush)
141
+
142
+
143
+ @overload
144
+ async def open_file(
145
+ file: str | PathLike[str] | int,
146
+ mode: OpenBinaryMode,
147
+ buffering: int = ...,
148
+ encoding: str | None = ...,
149
+ errors: str | None = ...,
150
+ newline: str | None = ...,
151
+ closefd: bool = ...,
152
+ opener: Callable[[str, int], int] | None = ...,
153
+ ) -> AsyncFile[bytes]: ...
154
+
155
+
156
+ @overload
157
+ async def open_file(
158
+ file: str | PathLike[str] | int,
159
+ mode: OpenTextMode = ...,
160
+ buffering: int = ...,
161
+ encoding: str | None = ...,
162
+ errors: str | None = ...,
163
+ newline: str | None = ...,
164
+ closefd: bool = ...,
165
+ opener: Callable[[str, int], int] | None = ...,
166
+ ) -> AsyncFile[str]: ...
167
+
168
+
169
+ async def open_file(
170
+ file: str | PathLike[str] | int,
171
+ mode: str = "r",
172
+ buffering: int = -1,
173
+ encoding: str | None = None,
174
+ errors: str | None = None,
175
+ newline: str | None = None,
176
+ closefd: bool = True,
177
+ opener: Callable[[str, int], int] | None = None,
178
+ ) -> AsyncFile[Any]:
179
+ """
180
+ Open a file asynchronously.
181
+
182
+ The arguments are exactly the same as for the builtin :func:`open`.
183
+
184
+ :return: an asynchronous file object
185
+
186
+ """
187
+ fp = await to_thread.run_sync(
188
+ open, file, mode, buffering, encoding, errors, newline, closefd, opener
189
+ )
190
+ return AsyncFile(fp)
191
+
192
+
193
+ def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]:
194
+ """
195
+ Wrap an existing file as an asynchronous file.
196
+
197
+ :param file: an existing file-like object
198
+ :return: an asynchronous file object
199
+
200
+ """
201
+ return AsyncFile(file)
202
+
203
+
204
+ @dataclass(eq=False)
205
+ class _PathIterator(AsyncIterator["Path"]):
206
+ iterator: Iterator[PathLike[str]]
207
+
208
+ async def __anext__(self) -> Path:
209
+ nextval = await to_thread.run_sync(
210
+ next, self.iterator, None, abandon_on_cancel=True
211
+ )
212
+ if nextval is None:
213
+ raise StopAsyncIteration from None
214
+
215
+ return Path(nextval)
216
+
217
+
218
+ class Path:
219
+ """
220
+ An asynchronous version of :class:`pathlib.Path`.
221
+
222
+ This class cannot be substituted for :class:`pathlib.Path` or
223
+ :class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike`
224
+ interface.
225
+
226
+ It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for
227
+ the deprecated :meth:`~pathlib.Path.link_to` method.
228
+
229
+ Some methods may be unavailable or have limited functionality, based on the Python
230
+ version:
231
+
232
+ * :meth:`~pathlib.Path.copy` (available on Python 3.14 or later)
233
+ * :meth:`~pathlib.Path.copy_into` (available on Python 3.14 or later)
234
+ * :meth:`~pathlib.Path.from_uri` (available on Python 3.13 or later)
235
+ * :meth:`~pathlib.PurePath.full_match` (available on Python 3.13 or later)
236
+ * :attr:`~pathlib.Path.info` (available on Python 3.14 or later)
237
+ * :meth:`~pathlib.Path.is_junction` (available on Python 3.12 or later)
238
+ * :meth:`~pathlib.PurePath.match` (the ``case_sensitive`` parameter is only
239
+ available on Python 3.13 or later)
240
+ * :meth:`~pathlib.Path.move` (available on Python 3.14 or later)
241
+ * :meth:`~pathlib.Path.move_into` (available on Python 3.14 or later)
242
+ * :meth:`~pathlib.PurePath.relative_to` (the ``walk_up`` parameter is only available
243
+ on Python 3.12 or later)
244
+ * :meth:`~pathlib.Path.walk` (available on Python 3.12 or later)
245
+
246
+ Any methods that do disk I/O need to be awaited on. These methods are:
247
+
248
+ * :meth:`~pathlib.Path.absolute`
249
+ * :meth:`~pathlib.Path.chmod`
250
+ * :meth:`~pathlib.Path.cwd`
251
+ * :meth:`~pathlib.Path.exists`
252
+ * :meth:`~pathlib.Path.expanduser`
253
+ * :meth:`~pathlib.Path.group`
254
+ * :meth:`~pathlib.Path.hardlink_to`
255
+ * :meth:`~pathlib.Path.home`
256
+ * :meth:`~pathlib.Path.is_block_device`
257
+ * :meth:`~pathlib.Path.is_char_device`
258
+ * :meth:`~pathlib.Path.is_dir`
259
+ * :meth:`~pathlib.Path.is_fifo`
260
+ * :meth:`~pathlib.Path.is_file`
261
+ * :meth:`~pathlib.Path.is_junction`
262
+ * :meth:`~pathlib.Path.is_mount`
263
+ * :meth:`~pathlib.Path.is_socket`
264
+ * :meth:`~pathlib.Path.is_symlink`
265
+ * :meth:`~pathlib.Path.lchmod`
266
+ * :meth:`~pathlib.Path.lstat`
267
+ * :meth:`~pathlib.Path.mkdir`
268
+ * :meth:`~pathlib.Path.open`
269
+ * :meth:`~pathlib.Path.owner`
270
+ * :meth:`~pathlib.Path.read_bytes`
271
+ * :meth:`~pathlib.Path.read_text`
272
+ * :meth:`~pathlib.Path.readlink`
273
+ * :meth:`~pathlib.Path.rename`
274
+ * :meth:`~pathlib.Path.replace`
275
+ * :meth:`~pathlib.Path.resolve`
276
+ * :meth:`~pathlib.Path.rmdir`
277
+ * :meth:`~pathlib.Path.samefile`
278
+ * :meth:`~pathlib.Path.stat`
279
+ * :meth:`~pathlib.Path.symlink_to`
280
+ * :meth:`~pathlib.Path.touch`
281
+ * :meth:`~pathlib.Path.unlink`
282
+ * :meth:`~pathlib.Path.walk`
283
+ * :meth:`~pathlib.Path.write_bytes`
284
+ * :meth:`~pathlib.Path.write_text`
285
+
286
+ Additionally, the following methods return an async iterator yielding
287
+ :class:`~.Path` objects:
288
+
289
+ * :meth:`~pathlib.Path.glob`
290
+ * :meth:`~pathlib.Path.iterdir`
291
+ * :meth:`~pathlib.Path.rglob`
292
+ """
293
+
294
+ __slots__ = "_path", "__weakref__"
295
+
296
+ __weakref__: Any
297
+
298
+ def __init__(self, *args: str | PathLike[str]) -> None:
299
+ self._path: Final[pathlib.Path] = pathlib.Path(*args)
300
+
301
+ def __fspath__(self) -> str:
302
+ return self._path.__fspath__()
303
+
304
+ def __str__(self) -> str:
305
+ return self._path.__str__()
306
+
307
+ def __repr__(self) -> str:
308
+ return f"{self.__class__.__name__}({self.as_posix()!r})"
309
+
310
+ def __bytes__(self) -> bytes:
311
+ return self._path.__bytes__()
312
+
313
+ def __hash__(self) -> int:
314
+ return self._path.__hash__()
315
+
316
+ def __eq__(self, other: object) -> bool:
317
+ target = other._path if isinstance(other, Path) else other
318
+ return self._path.__eq__(target)
319
+
320
+ def __lt__(self, other: pathlib.PurePath | Path) -> bool:
321
+ target = other._path if isinstance(other, Path) else other
322
+ return self._path.__lt__(target)
323
+
324
+ def __le__(self, other: pathlib.PurePath | Path) -> bool:
325
+ target = other._path if isinstance(other, Path) else other
326
+ return self._path.__le__(target)
327
+
328
+ def __gt__(self, other: pathlib.PurePath | Path) -> bool:
329
+ target = other._path if isinstance(other, Path) else other
330
+ return self._path.__gt__(target)
331
+
332
+ def __ge__(self, other: pathlib.PurePath | Path) -> bool:
333
+ target = other._path if isinstance(other, Path) else other
334
+ return self._path.__ge__(target)
335
+
336
+ def __truediv__(self, other: str | PathLike[str]) -> Path:
337
+ return Path(self._path / other)
338
+
339
+ def __rtruediv__(self, other: str | PathLike[str]) -> Path:
340
+ return Path(other) / self
341
+
342
+ @property
343
+ def parts(self) -> tuple[str, ...]:
344
+ return self._path.parts
345
+
346
+ @property
347
+ def drive(self) -> str:
348
+ return self._path.drive
349
+
350
+ @property
351
+ def root(self) -> str:
352
+ return self._path.root
353
+
354
+ @property
355
+ def anchor(self) -> str:
356
+ return self._path.anchor
357
+
358
+ @property
359
+ def parents(self) -> Sequence[Path]:
360
+ return tuple(Path(p) for p in self._path.parents)
361
+
362
+ @property
363
+ def parent(self) -> Path:
364
+ return Path(self._path.parent)
365
+
366
+ @property
367
+ def name(self) -> str:
368
+ return self._path.name
369
+
370
+ @property
371
+ def suffix(self) -> str:
372
+ return self._path.suffix
373
+
374
+ @property
375
+ def suffixes(self) -> list[str]:
376
+ return self._path.suffixes
377
+
378
+ @property
379
+ def stem(self) -> str:
380
+ return self._path.stem
381
+
382
+ async def absolute(self) -> Path:
383
+ path = await to_thread.run_sync(self._path.absolute)
384
+ return Path(path)
385
+
386
+ def as_posix(self) -> str:
387
+ return self._path.as_posix()
388
+
389
+ def as_uri(self) -> str:
390
+ return self._path.as_uri()
391
+
392
+ if sys.version_info >= (3, 13):
393
+ parser: ClassVar[ModuleType] = pathlib.Path.parser
394
+
395
+ @classmethod
396
+ def from_uri(cls, uri: str) -> Path:
397
+ return Path(pathlib.Path.from_uri(uri))
398
+
399
+ def full_match(
400
+ self, path_pattern: str, *, case_sensitive: bool | None = None
401
+ ) -> bool:
402
+ return self._path.full_match(path_pattern, case_sensitive=case_sensitive)
403
+
404
+ def match(
405
+ self, path_pattern: str, *, case_sensitive: bool | None = None
406
+ ) -> bool:
407
+ return self._path.match(path_pattern, case_sensitive=case_sensitive)
408
+ else:
409
+
410
+ def match(self, path_pattern: str) -> bool:
411
+ return self._path.match(path_pattern)
412
+
413
+ if sys.version_info >= (3, 14):
414
+
415
+ @property
416
+ def info(self) -> Any: # TODO: add return type annotation when Typeshed gets it
417
+ return self._path.info
418
+
419
+ async def copy(
420
+ self,
421
+ target: str | os.PathLike[str],
422
+ *,
423
+ follow_symlinks: bool = True,
424
+ preserve_metadata: bool = False,
425
+ ) -> Path:
426
+ func = partial(
427
+ self._path.copy,
428
+ follow_symlinks=follow_symlinks,
429
+ preserve_metadata=preserve_metadata,
430
+ )
431
+ return Path(await to_thread.run_sync(func, pathlib.Path(target)))
432
+
433
+ async def copy_into(
434
+ self,
435
+ target_dir: str | os.PathLike[str],
436
+ *,
437
+ follow_symlinks: bool = True,
438
+ preserve_metadata: bool = False,
439
+ ) -> Path:
440
+ func = partial(
441
+ self._path.copy_into,
442
+ follow_symlinks=follow_symlinks,
443
+ preserve_metadata=preserve_metadata,
444
+ )
445
+ return Path(await to_thread.run_sync(func, pathlib.Path(target_dir)))
446
+
447
+ async def move(self, target: str | os.PathLike[str]) -> Path:
448
+ # Upstream does not handle anyio.Path properly as a PathLike
449
+ target = pathlib.Path(target)
450
+ return Path(await to_thread.run_sync(self._path.move, target))
451
+
452
+ async def move_into(
453
+ self,
454
+ target_dir: str | os.PathLike[str],
455
+ ) -> Path:
456
+ return Path(await to_thread.run_sync(self._path.move_into, target_dir))
457
+
458
+ def is_relative_to(self, other: str | PathLike[str]) -> bool:
459
+ try:
460
+ self.relative_to(other)
461
+ return True
462
+ except ValueError:
463
+ return False
464
+
465
+ async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None:
466
+ func = partial(os.chmod, follow_symlinks=follow_symlinks)
467
+ return await to_thread.run_sync(func, self._path, mode)
468
+
469
+ @classmethod
470
+ async def cwd(cls) -> Path:
471
+ path = await to_thread.run_sync(pathlib.Path.cwd)
472
+ return cls(path)
473
+
474
+ async def exists(self) -> bool:
475
+ return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True)
476
+
477
+ async def expanduser(self) -> Path:
478
+ return Path(
479
+ await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True)
480
+ )
481
+
482
+ if sys.version_info < (3, 12):
483
+ # Python 3.11 and earlier
484
+ def glob(self, pattern: str) -> AsyncIterator[Path]:
485
+ gen = self._path.glob(pattern)
486
+ return _PathIterator(gen)
487
+ elif (3, 12) <= sys.version_info < (3, 13):
488
+ # changed in Python 3.12:
489
+ # - The case_sensitive parameter was added.
490
+ def glob(
491
+ self,
492
+ pattern: str,
493
+ *,
494
+ case_sensitive: bool | None = None,
495
+ ) -> AsyncIterator[Path]:
496
+ gen = self._path.glob(pattern, case_sensitive=case_sensitive)
497
+ return _PathIterator(gen)
498
+ elif sys.version_info >= (3, 13):
499
+ # Changed in Python 3.13:
500
+ # - The recurse_symlinks parameter was added.
501
+ # - The pattern parameter accepts a path-like object.
502
+ def glob( # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block
503
+ self,
504
+ pattern: str | PathLike[str],
505
+ *,
506
+ case_sensitive: bool | None = None,
507
+ recurse_symlinks: bool = False,
508
+ ) -> AsyncIterator[Path]:
509
+ gen = self._path.glob(
510
+ pattern, # type: ignore[arg-type]
511
+ case_sensitive=case_sensitive,
512
+ recurse_symlinks=recurse_symlinks,
513
+ )
514
+ return _PathIterator(gen)
515
+
516
+ async def group(self) -> str:
517
+ return await to_thread.run_sync(self._path.group, abandon_on_cancel=True)
518
+
519
+ async def hardlink_to(
520
+ self, target: str | bytes | PathLike[str] | PathLike[bytes]
521
+ ) -> None:
522
+ if isinstance(target, Path):
523
+ target = target._path
524
+
525
+ await to_thread.run_sync(os.link, target, self)
526
+
527
+ @classmethod
528
+ async def home(cls) -> Path:
529
+ home_path = await to_thread.run_sync(pathlib.Path.home)
530
+ return cls(home_path)
531
+
532
+ def is_absolute(self) -> bool:
533
+ return self._path.is_absolute()
534
+
535
+ async def is_block_device(self) -> bool:
536
+ return await to_thread.run_sync(
537
+ self._path.is_block_device, abandon_on_cancel=True
538
+ )
539
+
540
+ async def is_char_device(self) -> bool:
541
+ return await to_thread.run_sync(
542
+ self._path.is_char_device, abandon_on_cancel=True
543
+ )
544
+
545
+ async def is_dir(self) -> bool:
546
+ return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True)
547
+
548
+ async def is_fifo(self) -> bool:
549
+ return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True)
550
+
551
+ async def is_file(self) -> bool:
552
+ return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True)
553
+
554
+ if sys.version_info >= (3, 12):
555
+
556
+ async def is_junction(self) -> bool:
557
+ return await to_thread.run_sync(self._path.is_junction)
558
+
559
+ async def is_mount(self) -> bool:
560
+ return await to_thread.run_sync(
561
+ os.path.ismount, self._path, abandon_on_cancel=True
562
+ )
563
+
564
+ def is_reserved(self) -> bool:
565
+ return self._path.is_reserved()
566
+
567
+ async def is_socket(self) -> bool:
568
+ return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True)
569
+
570
+ async def is_symlink(self) -> bool:
571
+ return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True)
572
+
573
+ async def iterdir(self) -> AsyncIterator[Path]:
574
+ gen = (
575
+ self._path.iterdir()
576
+ if sys.version_info < (3, 13)
577
+ else await to_thread.run_sync(self._path.iterdir, abandon_on_cancel=True)
578
+ )
579
+ async for path in _PathIterator(gen):
580
+ yield path
581
+
582
+ def joinpath(self, *args: str | PathLike[str]) -> Path:
583
+ return Path(self._path.joinpath(*args))
584
+
585
+ async def lchmod(self, mode: int) -> None:
586
+ await to_thread.run_sync(self._path.lchmod, mode)
587
+
588
+ async def lstat(self) -> os.stat_result:
589
+ return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True)
590
+
591
+ async def mkdir(
592
+ self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False
593
+ ) -> None:
594
+ await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok)
595
+
596
+ @overload
597
+ async def open(
598
+ self,
599
+ mode: OpenBinaryMode,
600
+ buffering: int = ...,
601
+ encoding: str | None = ...,
602
+ errors: str | None = ...,
603
+ newline: str | None = ...,
604
+ ) -> AsyncFile[bytes]: ...
605
+
606
+ @overload
607
+ async def open(
608
+ self,
609
+ mode: OpenTextMode = ...,
610
+ buffering: int = ...,
611
+ encoding: str | None = ...,
612
+ errors: str | None = ...,
613
+ newline: str | None = ...,
614
+ ) -> AsyncFile[str]: ...
615
+
616
+ async def open(
617
+ self,
618
+ mode: str = "r",
619
+ buffering: int = -1,
620
+ encoding: str | None = None,
621
+ errors: str | None = None,
622
+ newline: str | None = None,
623
+ ) -> AsyncFile[Any]:
624
+ fp = await to_thread.run_sync(
625
+ self._path.open, mode, buffering, encoding, errors, newline
626
+ )
627
+ return AsyncFile(fp)
628
+
629
+ async def owner(self) -> str:
630
+ return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True)
631
+
632
+ async def read_bytes(self) -> bytes:
633
+ return await to_thread.run_sync(self._path.read_bytes)
634
+
635
+ async def read_text(
636
+ self, encoding: str | None = None, errors: str | None = None
637
+ ) -> str:
638
+ return await to_thread.run_sync(self._path.read_text, encoding, errors)
639
+
640
+ if sys.version_info >= (3, 12):
641
+
642
+ def relative_to(
643
+ self, *other: str | PathLike[str], walk_up: bool = False
644
+ ) -> Path:
645
+ # relative_to() should work with any PathLike but it doesn't
646
+ others = [pathlib.Path(other) for other in other]
647
+ return Path(self._path.relative_to(*others, walk_up=walk_up))
648
+
649
+ else:
650
+
651
+ def relative_to(self, *other: str | PathLike[str]) -> Path:
652
+ return Path(self._path.relative_to(*other))
653
+
654
+ async def readlink(self) -> Path:
655
+ target = await to_thread.run_sync(os.readlink, self._path)
656
+ return Path(target)
657
+
658
+ async def rename(self, target: str | pathlib.PurePath | Path) -> Path:
659
+ if isinstance(target, Path):
660
+ target = target._path
661
+
662
+ await to_thread.run_sync(self._path.rename, target)
663
+ return Path(target)
664
+
665
+ async def replace(self, target: str | pathlib.PurePath | Path) -> Path:
666
+ if isinstance(target, Path):
667
+ target = target._path
668
+
669
+ await to_thread.run_sync(self._path.replace, target)
670
+ return Path(target)
671
+
672
+ async def resolve(self, strict: bool = False) -> Path:
673
+ func = partial(self._path.resolve, strict=strict)
674
+ return Path(await to_thread.run_sync(func, abandon_on_cancel=True))
675
+
676
+ if sys.version_info < (3, 12):
677
+ # Pre Python 3.12
678
+ def rglob(self, pattern: str) -> AsyncIterator[Path]:
679
+ gen = self._path.rglob(pattern)
680
+ return _PathIterator(gen)
681
+ elif (3, 12) <= sys.version_info < (3, 13):
682
+ # Changed in Python 3.12:
683
+ # - The case_sensitive parameter was added.
684
+ def rglob(
685
+ self, pattern: str, *, case_sensitive: bool | None = None
686
+ ) -> AsyncIterator[Path]:
687
+ gen = self._path.rglob(pattern, case_sensitive=case_sensitive)
688
+ return _PathIterator(gen)
689
+ elif sys.version_info >= (3, 13):
690
+ # Changed in Python 3.13:
691
+ # - The recurse_symlinks parameter was added.
692
+ # - The pattern parameter accepts a path-like object.
693
+ def rglob( # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block
694
+ self,
695
+ pattern: str | PathLike[str],
696
+ *,
697
+ case_sensitive: bool | None = None,
698
+ recurse_symlinks: bool = False,
699
+ ) -> AsyncIterator[Path]:
700
+ gen = self._path.rglob(
701
+ pattern, # type: ignore[arg-type]
702
+ case_sensitive=case_sensitive,
703
+ recurse_symlinks=recurse_symlinks,
704
+ )
705
+ return _PathIterator(gen)
706
+
707
+ async def rmdir(self) -> None:
708
+ await to_thread.run_sync(self._path.rmdir)
709
+
710
+ async def samefile(self, other_path: str | PathLike[str]) -> bool:
711
+ if isinstance(other_path, Path):
712
+ other_path = other_path._path
713
+
714
+ return await to_thread.run_sync(
715
+ self._path.samefile, other_path, abandon_on_cancel=True
716
+ )
717
+
718
+ async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result:
719
+ func = partial(os.stat, follow_symlinks=follow_symlinks)
720
+ return await to_thread.run_sync(func, self._path, abandon_on_cancel=True)
721
+
722
+ async def symlink_to(
723
+ self,
724
+ target: str | bytes | PathLike[str] | PathLike[bytes],
725
+ target_is_directory: bool = False,
726
+ ) -> None:
727
+ if isinstance(target, Path):
728
+ target = target._path
729
+
730
+ await to_thread.run_sync(self._path.symlink_to, target, target_is_directory)
731
+
732
+ async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None:
733
+ await to_thread.run_sync(self._path.touch, mode, exist_ok)
734
+
735
+ async def unlink(self, missing_ok: bool = False) -> None:
736
+ try:
737
+ await to_thread.run_sync(self._path.unlink)
738
+ except FileNotFoundError:
739
+ if not missing_ok:
740
+ raise
741
+
742
+ if sys.version_info >= (3, 12):
743
+
744
+ async def walk(
745
+ self,
746
+ top_down: bool = True,
747
+ on_error: Callable[[OSError], object] | None = None,
748
+ follow_symlinks: bool = False,
749
+ ) -> AsyncIterator[tuple[Path, list[str], list[str]]]:
750
+ def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None:
751
+ try:
752
+ return next(gen)
753
+ except StopIteration:
754
+ return None
755
+
756
+ gen = self._path.walk(top_down, on_error, follow_symlinks)
757
+ while True:
758
+ value = await to_thread.run_sync(get_next_value)
759
+ if value is None:
760
+ return
761
+
762
+ root, dirs, paths = value
763
+ yield Path(root), dirs, paths
764
+
765
+ def with_name(self, name: str) -> Path:
766
+ return Path(self._path.with_name(name))
767
+
768
+ def with_stem(self, stem: str) -> Path:
769
+ return Path(self._path.with_name(stem + self._path.suffix))
770
+
771
+ def with_suffix(self, suffix: str) -> Path:
772
+ return Path(self._path.with_suffix(suffix))
773
+
774
+ def with_segments(self, *pathsegments: str | PathLike[str]) -> Path:
775
+ return Path(*pathsegments)
776
+
777
+ async def write_bytes(self, data: bytes) -> int:
778
+ return await to_thread.run_sync(self._path.write_bytes, data)
779
+
780
+ async def write_text(
781
+ self,
782
+ data: str,
783
+ encoding: str | None = None,
784
+ errors: str | None = None,
785
+ newline: str | None = None,
786
+ ) -> int:
787
+ # Path.write_text() does not support the "newline" parameter before Python 3.10
788
+ def sync_write_text() -> int:
789
+ with self._path.open(
790
+ "w", encoding=encoding, errors=errors, newline=newline
791
+ ) as fp:
792
+ return fp.write(data)
793
+
794
+ return await to_thread.run_sync(sync_write_text)
795
+
796
+
797
+ PathLike.register(Path)
.venv/Lib/site-packages/anyio/_core/_resources.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from ..abc import AsyncResource
4
+ from ._tasks import CancelScope
5
+
6
+
7
async def aclose_forcefully(resource: AsyncResource) -> None:
    """
    Close an asynchronous resource without waiting on anything.

    The resource's ``aclose()`` runs inside an already-cancelled scope, so any
    blocking cleanup it would otherwise perform is skipped.

    :param resource: the resource to close

    """
    scope = CancelScope()
    with scope:
        scope.cancel()
        await resource.aclose()
.venv/Lib/site-packages/anyio/_core/_signals.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from collections.abc import AsyncIterator
4
+ from contextlib import AbstractContextManager
5
+ from signal import Signals
6
+
7
+ from ._eventloop import get_async_backend
8
+
9
+
10
def open_signal_receiver(
    *signals: Signals,
) -> AbstractContextManager[AsyncIterator[Signals]]:
    """
    Start receiving operating system signals.

    :param signals: signals to receive (e.g. ``signal.SIGINT``)
    :return: an asynchronous context manager for an asynchronous iterator which yields
        signal numbers
    :raises NoEventLoopError: if no supported asynchronous event loop is running in the
        current thread

    .. warning:: Windows does not support signals natively so it is best to avoid
        relying on this in cross-platform applications.

    .. warning:: On asyncio, this permanently replaces any previous signal handler for
        the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`.

    """
    backend = get_async_backend()
    return backend.open_signal_receiver(*signals)
.venv/Lib/site-packages/anyio/_core/_sockets.py ADDED
@@ -0,0 +1,1003 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import errno
4
+ import os
5
+ import socket
6
+ import ssl
7
+ import stat
8
+ import sys
9
+ from collections.abc import Awaitable
10
+ from dataclasses import dataclass
11
+ from ipaddress import IPv4Address, IPv6Address, ip_address
12
+ from os import PathLike, chmod
13
+ from socket import AddressFamily, SocketKind
14
+ from typing import TYPE_CHECKING, Any, Literal, cast, overload
15
+
16
+ from .. import ConnectionFailed, to_thread
17
+ from ..abc import (
18
+ ByteStreamConnectable,
19
+ ConnectedUDPSocket,
20
+ ConnectedUNIXDatagramSocket,
21
+ IPAddressType,
22
+ IPSockAddrType,
23
+ SocketListener,
24
+ SocketStream,
25
+ UDPSocket,
26
+ UNIXDatagramSocket,
27
+ UNIXSocketStream,
28
+ )
29
+ from ..streams.stapled import MultiListener
30
+ from ..streams.tls import TLSConnectable, TLSStream
31
+ from ._eventloop import get_async_backend
32
+ from ._resources import aclose_forcefully
33
+ from ._synchronization import Event
34
+ from ._tasks import create_task_group, move_on_after
35
+
36
+ if TYPE_CHECKING:
37
+ from _typeshed import FileDescriptorLike
38
+ else:
39
+ FileDescriptorLike = object
40
+
41
+ if sys.version_info < (3, 11):
42
+ from exceptiongroup import ExceptionGroup
43
+
44
+ if sys.version_info >= (3, 12):
45
+ from typing import override
46
+ else:
47
+ from typing_extensions import override
48
+
49
+ if sys.version_info < (3, 13):
50
+ from typing_extensions import deprecated
51
+ else:
52
+ from warnings import deprecated
53
+
54
+ IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515
55
+
56
+ AnyIPAddressFamily = Literal[
57
+ AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6
58
+ ]
59
+ IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6]
60
+
61
+
62
# The overloads below narrow the return type for static checkers: any
# combination of arguments that enables TLS yields a TLSStream, otherwise
# a plain SocketStream.

# tls_hostname given
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    ssl_context: ssl.SSLContext | None = ...,
    tls_standard_compatible: bool = ...,
    tls_hostname: str,
    happy_eyeballs_delay: float = ...,
) -> TLSStream: ...


# ssl_context given
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    ssl_context: ssl.SSLContext,
    tls_standard_compatible: bool = ...,
    tls_hostname: str | None = ...,
    happy_eyeballs_delay: float = ...,
) -> TLSStream: ...


# tls=True
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    tls: Literal[True],
    ssl_context: ssl.SSLContext | None = ...,
    tls_standard_compatible: bool = ...,
    tls_hostname: str | None = ...,
    happy_eyeballs_delay: float = ...,
) -> TLSStream: ...


# tls=False
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    tls: Literal[False],
    ssl_context: ssl.SSLContext | None = ...,
    tls_standard_compatible: bool = ...,
    tls_hostname: str | None = ...,
    happy_eyeballs_delay: float = ...,
) -> SocketStream: ...


# No TLS arguments
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    happy_eyeballs_delay: float = ...,
) -> SocketStream: ...
129
+
130
+
131
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = None,
    tls: bool = False,
    ssl_context: ssl.SSLContext | None = None,
    tls_standard_compatible: bool = True,
    tls_hostname: str | None = None,
    happy_eyeballs_delay: float = 0.25,
) -> SocketStream | TLSStream:
    """
    Connect to a host using the TCP protocol.

    This function implements the stateless version of the Happy Eyeballs algorithm (RFC
    6555). If ``remote_host`` is a host name that resolves to multiple IP addresses,
    each one is tried until one connection attempt succeeds. If the first attempt does
    not connect within 250 milliseconds, a second attempt is started using the next
    address in the list, and so on. On IPv6 enabled systems, an IPv6 address (if
    available) is tried first.

    When the connection has been established, a TLS handshake will be done if either
    ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.

    :param remote_host: the IP address or host name to connect to
    :param remote_port: port on the target host to connect to
    :param local_host: the interface address or name to bind the socket to before
        connecting
    :param tls: ``True`` to do a TLS handshake with the connected stream and return a
        :class:`~anyio.streams.tls.TLSStream` instead
    :param ssl_context: the SSL context object to use (if omitted, a default context is
        created)
    :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake
        before closing the stream and requires that the server does this as well.
        Otherwise, :exc:`~ssl.SSLEOFError` may be raised during reads from the stream.
        Some protocols, such as HTTP, require this option to be ``False``.
        See :meth:`~ssl.SSLContext.wrap_socket` for details.
    :param tls_hostname: host name to check the server certificate against (defaults to
        the value of ``remote_host``)
    :param happy_eyeballs_delay: delay (in seconds) before starting the next connection
        attempt
    :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream
    :raises ConnectionFailed: if the connection fails

    """
    # Placed here due to https://github.com/python/mypy/issues/7057
    connected_stream: SocketStream | None = None

    async def try_connect(remote_host: str, event: Event) -> None:
        # One connection attempt; the first attempt to succeed wins and
        # cancels all siblings. Losers close their streams.
        nonlocal connected_stream
        try:
            stream = await asynclib.connect_tcp(remote_host, remote_port, local_address)
        except OSError as exc:
            oserrors.append(exc)
            return
        else:
            if connected_stream is None:
                connected_stream = stream
                tg.cancel_scope.cancel()
            else:
                # Another attempt already won the race; discard this one.
                await stream.aclose()
        finally:
            # Always unblock the launcher loop so the next attempt can start
            event.set()

    asynclib = get_async_backend()
    local_address: IPSockAddrType | None = None
    family = socket.AF_UNSPEC
    if local_host:
        gai_res = await getaddrinfo(str(local_host), None)
        family, *_, local_address = gai_res[0]

    target_host = str(remote_host)
    try:
        addr_obj = ip_address(remote_host)
    except ValueError:
        addr_obj = None

    if addr_obj is not None:
        # A literal IP address was given: no name resolution needed
        if isinstance(addr_obj, IPv6Address):
            target_addrs = [(socket.AF_INET6, addr_obj.compressed)]
        else:
            target_addrs = [(socket.AF_INET, addr_obj.compressed)]
    else:
        # getaddrinfo() will raise an exception if name resolution fails
        gai_res = await getaddrinfo(
            target_host, remote_port, family=family, type=socket.SOCK_STREAM
        )

        # Organize the list so that the first address is an IPv6 address (if available)
        # and the second one is an IPv4 addresses. The rest can be in whatever order.
        v6_found = v4_found = False
        target_addrs = []
        for af, *_, sa in gai_res:
            if af == socket.AF_INET6 and not v6_found:
                v6_found = True
                target_addrs.insert(0, (af, sa[0]))
            elif af == socket.AF_INET and not v4_found and v6_found:
                v4_found = True
                target_addrs.insert(1, (af, sa[0]))
            else:
                target_addrs.append((af, sa[0]))

    oserrors: list[OSError] = []
    try:
        async with create_task_group() as tg:
            for _af, addr in target_addrs:
                event = Event()
                tg.start_soon(try_connect, addr, event)
                # Stagger the attempts per RFC 6555: give each attempt
                # happy_eyeballs_delay seconds before launching the next one
                with move_on_after(happy_eyeballs_delay):
                    await event.wait()

        if connected_stream is None:
            # NOTE(review): the docstring advertises ConnectionFailed, but this
            # aggregate failure path raises plain OSError — presumably
            # ConnectionFailed is raised by the backend's connect_tcp or is an
            # OSError subclass; confirm against the rest of the package.
            cause = (
                oserrors[0]
                if len(oserrors) == 1
                else ExceptionGroup("multiple connection attempts failed", oserrors)
            )
            raise OSError("All connection attempts failed") from cause
    finally:
        # Break potential reference cycles through the collected tracebacks
        oserrors.clear()

    if tls or tls_hostname or ssl_context:
        try:
            return await TLSStream.wrap(
                connected_stream,
                server_side=False,
                hostname=tls_hostname or str(remote_host),
                ssl_context=ssl_context,
                standard_compatible=tls_standard_compatible,
            )
        except BaseException:
            # Handshake failed: close the raw stream without waiting
            await aclose_forcefully(connected_stream)
            raise

    return connected_stream
266
+
267
+
268
async def connect_unix(path: str | bytes | PathLike[Any]) -> UNIXSocketStream:
    """
    Connect to the given UNIX socket.

    Not available on Windows.

    :param path: path to the socket
    :return: a socket stream object
    :raises ConnectionFailed: if the connection fails

    """
    fs_path = os.fspath(path)
    backend = get_async_backend()
    return await backend.connect_unix(fs_path)
281
+
282
+
283
async def create_tcp_listener(
    *,
    local_host: IPAddressType | None = None,
    local_port: int = 0,
    family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC,
    backlog: int = 65536,
    reuse_port: bool = False,
) -> MultiListener[SocketStream]:
    """
    Create a TCP socket listener.

    :param local_port: port number to listen on
    :param local_host: IP address of the interface to listen on. If omitted, listen on
        all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address
        family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6.
    :param family: address family (used if ``local_host`` was omitted)
    :param backlog: maximum number of queued incoming connections (up to a maximum of
        2**16, or 65536)
    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
        address/port (not supported on Windows)
    :return: a multi-listener object containing one or more socket listeners
    :raises OSError: if there's an error creating a socket, or binding to one or more
        interfaces failed

    """
    asynclib = get_async_backend()
    # Cap the backlog at the largest value the C-level listen() accepts
    backlog = min(backlog, 65536)
    local_host = str(local_host) if local_host is not None else None

    def setup_raw_socket(
        fam: AddressFamily,
        bind_addr: tuple[str, int] | tuple[str, int, int, int],
        *,
        v6only: bool = True,
    ) -> socket.socket:
        # Create, configure, bind and listen on a non-blocking socket,
        # closing it again on any failure.
        sock = socket.socket(fam)
        try:
            sock.setblocking(False)

            if fam == AddressFamily.AF_INET6:
                sock.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, v6only)

            # For Windows, enable exclusive address use. For others, enable address
            # reuse.
            if sys.platform == "win32":
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
            else:
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

            if reuse_port:
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)

            # Workaround for #554: re-attach the scope ID of IPv6 link-local
            # addresses as the 4th element of the sockaddr tuple
            if fam == socket.AF_INET6 and "%" in bind_addr[0]:
                addr, scope_id = bind_addr[0].split("%", 1)
                bind_addr = (addr, bind_addr[1], 0, int(scope_id))

            sock.bind(bind_addr)
            sock.listen(backlog)
        except BaseException:
            sock.close()
            raise

        return sock

    # We pass type=0 on non-Windows platforms as a workaround for a uvloop bug
    # where we don't get the correct scope ID for IPv6 link-local addresses when passing
    # type=socket.SOCK_STREAM to getaddrinfo():
    # https://github.com/MagicStack/uvloop/issues/539
    gai_res = await getaddrinfo(
        local_host,
        local_port,
        family=family,
        type=socket.SOCK_STREAM if sys.platform == "win32" else 0,
        flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
    )

    # The set comprehension is here to work around a glibc bug:
    # https://sourceware.org/bugzilla/show_bug.cgi?id=14969
    sockaddrs = sorted({res for res in gai_res if res[1] == SocketKind.SOCK_STREAM})

    # Special case for dual-stack binding on the "any" interface: a single
    # IPv6 socket with V6ONLY off serves both families
    if (
        local_host is None
        and family == AddressFamily.AF_UNSPEC
        and socket.has_dualstack_ipv6()
        and any(fam == AddressFamily.AF_INET6 for fam, *_ in gai_res)
    ):
        raw_socket = setup_raw_socket(
            AddressFamily.AF_INET6, ("::", local_port), v6only=False
        )
        listener = asynclib.create_tcp_listener(raw_socket)
        return MultiListener([listener])

    errors: list[OSError] = []
    try:
        # Retry up to len(sockaddrs) times so every address gets a chance to
        # pick the ephemeral port that all the others then reuse
        for _ in range(len(sockaddrs)):
            listeners: list[SocketListener] = []
            bound_ephemeral_port = local_port
            try:
                for fam, *_, sockaddr in sockaddrs:
                    sockaddr = sockaddr[0], bound_ephemeral_port, *sockaddr[2:]
                    raw_socket = setup_raw_socket(fam, sockaddr)

                    # Store the assigned port if an ephemeral port was requested, so
                    # we'll bind to the same port on all interfaces
                    if local_port == 0 and len(gai_res) > 1:
                        bound_ephemeral_port = raw_socket.getsockname()[1]

                    listeners.append(asynclib.create_tcp_listener(raw_socket))
            except BaseException as exc:
                for listener in listeners:
                    await listener.aclose()

                # If an ephemeral port was requested but binding the assigned port
                # failed for another interface, rotate the address list and try again
                if (
                    isinstance(exc, OSError)
                    and exc.errno == errno.EADDRINUSE
                    and local_port == 0
                    and bound_ephemeral_port
                ):
                    errors.append(exc)
                    sockaddrs.append(sockaddrs.pop(0))
                    continue

                raise

            return MultiListener(listeners)

        raise OSError(
            f"Could not create {len(sockaddrs)} listeners with a consistent port"
        ) from ExceptionGroup("Several bind attempts failed", errors)
    finally:
        del errors  # Prevent reference cycles
418
+
419
+
420
async def create_unix_listener(
    path: str | bytes | PathLike[Any],
    *,
    mode: int | None = None,
    backlog: int = 65536,
) -> SocketListener:
    """
    Create a UNIX socket listener.

    Not available on Windows.

    :param path: path of the socket
    :param mode: permissions to set on the socket
    :param backlog: maximum number of queued incoming connections (up to a maximum of
        2**16, or 65536)
    :return: a listener object

    .. versionchanged:: 3.0
        If a socket already exists on the file system in the given path, it will be
        removed first.

    """
    # Cap the backlog at the largest value the C-level listen() accepts
    effective_backlog = min(backlog, 65536)
    raw_socket = await setup_unix_local_socket(path, mode, socket.SOCK_STREAM)
    try:
        raw_socket.listen(effective_backlog)
        return get_async_backend().create_unix_listener(raw_socket)
    except BaseException:
        raw_socket.close()
        raise
450
+
451
+
452
async def create_udp_socket(
    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
    *,
    local_host: IPAddressType | None = None,
    local_port: int = 0,
    reuse_port: bool = False,
) -> UDPSocket:
    """
    Create a UDP socket.

    If ``local_port`` has been given, the socket will be bound to this port on the
    local machine, making this socket suitable for providing UDP based services.

    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically
        determined from ``local_host`` if omitted
    :param local_host: IP address or host name of the local interface to bind to
    :param local_port: local port to bind to
    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
        address/port (not supported on Windows)
    :return: a UDP socket

    """
    if family is AddressFamily.AF_UNSPEC and not local_host:
        raise ValueError('Either "family" or "local_host" must be given')

    if local_host:
        # Resolve the bind address; the resolved entry also pins the family
        gai_res = await getaddrinfo(
            str(local_host),
            local_port,
            family=family,
            type=socket.SOCK_DGRAM,
            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
        )
        family = cast(AnyIPAddressFamily, gai_res[0][0])
        bind_address = gai_res[0][-1]
    else:
        # No local host given: bind to the wildcard address of the family
        wildcard = "::" if family is AddressFamily.AF_INET6 else "0.0.0.0"
        bind_address = (wildcard, 0)

    raw = await get_async_backend().create_udp_socket(
        family, bind_address, None, reuse_port
    )
    return cast(UDPSocket, raw)
496
+
497
+
498
async def create_connected_udp_socket(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
    local_host: IPAddressType | None = None,
    local_port: int = 0,
    reuse_port: bool = False,
) -> ConnectedUDPSocket:
    """
    Create a connected UDP socket.

    Connected UDP sockets can only communicate with the specified remote host/port, and
    any packets sent from other sources are dropped.

    :param remote_host: remote host to set as the default target
    :param remote_port: port on the remote host to set as the default target
    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically
        determined from ``local_host`` or ``remote_host`` if omitted
    :param local_host: IP address or host name of the local interface to bind to
    :param local_port: local port to bind to
    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
        address/port (not supported on Windows)
    :return: a connected UDP socket

    """
    local_address = None
    if local_host:
        # Resolve the local bind address first; this may also pin the family
        # used for resolving the remote address below
        gai_res = await getaddrinfo(
            str(local_host),
            local_port,
            family=family,
            type=socket.SOCK_DGRAM,
            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
        )
        family = cast(AnyIPAddressFamily, gai_res[0][0])
        local_address = gai_res[0][-1]

    gai_res = await getaddrinfo(
        str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM
    )
    family = cast(AnyIPAddressFamily, gai_res[0][0])
    remote_address = gai_res[0][-1]

    sock = await get_async_backend().create_udp_socket(
        family, local_address, remote_address, reuse_port
    )
    return cast(ConnectedUDPSocket, sock)
546
+
547
+
548
async def create_unix_datagram_socket(
    *,
    local_path: None | str | bytes | PathLike[Any] = None,
    local_mode: int | None = None,
) -> UNIXDatagramSocket:
    """
    Create a UNIX datagram socket.

    Not available on Windows.

    If ``local_path`` has been given, the socket will be bound to this path, making this
    socket suitable for receiving datagrams from other processes. Other processes can
    send datagrams to this socket only if ``local_path`` is set.

    If a socket already exists on the file system in the ``local_path``, it will be
    removed first.

    :param local_path: the path on which to bind to
    :param local_mode: permissions to set on the local socket
    :return: a UNIX datagram socket

    """
    raw_socket = await setup_unix_local_socket(
        local_path, local_mode, socket.SOCK_DGRAM
    )
    backend = get_async_backend()
    return await backend.create_unix_datagram_socket(raw_socket, None)
574
+
575
+
576
async def create_connected_unix_datagram_socket(
    remote_path: str | bytes | PathLike[Any],
    *,
    local_path: None | str | bytes | PathLike[Any] = None,
    local_mode: int | None = None,
) -> ConnectedUNIXDatagramSocket:
    """
    Create a connected UNIX datagram socket.

    Connected datagram sockets can only communicate with the specified remote path.

    If ``local_path`` has been given, the socket will be bound to this path, making
    this socket suitable for receiving datagrams from other processes. Other processes
    can send datagrams to this socket only if ``local_path`` is set.

    If a socket already exists on the file system in the ``local_path``, it will be
    removed first.

    :param remote_path: the path to set as the default target
    :param local_path: the path on which to bind to
    :param local_mode: permissions to set on the local socket
    :return: a connected UNIX datagram socket

    """
    fs_remote_path = os.fspath(remote_path)
    raw_socket = await setup_unix_local_socket(
        local_path, local_mode, socket.SOCK_DGRAM
    )
    backend = get_async_backend()
    return await backend.create_unix_datagram_socket(raw_socket, fs_remote_path)
607
+
608
+
609
async def getaddrinfo(
    host: bytes | str | None,
    port: str | int | None,
    *,
    family: int | AddressFamily = 0,
    type: int | SocketKind = 0,
    proto: int = 0,
    flags: int = 0,
) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int]]]:
    """
    Look up a numeric IP address given a host name.

    Internationalized domain names are translated according to the (non-transitional)
    IDNA 2008 standard.

    .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
        (host, port), unlike what :func:`socket.getaddrinfo` does.

    :param host: host name
    :param port: port number
    :param family: socket family (``AF_INET``, ...)
    :param type: socket type (``SOCK_STREAM``, ...)
    :param proto: protocol number
    :param flags: flags to pass to upstream ``getaddrinfo()``
    :return: list of tuples containing (family, type, proto, canonname, sockaddr)

    .. seealso:: :func:`socket.getaddrinfo`

    """
    # Handle unicode hostnames
    if isinstance(host, str):
        try:
            # Fast path: pure ASCII host names need no IDNA processing
            encoded_host: bytes | None = host.encode("ascii")
        except UnicodeEncodeError:
            # Non-ASCII host name: encode per IDNA 2008 (with UTS #46 mapping)
            import idna

            encoded_host = idna.encode(host, uts46=True)
    else:
        encoded_host = host

    gai_res = await get_async_backend().getaddrinfo(
        encoded_host, port, family=family, type=type, proto=proto, flags=flags
    )
    return [
        (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
        for family, type, proto, canonname, sockaddr in gai_res
        # filter out IPv6 results when IPv6 is disabled
        if not isinstance(sockaddr[0], int)
    ]
658
+
659
+
660
def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]:
    """
    Look up the host name of an IP address.

    :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4)
    :param flags: flags to pass to upstream ``getnameinfo()``
    :return: a tuple of (host name, service name)
    :raises NoEventLoopError: if no supported asynchronous event loop is running in the
        current thread

    .. seealso:: :func:`socket.getnameinfo`

    """
    backend = get_async_backend()
    return backend.getnameinfo(sockaddr, flags)
674
+
675
+
676
@deprecated("This function is deprecated; use `wait_readable` instead")
def wait_socket_readable(sock: socket.socket) -> Awaitable[None]:
    """
    .. deprecated:: 4.7.0
        Use :func:`wait_readable` instead.

    Wait until the given socket has data to be read.

    .. warning:: Only use this on raw sockets that have not been wrapped by any higher
        level constructs like socket streams!

    :param sock: a socket object
    :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
        socket to become readable
    :raises ~anyio.BusyResourceError: if another task is already waiting for the socket
        to become readable
    :raises NoEventLoopError: if no supported asynchronous event loop is running in the
        current thread

    """
    fd = sock.fileno()
    return get_async_backend().wait_readable(fd)
697
+
698
+
699
@deprecated("This function is deprecated; use `wait_writable` instead")
def wait_socket_writable(sock: socket.socket) -> Awaitable[None]:
    """
    .. deprecated:: 4.7.0
        Use :func:`wait_writable` instead.

    Wait until the given socket can be written to.

    This does **NOT** work on Windows when using the asyncio backend with a proactor
    event loop (default on py3.8+).

    .. warning:: Only use this on raw sockets that have not been wrapped by any higher
        level constructs like socket streams!

    :param sock: a socket object
    :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
        socket to become writable
    :raises ~anyio.BusyResourceError: if another task is already waiting for the socket
        to become writable
    :raises NoEventLoopError: if no supported asynchronous event loop is running in the
        current thread

    """
    fd = sock.fileno()
    return get_async_backend().wait_writable(fd)
723
+
724
+
725
def wait_readable(obj: FileDescriptorLike) -> Awaitable[None]:
    """
    Wait until the given object has data to be read.

    On Unix systems, ``obj`` must either be an integer file descriptor, or else an
    object with a ``.fileno()`` method which returns an integer file descriptor. Any
    kind of file descriptor can be passed, though the exact semantics will depend on
    your kernel. For example, this probably won't do anything useful for on-disk files.

    On Windows systems, ``obj`` must either be an integer ``SOCKET`` handle, or else an
    object with a ``.fileno()`` method which returns an integer ``SOCKET`` handle. File
    descriptors aren't supported, and neither are handles that refer to anything besides
    a ``SOCKET``.

    On backends where this functionality is not natively provided (asyncio
    ``ProactorEventLoop`` on Windows), it is provided using a separate selector thread
    which is set to shut down when the interpreter shuts down.

    .. warning:: Don't use this on raw sockets that have been wrapped by any higher
        level constructs like socket streams!

    :param obj: an object with a ``.fileno()`` method or an integer handle
    :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the
        object to become readable
    :raises ~anyio.BusyResourceError: if another task is already waiting for the object
        to become readable
    :raises NoEventLoopError: if no supported asynchronous event loop is running in the
        current thread

    """
    backend = get_async_backend()
    return backend.wait_readable(obj)
756
+
757
+
758
def wait_writable(obj: FileDescriptorLike) -> Awaitable[None]:
    """
    Wait until the given object can be written to.

    :param obj: an object with a ``.fileno()`` method or an integer handle
    :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the
        object to become writable
    :raises ~anyio.BusyResourceError: if another task is already waiting for the object
        to become writable
    :raises NoEventLoopError: if no supported asynchronous event loop is running in the
        current thread

    .. seealso:: See the documentation of :func:`wait_readable` for the definition of
        ``obj`` and notes on backend compatibility.

    .. warning:: Don't use this on raw sockets that have been wrapped by any higher
        level constructs like socket streams!

    """
    backend = get_async_backend()
    return backend.wait_writable(obj)
778
+
779
+
780
def notify_closing(obj: FileDescriptorLike) -> None:
    """
    Call this before closing a file descriptor (on Unix) or socket (on
    Windows). This will cause any `wait_readable` or `wait_writable`
    calls on the given object to immediately wake up and raise
    `~anyio.ClosedResourceError`.

    This doesn't actually close the object – you still have to do that
    yourself afterwards. Also, you want to be careful to make sure no
    new tasks start waiting on the object in between when you call this
    and when it's actually closed. So to close something properly, you
    usually want to do these steps in order:

    1. Explicitly mark the object as closed, so that any new attempts
       to use it will abort before they start.
    2. Call `notify_closing` to wake up any already-existing users.
    3. Actually close the object.

    It's also possible to do them in a different order if that's more
    convenient, *but only if* you make sure not to have any checkpoints in
    between the steps. This way they all happen in a single atomic
    step, so other tasks won't be able to tell what order they happened
    in anyway.

    :param obj: an object with a ``.fileno()`` method or an integer handle
    :raises NoEventLoopError: if no supported asynchronous event loop is running in the
        current thread

    """
    backend = get_async_backend()
    backend.notify_closing(obj)
810
+
811
+
812
+ #
813
+ # Private API
814
+ #
815
+
816
+
817
def convert_ipv6_sockaddr(
    sockaddr: tuple[str, int, int, int] | tuple[str, int],
) -> tuple[str, int]:
    """
    Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format.

    If the scope ID is nonzero, it is added to the address, separated with ``%``.
    Otherwise the flow id and scope id are simply cut off from the tuple.
    Any other kinds of socket addresses are returned as-is.

    :param sockaddr: the result of :meth:`~socket.socket.getsockname`
    :return: the converted socket address

    """
    # Guard clause: anything that isn't an IPv6 4-tuple passes through unchanged
    # (this shape check is more complicated than it should be because of MyPy)
    if not isinstance(sockaddr, tuple) or len(sockaddr) != 4:
        return sockaddr

    host, port, _flowinfo, scope_id = sockaddr
    if not scope_id:
        # Drop the flow info and scope id entirely
        return host, port

    # PyPy (as of v7.3.11) leaves the interface name in the result, so
    # we discard it and only get the scope ID from the end
    # (https://foss.heptapod.net/pypy/pypy/-/issues/3938)
    bare_host = host.split("%")[0]

    # Re-attach the scope ID to the bare address
    return f"{bare_host}%{scope_id}", port
846
+
847
+
848
async def setup_unix_local_socket(
    path: None | str | bytes | PathLike[Any],
    mode: int | None,
    socktype: int,
) -> socket.socket:
    """
    Create a UNIX local socket object, deleting the socket at the given path if it
    exists.

    Not available on Windows.

    :param path: path of the socket
    :param mode: permissions to set on the socket
    :param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM

    """
    fs_path: str | None = None
    if path is not None:
        fs_path = os.fsdecode(path)

        # Linux abstract namespace sockets aren't backed by a concrete file, so the
        # stat/unlink dance below is skipped for them
        if not fs_path.startswith("\0"):
            # Copied from pathlib...
            try:
                stat_result = os.stat(path)
            except OSError as exc:
                # These errnos all mean "nothing there to remove"; anything else
                # is a real failure and is propagated
                if exc.errno not in (
                    errno.ENOENT,
                    errno.ENOTDIR,
                    errno.EBADF,
                    errno.ELOOP,
                ):
                    raise
            else:
                # Only remove the existing file if it actually is a socket
                if stat.S_ISSOCK(stat_result.st_mode):
                    os.unlink(path)

    sock = socket.socket(socket.AF_UNIX, socktype)
    sock.setblocking(False)

    if fs_path is not None:
        try:
            # bind() and chmod() can block on some filesystems, so run them in a
            # worker thread; on any failure the socket must not leak
            await to_thread.run_sync(sock.bind, fs_path, abandon_on_cancel=True)
            if mode is not None:
                await to_thread.run_sync(chmod, fs_path, mode, abandon_on_cancel=True)
        except BaseException:
            sock.close()
            raise

    return sock
900
+
901
+
902
@dataclass
class TCPConnectable(ByteStreamConnectable):
    """
    Connects to a TCP server at the given host and port.

    :param host: host name or IP address of the server
    :param port: TCP port number of the server
    """

    host: str | IPv4Address | IPv6Address
    port: int

    def __post_init__(self) -> None:
        # Valid TCP ports occupy the inclusive range 1..65535
        if not 1 <= self.port <= 65535:
            raise ValueError("TCP port number out of range")

    @override
    async def connect(self) -> SocketStream:
        try:
            return await connect_tcp(self.host, self.port)
        except OSError as exc:
            # Normalize low-level socket errors into the connectable API's error type
            raise ConnectionFailed(
                f"error connecting to {self.host}:{self.port}: {exc}"
            ) from exc
926
+
927
+
928
@dataclass
class UNIXConnectable(ByteStreamConnectable):
    """
    Connects to a UNIX domain socket at the given path.

    :param path: the file system path of the socket
    """

    path: str | bytes | PathLike[str] | PathLike[bytes]

    @override
    async def connect(self) -> UNIXSocketStream:
        try:
            return await connect_unix(self.path)
        except OSError as exc:
            # Normalize low-level socket errors into the connectable API's error type
            raise ConnectionFailed(f"error connecting to {self.path!r}: {exc}") from exc
944
+
945
+
946
def as_connectable(
    remote: ByteStreamConnectable
    | tuple[str | IPv4Address | IPv6Address, int]
    | str
    | bytes
    | PathLike[str],
    /,
    *,
    tls: bool = False,
    ssl_context: ssl.SSLContext | None = None,
    tls_hostname: str | None = None,
    tls_standard_compatible: bool = True,
) -> ByteStreamConnectable:
    """
    Return a byte stream connectable from the given object.

    If a bytestream connectable is given, it is returned unchanged.
    If a tuple of (host, port) is given, a TCP connectable is returned.
    If a string or bytes path is given, a UNIX connectable is returned.

    If ``tls=True``, the connectable will be wrapped in a
    :class:`~.streams.tls.TLSConnectable`.

    :param remote: a connectable, a tuple of (host, port) or a path to a UNIX socket
    :param tls: if ``True``, wrap the plaintext connectable in a
        :class:`~.streams.tls.TLSConnectable`, using the provided TLS settings)
    :param ssl_context: if ``tls=True``, the SSLContext object to use (if not provided,
        a secure default will be created)
    :param tls_hostname: if ``tls=True``, host name of the server to use for checking
        the server certificate (defaults to the host portion of the address for TCP
        connectables)
    :param tls_standard_compatible: if ``False`` and ``tls=True``, makes the TLS stream
        skip the closing handshake when closing the connection, so it won't raise an
        exception if the server does the same

    """
    # Already a connectable: hand it back untouched
    if isinstance(remote, ByteStreamConnectable):
        return remote

    # Map the raw address form to the appropriate plaintext connectable
    result: TCPConnectable | UNIXConnectable | TLSConnectable
    if isinstance(remote, tuple) and len(remote) == 2:
        result = TCPConnectable(*remote)
    elif isinstance(remote, (str, bytes, PathLike)):
        result = UNIXConnectable(remote)
    else:
        raise TypeError(f"cannot convert {remote!r} to a connectable")

    if tls:
        # For TCP targets, fall back to the host as the certificate check name
        if not tls_hostname and isinstance(result, TCPConnectable):
            tls_hostname = str(result.host)

        result = TLSConnectable(
            result,
            ssl_context=ssl_context,
            hostname=tls_hostname,
            standard_compatible=tls_standard_compatible,
        )

    return result
.venv/Lib/site-packages/anyio/_core/_streams.py ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import math
4
+ from typing import TypeVar
5
+ from warnings import warn
6
+
7
+ from ..streams.memory import (
8
+ MemoryObjectReceiveStream,
9
+ MemoryObjectSendStream,
10
+ _MemoryObjectStreamState,
11
+ )
12
+
13
+ T_Item = TypeVar("T_Item")
14
+
15
+
16
+ class create_memory_object_stream(
17
+ tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]],
18
+ ):
19
+ """
20
+ Create a memory object stream.
21
+
22
+ The stream's item type can be annotated like
23
+ :func:`create_memory_object_stream[T_Item]`.
24
+
25
+ :param max_buffer_size: number of items held in the buffer until ``send()`` starts
26
+ blocking
27
+ :param item_type: old way of marking the streams with the right generic type for
28
+ static typing (does nothing on AnyIO 4)
29
+
30
+ .. deprecated:: 4.0
31
+ Use ``create_memory_object_stream[YourItemType](...)`` instead.
32
+ :return: a tuple of (send stream, receive stream)
33
+
34
+ """
35
+
36
+ def __new__( # type: ignore[misc]
37
+ cls, max_buffer_size: float = 0, item_type: object = None
38
+ ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]:
39
+ if max_buffer_size != math.inf and not isinstance(max_buffer_size, int):
40
+ raise ValueError("max_buffer_size must be either an integer or math.inf")
41
+ if max_buffer_size < 0:
42
+ raise ValueError("max_buffer_size cannot be negative")
43
+ if item_type is not None:
44
+ warn(
45
+ "The item_type argument has been deprecated in AnyIO 4.0. "
46
+ "Use create_memory_object_stream[YourItemType](...) instead.",
47
+ DeprecationWarning,
48
+ stacklevel=2,
49
+ )
50
+
51
+ state = _MemoryObjectStreamState[T_Item](max_buffer_size)
52
+ return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state))