Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- evalkit_cambrian/lib/python3.10/site-packages/aiofiles-23.2.1.dist-info/INSTALLER +1 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiofiles-23.2.1.dist-info/METADATA +291 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiofiles-23.2.1.dist-info/RECORD +27 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiofiles-23.2.1.dist-info/REQUESTED +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiofiles-23.2.1.dist-info/WHEEL +4 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiofiles-23.2.1.dist-info/licenses/NOTICE +2 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_headers.pxi +83 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiohttp/abc.py +253 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiohttp/base_protocol.py +100 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiohttp/client.py +1576 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiohttp/cookiejar.py +495 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiohttp/formdata.py +182 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiohttp/helpers.py +944 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiohttp/http.py +72 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiohttp/http_parser.py +1046 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiohttp/payload.py +519 -0
- evalkit_cambrian/lib/python3.10/site-packages/aiohttp/web_middlewares.py +121 -0
- evalkit_cambrian/lib/python3.10/site-packages/nvidia_cuda_nvrtc_cu12-12.1.105.dist-info/INSTALLER +1 -0
- evalkit_cambrian/lib/python3.10/site-packages/nvidia_cuda_nvrtc_cu12-12.1.105.dist-info/License.txt +1568 -0
- evalkit_cambrian/lib/python3.10/site-packages/nvidia_cuda_nvrtc_cu12-12.1.105.dist-info/METADATA +35 -0
- evalkit_cambrian/lib/python3.10/site-packages/nvidia_cuda_nvrtc_cu12-12.1.105.dist-info/REQUESTED +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/__about__.py +6 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/__init__.py +79 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/__main__.py +134 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/__about__.cpython-310.pyc +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/__main__.cpython-310.pyc +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/constants.cpython-310.pyc +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/exceptions.cpython-310.pyc +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/portalocker.cpython-310.pyc +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/redis.cpython-310.pyc +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/types.cpython-310.pyc +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/utils.cpython-310.pyc +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/constants.py +59 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/exceptions.py +29 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/portalocker.py +154 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/py.typed +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/types.py +62 -0
- evalkit_cambrian/lib/python3.10/site-packages/portalocker/utils.py +587 -0
- evalkit_cambrian/lib/python3.10/site-packages/prompt_toolkit-3.0.48.dist-info/AUTHORS.rst +11 -0
- evalkit_cambrian/lib/python3.10/site-packages/prompt_toolkit-3.0.48.dist-info/INSTALLER +1 -0
- evalkit_cambrian/lib/python3.10/site-packages/prompt_toolkit-3.0.48.dist-info/RECORD +297 -0
- evalkit_cambrian/lib/python3.10/site-packages/prompt_toolkit-3.0.48.dist-info/REQUESTED +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/prompt_toolkit-3.0.48.dist-info/WHEEL +5 -0
- evalkit_cambrian/lib/python3.10/site-packages/prompt_toolkit-3.0.48.dist-info/top_level.txt +1 -0
- evalkit_cambrian/lib/python3.10/site-packages/python_dateutil-2.9.0.post0.dist-info/RECORD +45 -0
- evalkit_cambrian/lib/python3.10/site-packages/regex-2024.11.6.dist-info/INSTALLER +1 -0
- evalkit_cambrian/lib/python3.10/site-packages/regex-2024.11.6.dist-info/METADATA +1060 -0
- evalkit_cambrian/lib/python3.10/site-packages/stack_data/__init__.py +10 -0
- evalkit_cambrian/lib/python3.10/site-packages/stack_data/__pycache__/__init__.cpython-310.pyc +0 -0
evalkit_cambrian/lib/python3.10/site-packages/aiofiles-23.2.1.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
evalkit_cambrian/lib/python3.10/site-packages/aiofiles-23.2.1.dist-info/METADATA
ADDED
|
@@ -0,0 +1,291 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: aiofiles
|
| 3 |
+
Version: 23.2.1
|
| 4 |
+
Summary: File support for asyncio.
|
| 5 |
+
Project-URL: Changelog, https://github.com/Tinche/aiofiles#history
|
| 6 |
+
Project-URL: Bug Tracker, https://github.com/Tinche/aiofiles/issues
|
| 7 |
+
Project-URL: repository, https://github.com/Tinche/aiofiles
|
| 8 |
+
Author-email: Tin Tvrtkovic <tinchester@gmail.com>
|
| 9 |
+
License: Apache-2.0
|
| 10 |
+
License-File: LICENSE
|
| 11 |
+
License-File: NOTICE
|
| 12 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 13 |
+
Classifier: Framework :: AsyncIO
|
| 14 |
+
Classifier: License :: OSI Approved :: Apache Software License
|
| 15 |
+
Classifier: Operating System :: OS Independent
|
| 16 |
+
Classifier: Programming Language :: Python :: 3.7
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 22 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 23 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 24 |
+
Requires-Python: >=3.7
|
| 25 |
+
Description-Content-Type: text/markdown
|
| 26 |
+
|
| 27 |
+
# aiofiles: file support for asyncio
|
| 28 |
+
|
| 29 |
+
[](https://pypi.python.org/pypi/aiofiles)
|
| 30 |
+
[](https://github.com/Tinche/aiofiles/actions)
|
| 31 |
+
[](https://github.com/Tinche/aiofiles/actions/workflows/main.yml)
|
| 32 |
+
[](https://github.com/Tinche/aiofiles)
|
| 33 |
+
[](https://github.com/psf/black)
|
| 34 |
+
|
| 35 |
+
**aiofiles** is an Apache2 licensed library, written in Python, for handling local
|
| 36 |
+
disk files in asyncio applications.
|
| 37 |
+
|
| 38 |
+
Ordinary local file IO is blocking, and cannot easily and portably be made
|
| 39 |
+
asynchronous. This means doing file IO may interfere with asyncio applications,
|
| 40 |
+
which shouldn't block the executing thread. aiofiles helps with this by
|
| 41 |
+
introducing asynchronous versions of files that support delegating operations to
|
| 42 |
+
a separate thread pool.
|
| 43 |
+
|
| 44 |
+
```python
|
| 45 |
+
async with aiofiles.open('filename', mode='r') as f:
|
| 46 |
+
contents = await f.read()
|
| 47 |
+
print(contents)
|
| 48 |
+
'My file contents'
|
| 49 |
+
```
|
| 50 |
+
|
| 51 |
+
Asynchronous iteration is also supported.
|
| 52 |
+
|
| 53 |
+
```python
|
| 54 |
+
async with aiofiles.open('filename') as f:
|
| 55 |
+
async for line in f:
|
| 56 |
+
...
|
| 57 |
+
```
|
| 58 |
+
|
| 59 |
+
Asynchronous interface to tempfile module.
|
| 60 |
+
|
| 61 |
+
```python
|
| 62 |
+
async with aiofiles.tempfile.TemporaryFile('wb') as f:
|
| 63 |
+
await f.write(b'Hello, World!')
|
| 64 |
+
```
|
| 65 |
+
|
| 66 |
+
## Features
|
| 67 |
+
|
| 68 |
+
- a file API very similar to Python's standard, blocking API
|
| 69 |
+
- support for buffered and unbuffered binary files, and buffered text files
|
| 70 |
+
- support for `async`/`await` ([PEP 492](https://peps.python.org/pep-0492/)) constructs
|
| 71 |
+
- async interface to tempfile module
|
| 72 |
+
|
| 73 |
+
## Installation
|
| 74 |
+
|
| 75 |
+
To install aiofiles, simply:
|
| 76 |
+
|
| 77 |
+
```bash
|
| 78 |
+
$ pip install aiofiles
|
| 79 |
+
```
|
| 80 |
+
|
| 81 |
+
## Usage
|
| 82 |
+
|
| 83 |
+
Files are opened using the `aiofiles.open()` coroutine, which in addition to
|
| 84 |
+
mirroring the builtin `open` accepts optional `loop` and `executor`
|
| 85 |
+
arguments. If `loop` is absent, the default loop will be used, as per the
|
| 86 |
+
set asyncio policy. If `executor` is not specified, the default event loop
|
| 87 |
+
executor will be used.
|
| 88 |
+
|
| 89 |
+
In case of success, an asynchronous file object is returned with an
|
| 90 |
+
API identical to an ordinary file, except the following methods are coroutines
|
| 91 |
+
and delegate to an executor:
|
| 92 |
+
|
| 93 |
+
- `close`
|
| 94 |
+
- `flush`
|
| 95 |
+
- `isatty`
|
| 96 |
+
- `read`
|
| 97 |
+
- `readall`
|
| 98 |
+
- `read1`
|
| 99 |
+
- `readinto`
|
| 100 |
+
- `readline`
|
| 101 |
+
- `readlines`
|
| 102 |
+
- `seek`
|
| 103 |
+
- `seekable`
|
| 104 |
+
- `tell`
|
| 105 |
+
- `truncate`
|
| 106 |
+
- `writable`
|
| 107 |
+
- `write`
|
| 108 |
+
- `writelines`
|
| 109 |
+
|
| 110 |
+
In case of failure, one of the usual exceptions will be raised.
|
| 111 |
+
|
| 112 |
+
`aiofiles.stdin`, `aiofiles.stdout`, `aiofiles.stderr`,
|
| 113 |
+
`aiofiles.stdin_bytes`, `aiofiles.stdout_bytes`, and
|
| 114 |
+
`aiofiles.stderr_bytes` provide async access to `sys.stdin`,
|
| 115 |
+
`sys.stdout`, `sys.stderr`, and their corresponding `.buffer` properties.
|
| 116 |
+
|
| 117 |
+
The `aiofiles.os` module contains executor-enabled coroutine versions of
|
| 118 |
+
several useful `os` functions that deal with files:
|
| 119 |
+
|
| 120 |
+
- `stat`
|
| 121 |
+
- `statvfs`
|
| 122 |
+
- `sendfile`
|
| 123 |
+
- `rename`
|
| 124 |
+
- `renames`
|
| 125 |
+
- `replace`
|
| 126 |
+
- `remove`
|
| 127 |
+
- `unlink`
|
| 128 |
+
- `mkdir`
|
| 129 |
+
- `makedirs`
|
| 130 |
+
- `rmdir`
|
| 131 |
+
- `removedirs`
|
| 132 |
+
- `link`
|
| 133 |
+
- `symlink`
|
| 134 |
+
- `readlink`
|
| 135 |
+
- `listdir`
|
| 136 |
+
- `scandir`
|
| 137 |
+
- `access`
|
| 138 |
+
- `path.exists`
|
| 139 |
+
- `path.isfile`
|
| 140 |
+
- `path.isdir`
|
| 141 |
+
- `path.islink`
|
| 142 |
+
- `path.ismount`
|
| 143 |
+
- `path.getsize`
|
| 144 |
+
- `path.getatime`
|
| 145 |
+
- `path.getctime`
|
| 146 |
+
- `path.samefile`
|
| 147 |
+
- `path.sameopenfile`
|
| 148 |
+
|
| 149 |
+
### Tempfile
|
| 150 |
+
|
| 151 |
+
**aiofiles.tempfile** implements the following interfaces:
|
| 152 |
+
|
| 153 |
+
- TemporaryFile
|
| 154 |
+
- NamedTemporaryFile
|
| 155 |
+
- SpooledTemporaryFile
|
| 156 |
+
- TemporaryDirectory
|
| 157 |
+
|
| 158 |
+
Results return wrapped with a context manager allowing use with async with and async for.
|
| 159 |
+
|
| 160 |
+
```python
|
| 161 |
+
async with aiofiles.tempfile.NamedTemporaryFile('wb+') as f:
|
| 162 |
+
await f.write(b'Line1\n Line2')
|
| 163 |
+
await f.seek(0)
|
| 164 |
+
async for line in f:
|
| 165 |
+
print(line)
|
| 166 |
+
|
| 167 |
+
async with aiofiles.tempfile.TemporaryDirectory() as d:
|
| 168 |
+
filename = os.path.join(d, "file.ext")
|
| 169 |
+
```
|
| 170 |
+
|
| 171 |
+
### Writing tests for aiofiles
|
| 172 |
+
|
| 173 |
+
Real file IO can be mocked by patching `aiofiles.threadpool.sync_open`
|
| 174 |
+
as desired. The return type also needs to be registered with the
|
| 175 |
+
`aiofiles.threadpool.wrap` dispatcher:
|
| 176 |
+
|
| 177 |
+
```python
|
| 178 |
+
aiofiles.threadpool.wrap.register(mock.MagicMock)(
|
| 179 |
+
lambda *args, **kwargs: threadpool.AsyncBufferedIOBase(*args, **kwargs))
|
| 180 |
+
|
| 181 |
+
async def test_stuff():
|
| 182 |
+
data = 'data'
|
| 183 |
+
mock_file = mock.MagicMock()
|
| 184 |
+
|
| 185 |
+
with mock.patch('aiofiles.threadpool.sync_open', return_value=mock_file) as mock_open:
|
| 186 |
+
async with aiofiles.open('filename', 'w') as f:
|
| 187 |
+
await f.write(data)
|
| 188 |
+
|
| 189 |
+
mock_file.write.assert_called_once_with(data)
|
| 190 |
+
```
|
| 191 |
+
|
| 192 |
+
### History
|
| 193 |
+
|
| 194 |
+
#### 23.2.1 (2023-08-09)
|
| 195 |
+
|
| 196 |
+
- Import `os.statvfs` conditionally to fix importing on non-UNIX systems.
|
| 197 |
+
[#171](https://github.com/Tinche/aiofiles/issues/171) [#172](https://github.com/Tinche/aiofiles/pull/172)
|
| 198 |
+
|
| 199 |
+
#### 23.2.0 (2023-08-09)
|
| 200 |
+
|
| 201 |
+
- aiofiles is now tested on Python 3.12 too.
|
| 202 |
+
[#166](https://github.com/Tinche/aiofiles/issues/166) [#168](https://github.com/Tinche/aiofiles/pull/168)
|
| 203 |
+
- On Python 3.12, `aiofiles.tempfile.NamedTemporaryFile` now accepts a `delete_on_close` argument, just like the stdlib version.
|
| 204 |
+
- On Python 3.12, `aiofiles.tempfile.NamedTemporaryFile` no longer exposes a `delete` attribute, just like the stdlib version.
|
| 205 |
+
- Added `aiofiles.os.statvfs` and `aiofiles.os.path.ismount`.
|
| 206 |
+
[#162](https://github.com/Tinche/aiofiles/pull/162)
|
| 207 |
+
- Use [PDM](https://pdm.fming.dev/latest/) instead of Poetry.
|
| 208 |
+
[#169](https://github.com/Tinche/aiofiles/pull/169)
|
| 209 |
+
|
| 210 |
+
#### 23.1.0 (2023-02-09)
|
| 211 |
+
|
| 212 |
+
- Added `aiofiles.os.access`.
|
| 213 |
+
[#146](https://github.com/Tinche/aiofiles/pull/146)
|
| 214 |
+
- Removed `aiofiles.tempfile.temptypes.AsyncSpooledTemporaryFile.softspace`.
|
| 215 |
+
[#151](https://github.com/Tinche/aiofiles/pull/151)
|
| 216 |
+
- Added `aiofiles.stdin`, `aiofiles.stdin_bytes`, and other stdio streams.
|
| 217 |
+
[#154](https://github.com/Tinche/aiofiles/pull/154)
|
| 218 |
+
- Transition to `asyncio.get_running_loop` (vs `asyncio.get_event_loop`) internally.
|
| 219 |
+
|
| 220 |
+
#### 22.1.0 (2022-09-04)
|
| 221 |
+
|
| 222 |
+
- Added `aiofiles.os.path.islink`.
|
| 223 |
+
[#126](https://github.com/Tinche/aiofiles/pull/126)
|
| 224 |
+
- Added `aiofiles.os.readlink`.
|
| 225 |
+
[#125](https://github.com/Tinche/aiofiles/pull/125)
|
| 226 |
+
- Added `aiofiles.os.symlink`.
|
| 227 |
+
[#124](https://github.com/Tinche/aiofiles/pull/124)
|
| 228 |
+
- Added `aiofiles.os.unlink`.
|
| 229 |
+
[#123](https://github.com/Tinche/aiofiles/pull/123)
|
| 230 |
+
- Added `aiofiles.os.link`.
|
| 231 |
+
[#121](https://github.com/Tinche/aiofiles/pull/121)
|
| 232 |
+
- Added `aiofiles.os.renames`.
|
| 233 |
+
[#120](https://github.com/Tinche/aiofiles/pull/120)
|
| 234 |
+
- Added `aiofiles.os.{listdir, scandir}`.
|
| 235 |
+
[#143](https://github.com/Tinche/aiofiles/pull/143)
|
| 236 |
+
- Switched to CalVer.
|
| 237 |
+
- Dropped Python 3.6 support. If you require it, use version 0.8.0.
|
| 238 |
+
- aiofiles is now tested on Python 3.11.
|
| 239 |
+
|
| 240 |
+
#### 0.8.0 (2021-11-27)
|
| 241 |
+
|
| 242 |
+
- aiofiles is now tested on Python 3.10.
|
| 243 |
+
- Added `aiofiles.os.replace`.
|
| 244 |
+
[#107](https://github.com/Tinche/aiofiles/pull/107)
|
| 245 |
+
- Added `aiofiles.os.{makedirs, removedirs}`.
|
| 246 |
+
- Added `aiofiles.os.path.{exists, isfile, isdir, getsize, getatime, getctime, samefile, sameopenfile}`.
|
| 247 |
+
[#63](https://github.com/Tinche/aiofiles/pull/63)
|
| 248 |
+
- Added `suffix`, `prefix`, `dir` args to `aiofiles.tempfile.TemporaryDirectory`.
|
| 249 |
+
[#116](https://github.com/Tinche/aiofiles/pull/116)
|
| 250 |
+
|
| 251 |
+
#### 0.7.0 (2021-05-17)
|
| 252 |
+
|
| 253 |
+
- Added the `aiofiles.tempfile` module for async temporary files.
|
| 254 |
+
[#56](https://github.com/Tinche/aiofiles/pull/56)
|
| 255 |
+
- Switched to Poetry and GitHub actions.
|
| 256 |
+
- Dropped 3.5 support.
|
| 257 |
+
|
| 258 |
+
#### 0.6.0 (2020-10-27)
|
| 259 |
+
|
| 260 |
+
- `aiofiles` is now tested on ppc64le.
|
| 261 |
+
- Added `name` and `mode` properties to async file objects.
|
| 262 |
+
[#82](https://github.com/Tinche/aiofiles/pull/82)
|
| 263 |
+
- Fixed a DeprecationWarning internally.
|
| 264 |
+
[#75](https://github.com/Tinche/aiofiles/pull/75)
|
| 265 |
+
- Python 3.9 support and tests.
|
| 266 |
+
|
| 267 |
+
#### 0.5.0 (2020-04-12)
|
| 268 |
+
|
| 269 |
+
- Python 3.8 support. Code base modernization (using `async/await` instead of `asyncio.coroutine`/`yield from`).
|
| 270 |
+
- Added `aiofiles.os.remove`, `aiofiles.os.rename`, `aiofiles.os.mkdir`, `aiofiles.os.rmdir`.
|
| 271 |
+
[#62](https://github.com/Tinche/aiofiles/pull/62)
|
| 272 |
+
|
| 273 |
+
#### 0.4.0 (2018-08-11)
|
| 274 |
+
|
| 275 |
+
- Python 3.7 support.
|
| 276 |
+
- Removed Python 3.3/3.4 support. If you use these versions, stick to aiofiles 0.3.x.
|
| 277 |
+
|
| 278 |
+
#### 0.3.2 (2017-09-23)
|
| 279 |
+
|
| 280 |
+
- The LICENSE is now included in the sdist.
|
| 281 |
+
[#31](https://github.com/Tinche/aiofiles/pull/31)
|
| 282 |
+
|
| 283 |
+
#### 0.3.1 (2017-03-10)
|
| 284 |
+
|
| 285 |
+
- Introduced a changelog.
|
| 286 |
+
- `aiofiles.os.sendfile` will now work if the standard `os` module contains a `sendfile` function.
|
| 287 |
+
|
| 288 |
+
### Contributing
|
| 289 |
+
|
| 290 |
+
Contributions are very welcome. Tests can be run with `tox`, please ensure
|
| 291 |
+
the coverage at least stays the same before you submit a pull request.
|
evalkit_cambrian/lib/python3.10/site-packages/aiofiles-23.2.1.dist-info/RECORD
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
aiofiles-23.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
aiofiles-23.2.1.dist-info/METADATA,sha256=cot28p_PNjdl_MK--l9Qu2e6QOv9OxdHrKbjLmYf9Uw,9673
|
| 3 |
+
aiofiles-23.2.1.dist-info/RECORD,,
|
| 4 |
+
aiofiles-23.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 5 |
+
aiofiles-23.2.1.dist-info/WHEEL,sha256=KGYbc1zXlYddvwxnNty23BeaKzh7YuoSIvIMO4jEhvw,87
|
| 6 |
+
aiofiles-23.2.1.dist-info/licenses/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325
|
| 7 |
+
aiofiles-23.2.1.dist-info/licenses/NOTICE,sha256=EExY0dRQvWR0wJ2LZLwBgnM6YKw9jCU-M0zegpRSD_E,55
|
| 8 |
+
aiofiles/__init__.py,sha256=1iAMJQyJtX3LGIS0AoFTJeO1aJ_RK2jpBSBhg0VoIrE,344
|
| 9 |
+
aiofiles/__pycache__/__init__.cpython-310.pyc,,
|
| 10 |
+
aiofiles/__pycache__/base.cpython-310.pyc,,
|
| 11 |
+
aiofiles/__pycache__/os.cpython-310.pyc,,
|
| 12 |
+
aiofiles/__pycache__/ospath.cpython-310.pyc,,
|
| 13 |
+
aiofiles/base.py,sha256=rZwA151Ji8XlBkzvDmcF1CgDTY2iKNuJMfvNlM0s0E0,2684
|
| 14 |
+
aiofiles/os.py,sha256=zuFGaIyGCGUuFb7trFFEm6SLdCRqTFsSV0mY6SO8z3M,970
|
| 15 |
+
aiofiles/ospath.py,sha256=zqG2VFzRb6yYiIOWipqsdgvZmoMTFvZmBdkxkAl1FT4,764
|
| 16 |
+
aiofiles/tempfile/__init__.py,sha256=hFSNTOjOUv371Ozdfy6FIxeln46Nm3xOVh4ZR3Q94V0,10244
|
| 17 |
+
aiofiles/tempfile/__pycache__/__init__.cpython-310.pyc,,
|
| 18 |
+
aiofiles/tempfile/__pycache__/temptypes.cpython-310.pyc,,
|
| 19 |
+
aiofiles/tempfile/temptypes.py,sha256=ddEvNjMLVlr7WUILCe6ypTqw77yREeIonTk16Uw_NVs,2093
|
| 20 |
+
aiofiles/threadpool/__init__.py,sha256=c_aexl1t193iKdPZaolPEEbHDrQ0RrsH_HTAToMPQBo,3171
|
| 21 |
+
aiofiles/threadpool/__pycache__/__init__.cpython-310.pyc,,
|
| 22 |
+
aiofiles/threadpool/__pycache__/binary.cpython-310.pyc,,
|
| 23 |
+
aiofiles/threadpool/__pycache__/text.cpython-310.pyc,,
|
| 24 |
+
aiofiles/threadpool/__pycache__/utils.cpython-310.pyc,,
|
| 25 |
+
aiofiles/threadpool/binary.py,sha256=hp-km9VCRu0MLz_wAEUfbCz7OL7xtn9iGAawabpnp5U,2315
|
| 26 |
+
aiofiles/threadpool/text.py,sha256=fNmpw2PEkj0BZSldipJXAgZqVGLxALcfOMiuDQ54Eas,1223
|
| 27 |
+
aiofiles/threadpool/utils.py,sha256=B59dSZwO_WZs2dFFycKeA91iD2Xq2nNw1EFF8YMBI5k,1868
|
evalkit_cambrian/lib/python3.10/site-packages/aiofiles-23.2.1.dist-info/REQUESTED
ADDED
|
File without changes
|
evalkit_cambrian/lib/python3.10/site-packages/aiofiles-23.2.1.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: hatchling 1.17.1
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
evalkit_cambrian/lib/python3.10/site-packages/aiofiles-23.2.1.dist-info/licenses/NOTICE
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Asyncio support for files
|
| 2 |
+
Copyright 2016 Tin Tvrtkovic
|
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_headers.pxi
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# The file is autogenerated from aiohttp/hdrs.py
|
| 2 |
+
# Run ./tools/gen.py to update it after the origin changing.
|
| 3 |
+
|
| 4 |
+
from . import hdrs
|
| 5 |
+
cdef tuple headers = (
|
| 6 |
+
hdrs.ACCEPT,
|
| 7 |
+
hdrs.ACCEPT_CHARSET,
|
| 8 |
+
hdrs.ACCEPT_ENCODING,
|
| 9 |
+
hdrs.ACCEPT_LANGUAGE,
|
| 10 |
+
hdrs.ACCEPT_RANGES,
|
| 11 |
+
hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
|
| 12 |
+
hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
|
| 13 |
+
hdrs.ACCESS_CONTROL_ALLOW_METHODS,
|
| 14 |
+
hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
|
| 15 |
+
hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
|
| 16 |
+
hdrs.ACCESS_CONTROL_MAX_AGE,
|
| 17 |
+
hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
|
| 18 |
+
hdrs.ACCESS_CONTROL_REQUEST_METHOD,
|
| 19 |
+
hdrs.AGE,
|
| 20 |
+
hdrs.ALLOW,
|
| 21 |
+
hdrs.AUTHORIZATION,
|
| 22 |
+
hdrs.CACHE_CONTROL,
|
| 23 |
+
hdrs.CONNECTION,
|
| 24 |
+
hdrs.CONTENT_DISPOSITION,
|
| 25 |
+
hdrs.CONTENT_ENCODING,
|
| 26 |
+
hdrs.CONTENT_LANGUAGE,
|
| 27 |
+
hdrs.CONTENT_LENGTH,
|
| 28 |
+
hdrs.CONTENT_LOCATION,
|
| 29 |
+
hdrs.CONTENT_MD5,
|
| 30 |
+
hdrs.CONTENT_RANGE,
|
| 31 |
+
hdrs.CONTENT_TRANSFER_ENCODING,
|
| 32 |
+
hdrs.CONTENT_TYPE,
|
| 33 |
+
hdrs.COOKIE,
|
| 34 |
+
hdrs.DATE,
|
| 35 |
+
hdrs.DESTINATION,
|
| 36 |
+
hdrs.DIGEST,
|
| 37 |
+
hdrs.ETAG,
|
| 38 |
+
hdrs.EXPECT,
|
| 39 |
+
hdrs.EXPIRES,
|
| 40 |
+
hdrs.FORWARDED,
|
| 41 |
+
hdrs.FROM,
|
| 42 |
+
hdrs.HOST,
|
| 43 |
+
hdrs.IF_MATCH,
|
| 44 |
+
hdrs.IF_MODIFIED_SINCE,
|
| 45 |
+
hdrs.IF_NONE_MATCH,
|
| 46 |
+
hdrs.IF_RANGE,
|
| 47 |
+
hdrs.IF_UNMODIFIED_SINCE,
|
| 48 |
+
hdrs.KEEP_ALIVE,
|
| 49 |
+
hdrs.LAST_EVENT_ID,
|
| 50 |
+
hdrs.LAST_MODIFIED,
|
| 51 |
+
hdrs.LINK,
|
| 52 |
+
hdrs.LOCATION,
|
| 53 |
+
hdrs.MAX_FORWARDS,
|
| 54 |
+
hdrs.ORIGIN,
|
| 55 |
+
hdrs.PRAGMA,
|
| 56 |
+
hdrs.PROXY_AUTHENTICATE,
|
| 57 |
+
hdrs.PROXY_AUTHORIZATION,
|
| 58 |
+
hdrs.RANGE,
|
| 59 |
+
hdrs.REFERER,
|
| 60 |
+
hdrs.RETRY_AFTER,
|
| 61 |
+
hdrs.SEC_WEBSOCKET_ACCEPT,
|
| 62 |
+
hdrs.SEC_WEBSOCKET_EXTENSIONS,
|
| 63 |
+
hdrs.SEC_WEBSOCKET_KEY,
|
| 64 |
+
hdrs.SEC_WEBSOCKET_KEY1,
|
| 65 |
+
hdrs.SEC_WEBSOCKET_PROTOCOL,
|
| 66 |
+
hdrs.SEC_WEBSOCKET_VERSION,
|
| 67 |
+
hdrs.SERVER,
|
| 68 |
+
hdrs.SET_COOKIE,
|
| 69 |
+
hdrs.TE,
|
| 70 |
+
hdrs.TRAILER,
|
| 71 |
+
hdrs.TRANSFER_ENCODING,
|
| 72 |
+
hdrs.URI,
|
| 73 |
+
hdrs.UPGRADE,
|
| 74 |
+
hdrs.USER_AGENT,
|
| 75 |
+
hdrs.VARY,
|
| 76 |
+
hdrs.VIA,
|
| 77 |
+
hdrs.WWW_AUTHENTICATE,
|
| 78 |
+
hdrs.WANT_DIGEST,
|
| 79 |
+
hdrs.WARNING,
|
| 80 |
+
hdrs.X_FORWARDED_FOR,
|
| 81 |
+
hdrs.X_FORWARDED_HOST,
|
| 82 |
+
hdrs.X_FORWARDED_PROTO,
|
| 83 |
+
)
|
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/abc.py
ADDED
|
@@ -0,0 +1,253 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import logging
|
| 3 |
+
import socket
|
| 4 |
+
import zlib
|
| 5 |
+
from abc import ABC, abstractmethod
|
| 6 |
+
from collections.abc import Sized
|
| 7 |
+
from http.cookies import BaseCookie, Morsel
|
| 8 |
+
from typing import (
|
| 9 |
+
TYPE_CHECKING,
|
| 10 |
+
Any,
|
| 11 |
+
Awaitable,
|
| 12 |
+
Callable,
|
| 13 |
+
Dict,
|
| 14 |
+
Generator,
|
| 15 |
+
Iterable,
|
| 16 |
+
List,
|
| 17 |
+
Optional,
|
| 18 |
+
Tuple,
|
| 19 |
+
TypedDict,
|
| 20 |
+
Union,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
from multidict import CIMultiDict
|
| 24 |
+
from yarl import URL
|
| 25 |
+
|
| 26 |
+
from .typedefs import LooseCookies
|
| 27 |
+
|
| 28 |
+
if TYPE_CHECKING:
|
| 29 |
+
from .web_app import Application
|
| 30 |
+
from .web_exceptions import HTTPException
|
| 31 |
+
from .web_request import BaseRequest, Request
|
| 32 |
+
from .web_response import StreamResponse
|
| 33 |
+
else:
|
| 34 |
+
BaseRequest = Request = Application = StreamResponse = None
|
| 35 |
+
HTTPException = None
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class AbstractRouter(ABC):
|
| 39 |
+
def __init__(self) -> None:
|
| 40 |
+
self._frozen = False
|
| 41 |
+
|
| 42 |
+
def post_init(self, app: Application) -> None:
|
| 43 |
+
"""Post init stage.
|
| 44 |
+
|
| 45 |
+
Not an abstract method for sake of backward compatibility,
|
| 46 |
+
but if the router wants to be aware of the application
|
| 47 |
+
it can override this.
|
| 48 |
+
"""
|
| 49 |
+
|
| 50 |
+
@property
|
| 51 |
+
def frozen(self) -> bool:
|
| 52 |
+
return self._frozen
|
| 53 |
+
|
| 54 |
+
def freeze(self) -> None:
|
| 55 |
+
"""Freeze router."""
|
| 56 |
+
self._frozen = True
|
| 57 |
+
|
| 58 |
+
@abstractmethod
|
| 59 |
+
async def resolve(self, request: Request) -> "AbstractMatchInfo":
|
| 60 |
+
"""Return MATCH_INFO for given request"""
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class AbstractMatchInfo(ABC):
|
| 64 |
+
|
| 65 |
+
__slots__ = ()
|
| 66 |
+
|
| 67 |
+
@property # pragma: no branch
|
| 68 |
+
@abstractmethod
|
| 69 |
+
def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
|
| 70 |
+
"""Execute matched request handler"""
|
| 71 |
+
|
| 72 |
+
@property
|
| 73 |
+
@abstractmethod
|
| 74 |
+
def expect_handler(
|
| 75 |
+
self,
|
| 76 |
+
) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]:
|
| 77 |
+
"""Expect handler for 100-continue processing"""
|
| 78 |
+
|
| 79 |
+
@property # pragma: no branch
|
| 80 |
+
@abstractmethod
|
| 81 |
+
def http_exception(self) -> Optional[HTTPException]:
|
| 82 |
+
"""HTTPException instance raised on router's resolving, or None"""
|
| 83 |
+
|
| 84 |
+
@abstractmethod # pragma: no branch
|
| 85 |
+
def get_info(self) -> Dict[str, Any]:
|
| 86 |
+
"""Return a dict with additional info useful for introspection"""
|
| 87 |
+
|
| 88 |
+
@property # pragma: no branch
|
| 89 |
+
@abstractmethod
|
| 90 |
+
def apps(self) -> Tuple[Application, ...]:
|
| 91 |
+
"""Stack of nested applications.
|
| 92 |
+
|
| 93 |
+
Top level application is left-most element.
|
| 94 |
+
|
| 95 |
+
"""
|
| 96 |
+
|
| 97 |
+
@abstractmethod
|
| 98 |
+
def add_app(self, app: Application) -> None:
|
| 99 |
+
"""Add application to the nested apps stack."""
|
| 100 |
+
|
| 101 |
+
@abstractmethod
|
| 102 |
+
def freeze(self) -> None:
|
| 103 |
+
"""Freeze the match info.
|
| 104 |
+
|
| 105 |
+
The method is called after route resolution.
|
| 106 |
+
|
| 107 |
+
After the call .add_app() is forbidden.
|
| 108 |
+
|
| 109 |
+
"""
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
class AbstractView(ABC):
|
| 113 |
+
"""Abstract class based view."""
|
| 114 |
+
|
| 115 |
+
def __init__(self, request: Request) -> None:
|
| 116 |
+
self._request = request
|
| 117 |
+
|
| 118 |
+
@property
|
| 119 |
+
def request(self) -> Request:
|
| 120 |
+
"""Request instance."""
|
| 121 |
+
return self._request
|
| 122 |
+
|
| 123 |
+
@abstractmethod
|
| 124 |
+
def __await__(self) -> Generator[Any, None, StreamResponse]:
|
| 125 |
+
"""Execute the view handler."""
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
class ResolveResult(TypedDict):
|
| 129 |
+
"""Resolve result.
|
| 130 |
+
|
| 131 |
+
This is the result returned from an AbstractResolver's
|
| 132 |
+
resolve method.
|
| 133 |
+
|
| 134 |
+
:param hostname: The hostname that was provided.
|
| 135 |
+
:param host: The IP address that was resolved.
|
| 136 |
+
:param port: The port that was resolved.
|
| 137 |
+
:param family: The address family that was resolved.
|
| 138 |
+
:param proto: The protocol that was resolved.
|
| 139 |
+
:param flags: The flags that were resolved.
|
| 140 |
+
"""
|
| 141 |
+
|
| 142 |
+
hostname: str
|
| 143 |
+
host: str
|
| 144 |
+
port: int
|
| 145 |
+
family: int
|
| 146 |
+
proto: int
|
| 147 |
+
flags: int
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
class AbstractResolver(ABC):
|
| 151 |
+
"""Abstract DNS resolver."""
|
| 152 |
+
|
| 153 |
+
@abstractmethod
|
| 154 |
+
async def resolve(
|
| 155 |
+
self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
|
| 156 |
+
) -> List[ResolveResult]:
|
| 157 |
+
"""Return IP address for given hostname"""
|
| 158 |
+
|
| 159 |
+
@abstractmethod
|
| 160 |
+
async def close(self) -> None:
|
| 161 |
+
"""Release resolver"""
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
if TYPE_CHECKING:
|
| 165 |
+
IterableBase = Iterable[Morsel[str]]
|
| 166 |
+
else:
|
| 167 |
+
IterableBase = Iterable
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
ClearCookiePredicate = Callable[["Morsel[str]"], bool]
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
class AbstractCookieJar(Sized, IterableBase):
|
| 174 |
+
"""Abstract Cookie Jar."""
|
| 175 |
+
|
| 176 |
+
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
|
| 177 |
+
self._loop = loop or asyncio.get_running_loop()
|
| 178 |
+
|
| 179 |
+
@property
|
| 180 |
+
@abstractmethod
|
| 181 |
+
def quote_cookie(self) -> bool:
|
| 182 |
+
"""Return True if cookies should be quoted."""
|
| 183 |
+
|
| 184 |
+
@abstractmethod
|
| 185 |
+
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
|
| 186 |
+
"""Clear all cookies if no predicate is passed."""
|
| 187 |
+
|
| 188 |
+
@abstractmethod
|
| 189 |
+
def clear_domain(self, domain: str) -> None:
|
| 190 |
+
"""Clear all cookies for domain and all subdomains."""
|
| 191 |
+
|
| 192 |
+
@abstractmethod
|
| 193 |
+
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
|
| 194 |
+
"""Update cookies."""
|
| 195 |
+
|
| 196 |
+
@abstractmethod
|
| 197 |
+
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
|
| 198 |
+
"""Return the jar's cookies filtered by their attributes."""
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
class AbstractStreamWriter(ABC):
|
| 202 |
+
"""Abstract stream writer."""
|
| 203 |
+
|
| 204 |
+
buffer_size: int = 0
|
| 205 |
+
output_size: int = 0
|
| 206 |
+
length: Optional[int] = 0
|
| 207 |
+
|
| 208 |
+
@abstractmethod
|
| 209 |
+
async def write(self, chunk: Union[bytes, bytearray, memoryview]) -> None:
|
| 210 |
+
"""Write chunk into stream."""
|
| 211 |
+
|
| 212 |
+
@abstractmethod
|
| 213 |
+
async def write_eof(self, chunk: bytes = b"") -> None:
|
| 214 |
+
"""Write last chunk."""
|
| 215 |
+
|
| 216 |
+
@abstractmethod
|
| 217 |
+
async def drain(self) -> None:
|
| 218 |
+
"""Flush the write buffer."""
|
| 219 |
+
|
| 220 |
+
@abstractmethod
|
| 221 |
+
def enable_compression(
|
| 222 |
+
self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
|
| 223 |
+
) -> None:
|
| 224 |
+
"""Enable HTTP body compression"""
|
| 225 |
+
|
| 226 |
+
@abstractmethod
|
| 227 |
+
def enable_chunking(self) -> None:
|
| 228 |
+
"""Enable HTTP chunked mode"""
|
| 229 |
+
|
| 230 |
+
@abstractmethod
|
| 231 |
+
async def write_headers(
|
| 232 |
+
self, status_line: str, headers: "CIMultiDict[str]"
|
| 233 |
+
) -> None:
|
| 234 |
+
"""Write HTTP headers"""
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
class AbstractAccessLogger(ABC):
|
| 238 |
+
"""Abstract writer to access log."""
|
| 239 |
+
|
| 240 |
+
__slots__ = ("logger", "log_format")
|
| 241 |
+
|
| 242 |
+
def __init__(self, logger: logging.Logger, log_format: str) -> None:
|
| 243 |
+
self.logger = logger
|
| 244 |
+
self.log_format = log_format
|
| 245 |
+
|
| 246 |
+
@abstractmethod
|
| 247 |
+
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
|
| 248 |
+
"""Emit log to logger."""
|
| 249 |
+
|
| 250 |
+
@property
|
| 251 |
+
def enabled(self) -> bool:
|
| 252 |
+
"""Check if logger is enabled."""
|
| 253 |
+
return True
|
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/base_protocol.py
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
from typing import Optional, cast
|
| 3 |
+
|
| 4 |
+
from .client_exceptions import ClientConnectionResetError
|
| 5 |
+
from .helpers import set_exception
|
| 6 |
+
from .tcp_helpers import tcp_nodelay
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class BaseProtocol(asyncio.Protocol):
|
| 10 |
+
__slots__ = (
|
| 11 |
+
"_loop",
|
| 12 |
+
"_paused",
|
| 13 |
+
"_drain_waiter",
|
| 14 |
+
"_connection_lost",
|
| 15 |
+
"_reading_paused",
|
| 16 |
+
"transport",
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
|
| 20 |
+
self._loop: asyncio.AbstractEventLoop = loop
|
| 21 |
+
self._paused = False
|
| 22 |
+
self._drain_waiter: Optional[asyncio.Future[None]] = None
|
| 23 |
+
self._reading_paused = False
|
| 24 |
+
|
| 25 |
+
self.transport: Optional[asyncio.Transport] = None
|
| 26 |
+
|
| 27 |
+
@property
|
| 28 |
+
def connected(self) -> bool:
|
| 29 |
+
"""Return True if the connection is open."""
|
| 30 |
+
return self.transport is not None
|
| 31 |
+
|
| 32 |
+
@property
|
| 33 |
+
def writing_paused(self) -> bool:
|
| 34 |
+
return self._paused
|
| 35 |
+
|
| 36 |
+
def pause_writing(self) -> None:
|
| 37 |
+
assert not self._paused
|
| 38 |
+
self._paused = True
|
| 39 |
+
|
| 40 |
+
def resume_writing(self) -> None:
|
| 41 |
+
assert self._paused
|
| 42 |
+
self._paused = False
|
| 43 |
+
|
| 44 |
+
waiter = self._drain_waiter
|
| 45 |
+
if waiter is not None:
|
| 46 |
+
self._drain_waiter = None
|
| 47 |
+
if not waiter.done():
|
| 48 |
+
waiter.set_result(None)
|
| 49 |
+
|
| 50 |
+
def pause_reading(self) -> None:
|
| 51 |
+
if not self._reading_paused and self.transport is not None:
|
| 52 |
+
try:
|
| 53 |
+
self.transport.pause_reading()
|
| 54 |
+
except (AttributeError, NotImplementedError, RuntimeError):
|
| 55 |
+
pass
|
| 56 |
+
self._reading_paused = True
|
| 57 |
+
|
| 58 |
+
def resume_reading(self) -> None:
|
| 59 |
+
if self._reading_paused and self.transport is not None:
|
| 60 |
+
try:
|
| 61 |
+
self.transport.resume_reading()
|
| 62 |
+
except (AttributeError, NotImplementedError, RuntimeError):
|
| 63 |
+
pass
|
| 64 |
+
self._reading_paused = False
|
| 65 |
+
|
| 66 |
+
def connection_made(self, transport: asyncio.BaseTransport) -> None:
|
| 67 |
+
tr = cast(asyncio.Transport, transport)
|
| 68 |
+
tcp_nodelay(tr, True)
|
| 69 |
+
self.transport = tr
|
| 70 |
+
|
| 71 |
+
def connection_lost(self, exc: Optional[BaseException]) -> None:
|
| 72 |
+
# Wake up the writer if currently paused.
|
| 73 |
+
self.transport = None
|
| 74 |
+
if not self._paused:
|
| 75 |
+
return
|
| 76 |
+
waiter = self._drain_waiter
|
| 77 |
+
if waiter is None:
|
| 78 |
+
return
|
| 79 |
+
self._drain_waiter = None
|
| 80 |
+
if waiter.done():
|
| 81 |
+
return
|
| 82 |
+
if exc is None:
|
| 83 |
+
waiter.set_result(None)
|
| 84 |
+
else:
|
| 85 |
+
set_exception(
|
| 86 |
+
waiter,
|
| 87 |
+
ConnectionError("Connection lost"),
|
| 88 |
+
exc,
|
| 89 |
+
)
|
| 90 |
+
|
| 91 |
+
async def _drain_helper(self) -> None:
|
| 92 |
+
if self.transport is None:
|
| 93 |
+
raise ClientConnectionResetError("Connection lost")
|
| 94 |
+
if not self._paused:
|
| 95 |
+
return
|
| 96 |
+
waiter = self._drain_waiter
|
| 97 |
+
if waiter is None:
|
| 98 |
+
waiter = self._loop.create_future()
|
| 99 |
+
self._drain_waiter = waiter
|
| 100 |
+
await asyncio.shield(waiter)
|
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/client.py
ADDED
|
@@ -0,0 +1,1576 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""HTTP Client for asyncio."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import base64
|
| 5 |
+
import hashlib
|
| 6 |
+
import json
|
| 7 |
+
import os
|
| 8 |
+
import sys
|
| 9 |
+
import traceback
|
| 10 |
+
import warnings
|
| 11 |
+
from contextlib import suppress
|
| 12 |
+
from types import TracebackType
|
| 13 |
+
from typing import (
|
| 14 |
+
TYPE_CHECKING,
|
| 15 |
+
Any,
|
| 16 |
+
Awaitable,
|
| 17 |
+
Callable,
|
| 18 |
+
Coroutine,
|
| 19 |
+
Final,
|
| 20 |
+
FrozenSet,
|
| 21 |
+
Generator,
|
| 22 |
+
Generic,
|
| 23 |
+
Iterable,
|
| 24 |
+
List,
|
| 25 |
+
Mapping,
|
| 26 |
+
Optional,
|
| 27 |
+
Set,
|
| 28 |
+
Tuple,
|
| 29 |
+
Type,
|
| 30 |
+
TypedDict,
|
| 31 |
+
TypeVar,
|
| 32 |
+
Union,
|
| 33 |
+
)
|
| 34 |
+
|
| 35 |
+
import attr
|
| 36 |
+
from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
|
| 37 |
+
from yarl import URL
|
| 38 |
+
|
| 39 |
+
from . import hdrs, http, payload
|
| 40 |
+
from ._websocket.reader import WebSocketDataQueue
|
| 41 |
+
from .abc import AbstractCookieJar
|
| 42 |
+
from .client_exceptions import (
|
| 43 |
+
ClientConnectionError,
|
| 44 |
+
ClientConnectionResetError,
|
| 45 |
+
ClientConnectorCertificateError,
|
| 46 |
+
ClientConnectorDNSError,
|
| 47 |
+
ClientConnectorError,
|
| 48 |
+
ClientConnectorSSLError,
|
| 49 |
+
ClientError,
|
| 50 |
+
ClientHttpProxyError,
|
| 51 |
+
ClientOSError,
|
| 52 |
+
ClientPayloadError,
|
| 53 |
+
ClientProxyConnectionError,
|
| 54 |
+
ClientResponseError,
|
| 55 |
+
ClientSSLError,
|
| 56 |
+
ConnectionTimeoutError,
|
| 57 |
+
ContentTypeError,
|
| 58 |
+
InvalidURL,
|
| 59 |
+
InvalidUrlClientError,
|
| 60 |
+
InvalidUrlRedirectClientError,
|
| 61 |
+
NonHttpUrlClientError,
|
| 62 |
+
NonHttpUrlRedirectClientError,
|
| 63 |
+
RedirectClientError,
|
| 64 |
+
ServerConnectionError,
|
| 65 |
+
ServerDisconnectedError,
|
| 66 |
+
ServerFingerprintMismatch,
|
| 67 |
+
ServerTimeoutError,
|
| 68 |
+
SocketTimeoutError,
|
| 69 |
+
TooManyRedirects,
|
| 70 |
+
WSMessageTypeError,
|
| 71 |
+
WSServerHandshakeError,
|
| 72 |
+
)
|
| 73 |
+
from .client_reqrep import (
|
| 74 |
+
ClientRequest as ClientRequest,
|
| 75 |
+
ClientResponse as ClientResponse,
|
| 76 |
+
Fingerprint as Fingerprint,
|
| 77 |
+
RequestInfo as RequestInfo,
|
| 78 |
+
_merge_ssl_params,
|
| 79 |
+
)
|
| 80 |
+
from .client_ws import (
|
| 81 |
+
DEFAULT_WS_CLIENT_TIMEOUT,
|
| 82 |
+
ClientWebSocketResponse as ClientWebSocketResponse,
|
| 83 |
+
ClientWSTimeout as ClientWSTimeout,
|
| 84 |
+
)
|
| 85 |
+
from .connector import (
|
| 86 |
+
HTTP_AND_EMPTY_SCHEMA_SET,
|
| 87 |
+
BaseConnector as BaseConnector,
|
| 88 |
+
NamedPipeConnector as NamedPipeConnector,
|
| 89 |
+
TCPConnector as TCPConnector,
|
| 90 |
+
UnixConnector as UnixConnector,
|
| 91 |
+
)
|
| 92 |
+
from .cookiejar import CookieJar
|
| 93 |
+
from .helpers import (
|
| 94 |
+
_SENTINEL,
|
| 95 |
+
DEBUG,
|
| 96 |
+
EMPTY_BODY_METHODS,
|
| 97 |
+
BasicAuth,
|
| 98 |
+
TimeoutHandle,
|
| 99 |
+
get_env_proxy_for_url,
|
| 100 |
+
sentinel,
|
| 101 |
+
strip_auth_from_url,
|
| 102 |
+
)
|
| 103 |
+
from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
|
| 104 |
+
from .http_websocket import WSHandshakeError, ws_ext_gen, ws_ext_parse
|
| 105 |
+
from .tracing import Trace, TraceConfig
|
| 106 |
+
from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, Query, StrOrURL
|
| 107 |
+
|
| 108 |
+
__all__ = (
|
| 109 |
+
# client_exceptions
|
| 110 |
+
"ClientConnectionError",
|
| 111 |
+
"ClientConnectionResetError",
|
| 112 |
+
"ClientConnectorCertificateError",
|
| 113 |
+
"ClientConnectorDNSError",
|
| 114 |
+
"ClientConnectorError",
|
| 115 |
+
"ClientConnectorSSLError",
|
| 116 |
+
"ClientError",
|
| 117 |
+
"ClientHttpProxyError",
|
| 118 |
+
"ClientOSError",
|
| 119 |
+
"ClientPayloadError",
|
| 120 |
+
"ClientProxyConnectionError",
|
| 121 |
+
"ClientResponseError",
|
| 122 |
+
"ClientSSLError",
|
| 123 |
+
"ConnectionTimeoutError",
|
| 124 |
+
"ContentTypeError",
|
| 125 |
+
"InvalidURL",
|
| 126 |
+
"InvalidUrlClientError",
|
| 127 |
+
"RedirectClientError",
|
| 128 |
+
"NonHttpUrlClientError",
|
| 129 |
+
"InvalidUrlRedirectClientError",
|
| 130 |
+
"NonHttpUrlRedirectClientError",
|
| 131 |
+
"ServerConnectionError",
|
| 132 |
+
"ServerDisconnectedError",
|
| 133 |
+
"ServerFingerprintMismatch",
|
| 134 |
+
"ServerTimeoutError",
|
| 135 |
+
"SocketTimeoutError",
|
| 136 |
+
"TooManyRedirects",
|
| 137 |
+
"WSServerHandshakeError",
|
| 138 |
+
# client_reqrep
|
| 139 |
+
"ClientRequest",
|
| 140 |
+
"ClientResponse",
|
| 141 |
+
"Fingerprint",
|
| 142 |
+
"RequestInfo",
|
| 143 |
+
# connector
|
| 144 |
+
"BaseConnector",
|
| 145 |
+
"TCPConnector",
|
| 146 |
+
"UnixConnector",
|
| 147 |
+
"NamedPipeConnector",
|
| 148 |
+
# client_ws
|
| 149 |
+
"ClientWebSocketResponse",
|
| 150 |
+
# client
|
| 151 |
+
"ClientSession",
|
| 152 |
+
"ClientTimeout",
|
| 153 |
+
"ClientWSTimeout",
|
| 154 |
+
"request",
|
| 155 |
+
"WSMessageTypeError",
|
| 156 |
+
)
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
if TYPE_CHECKING:
|
| 160 |
+
from ssl import SSLContext
|
| 161 |
+
else:
|
| 162 |
+
SSLContext = None
|
| 163 |
+
|
| 164 |
+
if sys.version_info >= (3, 11) and TYPE_CHECKING:
|
| 165 |
+
from typing import Unpack
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
class _RequestOptions(TypedDict, total=False):
|
| 169 |
+
params: Query
|
| 170 |
+
data: Any
|
| 171 |
+
json: Any
|
| 172 |
+
cookies: Union[LooseCookies, None]
|
| 173 |
+
headers: Union[LooseHeaders, None]
|
| 174 |
+
skip_auto_headers: Union[Iterable[str], None]
|
| 175 |
+
auth: Union[BasicAuth, None]
|
| 176 |
+
allow_redirects: bool
|
| 177 |
+
max_redirects: int
|
| 178 |
+
compress: Union[str, bool, None]
|
| 179 |
+
chunked: Union[bool, None]
|
| 180 |
+
expect100: bool
|
| 181 |
+
raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]]
|
| 182 |
+
read_until_eof: bool
|
| 183 |
+
proxy: Union[StrOrURL, None]
|
| 184 |
+
proxy_auth: Union[BasicAuth, None]
|
| 185 |
+
timeout: "Union[ClientTimeout, _SENTINEL, None]"
|
| 186 |
+
ssl: Union[SSLContext, bool, Fingerprint]
|
| 187 |
+
server_hostname: Union[str, None]
|
| 188 |
+
proxy_headers: Union[LooseHeaders, None]
|
| 189 |
+
trace_request_ctx: Union[Mapping[str, Any], None]
|
| 190 |
+
read_bufsize: Union[int, None]
|
| 191 |
+
auto_decompress: Union[bool, None]
|
| 192 |
+
max_line_size: Union[int, None]
|
| 193 |
+
max_field_size: Union[int, None]
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
| 197 |
+
class ClientTimeout:
|
| 198 |
+
total: Optional[float] = None
|
| 199 |
+
connect: Optional[float] = None
|
| 200 |
+
sock_read: Optional[float] = None
|
| 201 |
+
sock_connect: Optional[float] = None
|
| 202 |
+
ceil_threshold: float = 5
|
| 203 |
+
|
| 204 |
+
# pool_queue_timeout: Optional[float] = None
|
| 205 |
+
# dns_resolution_timeout: Optional[float] = None
|
| 206 |
+
# socket_connect_timeout: Optional[float] = None
|
| 207 |
+
# connection_acquiring_timeout: Optional[float] = None
|
| 208 |
+
# new_connection_timeout: Optional[float] = None
|
| 209 |
+
# http_header_timeout: Optional[float] = None
|
| 210 |
+
# response_body_timeout: Optional[float] = None
|
| 211 |
+
|
| 212 |
+
# to create a timeout specific for a single request, either
|
| 213 |
+
# - create a completely new one to overwrite the default
|
| 214 |
+
# - or use http://www.attrs.org/en/stable/api.html#attr.evolve
|
| 215 |
+
# to overwrite the defaults
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
# 5 Minute default read timeout
|
| 219 |
+
DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60, sock_connect=30)
|
| 220 |
+
|
| 221 |
+
# https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2
|
| 222 |
+
IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"})
|
| 223 |
+
|
| 224 |
+
_RetType = TypeVar("_RetType", ClientResponse, ClientWebSocketResponse)
|
| 225 |
+
_CharsetResolver = Callable[[ClientResponse, bytes], str]
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
class ClientSession:
|
| 229 |
+
"""First-class interface for making HTTP requests."""
|
| 230 |
+
|
| 231 |
+
ATTRS = frozenset(
|
| 232 |
+
[
|
| 233 |
+
"_base_url",
|
| 234 |
+
"_base_url_origin",
|
| 235 |
+
"_source_traceback",
|
| 236 |
+
"_connector",
|
| 237 |
+
"_loop",
|
| 238 |
+
"_cookie_jar",
|
| 239 |
+
"_connector_owner",
|
| 240 |
+
"_default_auth",
|
| 241 |
+
"_version",
|
| 242 |
+
"_json_serialize",
|
| 243 |
+
"_requote_redirect_url",
|
| 244 |
+
"_timeout",
|
| 245 |
+
"_raise_for_status",
|
| 246 |
+
"_auto_decompress",
|
| 247 |
+
"_trust_env",
|
| 248 |
+
"_default_headers",
|
| 249 |
+
"_skip_auto_headers",
|
| 250 |
+
"_request_class",
|
| 251 |
+
"_response_class",
|
| 252 |
+
"_ws_response_class",
|
| 253 |
+
"_trace_configs",
|
| 254 |
+
"_read_bufsize",
|
| 255 |
+
"_max_line_size",
|
| 256 |
+
"_max_field_size",
|
| 257 |
+
"_resolve_charset",
|
| 258 |
+
"_default_proxy",
|
| 259 |
+
"_default_proxy_auth",
|
| 260 |
+
"_retry_connection",
|
| 261 |
+
"requote_redirect_url",
|
| 262 |
+
]
|
| 263 |
+
)
|
| 264 |
+
|
| 265 |
+
_source_traceback: Optional[traceback.StackSummary] = None
|
| 266 |
+
_connector: Optional[BaseConnector] = None
|
| 267 |
+
|
| 268 |
+
def __init__(
|
| 269 |
+
self,
|
| 270 |
+
base_url: Optional[StrOrURL] = None,
|
| 271 |
+
*,
|
| 272 |
+
connector: Optional[BaseConnector] = None,
|
| 273 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 274 |
+
cookies: Optional[LooseCookies] = None,
|
| 275 |
+
headers: Optional[LooseHeaders] = None,
|
| 276 |
+
proxy: Optional[StrOrURL] = None,
|
| 277 |
+
proxy_auth: Optional[BasicAuth] = None,
|
| 278 |
+
skip_auto_headers: Optional[Iterable[str]] = None,
|
| 279 |
+
auth: Optional[BasicAuth] = None,
|
| 280 |
+
json_serialize: JSONEncoder = json.dumps,
|
| 281 |
+
request_class: Type[ClientRequest] = ClientRequest,
|
| 282 |
+
response_class: Type[ClientResponse] = ClientResponse,
|
| 283 |
+
ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
|
| 284 |
+
version: HttpVersion = http.HttpVersion11,
|
| 285 |
+
cookie_jar: Optional[AbstractCookieJar] = None,
|
| 286 |
+
connector_owner: bool = True,
|
| 287 |
+
raise_for_status: Union[
|
| 288 |
+
bool, Callable[[ClientResponse], Awaitable[None]]
|
| 289 |
+
] = False,
|
| 290 |
+
read_timeout: Union[float, _SENTINEL] = sentinel,
|
| 291 |
+
conn_timeout: Optional[float] = None,
|
| 292 |
+
timeout: Union[object, ClientTimeout] = sentinel,
|
| 293 |
+
auto_decompress: bool = True,
|
| 294 |
+
trust_env: bool = False,
|
| 295 |
+
requote_redirect_url: bool = True,
|
| 296 |
+
trace_configs: Optional[List[TraceConfig]] = None,
|
| 297 |
+
read_bufsize: int = 2**16,
|
| 298 |
+
max_line_size: int = 8190,
|
| 299 |
+
max_field_size: int = 8190,
|
| 300 |
+
fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8",
|
| 301 |
+
) -> None:
|
| 302 |
+
# We initialise _connector to None immediately, as it's referenced in __del__()
|
| 303 |
+
# and could cause issues if an exception occurs during initialisation.
|
| 304 |
+
self._connector: Optional[BaseConnector] = None
|
| 305 |
+
|
| 306 |
+
if loop is None:
|
| 307 |
+
if connector is not None:
|
| 308 |
+
loop = connector._loop
|
| 309 |
+
|
| 310 |
+
loop = loop or asyncio.get_running_loop()
|
| 311 |
+
|
| 312 |
+
if base_url is None or isinstance(base_url, URL):
|
| 313 |
+
self._base_url: Optional[URL] = base_url
|
| 314 |
+
self._base_url_origin = None if base_url is None else base_url.origin()
|
| 315 |
+
else:
|
| 316 |
+
self._base_url = URL(base_url)
|
| 317 |
+
self._base_url_origin = self._base_url.origin()
|
| 318 |
+
assert self._base_url.absolute, "Only absolute URLs are supported"
|
| 319 |
+
if self._base_url is not None and not self._base_url.path.endswith("/"):
|
| 320 |
+
raise ValueError("base_url must have a trailing '/'")
|
| 321 |
+
|
| 322 |
+
if timeout is sentinel or timeout is None:
|
| 323 |
+
self._timeout = DEFAULT_TIMEOUT
|
| 324 |
+
if read_timeout is not sentinel:
|
| 325 |
+
warnings.warn(
|
| 326 |
+
"read_timeout is deprecated, use timeout argument instead",
|
| 327 |
+
DeprecationWarning,
|
| 328 |
+
stacklevel=2,
|
| 329 |
+
)
|
| 330 |
+
self._timeout = attr.evolve(self._timeout, total=read_timeout)
|
| 331 |
+
if conn_timeout is not None:
|
| 332 |
+
self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
|
| 333 |
+
warnings.warn(
|
| 334 |
+
"conn_timeout is deprecated, use timeout argument instead",
|
| 335 |
+
DeprecationWarning,
|
| 336 |
+
stacklevel=2,
|
| 337 |
+
)
|
| 338 |
+
else:
|
| 339 |
+
if not isinstance(timeout, ClientTimeout):
|
| 340 |
+
raise ValueError(
|
| 341 |
+
f"timeout parameter cannot be of {type(timeout)} type, "
|
| 342 |
+
"please use 'timeout=ClientTimeout(...)'",
|
| 343 |
+
)
|
| 344 |
+
self._timeout = timeout
|
| 345 |
+
if read_timeout is not sentinel:
|
| 346 |
+
raise ValueError(
|
| 347 |
+
"read_timeout and timeout parameters "
|
| 348 |
+
"conflict, please setup "
|
| 349 |
+
"timeout.read"
|
| 350 |
+
)
|
| 351 |
+
if conn_timeout is not None:
|
| 352 |
+
raise ValueError(
|
| 353 |
+
"conn_timeout and timeout parameters "
|
| 354 |
+
"conflict, please setup "
|
| 355 |
+
"timeout.connect"
|
| 356 |
+
)
|
| 357 |
+
|
| 358 |
+
if connector is None:
|
| 359 |
+
connector = TCPConnector(loop=loop)
|
| 360 |
+
|
| 361 |
+
if connector._loop is not loop:
|
| 362 |
+
raise RuntimeError("Session and connector has to use same event loop")
|
| 363 |
+
|
| 364 |
+
self._loop = loop
|
| 365 |
+
|
| 366 |
+
if loop.get_debug():
|
| 367 |
+
self._source_traceback = traceback.extract_stack(sys._getframe(1))
|
| 368 |
+
|
| 369 |
+
if cookie_jar is None:
|
| 370 |
+
cookie_jar = CookieJar(loop=loop)
|
| 371 |
+
self._cookie_jar = cookie_jar
|
| 372 |
+
|
| 373 |
+
if cookies:
|
| 374 |
+
self._cookie_jar.update_cookies(cookies)
|
| 375 |
+
|
| 376 |
+
self._connector = connector
|
| 377 |
+
self._connector_owner = connector_owner
|
| 378 |
+
self._default_auth = auth
|
| 379 |
+
self._version = version
|
| 380 |
+
self._json_serialize = json_serialize
|
| 381 |
+
self._raise_for_status = raise_for_status
|
| 382 |
+
self._auto_decompress = auto_decompress
|
| 383 |
+
self._trust_env = trust_env
|
| 384 |
+
self._requote_redirect_url = requote_redirect_url
|
| 385 |
+
self._read_bufsize = read_bufsize
|
| 386 |
+
self._max_line_size = max_line_size
|
| 387 |
+
self._max_field_size = max_field_size
|
| 388 |
+
|
| 389 |
+
# Convert to list of tuples
|
| 390 |
+
if headers:
|
| 391 |
+
real_headers: CIMultiDict[str] = CIMultiDict(headers)
|
| 392 |
+
else:
|
| 393 |
+
real_headers = CIMultiDict()
|
| 394 |
+
self._default_headers: CIMultiDict[str] = real_headers
|
| 395 |
+
if skip_auto_headers is not None:
|
| 396 |
+
self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
|
| 397 |
+
else:
|
| 398 |
+
self._skip_auto_headers = frozenset()
|
| 399 |
+
|
| 400 |
+
self._request_class = request_class
|
| 401 |
+
self._response_class = response_class
|
| 402 |
+
self._ws_response_class = ws_response_class
|
| 403 |
+
|
| 404 |
+
self._trace_configs = trace_configs or []
|
| 405 |
+
for trace_config in self._trace_configs:
|
| 406 |
+
trace_config.freeze()
|
| 407 |
+
|
| 408 |
+
self._resolve_charset = fallback_charset_resolver
|
| 409 |
+
|
| 410 |
+
self._default_proxy = proxy
|
| 411 |
+
self._default_proxy_auth = proxy_auth
|
| 412 |
+
self._retry_connection: bool = True
|
| 413 |
+
|
| 414 |
+
def __init_subclass__(cls: Type["ClientSession"]) -> None:
    """Warn (DeprecationWarning) whenever ClientSession is subclassed."""
    message = (
        "Inheritance class {} from ClientSession "
        "is discouraged".format(cls.__name__)
    )
    warnings.warn(message, DeprecationWarning, stacklevel=2)
|
| 421 |
+
|
| 422 |
+
if DEBUG:

    def __setattr__(self, name: str, val: Any) -> None:
        """Debug-mode attribute guard: warn on non-declared session attributes."""
        if name not in self.ATTRS:
            # Setting anything outside the declared attribute set is almost
            # always a typo or an unsupported extension point.
            message = (
                "Setting custom ClientSession.{} attribute "
                "is discouraged".format(name)
            )
            warnings.warn(message, DeprecationWarning, stacklevel=2)
        super().__setattr__(name, val)
|
| 433 |
+
|
| 434 |
+
def __del__(self, _warnings: Any = warnings) -> None:
    """Report a session that is garbage-collected while still open.

    ``_warnings`` is bound at definition time so the warning still works
    during interpreter shutdown, when module globals may already be gone.
    """
    if self.closed:
        return
    _warnings.warn(
        f"Unclosed client session {self!r}", ResourceWarning, source=self
    )
    context = {"client_session": self, "message": "Unclosed client session"}
    if self._source_traceback is not None:
        context["source_traceback"] = self._source_traceback
    self._loop.call_exception_handler(context)
|
| 444 |
+
|
| 445 |
+
if sys.version_info >= (3, 11) and TYPE_CHECKING:

    def request(
        self,
        method: str,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

else:

    def request(
        self, method: str, url: StrOrURL, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP request."""
        # Wrap the coroutine so callers can use both ``await`` and
        # ``async with`` on the returned object.
        coro = self._request(method, url, **kwargs)
        return _RequestContextManager(coro)
|
| 461 |
+
|
| 462 |
+
def _build_url(self, str_or_url: StrOrURL) -> URL:
    """Resolve *str_or_url*, joining it onto the session base URL if one is set."""
    url = URL(str_or_url)
    if self._base_url is None:
        return url
    # With a base URL configured, per-request URLs must be relative.
    assert not url.absolute
    return self._base_url.join(url)
|
| 469 |
+
|
| 470 |
+
async def _request(
    self,
    method: str,
    str_or_url: StrOrURL,
    *,
    params: Query = None,
    data: Any = None,
    json: Any = None,
    cookies: Optional[LooseCookies] = None,
    headers: Optional[LooseHeaders] = None,
    skip_auto_headers: Optional[Iterable[str]] = None,
    auth: Optional[BasicAuth] = None,
    allow_redirects: bool = True,
    max_redirects: int = 10,
    compress: Union[str, bool, None] = None,
    chunked: Optional[bool] = None,
    expect100: bool = False,
    raise_for_status: Union[
        None, bool, Callable[[ClientResponse], Awaitable[None]]
    ] = None,
    read_until_eof: bool = True,
    proxy: Optional[StrOrURL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    timeout: Union[ClientTimeout, _SENTINEL] = sentinel,
    verify_ssl: Optional[bool] = None,
    fingerprint: Optional[bytes] = None,
    ssl_context: Optional[SSLContext] = None,
    ssl: Union[SSLContext, bool, Fingerprint] = True,
    server_hostname: Optional[str] = None,
    proxy_headers: Optional[LooseHeaders] = None,
    trace_request_ctx: Optional[Mapping[str, Any]] = None,
    read_bufsize: Optional[int] = None,
    auto_decompress: Optional[bool] = None,
    max_line_size: Optional[int] = None,
    max_field_size: Optional[int] = None,
) -> ClientResponse:
    """Send an HTTP request and return the started :class:`ClientResponse`.

    Implementation behind :meth:`request` and the verb helpers.  It merges
    session defaults (headers, auth, proxy, cookies, timeouts) with the
    per-request arguments, runs the configured trace hooks, retries an
    idempotent request once if a persistent connection was dropped, and
    follows 301/302/303/307/308 redirects up to ``max_redirects``.

    Raises ``RuntimeError`` if the session is closed, ``ValueError`` for
    conflicting ``data``/``json`` or auth arguments, and the various
    ``InvalidUrl*``/``NonHttpUrl*``/timeout client errors documented below.
    """

    # NOTE: timeout clamps existing connect and read timeouts. We cannot
    # set the default to None because we need to detect if the user wants
    # to use the existing timeouts by setting timeout to None.

    if self.closed:
        raise RuntimeError("Session is closed")

    # Fold the deprecated verify_ssl/ssl_context/fingerprint knobs into
    # the single ``ssl`` argument.
    ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

    if data is not None and json is not None:
        raise ValueError(
            "data and json parameters can not be used at the same time"
        )
    elif json is not None:
        # ``json=`` is sugar for a serialized JSON payload body.
        data = payload.JsonPayload(json, dumps=self._json_serialize)

    if not isinstance(chunked, bool) and chunked is not None:
        warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)

    redirects = 0
    history: List[ClientResponse] = []
    version = self._version
    params = params or {}

    # Merge with default headers and transform to CIMultiDict
    headers = self._prepare_headers(headers)

    try:
        url = self._build_url(str_or_url)
    except ValueError as e:
        raise InvalidUrlClientError(str_or_url) from e

    assert self._connector is not None
    if url.scheme not in self._connector.allowed_protocol_schema_set:
        raise NonHttpUrlClientError(url)

    # Union per-request skip_auto_headers with the session-level set.
    skip_headers: Optional[Iterable[istr]]
    if skip_auto_headers is not None:
        skip_headers = {
            istr(i) for i in skip_auto_headers
        } | self._skip_auto_headers
    elif self._skip_auto_headers:
        skip_headers = self._skip_auto_headers
    else:
        skip_headers = None

    # Fall back to the session-wide proxy configuration.
    if proxy is None:
        proxy = self._default_proxy
    if proxy_auth is None:
        proxy_auth = self._default_proxy_auth

    if proxy is None:
        proxy_headers = None
    else:
        proxy_headers = self._prepare_headers(proxy_headers)
        try:
            proxy = URL(proxy)
        except ValueError as e:
            raise InvalidURL(proxy) from e

    if timeout is sentinel:
        real_timeout: ClientTimeout = self._timeout
    else:
        if not isinstance(timeout, ClientTimeout):
            # Bare number given: treat it as the total timeout.
            real_timeout = ClientTimeout(total=timeout)
        else:
            real_timeout = timeout
    # timeout is cumulative for all request operations
    # (request, redirects, responses, data consuming)
    tm = TimeoutHandle(
        self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold
    )
    handle = tm.start()

    # Per-request overrides fall back to session defaults.
    if read_bufsize is None:
        read_bufsize = self._read_bufsize

    if auto_decompress is None:
        auto_decompress = self._auto_decompress

    if max_line_size is None:
        max_line_size = self._max_line_size

    if max_field_size is None:
        max_field_size = self._max_field_size

    # One Trace wrapper per configured TraceConfig for this request.
    traces = [
        Trace(
            self,
            trace_config,
            trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
        )
        for trace_config in self._trace_configs
    ]

    for trace in traces:
        await trace.send_request_start(method, url.update_query(params), headers)

    timer = tm.timer()
    try:
        with timer:
            # https://www.rfc-editor.org/rfc/rfc9112.html#name-retrying-requests
            retry_persistent_connection = (
                self._retry_connection and method in IDEMPOTENT_METHODS
            )
            # Main request/redirect loop: each iteration sends one request;
            # ``continue`` re-enters for a retry or a redirect target.
            while True:
                url, auth_from_url = strip_auth_from_url(url)
                if not url.raw_host:
                    # NOTE: Bail early, otherwise, causes `InvalidURL` through
                    # NOTE: `self._request_class()` below.
                    err_exc_cls = (
                        InvalidUrlRedirectClientError
                        if redirects
                        else InvalidUrlClientError
                    )
                    raise err_exc_cls(url)
                # If `auth` was passed for an already authenticated URL,
                # disallow only if this is the initial URL; this is to avoid issues
                # with sketchy redirects that are not the caller's responsibility
                if not history and (auth and auth_from_url):
                    raise ValueError(
                        "Cannot combine AUTH argument with "
                        "credentials encoded in URL"
                    )

                # Override the auth with the one from the URL only if we
                # have no auth, or if we got an auth from a redirect URL
                if auth is None or (history and auth_from_url is not None):
                    auth = auth_from_url

                if (
                    auth is None
                    and self._default_auth
                    and (
                        not self._base_url or self._base_url_origin == url.origin()
                    )
                ):
                    auth = self._default_auth
                # It would be confusing if we support explicit
                # Authorization header with auth argument
                if (
                    headers is not None
                    and auth is not None
                    and hdrs.AUTHORIZATION in headers
                ):
                    raise ValueError(
                        "Cannot combine AUTHORIZATION header "
                        "with AUTH argument or credentials "
                        "encoded in URL"
                    )

                # Session cookies applicable to this URL, plus any
                # per-request cookies filtered through a throwaway jar.
                all_cookies = self._cookie_jar.filter_cookies(url)

                if cookies is not None:
                    tmp_cookie_jar = CookieJar(
                        quote_cookie=self._cookie_jar.quote_cookie
                    )
                    tmp_cookie_jar.update_cookies(cookies)
                    req_cookies = tmp_cookie_jar.filter_cookies(url)
                    if req_cookies:
                        all_cookies.load(req_cookies)

                if proxy is not None:
                    proxy = URL(proxy)
                elif self._trust_env:
                    # No explicit proxy: honor HTTP(S)_PROXY environment
                    # variables when trust_env is enabled.
                    with suppress(LookupError):
                        proxy, proxy_auth = get_env_proxy_for_url(url)

                req = self._request_class(
                    method,
                    url,
                    params=params,
                    headers=headers,
                    skip_auto_headers=skip_headers,
                    data=data,
                    cookies=all_cookies,
                    auth=auth,
                    version=version,
                    compress=compress,
                    chunked=chunked,
                    expect100=expect100,
                    loop=self._loop,
                    response_class=self._response_class,
                    proxy=proxy,
                    proxy_auth=proxy_auth,
                    timer=timer,
                    session=self,
                    ssl=ssl if ssl is not None else True,
                    server_hostname=server_hostname,
                    proxy_headers=proxy_headers,
                    traces=traces,
                    trust_env=self.trust_env,
                )

                # connection timeout
                try:
                    conn = await self._connector.connect(
                        req, traces=traces, timeout=real_timeout
                    )
                except asyncio.TimeoutError as exc:
                    raise ConnectionTimeoutError(
                        f"Connection timeout to host {url}"
                    ) from exc

                assert conn.transport is not None

                assert conn.protocol is not None
                conn.protocol.set_response_params(
                    timer=timer,
                    skip_payload=method in EMPTY_BODY_METHODS,
                    read_until_eof=read_until_eof,
                    auto_decompress=auto_decompress,
                    read_timeout=real_timeout.sock_read,
                    read_bufsize=read_bufsize,
                    timeout_ceil_threshold=self._connector._timeout_ceil_threshold,
                    max_line_size=max_line_size,
                    max_field_size=max_field_size,
                )

                # Send the request and start reading the response; on any
                # failure close the partially-used response/connection.
                try:
                    try:
                        resp = await req.send(conn)
                        try:
                            await resp.start(conn)
                        except BaseException:
                            resp.close()
                            raise
                    except BaseException:
                        conn.close()
                        raise
                except (ClientOSError, ServerDisconnectedError):
                    # Persistent connection dropped mid-flight: retry an
                    # idempotent request exactly once.
                    if retry_persistent_connection:
                        retry_persistent_connection = False
                        continue
                    raise
                except ClientError:
                    raise
                except OSError as exc:
                    if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                        raise
                    raise ClientOSError(*exc.args) from exc

                if cookies := resp._cookies:
                    self._cookie_jar.update_cookies(cookies, resp.url)

                # redirects
                if resp.status in (301, 302, 303, 307, 308) and allow_redirects:

                    for trace in traces:
                        await trace.send_request_redirect(
                            method, url.update_query(params), headers, resp
                        )

                    redirects += 1
                    history.append(resp)
                    if max_redirects and redirects >= max_redirects:
                        resp.close()
                        raise TooManyRedirects(
                            history[0].request_info, tuple(history)
                        )

                    # For 301 and 302, mimic IE, now changed in RFC
                    # https://github.com/kennethreitz/requests/pull/269
                    if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
                        resp.status in (301, 302) and resp.method == hdrs.METH_POST
                    ):
                        method = hdrs.METH_GET
                        data = None
                        if headers.get(hdrs.CONTENT_LENGTH):
                            headers.pop(hdrs.CONTENT_LENGTH)

                    r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
                        hdrs.URI
                    )
                    if r_url is None:
                        # see github.com/aio-libs/aiohttp/issues/2022
                        break
                    else:
                        # reading from correct redirection
                        # response is forbidden
                        resp.release()

                    try:
                        parsed_redirect_url = URL(
                            r_url, encoded=not self._requote_redirect_url
                        )
                    except ValueError as e:
                        raise InvalidUrlRedirectClientError(
                            r_url,
                            "Server attempted redirecting to a location that does not look like a URL",
                        ) from e

                    scheme = parsed_redirect_url.scheme
                    if scheme not in HTTP_AND_EMPTY_SCHEMA_SET:
                        resp.close()
                        raise NonHttpUrlRedirectClientError(r_url)
                    elif not scheme:
                        # Relative Location: resolve against the current URL.
                        parsed_redirect_url = url.join(parsed_redirect_url)

                    try:
                        redirect_origin = parsed_redirect_url.origin()
                    except ValueError as origin_val_err:
                        raise InvalidUrlRedirectClientError(
                            parsed_redirect_url,
                            "Invalid redirect URL origin",
                        ) from origin_val_err

                    # Never leak credentials across origins on redirect.
                    if url.origin() != redirect_origin:
                        auth = None
                        headers.pop(hdrs.AUTHORIZATION, None)

                    url = parsed_redirect_url
                    params = {}
                    resp.release()
                    continue

                break

        # check response status
        if raise_for_status is None:
            raise_for_status = self._raise_for_status

        if raise_for_status is None:
            pass
        elif callable(raise_for_status):
            await raise_for_status(resp)
        elif raise_for_status:
            resp.raise_for_status()

        # register connection
        if handle is not None:
            if resp.connection is not None:
                resp.connection.add_callback(handle.cancel)
            else:
                handle.cancel()

        resp._history = tuple(history)

        for trace in traces:
            await trace.send_request_end(
                method, url.update_query(params), headers, resp
            )
        return resp

    except BaseException as e:
        # cleanup timer
        tm.close()
        if handle:
            handle.cancel()
            handle = None

        for trace in traces:
            await trace.send_request_exception(
                method, url.update_query(params), headers, e
            )
        raise
|
| 863 |
+
|
| 864 |
+
def ws_connect(
    self,
    url: StrOrURL,
    *,
    method: str = hdrs.METH_GET,
    protocols: Iterable[str] = (),
    timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel,
    receive_timeout: Optional[float] = None,
    autoclose: bool = True,
    autoping: bool = True,
    heartbeat: Optional[float] = None,
    auth: Optional[BasicAuth] = None,
    origin: Optional[str] = None,
    params: Query = None,
    headers: Optional[LooseHeaders] = None,
    proxy: Optional[StrOrURL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    ssl: Union[SSLContext, bool, Fingerprint] = True,
    verify_ssl: Optional[bool] = None,
    fingerprint: Optional[bytes] = None,
    ssl_context: Optional[SSLContext] = None,
    server_hostname: Optional[str] = None,
    proxy_headers: Optional[LooseHeaders] = None,
    compress: int = 0,
    max_msg_size: int = 4 * 1024 * 1024,
) -> "_WSRequestContextManager":
    """Initiate websocket connection."""
    # Every option is forwarded unchanged to the coroutine implementation;
    # the wrapper only adds ``async with`` support via the context manager.
    ws_options = dict(
        method=method,
        protocols=protocols,
        timeout=timeout,
        receive_timeout=receive_timeout,
        autoclose=autoclose,
        autoping=autoping,
        heartbeat=heartbeat,
        auth=auth,
        origin=origin,
        params=params,
        headers=headers,
        proxy=proxy,
        proxy_auth=proxy_auth,
        ssl=ssl,
        verify_ssl=verify_ssl,
        fingerprint=fingerprint,
        ssl_context=ssl_context,
        server_hostname=server_hostname,
        proxy_headers=proxy_headers,
        compress=compress,
        max_msg_size=max_msg_size,
    )
    return _WSRequestContextManager(self._ws_connect(url, **ws_options))
|
| 917 |
+
|
| 918 |
+
async def _ws_connect(
    self,
    url: StrOrURL,
    *,
    method: str = hdrs.METH_GET,
    protocols: Iterable[str] = (),
    timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel,
    receive_timeout: Optional[float] = None,
    autoclose: bool = True,
    autoping: bool = True,
    heartbeat: Optional[float] = None,
    auth: Optional[BasicAuth] = None,
    origin: Optional[str] = None,
    params: Query = None,
    headers: Optional[LooseHeaders] = None,
    proxy: Optional[StrOrURL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    ssl: Union[SSLContext, bool, Fingerprint] = True,
    verify_ssl: Optional[bool] = None,
    fingerprint: Optional[bytes] = None,
    ssl_context: Optional[SSLContext] = None,
    server_hostname: Optional[str] = None,
    proxy_headers: Optional[LooseHeaders] = None,
    compress: int = 0,
    max_msg_size: int = 4 * 1024 * 1024,
) -> ClientWebSocketResponse:
    """Perform the WebSocket opening handshake and build the WS response.

    Sends the HTTP Upgrade request through :meth:`request`, validates the
    101 handshake (status, ``Upgrade``/``Connection`` headers and the
    ``Sec-WebSocket-Accept`` challenge), negotiates the subprotocol and
    permessage-deflate compression, then attaches the WebSocket
    reader/writer to the underlying connection.

    Raises ``WSServerHandshakeError`` if any handshake check fails.
    """
    # Normalize the two deprecated timeout spellings into a ClientWSTimeout.
    if timeout is not sentinel:
        if isinstance(timeout, ClientWSTimeout):
            ws_timeout = timeout
        else:
            warnings.warn(
                "parameter 'timeout' of type 'float' "
                "is deprecated, please use "
                "'timeout=ClientWSTimeout(ws_close=...)'",
                DeprecationWarning,
                stacklevel=2,
            )
            ws_timeout = ClientWSTimeout(ws_close=timeout)
    else:
        ws_timeout = DEFAULT_WS_CLIENT_TIMEOUT
    if receive_timeout is not None:
        warnings.warn(
            "float parameter 'receive_timeout' "
            "is deprecated, please use parameter "
            "'timeout=ClientWSTimeout(ws_receive=...)'",
            DeprecationWarning,
            stacklevel=2,
        )
        ws_timeout = attr.evolve(ws_timeout, ws_receive=receive_timeout)

    if headers is None:
        real_headers: CIMultiDict[str] = CIMultiDict()
    else:
        real_headers = CIMultiDict(headers)

    # Mandatory upgrade headers; caller-supplied values take precedence.
    default_headers = {
        hdrs.UPGRADE: "websocket",
        hdrs.CONNECTION: "Upgrade",
        hdrs.SEC_WEBSOCKET_VERSION: "13",
    }

    for key, value in default_headers.items():
        real_headers.setdefault(key, value)

    # Random 16-byte nonce used later to verify Sec-WebSocket-Accept.
    sec_key = base64.b64encode(os.urandom(16))
    real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()

    if protocols:
        real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
    if origin is not None:
        real_headers[hdrs.ORIGIN] = origin
    if compress:
        extstr = ws_ext_gen(compress=compress)
        real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr

    # For the sake of backward compatibility, if user passes in None, convert it to True
    if ssl is None:
        warnings.warn(
            "ssl=None is deprecated, please use ssl=True",
            DeprecationWarning,
            stacklevel=2,
        )
        ssl = True
    ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

    # send request
    resp = await self.request(
        method,
        url,
        params=params,
        headers=real_headers,
        read_until_eof=False,
        auth=auth,
        proxy=proxy,
        proxy_auth=proxy_auth,
        ssl=ssl,
        server_hostname=server_hostname,
        proxy_headers=proxy_headers,
    )

    try:
        # check handshake
        if resp.status != 101:
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message="Invalid response status",
                status=resp.status,
                headers=resp.headers,
            )

        if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message="Invalid upgrade header",
                status=resp.status,
                headers=resp.headers,
            )

        if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message="Invalid connection header",
                status=resp.status,
                headers=resp.headers,
            )

        # key calculation
        r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
        match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
        if r_key != match:
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message="Invalid challenge response",
                status=resp.status,
                headers=resp.headers,
            )

        # websocket protocol
        protocol = None
        if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
            resp_protocols = [
                proto.strip()
                for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
            ]

            # First server-offered protocol we also requested wins.
            for proto in resp_protocols:
                if proto in protocols:
                    protocol = proto
                    break

        # websocket compress
        notakeover = False
        if compress:
            compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
            if compress_hdrs:
                try:
                    compress, notakeover = ws_ext_parse(compress_hdrs)
                except WSHandshakeError as exc:
                    raise WSServerHandshakeError(
                        resp.request_info,
                        resp.history,
                        message=exc.args[0],
                        status=resp.status,
                        headers=resp.headers,
                    ) from exc
            else:
                # Server declined compression.
                compress = 0
                notakeover = False

        conn = resp.connection
        assert conn is not None
        conn_proto = conn.protocol
        assert conn_proto is not None

        # For WS connection the read_timeout must be either receive_timeout or greater
        # None == no timeout, i.e. infinite timeout, so None is the max timeout possible
        if ws_timeout.ws_receive is None:
            # Reset regardless
            conn_proto.read_timeout = None
        elif conn_proto.read_timeout is not None:
            conn_proto.read_timeout = max(
                ws_timeout.ws_receive, conn_proto.read_timeout
            )

        transport = conn.transport
        assert transport is not None
        reader = WebSocketDataQueue(conn_proto, 2**16, loop=self._loop)
        conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
        # Client frames must be masked (use_mask=True) per the protocol.
        writer = WebSocketWriter(
            conn_proto,
            transport,
            use_mask=True,
            compress=compress,
            notakeover=notakeover,
        )
    except BaseException:
        resp.close()
        raise
    else:
        return self._ws_response_class(
            reader,
            writer,
            protocol,
            resp,
            ws_timeout,
            autoclose,
            autoping,
            self._loop,
            heartbeat=heartbeat,
            compress=compress,
            client_notakeover=notakeover,
        )
|
| 1134 |
+
|
| 1135 |
+
def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
    """Merge *headers* over the session defaults into a CIMultiDict."""
    result = CIMultiDict(self._default_headers)
    if headers:
        if not isinstance(headers, (MultiDictProxy, MultiDict)):
            headers = CIMultiDict(headers)
        # The first occurrence of a name replaces the session default;
        # later occurrences of the same name are appended.
        seen: Set[str] = set()
        for name, val in headers.items():
            if name not in seen:
                result[name] = val
                seen.add(name)
            else:
                result.add(name, val)
    return result
|
| 1150 |
+
|
| 1151 |
+
if sys.version_info >= (3, 11) and TYPE_CHECKING:

    def get(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

    def options(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

    def head(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

    def post(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

    def put(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

    def patch(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

    def delete(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

else:

    def get(
        self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP GET request."""
        coro = self._request(
            hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs
        )
        return _RequestContextManager(coro)

    def options(
        self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP OPTIONS request."""
        coro = self._request(
            hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
        )
        return _RequestContextManager(coro)

    def head(
        self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP HEAD request."""
        # NOTE: unlike GET/OPTIONS, HEAD does not follow redirects by default.
        coro = self._request(
            hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
        )
        return _RequestContextManager(coro)

    def post(
        self, url: StrOrURL, *, data: Any = None, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP POST request."""
        coro = self._request(hdrs.METH_POST, url, data=data, **kwargs)
        return _RequestContextManager(coro)

    def put(
        self, url: StrOrURL, *, data: Any = None, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP PUT request."""
        coro = self._request(hdrs.METH_PUT, url, data=data, **kwargs)
        return _RequestContextManager(coro)

    def patch(
        self, url: StrOrURL, *, data: Any = None, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP PATCH request."""
        coro = self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
        return _RequestContextManager(coro)

    def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
        """Perform HTTP DELETE request."""
        coro = self._request(hdrs.METH_DELETE, url, **kwargs)
        return _RequestContextManager(coro)
|
| 1256 |
+
|
| 1257 |
+
async def close(self) -> None:
    """Close the underlying connector and release all acquired resources.

    Safe to call multiple times; subsequent calls are no-ops.
    """
    if self.closed:
        return
    # Only close the connector if this session owns it; either way the
    # reference is dropped so the session reports itself as closed.
    if self._connector is not None and self._connector_owner:
        await self._connector.close()
    self._connector = None
|
| 1266 |
+
|
| 1267 |
+
@property
def closed(self) -> bool:
    """True when the session has no connector or its connector is closed (read-only)."""
    if self._connector is None:
        return True
    return self._connector.closed
|
| 1274 |
+
|
| 1275 |
+
@property
def connector(self) -> Optional[BaseConnector]:
    """The connector used by this session (None once the session is closed)."""
    return self._connector
|
| 1279 |
+
|
| 1280 |
+
@property
def cookie_jar(self) -> AbstractCookieJar:
    """Cookie jar holding the session cookies."""
    return self._cookie_jar
|
| 1284 |
+
|
| 1285 |
+
@property
def version(self) -> Tuple[int, int]:
    """HTTP protocol version used by the session, as a (major, minor) pair."""
    return self._version
|
| 1289 |
+
|
| 1290 |
+
@property
def requote_redirect_url(self) -> bool:
    """Whether redirect Location URLs are re-quoted before being followed."""
    return self._requote_redirect_url

@requote_redirect_url.setter
def requote_redirect_url(self, val: bool) -> None:
    """Deprecated mutator for the redirect-requoting flag (see #2778)."""
    warnings.warn(
        "session.requote_redirect_url modification is deprecated #2778",
        DeprecationWarning,
        stacklevel=2,
    )
    self._requote_redirect_url = val
|
| 1304 |
+
|
| 1305 |
+
@property
|
| 1306 |
+
def loop(self) -> asyncio.AbstractEventLoop:
|
| 1307 |
+
"""Session's loop."""
|
| 1308 |
+
warnings.warn(
|
| 1309 |
+
"client.loop property is deprecated", DeprecationWarning, stacklevel=2
|
| 1310 |
+
)
|
| 1311 |
+
return self._loop
|
| 1312 |
+
|
| 1313 |
+
@property
|
| 1314 |
+
def timeout(self) -> ClientTimeout:
|
| 1315 |
+
"""Timeout for the session."""
|
| 1316 |
+
return self._timeout
|
| 1317 |
+
|
| 1318 |
+
@property
|
| 1319 |
+
def headers(self) -> "CIMultiDict[str]":
|
| 1320 |
+
"""The default headers of the client session."""
|
| 1321 |
+
return self._default_headers
|
| 1322 |
+
|
| 1323 |
+
@property
|
| 1324 |
+
def skip_auto_headers(self) -> FrozenSet[istr]:
|
| 1325 |
+
"""Headers for which autogeneration should be skipped"""
|
| 1326 |
+
return self._skip_auto_headers
|
| 1327 |
+
|
| 1328 |
+
@property
|
| 1329 |
+
def auth(self) -> Optional[BasicAuth]:
|
| 1330 |
+
"""An object that represents HTTP Basic Authorization"""
|
| 1331 |
+
return self._default_auth
|
| 1332 |
+
|
| 1333 |
+
@property
|
| 1334 |
+
def json_serialize(self) -> JSONEncoder:
|
| 1335 |
+
"""Json serializer callable"""
|
| 1336 |
+
return self._json_serialize
|
| 1337 |
+
|
| 1338 |
+
@property
|
| 1339 |
+
def connector_owner(self) -> bool:
|
| 1340 |
+
"""Should connector be closed on session closing"""
|
| 1341 |
+
return self._connector_owner
|
| 1342 |
+
|
| 1343 |
+
@property
|
| 1344 |
+
def raise_for_status(
|
| 1345 |
+
self,
|
| 1346 |
+
) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
|
| 1347 |
+
"""Should `ClientResponse.raise_for_status()` be called for each response."""
|
| 1348 |
+
return self._raise_for_status
|
| 1349 |
+
|
| 1350 |
+
@property
|
| 1351 |
+
def auto_decompress(self) -> bool:
|
| 1352 |
+
"""Should the body response be automatically decompressed."""
|
| 1353 |
+
return self._auto_decompress
|
| 1354 |
+
|
| 1355 |
+
@property
|
| 1356 |
+
def trust_env(self) -> bool:
|
| 1357 |
+
"""
|
| 1358 |
+
Should proxies information from environment or netrc be trusted.
|
| 1359 |
+
|
| 1360 |
+
Information is from HTTP_PROXY / HTTPS_PROXY environment variables
|
| 1361 |
+
or ~/.netrc file if present.
|
| 1362 |
+
"""
|
| 1363 |
+
return self._trust_env
|
| 1364 |
+
|
| 1365 |
+
@property
|
| 1366 |
+
def trace_configs(self) -> List[TraceConfig]:
|
| 1367 |
+
"""A list of TraceConfig instances used for client tracing"""
|
| 1368 |
+
return self._trace_configs
|
| 1369 |
+
|
| 1370 |
+
def detach(self) -> None:
|
| 1371 |
+
"""Detach connector from session without closing the former.
|
| 1372 |
+
|
| 1373 |
+
Session is switched to closed state anyway.
|
| 1374 |
+
"""
|
| 1375 |
+
self._connector = None
|
| 1376 |
+
|
| 1377 |
+
def __enter__(self) -> None:
|
| 1378 |
+
raise TypeError("Use async with instead")
|
| 1379 |
+
|
| 1380 |
+
def __exit__(
|
| 1381 |
+
self,
|
| 1382 |
+
exc_type: Optional[Type[BaseException]],
|
| 1383 |
+
exc_val: Optional[BaseException],
|
| 1384 |
+
exc_tb: Optional[TracebackType],
|
| 1385 |
+
) -> None:
|
| 1386 |
+
# __exit__ should exist in pair with __enter__ but never executed
|
| 1387 |
+
pass # pragma: no cover
|
| 1388 |
+
|
| 1389 |
+
async def __aenter__(self) -> "ClientSession":
|
| 1390 |
+
return self
|
| 1391 |
+
|
| 1392 |
+
async def __aexit__(
|
| 1393 |
+
self,
|
| 1394 |
+
exc_type: Optional[Type[BaseException]],
|
| 1395 |
+
exc_val: Optional[BaseException],
|
| 1396 |
+
exc_tb: Optional[TracebackType],
|
| 1397 |
+
) -> None:
|
| 1398 |
+
await self.close()
|
| 1399 |
+
|
| 1400 |
+
|
| 1401 |
+
class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):
|
| 1402 |
+
|
| 1403 |
+
__slots__ = ("_coro", "_resp")
|
| 1404 |
+
|
| 1405 |
+
def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
|
| 1406 |
+
self._coro: Coroutine["asyncio.Future[Any]", None, _RetType] = coro
|
| 1407 |
+
|
| 1408 |
+
def send(self, arg: None) -> "asyncio.Future[Any]":
|
| 1409 |
+
return self._coro.send(arg)
|
| 1410 |
+
|
| 1411 |
+
def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]":
|
| 1412 |
+
return self._coro.throw(*args, **kwargs)
|
| 1413 |
+
|
| 1414 |
+
def close(self) -> None:
|
| 1415 |
+
return self._coro.close()
|
| 1416 |
+
|
| 1417 |
+
def __await__(self) -> Generator[Any, None, _RetType]:
|
| 1418 |
+
ret = self._coro.__await__()
|
| 1419 |
+
return ret
|
| 1420 |
+
|
| 1421 |
+
def __iter__(self) -> Generator[Any, None, _RetType]:
|
| 1422 |
+
return self.__await__()
|
| 1423 |
+
|
| 1424 |
+
async def __aenter__(self) -> _RetType:
|
| 1425 |
+
self._resp: _RetType = await self._coro
|
| 1426 |
+
return await self._resp.__aenter__()
|
| 1427 |
+
|
| 1428 |
+
async def __aexit__(
|
| 1429 |
+
self,
|
| 1430 |
+
exc_type: Optional[Type[BaseException]],
|
| 1431 |
+
exc: Optional[BaseException],
|
| 1432 |
+
tb: Optional[TracebackType],
|
| 1433 |
+
) -> None:
|
| 1434 |
+
await self._resp.__aexit__(exc_type, exc, tb)
|
| 1435 |
+
|
| 1436 |
+
|
| 1437 |
+
_RequestContextManager = _BaseRequestContextManager[ClientResponse]
|
| 1438 |
+
_WSRequestContextManager = _BaseRequestContextManager[ClientWebSocketResponse]
|
| 1439 |
+
|
| 1440 |
+
|
| 1441 |
+
class _SessionRequestContextManager:
|
| 1442 |
+
|
| 1443 |
+
__slots__ = ("_coro", "_resp", "_session")
|
| 1444 |
+
|
| 1445 |
+
def __init__(
|
| 1446 |
+
self,
|
| 1447 |
+
coro: Coroutine["asyncio.Future[Any]", None, ClientResponse],
|
| 1448 |
+
session: ClientSession,
|
| 1449 |
+
) -> None:
|
| 1450 |
+
self._coro = coro
|
| 1451 |
+
self._resp: Optional[ClientResponse] = None
|
| 1452 |
+
self._session = session
|
| 1453 |
+
|
| 1454 |
+
async def __aenter__(self) -> ClientResponse:
|
| 1455 |
+
try:
|
| 1456 |
+
self._resp = await self._coro
|
| 1457 |
+
except BaseException:
|
| 1458 |
+
await self._session.close()
|
| 1459 |
+
raise
|
| 1460 |
+
else:
|
| 1461 |
+
return self._resp
|
| 1462 |
+
|
| 1463 |
+
async def __aexit__(
|
| 1464 |
+
self,
|
| 1465 |
+
exc_type: Optional[Type[BaseException]],
|
| 1466 |
+
exc: Optional[BaseException],
|
| 1467 |
+
tb: Optional[TracebackType],
|
| 1468 |
+
) -> None:
|
| 1469 |
+
assert self._resp is not None
|
| 1470 |
+
self._resp.close()
|
| 1471 |
+
await self._session.close()
|
| 1472 |
+
|
| 1473 |
+
|
| 1474 |
+
def request(
|
| 1475 |
+
method: str,
|
| 1476 |
+
url: StrOrURL,
|
| 1477 |
+
*,
|
| 1478 |
+
params: Query = None,
|
| 1479 |
+
data: Any = None,
|
| 1480 |
+
json: Any = None,
|
| 1481 |
+
headers: Optional[LooseHeaders] = None,
|
| 1482 |
+
skip_auto_headers: Optional[Iterable[str]] = None,
|
| 1483 |
+
auth: Optional[BasicAuth] = None,
|
| 1484 |
+
allow_redirects: bool = True,
|
| 1485 |
+
max_redirects: int = 10,
|
| 1486 |
+
compress: Optional[str] = None,
|
| 1487 |
+
chunked: Optional[bool] = None,
|
| 1488 |
+
expect100: bool = False,
|
| 1489 |
+
raise_for_status: Optional[bool] = None,
|
| 1490 |
+
read_until_eof: bool = True,
|
| 1491 |
+
proxy: Optional[StrOrURL] = None,
|
| 1492 |
+
proxy_auth: Optional[BasicAuth] = None,
|
| 1493 |
+
timeout: Union[ClientTimeout, object] = sentinel,
|
| 1494 |
+
cookies: Optional[LooseCookies] = None,
|
| 1495 |
+
version: HttpVersion = http.HttpVersion11,
|
| 1496 |
+
connector: Optional[BaseConnector] = None,
|
| 1497 |
+
read_bufsize: Optional[int] = None,
|
| 1498 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 1499 |
+
max_line_size: int = 8190,
|
| 1500 |
+
max_field_size: int = 8190,
|
| 1501 |
+
) -> _SessionRequestContextManager:
|
| 1502 |
+
"""Constructs and sends a request.
|
| 1503 |
+
|
| 1504 |
+
Returns response object.
|
| 1505 |
+
method - HTTP method
|
| 1506 |
+
url - request url
|
| 1507 |
+
params - (optional) Dictionary or bytes to be sent in the query
|
| 1508 |
+
string of the new request
|
| 1509 |
+
data - (optional) Dictionary, bytes, or file-like object to
|
| 1510 |
+
send in the body of the request
|
| 1511 |
+
json - (optional) Any json compatible python object
|
| 1512 |
+
headers - (optional) Dictionary of HTTP Headers to send with
|
| 1513 |
+
the request
|
| 1514 |
+
cookies - (optional) Dict object to send with the request
|
| 1515 |
+
auth - (optional) BasicAuth named tuple represent HTTP Basic Auth
|
| 1516 |
+
auth - aiohttp.helpers.BasicAuth
|
| 1517 |
+
allow_redirects - (optional) If set to False, do not follow
|
| 1518 |
+
redirects
|
| 1519 |
+
version - Request HTTP version.
|
| 1520 |
+
compress - Set to True if request has to be compressed
|
| 1521 |
+
with deflate encoding.
|
| 1522 |
+
chunked - Set to chunk size for chunked transfer encoding.
|
| 1523 |
+
expect100 - Expect 100-continue response from server.
|
| 1524 |
+
connector - BaseConnector sub-class instance to support
|
| 1525 |
+
connection pooling.
|
| 1526 |
+
read_until_eof - Read response until eof if response
|
| 1527 |
+
does not have Content-Length header.
|
| 1528 |
+
loop - Optional event loop.
|
| 1529 |
+
timeout - Optional ClientTimeout settings structure, 5min
|
| 1530 |
+
total timeout by default.
|
| 1531 |
+
Usage::
|
| 1532 |
+
>>> import aiohttp
|
| 1533 |
+
>>> resp = await aiohttp.request('GET', 'http://python.org/')
|
| 1534 |
+
>>> resp
|
| 1535 |
+
<ClientResponse(python.org/) [200]>
|
| 1536 |
+
>>> data = await resp.read()
|
| 1537 |
+
"""
|
| 1538 |
+
connector_owner = False
|
| 1539 |
+
if connector is None:
|
| 1540 |
+
connector_owner = True
|
| 1541 |
+
connector = TCPConnector(loop=loop, force_close=True)
|
| 1542 |
+
|
| 1543 |
+
session = ClientSession(
|
| 1544 |
+
loop=loop,
|
| 1545 |
+
cookies=cookies,
|
| 1546 |
+
version=version,
|
| 1547 |
+
timeout=timeout,
|
| 1548 |
+
connector=connector,
|
| 1549 |
+
connector_owner=connector_owner,
|
| 1550 |
+
)
|
| 1551 |
+
|
| 1552 |
+
return _SessionRequestContextManager(
|
| 1553 |
+
session._request(
|
| 1554 |
+
method,
|
| 1555 |
+
url,
|
| 1556 |
+
params=params,
|
| 1557 |
+
data=data,
|
| 1558 |
+
json=json,
|
| 1559 |
+
headers=headers,
|
| 1560 |
+
skip_auto_headers=skip_auto_headers,
|
| 1561 |
+
auth=auth,
|
| 1562 |
+
allow_redirects=allow_redirects,
|
| 1563 |
+
max_redirects=max_redirects,
|
| 1564 |
+
compress=compress,
|
| 1565 |
+
chunked=chunked,
|
| 1566 |
+
expect100=expect100,
|
| 1567 |
+
raise_for_status=raise_for_status,
|
| 1568 |
+
read_until_eof=read_until_eof,
|
| 1569 |
+
proxy=proxy,
|
| 1570 |
+
proxy_auth=proxy_auth,
|
| 1571 |
+
read_bufsize=read_bufsize,
|
| 1572 |
+
max_line_size=max_line_size,
|
| 1573 |
+
max_field_size=max_field_size,
|
| 1574 |
+
),
|
| 1575 |
+
session,
|
| 1576 |
+
)
|
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/cookiejar.py
ADDED
|
@@ -0,0 +1,495 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import calendar
|
| 3 |
+
import contextlib
|
| 4 |
+
import datetime
|
| 5 |
+
import heapq
|
| 6 |
+
import itertools
|
| 7 |
+
import os # noqa
|
| 8 |
+
import pathlib
|
| 9 |
+
import pickle
|
| 10 |
+
import re
|
| 11 |
+
import time
|
| 12 |
+
import warnings
|
| 13 |
+
from collections import defaultdict
|
| 14 |
+
from http.cookies import BaseCookie, Morsel, SimpleCookie
|
| 15 |
+
from typing import (
|
| 16 |
+
DefaultDict,
|
| 17 |
+
Dict,
|
| 18 |
+
Iterable,
|
| 19 |
+
Iterator,
|
| 20 |
+
List,
|
| 21 |
+
Mapping,
|
| 22 |
+
Optional,
|
| 23 |
+
Set,
|
| 24 |
+
Tuple,
|
| 25 |
+
Union,
|
| 26 |
+
cast,
|
| 27 |
+
)
|
| 28 |
+
|
| 29 |
+
from yarl import URL
|
| 30 |
+
|
| 31 |
+
from .abc import AbstractCookieJar, ClearCookiePredicate
|
| 32 |
+
from .helpers import is_ip_address
|
| 33 |
+
from .typedefs import LooseCookies, PathLike, StrOrURL
|
| 34 |
+
|
| 35 |
+
__all__ = ("CookieJar", "DummyCookieJar")
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
CookieItem = Union[str, "Morsel[str]"]
|
| 39 |
+
|
| 40 |
+
# We cache these string methods here as their use is in performance critical code.
|
| 41 |
+
_FORMAT_PATH = "{}/{}".format
|
| 42 |
+
_FORMAT_DOMAIN_REVERSED = "{1}.{0}".format
|
| 43 |
+
|
| 44 |
+
# The minimum number of scheduled cookie expirations before we start cleaning up
|
| 45 |
+
# the expiration heap. This is a performance optimization to avoid cleaning up the
|
| 46 |
+
# heap too often when there are only a few scheduled expirations.
|
| 47 |
+
_MIN_SCHEDULED_COOKIE_EXPIRATION = 100
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class CookieJar(AbstractCookieJar):
|
| 51 |
+
"""Implements cookie storage adhering to RFC 6265."""
|
| 52 |
+
|
| 53 |
+
DATE_TOKENS_RE = re.compile(
|
| 54 |
+
r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
|
| 55 |
+
r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
|
| 56 |
+
)
|
| 57 |
+
|
| 58 |
+
DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")
|
| 59 |
+
|
| 60 |
+
DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")
|
| 61 |
+
|
| 62 |
+
DATE_MONTH_RE = re.compile(
|
| 63 |
+
"(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|(aug)|(sep)|(oct)|(nov)|(dec)",
|
| 64 |
+
re.I,
|
| 65 |
+
)
|
| 66 |
+
|
| 67 |
+
DATE_YEAR_RE = re.compile(r"(\d{2,4})")
|
| 68 |
+
|
| 69 |
+
# calendar.timegm() fails for timestamps after datetime.datetime.max
|
| 70 |
+
# Minus one as a loss of precision occurs when timestamp() is called.
|
| 71 |
+
MAX_TIME = (
|
| 72 |
+
int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1
|
| 73 |
+
)
|
| 74 |
+
try:
|
| 75 |
+
calendar.timegm(time.gmtime(MAX_TIME))
|
| 76 |
+
except (OSError, ValueError):
|
| 77 |
+
# Hit the maximum representable time on Windows
|
| 78 |
+
# https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64
|
| 79 |
+
# Throws ValueError on PyPy 3.9, OSError elsewhere
|
| 80 |
+
MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1))
|
| 81 |
+
except OverflowError:
|
| 82 |
+
# #4515: datetime.max may not be representable on 32-bit platforms
|
| 83 |
+
MAX_TIME = 2**31 - 1
|
| 84 |
+
# Avoid minuses in the future, 3x faster
|
| 85 |
+
SUB_MAX_TIME = MAX_TIME - 1
|
| 86 |
+
|
| 87 |
+
def __init__(
|
| 88 |
+
self,
|
| 89 |
+
*,
|
| 90 |
+
unsafe: bool = False,
|
| 91 |
+
quote_cookie: bool = True,
|
| 92 |
+
treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
|
| 93 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 94 |
+
) -> None:
|
| 95 |
+
super().__init__(loop=loop)
|
| 96 |
+
self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict(
|
| 97 |
+
SimpleCookie
|
| 98 |
+
)
|
| 99 |
+
self._morsel_cache: DefaultDict[Tuple[str, str], Dict[str, Morsel[str]]] = (
|
| 100 |
+
defaultdict(dict)
|
| 101 |
+
)
|
| 102 |
+
self._host_only_cookies: Set[Tuple[str, str]] = set()
|
| 103 |
+
self._unsafe = unsafe
|
| 104 |
+
self._quote_cookie = quote_cookie
|
| 105 |
+
if treat_as_secure_origin is None:
|
| 106 |
+
treat_as_secure_origin = []
|
| 107 |
+
elif isinstance(treat_as_secure_origin, URL):
|
| 108 |
+
treat_as_secure_origin = [treat_as_secure_origin.origin()]
|
| 109 |
+
elif isinstance(treat_as_secure_origin, str):
|
| 110 |
+
treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
|
| 111 |
+
else:
|
| 112 |
+
treat_as_secure_origin = [
|
| 113 |
+
URL(url).origin() if isinstance(url, str) else url.origin()
|
| 114 |
+
for url in treat_as_secure_origin
|
| 115 |
+
]
|
| 116 |
+
self._treat_as_secure_origin = treat_as_secure_origin
|
| 117 |
+
self._expire_heap: List[Tuple[float, Tuple[str, str, str]]] = []
|
| 118 |
+
self._expirations: Dict[Tuple[str, str, str], float] = {}
|
| 119 |
+
|
| 120 |
+
@property
|
| 121 |
+
def quote_cookie(self) -> bool:
|
| 122 |
+
return self._quote_cookie
|
| 123 |
+
|
| 124 |
+
def save(self, file_path: PathLike) -> None:
|
| 125 |
+
file_path = pathlib.Path(file_path)
|
| 126 |
+
with file_path.open(mode="wb") as f:
|
| 127 |
+
pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
|
| 128 |
+
|
| 129 |
+
def load(self, file_path: PathLike) -> None:
|
| 130 |
+
file_path = pathlib.Path(file_path)
|
| 131 |
+
with file_path.open(mode="rb") as f:
|
| 132 |
+
self._cookies = pickle.load(f)
|
| 133 |
+
|
| 134 |
+
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
|
| 135 |
+
if predicate is None:
|
| 136 |
+
self._expire_heap.clear()
|
| 137 |
+
self._cookies.clear()
|
| 138 |
+
self._morsel_cache.clear()
|
| 139 |
+
self._host_only_cookies.clear()
|
| 140 |
+
self._expirations.clear()
|
| 141 |
+
return
|
| 142 |
+
|
| 143 |
+
now = time.time()
|
| 144 |
+
to_del = [
|
| 145 |
+
key
|
| 146 |
+
for (domain, path), cookie in self._cookies.items()
|
| 147 |
+
for name, morsel in cookie.items()
|
| 148 |
+
if (
|
| 149 |
+
(key := (domain, path, name)) in self._expirations
|
| 150 |
+
and self._expirations[key] <= now
|
| 151 |
+
)
|
| 152 |
+
or predicate(morsel)
|
| 153 |
+
]
|
| 154 |
+
if to_del:
|
| 155 |
+
self._delete_cookies(to_del)
|
| 156 |
+
|
| 157 |
+
def clear_domain(self, domain: str) -> None:
|
| 158 |
+
self.clear(lambda x: self._is_domain_match(domain, x["domain"]))
|
| 159 |
+
|
| 160 |
+
def __iter__(self) -> "Iterator[Morsel[str]]":
|
| 161 |
+
self._do_expiration()
|
| 162 |
+
for val in self._cookies.values():
|
| 163 |
+
yield from val.values()
|
| 164 |
+
|
| 165 |
+
def __len__(self) -> int:
|
| 166 |
+
"""Return number of cookies.
|
| 167 |
+
|
| 168 |
+
This function does not iterate self to avoid unnecessary expiration
|
| 169 |
+
checks.
|
| 170 |
+
"""
|
| 171 |
+
return sum(len(cookie.values()) for cookie in self._cookies.values())
|
| 172 |
+
|
| 173 |
+
def _do_expiration(self) -> None:
|
| 174 |
+
"""Remove expired cookies."""
|
| 175 |
+
if not (expire_heap_len := len(self._expire_heap)):
|
| 176 |
+
return
|
| 177 |
+
|
| 178 |
+
# If the expiration heap grows larger than the number expirations
|
| 179 |
+
# times two, we clean it up to avoid keeping expired entries in
|
| 180 |
+
# the heap and consuming memory. We guard this with a minimum
|
| 181 |
+
# threshold to avoid cleaning up the heap too often when there are
|
| 182 |
+
# only a few scheduled expirations.
|
| 183 |
+
if (
|
| 184 |
+
expire_heap_len > _MIN_SCHEDULED_COOKIE_EXPIRATION
|
| 185 |
+
and expire_heap_len > len(self._expirations) * 2
|
| 186 |
+
):
|
| 187 |
+
# Remove any expired entries from the expiration heap
|
| 188 |
+
# that do not match the expiration time in the expirations
|
| 189 |
+
# as it means the cookie has been re-added to the heap
|
| 190 |
+
# with a different expiration time.
|
| 191 |
+
self._expire_heap = [
|
| 192 |
+
entry
|
| 193 |
+
for entry in self._expire_heap
|
| 194 |
+
if self._expirations.get(entry[1]) == entry[0]
|
| 195 |
+
]
|
| 196 |
+
heapq.heapify(self._expire_heap)
|
| 197 |
+
|
| 198 |
+
now = time.time()
|
| 199 |
+
to_del: List[Tuple[str, str, str]] = []
|
| 200 |
+
# Find any expired cookies and add them to the to-delete list
|
| 201 |
+
while self._expire_heap:
|
| 202 |
+
when, cookie_key = self._expire_heap[0]
|
| 203 |
+
if when > now:
|
| 204 |
+
break
|
| 205 |
+
heapq.heappop(self._expire_heap)
|
| 206 |
+
# Check if the cookie hasn't been re-added to the heap
|
| 207 |
+
# with a different expiration time as it will be removed
|
| 208 |
+
# later when it reaches the top of the heap and its
|
| 209 |
+
# expiration time is met.
|
| 210 |
+
if self._expirations.get(cookie_key) == when:
|
| 211 |
+
to_del.append(cookie_key)
|
| 212 |
+
|
| 213 |
+
if to_del:
|
| 214 |
+
self._delete_cookies(to_del)
|
| 215 |
+
|
| 216 |
+
def _delete_cookies(self, to_del: List[Tuple[str, str, str]]) -> None:
|
| 217 |
+
for domain, path, name in to_del:
|
| 218 |
+
self._host_only_cookies.discard((domain, name))
|
| 219 |
+
self._cookies[(domain, path)].pop(name, None)
|
| 220 |
+
self._morsel_cache[(domain, path)].pop(name, None)
|
| 221 |
+
self._expirations.pop((domain, path, name), None)
|
| 222 |
+
|
| 223 |
+
def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None:
|
| 224 |
+
cookie_key = (domain, path, name)
|
| 225 |
+
if self._expirations.get(cookie_key) == when:
|
| 226 |
+
# Avoid adding duplicates to the heap
|
| 227 |
+
return
|
| 228 |
+
heapq.heappush(self._expire_heap, (when, cookie_key))
|
| 229 |
+
self._expirations[cookie_key] = when
|
| 230 |
+
|
| 231 |
+
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
|
| 232 |
+
"""Update cookies."""
|
| 233 |
+
hostname = response_url.raw_host
|
| 234 |
+
|
| 235 |
+
if not self._unsafe and is_ip_address(hostname):
|
| 236 |
+
# Don't accept cookies from IPs
|
| 237 |
+
return
|
| 238 |
+
|
| 239 |
+
if isinstance(cookies, Mapping):
|
| 240 |
+
cookies = cookies.items()
|
| 241 |
+
|
| 242 |
+
for name, cookie in cookies:
|
| 243 |
+
if not isinstance(cookie, Morsel):
|
| 244 |
+
tmp = SimpleCookie()
|
| 245 |
+
tmp[name] = cookie # type: ignore[assignment]
|
| 246 |
+
cookie = tmp[name]
|
| 247 |
+
|
| 248 |
+
domain = cookie["domain"]
|
| 249 |
+
|
| 250 |
+
# ignore domains with trailing dots
|
| 251 |
+
if domain and domain[-1] == ".":
|
| 252 |
+
domain = ""
|
| 253 |
+
del cookie["domain"]
|
| 254 |
+
|
| 255 |
+
if not domain and hostname is not None:
|
| 256 |
+
# Set the cookie's domain to the response hostname
|
| 257 |
+
# and set its host-only-flag
|
| 258 |
+
self._host_only_cookies.add((hostname, name))
|
| 259 |
+
domain = cookie["domain"] = hostname
|
| 260 |
+
|
| 261 |
+
if domain and domain[0] == ".":
|
| 262 |
+
# Remove leading dot
|
| 263 |
+
domain = domain[1:]
|
| 264 |
+
cookie["domain"] = domain
|
| 265 |
+
|
| 266 |
+
if hostname and not self._is_domain_match(domain, hostname):
|
| 267 |
+
# Setting cookies for different domains is not allowed
|
| 268 |
+
continue
|
| 269 |
+
|
| 270 |
+
path = cookie["path"]
|
| 271 |
+
if not path or path[0] != "/":
|
| 272 |
+
# Set the cookie's path to the response path
|
| 273 |
+
path = response_url.path
|
| 274 |
+
if not path.startswith("/"):
|
| 275 |
+
path = "/"
|
| 276 |
+
else:
|
| 277 |
+
# Cut everything from the last slash to the end
|
| 278 |
+
path = "/" + path[1 : path.rfind("/")]
|
| 279 |
+
cookie["path"] = path
|
| 280 |
+
path = path.rstrip("/")
|
| 281 |
+
|
| 282 |
+
if max_age := cookie["max-age"]:
|
| 283 |
+
try:
|
| 284 |
+
delta_seconds = int(max_age)
|
| 285 |
+
max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME)
|
| 286 |
+
self._expire_cookie(max_age_expiration, domain, path, name)
|
| 287 |
+
except ValueError:
|
| 288 |
+
cookie["max-age"] = ""
|
| 289 |
+
|
| 290 |
+
elif expires := cookie["expires"]:
|
| 291 |
+
if expire_time := self._parse_date(expires):
|
| 292 |
+
self._expire_cookie(expire_time, domain, path, name)
|
| 293 |
+
else:
|
| 294 |
+
cookie["expires"] = ""
|
| 295 |
+
|
| 296 |
+
key = (domain, path)
|
| 297 |
+
if self._cookies[key].get(name) != cookie:
|
| 298 |
+
# Don't blow away the cache if the same
|
| 299 |
+
# cookie gets set again
|
| 300 |
+
self._cookies[key][name] = cookie
|
| 301 |
+
self._morsel_cache[key].pop(name, None)
|
| 302 |
+
|
| 303 |
+
self._do_expiration()
|
| 304 |
+
|
| 305 |
+
def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]":
|
| 306 |
+
"""Returns this jar's cookies filtered by their attributes."""
|
| 307 |
+
filtered: Union[SimpleCookie, "BaseCookie[str]"] = (
|
| 308 |
+
SimpleCookie() if self._quote_cookie else BaseCookie()
|
| 309 |
+
)
|
| 310 |
+
if not self._cookies:
|
| 311 |
+
# Skip do_expiration() if there are no cookies.
|
| 312 |
+
return filtered
|
| 313 |
+
self._do_expiration()
|
| 314 |
+
if not self._cookies:
|
| 315 |
+
# Skip rest of function if no non-expired cookies.
|
| 316 |
+
return filtered
|
| 317 |
+
if type(request_url) is not URL:
|
| 318 |
+
warnings.warn(
|
| 319 |
+
"filter_cookies expects yarl.URL instances only,"
|
| 320 |
+
f"and will stop working in 4.x, got {type(request_url)}",
|
| 321 |
+
DeprecationWarning,
|
| 322 |
+
stacklevel=2,
|
| 323 |
+
)
|
| 324 |
+
request_url = URL(request_url)
|
| 325 |
+
hostname = request_url.raw_host or ""
|
| 326 |
+
|
| 327 |
+
is_not_secure = request_url.scheme not in ("https", "wss")
|
| 328 |
+
if is_not_secure and self._treat_as_secure_origin:
|
| 329 |
+
request_origin = URL()
|
| 330 |
+
with contextlib.suppress(ValueError):
|
| 331 |
+
request_origin = request_url.origin()
|
| 332 |
+
is_not_secure = request_origin not in self._treat_as_secure_origin
|
| 333 |
+
|
| 334 |
+
# Send shared cookie
|
| 335 |
+
for c in self._cookies[("", "")].values():
|
| 336 |
+
filtered[c.key] = c.value
|
| 337 |
+
|
| 338 |
+
if is_ip_address(hostname):
|
| 339 |
+
if not self._unsafe:
|
| 340 |
+
return filtered
|
| 341 |
+
domains: Iterable[str] = (hostname,)
|
| 342 |
+
else:
|
| 343 |
+
# Get all the subdomains that might match a cookie (e.g. "foo.bar.com", "bar.com", "com")
|
| 344 |
+
domains = itertools.accumulate(
|
| 345 |
+
reversed(hostname.split(".")), _FORMAT_DOMAIN_REVERSED
|
| 346 |
+
)
|
| 347 |
+
|
| 348 |
+
# Get all the path prefixes that might match a cookie (e.g. "", "/foo", "/foo/bar")
|
| 349 |
+
paths = itertools.accumulate(request_url.path.split("/"), _FORMAT_PATH)
|
| 350 |
+
# Create every combination of (domain, path) pairs.
|
| 351 |
+
pairs = itertools.product(domains, paths)
|
| 352 |
+
|
| 353 |
+
path_len = len(request_url.path)
|
| 354 |
+
# Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4
|
| 355 |
+
for p in pairs:
|
| 356 |
+
for name, cookie in self._cookies[p].items():
|
| 357 |
+
domain = cookie["domain"]
|
| 358 |
+
|
| 359 |
+
if (domain, name) in self._host_only_cookies and domain != hostname:
|
| 360 |
+
continue
|
| 361 |
+
|
| 362 |
+
# Skip edge case when the cookie has a trailing slash but request doesn't.
|
| 363 |
+
if len(cookie["path"]) > path_len:
|
| 364 |
+
continue
|
| 365 |
+
|
| 366 |
+
if is_not_secure and cookie["secure"]:
|
| 367 |
+
continue
|
| 368 |
+
|
| 369 |
+
# We already built the Morsel so reuse it here
|
| 370 |
+
if name in self._morsel_cache[p]:
|
| 371 |
+
filtered[name] = self._morsel_cache[p][name]
|
| 372 |
+
continue
|
| 373 |
+
|
| 374 |
+
# It's critical we use the Morsel so the coded_value
|
| 375 |
+
# (based on cookie version) is preserved
|
| 376 |
+
mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
|
| 377 |
+
mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
|
| 378 |
+
self._morsel_cache[p][name] = mrsl_val
|
| 379 |
+
filtered[name] = mrsl_val
|
| 380 |
+
|
| 381 |
+
return filtered
|
| 382 |
+
|
| 383 |
+
@staticmethod
|
| 384 |
+
def _is_domain_match(domain: str, hostname: str) -> bool:
|
| 385 |
+
"""Implements domain matching adhering to RFC 6265."""
|
| 386 |
+
if hostname == domain:
|
| 387 |
+
return True
|
| 388 |
+
|
| 389 |
+
if not hostname.endswith(domain):
|
| 390 |
+
return False
|
| 391 |
+
|
| 392 |
+
non_matching = hostname[: -len(domain)]
|
| 393 |
+
|
| 394 |
+
if not non_matching.endswith("."):
|
| 395 |
+
return False
|
| 396 |
+
|
| 397 |
+
return not is_ip_address(hostname)
|
| 398 |
+
|
| 399 |
+
@classmethod
|
| 400 |
+
def _parse_date(cls, date_str: str) -> Optional[int]:
|
| 401 |
+
"""Implements date string parsing adhering to RFC 6265."""
|
| 402 |
+
if not date_str:
|
| 403 |
+
return None
|
| 404 |
+
|
| 405 |
+
found_time = False
|
| 406 |
+
found_day = False
|
| 407 |
+
found_month = False
|
| 408 |
+
found_year = False
|
| 409 |
+
|
| 410 |
+
hour = minute = second = 0
|
| 411 |
+
day = 0
|
| 412 |
+
month = 0
|
| 413 |
+
year = 0
|
| 414 |
+
|
| 415 |
+
for token_match in cls.DATE_TOKENS_RE.finditer(date_str):
|
| 416 |
+
|
| 417 |
+
token = token_match.group("token")
|
| 418 |
+
|
| 419 |
+
if not found_time:
|
| 420 |
+
time_match = cls.DATE_HMS_TIME_RE.match(token)
|
| 421 |
+
if time_match:
|
| 422 |
+
found_time = True
|
| 423 |
+
hour, minute, second = (int(s) for s in time_match.groups())
|
| 424 |
+
continue
|
| 425 |
+
|
| 426 |
+
if not found_day:
|
| 427 |
+
day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
|
| 428 |
+
if day_match:
|
| 429 |
+
found_day = True
|
| 430 |
+
day = int(day_match.group())
|
| 431 |
+
continue
|
| 432 |
+
|
| 433 |
+
if not found_month:
|
| 434 |
+
month_match = cls.DATE_MONTH_RE.match(token)
|
| 435 |
+
if month_match:
|
| 436 |
+
found_month = True
|
| 437 |
+
assert month_match.lastindex is not None
|
| 438 |
+
month = month_match.lastindex
|
| 439 |
+
continue
|
| 440 |
+
|
| 441 |
+
if not found_year:
|
| 442 |
+
year_match = cls.DATE_YEAR_RE.match(token)
|
| 443 |
+
if year_match:
|
| 444 |
+
found_year = True
|
| 445 |
+
year = int(year_match.group())
|
| 446 |
+
|
| 447 |
+
if 70 <= year <= 99:
|
| 448 |
+
year += 1900
|
| 449 |
+
elif 0 <= year <= 69:
|
| 450 |
+
year += 2000
|
| 451 |
+
|
| 452 |
+
if False in (found_day, found_month, found_year, found_time):
|
| 453 |
+
return None
|
| 454 |
+
|
| 455 |
+
if not 1 <= day <= 31:
|
| 456 |
+
return None
|
| 457 |
+
|
| 458 |
+
if year < 1601 or hour > 23 or minute > 59 or second > 59:
|
| 459 |
+
return None
|
| 460 |
+
|
| 461 |
+
return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1))
|
| 462 |
+
|
| 463 |
+
|
| 464 |
+
class DummyCookieJar(AbstractCookieJar):
    """A no-op cookie jar.

    It can be used with the ClientSession when no cookie processing is
    needed: nothing is ever stored and every lookup yields an empty jar.
    """

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        super().__init__(loop=loop)

    def __iter__(self) -> "Iterator[Morsel[str]]":
        # Nothing is ever stored, so iteration is always empty.
        return iter(())

    def __len__(self) -> int:
        return 0

    @property
    def quote_cookie(self) -> bool:
        return True

    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """Intentionally a no-op: there is nothing to clear."""

    def clear_domain(self, domain: str) -> None:
        """Intentionally a no-op: there is nothing to clear."""

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Intentionally a no-op: incoming cookies are discarded."""

    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        """Return an empty cookie set for every request."""
        return SimpleCookie()
|
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/formdata.py
ADDED
|
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import warnings
|
| 3 |
+
from typing import Any, Iterable, List, Optional
|
| 4 |
+
from urllib.parse import urlencode
|
| 5 |
+
|
| 6 |
+
from multidict import MultiDict, MultiDictProxy
|
| 7 |
+
|
| 8 |
+
from . import hdrs, multipart, payload
|
| 9 |
+
from .helpers import guess_filename
|
| 10 |
+
from .payload import Payload
|
| 11 |
+
|
| 12 |
+
__all__ = ("FormData",)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class FormData:
    """Helper class for form body generation.

    Supports multipart/form-data and application/x-www-form-urlencoded.
    """

    def __init__(
        self,
        fields: Iterable[Any] = (),
        quote_fields: bool = True,
        charset: Optional[str] = None,
        *,
        default_to_multipart: bool = False,
    ) -> None:
        # Writer used only when the form ends up multipart-encoded.
        self._writer = multipart.MultipartWriter("form-data")
        # Accumulated (disposition-params, headers, value) triples.
        self._fields: List[Any] = []
        self._is_multipart = default_to_multipart
        # Guards against serializing the multipart writer twice.
        self._is_processed = False
        self._quote_fields = quote_fields
        self._charset = charset

        # Normalize the fields argument to a sequence of records.
        if isinstance(fields, dict):
            fields = list(fields.items())
        elif not isinstance(fields, (list, tuple)):
            fields = (fields,)
        self.add_fields(*fields)

    @property
    def is_multipart(self) -> bool:
        # True once any field forces multipart encoding (file objects,
        # filenames, explicit content types, transfer encodings).
        return self._is_multipart

    def add_field(
        self,
        name: str,
        value: Any,
        *,
        content_type: Optional[str] = None,
        filename: Optional[str] = None,
        content_transfer_encoding: Optional[str] = None,
    ) -> None:
        """Add a single field to the form.

        File objects, filenames, explicit content types and transfer
        encodings all switch the form to multipart encoding.
        """

        if isinstance(value, io.IOBase):
            self._is_multipart = True
        elif isinstance(value, (bytes, bytearray, memoryview)):
            msg = (
                "In v4, passing bytes will no longer create a file field. "
                "Please explicitly use the filename parameter or pass a BytesIO object."
            )
            if filename is None and content_transfer_encoding is None:
                # Legacy behavior: bare bytes become a file field named
                # after the form field; deprecated, kept for compatibility.
                warnings.warn(msg, DeprecationWarning)
                filename = name

        type_options: MultiDict[str] = MultiDict({"name": name})
        if filename is not None and not isinstance(filename, str):
            raise TypeError("filename must be an instance of str. Got: %s" % filename)
        if filename is None and isinstance(value, io.IOBase):
            # Derive a filename from the file object when none was given.
            filename = guess_filename(value, name)
        if filename is not None:
            type_options["filename"] = filename
            self._is_multipart = True

        headers = {}
        if content_type is not None:
            if not isinstance(content_type, str):
                raise TypeError(
                    "content_type must be an instance of str. Got: %s" % content_type
                )
            headers[hdrs.CONTENT_TYPE] = content_type
            self._is_multipart = True
        if content_transfer_encoding is not None:
            if not isinstance(content_transfer_encoding, str):
                raise TypeError(
                    "content_transfer_encoding must be an instance"
                    " of str. Got: %s" % content_transfer_encoding
                )
            msg = (
                "content_transfer_encoding is deprecated. "
                "To maintain compatibility with v4 please pass a BytesPayload."
            )
            warnings.warn(msg, DeprecationWarning)
            self._is_multipart = True

        self._fields.append((type_options, headers, value))

    def add_fields(self, *fields: Any) -> None:
        """Add multiple fields: file objects, multidicts or (name, value) pairs."""
        to_add = list(fields)

        while to_add:
            rec = to_add.pop(0)

            if isinstance(rec, io.IOBase):
                # Bare file object: derive the field name from the file name.
                k = guess_filename(rec, "unknown")
                self.add_field(k, rec)  # type: ignore[arg-type]

            elif isinstance(rec, (MultiDictProxy, MultiDict)):
                # Flatten multidicts into (name, value) pairs.
                to_add.extend(rec.items())

            elif isinstance(rec, (list, tuple)) and len(rec) == 2:
                k, fp = rec
                self.add_field(k, fp)  # type: ignore[arg-type]

            else:
                raise TypeError(
                    "Only io.IOBase, multidict and (name, file) "
                    "pairs allowed, use .add_field() for passing "
                    "more complex parameters, got {!r}".format(rec)
                )

    def _gen_form_urlencoded(self) -> payload.BytesPayload:
        # form data (x-www-form-urlencoded)
        data = []
        for type_options, _, value in self._fields:
            data.append((type_options["name"], value))

        charset = self._charset if self._charset is not None else "utf-8"

        # The charset parameter is implied for utf-8; spell it out otherwise.
        if charset == "utf-8":
            content_type = "application/x-www-form-urlencoded"
        else:
            content_type = "application/x-www-form-urlencoded; charset=%s" % charset

        return payload.BytesPayload(
            urlencode(data, doseq=True, encoding=charset).encode(),
            content_type=content_type,
        )

    def _gen_form_data(self) -> multipart.MultipartWriter:
        """Encode a list of fields using the multipart/form-data MIME format"""
        if self._is_processed:
            raise RuntimeError("Form data has been processed already")
        for dispparams, headers, value in self._fields:
            try:
                if hdrs.CONTENT_TYPE in headers:
                    part = payload.get_payload(
                        value,
                        content_type=headers[hdrs.CONTENT_TYPE],
                        headers=headers,
                        encoding=self._charset,
                    )
                else:
                    part = payload.get_payload(
                        value, headers=headers, encoding=self._charset
                    )
            except Exception as exc:
                raise TypeError(
                    "Can not serialize value type: %r\n "
                    "headers: %r\n value: %r" % (type(value), headers, value)
                ) from exc

            if dispparams:
                part.set_content_disposition(
                    "form-data", quote_fields=self._quote_fields, **dispparams
                )
                # FIXME cgi.FieldStorage doesn't likes body parts with
                # Content-Length which were sent via chunked transfer encoding
                assert part.headers is not None
                part.headers.popall(hdrs.CONTENT_LENGTH, None)

            self._writer.append_payload(part)

        # One-shot: the writer's payload list must not be built twice.
        self._is_processed = True
        return self._writer

    def __call__(self) -> Payload:
        # Choose the encoding based on what has been added so far.
        if self._is_multipart:
            return self._gen_form_data()
        else:
            return self._gen_form_urlencoded()
|
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/helpers.py
ADDED
|
@@ -0,0 +1,944 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Various helper functions"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import base64
|
| 5 |
+
import binascii
|
| 6 |
+
import contextlib
|
| 7 |
+
import datetime
|
| 8 |
+
import enum
|
| 9 |
+
import functools
|
| 10 |
+
import inspect
|
| 11 |
+
import netrc
|
| 12 |
+
import os
|
| 13 |
+
import platform
|
| 14 |
+
import re
|
| 15 |
+
import sys
|
| 16 |
+
import time
|
| 17 |
+
import weakref
|
| 18 |
+
from collections import namedtuple
|
| 19 |
+
from contextlib import suppress
|
| 20 |
+
from email.parser import HeaderParser
|
| 21 |
+
from email.utils import parsedate
|
| 22 |
+
from math import ceil
|
| 23 |
+
from pathlib import Path
|
| 24 |
+
from types import TracebackType
|
| 25 |
+
from typing import (
|
| 26 |
+
Any,
|
| 27 |
+
Callable,
|
| 28 |
+
ContextManager,
|
| 29 |
+
Dict,
|
| 30 |
+
Generator,
|
| 31 |
+
Generic,
|
| 32 |
+
Iterable,
|
| 33 |
+
Iterator,
|
| 34 |
+
List,
|
| 35 |
+
Mapping,
|
| 36 |
+
Optional,
|
| 37 |
+
Protocol,
|
| 38 |
+
Tuple,
|
| 39 |
+
Type,
|
| 40 |
+
TypeVar,
|
| 41 |
+
Union,
|
| 42 |
+
get_args,
|
| 43 |
+
overload,
|
| 44 |
+
)
|
| 45 |
+
from urllib.parse import quote
|
| 46 |
+
from urllib.request import getproxies, proxy_bypass
|
| 47 |
+
|
| 48 |
+
import attr
|
| 49 |
+
from multidict import MultiDict, MultiDictProxy, MultiMapping
|
| 50 |
+
from propcache.api import under_cached_property as reify
|
| 51 |
+
from yarl import URL
|
| 52 |
+
|
| 53 |
+
from . import hdrs
|
| 54 |
+
from .log import client_logger
|
| 55 |
+
|
| 56 |
+
if sys.version_info >= (3, 11):
|
| 57 |
+
import asyncio as async_timeout
|
| 58 |
+
else:
|
| 59 |
+
import async_timeout
|
| 60 |
+
|
| 61 |
+
__all__ = ("BasicAuth", "ChainMapProxy", "ETag", "reify")
|
| 62 |
+
|
| 63 |
+
# Platform flags, computed once at import time.
IS_MACOS = platform.system() == "Darwin"
IS_WINDOWS = platform.system() == "Windows"

PY_310 = sys.version_info >= (3, 10)
PY_311 = sys.version_info >= (3, 11)


_T = TypeVar("_T")
_S = TypeVar("_S")

# Sentinel distinguishing "argument not supplied" from an explicit None.
_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
sentinel = _SENTINEL.sentinel

# Set AIOHTTP_NO_EXTENSIONS to a truthy value to skip the C extensions.
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))

# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
EMPTY_BODY_STATUS_CODES = frozenset((204, 304, *range(100, 200)))
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
EMPTY_BODY_METHODS = hdrs.METH_HEAD_ALL

DEBUG = sys.flags.dev_mode or (
    not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
)


# HTTP grammar character classes (RFC 2616 style): CHAR is 7-bit ASCII,
# CTL the control characters, SEPARATORS the delimiters barred from tokens.
CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
    chr(127),
}
SEPARATORS = {
    "(",
    ")",
    "<",
    ">",
    "@",
    ",",
    ";",
    ":",
    "\\",
    '"',
    "/",
    "[",
    "]",
    "?",
    "=",
    "{",
    "}",
    " ",
    chr(9),
}
# NOTE(review): computed with symmetric difference, so chr(9) (TAB), which
# appears in both CTL and SEPARATORS, ends up *included* in TOKEN — confirm
# this is intentional before relying on TOKEN for strict validation.
TOKEN = CHAR ^ CTL ^ SEPARATORS
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
class noop:
    """An awaitable that completes immediately and produces no result."""

    def __await__(self) -> Generator[None, None, None]:
        # A single yield lets the event loop step over this awaitable once.
        yield None
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
    """Http basic authentication helper."""

    def __new__(
        cls, login: str, password: str = "", encoding: str = "latin1"
    ) -> "BasicAuth":
        # Reject None explicitly — the namedtuple would happily store it.
        if login is None:
            raise ValueError("None is not allowed as login value")

        if password is None:
            raise ValueError("None is not allowed as password value")

        if ":" in login:
            raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')

        return super().__new__(cls, login, password, encoding)

    @classmethod
    def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
        """Create a BasicAuth object from an Authorization HTTP header."""
        parts = auth_header.split(" ", 1)
        if len(parts) != 2:
            raise ValueError("Could not parse authorization header.")
        auth_type, encoded_credentials = parts

        if auth_type.lower() != "basic":
            raise ValueError("Unknown authorization method %s" % auth_type)

        try:
            decoded = base64.b64decode(
                encoded_credentials.encode("ascii"), validate=True
            ).decode(encoding)
        except binascii.Error:
            raise ValueError("Invalid base64 encoding.")

        # RFC 2617 HTTP Authentication (https://www.ietf.org/rfc/rfc2617.txt):
        # the colon must be present, but the username and password may be
        # otherwise blank.
        if ":" not in decoded:
            raise ValueError("Invalid credentials.")
        username, _, password = decoded.partition(":")

        return cls(username, password, encoding=encoding)

    @classmethod
    def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
        """Create BasicAuth from url."""
        if not isinstance(url, URL):
            raise TypeError("url should be yarl.URL instance")
        # Check raw_user and raw_password first as yarl is likely
        # to already have these values parsed from the netloc in the cache.
        if url.raw_user is None and url.raw_password is None:
            return None
        return cls(url.user or "", url.password or "", encoding=encoding)

    def encode(self) -> str:
        """Encode credentials."""
        creds = f"{self.login}:{self.password}".encode(self.encoding)
        return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Remove user and password from URL if present and return BasicAuth object."""
    # raw_user/raw_password are checked first as yarl has likely already
    # parsed and cached them from the netloc.
    if url.raw_user is None and url.raw_password is None:
        return url, None
    auth = BasicAuth(url.user or "", url.password or "")
    return url.with_user(None), auth
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
def netrc_from_env() -> Optional[netrc.netrc]:
    """Load netrc from file.

    Attempt to load it from the path specified by the env-var
    NETRC or in the default location in the user's home directory.

    Returns None if it couldn't be found or fails to parse.
    """
    netrc_env = os.environ.get("NETRC")

    if netrc_env is not None:
        # Explicit path from the environment takes precedence.
        netrc_path = Path(netrc_env)
    else:
        try:
            home_dir = Path.home()
        except RuntimeError as e:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                e,
            )
            return None

        # Windows conventionally uses "_netrc" instead of ".netrc".
        netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")

    try:
        return netrc.netrc(str(netrc_path))
    except netrc.NetrcParseError as e:
        client_logger.warning("Could not parse .netrc file: %s", e)
    except OSError as e:
        netrc_exists = False
        with contextlib.suppress(OSError):
            netrc_exists = netrc_path.is_file()
        # we couldn't read the file (doesn't exist, permissions, etc.)
        if netrc_env or netrc_exists:
            # only warn if the environment wanted us to load it,
            # or it appears like the default file does actually exist
            client_logger.warning("Could not read .netrc file: %s", e)

    return None
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    # Immutable pair describing one configured proxy.
    # URL of the proxy endpoint (credentials stripped).
    proxy: URL
    # Basic-auth credentials for the proxy, if any were found.
    proxy_auth: Optional[BasicAuth]
|
| 241 |
+
|
| 242 |
+
|
| 243 |
+
def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth:
    """
    Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.

    :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
        entry is found for the ``host``.
    """
    if netrc_obj is None:
        raise LookupError("No .netrc file found")

    entry = netrc_obj.authenticators(host)
    if entry is None:
        raise LookupError(f"No entry for {host!s} found in the `.netrc` file.")
    login, account, password = entry

    # TODO(PY311): username = login or account
    # Up to python 3.10, account could be None if not specified,
    # and login will be empty string if not specified. From 3.11,
    # login and account will be empty string if not specified.
    username = account if (not login and account is not None) else login

    # TODO(PY311): from 3.11 password is always a string, never None.
    return BasicAuth(username, password or "")
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def proxies_from_env() -> Dict[str, ProxyInfo]:
    """Return proxies configured in the environment, keyed by scheme.

    Only http/https/ws/wss entries from getproxies() are considered.
    https/wss proxies are unsupported and skipped with a warning, and
    missing credentials are looked up in .netrc when available.
    """
    proxy_urls = {
        k: URL(v)
        for k, v in getproxies().items()
        if k in ("http", "https", "ws", "wss")
    }
    netrc_obj = netrc_from_env()
    # Split embedded credentials out of each proxy URL.
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme in ("https", "wss"):
            client_logger.warning(
                "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
            )
            continue
        if netrc_obj and auth is None:
            # No credentials in the URL itself: try the .netrc entry for
            # the proxy host, falling back to no auth.
            if proxy.host is not None:
                try:
                    auth = basicauth_from_netrc(netrc_obj, proxy.host)
                except LookupError:
                    auth = None
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Get a permitted proxy for the given URL from the env."""
    # Respect the platform's proxy-bypass rules (no_proxy, registry, ...).
    if url.host is not None and proxy_bypass(url.host):
        raise LookupError(f"Proxying is disallowed for `{url.host!r}`")

    env_proxies = proxies_from_env()
    if url.scheme not in env_proxies:
        raise LookupError(f"No proxies found for `{url!s}` in the env")
    info = env_proxies[url.scheme]
    return info.proxy, info.proxy_auth
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    # Parsed representation of a MIME type string; built by parse_mimetype().
    type: str  # major type, e.g. "text" in "text/html"
    subtype: str  # subtype, e.g. "html" in "text/html"
    suffix: str  # part after "+", e.g. "xml" in "application/svg+xml"
    parameters: "MultiDictProxy[str]"  # parameters such as charset
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    if not mimetype:
        return MimeType(
            type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
        )

    # Semicolons split the full type from its parameter list.
    fulltype, *raw_params = mimetype.split(";")
    params: MultiDict[str] = MultiDict()
    for raw in raw_params:
        if not raw:
            continue
        key, _, value = raw.partition("=")
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = fulltype.strip().lower()
    if fulltype == "*":
        # Bare "*" is shorthand for "*/*".
        fulltype = "*/*"

    mtype, _, stype = fulltype.partition("/")
    stype, _, suffix = stype.partition("+")

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    """Return the base name of *obj*'s ``name`` attribute, or *default*.

    Angle-bracketed pseudo-names such as ``"<stdin>"`` are rejected.
    """
    name = getattr(obj, "name", None)
    if not name or not isinstance(name, str):
        return default
    if name.startswith("<") or name.endswith(">"):
        return default
    return Path(name).name
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
# Characters that may NOT appear unescaped inside an RFC 5322 quoted-string:
# anything outside \041, \043-\133, \135-\176 must be backslash-escaped.
not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
# Printable US-ASCII plus TAB: the characters permitted in quoted-string content.
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}
|
| 369 |
+
|
| 370 |
+
|
| 371 |
+
def quoted_string(content: str) -> str:
    """Return 7-bit content as quoted-string.

    Format content into a quoted-string as defined in RFC5322 for
    Internet Message Format. Notice that this is not the 8-bit HTTP
    format, but the 7-bit email format. Content must be in usascii or
    a ValueError is raised.
    """
    chars = set(content)
    if not (QCONTENT > chars):
        raise ValueError(f"bad content for quoted-string {content!r}")
    # Backslash-escape anything that is not plain qtext.
    return not_qtext_re.sub(lambda m: "\\" + m.group(0), content)
|
| 382 |
+
|
| 383 |
+
|
| 384 |
+
def content_disposition_header(
    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
    """Sets ``Content-Disposition`` header for MIME.

    This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7579 section 4.2, not the HTTP Content-Disposition from
    RFC 6266.

    disptype is a disposition type: inline, attachment, form-data.
    Should be valid extension token (see RFC 2183)

    quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set to quote_fields to False if recipient
    can take 8-bit file names and field values.

    _charset specifies the charset to use when quote_fields is True.

    params is a dict with disposition params.
    """
    # The disposition type itself must be a valid HTTP token.
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError(f"bad content disposition type {disptype!r}")

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            # Parameter names must be valid tokens too.
            if not key or not (TOKEN > set(key)):
                raise ValueError(f"bad content disposition parameter {key!r}={val!r}")
            if quote_fields:
                if key.lower() == "filename":
                    # filename is always percent-encoded (RFC 7578 style).
                    qval = quote(val, "", encoding=_charset)
                    lparams.append((key, '"%s"' % qval))
                else:
                    try:
                        # Prefer a plain RFC 5322 quoted-string when the
                        # value is 7-bit safe.
                        qval = quoted_string(val)
                    except ValueError:
                        # Fall back to RFC 2231 extended notation
                        # (key*=charset''percent-encoded-value).
                        qval = "".join(
                            (_charset, "''", quote(val, "", encoding=_charset))
                        )
                        lparams.append((key + "*", qval))
                    else:
                        lparams.append((key, '"%s"' % qval))
            else:
                # 8-bit passthrough: only escape backslashes and quotes.
                qval = val.replace("\\", "\\\\").replace('"', '\\"')
                lparams.append((key, '"%s"' % qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value
|
| 433 |
+
|
| 434 |
+
|
| 435 |
+
def is_ip_address(host: Optional[str]) -> bool:
    """Check if host looks like an IP Address.

    This check is only meant as a heuristic to ensure that
    a host is not a domain name.
    """
    if not host:
        return False
    if ":" in host:
        # IPv6 literals (and not valid domain names) contain a colon.
        return True
    # IPv4 heuristic: nothing but digits once the dots are removed.
    return host.replace(".", "").isdigit()
|
| 446 |
+
|
| 447 |
+
|
| 448 |
+
_cached_current_datetime: Optional[int] = None
|
| 449 |
+
_cached_formatted_datetime = ""
|
| 450 |
+
|
| 451 |
+
|
| 452 |
+
def rfc822_formatted_time() -> str:
    """Return the current UTC time as an RFC 822 / HTTP-date string.

    The formatted string is cached per whole second, so repeated calls
    within the same second are cheap.
    """
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now == _cached_current_datetime:
        return _cached_formatted_datetime

    # Weekday and month names for HTTP date/time formatting;
    # always English!
    # Tuples are constants stored in codeobject!
    weekday_names = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
    month_names = (
        "",  # Dummy so we can use 1-based month numbers
        "Jan",
        "Feb",
        "Mar",
        "Apr",
        "May",
        "Jun",
        "Jul",
        "Aug",
        "Sep",
        "Oct",
        "Nov",
        "Dec",
    )

    year, month, day, hh, mm, ss, wd, *_rest = time.gmtime(now)
    _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
        weekday_names[wd],
        day,
        month_names[month],
        year,
        hh,
        mm,
        ss,
    )
    _cached_current_datetime = now
    return _cached_formatted_datetime
|
| 490 |
+
|
| 491 |
+
|
| 492 |
+
def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
    """Invoke the named method on a weakly referenced object, if still alive."""
    ref, method_name = info
    target = ref()
    if target is None:
        return
    # Best effort: the target may be mid-teardown, so swallow any error.
    with suppress(Exception):
        getattr(target, method_name)()
|
| 498 |
+
|
| 499 |
+
|
| 500 |
+
def weakref_handle(
    ob: object,
    name: str,
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule ``getattr(ob, name)()`` after *timeout* seconds.

    Holds only a weak reference to *ob*, so scheduling does not keep it
    alive.  Returns ``None`` for a missing or non-positive timeout.
    """
    if timeout is None or timeout <= 0:
        return None
    when = loop.time() + timeout
    # Round coarse timeouts up to a whole second so the event loop can
    # coalesce timer wakeups.
    if timeout >= timeout_ceil_threshold:
        when = ceil(when)
    return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
|
| 514 |
+
|
| 515 |
+
|
| 516 |
+
def call_later(
    cb: Callable[[], Any],
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule *cb* after *timeout* seconds on *loop*.

    Returns ``None`` (nothing scheduled) for a missing or non-positive
    timeout; otherwise returns the timer handle.
    """
    if timeout is not None and timeout > 0:
        when = calculate_timeout_when(loop.time(), timeout, timeout_ceil_threshold)
        return loop.call_at(when, cb)
    return None
|
| 527 |
+
|
| 528 |
+
|
| 529 |
+
def calculate_timeout_when(
    loop_time: float,
    timeout: float,
    timeout_ceiling_threshold: float,
) -> float:
    """Calculate when to execute a timeout.

    Deadlines for timeouts above the ceiling threshold are rounded up to a
    whole second so the event loop can coalesce timer wakeups.
    """
    deadline = loop_time + timeout
    return ceil(deadline) if timeout > timeout_ceiling_threshold else deadline
|
| 539 |
+
|
| 540 |
+
|
| 541 |
+
class TimeoutHandle:
    """Timeout handle.

    Collects callbacks to fire when the timeout elapses; ``start()``
    schedules them on the event loop, ``timer()`` hands out a matching
    timer context manager.
    """

    __slots__ = ("_timeout", "_loop", "_ceil_threshold", "_callbacks")

    def __init__(
        self,
        loop: asyncio.AbstractEventLoop,
        timeout: Optional[float],
        ceil_threshold: float = 5,
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        self._ceil_threshold = ceil_threshold
        # Each entry is (callback, positional args, keyword args).
        self._callbacks: List[
            Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
        ] = []

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        """Add a callback to run when the timeout fires."""
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        """Drop all registered callbacks without firing them."""
        self._callbacks.clear()

    def start(self) -> Optional[asyncio.TimerHandle]:
        """Schedule the timeout on the loop; None when no timeout is set."""
        timeout = self._timeout
        if timeout is None or timeout <= 0:
            return None
        when = self._loop.time() + timeout
        if timeout >= self._ceil_threshold:
            when = ceil(when)
        return self._loop.call_at(when, self.__call__)

    def timer(self) -> "BaseTimerContext":
        """Return a timer context wired to this handle (noop if no timeout)."""
        if self._timeout is None or self._timeout <= 0:
            return TimerNoop()
        ctx = TimerContext(self._loop)
        self.register(ctx.timeout)
        return ctx

    def __call__(self) -> None:
        # Fire every callback best-effort, then clear so they run once.
        for callback, args, kwargs in self._callbacks:
            with suppress(Exception):
                callback(*args, **kwargs)
        self._callbacks.clear()
|
| 591 |
+
|
| 592 |
+
|
| 593 |
+
class BaseTimerContext(ContextManager["BaseTimerContext"]):
    # Common base for timer context managers (TimerNoop / TimerContext).

    __slots__ = ()

    def assert_timeout(self) -> None:
        """Raise TimeoutError if timeout has been exceeded."""
|
| 599 |
+
|
| 600 |
+
|
| 601 |
+
class TimerNoop(BaseTimerContext):
    """Timer context that never times out; used when no timeout is set."""

    __slots__ = ()

    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Nothing to clean up; exceptions propagate unchanged.
        return None
|
| 615 |
+
|
| 616 |
+
|
| 617 |
+
class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    __slots__ = ("_loop", "_tasks", "_cancelled", "_cancelling")

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        # Tasks currently inside this context (supports re-entry).
        self._tasks: List[asyncio.Task[Any]] = []
        # Set once timeout() has fired; later entries fail immediately.
        self._cancelled = False
        # Task.cancelling() snapshot taken on __enter__ (3.11+ only).
        self._cancelling = 0

    def assert_timeout(self) -> None:
        """Raise TimeoutError if timer has already been cancelled."""
        if self._cancelled:
            raise asyncio.TimeoutError from None

    def __enter__(self) -> BaseTimerContext:
        # Must run inside a task: timeout() delivers itself via task.cancel().
        task = asyncio.current_task(loop=self._loop)
        if task is None:
            raise RuntimeError("Timeout context manager should be used inside a task")

        if sys.version_info >= (3, 11):
            # Remember if the task was already cancelling
            # so when we __exit__ we can decide if we should
            # raise asyncio.TimeoutError or let the cancellation propagate
            self._cancelling = task.cancelling()

        if self._cancelled:
            raise asyncio.TimeoutError from None

        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        enter_task: Optional[asyncio.Task[Any]] = None
        if self._tasks:
            enter_task = self._tasks.pop()

        if exc_type is asyncio.CancelledError and self._cancelled:
            assert enter_task is not None
            # The timeout was hit, and the task was cancelled
            # so we need to uncancel the last task that entered the context manager
            # since the cancellation should not leak out of the context manager
            if sys.version_info >= (3, 11):
                # If the task was already cancelling don't raise
                # asyncio.TimeoutError and instead return None
                # to allow the cancellation to propagate
                if enter_task.uncancel() > self._cancelling:
                    return None
            # Translate our own cancellation into TimeoutError for callers.
            raise asyncio.TimeoutError from exc_val
        return None

    def timeout(self) -> None:
        # Cancel every task inside the context exactly once.
        if not self._cancelled:
            for task in set(self._tasks):
                task.cancel()

            self._cancelled = True
|
| 680 |
+
|
| 681 |
+
|
| 682 |
+
def ceil_timeout(
    delay: Optional[float], ceil_threshold: float = 5
) -> async_timeout.Timeout:
    """Return an ``async_timeout`` context for *delay* seconds.

    Deadlines for delays above *ceil_threshold* are rounded up to a whole
    second so the event loop can coalesce timer wakeups; a missing or
    non-positive delay yields an unlimited timeout.
    """
    if delay is None or delay <= 0:
        return async_timeout.timeout(None)

    loop = asyncio.get_running_loop()
    deadline = loop.time() + delay
    if delay > ceil_threshold:
        deadline = ceil(deadline)
    return async_timeout.timeout_at(deadline)
|
| 694 |
+
|
| 695 |
+
|
| 696 |
+
class HeadersMixin:
    """Mixin for handling headers."""

    # Names of the lazily computed cache attributes below.
    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])

    _headers: MultiMapping[str]
    # Parsed media type (e.g. "text/html"); populated lazily.
    _content_type: Optional[str] = None
    # Parsed Content-Type parameters (charset, boundary, ...); lazy.
    _content_dict: Optional[Dict[str, str]] = None
    # Raw header value the caches were computed from; ``sentinel`` means
    # "never parsed yet", so the first access always parses.
    _stored_content_type: Union[str, None, _SENTINEL] = sentinel

    def _parse_content_type(self, raw: Optional[str]) -> None:
        # Record the raw value so repeated accesses skip re-parsing until
        # the header changes.
        self._stored_content_type = raw
        if raw is None:
            # default value according to RFC 2616
            self._content_type = "application/octet-stream"
            self._content_dict = {}
        else:
            msg = HeaderParser().parsestr("Content-Type: " + raw)
            self._content_type = msg.get_content_type()
            params = msg.get_params(())
            self._content_dict = dict(params[1:])  # First element is content type again

    @property
    def content_type(self) -> str:
        """The value of content part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        assert self._content_type is not None
        return self._content_type

    @property
    def charset(self) -> Optional[str]:
        """The value of charset part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        assert self._content_dict is not None
        return self._content_dict.get("charset")

    @property
    def content_length(self) -> Optional[int]:
        """The value of Content-Length HTTP header."""
        # NOTE(review): int() may raise ValueError on a malformed header;
        # callers appear to rely on that propagating.
        content_length = self._headers.get(hdrs.CONTENT_LENGTH)
        return None if content_length is None else int(content_length)
|
| 741 |
+
|
| 742 |
+
|
| 743 |
+
def set_result(fut: "asyncio.Future[_T]", result: "_T") -> None:
    """Set *result* on *fut* unless it has already completed."""
    if fut.done():
        return
    fut.set_result(result)
|
| 746 |
+
|
| 747 |
+
|
| 748 |
+
# Sentinel default for ``set_exception``'s ``exc_cause`` parameter; lets the
# function distinguish "no cause supplied" from any real exception value.
_EXC_SENTINEL = BaseException()
|
| 749 |
+
|
| 750 |
+
|
| 751 |
+
class ErrorableProtocol(Protocol):
    """Structural type for any object exposing ``set_exception``."""

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = ...,
    ) -> None: ...  # pragma: no cover
|
| 757 |
+
|
| 758 |
+
|
| 759 |
+
def set_exception(
    fut: "asyncio.Future[_T] | ErrorableProtocol",
    exc: BaseException,
    exc_cause: BaseException = _EXC_SENTINEL,
) -> None:
    """Set future exception.

    If the future is marked as complete, this function is a no-op.

    :param exc_cause: An exception that is a direct cause of ``exc``.
                      Only set if provided.
    """
    # Completed futures must not be touched.
    if asyncio.isfuture(fut) and fut.done():
        return

    # Attach the cause only when one was actually supplied and it is not
    # the exception itself (which would create a self-cycle).
    if exc_cause is not _EXC_SENTINEL and exc_cause is not exc:
        exc.__cause__ = exc_cause

    fut.set_exception(exc)
|
| 780 |
+
|
| 781 |
+
|
| 782 |
+
@functools.total_ordering
class AppKey(Generic[_T]):
    """Keys for static typing support in Application."""

    __slots__ = ("_name", "_t", "__orig_class__")

    # This may be set by Python when instantiating with a generic type. We need to
    # support this, in order to support types that are not concrete classes,
    # like Iterable, which can't be passed as the second parameter to __init__.
    __orig_class__: Type[object]

    def __init__(self, name: str, t: Optional[Type[_T]] = None):
        """Create a key named *name*, optionally carrying value type *t*.

        The key name is prefixed with the defining module's ``__name__``
        to help deduplicate key names across packages.
        """
        # Fallback keeps ``module`` bound even when no ``<module>`` frame is
        # found in the stack (e.g. instantiation from exec()'d code or an
        # embedded interpreter) — previously that path raised NameError below.
        module: str = "<unknown>"
        frame = inspect.currentframe()
        while frame:
            if frame.f_code.co_name == "<module>":
                module = frame.f_globals["__name__"]
                break
            frame = frame.f_back

        self._name = module + "." + name
        self._t = t

    def __lt__(self, other: object) -> bool:
        if isinstance(other, AppKey):
            return self._name < other._name
        return True  # Order AppKey above other types.

    def __repr__(self) -> str:
        t = self._t
        if t is None:
            with suppress(AttributeError):
                # Set to type arg.
                t = get_args(self.__orig_class__)[0]

        if t is None:
            t_repr = "<<Unknown>>"
        elif isinstance(t, type):
            if t.__module__ == "builtins":
                t_repr = t.__qualname__
            else:
                t_repr = f"{t.__module__}.{t.__qualname__}"
        else:
            t_repr = repr(t)
        return f"<AppKey({self._name}, type={t_repr})>"
|
| 827 |
+
|
| 828 |
+
|
| 829 |
+
class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]):
    """Read-only view over an ordered sequence of mappings.

    Lookups try each mapping in order; the first one holding the key wins.
    """

    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        # Subclassing is deliberately unsupported.
        raise TypeError(
            "Inheritance class {} from ChainMapProxy "
            "is forbidden".format(cls.__name__)
        )

    @overload  # type: ignore[override]
    def __getitem__(self, key: AppKey[_T]) -> _T: ...

    @overload
    def __getitem__(self, key: str) -> Any: ...

    def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
        for layer in self._maps:
            with suppress(KeyError):
                return layer[key]
        raise KeyError(key)

    @overload  # type: ignore[override]
    def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: ...

    @overload
    def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ...

    @overload
    def get(self, key: str, default: Any = ...) -> Any: ...

    def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
        try:
            return self[key]
        except KeyError:
            return default

    def __len__(self) -> int:
        # reuses stored hash values if possible
        return len(set().union(*self._maps))

    def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
        merged: Dict[Union[str, AppKey[Any]], Any] = {}
        # Later maps are shadowed by earlier ones, so merge in reverse.
        for layer in reversed(self._maps):
            # reuses stored hash values if possible
            merged.update(layer)
        return iter(merged)

    def __contains__(self, key: object) -> bool:
        return any(key in layer for layer in self._maps)

    def __bool__(self) -> bool:
        return any(self._maps)

    def __repr__(self) -> str:
        content = ", ".join(map(repr, self._maps))
        return f"ChainMapProxy({content})"
|
| 890 |
+
|
| 891 |
+
|
| 892 |
+
# https://tools.ietf.org/html/rfc7232#section-2.3
# Characters allowed inside an entity-tag (etagc production).
_ETAGC = r"[!\x23-\x7E\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
# A single quoted ETag, optionally weak ("W/" prefix).
_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
# Comma-separated ETag list; the trailing "(.)" alternative catches garbage.
LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")

# Wildcard used by If-Match / If-None-Match.
ETAG_ANY = "*"
|
| 900 |
+
|
| 901 |
+
|
| 902 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
    """Immutable entity-tag value (RFC 7232)."""

    value: str
    # True for weak validators (serialized with a leading ``W/``).
    is_weak: bool = False
|
| 906 |
+
|
| 907 |
+
|
| 908 |
+
def validate_etag_value(value: str) -> None:
    """Raise ValueError unless *value* is the ``*`` wildcard or a valid etag."""
    if value == ETAG_ANY:
        return
    if not _ETAGC_RE.fullmatch(value):
        raise ValueError(
            f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
        )
|
| 913 |
+
|
| 914 |
+
|
| 915 |
+
def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
    """Process a date string, return a datetime object"""
    if date_str is None:
        return None
    timetuple = parsedate(date_str)
    if timetuple is None:
        return None
    # Out-of-range components make datetime() raise; treat as unparseable.
    with suppress(ValueError):
        return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
    return None
|
| 923 |
+
|
| 924 |
+
|
| 925 |
+
@functools.lru_cache
def must_be_empty_body(method: str, code: int) -> bool:
    """Check if a request must return an empty body."""
    if code in EMPTY_BODY_STATUS_CODES:
        return True
    if method in EMPTY_BODY_METHODS:
        return True
    # Successful CONNECT responses switch to tunneling and carry no body.
    return 200 <= code < 300 and method in hdrs.METH_CONNECT_ALL
|
| 933 |
+
|
| 934 |
+
|
| 935 |
+
def should_remove_content_length(method: str, code: int) -> bool:
    """Check if a Content-Length header should be removed.

    This should always be a subset of must_be_empty_body
    """
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4
    if code in EMPTY_BODY_STATUS_CODES:
        return True
    return 200 <= code < 300 and method in hdrs.METH_CONNECT_ALL
|
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/http.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from http import HTTPStatus
|
| 3 |
+
from typing import Mapping, Tuple
|
| 4 |
+
|
| 5 |
+
from . import __version__
|
| 6 |
+
from .http_exceptions import HttpProcessingError as HttpProcessingError
|
| 7 |
+
from .http_parser import (
|
| 8 |
+
HeadersParser as HeadersParser,
|
| 9 |
+
HttpParser as HttpParser,
|
| 10 |
+
HttpRequestParser as HttpRequestParser,
|
| 11 |
+
HttpResponseParser as HttpResponseParser,
|
| 12 |
+
RawRequestMessage as RawRequestMessage,
|
| 13 |
+
RawResponseMessage as RawResponseMessage,
|
| 14 |
+
)
|
| 15 |
+
from .http_websocket import (
|
| 16 |
+
WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
|
| 17 |
+
WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
|
| 18 |
+
WS_KEY as WS_KEY,
|
| 19 |
+
WebSocketError as WebSocketError,
|
| 20 |
+
WebSocketReader as WebSocketReader,
|
| 21 |
+
WebSocketWriter as WebSocketWriter,
|
| 22 |
+
WSCloseCode as WSCloseCode,
|
| 23 |
+
WSMessage as WSMessage,
|
| 24 |
+
WSMsgType as WSMsgType,
|
| 25 |
+
ws_ext_gen as ws_ext_gen,
|
| 26 |
+
ws_ext_parse as ws_ext_parse,
|
| 27 |
+
)
|
| 28 |
+
from .http_writer import (
|
| 29 |
+
HttpVersion as HttpVersion,
|
| 30 |
+
HttpVersion10 as HttpVersion10,
|
| 31 |
+
HttpVersion11 as HttpVersion11,
|
| 32 |
+
StreamWriter as StreamWriter,
|
| 33 |
+
)
|
| 34 |
+
|
| 35 |
+
__all__ = (
|
| 36 |
+
"HttpProcessingError",
|
| 37 |
+
"RESPONSES",
|
| 38 |
+
"SERVER_SOFTWARE",
|
| 39 |
+
# .http_writer
|
| 40 |
+
"StreamWriter",
|
| 41 |
+
"HttpVersion",
|
| 42 |
+
"HttpVersion10",
|
| 43 |
+
"HttpVersion11",
|
| 44 |
+
# .http_parser
|
| 45 |
+
"HeadersParser",
|
| 46 |
+
"HttpParser",
|
| 47 |
+
"HttpRequestParser",
|
| 48 |
+
"HttpResponseParser",
|
| 49 |
+
"RawRequestMessage",
|
| 50 |
+
"RawResponseMessage",
|
| 51 |
+
# .http_websocket
|
| 52 |
+
"WS_CLOSED_MESSAGE",
|
| 53 |
+
"WS_CLOSING_MESSAGE",
|
| 54 |
+
"WS_KEY",
|
| 55 |
+
"WebSocketReader",
|
| 56 |
+
"WebSocketWriter",
|
| 57 |
+
"ws_ext_gen",
|
| 58 |
+
"ws_ext_parse",
|
| 59 |
+
"WSMessage",
|
| 60 |
+
"WebSocketError",
|
| 61 |
+
"WSMsgType",
|
| 62 |
+
"WSCloseCode",
|
| 63 |
+
)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
# Default Server/User-Agent token, e.g. "Python/3.10 aiohttp/3.x".
SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
    sys.version_info, __version__
)

# Status code -> (reason phrase, description), built from http.HTTPStatus.
RESPONSES: Mapping[int, Tuple[str, str]] = {
    v: (v.phrase, v.description) for v in HTTPStatus.__members__.values()
}
|
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/http_parser.py
ADDED
|
@@ -0,0 +1,1046 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import asyncio
|
| 3 |
+
import re
|
| 4 |
+
import string
|
| 5 |
+
from contextlib import suppress
|
| 6 |
+
from enum import IntEnum
|
| 7 |
+
from typing import (
|
| 8 |
+
Any,
|
| 9 |
+
ClassVar,
|
| 10 |
+
Final,
|
| 11 |
+
Generic,
|
| 12 |
+
List,
|
| 13 |
+
Literal,
|
| 14 |
+
NamedTuple,
|
| 15 |
+
Optional,
|
| 16 |
+
Pattern,
|
| 17 |
+
Set,
|
| 18 |
+
Tuple,
|
| 19 |
+
Type,
|
| 20 |
+
TypeVar,
|
| 21 |
+
Union,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
from multidict import CIMultiDict, CIMultiDictProxy, istr
|
| 25 |
+
from yarl import URL
|
| 26 |
+
|
| 27 |
+
from . import hdrs
|
| 28 |
+
from .base_protocol import BaseProtocol
|
| 29 |
+
from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor
|
| 30 |
+
from .helpers import (
|
| 31 |
+
_EXC_SENTINEL,
|
| 32 |
+
DEBUG,
|
| 33 |
+
EMPTY_BODY_METHODS,
|
| 34 |
+
EMPTY_BODY_STATUS_CODES,
|
| 35 |
+
NO_EXTENSIONS,
|
| 36 |
+
BaseTimerContext,
|
| 37 |
+
set_exception,
|
| 38 |
+
)
|
| 39 |
+
from .http_exceptions import (
|
| 40 |
+
BadHttpMessage,
|
| 41 |
+
BadHttpMethod,
|
| 42 |
+
BadStatusLine,
|
| 43 |
+
ContentEncodingError,
|
| 44 |
+
ContentLengthError,
|
| 45 |
+
InvalidHeader,
|
| 46 |
+
InvalidURLError,
|
| 47 |
+
LineTooLong,
|
| 48 |
+
TransferEncodingError,
|
| 49 |
+
)
|
| 50 |
+
from .http_writer import HttpVersion, HttpVersion10
|
| 51 |
+
from .streams import EMPTY_PAYLOAD, StreamReader
|
| 52 |
+
from .typedefs import RawHeaders
|
| 53 |
+
|
| 54 |
+
__all__ = (
|
| 55 |
+
"HeadersParser",
|
| 56 |
+
"HttpParser",
|
| 57 |
+
"HttpRequestParser",
|
| 58 |
+
"HttpResponseParser",
|
| 59 |
+
"RawRequestMessage",
|
| 60 |
+
"RawResponseMessage",
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
# Accepted line separators: strict CRLF or bare LF (lax mode).
_SEP = Literal[b"\r\n", b"\n"]

# Printable ASCII characters, used to validate decoded text.
ASCIISET: Final[Set[str]] = set(string.printable)

# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens
#
# method = token
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
#         "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
# token = 1*tchar
_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~")
TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+")
# HTTP-version line, e.g. "HTTP/1.1".
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII)
# Decimal digits (Content-Length); hex digits (chunk sizes).
DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII)
HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+")
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class RawRequestMessage(NamedTuple):
    """Parsed HTTP request line and header block (no payload)."""

    method: str
    path: str
    version: HttpVersion
    headers: "CIMultiDictProxy[str]"
    raw_headers: RawHeaders
    should_close: bool
    compression: Optional[str]
    upgrade: bool
    chunked: bool
    url: URL
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class RawResponseMessage(NamedTuple):
    """Parsed HTTP status line and header block (no payload)."""

    version: HttpVersion
    code: int
    reason: str
    headers: CIMultiDictProxy[str]
    raw_headers: RawHeaders
    should_close: bool
    compression: Optional[str]
    upgrade: bool
    chunked: bool
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
# Either parsed-message type; lets HttpParser be generic over request/response.
_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
class ParseState(IntEnum):
    """How the message payload length is being determined."""

    PARSE_NONE = 0
    PARSE_LENGTH = 1
    PARSE_CHUNKED = 2
    PARSE_UNTIL_EOF = 3
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
class ChunkState(IntEnum):
    """Sub-state machine for chunked transfer-encoding payloads."""

    PARSE_CHUNKED_SIZE = 0
    PARSE_CHUNKED_CHUNK = 1
    PARSE_CHUNKED_CHUNK_EOF = 2
    PARSE_MAYBE_TRAILERS = 3
    PARSE_TRAILERS = 4
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
class HeadersParser:
    """Parse raw HTTP header lines into a case-insensitive multidict.

    Enforces per-field size limits and, in lax mode only, accepts the
    deprecated obs-fold line continuations (RFC 9112).
    """

    def __init__(
        self,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        lax: bool = False,
    ) -> None:
        # Maximum length of the start line (kept for API symmetry).
        self.max_line_size = max_line_size
        # Maximum number of header fields accepted.
        self.max_headers = max_headers
        # Maximum length of a single header name or (folded) value.
        self.max_field_size = max_field_size
        # Lax mode permits obsolete line folding (continuation lines).
        self._lax = lax

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
        """Parse header lines (index 0 is the start-line and is skipped).

        Returns a read-only CIMultiDictProxy of decoded headers plus the
        raw (name, value) byte pairs.  Raises InvalidHeader or
        LineTooLong on malformed or oversized fields.
        """
        headers: CIMultiDict[str] = CIMultiDict()
        # note: "raw" does not mean inclusion of OWS before/after the field value
        raw_headers = []

        # Header fields start at index 1; index 0 holds the start-line.
        lines_idx = 1
        line = lines[1]
        line_count = len(lines)

        while line:
            # Parse initial header name : value pair.
            try:
                bname, bvalue = line.split(b":", 1)
            except ValueError:
                raise InvalidHeader(line) from None

            if len(bname) == 0:
                raise InvalidHeader(bname)

            # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
            if {bname[0], bname[-1]} & {32, 9}:  # {" ", "\t"}
                raise InvalidHeader(line)

            bvalue = bvalue.lstrip(b" \t")
            if len(bname) > self.max_field_size:
                raise LineTooLong(
                    "request header name {}".format(
                        bname.decode("utf8", "backslashreplace")
                    ),
                    str(self.max_field_size),
                    str(len(bname)),
                )
            name = bname.decode("utf-8", "surrogateescape")
            if not TOKENRE.fullmatch(name):
                raise InvalidHeader(bname)

            header_length = len(bvalue)

            # next line
            lines_idx += 1
            line = lines[lines_idx]

            # consume continuation lines
            continuation = self._lax and line and line[0] in (32, 9)  # (' ', '\t')

            # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
            if continuation:
                bvalue_lst = [bvalue]
                while continuation:
                    header_length += len(line)
                    if header_length > self.max_field_size:
                        raise LineTooLong(
                            "request header field {}".format(
                                bname.decode("utf8", "backslashreplace")
                            ),
                            str(self.max_field_size),
                            str(header_length),
                        )
                    bvalue_lst.append(line)

                    # next line
                    lines_idx += 1
                    if lines_idx < line_count:
                        line = lines[lines_idx]
                        if line:
                            continuation = line[0] in (32, 9)  # (' ', '\t')
                    else:
                        line = b""
                        break
                bvalue = b"".join(bvalue_lst)
            else:
                if header_length > self.max_field_size:
                    raise LineTooLong(
                        "request header field {}".format(
                            bname.decode("utf8", "backslashreplace")
                        ),
                        str(self.max_field_size),
                        str(header_length),
                    )

            bvalue = bvalue.strip(b" \t")
            value = bvalue.decode("utf-8", "surrogateescape")

            # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
            if "\n" in value or "\r" in value or "\x00" in value:
                raise InvalidHeader(bvalue)

            headers.add(name, value)
            raw_headers.append((bname, bvalue))

        return (CIMultiDictProxy(headers), tuple(raw_headers))
def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool:
    """Check if the upgrade header is supported."""
    upgrade_proto = headers.get(hdrs.UPGRADE, "")
    return upgrade_proto.lower() in {"tcp", "websocket"}
class HttpParser(abc.ABC, Generic[_MsgT]):
    """Incremental HTTP message parser.

    Subclasses implement :meth:`parse_message` (request vs. response
    start-line handling) and :meth:`_is_chunked_te`.  Raw bytes are
    pushed in via :meth:`feed_data`, which yields completed
    (message, payload) pairs, an "upgraded" flag, and any unconsumed
    tail bytes.
    """

    # Lax parsing (bare-LF separators etc.) is only enabled on the
    # response parser subclass.
    lax: ClassVar[bool] = False

    def __init__(
        self,
        protocol: Optional[BaseProtocol] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        limit: int = 2**16,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        timer: Optional[BaseTimerContext] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        payload_exception: Optional[Type[BaseException]] = None,
        response_with_body: bool = True,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
    ) -> None:
        self.protocol = protocol
        self.loop = loop
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        self.timer = timer
        self.code = code
        self.method = method
        self.payload_exception = payload_exception
        self.response_with_body = response_with_body
        self.read_until_eof = read_until_eof

        # Start-line/header lines of the message currently being parsed.
        self._lines: List[bytes] = []
        # Bytes received but not yet terminated by a line separator.
        self._tail = b""
        self._upgraded = False
        self._payload = None
        self._payload_parser: Optional[HttpPayloadParser] = None
        self._auto_decompress = auto_decompress
        self._limit = limit
        self._headers_parser = HeadersParser(
            max_line_size, max_headers, max_field_size, self.lax
        )

    @abc.abstractmethod
    def parse_message(self, lines: List[bytes]) -> _MsgT:
        """Parse a complete start-line + header block into a message."""
        ...

    @abc.abstractmethod
    def _is_chunked_te(self, te: str) -> bool:
        """Return True if the Transfer-Encoding value means chunked."""
        ...

    def feed_eof(self) -> Optional[_MsgT]:
        """Signal end of stream; may return a partially received message."""
        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None
        else:
            # try to extract partial message
            if self._tail:
                self._lines.append(self._tail)

            if self._lines:
                # Ensure the header block ends with an empty line so
                # parse_message/parse_headers see a terminator.
                # FIX: the previous check compared bytes against the str
                # "\r\n", which is always unequal, so a (harmless but
                # redundant) terminator was appended unconditionally.
                if self._lines[-1] != b"":
                    self._lines.append(b"")
                with suppress(Exception):
                    return self.parse_message(self._lines)
        return None

    def feed_data(
        self,
        data: bytes,
        SEP: _SEP = b"\r\n",
        EMPTY: bytes = b"",
        CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
        METH_CONNECT: str = hdrs.METH_CONNECT,
        SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
    ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
        """Consume ``data`` and return (messages, upgraded, tail).

        ``messages`` is a list of (message, payload StreamReader) pairs
        completed by this call; ``tail`` holds any bytes left over after
        an upgrade or an incomplete message.
        """
        messages = []

        if self._tail:
            data, self._tail = self._tail + data, b""

        data_len = len(data)
        start_pos = 0
        loop = self.loop

        should_close = False
        while start_pos < data_len:

            # read HTTP message (request/response line + headers), \r\n\r\n
            # and split by lines
            if self._payload_parser is None and not self._upgraded:
                pos = data.find(SEP, start_pos)
                # consume \r\n
                if pos == start_pos and not self._lines:
                    start_pos = pos + len(SEP)
                    continue

                if pos >= start_pos:
                    if should_close:
                        raise BadHttpMessage("Data after `Connection: close`")

                    # line found
                    line = data[start_pos:pos]
                    if SEP == b"\n":  # For lax response parsing
                        line = line.rstrip(b"\r")
                    self._lines.append(line)
                    start_pos = pos + len(SEP)

                    # \r\n\r\n found
                    if self._lines[-1] == EMPTY:
                        try:
                            msg: _MsgT = self.parse_message(self._lines)
                        finally:
                            self._lines.clear()

                        def get_content_length() -> Optional[int]:
                            # payload length
                            length_hdr = msg.headers.get(CONTENT_LENGTH)
                            if length_hdr is None:
                                return None

                            # Shouldn't allow +/- or other number formats.
                            # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
                            # msg.headers is already stripped of leading/trailing wsp
                            if not DIGITS.fullmatch(length_hdr):
                                raise InvalidHeader(CONTENT_LENGTH)

                            return int(length_hdr)

                        length = get_content_length()
                        # do not support old websocket spec
                        if SEC_WEBSOCKET_KEY1 in msg.headers:
                            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

                        self._upgraded = msg.upgrade and _is_supported_upgrade(
                            msg.headers
                        )

                        method = getattr(msg, "method", self.method)
                        # code is only present on responses
                        code = getattr(msg, "code", 0)

                        assert self.protocol is not None
                        # calculate payload
                        empty_body = code in EMPTY_BODY_STATUS_CODES or bool(
                            method and method in EMPTY_BODY_METHODS
                        )
                        if not empty_body and (
                            ((length is not None and length > 0) or msg.chunked)
                            and not self._upgraded
                        ):
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        elif method == METH_CONNECT:
                            assert isinstance(msg, RawRequestMessage)
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            self._upgraded = True
                            self._payload_parser = HttpPayloadParser(
                                payload,
                                method=msg.method,
                                compression=msg.compression,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                        elif not empty_body and length is None and self.read_until_eof:
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        else:
                            payload = EMPTY_PAYLOAD

                        messages.append((msg, payload))
                        should_close = msg.should_close
                else:
                    self._tail = data[start_pos:]
                    data = EMPTY
                    break

            # no parser, just store
            elif self._payload_parser is None and self._upgraded:
                assert not self._lines
                break

            # feed payload
            elif data and start_pos < data_len:
                assert not self._lines
                assert self._payload_parser is not None
                try:
                    eof, data = self._payload_parser.feed_data(data[start_pos:], SEP)
                except BaseException as underlying_exc:
                    reraised_exc = underlying_exc
                    if self.payload_exception is not None:
                        reraised_exc = self.payload_exception(str(underlying_exc))

                    set_exception(
                        self._payload_parser.payload,
                        reraised_exc,
                        underlying_exc,
                    )

                    eof = True
                    data = b""

                if eof:
                    start_pos = 0
                    data_len = len(data)
                    self._payload_parser = None
                    continue
                else:
                    break

        if data and start_pos < data_len:
            data = data[start_pos:]
        else:
            data = EMPTY

        return messages, self._upgraded, data

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple[
        "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
    ]:
        """Parses RFC 5322 headers from a stream.

        Line continuations are supported. Returns list of header name
        and value pairs. Header name is in upper case.
        """
        headers, raw_headers = self._headers_parser.parse_headers(lines)
        close_conn = None
        encoding = None
        upgrade = False
        chunked = False

        # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6
        # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf
        singletons = (
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_LOCATION,
            hdrs.CONTENT_RANGE,
            hdrs.CONTENT_TYPE,
            hdrs.ETAG,
            hdrs.HOST,
            hdrs.MAX_FORWARDS,
            hdrs.SERVER,
            hdrs.TRANSFER_ENCODING,
            hdrs.USER_AGENT,
        )
        bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None)
        if bad_hdr is not None:
            raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.")

        # keep-alive
        conn = headers.get(hdrs.CONNECTION)
        if conn:
            v = conn.lower()
            if v == "close":
                close_conn = True
            elif v == "keep-alive":
                close_conn = False
            # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols
            elif v == "upgrade" and headers.get(hdrs.UPGRADE):
                upgrade = True

        # encoding
        enc = headers.get(hdrs.CONTENT_ENCODING)
        if enc:
            enc = enc.lower()
            if enc in ("gzip", "deflate", "br"):
                encoding = enc

        # chunking
        te = headers.get(hdrs.TRANSFER_ENCODING)
        if te is not None:
            if self._is_chunked_te(te):
                chunked = True

            if hdrs.CONTENT_LENGTH in headers:
                raise BadHttpMessage(
                    "Transfer-Encoding can't be present with Content-Length",
                )

        return (headers, raw_headers, close_conn, encoding, upgrade, chunked)

    def set_upgraded(self, val: bool) -> None:
        """Set connection upgraded (to websocket) mode.

        :param bool val: new state.
        """
        self._upgraded = val
class HttpRequestParser(HttpParser[RawRequestMessage]):
    """Read request status line.

    Exception .http_exceptions.BadStatusLine
    could be raised in case of any errors in status line.
    Returns RawRequestMessage.
    """

    def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
        # request line
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            method, path, version = line.split(" ", maxsplit=2)
        except ValueError:
            raise BadHttpMethod(line) from None

        if len(path) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(path))
            )

        # method
        if not TOKENRE.fullmatch(method):
            raise BadHttpMethod(method)

        # version
        match = VERSRE.fullmatch(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        # The request-target can arrive in one of four RFC 7230 forms.
        if method == "CONNECT":
            # authority-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
            url = URL.build(authority=path, encoded=True)
        elif path.startswith("/"):
            # origin-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
            path_part, _hash_separator, url_fragment = path.partition("#")
            path_part, _question_mark_separator, qs_part = path_part.partition("?")

            # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
            # NOTE: parser does, otherwise it results into the same
            # NOTE: HTTP Request-Line input producing different
            # NOTE: `yarl.URL()` objects
            url = URL.build(
                path=path_part,
                query_string=qs_part,
                fragment=url_fragment,
                encoded=True,
            )
        elif path == "*" and method == "OPTIONS":
            # asterisk-form,
            url = URL(path, encoded=True)
        else:
            # absolute-form for proxy maybe,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
            url = URL(path, encoded=True)
            if url.scheme == "":
                # not absolute-form
                raise InvalidURLError(
                    path.encode(errors="surrogateescape").decode("latin1")
                )

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:  # then the headers weren't set in the request
            if version_o <= HttpVersion10:  # HTTP 1.0 must asks to not close
                close = True
            else:  # HTTP 1.1 must ask to close.
                close = False

        return RawRequestMessage(
            method,
            path,
            version_o,
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
            url,
        )

    def _is_chunked_te(self, te: str) -> bool:
        """Return True only when the final transfer-coding is "chunked".

        Any other Transfer-Encoding on a request is rejected outright to
        avoid request-smuggling ambiguity.
        """
        if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked":
            return True
        # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3
        raise BadHttpMessage("Request has invalid `Transfer-Encoding`")
class HttpResponseParser(HttpParser[RawResponseMessage]):
    """Read response status line and headers.

    BadStatusLine could be raised in case of any errors in status line.
    Returns RawResponseMessage.
    """

    # Lax mode should only be enabled on response parser.
    lax = not DEBUG

    def feed_data(
        self,
        data: bytes,
        SEP: Optional[_SEP] = None,
        *args: Any,
        **kwargs: Any,
    ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]:
        # Outside of DEBUG mode, accept bare-LF line separators
        # (lines are then right-stripped of "\r" by the base parser).
        if SEP is None:
            SEP = b"\r\n" if DEBUG else b"\n"
        return super().feed_data(data, SEP, *args, **kwargs)

    def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            version, status = line.split(maxsplit=1)
        except ValueError:
            raise BadStatusLine(line) from None

        try:
            status, reason = status.split(maxsplit=1)
        except ValueError:
            # No reason phrase present.
            status = status.strip()
            reason = ""

        if len(reason) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(reason))
            )

        # version
        match = VERSRE.fullmatch(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        # The status code is a three-digit ASCII number, no padding
        if len(status) != 3 or not DIGITS.fullmatch(status):
            raise BadStatusLine(line)
        status_i = int(status)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:
            if version_o <= HttpVersion10:
                close = True
            # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length
            elif 100 <= status_i < 200 or status_i in {204, 304}:
                close = False
            elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers:
                close = False
            else:
                # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8
                close = True

        return RawResponseMessage(
            version_o,
            status_i,
            reason.strip(),
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        )

    def _is_chunked_te(self, te: str) -> bool:
        # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2
        # Responses tolerate unknown codings; only the final "chunked"
        # coding makes the body chunked.
        return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked"
class HttpPayloadParser:
    """Parse an HTTP message body and feed it into a StreamReader.

    Supports three framings selected in ``__init__``: Content-Length,
    chunked transfer encoding, and read-until-EOF.  Optionally wraps the
    output stream in a DeflateBuffer for transparent decompression.
    """

    def __init__(
        self,
        payload: StreamReader,
        length: Optional[int] = None,
        chunked: bool = False,
        compression: Optional[str] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        response_with_body: bool = True,
        auto_decompress: bool = True,
        lax: bool = False,
    ) -> None:
        # Remaining byte count for Content-Length framing.
        self._length = 0
        self._type = ParseState.PARSE_UNTIL_EOF
        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
        # Remaining byte count of the chunk currently being read.
        self._chunk_size = 0
        # Incomplete chunk-framing bytes carried over between feeds.
        self._chunk_tail = b""
        self._auto_decompress = auto_decompress
        self._lax = lax
        # True when no further body bytes are expected.
        self.done = False

        # payload decompression wrapper
        if response_with_body and compression and self._auto_decompress:
            real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
                payload, compression
            )
        else:
            real_payload = payload

        # payload parser
        if not response_with_body:
            # don't parse payload if it's not expected to be received
            self._type = ParseState.PARSE_NONE
            real_payload.feed_eof()
            self.done = True
        elif chunked:
            self._type = ParseState.PARSE_CHUNKED
        elif length is not None:
            self._type = ParseState.PARSE_LENGTH
            self._length = length
            if self._length == 0:
                real_payload.feed_eof()
                self.done = True

        self.payload = real_payload

    def feed_eof(self) -> None:
        """Handle end-of-stream; raise if the body was left incomplete."""
        if self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_eof()
        elif self._type == ParseState.PARSE_LENGTH:
            raise ContentLengthError(
                "Not enough data for satisfy content length header."
            )
        elif self._type == ParseState.PARSE_CHUNKED:
            raise TransferEncodingError(
                "Not enough data for satisfy transfer length header."
            )

    def feed_data(
        self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";"
    ) -> Tuple[bool, bytes]:
        """Consume body bytes; return (body_complete, leftover_bytes)."""
        # Read specified amount of bytes
        if self._type == ParseState.PARSE_LENGTH:
            required = self._length
            chunk_len = len(chunk)

            if required >= chunk_len:
                self._length = required - chunk_len
                self.payload.feed_data(chunk, chunk_len)
                if self._length == 0:
                    self.payload.feed_eof()
                    return True, b""
            else:
                self._length = 0
                self.payload.feed_data(chunk[:required], required)
                self.payload.feed_eof()
                return True, chunk[required:]

        # Chunked transfer encoding parser
        elif self._type == ParseState.PARSE_CHUNKED:
            if self._chunk_tail:
                chunk = self._chunk_tail + chunk
                self._chunk_tail = b""

            while chunk:

                # read next chunk size
                if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        i = chunk.find(CHUNK_EXT, 0, pos)
                        if i >= 0:
                            size_b = chunk[:i]  # strip chunk-extensions
                            # Verify no LF in the chunk-extension
                            if b"\n" in (ext := chunk[i:pos]):
                                exc = BadHttpMessage(
                                    f"Unexpected LF in chunk-extension: {ext!r}"
                                )
                                set_exception(self.payload, exc)
                                raise exc
                        else:
                            size_b = chunk[:pos]

                        if self._lax:  # Allow whitespace in lax mode.
                            size_b = size_b.strip()

                        # FIX: call fullmatch on the precompiled pattern
                        # directly instead of re-dispatching through
                        # re.fullmatch on every chunk.
                        if not HEXDIGITS.fullmatch(size_b):
                            exc = TransferEncodingError(
                                chunk[:pos].decode("ascii", "surrogateescape")
                            )
                            set_exception(self.payload, exc)
                            raise exc
                        size = int(bytes(size_b), 16)

                        chunk = chunk[pos + len(SEP) :]
                        if size == 0:  # eof marker
                            self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                            if self._lax and chunk.startswith(b"\r"):
                                chunk = chunk[1:]
                        else:
                            self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
                            self._chunk_size = size
                            self.payload.begin_http_chunk_receiving()
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # read chunk and feed buffer
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
                    required = self._chunk_size
                    chunk_len = len(chunk)

                    if required > chunk_len:
                        self._chunk_size = required - chunk_len
                        self.payload.feed_data(chunk, chunk_len)
                        return False, b""
                    else:
                        self._chunk_size = 0
                        self.payload.feed_data(chunk[:required], required)
                        chunk = chunk[required:]
                        self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
                        self.payload.end_http_chunk_receiving()

                # toss the CRLF at the end of the chunk
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
                    if self._lax and chunk.startswith(b"\r"):
                        chunk = chunk[1:]
                    if chunk[: len(SEP)] == SEP:
                        chunk = chunk[len(SEP) :]
                        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # if stream does not contain trailer, after 0\r\n
                # we should get another \r\n otherwise
                # trailers needs to be skipped until \r\n\r\n
                if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
                    head = chunk[: len(SEP)]
                    if head == SEP:
                        # end of stream
                        self.payload.feed_eof()
                        return True, chunk[len(SEP) :]
                    # Both CR and LF, or only LF may not be received yet. It is
                    # expected that CRLF or LF will be shown at the very first
                    # byte next time, otherwise trailers should come. The last
                    # CRLF which marks the end of response might not be
                    # contained in the same TCP segment which delivered the
                    # size indicator.
                    if not head:
                        return False, b""
                    if head == SEP[:1]:
                        self._chunk_tail = head
                        return False, b""
                    self._chunk = ChunkState.PARSE_TRAILERS

                # read and discard trailer up to the CRLF terminator
                if self._chunk == ChunkState.PARSE_TRAILERS:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        chunk = chunk[pos + len(SEP) :]
                        self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                    else:
                        self._chunk_tail = chunk
                        return False, b""

        # Read all bytes until eof
        elif self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_data(chunk, len(chunk))

        return False, b""
class DeflateBuffer:
|
| 950 |
+
"""DeflateStream decompress stream and feed data into specified stream."""
|
| 951 |
+
|
| 952 |
+
decompressor: Any
|
| 953 |
+
|
| 954 |
+
def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
|
| 955 |
+
self.out = out
|
| 956 |
+
self.size = 0
|
| 957 |
+
self.encoding = encoding
|
| 958 |
+
self._started_decoding = False
|
| 959 |
+
|
| 960 |
+
self.decompressor: Union[BrotliDecompressor, ZLibDecompressor]
|
| 961 |
+
if encoding == "br":
|
| 962 |
+
if not HAS_BROTLI: # pragma: no cover
|
| 963 |
+
raise ContentEncodingError(
|
| 964 |
+
"Can not decode content-encoding: brotli (br). "
|
| 965 |
+
"Please install `Brotli`"
|
| 966 |
+
)
|
| 967 |
+
self.decompressor = BrotliDecompressor()
|
| 968 |
+
else:
|
| 969 |
+
self.decompressor = ZLibDecompressor(encoding=encoding)
|
| 970 |
+
|
| 971 |
+
def set_exception(
|
| 972 |
+
self,
|
| 973 |
+
exc: BaseException,
|
| 974 |
+
exc_cause: BaseException = _EXC_SENTINEL,
|
| 975 |
+
) -> None:
|
| 976 |
+
set_exception(self.out, exc, exc_cause)
|
| 977 |
+
|
| 978 |
+
def feed_data(self, chunk: bytes, size: int) -> None:
|
| 979 |
+
if not size:
|
| 980 |
+
return
|
| 981 |
+
|
| 982 |
+
self.size += size
|
| 983 |
+
|
| 984 |
+
# RFC1950
|
| 985 |
+
# bits 0..3 = CM = 0b1000 = 8 = "deflate"
|
| 986 |
+
# bits 4..7 = CINFO = 1..7 = windows size.
|
| 987 |
+
if (
|
| 988 |
+
not self._started_decoding
|
| 989 |
+
and self.encoding == "deflate"
|
| 990 |
+
and chunk[0] & 0xF != 8
|
| 991 |
+
):
|
| 992 |
+
# Change the decoder to decompress incorrectly compressed data
|
| 993 |
+
# Actually we should issue a warning about non-RFC-compliant data.
|
| 994 |
+
self.decompressor = ZLibDecompressor(
|
| 995 |
+
encoding=self.encoding, suppress_deflate_header=True
|
| 996 |
+
)
|
| 997 |
+
|
| 998 |
+
try:
|
| 999 |
+
chunk = self.decompressor.decompress_sync(chunk)
|
| 1000 |
+
except Exception:
|
| 1001 |
+
raise ContentEncodingError(
|
| 1002 |
+
"Can not decode content-encoding: %s" % self.encoding
|
| 1003 |
+
)
|
| 1004 |
+
|
| 1005 |
+
self._started_decoding = True
|
| 1006 |
+
|
| 1007 |
+
if chunk:
|
| 1008 |
+
self.out.feed_data(chunk, len(chunk))
|
| 1009 |
+
|
| 1010 |
+
def feed_eof(self) -> None:
|
| 1011 |
+
chunk = self.decompressor.flush()
|
| 1012 |
+
|
| 1013 |
+
if chunk or self.size > 0:
|
| 1014 |
+
self.out.feed_data(chunk, len(chunk))
|
| 1015 |
+
if self.encoding == "deflate" and not self.decompressor.eof:
|
| 1016 |
+
raise ContentEncodingError("deflate")
|
| 1017 |
+
|
| 1018 |
+
self.out.feed_eof()
|
| 1019 |
+
|
| 1020 |
+
def begin_http_chunk_receiving(self) -> None:
    """Delegate chunk-start bookkeeping to the wrapped stream."""
    self.out.begin_http_chunk_receiving()
|
| 1022 |
+
|
| 1023 |
+
def end_http_chunk_receiving(self) -> None:
    """Delegate chunk-end bookkeeping to the wrapped stream."""
    self.out.end_http_chunk_receiving()
|
| 1025 |
+
|
| 1026 |
+
|
| 1027 |
+
# Pure-Python implementations are always reachable under the *Py aliases.
HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage

try:
    if not NO_EXTENSIONS:
        # C-accelerated parsers replace the default (un-suffixed) names.
        from ._http_parser import (  # type: ignore[import-not-found,no-redef]
            HttpRequestParser,
            HttpResponseParser,
            RawRequestMessage,
            RawResponseMessage,
        )

        HttpRequestParserC = HttpRequestParser
        HttpResponseParserC = HttpResponseParser
        RawRequestMessageC = RawRequestMessage
        RawResponseMessageC = RawResponseMessage
except ImportError:  # pragma: no cover
    # Extension module unavailable: silently fall back to the Python parsers.
    pass
|
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/payload.py
ADDED
|
@@ -0,0 +1,519 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import enum
|
| 3 |
+
import io
|
| 4 |
+
import json
|
| 5 |
+
import mimetypes
|
| 6 |
+
import os
|
| 7 |
+
import sys
|
| 8 |
+
import warnings
|
| 9 |
+
from abc import ABC, abstractmethod
|
| 10 |
+
from itertools import chain
|
| 11 |
+
from typing import (
|
| 12 |
+
IO,
|
| 13 |
+
TYPE_CHECKING,
|
| 14 |
+
Any,
|
| 15 |
+
Dict,
|
| 16 |
+
Final,
|
| 17 |
+
Iterable,
|
| 18 |
+
Optional,
|
| 19 |
+
TextIO,
|
| 20 |
+
Tuple,
|
| 21 |
+
Type,
|
| 22 |
+
Union,
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
from multidict import CIMultiDict
|
| 26 |
+
|
| 27 |
+
from . import hdrs
|
| 28 |
+
from .abc import AbstractStreamWriter
|
| 29 |
+
from .helpers import (
|
| 30 |
+
_SENTINEL,
|
| 31 |
+
content_disposition_header,
|
| 32 |
+
guess_filename,
|
| 33 |
+
parse_mimetype,
|
| 34 |
+
sentinel,
|
| 35 |
+
)
|
| 36 |
+
from .streams import StreamReader
|
| 37 |
+
from .typedefs import JSONEncoder, _CIMultiDict
|
| 38 |
+
|
| 39 |
+
__all__ = (
|
| 40 |
+
"PAYLOAD_REGISTRY",
|
| 41 |
+
"get_payload",
|
| 42 |
+
"payload_type",
|
| 43 |
+
"Payload",
|
| 44 |
+
"BytesPayload",
|
| 45 |
+
"StringPayload",
|
| 46 |
+
"IOBasePayload",
|
| 47 |
+
"BytesIOPayload",
|
| 48 |
+
"BufferedReaderPayload",
|
| 49 |
+
"TextIOPayload",
|
| 50 |
+
"StringIOPayload",
|
| 51 |
+
"JsonPayload",
|
| 52 |
+
"AsyncIterablePayload",
|
| 53 |
+
)
|
| 54 |
+
|
| 55 |
+
TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB
|
| 56 |
+
|
| 57 |
+
if TYPE_CHECKING:
|
| 58 |
+
from typing import List
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class LookupError(Exception):
    """Raised when no payload factory matches the given data.

    NOTE: intentionally shadows the builtin ``LookupError`` within this
    module; the name is part of the public API and cannot be changed.
    """

    pass
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class Order(str, enum.Enum):
    """Lookup priority for registered payload factories."""

    normal = "normal"
    try_first = "try_first"
    try_last = "try_last"
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
    """Construct a Payload for *data* using the module-level registry."""
    return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def register_payload(
    factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
    """Add *factory* for *type* to the module-level payload registry."""
    PAYLOAD_REGISTRY.register(factory, type, order=order)
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class payload_type:
    """Class decorator that registers the decorated Payload for *type*."""

    def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
        self.type = type
        self.order = order

    def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
        # Register as a side effect and hand the class back unchanged.
        register_payload(factory, self.type, order=self.order)
        return factory
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
PayloadType = Type["Payload"]
|
| 92 |
+
_PayloadRegistryItem = Tuple[PayloadType, Any]
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class PayloadRegistry:
    """Payload registry.

    note: we need zope.interface for more efficient adapter search
    """

    __slots__ = ("_first", "_normal", "_last", "_normal_lookup")

    def __init__(self) -> None:
        self._first: List[_PayloadRegistryItem] = []
        self._normal: List[_PayloadRegistryItem] = []
        self._last: List[_PayloadRegistryItem] = []
        # Exact-type table that short-circuits the linear isinstance scan.
        self._normal_lookup: Dict[Any, PayloadType] = {}

    def get(
        self,
        data: Any,
        *args: Any,
        _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
        **kwargs: Any,
    ) -> "Payload":
        """Build a Payload for *data*: try_first, exact type, Payload passthrough,
        then the normal/try_last linear scan."""
        for fac, registered in self._first:
            if isinstance(data, registered):
                return fac(data, *args, **kwargs)
        exact = self._normal_lookup.get(type(data))
        if exact:
            return exact(data, *args, **kwargs)
        # Already-wrapped payloads are returned untouched.
        if isinstance(data, Payload):
            return data
        for fac, registered in _CHAIN(self._normal, self._last):
            if isinstance(data, registered):
                return fac(data, *args, **kwargs)
        raise LookupError()

    def register(
        self, factory: PayloadType, type: Any, *, order: Order = Order.normal
    ) -> None:
        """Register *factory* for *type* under the given lookup priority."""
        if order is Order.try_first:
            self._first.append((factory, type))
            return
        if order is Order.try_last:
            self._last.append((factory, type))
            return
        if order is not Order.normal:
            raise ValueError(f"Unsupported order {order!r}")
        self._normal.append((factory, type))
        # Also populate the exact-type table; tuples register each member.
        if isinstance(type, Iterable):
            for member in type:
                self._normal_lookup[member] = factory
        else:
            self._normal_lookup[type] = factory
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
class Payload(ABC):
    """Abstract base class for HTTP request/response body payloads."""

    # Fallback Content-Type when none can be determined.
    _default_content_type: str = "application/octet-stream"
    # Size in bytes, or None when unknown (e.g. streaming sources).
    _size: Optional[int] = None

    def __init__(
        self,
        value: Any,
        headers: Optional[
            Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
        ] = None,
        content_type: Union[str, None, _SENTINEL] = sentinel,
        filename: Optional[str] = None,
        encoding: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        self._encoding = encoding
        self._filename = filename
        self._headers: _CIMultiDict = CIMultiDict()
        self._value = value
        # Content-Type resolution: explicit value wins, then a guess from
        # the filename, then the class default.
        if content_type is not sentinel and content_type is not None:
            self._headers[hdrs.CONTENT_TYPE] = content_type
        elif self._filename is not None:
            # guess_file_type is the non-deprecated spelling on 3.13+.
            if sys.version_info >= (3, 13):
                guesser = mimetypes.guess_file_type
            else:
                guesser = mimetypes.guess_type
            content_type = guesser(self._filename)[0]
            if content_type is None:
                content_type = self._default_content_type
            self._headers[hdrs.CONTENT_TYPE] = content_type
        else:
            self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
        # Caller-supplied headers may override the derived Content-Type.
        if headers:
            self._headers.update(headers)

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        return self._size

    @property
    def filename(self) -> Optional[str]:
        """Filename of the payload."""
        return self._filename

    @property
    def headers(self) -> _CIMultiDict:
        """Custom item headers"""
        return self._headers

    @property
    def _binary_headers(self) -> bytes:
        # Wire-format header block: "Name: value\r\n" pairs plus terminator.
        return (
            "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
                "utf-8"
            )
            + b"\r\n"
        )

    @property
    def encoding(self) -> Optional[str]:
        """Payload encoding"""
        return self._encoding

    @property
    def content_type(self) -> str:
        """Content type"""
        return self._headers[hdrs.CONTENT_TYPE]

    def set_content_disposition(
        self,
        disptype: str,
        quote_fields: bool = True,
        _charset: str = "utf-8",
        **params: Any,
    ) -> None:
        """Sets ``Content-Disposition`` header."""
        self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
            disptype, quote_fields=quote_fields, _charset=_charset, **params
        )

    @abstractmethod
    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Return string representation of the value.

        This is named decode() to allow compatibility with bytes objects.
        """

    @abstractmethod
    async def write(self, writer: AbstractStreamWriter) -> None:
        """Write payload.

        writer is an AbstractStreamWriter instance:
        """
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
class BytesPayload(Payload):
    """Payload backed by an in-memory bytes-like object."""

    _value: bytes

    def __init__(
        self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any
    ) -> None:
        kwargs.setdefault("content_type", "application/octet-stream")
        super().__init__(value, *args, **kwargs)

        if isinstance(value, memoryview):
            self._size = value.nbytes
        elif isinstance(value, (bytes, bytearray)):
            self._size = len(value)
        else:
            raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")

        if self._size > TOO_LARGE_BYTES_BODY:
            # Large raw-bytes bodies are written synchronously and can stall
            # the event loop; nudge callers toward a file-like object.
            warnings.warn(
                "Sending a large body directly with raw bytes might"
                " lock the event loop. You should probably pass an "
                "io.BytesIO object instead",
                ResourceWarning,
                source=self,
            )

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Decode the stored bytes to text."""
        return self._value.decode(encoding, errors)

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Send the whole body to *writer* in a single call."""
        await writer.write(self._value)
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
class StringPayload(BytesPayload):
    """Payload for ``str`` values, encoded per the content-type charset."""

    def __init__(
        self,
        value: str,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        if encoding is not None:
            real_encoding = encoding
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding
        elif content_type is None:
            real_encoding = "utf-8"
            content_type = "text/plain; charset=utf-8"
        else:
            # Honour an explicit charset in the supplied content type.
            mimetype = parse_mimetype(content_type)
            real_encoding = mimetype.parameters.get("charset", "utf-8")

        super().__init__(
            value.encode(real_encoding),
            encoding=real_encoding,
            content_type=content_type,
            *args,
            **kwargs,
        )
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
class StringIOPayload(StringPayload):
    """Payload that eagerly drains an in-memory text buffer."""

    def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
        # The buffer is consumed up front; StringPayload handles encoding.
        text = value.read()
        super().__init__(text, *args, **kwargs)
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
class IOBasePayload(Payload):
    """Payload streamed from a (binary) file-like object."""

    _value: io.IOBase

    def __init__(
        self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
    ) -> None:
        if "filename" not in kwargs:
            kwargs["filename"] = guess_filename(value)

        super().__init__(value, *args, **kwargs)

        # Only set Content-Disposition when we have a filename and the
        # caller did not already provide the header.
        if self._filename is not None and disposition is not None:
            if hdrs.CONTENT_DISPOSITION not in self.headers:
                self.set_content_disposition(disposition, filename=self._filename)

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Stream the file object to *writer* in 64 KiB chunks."""
        loop = asyncio.get_event_loop()
        try:
            # Blocking reads run in the default executor; the file is
            # always closed afterwards, even on error.
            while data := await loop.run_in_executor(None, self._value.read, 2**16):
                await writer.write(data)
        finally:
            await loop.run_in_executor(None, self._value.close)

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Read the remaining bytes and decode them to text."""
        lines = self._value.readlines()
        return "".join(line.decode(encoding, errors) for line in lines)
|
| 345 |
+
|
| 346 |
+
|
| 347 |
+
class TextIOPayload(IOBasePayload):
    """Payload streamed from a text-mode file object, encoded on write."""

    _value: io.TextIOBase

    def __init__(
        self,
        value: TextIO,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:

        # Resolve encoding/content-type the same way StringPayload does:
        # explicit encoding wins; otherwise derive from content_type charset.
        if encoding is None:
            if content_type is None:
                encoding = "utf-8"
                content_type = "text/plain; charset=utf-8"
            else:
                mimetype = parse_mimetype(content_type)
                encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding

        super().__init__(
            value,
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )

    @property
    def size(self) -> Optional[int]:
        # Remaining size via fstat; None for objects without a real fd.
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            return None

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        # The underlying stream is already text; arguments are ignored.
        return self._value.read()

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Read text chunks in the executor, encode, and write them out."""
        loop = asyncio.get_event_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
            while chunk:
                # Encode with the payload's encoding, or the platform
                # default when none was resolved.
                data = (
                    chunk.encode(encoding=self._encoding)
                    if self._encoding
                    else chunk.encode()
                )
                await writer.write(data)
                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
        finally:
            # Always close the source, even if the writer raised.
            await loop.run_in_executor(None, self._value.close)
|
| 402 |
+
|
| 403 |
+
|
| 404 |
+
class BytesIOPayload(IOBasePayload):
    """Payload backed by an :class:`io.BytesIO` buffer."""

    _value: io.BytesIO

    @property
    def size(self) -> int:
        """Number of unread bytes remaining in the buffer."""
        here = self._value.tell()
        total = self._value.seek(0, os.SEEK_END)
        # Restore the read position; size must not consume the stream.
        self._value.seek(here)
        return total - here

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Decode the remaining buffer contents to text."""
        return self._value.read().decode(encoding, errors)
|
| 416 |
+
|
| 417 |
+
|
| 418 |
+
class BufferedReaderPayload(IOBasePayload):
    """Payload streamed from a buffered binary reader."""

    _value: io.BufferedIOBase

    @property
    def size(self) -> Optional[int]:
        # Remaining size via fstat; None when the object has no usable fd.
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except (OSError, AttributeError):
            # data.fileno() is not supported, e.g.
            # io.BufferedReader(io.BytesIO(b'data'))
            # For some file-like objects (e.g. tarfile), the fileno() attribute may
            # not exist at all, and will instead raise an AttributeError.
            return None

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Read the remaining bytes and decode them to text."""
        return self._value.read().decode(encoding, errors)
|
| 434 |
+
|
| 435 |
+
|
| 436 |
+
class JsonPayload(BytesPayload):
    """Payload that serialises *value* to JSON up front."""

    def __init__(
        self,
        value: Any,
        encoding: str = "utf-8",
        content_type: str = "application/json",
        dumps: JSONEncoder = json.dumps,
        *args: Any,
        **kwargs: Any,
    ) -> None:
        # Serialise eagerly so size and content are fixed at creation time.
        serialized = dumps(value).encode(encoding)
        super().__init__(
            serialized,
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )
|
| 454 |
+
|
| 455 |
+
|
| 456 |
+
if TYPE_CHECKING:
    from typing import AsyncIterable, AsyncIterator

    # For type checkers: the payload element type is always bytes.
    _AsyncIterator = AsyncIterator[bytes]
    _AsyncIterable = AsyncIterable[bytes]
else:
    from collections.abc import AsyncIterable, AsyncIterator

    # At runtime use the plain ABCs (no generic subscripting needed).
    _AsyncIterator = AsyncIterator
    _AsyncIterable = AsyncIterable
|
| 466 |
+
|
| 467 |
+
|
| 468 |
+
class AsyncIterablePayload(Payload):
    """Payload streamed from an asynchronous iterable of bytes."""

    # Live iterator; set to None once exhausted so the payload is single-use.
    _iter: Optional[_AsyncIterator] = None
    _value: _AsyncIterable

    def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, AsyncIterable):
            raise TypeError(
                "value argument must support "
                "collections.abc.AsyncIterable interface, "
                "got {!r}".format(type(value))
            )

        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        # The iterator is created once, here; the payload cannot be replayed.
        self._iter = value.__aiter__()

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Drain the iterator into *writer*; a second call is a no-op."""
        if self._iter:
            try:
                # iter is not None check prevents rare cases
                # when the case iterable is used twice
                while True:
                    chunk = await self._iter.__anext__()
                    await writer.write(chunk)
            except StopAsyncIteration:
                self._iter = None

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        # A streaming source cannot be decoded synchronously.
        raise TypeError("Unable to decode.")
|
| 501 |
+
|
| 502 |
+
|
| 503 |
+
class StreamReaderPayload(AsyncIterablePayload):
    """Adapter exposing a StreamReader as an async-iterable payload."""

    def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
        # iter_any() yields data chunks as soon as they become available.
        super().__init__(value.iter_any(), *args, **kwargs)
|
| 506 |
+
|
| 507 |
+
|
| 508 |
+
# Module-level registry mapping input types to Payload factories.
# Order matters for the linear fallback scan: more specific types first.
PAYLOAD_REGISTRY = PayloadRegistry()
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last gives more specialized async iterables (e.g. the multipart
# BodyPartReaderPayload) a chance to override this generic default.
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
|
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/web_middlewares.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
from typing import TYPE_CHECKING, Tuple, Type, TypeVar
|
| 3 |
+
|
| 4 |
+
from .typedefs import Handler, Middleware
|
| 5 |
+
from .web_exceptions import HTTPMove, HTTPPermanentRedirect
|
| 6 |
+
from .web_request import Request
|
| 7 |
+
from .web_response import StreamResponse
|
| 8 |
+
from .web_urldispatcher import SystemRoute
|
| 9 |
+
|
| 10 |
+
__all__ = (
|
| 11 |
+
"middleware",
|
| 12 |
+
"normalize_path_middleware",
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
if TYPE_CHECKING:
|
| 16 |
+
from .web_app import Application
|
| 17 |
+
|
| 18 |
+
_Func = TypeVar("_Func")
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]:
    """Return (resolved, request) for *request* re-targeted at *path*.

    On success the returned request is the rewritten clone; otherwise the
    original request is handed back unchanged.
    """
    candidate = request.clone(rel_url=path)

    match_info = await request.app.router.resolve(candidate)
    candidate._match_info = match_info

    if match_info.http_exception is not None:
        return False, request
    return True, candidate
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def middleware(f: _Func) -> _Func:
    """Mark *f* as a new-style (version 1) middleware and return it."""
    setattr(f, "__middleware_version__", 1)
    return f
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def normalize_path_middleware(
    *,
    append_slash: bool = True,
    remove_slash: bool = False,
    merge_slashes: bool = True,
    redirect_class: Type[HTTPMove] = HTTPPermanentRedirect,
) -> Middleware:
    """Factory for producing a middleware that normalizes the path of a request.

    Normalizing means:
      - Add or remove a trailing slash to the path.
      - Double slashes are replaced by one.

    The middleware returns as soon as it finds a path that resolves
    correctly. The order if both merge and append/remove are enabled is
        1) merge slashes
        2) append/remove slash
        3) both merge slashes and append/remove slash.
    If the path resolves with at least one of those conditions, it will
    redirect to the new path.

    Only one of `append_slash` and `remove_slash` can be enabled. If both
    are `True` the factory will raise an assertion error

    If `append_slash` is `True` the middleware will append a slash when
    needed. If a resource is defined with trailing slash and the request
    comes without it, it will append it automatically.

    If `remove_slash` is `True`, `append_slash` must be `False`. When enabled
    the middleware will remove trailing slashes and redirect if the resource
    is defined

    If merge_slashes is True, merge multiple consecutive slashes in the
    path into one.
    """
    correct_configuration = not (append_slash and remove_slash)
    assert correct_configuration, "Cannot both remove and append slash"

    @middleware
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        # Only attempt rewrites when normal routing failed (SystemRoute
        # means a 404/405-style system response was matched).
        if isinstance(request.match_info.route, SystemRoute):
            paths_to_check = []
            # Split off the query string so only the path is normalized.
            if "?" in request.raw_path:
                path, query = request.raw_path.split("?", 1)
                query = "?" + query
            else:
                query = ""
                path = request.raw_path

            if merge_slashes:
                paths_to_check.append(re.sub("//+", "/", path))
            if append_slash and not request.path.endswith("/"):
                paths_to_check.append(path + "/")
            if remove_slash and request.path.endswith("/"):
                paths_to_check.append(path[:-1])
            if merge_slashes and append_slash:
                paths_to_check.append(re.sub("//+", "/", path + "/"))
            if merge_slashes and remove_slash:
                merged_slashes = re.sub("//+", "/", path)
                paths_to_check.append(merged_slashes[:-1])

            for path in paths_to_check:
                # Collapse leading slashes so the redirect target cannot be
                # interpreted as a protocol-relative URL (open redirect).
                path = re.sub("^//+", "/", path)  # SECURITY: GHSA-v6wp-4m6f-gcjg
                resolves, request = await _check_request_resolves(request, path)
                if resolves:
                    raise redirect_class(request.raw_path + query)

        return await handler(request)

    return impl
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def _fix_request_current_app(app: "Application") -> Middleware:
    """Middleware factory that pins match_info.current_app to *app* while a
    handler runs, restoring the previous value afterwards."""

    @middleware
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        match_info = request.match_info
        saved = match_info.current_app
        match_info.current_app = app
        try:
            return await handler(request)
        finally:
            # Restore whichever app was current before this handler ran.
            match_info.current_app = saved

    return impl
|
evalkit_cambrian/lib/python3.10/site-packages/nvidia_cuda_nvrtc_cu12-12.1.105.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
evalkit_cambrian/lib/python3.10/site-packages/nvidia_cuda_nvrtc_cu12-12.1.105.dist-info/License.txt
ADDED
|
@@ -0,0 +1,1568 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
End User License Agreement
|
| 2 |
+
--------------------------
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
Preface
|
| 6 |
+
-------
|
| 7 |
+
|
| 8 |
+
The Software License Agreement in Chapter 1 and the Supplement
|
| 9 |
+
in Chapter 2 contain license terms and conditions that govern
|
| 10 |
+
the use of NVIDIA software. By accepting this agreement, you
|
| 11 |
+
agree to comply with all the terms and conditions applicable
|
| 12 |
+
to the product(s) included herein.
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
NVIDIA Driver
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
Description
|
| 19 |
+
|
| 20 |
+
This package contains the operating system driver and
|
| 21 |
+
fundamental system software components for NVIDIA GPUs.
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
NVIDIA CUDA Toolkit
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
Description
|
| 28 |
+
|
| 29 |
+
The NVIDIA CUDA Toolkit provides command-line and graphical
|
| 30 |
+
tools for building, debugging and optimizing the performance
|
| 31 |
+
of applications accelerated by NVIDIA GPUs, runtime and math
|
| 32 |
+
libraries, and documentation including programming guides,
|
| 33 |
+
user manuals, and API references.
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
Default Install Location of CUDA Toolkit
|
| 37 |
+
|
| 38 |
+
Windows platform:
|
| 39 |
+
|
| 40 |
+
%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.#
|
| 41 |
+
|
| 42 |
+
Linux platform:
|
| 43 |
+
|
| 44 |
+
/usr/local/cuda-#.#
|
| 45 |
+
|
| 46 |
+
Mac platform:
|
| 47 |
+
|
| 48 |
+
/Developer/NVIDIA/CUDA-#.#
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
NVIDIA CUDA Samples
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
Description
|
| 55 |
+
|
| 56 |
+
This package includes over 100+ CUDA examples that demonstrate
|
| 57 |
+
various CUDA programming principles, and efficient CUDA
|
| 58 |
+
implementation of algorithms in specific application domains.
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
Default Install Location of CUDA Samples
|
| 62 |
+
|
| 63 |
+
Windows platform:
|
| 64 |
+
|
| 65 |
+
%ProgramData%\NVIDIA Corporation\CUDA Samples\v#.#
|
| 66 |
+
|
| 67 |
+
Linux platform:
|
| 68 |
+
|
| 69 |
+
/usr/local/cuda-#.#/samples
|
| 70 |
+
|
| 71 |
+
and
|
| 72 |
+
|
| 73 |
+
$HOME/NVIDIA_CUDA-#.#_Samples
|
| 74 |
+
|
| 75 |
+
Mac platform:
|
| 76 |
+
|
| 77 |
+
/Developer/NVIDIA/CUDA-#.#/samples
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
NVIDIA Nsight Visual Studio Edition (Windows only)
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
Description
|
| 84 |
+
|
| 85 |
+
NVIDIA Nsight Development Platform, Visual Studio Edition is a
|
| 86 |
+
development environment integrated into Microsoft Visual
|
| 87 |
+
Studio that provides tools for debugging, profiling, analyzing
|
| 88 |
+
and optimizing your GPU computing and graphics applications.
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
Default Install Location of Nsight Visual Studio Edition
|
| 92 |
+
|
| 93 |
+
Windows platform:
|
| 94 |
+
|
| 95 |
+
%ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.#
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
1. License Agreement for NVIDIA Software Development Kits
|
| 99 |
+
---------------------------------------------------------
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
Release Date: July 26, 2018
|
| 103 |
+
---------------------------
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
Important Notice—Read before downloading,
|
| 107 |
+
copying or using the licensed software:
|
| 108 |
+
-------------------------------------------------------
|
| 109 |
+
|
| 110 |
+
This license agreement, including exhibits attached
|
| 111 |
+
(“Agreement”) is a legal agreement between you and NVIDIA
|
| 112 |
+
Corporation ("NVIDIA") and governs your use of a NVIDIA
|
| 113 |
+
software development kit (“SDK”).
|
| 114 |
+
|
| 115 |
+
Each SDK has its own set of software and materials, but here
|
| 116 |
+
is a description of the types of items that may be included in
|
| 117 |
+
a SDK: source code, header files, APIs, data sets and assets
|
| 118 |
+
(examples include images, textures, models, scenes, videos,
|
| 119 |
+
native API input/output files), binary software, sample code,
|
| 120 |
+
libraries, utility programs, programming code and
|
| 121 |
+
documentation.
|
| 122 |
+
|
| 123 |
+
This Agreement can be accepted only by an adult of legal age
|
| 124 |
+
of majority in the country in which the SDK is used.
|
| 125 |
+
|
| 126 |
+
If you are entering into this Agreement on behalf of a company
|
| 127 |
+
or other legal entity, you represent that you have the legal
|
| 128 |
+
authority to bind the entity to this Agreement, in which case
|
| 129 |
+
“you” will mean the entity you represent.
|
| 130 |
+
|
| 131 |
+
If you don’t have the required age or authority to accept
|
| 132 |
+
this Agreement, or if you don’t accept all the terms and
|
| 133 |
+
conditions of this Agreement, do not download, install or use
|
| 134 |
+
the SDK.
|
| 135 |
+
|
| 136 |
+
You agree to use the SDK only for purposes that are permitted
|
| 137 |
+
by (a) this Agreement, and (b) any applicable law, regulation
|
| 138 |
+
or generally accepted practices or guidelines in the relevant
|
| 139 |
+
jurisdictions.
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
1.1. License
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
1.1.1. License Grant
|
| 146 |
+
|
| 147 |
+
Subject to the terms of this Agreement, NVIDIA hereby grants
|
| 148 |
+
you a non-exclusive, non-transferable license, without the
|
| 149 |
+
right to sublicense (except as expressly provided in this
|
| 150 |
+
Agreement) to:
|
| 151 |
+
|
| 152 |
+
1. Install and use the SDK,
|
| 153 |
+
|
| 154 |
+
2. Modify and create derivative works of sample source code
|
| 155 |
+
delivered in the SDK, and
|
| 156 |
+
|
| 157 |
+
3. Distribute those portions of the SDK that are identified
|
| 158 |
+
in this Agreement as distributable, as incorporated in
|
| 159 |
+
object code format into a software application that meets
|
| 160 |
+
the distribution requirements indicated in this Agreement.
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
1.1.2. Distribution Requirements
|
| 164 |
+
|
| 165 |
+
These are the distribution requirements for you to exercise
|
| 166 |
+
the distribution grant:
|
| 167 |
+
|
| 168 |
+
1. Your application must have material additional
|
| 169 |
+
functionality, beyond the included portions of the SDK.
|
| 170 |
+
|
| 171 |
+
2. The distributable portions of the SDK shall only be
|
| 172 |
+
accessed by your application.
|
| 173 |
+
|
| 174 |
+
3. The following notice shall be included in modifications
|
| 175 |
+
and derivative works of sample source code distributed:
|
| 176 |
+
“This software contains source code provided by NVIDIA
|
| 177 |
+
Corporation.”
|
| 178 |
+
|
| 179 |
+
4. Unless a developer tool is identified in this Agreement
|
| 180 |
+
as distributable, it is delivered for your internal use
|
| 181 |
+
only.
|
| 182 |
+
|
| 183 |
+
5. The terms under which you distribute your application
|
| 184 |
+
must be consistent with the terms of this Agreement,
|
| 185 |
+
including (without limitation) terms relating to the
|
| 186 |
+
license grant and license restrictions and protection of
|
| 187 |
+
NVIDIA’s intellectual property rights. Additionally, you
|
| 188 |
+
agree that you will protect the privacy, security and
|
| 189 |
+
legal rights of your application users.
|
| 190 |
+
|
| 191 |
+
6. You agree to notify NVIDIA in writing of any known or
|
| 192 |
+
suspected distribution or use of the SDK not in compliance
|
| 193 |
+
with the requirements of this Agreement, and to enforce
|
| 194 |
+
the terms of your agreements with respect to distributed
|
| 195 |
+
SDK.
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
1.1.3. Authorized Users
|
| 199 |
+
|
| 200 |
+
You may allow employees and contractors of your entity or of
|
| 201 |
+
your subsidiary(ies) to access and use the SDK from your
|
| 202 |
+
secure network to perform work on your behalf.
|
| 203 |
+
|
| 204 |
+
If you are an academic institution you may allow users
|
| 205 |
+
enrolled or employed by the academic institution to access and
|
| 206 |
+
use the SDK from your secure network.
|
| 207 |
+
|
| 208 |
+
You are responsible for the compliance with the terms of this
|
| 209 |
+
Agreement by your authorized users. If you become aware that
|
| 210 |
+
your authorized users didn’t follow the terms of this
|
| 211 |
+
Agreement, you agree to take reasonable steps to resolve the
|
| 212 |
+
non-compliance and prevent new occurrences.
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
1.1.4. Pre-Release SDK
|
| 216 |
+
|
| 217 |
+
The SDK versions identified as alpha, beta, preview or
|
| 218 |
+
otherwise as pre-release, may not be fully functional, may
|
| 219 |
+
contain errors or design flaws, and may have reduced or
|
| 220 |
+
different security, privacy, accessibility, availability, and
|
| 221 |
+
reliability standards relative to commercial versions of
|
| 222 |
+
NVIDIA software and materials. Use of a pre-release SDK may
|
| 223 |
+
result in unexpected results, loss of data, project delays or
|
| 224 |
+
other unpredictable damage or loss.
|
| 225 |
+
|
| 226 |
+
You may use a pre-release SDK at your own risk, understanding
|
| 227 |
+
that pre-release SDKs are not intended for use in production
|
| 228 |
+
or business-critical systems.
|
| 229 |
+
|
| 230 |
+
NVIDIA may choose not to make available a commercial version
|
| 231 |
+
of any pre-release SDK. NVIDIA may also choose to abandon
|
| 232 |
+
development and terminate the availability of a pre-release
|
| 233 |
+
SDK at any time without liability.
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
1.1.5. Updates
|
| 237 |
+
|
| 238 |
+
NVIDIA may, at its option, make available patches, workarounds
|
| 239 |
+
or other updates to this SDK. Unless the updates are provided
|
| 240 |
+
with their separate governing terms, they are deemed part of
|
| 241 |
+
the SDK licensed to you as provided in this Agreement. You
|
| 242 |
+
agree that the form and content of the SDK that NVIDIA
|
| 243 |
+
provides may change without prior notice to you. While NVIDIA
|
| 244 |
+
generally maintains compatibility between versions, NVIDIA may
|
| 245 |
+
in some cases make changes that introduce incompatibilities in
|
| 246 |
+
future versions of the SDK.
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
1.1.6. Third Party Licenses
|
| 250 |
+
|
| 251 |
+
The SDK may come bundled with, or otherwise include or be
|
| 252 |
+
distributed with, third party software licensed by a NVIDIA
|
| 253 |
+
supplier and/or open source software provided under an open
|
| 254 |
+
source license. Use of third party software is subject to the
|
| 255 |
+
third-party license terms, or in the absence of third party
|
| 256 |
+
terms, the terms of this Agreement. Copyright to third party
|
| 257 |
+
software is held by the copyright holders indicated in the
|
| 258 |
+
third-party software or license.
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
1.1.7. Reservation of Rights
|
| 262 |
+
|
| 263 |
+
NVIDIA reserves all rights, title, and interest in and to the
|
| 264 |
+
SDK, not expressly granted to you under this Agreement.
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
1.2. Limitations
|
| 268 |
+
|
| 269 |
+
The following license limitations apply to your use of the
|
| 270 |
+
SDK:
|
| 271 |
+
|
| 272 |
+
1. You may not reverse engineer, decompile or disassemble,
|
| 273 |
+
or remove copyright or other proprietary notices from any
|
| 274 |
+
portion of the SDK or copies of the SDK.
|
| 275 |
+
|
| 276 |
+
2. Except as expressly provided in this Agreement, you may
|
| 277 |
+
not copy, sell, rent, sublicense, transfer, distribute,
|
| 278 |
+
modify, or create derivative works of any portion of the
|
| 279 |
+
SDK. For clarity, you may not distribute or sublicense the
|
| 280 |
+
SDK as a stand-alone product.
|
| 281 |
+
|
| 282 |
+
3. Unless you have an agreement with NVIDIA for this
|
| 283 |
+
purpose, you may not indicate that an application created
|
| 284 |
+
with the SDK is sponsored or endorsed by NVIDIA.
|
| 285 |
+
|
| 286 |
+
4. You may not bypass, disable, or circumvent any
|
| 287 |
+
encryption, security, digital rights management or
|
| 288 |
+
authentication mechanism in the SDK.
|
| 289 |
+
|
| 290 |
+
5. You may not use the SDK in any manner that would cause it
|
| 291 |
+
to become subject to an open source software license. As
|
| 292 |
+
examples, licenses that require as a condition of use,
|
| 293 |
+
modification, and/or distribution that the SDK be:
|
| 294 |
+
|
| 295 |
+
a. Disclosed or distributed in source code form;
|
| 296 |
+
|
| 297 |
+
b. Licensed for the purpose of making derivative works;
|
| 298 |
+
or
|
| 299 |
+
|
| 300 |
+
c. Redistributable at no charge.
|
| 301 |
+
|
| 302 |
+
6. Unless you have an agreement with NVIDIA for this
|
| 303 |
+
purpose, you may not use the SDK with any system or
|
| 304 |
+
application where the use or failure of the system or
|
| 305 |
+
application can reasonably be expected to threaten or
|
| 306 |
+
result in personal injury, death, or catastrophic loss.
|
| 307 |
+
Examples include use in avionics, navigation, military,
|
| 308 |
+
medical, life support or other life critical applications.
|
| 309 |
+
NVIDIA does not design, test or manufacture the SDK for
|
| 310 |
+
these critical uses and NVIDIA shall not be liable to you
|
| 311 |
+
or any third party, in whole or in part, for any claims or
|
| 312 |
+
damages arising from such uses.
|
| 313 |
+
|
| 314 |
+
7. You agree to defend, indemnify and hold harmless NVIDIA
|
| 315 |
+
and its affiliates, and their respective employees,
|
| 316 |
+
contractors, agents, officers and directors, from and
|
| 317 |
+
against any and all claims, damages, obligations, losses,
|
| 318 |
+
liabilities, costs or debt, fines, restitutions and
|
| 319 |
+
expenses (including but not limited to attorney’s fees
|
| 320 |
+
and costs incident to establishing the right of
|
| 321 |
+
indemnification) arising out of or related to your use of
|
| 322 |
+
the SDK outside of the scope of this Agreement, or not in
|
| 323 |
+
compliance with its terms.
|
| 324 |
+
|
| 325 |
+
|
| 326 |
+
1.3. Ownership
|
| 327 |
+
|
| 328 |
+
1. NVIDIA or its licensors hold all rights, title and
|
| 329 |
+
interest in and to the SDK and its modifications and
|
| 330 |
+
derivative works, including their respective intellectual
|
| 331 |
+
property rights, subject to your rights described in this
|
| 332 |
+
section. This SDK may include software and materials from
|
| 333 |
+
NVIDIA’s licensors, and these licensors are intended
|
| 334 |
+
third party beneficiaries that may enforce this Agreement
|
| 335 |
+
with respect to their intellectual property rights.
|
| 336 |
+
|
| 337 |
+
2. You hold all rights, title and interest in and to your
|
| 338 |
+
applications and your derivative works of the sample
|
| 339 |
+
source code delivered in the SDK, including their
|
| 340 |
+
respective intellectual property rights, subject to
|
| 341 |
+
NVIDIA’s rights described in this section.
|
| 342 |
+
|
| 343 |
+
3. You may, but don’t have to, provide to NVIDIA
|
| 344 |
+
suggestions, feature requests or other feedback regarding
|
| 345 |
+
the SDK, including possible enhancements or modifications
|
| 346 |
+
to the SDK. For any feedback that you voluntarily provide,
|
| 347 |
+
you hereby grant NVIDIA and its affiliates a perpetual,
|
| 348 |
+
non-exclusive, worldwide, irrevocable license to use,
|
| 349 |
+
reproduce, modify, license, sublicense (through multiple
|
| 350 |
+
tiers of sublicensees), and distribute (through multiple
|
| 351 |
+
tiers of distributors) it without the payment of any
|
| 352 |
+
royalties or fees to you. NVIDIA will use feedback at its
|
| 353 |
+
choice. NVIDIA is constantly looking for ways to improve
|
| 354 |
+
its products, so you may send feedback to NVIDIA through
|
| 355 |
+
the developer portal at https://developer.nvidia.com.
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
1.4. No Warranties
|
| 359 |
+
|
| 360 |
+
THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL
|
| 361 |
+
FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND
|
| 362 |
+
ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND
|
| 363 |
+
OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING,
|
| 364 |
+
BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS
|
| 365 |
+
FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE
|
| 366 |
+
ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO
|
| 367 |
+
WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF
|
| 368 |
+
DEALING OR COURSE OF TRADE.
|
| 369 |
+
|
| 370 |
+
|
| 371 |
+
1.5. Limitation of Liability
|
| 372 |
+
|
| 373 |
+
TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS
|
| 374 |
+
AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
|
| 375 |
+
PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS
|
| 376 |
+
OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF
|
| 377 |
+
PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION
|
| 378 |
+
WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK,
|
| 379 |
+
WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH
|
| 380 |
+
OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE),
|
| 381 |
+
PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF
|
| 382 |
+
LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES
|
| 383 |
+
TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS
|
| 384 |
+
AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE
|
| 385 |
+
NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS
|
| 386 |
+
LIMIT.
|
| 387 |
+
|
| 388 |
+
These exclusions and limitations of liability shall apply
|
| 389 |
+
regardless if NVIDIA or its affiliates have been advised of
|
| 390 |
+
the possibility of such damages, and regardless of whether a
|
| 391 |
+
remedy fails its essential purpose. These exclusions and
|
| 392 |
+
limitations of liability form an essential basis of the
|
| 393 |
+
bargain between the parties, and, absent any of these
|
| 394 |
+
exclusions or limitations of liability, the provisions of this
|
| 395 |
+
Agreement, including, without limitation, the economic terms,
|
| 396 |
+
would be substantially different.
|
| 397 |
+
|
| 398 |
+
|
| 399 |
+
1.6. Termination
|
| 400 |
+
|
| 401 |
+
1. This Agreement will continue to apply until terminated by
|
| 402 |
+
either you or NVIDIA as described below.
|
| 403 |
+
|
| 404 |
+
2. If you want to terminate this Agreement, you may do so by
|
| 405 |
+
stopping to use the SDK.
|
| 406 |
+
|
| 407 |
+
3. NVIDIA may, at any time, terminate this Agreement if:
|
| 408 |
+
|
| 409 |
+
a. (i) you fail to comply with any term of this
|
| 410 |
+
Agreement and the non-compliance is not fixed within
|
| 411 |
+
thirty (30) days following notice from NVIDIA (or
|
| 412 |
+
immediately if you violate NVIDIA’s intellectual
|
| 413 |
+
property rights);
|
| 414 |
+
|
| 415 |
+
b. (ii) you commence or participate in any legal
|
| 416 |
+
proceeding against NVIDIA with respect to the SDK; or
|
| 417 |
+
|
| 418 |
+
c. (iii) NVIDIA decides to no longer provide the SDK in
|
| 419 |
+
a country or, in NVIDIA’s sole discretion, the
|
| 420 |
+
continued use of it is no longer commercially viable.
|
| 421 |
+
|
| 422 |
+
4. Upon any termination of this Agreement, you agree to
|
| 423 |
+
promptly discontinue use of the SDK and destroy all copies
|
| 424 |
+
in your possession or control. Your prior distributions in
|
| 425 |
+
accordance with this Agreement are not affected by the
|
| 426 |
+
termination of this Agreement. Upon written request, you
|
| 427 |
+
will certify in writing that you have complied with your
|
| 428 |
+
commitments under this section. Upon any termination of
|
| 429 |
+
this Agreement all provisions survive except for the
|
| 430 |
+
license grant provisions.
|
| 431 |
+
|
| 432 |
+
|
| 433 |
+
1.7. General
|
| 434 |
+
|
| 435 |
+
If you wish to assign this Agreement or your rights and
|
| 436 |
+
obligations, including by merger, consolidation, dissolution
|
| 437 |
+
or operation of law, contact NVIDIA to ask for permission. Any
|
| 438 |
+
attempted assignment not approved by NVIDIA in writing shall
|
| 439 |
+
be void and of no effect. NVIDIA may assign, delegate or
|
| 440 |
+
transfer this Agreement and its rights and obligations, and if
|
| 441 |
+
to a non-affiliate you will be notified.
|
| 442 |
+
|
| 443 |
+
You agree to cooperate with NVIDIA and provide reasonably
|
| 444 |
+
requested information to verify your compliance with this
|
| 445 |
+
Agreement.
|
| 446 |
+
|
| 447 |
+
This Agreement will be governed in all respects by the laws of
|
| 448 |
+
the United States and of the State of Delaware as those laws
|
| 449 |
+
are applied to contracts entered into and performed entirely
|
| 450 |
+
within Delaware by Delaware residents, without regard to the
|
| 451 |
+
conflicts of laws principles. The United Nations Convention on
|
| 452 |
+
Contracts for the International Sale of Goods is specifically
|
| 453 |
+
disclaimed. You agree to all terms of this Agreement in the
|
| 454 |
+
English language.
|
| 455 |
+
|
| 456 |
+
The state or federal courts residing in Santa Clara County,
|
| 457 |
+
California shall have exclusive jurisdiction over any dispute
|
| 458 |
+
or claim arising out of this Agreement. Notwithstanding this,
|
| 459 |
+
you agree that NVIDIA shall still be allowed to apply for
|
| 460 |
+
injunctive remedies or an equivalent type of urgent legal
|
| 461 |
+
relief in any jurisdiction.
|
| 462 |
+
|
| 463 |
+
If any court of competent jurisdiction determines that any
|
| 464 |
+
provision of this Agreement is illegal, invalid or
|
| 465 |
+
unenforceable, such provision will be construed as limited to
|
| 466 |
+
the extent necessary to be consistent with and fully
|
| 467 |
+
enforceable under the law and the remaining provisions will
|
| 468 |
+
remain in full force and effect. Unless otherwise specified,
|
| 469 |
+
remedies are cumulative.
|
| 470 |
+
|
| 471 |
+
Each party acknowledges and agrees that the other is an
|
| 472 |
+
independent contractor in the performance of this Agreement.
|
| 473 |
+
|
| 474 |
+
The SDK has been developed entirely at private expense and is
|
| 475 |
+
“commercial items” consisting of “commercial computer
|
| 476 |
+
software” and “commercial computer software
|
| 477 |
+
documentation” provided with RESTRICTED RIGHTS. Use,
|
| 478 |
+
duplication or disclosure by the U.S. Government or a U.S.
|
| 479 |
+
Government subcontractor is subject to the restrictions in
|
| 480 |
+
this Agreement pursuant to DFARS 227.7202-3(a) or as set forth
|
| 481 |
+
in subparagraphs (c)(1) and (2) of the Commercial Computer
|
| 482 |
+
Software - Restricted Rights clause at FAR 52.227-19, as
|
| 483 |
+
applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas
|
| 484 |
+
Expressway, Santa Clara, CA 95051.
|
| 485 |
+
|
| 486 |
+
The SDK is subject to United States export laws and
|
| 487 |
+
regulations. You agree that you will not ship, transfer or
|
| 488 |
+
export the SDK into any country, or use the SDK in any manner,
|
| 489 |
+
prohibited by the United States Bureau of Industry and
|
| 490 |
+
Security or economic sanctions regulations administered by the
|
| 491 |
+
U.S. Department of Treasury’s Office of Foreign Assets
|
| 492 |
+
Control (OFAC), or any applicable export laws, restrictions or
|
| 493 |
+
regulations. These laws include restrictions on destinations,
|
| 494 |
+
end users and end use. By accepting this Agreement, you
|
| 495 |
+
confirm that you are not a resident or citizen of any country
|
| 496 |
+
currently embargoed by the U.S. and that you are not otherwise
|
| 497 |
+
prohibited from receiving the SDK.
|
| 498 |
+
|
| 499 |
+
Any notice delivered by NVIDIA to you under this Agreement
|
| 500 |
+
will be delivered via mail, email or fax. You agree that any
|
| 501 |
+
notices that NVIDIA sends you electronically will satisfy any
|
| 502 |
+
legal communication requirements. Please direct your legal
|
| 503 |
+
notices or other correspondence to NVIDIA Corporation, 2788
|
| 504 |
+
San Tomas Expressway, Santa Clara, California 95051, United
|
| 505 |
+
States of America, Attention: Legal Department.
|
| 506 |
+
|
| 507 |
+
This Agreement and any exhibits incorporated into this
|
| 508 |
+
Agreement constitute the entire agreement of the parties with
|
| 509 |
+
respect to the subject matter of this Agreement and supersede
|
| 510 |
+
all prior negotiations or documentation exchanged between the
|
| 511 |
+
parties relating to this SDK license. Any additional and/or
|
| 512 |
+
conflicting terms on documents issued by you are null, void,
|
| 513 |
+
and invalid. Any amendment or waiver under this Agreement
|
| 514 |
+
shall be in writing and signed by representatives of both
|
| 515 |
+
parties.
|
| 516 |
+
|
| 517 |
+
|
| 518 |
+
2. CUDA Toolkit Supplement to Software License Agreement for
|
| 519 |
+
NVIDIA Software Development Kits
|
| 520 |
+
------------------------------------------------------------
|
| 521 |
+
|
| 522 |
+
|
| 523 |
+
Release date: August 16, 2018
|
| 524 |
+
-----------------------------
|
| 525 |
+
|
| 526 |
+
The terms in this supplement govern your use of the NVIDIA
|
| 527 |
+
CUDA Toolkit SDK under the terms of your license agreement
|
| 528 |
+
(“Agreement”) as modified by this supplement. Capitalized
|
| 529 |
+
terms used but not defined below have the meaning assigned to
|
| 530 |
+
them in the Agreement.
|
| 531 |
+
|
| 532 |
+
This supplement is an exhibit to the Agreement and is
|
| 533 |
+
incorporated as an integral part of the Agreement. In the
|
| 534 |
+
event of conflict between the terms in this supplement and the
|
| 535 |
+
terms in the Agreement, the terms in this supplement govern.
|
| 536 |
+
|
| 537 |
+
|
| 538 |
+
2.1. License Scope
|
| 539 |
+
|
| 540 |
+
The SDK is licensed for you to develop applications only for
|
| 541 |
+
use in systems with NVIDIA GPUs.
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
2.2. Distribution
|
| 545 |
+
|
| 546 |
+
The portions of the SDK that are distributable under the
|
| 547 |
+
Agreement are listed in Attachment A.
|
| 548 |
+
|
| 549 |
+
|
| 550 |
+
2.3. Operating Systems
|
| 551 |
+
|
| 552 |
+
Those portions of the SDK designed exclusively for use on the
|
| 553 |
+
Linux or FreeBSD operating systems, or other operating systems
|
| 554 |
+
derived from the source code to these operating systems, may
|
| 555 |
+
be copied and redistributed for use in accordance with this
|
| 556 |
+
Agreement, provided that the object code files are not
|
| 557 |
+
modified in any way (except for unzipping of compressed
|
| 558 |
+
files).
|
| 559 |
+
|
| 560 |
+
|
| 561 |
+
2.4. Audio and Video Encoders and Decoders
|
| 562 |
+
|
| 563 |
+
You acknowledge and agree that it is your sole responsibility
|
| 564 |
+
to obtain any additional third-party licenses required to
|
| 565 |
+
make, have made, use, have used, sell, import, and offer for
|
| 566 |
+
sale your products or services that include or incorporate any
|
| 567 |
+
third-party software and content relating to audio and/or
|
| 568 |
+
video encoders and decoders from, including but not limited
|
| 569 |
+
to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A.,
|
| 570 |
+
MPEG-LA, and Coding Technologies. NVIDIA does not grant to you
|
| 571 |
+
under this Agreement any necessary patent or other rights with
|
| 572 |
+
respect to any audio and/or video encoders and decoders.
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
2.5. Licensing
|
| 576 |
+
|
| 577 |
+
If the distribution terms in this Agreement are not suitable
|
| 578 |
+
for your organization, or for any questions regarding this
|
| 579 |
+
Agreement, please contact NVIDIA at
|
| 580 |
+
nvidia-compute-license-questions@nvidia.com.
|
| 581 |
+
|
| 582 |
+
|
| 583 |
+
2.6. Attachment A
|
| 584 |
+
|
| 585 |
+
The following portions of the SDK are distributable under the
|
| 586 |
+
Agreement:
|
| 587 |
+
|
| 588 |
+
Component
|
| 589 |
+
|
| 590 |
+
CUDA Runtime
|
| 591 |
+
|
| 592 |
+
Windows
|
| 593 |
+
|
| 594 |
+
cudart.dll, cudart_static.lib, cudadevrt.lib
|
| 595 |
+
|
| 596 |
+
Mac OSX
|
| 597 |
+
|
| 598 |
+
libcudart.dylib, libcudart_static.a, libcudadevrt.a
|
| 599 |
+
|
| 600 |
+
Linux
|
| 601 |
+
|
| 602 |
+
libcudart.so, libcudart_static.a, libcudadevrt.a
|
| 603 |
+
|
| 604 |
+
Android
|
| 605 |
+
|
| 606 |
+
libcudart.so, libcudart_static.a, libcudadevrt.a
|
| 607 |
+
|
| 608 |
+
Component
|
| 609 |
+
|
| 610 |
+
CUDA FFT Library
|
| 611 |
+
|
| 612 |
+
Windows
|
| 613 |
+
|
| 614 |
+
cufft.dll, cufftw.dll, cufft.lib, cufftw.lib
|
| 615 |
+
|
| 616 |
+
Mac OSX
|
| 617 |
+
|
| 618 |
+
libcufft.dylib, libcufft_static.a, libcufftw.dylib,
|
| 619 |
+
libcufftw_static.a
|
| 620 |
+
|
| 621 |
+
Linux
|
| 622 |
+
|
| 623 |
+
libcufft.so, libcufft_static.a, libcufftw.so,
|
| 624 |
+
libcufftw_static.a
|
| 625 |
+
|
| 626 |
+
Android
|
| 627 |
+
|
| 628 |
+
libcufft.so, libcufft_static.a, libcufftw.so,
|
| 629 |
+
libcufftw_static.a
|
| 630 |
+
|
| 631 |
+
Component
|
| 632 |
+
|
| 633 |
+
CUDA BLAS Library
|
| 634 |
+
|
| 635 |
+
Windows
|
| 636 |
+
|
| 637 |
+
cublas.dll, cublasLt.dll
|
| 638 |
+
|
| 639 |
+
Mac OSX
|
| 640 |
+
|
| 641 |
+
libcublas.dylib, libcublasLt.dylib, libcublas_static.a,
|
| 642 |
+
libcublasLt_static.a
|
| 643 |
+
|
| 644 |
+
Linux
|
| 645 |
+
|
| 646 |
+
libcublas.so, libcublasLt.so, libcublas_static.a,
|
| 647 |
+
libcublasLt_static.a
|
| 648 |
+
|
| 649 |
+
Android
|
| 650 |
+
|
| 651 |
+
libcublas.so, libcublasLt.so, libcublas_static.a,
|
| 652 |
+
libcublasLt_static.a
|
| 653 |
+
|
| 654 |
+
Component
|
| 655 |
+
|
| 656 |
+
NVIDIA "Drop-in" BLAS Library
|
| 657 |
+
|
| 658 |
+
Windows
|
| 659 |
+
|
| 660 |
+
nvblas.dll
|
| 661 |
+
|
| 662 |
+
Mac OSX
|
| 663 |
+
|
| 664 |
+
libnvblas.dylib
|
| 665 |
+
|
| 666 |
+
Linux
|
| 667 |
+
|
| 668 |
+
libnvblas.so
|
| 669 |
+
|
| 670 |
+
Component
|
| 671 |
+
|
| 672 |
+
CUDA Sparse Matrix Library
|
| 673 |
+
|
| 674 |
+
Windows
|
| 675 |
+
|
| 676 |
+
cusparse.dll, cusparse.lib
|
| 677 |
+
|
| 678 |
+
Mac OSX
|
| 679 |
+
|
| 680 |
+
libcusparse.dylib, libcusparse_static.a
|
| 681 |
+
|
| 682 |
+
Linux
|
| 683 |
+
|
| 684 |
+
libcusparse.so, libcusparse_static.a
|
| 685 |
+
|
| 686 |
+
Android
|
| 687 |
+
|
| 688 |
+
libcusparse.so, libcusparse_static.a
|
| 689 |
+
|
| 690 |
+
Component
|
| 691 |
+
|
| 692 |
+
CUDA Linear Solver Library
|
| 693 |
+
|
| 694 |
+
Windows
|
| 695 |
+
|
| 696 |
+
cusolver.dll, cusolver.lib
|
| 697 |
+
|
| 698 |
+
Mac OSX
|
| 699 |
+
|
| 700 |
+
libcusolver.dylib, libcusolver_static.a
|
| 701 |
+
|
| 702 |
+
Linux
|
| 703 |
+
|
| 704 |
+
libcusolver.so, libcusolver_static.a
|
| 705 |
+
|
| 706 |
+
Android
|
| 707 |
+
|
| 708 |
+
libcusolver.so, libcusolver_static.a
|
| 709 |
+
|
| 710 |
+
Component
|
| 711 |
+
|
| 712 |
+
CUDA Random Number Generation Library
|
| 713 |
+
|
| 714 |
+
Windows
|
| 715 |
+
|
| 716 |
+
curand.dll, curand.lib
|
| 717 |
+
|
| 718 |
+
Mac OSX
|
| 719 |
+
|
| 720 |
+
libcurand.dylib, libcurand_static.a
|
| 721 |
+
|
| 722 |
+
Linux
|
| 723 |
+
|
| 724 |
+
libcurand.so, libcurand_static.a
|
| 725 |
+
|
| 726 |
+
Android
|
| 727 |
+
|
| 728 |
+
libcurand.so, libcurand_static.a
|
| 729 |
+
|
| 730 |
+
Component
|
| 731 |
+
|
| 732 |
+
CUDA Accelerated Graph Library
|
| 733 |
+
|
| 734 |
+
Component
|
| 735 |
+
|
| 736 |
+
NVIDIA Performance Primitives Library
|
| 737 |
+
|
| 738 |
+
Windows
|
| 739 |
+
|
| 740 |
+
nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll,
|
| 741 |
+
nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll,
|
| 742 |
+
nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib,
|
| 743 |
+
nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll,
|
| 744 |
+
nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib
|
| 745 |
+
|
| 746 |
+
Mac OSX
|
| 747 |
+
|
| 748 |
+
libnppc.dylib, libnppc_static.a, libnppial.dylib,
|
| 749 |
+
libnppial_static.a, libnppicc.dylib, libnppicc_static.a,
|
| 750 |
+
libnppicom.dylib, libnppicom_static.a, libnppidei.dylib,
|
| 751 |
+
libnppidei_static.a, libnppif.dylib, libnppif_static.a,
|
| 752 |
+
libnppig.dylib, libnppig_static.a, libnppim.dylib,
|
| 753 |
+
libnppisu_static.a, libnppitc.dylib, libnppitc_static.a,
|
| 754 |
+
libnpps.dylib, libnpps_static.a
|
| 755 |
+
|
| 756 |
+
Linux
|
| 757 |
+
|
| 758 |
+
libnppc.so, libnppc_static.a, libnppial.so,
|
| 759 |
+
libnppial_static.a, libnppicc.so, libnppicc_static.a,
|
| 760 |
+
libnppicom.so, libnppicom_static.a, libnppidei.so,
|
| 761 |
+
libnppidei_static.a, libnppif.so, libnppif_static.a
|
| 762 |
+
libnppig.so, libnppig_static.a, libnppim.so,
|
| 763 |
+
libnppim_static.a, libnppist.so, libnppist_static.a,
|
| 764 |
+
libnppisu.so, libnppisu_static.a, libnppitc.so
|
| 765 |
+
libnppitc_static.a, libnpps.so, libnpps_static.a
|
| 766 |
+
|
| 767 |
+
Android
|
| 768 |
+
|
| 769 |
+
libnppc.so, libnppc_static.a, libnppial.so,
|
| 770 |
+
libnppial_static.a, libnppicc.so, libnppicc_static.a,
|
| 771 |
+
libnppicom.so, libnppicom_static.a, libnppidei.so,
|
| 772 |
+
libnppidei_static.a, libnppif.so, libnppif_static.a
|
| 773 |
+
libnppig.so, libnppig_static.a, libnppim.so,
|
| 774 |
+
libnppim_static.a, libnppist.so, libnppist_static.a,
|
| 775 |
+
libnppisu.so, libnppisu_static.a, libnppitc.so
|
| 776 |
+
libnppitc_static.a, libnpps.so, libnpps_static.a
|
| 777 |
+
|
| 778 |
+
Component
|
| 779 |
+
|
| 780 |
+
NVIDIA JPEG Library
|
| 781 |
+
|
| 782 |
+
Linux
|
| 783 |
+
|
| 784 |
+
libnvjpeg.so, libnvjpeg_static.a
|
| 785 |
+
|
| 786 |
+
Component
|
| 787 |
+
|
| 788 |
+
Internal common library required for statically linking to
|
| 789 |
+
cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP
|
| 790 |
+
|
| 791 |
+
Mac OSX
|
| 792 |
+
|
| 793 |
+
libculibos.a
|
| 794 |
+
|
| 795 |
+
Linux
|
| 796 |
+
|
| 797 |
+
libculibos.a
|
| 798 |
+
|
| 799 |
+
Component
|
| 800 |
+
|
| 801 |
+
NVIDIA Runtime Compilation Library and Header
|
| 802 |
+
|
| 803 |
+
All
|
| 804 |
+
|
| 805 |
+
nvrtc.h
|
| 806 |
+
|
| 807 |
+
Windows
|
| 808 |
+
|
| 809 |
+
nvrtc.dll, nvrtc-builtins.dll
|
| 810 |
+
|
| 811 |
+
Mac OSX
|
| 812 |
+
|
| 813 |
+
libnvrtc.dylib, libnvrtc-builtins.dylib
|
| 814 |
+
|
| 815 |
+
Linux
|
| 816 |
+
|
| 817 |
+
libnvrtc.so, libnvrtc-builtins.so
|
| 818 |
+
|
| 819 |
+
Component
|
| 820 |
+
|
| 821 |
+
NVIDIA Optimizing Compiler Library
|
| 822 |
+
|
| 823 |
+
Windows
|
| 824 |
+
|
| 825 |
+
nvvm.dll
|
| 826 |
+
|
| 827 |
+
Mac OSX
|
| 828 |
+
|
| 829 |
+
libnvvm.dylib
|
| 830 |
+
|
| 831 |
+
Linux
|
| 832 |
+
|
| 833 |
+
libnvvm.so
|
| 834 |
+
|
| 835 |
+
Component
|
| 836 |
+
|
| 837 |
+
NVIDIA Common Device Math Functions Library
|
| 838 |
+
|
| 839 |
+
Windows
|
| 840 |
+
|
| 841 |
+
libdevice.10.bc
|
| 842 |
+
|
| 843 |
+
Mac OSX
|
| 844 |
+
|
| 845 |
+
libdevice.10.bc
|
| 846 |
+
|
| 847 |
+
Linux
|
| 848 |
+
|
| 849 |
+
libdevice.10.bc
|
| 850 |
+
|
| 851 |
+
Component
|
| 852 |
+
|
| 853 |
+
CUDA Occupancy Calculation Header Library
|
| 854 |
+
|
| 855 |
+
All
|
| 856 |
+
|
| 857 |
+
cuda_occupancy.h
|
| 858 |
+
|
| 859 |
+
Component
|
| 860 |
+
|
| 861 |
+
CUDA Half Precision Headers
|
| 862 |
+
|
| 863 |
+
All
|
| 864 |
+
|
| 865 |
+
cuda_fp16.h, cuda_fp16.hpp
|
| 866 |
+
|
| 867 |
+
Component
|
| 868 |
+
|
| 869 |
+
CUDA Profiling Tools Interface (CUPTI) Library
|
| 870 |
+
|
| 871 |
+
Windows
|
| 872 |
+
|
| 873 |
+
cupti.dll
|
| 874 |
+
|
| 875 |
+
Mac OSX
|
| 876 |
+
|
| 877 |
+
libcupti.dylib
|
| 878 |
+
|
| 879 |
+
Linux
|
| 880 |
+
|
| 881 |
+
libcupti.so
|
| 882 |
+
|
| 883 |
+
Component
|
| 884 |
+
|
| 885 |
+
NVIDIA Tools Extension Library
|
| 886 |
+
|
| 887 |
+
Windows
|
| 888 |
+
|
| 889 |
+
nvToolsExt.dll, nvToolsExt.lib
|
| 890 |
+
|
| 891 |
+
Mac OSX
|
| 892 |
+
|
| 893 |
+
libnvToolsExt.dylib
|
| 894 |
+
|
| 895 |
+
Linux
|
| 896 |
+
|
| 897 |
+
libnvToolsExt.so
|
| 898 |
+
|
| 899 |
+
Component
|
| 900 |
+
|
| 901 |
+
NVIDIA CUDA Driver Libraries
|
| 902 |
+
|
| 903 |
+
Linux
|
| 904 |
+
|
| 905 |
+
libcuda.so, libnvidia-fatbinaryloader.so,
|
| 906 |
+
libnvidia-ptxjitcompiler.so
|
| 907 |
+
|
| 908 |
+
The NVIDIA CUDA Driver Libraries are only distributable in
|
| 909 |
+
applications that meet this criteria:
|
| 910 |
+
|
| 911 |
+
1. The application was developed starting from a NVIDIA CUDA
|
| 912 |
+
container obtained from Docker Hub or the NVIDIA GPU
|
| 913 |
+
Cloud, and
|
| 914 |
+
|
| 915 |
+
2. The resulting application is packaged as a Docker
|
| 916 |
+
container and distributed to users on Docker Hub or the
|
| 917 |
+
NVIDIA GPU Cloud only.
|
| 918 |
+
|
| 919 |
+
|
| 920 |
+
2.7. Attachment B
|
| 921 |
+
|
| 922 |
+
|
| 923 |
+
Additional Licensing Obligations
|
| 924 |
+
|
| 925 |
+
The following third party components included in the SOFTWARE
|
| 926 |
+
are licensed to Licensee pursuant to the following terms and
|
| 927 |
+
conditions:
|
| 928 |
+
|
| 929 |
+
1. Licensee's use of the GDB third party component is
|
| 930 |
+
subject to the terms and conditions of GNU GPL v3:
|
| 931 |
+
|
| 932 |
+
This product includes copyrighted third-party software licensed
|
| 933 |
+
under the terms of the GNU General Public License v3 ("GPL v3").
|
| 934 |
+
All third-party software packages are copyright by their respective
|
| 935 |
+
authors. GPL v3 terms and conditions are hereby incorporated into
|
| 936 |
+
the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt
|
| 937 |
+
|
| 938 |
+
Consistent with these licensing requirements, the software
|
| 939 |
+
listed below is provided under the terms of the specified
|
| 940 |
+
open source software licenses. To obtain source code for
|
| 941 |
+
software provided under licenses that require
|
| 942 |
+
redistribution of source code, including the GNU General
|
| 943 |
+
Public License (GPL) and GNU Lesser General Public License
|
| 944 |
+
(LGPL), contact oss-requests@nvidia.com. This offer is
|
| 945 |
+
valid for a period of three (3) years from the date of the
|
| 946 |
+
distribution of this product by NVIDIA CORPORATION.
|
| 947 |
+
|
| 948 |
+
Component License
|
| 949 |
+
CUDA-GDB GPL v3
|
| 950 |
+
|
| 951 |
+
2. Licensee represents and warrants that any and all third
|
| 952 |
+
party licensing and/or royalty payment obligations in
|
| 953 |
+
connection with Licensee's use of the H.264 video codecs
|
| 954 |
+
are solely the responsibility of Licensee.
|
| 955 |
+
|
| 956 |
+
3. Licensee's use of the Thrust library is subject to the
|
| 957 |
+
terms and conditions of the Apache License Version 2.0.
|
| 958 |
+
All third-party software packages are copyright by their
|
| 959 |
+
respective authors. Apache License Version 2.0 terms and
|
| 960 |
+
conditions are hereby incorporated into the Agreement by
|
| 961 |
+
this reference.
|
| 962 |
+
http://www.apache.org/licenses/LICENSE-2.0.html
|
| 963 |
+
|
| 964 |
+
In addition, Licensee acknowledges the following notice:
|
| 965 |
+
Thrust includes source code from the Boost Iterator,
|
| 966 |
+
Tuple, System, and Random Number libraries.
|
| 967 |
+
|
| 968 |
+
Boost Software License - Version 1.0 - August 17th, 2003
|
| 969 |
+
. . . .
|
| 970 |
+
|
| 971 |
+
Permission is hereby granted, free of charge, to any person or
|
| 972 |
+
organization obtaining a copy of the software and accompanying
|
| 973 |
+
documentation covered by this license (the "Software") to use,
|
| 974 |
+
reproduce, display, distribute, execute, and transmit the Software,
|
| 975 |
+
and to prepare derivative works of the Software, and to permit
|
| 976 |
+
third-parties to whom the Software is furnished to do so, all
|
| 977 |
+
subject to the following:
|
| 978 |
+
|
| 979 |
+
The copyright notices in the Software and this entire statement,
|
| 980 |
+
including the above license grant, this restriction and the following
|
| 981 |
+
disclaimer, must be included in all copies of the Software, in whole
|
| 982 |
+
or in part, and all derivative works of the Software, unless such
|
| 983 |
+
copies or derivative works are solely in the form of machine-executable
|
| 984 |
+
object code generated by a source language processor.
|
| 985 |
+
|
| 986 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 987 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 988 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
|
| 989 |
+
NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
|
| 990 |
+
ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
|
| 991 |
+
OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
|
| 992 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
| 993 |
+
OTHER DEALINGS IN THE SOFTWARE.
|
| 994 |
+
|
| 995 |
+
4. Licensee's use of the LLVM third party component is
|
| 996 |
+
subject to the following terms and conditions:
|
| 997 |
+
|
| 998 |
+
======================================================
|
| 999 |
+
LLVM Release License
|
| 1000 |
+
======================================================
|
| 1001 |
+
University of Illinois/NCSA
|
| 1002 |
+
Open Source License
|
| 1003 |
+
|
| 1004 |
+
Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign.
|
| 1005 |
+
All rights reserved.
|
| 1006 |
+
|
| 1007 |
+
Developed by:
|
| 1008 |
+
|
| 1009 |
+
LLVM Team
|
| 1010 |
+
|
| 1011 |
+
University of Illinois at Urbana-Champaign
|
| 1012 |
+
|
| 1013 |
+
http://llvm.org
|
| 1014 |
+
|
| 1015 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 1016 |
+
of this software and associated documentation files (the "Software"), to
|
| 1017 |
+
deal with the Software without restriction, including without limitation the
|
| 1018 |
+
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
| 1019 |
+
sell copies of the Software, and to permit persons to whom the Software is
|
| 1020 |
+
furnished to do so, subject to the following conditions:
|
| 1021 |
+
|
| 1022 |
+
* Redistributions of source code must retain the above copyright notice,
|
| 1023 |
+
this list of conditions and the following disclaimers.
|
| 1024 |
+
|
| 1025 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 1026 |
+
notice, this list of conditions and the following disclaimers in the
|
| 1027 |
+
documentation and/or other materials provided with the distribution.
|
| 1028 |
+
|
| 1029 |
+
* Neither the names of the LLVM Team, University of Illinois at Urbana-
|
| 1030 |
+
Champaign, nor the names of its contributors may be used to endorse or
|
| 1031 |
+
promote products derived from this Software without specific prior
|
| 1032 |
+
written permission.
|
| 1033 |
+
|
| 1034 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 1035 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 1036 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
| 1037 |
+
THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
| 1038 |
+
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
| 1039 |
+
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
| 1040 |
+
DEALINGS WITH THE SOFTWARE.
|
| 1041 |
+
|
| 1042 |
+
5. Licensee's use (e.g. nvprof) of the PCRE third party
|
| 1043 |
+
component is subject to the following terms and
|
| 1044 |
+
conditions:
|
| 1045 |
+
|
| 1046 |
+
------------
|
| 1047 |
+
PCRE LICENCE
|
| 1048 |
+
------------
|
| 1049 |
+
PCRE is a library of functions to support regular expressions whose syntax
|
| 1050 |
+
and semantics are as close as possible to those of the Perl 5 language.
|
| 1051 |
+
Release 8 of PCRE is distributed under the terms of the "BSD" licence, as
|
| 1052 |
+
specified below. The documentation for PCRE, supplied in the "doc"
|
| 1053 |
+
directory, is distributed under the same terms as the software itself. The
|
| 1054 |
+
basic library functions are written in C and are freestanding. Also
|
| 1055 |
+
included in the distribution is a set of C++ wrapper functions, and a just-
|
| 1056 |
+
in-time compiler that can be used to optimize pattern matching. These are
|
| 1057 |
+
both optional features that can be omitted when the library is built.
|
| 1058 |
+
|
| 1059 |
+
THE BASIC LIBRARY FUNCTIONS
|
| 1060 |
+
---------------------------
|
| 1061 |
+
Written by: Philip Hazel
|
| 1062 |
+
Email local part: ph10
|
| 1063 |
+
Email domain: cam.ac.uk
|
| 1064 |
+
University of Cambridge Computing Service,
|
| 1065 |
+
Cambridge, England.
|
| 1066 |
+
Copyright (c) 1997-2012 University of Cambridge
|
| 1067 |
+
All rights reserved.
|
| 1068 |
+
|
| 1069 |
+
PCRE JUST-IN-TIME COMPILATION SUPPORT
|
| 1070 |
+
-------------------------------------
|
| 1071 |
+
Written by: Zoltan Herczeg
|
| 1072 |
+
Email local part: hzmester
|
| 1073 |
+
Emain domain: freemail.hu
|
| 1074 |
+
Copyright(c) 2010-2012 Zoltan Herczeg
|
| 1075 |
+
All rights reserved.
|
| 1076 |
+
|
| 1077 |
+
STACK-LESS JUST-IN-TIME COMPILER
|
| 1078 |
+
--------------------------------
|
| 1079 |
+
Written by: Zoltan Herczeg
|
| 1080 |
+
Email local part: hzmester
|
| 1081 |
+
Emain domain: freemail.hu
|
| 1082 |
+
Copyright(c) 2009-2012 Zoltan Herczeg
|
| 1083 |
+
All rights reserved.
|
| 1084 |
+
|
| 1085 |
+
THE C++ WRAPPER FUNCTIONS
|
| 1086 |
+
-------------------------
|
| 1087 |
+
Contributed by: Google Inc.
|
| 1088 |
+
Copyright (c) 2007-2012, Google Inc.
|
| 1089 |
+
All rights reserved.
|
| 1090 |
+
|
| 1091 |
+
THE "BSD" LICENCE
|
| 1092 |
+
-----------------
|
| 1093 |
+
Redistribution and use in source and binary forms, with or without
|
| 1094 |
+
modification, are permitted provided that the following conditions are met:
|
| 1095 |
+
|
| 1096 |
+
* Redistributions of source code must retain the above copyright notice,
|
| 1097 |
+
this list of conditions and the following disclaimer.
|
| 1098 |
+
|
| 1099 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 1100 |
+
notice, this list of conditions and the following disclaimer in the
|
| 1101 |
+
documentation and/or other materials provided with the distribution.
|
| 1102 |
+
|
| 1103 |
+
* Neither the name of the University of Cambridge nor the name of Google
|
| 1104 |
+
Inc. nor the names of their contributors may be used to endorse or
|
| 1105 |
+
promote products derived from this software without specific prior
|
| 1106 |
+
written permission.
|
| 1107 |
+
|
| 1108 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
| 1109 |
+
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
| 1110 |
+
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
| 1111 |
+
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
|
| 1112 |
+
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
| 1113 |
+
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
| 1114 |
+
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
| 1115 |
+
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
| 1116 |
+
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
| 1117 |
+
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 1118 |
+
POSSIBILITY OF SUCH DAMAGE.
|
| 1119 |
+
|
| 1120 |
+
6. Some of the cuBLAS library routines were written by or
|
| 1121 |
+
derived from code written by Vasily Volkov and are subject
|
| 1122 |
+
to the Modified Berkeley Software Distribution License as
|
| 1123 |
+
follows:
|
| 1124 |
+
|
| 1125 |
+
Copyright (c) 2007-2009, Regents of the University of California
|
| 1126 |
+
|
| 1127 |
+
All rights reserved.
|
| 1128 |
+
|
| 1129 |
+
Redistribution and use in source and binary forms, with or without
|
| 1130 |
+
modification, are permitted provided that the following conditions are
|
| 1131 |
+
met:
|
| 1132 |
+
* Redistributions of source code must retain the above copyright
|
| 1133 |
+
notice, this list of conditions and the following disclaimer.
|
| 1134 |
+
* Redistributions in binary form must reproduce the above
|
| 1135 |
+
copyright notice, this list of conditions and the following
|
| 1136 |
+
disclaimer in the documentation and/or other materials provided
|
| 1137 |
+
with the distribution.
|
| 1138 |
+
* Neither the name of the University of California, Berkeley nor
|
| 1139 |
+
the names of its contributors may be used to endorse or promote
|
| 1140 |
+
products derived from this software without specific prior
|
| 1141 |
+
written permission.
|
| 1142 |
+
|
| 1143 |
+
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
|
| 1144 |
+
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
| 1145 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 1146 |
+
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
|
| 1147 |
+
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
| 1148 |
+
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 1149 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
| 1150 |
+
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
| 1151 |
+
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
|
| 1152 |
+
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 1153 |
+
POSSIBILITY OF SUCH DAMAGE.
|
| 1154 |
+
|
| 1155 |
+
7. Some of the cuBLAS library routines were written by or
|
| 1156 |
+
derived from code written by Davide Barbieri and are
|
| 1157 |
+
subject to the Modified Berkeley Software Distribution
|
| 1158 |
+
License as follows:
|
| 1159 |
+
|
| 1160 |
+
Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata.
|
| 1161 |
+
|
| 1162 |
+
All rights reserved.
|
| 1163 |
+
|
| 1164 |
+
Redistribution and use in source and binary forms, with or without
|
| 1165 |
+
modification, are permitted provided that the following conditions are
|
| 1166 |
+
met:
|
| 1167 |
+
* Redistributions of source code must retain the above copyright
|
| 1168 |
+
notice, this list of conditions and the following disclaimer.
|
| 1169 |
+
* Redistributions in binary form must reproduce the above
|
| 1170 |
+
copyright notice, this list of conditions and the following
|
| 1171 |
+
disclaimer in the documentation and/or other materials provided
|
| 1172 |
+
with the distribution.
|
| 1173 |
+
* The name of the author may not be used to endorse or promote
|
| 1174 |
+
products derived from this software without specific prior
|
| 1175 |
+
written permission.
|
| 1176 |
+
|
| 1177 |
+
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
|
| 1178 |
+
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
| 1179 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 1180 |
+
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
|
| 1181 |
+
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
| 1182 |
+
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 1183 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
| 1184 |
+
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
| 1185 |
+
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
|
| 1186 |
+
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 1187 |
+
POSSIBILITY OF SUCH DAMAGE.
|
| 1188 |
+
|
| 1189 |
+
8. Some of the cuBLAS library routines were derived from
|
| 1190 |
+
code developed by the University of Tennessee and are
|
| 1191 |
+
subject to the Modified Berkeley Software Distribution
|
| 1192 |
+
License as follows:
|
| 1193 |
+
|
| 1194 |
+
Copyright (c) 2010 The University of Tennessee.
|
| 1195 |
+
|
| 1196 |
+
All rights reserved.
|
| 1197 |
+
|
| 1198 |
+
Redistribution and use in source and binary forms, with or without
|
| 1199 |
+
modification, are permitted provided that the following conditions are
|
| 1200 |
+
met:
|
| 1201 |
+
* Redistributions of source code must retain the above copyright
|
| 1202 |
+
notice, this list of conditions and the following disclaimer.
|
| 1203 |
+
* Redistributions in binary form must reproduce the above
|
| 1204 |
+
copyright notice, this list of conditions and the following
|
| 1205 |
+
disclaimer listed in this license in the documentation and/or
|
| 1206 |
+
other materials provided with the distribution.
|
| 1207 |
+
* Neither the name of the copyright holders nor the names of its
|
| 1208 |
+
contributors may be used to endorse or promote products derived
|
| 1209 |
+
from this software without specific prior written permission.
|
| 1210 |
+
|
| 1211 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1212 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1213 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1214 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1215 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1216 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1217 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1218 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1219 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1220 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1221 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1222 |
+
|
| 1223 |
+
9. Some of the cuBLAS library routines were written by or
|
| 1224 |
+
derived from code written by Jonathan Hogg and are subject
|
| 1225 |
+
to the Modified Berkeley Software Distribution License as
|
| 1226 |
+
follows:
|
| 1227 |
+
|
| 1228 |
+
Copyright (c) 2012, The Science and Technology Facilities Council (STFC).
|
| 1229 |
+
|
| 1230 |
+
All rights reserved.
|
| 1231 |
+
|
| 1232 |
+
Redistribution and use in source and binary forms, with or without
|
| 1233 |
+
modification, are permitted provided that the following conditions are
|
| 1234 |
+
met:
|
| 1235 |
+
* Redistributions of source code must retain the above copyright
|
| 1236 |
+
notice, this list of conditions and the following disclaimer.
|
| 1237 |
+
* Redistributions in binary form must reproduce the above
|
| 1238 |
+
copyright notice, this list of conditions and the following
|
| 1239 |
+
disclaimer in the documentation and/or other materials provided
|
| 1240 |
+
with the distribution.
|
| 1241 |
+
* Neither the name of the STFC nor the names of its contributors
|
| 1242 |
+
may be used to endorse or promote products derived from this
|
| 1243 |
+
software without specific prior written permission.
|
| 1244 |
+
|
| 1245 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1246 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1247 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1248 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE STFC BE
|
| 1249 |
+
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
| 1250 |
+
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
| 1251 |
+
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
|
| 1252 |
+
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
| 1253 |
+
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
|
| 1254 |
+
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
|
| 1255 |
+
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1256 |
+
|
| 1257 |
+
10. Some of the cuBLAS library routines were written by or
|
| 1258 |
+
derived from code written by Ahmad M. Abdelfattah, David
|
| 1259 |
+
Keyes, and Hatem Ltaief, and are subject to the Apache
|
| 1260 |
+
License, Version 2.0, as follows:
|
| 1261 |
+
|
| 1262 |
+
-- (C) Copyright 2013 King Abdullah University of Science and Technology
|
| 1263 |
+
Authors:
|
| 1264 |
+
Ahmad Abdelfattah (ahmad.ahmad@kaust.edu.sa)
|
| 1265 |
+
David Keyes (david.keyes@kaust.edu.sa)
|
| 1266 |
+
Hatem Ltaief (hatem.ltaief@kaust.edu.sa)
|
| 1267 |
+
|
| 1268 |
+
Redistribution and use in source and binary forms, with or without
|
| 1269 |
+
modification, are permitted provided that the following conditions
|
| 1270 |
+
are met:
|
| 1271 |
+
|
| 1272 |
+
* Redistributions of source code must retain the above copyright
|
| 1273 |
+
notice, this list of conditions and the following disclaimer.
|
| 1274 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 1275 |
+
notice, this list of conditions and the following disclaimer in the
|
| 1276 |
+
documentation and/or other materials provided with the distribution.
|
| 1277 |
+
* Neither the name of the King Abdullah University of Science and
|
| 1278 |
+
Technology nor the names of its contributors may be used to endorse
|
| 1279 |
+
or promote products derived from this software without specific prior
|
| 1280 |
+
written permission.
|
| 1281 |
+
|
| 1282 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1283 |
+
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1284 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1285 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1286 |
+
HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1287 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1288 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1289 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1290 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1291 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1292 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
|
| 1293 |
+
|
| 1294 |
+
11. Some of the cuSPARSE library routines were written by or
|
| 1295 |
+
derived from code written by Li-Wen Chang and are subject
|
| 1296 |
+
to the NCSA Open Source License as follows:
|
| 1297 |
+
|
| 1298 |
+
Copyright (c) 2012, University of Illinois.
|
| 1299 |
+
|
| 1300 |
+
All rights reserved.
|
| 1301 |
+
|
| 1302 |
+
Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu
|
| 1303 |
+
|
| 1304 |
+
Permission is hereby granted, free of charge, to any person obtaining
|
| 1305 |
+
a copy of this software and associated documentation files (the
|
| 1306 |
+
"Software"), to deal with the Software without restriction, including
|
| 1307 |
+
without limitation the rights to use, copy, modify, merge, publish,
|
| 1308 |
+
distribute, sublicense, and/or sell copies of the Software, and to
|
| 1309 |
+
permit persons to whom the Software is furnished to do so, subject to
|
| 1310 |
+
the following conditions:
|
| 1311 |
+
* Redistributions of source code must retain the above copyright
|
| 1312 |
+
notice, this list of conditions and the following disclaimer.
|
| 1313 |
+
* Redistributions in binary form must reproduce the above
|
| 1314 |
+
copyright notice, this list of conditions and the following
|
| 1315 |
+
disclaimers in the documentation and/or other materials provided
|
| 1316 |
+
with the distribution.
|
| 1317 |
+
* Neither the names of IMPACT Group, University of Illinois, nor
|
| 1318 |
+
the names of its contributors may be used to endorse or promote
|
| 1319 |
+
products derived from this Software without specific prior
|
| 1320 |
+
written permission.
|
| 1321 |
+
|
| 1322 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 1323 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 1324 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
| 1325 |
+
NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT
|
| 1326 |
+
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
| 1327 |
+
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
| 1328 |
+
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
|
| 1329 |
+
SOFTWARE.
|
| 1330 |
+
|
| 1331 |
+
12. Some of the cuRAND library routines were written by or
|
| 1332 |
+
derived from code written by Mutsuo Saito and Makoto
|
| 1333 |
+
Matsumoto and are subject to the following license:
|
| 1334 |
+
|
| 1335 |
+
Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima
|
| 1336 |
+
University. All rights reserved.
|
| 1337 |
+
|
| 1338 |
+
Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima
|
| 1339 |
+
University and University of Tokyo. All rights reserved.
|
| 1340 |
+
|
| 1341 |
+
Redistribution and use in source and binary forms, with or without
|
| 1342 |
+
modification, are permitted provided that the following conditions are
|
| 1343 |
+
met:
|
| 1344 |
+
* Redistributions of source code must retain the above copyright
|
| 1345 |
+
notice, this list of conditions and the following disclaimer.
|
| 1346 |
+
* Redistributions in binary form must reproduce the above
|
| 1347 |
+
copyright notice, this list of conditions and the following
|
| 1348 |
+
disclaimer in the documentation and/or other materials provided
|
| 1349 |
+
with the distribution.
|
| 1350 |
+
* Neither the name of the Hiroshima University nor the names of
|
| 1351 |
+
its contributors may be used to endorse or promote products
|
| 1352 |
+
derived from this software without specific prior written
|
| 1353 |
+
permission.
|
| 1354 |
+
|
| 1355 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1356 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1357 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1358 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1359 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1360 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1361 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1362 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1363 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1364 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1365 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1366 |
+
|
| 1367 |
+
13. Some of the cuRAND library routines were derived from
|
| 1368 |
+
code developed by D. E. Shaw Research and are subject to
|
| 1369 |
+
the following license:
|
| 1370 |
+
|
| 1371 |
+
Copyright 2010-2011, D. E. Shaw Research.
|
| 1372 |
+
|
| 1373 |
+
All rights reserved.
|
| 1374 |
+
|
| 1375 |
+
Redistribution and use in source and binary forms, with or without
|
| 1376 |
+
modification, are permitted provided that the following conditions are
|
| 1377 |
+
met:
|
| 1378 |
+
* Redistributions of source code must retain the above copyright
|
| 1379 |
+
notice, this list of conditions, and the following disclaimer.
|
| 1380 |
+
* Redistributions in binary form must reproduce the above
|
| 1381 |
+
copyright notice, this list of conditions, and the following
|
| 1382 |
+
disclaimer in the documentation and/or other materials provided
|
| 1383 |
+
with the distribution.
|
| 1384 |
+
* Neither the name of D. E. Shaw Research nor the names of its
|
| 1385 |
+
contributors may be used to endorse or promote products derived
|
| 1386 |
+
from this software without specific prior written permission.
|
| 1387 |
+
|
| 1388 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1389 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1390 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1391 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1392 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1393 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1394 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1395 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1396 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1397 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1398 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1399 |
+
|
| 1400 |
+
14. Some of the Math library routines were written by or
|
| 1401 |
+
derived from code developed by Norbert Juffa and are
|
| 1402 |
+
subject to the following license:
|
| 1403 |
+
|
| 1404 |
+
Copyright (c) 2015-2017, Norbert Juffa
|
| 1405 |
+
All rights reserved.
|
| 1406 |
+
|
| 1407 |
+
Redistribution and use in source and binary forms, with or without
|
| 1408 |
+
modification, are permitted provided that the following conditions
|
| 1409 |
+
are met:
|
| 1410 |
+
|
| 1411 |
+
1. Redistributions of source code must retain the above copyright
|
| 1412 |
+
notice, this list of conditions and the following disclaimer.
|
| 1413 |
+
|
| 1414 |
+
2. Redistributions in binary form must reproduce the above copyright
|
| 1415 |
+
notice, this list of conditions and the following disclaimer in the
|
| 1416 |
+
documentation and/or other materials provided with the distribution.
|
| 1417 |
+
|
| 1418 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1419 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1420 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1421 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1422 |
+
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1423 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1424 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1425 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1426 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1427 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1428 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1429 |
+
|
| 1430 |
+
15. Licensee's use of the lz4 third party component is
|
| 1431 |
+
subject to the following terms and conditions:
|
| 1432 |
+
|
| 1433 |
+
Copyright (C) 2011-2013, Yann Collet.
|
| 1434 |
+
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
|
| 1435 |
+
|
| 1436 |
+
Redistribution and use in source and binary forms, with or without
|
| 1437 |
+
modification, are permitted provided that the following conditions are
|
| 1438 |
+
met:
|
| 1439 |
+
|
| 1440 |
+
* Redistributions of source code must retain the above copyright
|
| 1441 |
+
notice, this list of conditions and the following disclaimer.
|
| 1442 |
+
* Redistributions in binary form must reproduce the above
|
| 1443 |
+
copyright notice, this list of conditions and the following disclaimer
|
| 1444 |
+
in the documentation and/or other materials provided with the
|
| 1445 |
+
distribution.
|
| 1446 |
+
|
| 1447 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1448 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1449 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1450 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1451 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1452 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1453 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1454 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1455 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1456 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1457 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1458 |
+
|
| 1459 |
+
16. The NPP library uses code from the Boost Math Toolkit,
|
| 1460 |
+
and is subject to the following license:
|
| 1461 |
+
|
| 1462 |
+
Boost Software License - Version 1.0 - August 17th, 2003
|
| 1463 |
+
. . . .
|
| 1464 |
+
|
| 1465 |
+
Permission is hereby granted, free of charge, to any person or
|
| 1466 |
+
organization obtaining a copy of the software and accompanying
|
| 1467 |
+
documentation covered by this license (the "Software") to use,
|
| 1468 |
+
reproduce, display, distribute, execute, and transmit the Software,
|
| 1469 |
+
and to prepare derivative works of the Software, and to permit
|
| 1470 |
+
third-parties to whom the Software is furnished to do so, all
|
| 1471 |
+
subject to the following:
|
| 1472 |
+
|
| 1473 |
+
The copyright notices in the Software and this entire statement,
|
| 1474 |
+
including the above license grant, this restriction and the following
|
| 1475 |
+
disclaimer, must be included in all copies of the Software, in whole
|
| 1476 |
+
or in part, and all derivative works of the Software, unless such
|
| 1477 |
+
copies or derivative works are solely in the form of machine-executable
|
| 1478 |
+
object code generated by a source language processor.
|
| 1479 |
+
|
| 1480 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 1481 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 1482 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
|
| 1483 |
+
NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
|
| 1484 |
+
ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
|
| 1485 |
+
OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
|
| 1486 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
| 1487 |
+
OTHER DEALINGS IN THE SOFTWARE.
|
| 1488 |
+
|
| 1489 |
+
17. Portions of the Nsight Eclipse Edition is subject to the
|
| 1490 |
+
following license:
|
| 1491 |
+
|
| 1492 |
+
The Eclipse Foundation makes available all content in this plug-in
|
| 1493 |
+
("Content"). Unless otherwise indicated below, the Content is provided
|
| 1494 |
+
to you under the terms and conditions of the Eclipse Public License
|
| 1495 |
+
Version 1.0 ("EPL"). A copy of the EPL is available at http://
|
| 1496 |
+
www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program"
|
| 1497 |
+
will mean the Content.
|
| 1498 |
+
|
| 1499 |
+
If you did not receive this Content directly from the Eclipse
|
| 1500 |
+
Foundation, the Content is being redistributed by another party
|
| 1501 |
+
("Redistributor") and different terms and conditions may apply to your
|
| 1502 |
+
use of any object code in the Content. Check the Redistributor's
|
| 1503 |
+
license that was provided with the Content. If no such license exists,
|
| 1504 |
+
contact the Redistributor. Unless otherwise indicated below, the terms
|
| 1505 |
+
and conditions of the EPL still apply to any source code in the
|
| 1506 |
+
Content and such source code may be obtained at http://www.eclipse.org.
|
| 1507 |
+
|
| 1508 |
+
18. Some of the cuBLAS library routines uses code from
|
| 1509 |
+
OpenAI, which is subject to the following license:
|
| 1510 |
+
|
| 1511 |
+
License URL
|
| 1512 |
+
https://github.com/openai/openai-gemm/blob/master/LICENSE
|
| 1513 |
+
|
| 1514 |
+
License Text
|
| 1515 |
+
The MIT License
|
| 1516 |
+
|
| 1517 |
+
Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc.
|
| 1518 |
+
|
| 1519 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 1520 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 1521 |
+
in the Software without restriction, including without limitation the rights
|
| 1522 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 1523 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 1524 |
+
furnished to do so, subject to the following conditions:
|
| 1525 |
+
|
| 1526 |
+
The above copyright notice and this permission notice shall be included in
|
| 1527 |
+
all copies or substantial portions of the Software.
|
| 1528 |
+
|
| 1529 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 1530 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 1531 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 1532 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 1533 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 1534 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
| 1535 |
+
THE SOFTWARE.
|
| 1536 |
+
|
| 1537 |
+
19. Licensee's use of the Visual Studio Setup Configuration
|
| 1538 |
+
Samples is subject to the following license:
|
| 1539 |
+
|
| 1540 |
+
The MIT License (MIT)
|
| 1541 |
+
Copyright (C) Microsoft Corporation. All rights reserved.
|
| 1542 |
+
|
| 1543 |
+
Permission is hereby granted, free of charge, to any person
|
| 1544 |
+
obtaining a copy of this software and associated documentation
|
| 1545 |
+
files (the "Software"), to deal in the Software without restriction,
|
| 1546 |
+
including without limitation the rights to use, copy, modify, merge,
|
| 1547 |
+
publish, distribute, sublicense, and/or sell copies of the Software,
|
| 1548 |
+
and to permit persons to whom the Software is furnished to do so,
|
| 1549 |
+
subject to the following conditions:
|
| 1550 |
+
|
| 1551 |
+
The above copyright notice and this permission notice shall be included
|
| 1552 |
+
in all copies or substantial portions of the Software.
|
| 1553 |
+
|
| 1554 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
| 1555 |
+
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 1556 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 1557 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 1558 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 1559 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
| 1560 |
+
|
| 1561 |
+
20. Licensee's use of linmath.h header for CPU functions for
|
| 1562 |
+
GL vector/matrix operations from lunarG is subject to the
|
| 1563 |
+
Apache License Version 2.0.
|
| 1564 |
+
|
| 1565 |
+
21. The DX12-CUDA sample uses the d3dx12.h header, which is
|
| 1566 |
+
subject to the MIT license .
|
| 1567 |
+
|
| 1568 |
+
-----------------
|
evalkit_cambrian/lib/python3.10/site-packages/nvidia_cuda_nvrtc_cu12-12.1.105.dist-info/METADATA
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: nvidia-cuda-nvrtc-cu12
|
| 3 |
+
Version: 12.1.105
|
| 4 |
+
Summary: NVRTC native runtime libraries
|
| 5 |
+
Home-page: https://developer.nvidia.com/cuda-zone
|
| 6 |
+
Author: Nvidia CUDA Installer Team
|
| 7 |
+
Author-email: cuda_installer@nvidia.com
|
| 8 |
+
License: NVIDIA Proprietary Software
|
| 9 |
+
Keywords: cuda,nvidia,runtime,machine learning,deep learning
|
| 10 |
+
Classifier: Development Status :: 4 - Beta
|
| 11 |
+
Classifier: Intended Audience :: Developers
|
| 12 |
+
Classifier: Intended Audience :: Education
|
| 13 |
+
Classifier: Intended Audience :: Science/Research
|
| 14 |
+
Classifier: License :: Other/Proprietary License
|
| 15 |
+
Classifier: Natural Language :: English
|
| 16 |
+
Classifier: Programming Language :: Python :: 3
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.5
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.6
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.7
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 24 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 25 |
+
Classifier: Topic :: Scientific/Engineering
|
| 26 |
+
Classifier: Topic :: Scientific/Engineering :: Mathematics
|
| 27 |
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
| 28 |
+
Classifier: Topic :: Software Development
|
| 29 |
+
Classifier: Topic :: Software Development :: Libraries
|
| 30 |
+
Classifier: Operating System :: Microsoft :: Windows
|
| 31 |
+
Classifier: Operating System :: POSIX :: Linux
|
| 32 |
+
Requires-Python: >=3
|
| 33 |
+
License-File: License.txt
|
| 34 |
+
|
| 35 |
+
NVRTC native runtime libraries
|
evalkit_cambrian/lib/python3.10/site-packages/nvidia_cuda_nvrtc_cu12-12.1.105.dist-info/REQUESTED
ADDED
|
File without changes
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/__about__.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__package_name__ = 'portalocker'
|
| 2 |
+
__author__ = 'Rick van Hattem'
|
| 3 |
+
__email__ = 'wolph@wol.ph'
|
| 4 |
+
__version__ = '3.1.1'
|
| 5 |
+
__description__ = """Wraps the portalocker recipe for easy usage"""
|
| 6 |
+
__url__ = 'https://github.com/WoLpH/portalocker'
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/__init__.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from . import __about__, constants, exceptions, portalocker
|
| 2 |
+
from .utils import (
|
| 3 |
+
BoundedSemaphore,
|
| 4 |
+
Lock,
|
| 5 |
+
RLock,
|
| 6 |
+
TemporaryFileLock,
|
| 7 |
+
open_atomic,
|
| 8 |
+
)
|
| 9 |
+
|
| 10 |
+
try: # pragma: no cover
|
| 11 |
+
from .redis import RedisLock
|
| 12 |
+
except ImportError: # pragma: no cover
|
| 13 |
+
RedisLock = None # type: ignore[assignment,misc]
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
#: The package name on Pypi
|
| 17 |
+
__package_name__ = __about__.__package_name__
|
| 18 |
+
#: Current author and maintainer, view the git history for the previous ones
|
| 19 |
+
__author__ = __about__.__author__
|
| 20 |
+
#: Current author's email address
|
| 21 |
+
__email__ = __about__.__email__
|
| 22 |
+
#: Version number
|
| 23 |
+
__version__ = '3.1.1'
|
| 24 |
+
#: Package description for Pypi
|
| 25 |
+
__description__ = __about__.__description__
|
| 26 |
+
#: Package homepage
|
| 27 |
+
__url__ = __about__.__url__
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
#: Exception thrown when the file is already locked by someone else
|
| 31 |
+
AlreadyLocked = exceptions.AlreadyLocked
|
| 32 |
+
#: Exception thrown if an error occurred during locking
|
| 33 |
+
LockException = exceptions.LockException
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
#: Lock a file. Note that this is an advisory lock on Linux/Unix systems
|
| 37 |
+
lock = portalocker.lock
|
| 38 |
+
#: Unlock a file
|
| 39 |
+
unlock = portalocker.unlock
|
| 40 |
+
|
| 41 |
+
#: Place an exclusive lock.
|
| 42 |
+
#: Only one process may hold an exclusive lock for a given file at a given
|
| 43 |
+
#: time.
|
| 44 |
+
LOCK_EX: constants.LockFlags = constants.LockFlags.EXCLUSIVE
|
| 45 |
+
|
| 46 |
+
#: Place a shared lock.
|
| 47 |
+
#: More than one process may hold a shared lock for a given file at a given
|
| 48 |
+
#: time.
|
| 49 |
+
LOCK_SH: constants.LockFlags = constants.LockFlags.SHARED
|
| 50 |
+
|
| 51 |
+
#: Acquire the lock in a non-blocking fashion.
|
| 52 |
+
LOCK_NB: constants.LockFlags = constants.LockFlags.NON_BLOCKING
|
| 53 |
+
|
| 54 |
+
#: Remove an existing lock held by this process.
|
| 55 |
+
LOCK_UN: constants.LockFlags = constants.LockFlags.UNBLOCK
|
| 56 |
+
|
| 57 |
+
#: Locking flags enum
|
| 58 |
+
LockFlags = constants.LockFlags
|
| 59 |
+
|
| 60 |
+
#: Locking utility class to automatically handle opening with timeouts and
|
| 61 |
+
#: context wrappers
|
| 62 |
+
|
| 63 |
+
__all__ = [
|
| 64 |
+
'LOCK_EX',
|
| 65 |
+
'LOCK_NB',
|
| 66 |
+
'LOCK_SH',
|
| 67 |
+
'LOCK_UN',
|
| 68 |
+
'AlreadyLocked',
|
| 69 |
+
'BoundedSemaphore',
|
| 70 |
+
'Lock',
|
| 71 |
+
'LockException',
|
| 72 |
+
'LockFlags',
|
| 73 |
+
'RLock',
|
| 74 |
+
'RedisLock',
|
| 75 |
+
'TemporaryFileLock',
|
| 76 |
+
'lock',
|
| 77 |
+
'open_atomic',
|
| 78 |
+
'unlock',
|
| 79 |
+
]
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/__main__.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import argparse
|
| 4 |
+
import logging
|
| 5 |
+
import os
|
| 6 |
+
import pathlib
|
| 7 |
+
import re
|
| 8 |
+
import typing
|
| 9 |
+
|
| 10 |
+
base_path = pathlib.Path(__file__).parent.parent
|
| 11 |
+
src_path = base_path / 'portalocker'
|
| 12 |
+
dist_path = base_path / 'dist'
|
| 13 |
+
_default_output_path = base_path / 'dist' / 'portalocker.py'
|
| 14 |
+
|
| 15 |
+
_NAMES_RE = re.compile(r'(?P<names>[^()]+)$')
|
| 16 |
+
_RELATIVE_IMPORT_RE = re.compile(
|
| 17 |
+
r'^from \.(?P<from>.*?) import (?P<paren>\(?)(?P<names>[^()]+)$',
|
| 18 |
+
)
|
| 19 |
+
_USELESS_ASSIGNMENT_RE = re.compile(r'^(?P<name>\w+) = \1\n$')
|
| 20 |
+
|
| 21 |
+
_TEXT_TEMPLATE = """'''
|
| 22 |
+
{}
|
| 23 |
+
'''
|
| 24 |
+
|
| 25 |
+
"""
|
| 26 |
+
|
| 27 |
+
logger = logging.getLogger(__name__)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def main(argv: typing.Sequence[str] | None = None) -> None:
|
| 31 |
+
parser = argparse.ArgumentParser()
|
| 32 |
+
|
| 33 |
+
subparsers = parser.add_subparsers(required=True)
|
| 34 |
+
combine_parser = subparsers.add_parser(
|
| 35 |
+
'combine',
|
| 36 |
+
help='Combine all Python files into a single unified `portalocker.py` '
|
| 37 |
+
'file for easy distribution',
|
| 38 |
+
)
|
| 39 |
+
combine_parser.add_argument(
|
| 40 |
+
'--output-file',
|
| 41 |
+
'-o',
|
| 42 |
+
type=argparse.FileType('w'),
|
| 43 |
+
default=str(_default_output_path),
|
| 44 |
+
)
|
| 45 |
+
|
| 46 |
+
combine_parser.set_defaults(func=combine)
|
| 47 |
+
args = parser.parse_args(argv)
|
| 48 |
+
args.func(args)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def _read_file(
|
| 52 |
+
path: pathlib.Path,
|
| 53 |
+
seen_files: set[pathlib.Path],
|
| 54 |
+
) -> typing.Iterator[str]:
|
| 55 |
+
if path in seen_files:
|
| 56 |
+
return
|
| 57 |
+
|
| 58 |
+
names: set[str] = set()
|
| 59 |
+
seen_files.add(path)
|
| 60 |
+
paren = False
|
| 61 |
+
from_ = None
|
| 62 |
+
for line in path.open():
|
| 63 |
+
if '__future__' in line:
|
| 64 |
+
continue
|
| 65 |
+
|
| 66 |
+
if paren:
|
| 67 |
+
if ')' in line:
|
| 68 |
+
line = line.split(')', 1)[1]
|
| 69 |
+
paren = False
|
| 70 |
+
continue
|
| 71 |
+
|
| 72 |
+
match = _NAMES_RE.match(line)
|
| 73 |
+
else:
|
| 74 |
+
match = _RELATIVE_IMPORT_RE.match(line)
|
| 75 |
+
|
| 76 |
+
if match:
|
| 77 |
+
if not paren:
|
| 78 |
+
paren = bool(match.group('paren'))
|
| 79 |
+
from_ = match.group('from')
|
| 80 |
+
|
| 81 |
+
if from_:
|
| 82 |
+
names.add(from_)
|
| 83 |
+
yield from _read_file(src_path / f'{from_}.py', seen_files)
|
| 84 |
+
else:
|
| 85 |
+
for name in match.group('names').split(','):
|
| 86 |
+
name = name.strip()
|
| 87 |
+
names.add(name)
|
| 88 |
+
yield from _read_file(src_path / f'{name}.py', seen_files)
|
| 89 |
+
else:
|
| 90 |
+
yield _clean_line(line, names)
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def _clean_line(line: str, names: set[str]) -> str:
    """Return *line* with known module prefixes and self-assignments removed.

    ``some_import.spam`` becomes ``spam`` for every name in *names*, and
    useless assignments such as ``spam = spam`` are dropped entirely.
    """
    if names:
        # Build one alternation for all known module names and strip the
        # `name.` prefix wherever it appears at a word boundary.
        prefix_pattern = r'\b({})\.'.format('|'.join(names))
        line = re.sub(prefix_pattern, '', line)

    # Remove useless assignments (e.g. `spam = spam`)
    return _USELESS_ASSIGNMENT_RE.sub('', line)
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def combine(args: argparse.Namespace) -> None:
    """Write the whole package into the single file named by ``args.output_file``.

    The output consists of the ``__future__`` import, the README and LICENSE
    embedded via ``_TEXT_TEMPLATE``, and the flattened module source produced
    by `_read_file`. Afterwards the file is formatted and sanity-executed via
    external tools.
    """
    output_file = args.output_file
    # Make sure the destination directory exists before writing.
    pathlib.Path(output_file.name).parent.mkdir(parents=True, exist_ok=True)

    # We're handling this separately because it has to be the first import.
    output_file.write('from __future__ import annotations\n')

    # Embed README and LICENSE (wrapped by `_TEXT_TEMPLATE`) at the top.
    output_file.write(
        _TEXT_TEMPLATE.format((base_path / 'README.rst').read_text()),
    )
    output_file.write(
        _TEXT_TEMPLATE.format((base_path / 'LICENSE').read_text()),
    )

    # Recursively inline the package starting from its `__init__.py`.
    seen_files: set[pathlib.Path] = set()
    for line in _read_file(src_path / '__init__.py', seen_files):
        output_file.write(line)

    output_file.flush()
    output_file.close()

    logger.info(f'Wrote combined file to {output_file.name}')
    # Run black and ruff if available. If not then just run the file.
    # NOTE(review): these commands run unconditionally; "if available" relies
    # on the shell reporting a missing tool without aborting — confirm.
    os.system(f'black {output_file.name}')
    os.system(f'ruff format {output_file.name}')
    os.system(f'ruff check --fix --fix-only {output_file.name}')
    # Execute the combined file once as a smoke test that it imports cleanly.
    os.system(f'python3 {output_file.name}')
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
if __name__ == '__main__':
    # Enable INFO logging so the "Wrote combined file" message is visible,
    # then run the CLI with the real `sys.argv`.
    logging.basicConfig(level=logging.INFO)
    main()
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/__about__.cpython-310.pyc
ADDED
|
Binary file (419 Bytes). View file
|
|
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.01 kB). View file
|
|
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/__main__.cpython-310.pyc
ADDED
|
Binary file (3.19 kB). View file
|
|
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/constants.cpython-310.pyc
ADDED
|
Binary file (942 Bytes). View file
|
|
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/exceptions.cpython-310.pyc
ADDED
|
Binary file (1.11 kB). View file
|
|
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/portalocker.cpython-310.pyc
ADDED
|
Binary file (3.17 kB). View file
|
|
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/redis.cpython-310.pyc
ADDED
|
Binary file (6.98 kB). View file
|
|
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/types.cpython-310.pyc
ADDED
|
Binary file (1.24 kB). View file
|
|
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/__pycache__/utils.cpython-310.pyc
ADDED
|
Binary file (16.5 kB). View file
|
|
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/constants.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
Locking constants

Lock types:

- `EXCLUSIVE` exclusive lock
- `SHARED` shared lock

Lock flags:

- `NON_BLOCKING` non-blocking

Manually unlock, only needed internally

- `UNBLOCK` unlock
"""

import enum
import os

# The actual tests will execute the code anyhow so the following code can
# safely be ignored from the coverage tests
if os.name == 'nt':  # pragma: no cover
    import msvcrt

    # On Windows the lock/share/non-blocking flags are defined locally;
    # only the unlock value is taken from `msvcrt`.
    #: exclusive lock
    LOCK_EX = 0x1
    #: shared lock
    LOCK_SH = 0x2
    #: non-blocking
    LOCK_NB = 0x4
    #: unlock
    LOCK_UN = msvcrt.LK_UNLCK  # type: ignore[attr-defined]

elif os.name == 'posix':  # pragma: no cover
    import fcntl

    # On POSIX the flags map directly onto the `fcntl` module's values.
    #: exclusive lock
    LOCK_EX = fcntl.LOCK_EX
    #: shared lock
    LOCK_SH = fcntl.LOCK_SH
    #: non-blocking
    LOCK_NB = fcntl.LOCK_NB
    #: unlock
    LOCK_UN = fcntl.LOCK_UN

else:  # pragma: no cover
    raise RuntimeError('PortaLocker only defined for nt and posix platforms')


class LockFlags(enum.IntFlag):
    """Bitmask flags for `lock()`; combine with ``|`` (e.g. EXCLUSIVE | NON_BLOCKING)."""

    #: exclusive lock
    EXCLUSIVE = LOCK_EX
    #: shared lock
    SHARED = LOCK_SH
    #: non-blocking
    NON_BLOCKING = LOCK_NB
    #: unlock
    UNBLOCK = LOCK_UN
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/exceptions.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import typing

from portalocker import types


class BaseLockException(Exception):  # noqa: N818
    """Base class for all portalocker exceptions.

    The filehandle involved in the failure (if any) is kept on ``fh`` so
    callers can inspect or close it after catching the exception.
    """

    # Error codes:
    LOCK_FAILED = 1

    def __init__(
        self,
        *args: typing.Any,
        fh: typing.Union[types.IO, None, int] = None,
        **kwargs: typing.Any,
    ) -> None:
        # Keep a reference to the filehandle for the caller.
        # NOTE(review): **kwargs is accepted but discarded — presumably for
        # signature compatibility; confirm.
        self.fh = fh
        Exception.__init__(self, *args)


class LockException(BaseLockException):
    """A locking operation failed."""

    pass


class AlreadyLocked(LockException):
    """The file is already locked by another process/handle."""

    pass


class FileToLarge(LockException):
    """The requested lock range exceeds what the platform supports."""

    pass
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/portalocker.py
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations

import os
import typing

from . import constants, exceptions, types

# Alias for readability. Due to import recursion issues we cannot do:
# from .constants import LockFlags
LockFlags = constants.LockFlags


class HasFileno(typing.Protocol):
    """Structural type for anything exposing a ``fileno()`` method."""

    def fileno(self) -> int: ...


# The platform locking callable; set to `fcntl.flock` on POSIX below and
# left as None on Windows (which uses win32file directly).
LOCKER: typing.Callable[[int | HasFileno, int], typing.Any] | None = None

if os.name == 'nt':  # pragma: no cover
    import msvcrt

    import pywintypes
    import win32con
    import win32file
    import winerror

    # Shared OVERLAPPED structure used for the region-based Win32 lock calls.
    __overlapped = pywintypes.OVERLAPPED()

    def lock(file_: types.IO | int, flags: LockFlags) -> None:
        """Lock *file_* using Win32 ``LockFileEx`` with the given *flags*."""
        # Windows locking does not support locking through `fh.fileno()` so
        # we cast it to make mypy and pyright happy
        file_ = typing.cast(types.IO, file_)

        mode = 0
        if flags & LockFlags.NON_BLOCKING:
            mode |= win32con.LOCKFILE_FAIL_IMMEDIATELY

        if flags & LockFlags.EXCLUSIVE:
            mode |= win32con.LOCKFILE_EXCLUSIVE_LOCK

        # Save the old position so we can go back to that position but
        # still lock from the beginning of the file
        savepos = file_.tell()
        if savepos:
            file_.seek(0)

        os_fh = msvcrt.get_osfhandle(file_.fileno())  # type: ignore[attr-defined]
        try:
            win32file.LockFileEx(os_fh, mode, 0, -0x10000, __overlapped)
        except pywintypes.error as exc_value:
            # error: (33, 'LockFileEx', 'The process cannot access the file
            # because another process has locked a portion of the file.')
            if exc_value.winerror == winerror.ERROR_LOCK_VIOLATION:
                raise exceptions.AlreadyLocked(
                    exceptions.LockException.LOCK_FAILED,
                    exc_value.strerror,
                    fh=file_,
                ) from exc_value
            else:
                # Q: Are there exceptions/codes we should be dealing with
                # here?
                raise
        finally:
            # Always restore the caller's file position.
            if savepos:
                file_.seek(savepos)

    def unlock(file_: types.IO) -> None:
        """Release a lock previously taken on *file_* via ``UnlockFileEx``."""
        try:
            savepos = file_.tell()
            if savepos:
                file_.seek(0)

            os_fh = msvcrt.get_osfhandle(file_.fileno())  # type: ignore[attr-defined]
            try:
                win32file.UnlockFileEx(
                    os_fh,
                    0,
                    -0x10000,
                    __overlapped,
                )
            except pywintypes.error as exc:
                # Unlocking an unlocked file is treated as a no-op.
                if exc.winerror != winerror.ERROR_NOT_LOCKED:
                    # Q: Are there exceptions/codes we should be
                    # dealing with here?
                    raise
            finally:
                if savepos:
                    file_.seek(savepos)
        except OSError as exc:
            raise exceptions.LockException(
                exceptions.LockException.LOCK_FAILED,
                exc.strerror,
                fh=file_,
            ) from exc

elif os.name == 'posix':  # pragma: no cover
    import errno
    import fcntl

    # The locking implementation.
    # Expected values are either fcntl.flock() or fcntl.lockf(),
    # but any callable that matches the syntax will be accepted.
    LOCKER = fcntl.flock  # pyright: ignore[reportConstantRedefinition]

    def lock(file: int | types.IO, flags: LockFlags) -> None:  # type: ignore[misc]
        """Lock *file* (handle or descriptor) via the `LOCKER` callable."""
        assert LOCKER is not None, 'We need a locking function in `LOCKER` '
        # Locking with NON_BLOCKING without EXCLUSIVE or SHARED enabled
        # results in an error
        if (flags & LockFlags.NON_BLOCKING) and not flags & (
            LockFlags.SHARED | LockFlags.EXCLUSIVE
        ):
            raise RuntimeError(
                'When locking in non-blocking mode the SHARED '
                'or EXCLUSIVE flag must be specified as well',
            )

        try:
            LOCKER(file, flags)
        except OSError as exc_value:
            # Python can use one of several different exception classes to
            # represent timeout (most likely is BlockingIOError and IOError),
            # but these errors may also represent other failures. On some
            # systems, `IOError is OSError` which means checking for either
            # IOError or OSError can mask other errors.
            # The safest check is to catch OSError (from which the others
            # inherit) and check the errno (which should be EACCESS or EAGAIN
            # according to the spec).
            if exc_value.errno in (errno.EACCES, errno.EAGAIN):
                # A timeout exception, wrap this so the outer code knows to try
                # again (if it wants to).
                raise exceptions.AlreadyLocked(
                    exc_value,
                    fh=file,
                ) from exc_value
            else:
                # Something else went wrong; don't wrap this so we stop
                # immediately.
                raise exceptions.LockException(
                    exc_value,
                    fh=file,
                ) from exc_value
        except EOFError as exc_value:
            # On NFS filesystems, flock can raise an EOFError
            raise exceptions.LockException(
                exc_value,
                fh=file,
            ) from exc_value

    def unlock(file: types.IO) -> None:  # type: ignore[misc]
        """Release the lock on *file* by calling `LOCKER` with UNBLOCK."""
        assert LOCKER is not None, 'We need a locking function in `LOCKER` '
        LOCKER(file.fileno(), LockFlags.UNBLOCK)

else:  # pragma: no cover
    raise RuntimeError('PortaLocker only defined for nt and posix platforms')
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/py.typed
ADDED
|
File without changes
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/types.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations

import pathlib
import typing
from typing import Union

# All file-mode strings accepted by `open()`, enumerated (including every
# character-order permutation) so type checkers can validate them literally.
# fmt: off
Mode = typing.Literal[
    # Text modes
    # Read text
    'r', 'rt', 'tr',
    # Write text
    'w', 'wt', 'tw',
    # Append text
    'a', 'at', 'ta',
    # Exclusive creation text
    'x', 'xt', 'tx',
    # Read and write text
    'r+', '+r', 'rt+', 'r+t', '+rt', 'tr+', 't+r', '+tr',
    # Write and read text
    'w+', '+w', 'wt+', 'w+t', '+wt', 'tw+', 't+w', '+tw',
    # Append and read text
    'a+', '+a', 'at+', 'a+t', '+at', 'ta+', 't+a', '+ta',
    # Exclusive creation and read text
    'x+', '+x', 'xt+', 'x+t', '+xt', 'tx+', 't+x', '+tx',
    # Universal newline support
    'U', 'rU', 'Ur', 'rtU', 'rUt', 'Urt', 'trU', 'tUr', 'Utr',

    # Binary modes
    # Read binary
    'rb', 'br',
    # Write binary
    'wb', 'bw',
    # Append binary
    'ab', 'ba',
    # Exclusive creation binary
    'xb', 'bx',
    # Read and write binary
    'rb+', 'r+b', '+rb', 'br+', 'b+r', '+br',
    # Write and read binary
    'wb+', 'w+b', '+wb', 'bw+', 'b+w', '+bw',
    # Append and read binary
    'ab+', 'a+b', '+ab', 'ba+', 'b+a', '+ba',
    # Exclusive creation and read binary
    'xb+', 'x+b', '+xb', 'bx+', 'b+x', '+bx',
    # Universal newline support in binary mode
    'rbU', 'rUb', 'Urb', 'brU', 'bUr', 'Ubr',
]
# Anything usable as a file name: a plain string or a `pathlib.Path`.
Filename = Union[str, pathlib.Path]
# A text or binary file object.
IO: typing.TypeAlias = Union[  # type: ignore[name-defined]
    typing.IO[str],
    typing.IO[bytes],
]


class FileOpenKwargs(typing.TypedDict):
    """Keyword arguments forwarded to the builtin `open()` call."""

    buffering: int | None
    encoding: str | None
    errors: str | None
    newline: str | None
    closefd: bool | None
    opener: typing.Callable[[str, int], int] | None
|
evalkit_cambrian/lib/python3.10/site-packages/portalocker/utils.py
ADDED
|
@@ -0,0 +1,587 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations

import abc
import atexit
import contextlib
import logging
import os
import pathlib
import random
import tempfile
import time
import typing
import warnings

from . import constants, exceptions, portalocker, types
from .types import Filename, Mode

logger = logging.getLogger(__name__)

# Default number of seconds to keep retrying a lock acquisition.
DEFAULT_TIMEOUT = 5
# Default pause (seconds) between lock attempts while waiting for the timeout.
DEFAULT_CHECK_INTERVAL = 0.25
# By default keep retrying instead of failing on the first locked attempt.
DEFAULT_FAIL_WHEN_LOCKED = False
# The standard lock call: exclusive and non-blocking.
LOCK_METHOD = constants.LockFlags.EXCLUSIVE | constants.LockFlags.NON_BLOCKING

__all__ = [
    'Lock',
    'open_atomic',
]
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def coalesce(*args: typing.Any, test_value: typing.Any = None) -> typing.Any:
    """Return the first argument that is not identical to `test_value`.

    Returns `None` when every argument equals the `test_value` (or when no
    arguments are given), so the last argument commonly acts as the default.

    The comparison is an identity check (`value is not test_value`), so
    passing a freshly-built object as `test_value` will not match equal but
    distinct objects.

    >>> coalesce(None, 1)
    1
    >>> coalesce()

    >>> coalesce(0, False, True)
    0
    >>> coalesce(0, False, True, test_value=0)
    False

    # This won't work because of the `is not test_value` type testing:
    >>> coalesce([], dict(spam='eggs'), test_value=[])
    []
    """
    for candidate in args:
        if candidate is not test_value:
            return candidate
    return None
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
@contextlib.contextmanager
def open_atomic(
    filename: Filename,
    binary: bool = True,
) -> typing.Iterator[types.IO]:
    """Open a file for atomic writing. Instead of locking this method allows
    you to write the entire file and move it to the actual location. Note that
    this makes the assumption that a rename is atomic on your platform which
    is generally the case but not a guarantee.

    http://docs.python.org/library/os.html#os.rename

    >>> filename = 'test_file.txt'
    >>> if os.path.exists(filename):
    ...     os.remove(filename)

    >>> with open_atomic(filename) as fh:
    ...     written = fh.write(b'test')
    >>> assert os.path.exists(filename)
    >>> os.remove(filename)

    >>> import pathlib
    >>> path_filename = pathlib.Path('test_file.txt')

    >>> with open_atomic(path_filename) as fh:
    ...     written = fh.write(b'test')
    >>> assert path_filename.exists()
    >>> path_filename.unlink()
    """
    # `pathlib.Path` cast in case `path` is a `str`
    path: pathlib.Path
    if isinstance(filename, pathlib.Path):
        path = filename
    else:
        path = pathlib.Path(filename)

    # NOTE(review): `assert` is stripped under `python -O`, so this
    # precondition check disappears in optimized runs — confirm intended.
    assert not path.exists(), f'{path!r} exists'

    # Create the parent directory if it doesn't exist
    path.parent.mkdir(parents=True, exist_ok=True)

    # Write into a temp file in the same directory so the final rename stays
    # on one filesystem (cross-device renames are not atomic).
    with tempfile.NamedTemporaryFile(
        mode=(binary and 'wb') or 'w',
        dir=str(path.parent),
        delete=False,
    ) as temp_fh:
        yield temp_fh
        # Flush Python and OS buffers so the data is on disk before renaming.
        temp_fh.flush()
        os.fsync(temp_fh.fileno())

    try:
        os.rename(temp_fh.name, path)
    finally:
        # Best-effort cleanup of the temp file (no-op after a successful
        # rename, removes the leftover on failure).
        with contextlib.suppress(Exception):
            os.remove(temp_fh.name)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
class LockBase(abc.ABC):  # pragma: no cover
    """Abstract base for lock managers: retry timing plus the
    acquire/release contract and context-manager support."""

    #: timeout when trying to acquire a lock
    timeout: float
    #: check interval while waiting for `timeout`
    check_interval: float
    #: skip the timeout and immediately fail if the initial lock fails
    fail_when_locked: bool

    def __init__(
        self,
        timeout: float | None = None,
        check_interval: float | None = None,
        fail_when_locked: bool | None = None,
    ) -> None:
        # `None` arguments fall back to the module-level defaults.
        self.timeout = coalesce(timeout, DEFAULT_TIMEOUT)
        self.check_interval = coalesce(check_interval, DEFAULT_CHECK_INTERVAL)
        self.fail_when_locked = coalesce(
            fail_when_locked,
            DEFAULT_FAIL_WHEN_LOCKED,
        )

    @abc.abstractmethod
    def acquire(
        self,
        timeout: float | None = None,
        check_interval: float | None = None,
        fail_when_locked: bool | None = None,
    ) -> typing.IO[typing.AnyStr]: ...

    def _timeout_generator(
        self,
        timeout: float | None,
        check_interval: float | None,
    ) -> typing.Iterator[int]:
        """Yield attempt numbers (0, 1, 2, ...) until *timeout* expires,
        sleeping `check_interval` between attempts."""
        f_timeout = coalesce(timeout, self.timeout, 0.0)
        f_check_interval = coalesce(check_interval, self.check_interval, 0.0)

        # Always allow at least one attempt, even with a zero timeout.
        yield 0
        i = 0

        start_time = time.perf_counter()
        while start_time + f_timeout > time.perf_counter():
            i += 1
            yield i

            # Take low lock checks into account to stay within the interval
            since_start_time = time.perf_counter() - start_time
            time.sleep(max(0.001, (i * f_check_interval) - since_start_time))

    @abc.abstractmethod
    def release(self) -> None: ...

    def __enter__(self) -> typing.IO[typing.AnyStr]:
        return self.acquire()

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: typing.Any,  # Should be typing.TracebackType
    ) -> bool | None:
        # Never suppress exceptions; just release the lock.
        self.release()
        return None

    def __delete__(self, instance: LockBase) -> None:
        # NOTE(review): `__delete__` is the descriptor-deletion hook, not the
        # `__del__` finalizer — it only fires if an instance is used as a
        # class attribute and deleted; confirm this is intentional.
        instance.release()
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
class Lock(LockBase):
    """Lock manager with built-in timeout

    Args:
        filename: filename
        mode: the open mode, 'a' or 'ab' should be used for writing. When mode
            contains `w` the file will be truncated to 0 bytes.
        timeout: timeout when trying to acquire a lock
        check_interval: check interval while waiting
        fail_when_locked: after the initial lock failed, return an error
            or lock the file. This does not wait for the timeout.
        **file_open_kwargs: The kwargs for the `open(...)` call

    fail_when_locked is useful when multiple threads/processes can race
    when creating a file. If set to true than the system will wait till
    the lock was acquired and then return an AlreadyLocked exception.

    Note that the file is opened first and locked later. So using 'w' as
    mode will result in truncate _BEFORE_ the lock is checked.
    """

    #: the locked file handle, or None while unlocked
    fh: types.IO | None
    #: the target filename (always stored as `str`)
    filename: str
    #: the (possibly rewritten) open mode; 'w' is replaced by 'a'
    mode: str
    #: whether to truncate the file after the lock is acquired
    truncate: bool
    timeout: float
    check_interval: float
    fail_when_locked: bool
    #: flags passed to `portalocker.lock`
    flags: constants.LockFlags
    #: extra kwargs forwarded to the builtin `open()`
    file_open_kwargs: dict[str, typing.Any]

    def __init__(
        self,
        filename: Filename,
        mode: Mode = 'a',
        timeout: float | None = None,
        check_interval: float = DEFAULT_CHECK_INTERVAL,
        fail_when_locked: bool = DEFAULT_FAIL_WHEN_LOCKED,
        flags: constants.LockFlags = LOCK_METHOD,
        **file_open_kwargs: typing.Any,
    ) -> None:
        # Rewrite 'w' to 'a' and remember to truncate *after* locking, so the
        # file is not wiped before the lock is actually held.
        if 'w' in mode:
            truncate = True
            mode = typing.cast(Mode, mode.replace('w', 'a'))
        else:
            truncate = False

        if timeout is None:
            timeout = DEFAULT_TIMEOUT
        elif not (flags & constants.LockFlags.NON_BLOCKING):
            # A blocking lock call never retries, so a timeout is meaningless.
            warnings.warn(
                'timeout has no effect in blocking mode',
                stacklevel=1,
            )

        self.fh = None
        self.filename = str(filename)
        self.mode = mode
        self.truncate = truncate
        self.flags = flags
        self.file_open_kwargs = file_open_kwargs
        super().__init__(timeout, check_interval, fail_when_locked)

    def acquire(
        self,
        timeout: float | None = None,
        check_interval: float | None = None,
        fail_when_locked: bool | None = None,
    ) -> typing.IO[typing.AnyStr]:
        """Acquire the locked filehandle"""

        fail_when_locked = coalesce(fail_when_locked, self.fail_when_locked)

        if (
            not (self.flags & constants.LockFlags.NON_BLOCKING)
            and timeout is not None
        ):
            warnings.warn(
                'timeout has no effect in blocking mode',
                stacklevel=1,
            )

        # If we already have a filehandle, return it
        fh = self.fh
        if fh:
            # Due to type invariance we need to cast the type
            return typing.cast(typing.IO[typing.AnyStr], fh)

        # Get a new filehandler
        fh = self._get_fh()

        def try_close() -> None:  # pragma: no cover
            # Silently try to close the handle if possible, ignore all issues
            if fh is not None:
                with contextlib.suppress(Exception):
                    fh.close()

        exception = None
        # Try till the timeout has passed
        for _ in self._timeout_generator(timeout, check_interval):
            exception = None
            try:
                # Try to lock
                fh = self._get_lock(fh)
                break
            except exceptions.LockException as exc:
                # Python will automatically remove the variable from memory
                # unless you save it in a different location
                exception = exc

                # We already tried to the get the lock
                # If fail_when_locked is True, stop trying
                if fail_when_locked:
                    try_close()
                    raise exceptions.AlreadyLocked(exception) from exc
            except Exception as exc:
                # Something went wrong with the locking mechanism.
                # Wrap in a LockException and re-raise:
                try_close()
                raise exceptions.LockException(exc) from exc

            # Wait a bit

        if exception:
            try_close()
            # We got a timeout... reraising
            raise exception

        # Prepare the filehandle (truncate if needed)
        fh = self._prepare_fh(fh)

        self.fh = fh
        return typing.cast(typing.IO[typing.AnyStr], fh)

    def __enter__(self) -> typing.IO[typing.AnyStr]:
        return self.acquire()

    def release(self) -> None:
        """Releases the currently locked file handle"""
        if self.fh:
            portalocker.unlock(self.fh)
            self.fh.close()
            self.fh = None

    def _get_fh(self) -> types.IO:
        """Get a new filehandle"""
        return typing.cast(
            types.IO,
            open(  # noqa: SIM115
                self.filename,
                self.mode,
                **self.file_open_kwargs,
            ),
        )

    def _get_lock(self, fh: types.IO) -> types.IO:
        """
        Try to lock the given filehandle

        returns LockException if it fails"""
        portalocker.lock(fh, self.flags)
        return fh

    def _prepare_fh(self, fh: types.IO) -> types.IO:
        """
        Prepare the filehandle for usage

        If truncate is a number, the file will be truncated to that amount of
        bytes
        """
        if self.truncate:
            # Truncation was deferred from __init__ until the lock is held.
            fh.seek(0)
            fh.truncate(0)

        return fh
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
class RLock(Lock):
|
| 359 |
+
"""
|
| 360 |
+
A reentrant lock, functions in a similar way to threading.RLock in that it
|
| 361 |
+
can be acquired multiple times. When the corresponding number of release()
|
| 362 |
+
calls are made the lock will finally release the underlying file lock.
|
| 363 |
+
"""
|
| 364 |
+
|
| 365 |
+
def __init__(
|
| 366 |
+
self,
|
| 367 |
+
filename: Filename,
|
| 368 |
+
mode: Mode = 'a',
|
| 369 |
+
timeout: float = DEFAULT_TIMEOUT,
|
| 370 |
+
check_interval: float = DEFAULT_CHECK_INTERVAL,
|
| 371 |
+
fail_when_locked: bool = False,
|
| 372 |
+
flags: constants.LockFlags = LOCK_METHOD,
|
| 373 |
+
) -> None:
|
| 374 |
+
super().__init__(
|
| 375 |
+
filename,
|
| 376 |
+
mode,
|
| 377 |
+
timeout,
|
| 378 |
+
check_interval,
|
| 379 |
+
fail_when_locked,
|
| 380 |
+
flags,
|
| 381 |
+
)
|
| 382 |
+
self._acquire_count = 0
|
| 383 |
+
|
| 384 |
+
def acquire(
|
| 385 |
+
self,
|
| 386 |
+
timeout: float | None = None,
|
| 387 |
+
check_interval: float | None = None,
|
| 388 |
+
fail_when_locked: bool | None = None,
|
| 389 |
+
) -> typing.IO[typing.AnyStr]:
|
| 390 |
+
fh: typing.IO[typing.AnyStr]
|
| 391 |
+
if self._acquire_count >= 1:
|
| 392 |
+
fh = typing.cast(typing.IO[typing.AnyStr], self.fh)
|
| 393 |
+
else:
|
| 394 |
+
fh = super().acquire(timeout, check_interval, fail_when_locked)
|
| 395 |
+
self._acquire_count += 1
|
| 396 |
+
assert fh is not None
|
| 397 |
+
return fh
|
| 398 |
+
|
| 399 |
+
def release(self) -> None:
|
| 400 |
+
if self._acquire_count == 0:
|
| 401 |
+
raise exceptions.LockException(
|
| 402 |
+
'Cannot release more times than acquired',
|
| 403 |
+
)
|
| 404 |
+
|
| 405 |
+
if self._acquire_count == 1:
|
| 406 |
+
super().release()
|
| 407 |
+
self._acquire_count -= 1
|
| 408 |
+
|
| 409 |
+
|
| 410 |
+
class TemporaryFileLock(Lock):
|
| 411 |
+
def __init__(
|
| 412 |
+
self,
|
| 413 |
+
filename: str = '.lock',
|
| 414 |
+
timeout: float = DEFAULT_TIMEOUT,
|
| 415 |
+
check_interval: float = DEFAULT_CHECK_INTERVAL,
|
| 416 |
+
fail_when_locked: bool = True,
|
| 417 |
+
flags: constants.LockFlags = LOCK_METHOD,
|
| 418 |
+
) -> None:
|
| 419 |
+
Lock.__init__(
|
| 420 |
+
self,
|
| 421 |
+
filename=filename,
|
| 422 |
+
mode='w',
|
| 423 |
+
timeout=timeout,
|
| 424 |
+
check_interval=check_interval,
|
| 425 |
+
fail_when_locked=fail_when_locked,
|
| 426 |
+
flags=flags,
|
| 427 |
+
)
|
| 428 |
+
atexit.register(self.release)
|
| 429 |
+
|
| 430 |
+
def release(self) -> None:
|
| 431 |
+
Lock.release(self)
|
| 432 |
+
if os.path.isfile(self.filename): # pragma: no branch
|
| 433 |
+
os.unlink(self.filename)
|
| 434 |
+
|
| 435 |
+
|
| 436 |
+
class BoundedSemaphore(LockBase):
|
| 437 |
+
"""
|
| 438 |
+
Bounded semaphore to prevent too many parallel processes from running
|
| 439 |
+
|
| 440 |
+
This method is deprecated because multiple processes that are completely
|
| 441 |
+
unrelated could end up using the same semaphore. To prevent this,
|
| 442 |
+
use `NamedBoundedSemaphore` instead. The
|
| 443 |
+
`NamedBoundedSemaphore` is a drop-in replacement for this class.
|
| 444 |
+
|
| 445 |
+
>>> semaphore = BoundedSemaphore(2, directory='')
|
| 446 |
+
>>> str(semaphore.get_filenames()[0])
|
| 447 |
+
'bounded_semaphore.00.lock'
|
| 448 |
+
>>> str(sorted(semaphore.get_random_filenames())[1])
|
| 449 |
+
'bounded_semaphore.01.lock'
|
| 450 |
+
"""
|
| 451 |
+
|
| 452 |
+
lock: Lock | None
|
| 453 |
+
|
| 454 |
+
def __init__(
|
| 455 |
+
self,
|
| 456 |
+
maximum: int,
|
| 457 |
+
name: str = 'bounded_semaphore',
|
| 458 |
+
filename_pattern: str = '{name}.{number:02d}.lock',
|
| 459 |
+
directory: str = tempfile.gettempdir(),
|
| 460 |
+
timeout: float | None = DEFAULT_TIMEOUT,
|
| 461 |
+
check_interval: float | None = DEFAULT_CHECK_INTERVAL,
|
| 462 |
+
fail_when_locked: bool | None = True,
|
| 463 |
+
) -> None:
|
| 464 |
+
self.maximum = maximum
|
| 465 |
+
self.name = name
|
| 466 |
+
self.filename_pattern = filename_pattern
|
| 467 |
+
self.directory = directory
|
| 468 |
+
self.lock: Lock | None = None
|
| 469 |
+
super().__init__(
|
| 470 |
+
timeout=timeout,
|
| 471 |
+
check_interval=check_interval,
|
| 472 |
+
fail_when_locked=fail_when_locked,
|
| 473 |
+
)
|
| 474 |
+
|
| 475 |
+
if not name or name == 'bounded_semaphore':
|
| 476 |
+
warnings.warn(
|
| 477 |
+
'`BoundedSemaphore` without an explicit `name` '
|
| 478 |
+
'argument is deprecated, use NamedBoundedSemaphore',
|
| 479 |
+
DeprecationWarning,
|
| 480 |
+
stacklevel=1,
|
| 481 |
+
)
|
| 482 |
+
|
| 483 |
+
def get_filenames(self) -> typing.Sequence[pathlib.Path]:
|
| 484 |
+
return [self.get_filename(n) for n in range(self.maximum)]
|
| 485 |
+
|
| 486 |
+
def get_random_filenames(self) -> typing.Sequence[pathlib.Path]:
|
| 487 |
+
filenames = list(self.get_filenames())
|
| 488 |
+
random.shuffle(filenames)
|
| 489 |
+
return filenames
|
| 490 |
+
|
| 491 |
+
def get_filename(self, number: int) -> pathlib.Path:
|
| 492 |
+
return pathlib.Path(self.directory) / self.filename_pattern.format(
|
| 493 |
+
name=self.name,
|
| 494 |
+
number=number,
|
| 495 |
+
)
|
| 496 |
+
|
| 497 |
+
def acquire( # type: ignore[override]
|
| 498 |
+
self,
|
| 499 |
+
timeout: float | None = None,
|
| 500 |
+
check_interval: float | None = None,
|
| 501 |
+
fail_when_locked: bool | None = None,
|
| 502 |
+
) -> Lock | None:
|
| 503 |
+
assert not self.lock, 'Already locked'
|
| 504 |
+
|
| 505 |
+
filenames = self.get_filenames()
|
| 506 |
+
|
| 507 |
+
for n in self._timeout_generator(timeout, check_interval): # pragma:
|
| 508 |
+
logger.debug('trying lock (attempt %d) %r', n, filenames)
|
| 509 |
+
# no branch
|
| 510 |
+
if self.try_lock(filenames): # pragma: no branch
|
| 511 |
+
return self.lock # pragma: no cover
|
| 512 |
+
|
| 513 |
+
if fail_when_locked := coalesce(
|
| 514 |
+
fail_when_locked,
|
| 515 |
+
self.fail_when_locked,
|
| 516 |
+
):
|
| 517 |
+
raise exceptions.AlreadyLocked()
|
| 518 |
+
|
| 519 |
+
return None
|
| 520 |
+
|
| 521 |
+
def try_lock(self, filenames: typing.Sequence[Filename]) -> bool:
|
| 522 |
+
filename: Filename
|
| 523 |
+
for filename in filenames:
|
| 524 |
+
logger.debug('trying lock for %r', filename)
|
| 525 |
+
self.lock = Lock(filename, fail_when_locked=True)
|
| 526 |
+
try:
|
| 527 |
+
self.lock.acquire()
|
| 528 |
+
except exceptions.AlreadyLocked:
|
| 529 |
+
self.lock = None
|
| 530 |
+
else:
|
| 531 |
+
logger.debug('locked %r', filename)
|
| 532 |
+
return True
|
| 533 |
+
|
| 534 |
+
return False
|
| 535 |
+
|
| 536 |
+
def release(self) -> None: # pragma: no cover
|
| 537 |
+
if self.lock is not None:
|
| 538 |
+
self.lock.release()
|
| 539 |
+
self.lock = None
|
| 540 |
+
|
| 541 |
+
|
| 542 |
+
class NamedBoundedSemaphore(BoundedSemaphore):
|
| 543 |
+
"""
|
| 544 |
+
Bounded semaphore to prevent too many parallel processes from running
|
| 545 |
+
|
| 546 |
+
It's also possible to specify a timeout when acquiring the lock to wait
|
| 547 |
+
for a resource to become available. This is very similar to
|
| 548 |
+
`threading.BoundedSemaphore` but works across multiple processes and across
|
| 549 |
+
multiple operating systems.
|
| 550 |
+
|
| 551 |
+
Because this works across multiple processes it's important to give the
|
| 552 |
+
semaphore a name. This name is used to create the lock files. If you
|
| 553 |
+
don't specify a name, a random name will be generated. This means that
|
| 554 |
+
you can't use the same semaphore in multiple processes unless you pass the
|
| 555 |
+
semaphore object to the other processes.
|
| 556 |
+
|
| 557 |
+
>>> semaphore = NamedBoundedSemaphore(2, name='test')
|
| 558 |
+
>>> str(semaphore.get_filenames()[0])
|
| 559 |
+
'...test.00.lock'
|
| 560 |
+
|
| 561 |
+
>>> semaphore = NamedBoundedSemaphore(2)
|
| 562 |
+
>>> 'bounded_semaphore' in str(semaphore.get_filenames()[0])
|
| 563 |
+
True
|
| 564 |
+
|
| 565 |
+
"""
|
| 566 |
+
|
| 567 |
+
def __init__(
|
| 568 |
+
self,
|
| 569 |
+
maximum: int,
|
| 570 |
+
name: str | None = None,
|
| 571 |
+
filename_pattern: str = '{name}.{number:02d}.lock',
|
| 572 |
+
directory: str = tempfile.gettempdir(),
|
| 573 |
+
timeout: float | None = DEFAULT_TIMEOUT,
|
| 574 |
+
check_interval: float | None = DEFAULT_CHECK_INTERVAL,
|
| 575 |
+
fail_when_locked: bool | None = True,
|
| 576 |
+
) -> None:
|
| 577 |
+
if name is None:
|
| 578 |
+
name = f'bounded_semaphore.{random.randint(0, 1000000):d}'
|
| 579 |
+
super().__init__(
|
| 580 |
+
maximum,
|
| 581 |
+
name,
|
| 582 |
+
filename_pattern,
|
| 583 |
+
directory,
|
| 584 |
+
timeout,
|
| 585 |
+
check_interval,
|
| 586 |
+
fail_when_locked,
|
| 587 |
+
)
|
evalkit_cambrian/lib/python3.10/site-packages/prompt_toolkit-3.0.48.dist-info/AUTHORS.rst
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Authors
|
| 2 |
+
=======
|
| 3 |
+
|
| 4 |
+
Creator
|
| 5 |
+
-------
|
| 6 |
+
Jonathan Slenders <jonathan AT slenders.be>
|
| 7 |
+
|
| 8 |
+
Contributors
|
| 9 |
+
------------
|
| 10 |
+
|
| 11 |
+
- Amjith Ramanujam <amjith.r AT gmail.com>
|
evalkit_cambrian/lib/python3.10/site-packages/prompt_toolkit-3.0.48.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
evalkit_cambrian/lib/python3.10/site-packages/prompt_toolkit-3.0.48.dist-info/RECORD
ADDED
|
@@ -0,0 +1,297 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
prompt_toolkit-3.0.48.dist-info/AUTHORS.rst,sha256=09xixryENmWElauJrqN1Eef6k5HSgmVyOcnPuA29QuU,148
|
| 2 |
+
prompt_toolkit-3.0.48.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 3 |
+
prompt_toolkit-3.0.48.dist-info/LICENSE,sha256=MDV02b3YXHV9YCUBeUK_F7ru3yd49ivX9CXQfYgPTEo,1493
|
| 4 |
+
prompt_toolkit-3.0.48.dist-info/METADATA,sha256=1vCuzkhh4l8u5xaN5hI2QNDmxPgJUz_6ijqt0jYMEcg,6429
|
| 5 |
+
prompt_toolkit-3.0.48.dist-info/RECORD,,
|
| 6 |
+
prompt_toolkit-3.0.48.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 7 |
+
prompt_toolkit-3.0.48.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
|
| 8 |
+
prompt_toolkit-3.0.48.dist-info/top_level.txt,sha256=5rJXrEGx6st4KkmhOPR6l0ITDbV53x_Xy6MurOukXfA,15
|
| 9 |
+
prompt_toolkit/__init__.py,sha256=aZiJwEFAv6OIYcVL-eKqMBAX722lQNfYBtl5twiCP5A,1297
|
| 10 |
+
prompt_toolkit/__pycache__/__init__.cpython-310.pyc,,
|
| 11 |
+
prompt_toolkit/__pycache__/auto_suggest.cpython-310.pyc,,
|
| 12 |
+
prompt_toolkit/__pycache__/buffer.cpython-310.pyc,,
|
| 13 |
+
prompt_toolkit/__pycache__/cache.cpython-310.pyc,,
|
| 14 |
+
prompt_toolkit/__pycache__/cursor_shapes.cpython-310.pyc,,
|
| 15 |
+
prompt_toolkit/__pycache__/data_structures.cpython-310.pyc,,
|
| 16 |
+
prompt_toolkit/__pycache__/document.cpython-310.pyc,,
|
| 17 |
+
prompt_toolkit/__pycache__/enums.cpython-310.pyc,,
|
| 18 |
+
prompt_toolkit/__pycache__/history.cpython-310.pyc,,
|
| 19 |
+
prompt_toolkit/__pycache__/keys.cpython-310.pyc,,
|
| 20 |
+
prompt_toolkit/__pycache__/log.cpython-310.pyc,,
|
| 21 |
+
prompt_toolkit/__pycache__/mouse_events.cpython-310.pyc,,
|
| 22 |
+
prompt_toolkit/__pycache__/patch_stdout.cpython-310.pyc,,
|
| 23 |
+
prompt_toolkit/__pycache__/renderer.cpython-310.pyc,,
|
| 24 |
+
prompt_toolkit/__pycache__/search.cpython-310.pyc,,
|
| 25 |
+
prompt_toolkit/__pycache__/selection.cpython-310.pyc,,
|
| 26 |
+
prompt_toolkit/__pycache__/token.cpython-310.pyc,,
|
| 27 |
+
prompt_toolkit/__pycache__/utils.cpython-310.pyc,,
|
| 28 |
+
prompt_toolkit/__pycache__/validation.cpython-310.pyc,,
|
| 29 |
+
prompt_toolkit/__pycache__/win32_types.cpython-310.pyc,,
|
| 30 |
+
prompt_toolkit/application/__init__.py,sha256=rat9iPhYTmo7nd2BU8xZSU_-AfJpjnnBmxe9y3TQivM,657
|
| 31 |
+
prompt_toolkit/application/__pycache__/__init__.cpython-310.pyc,,
|
| 32 |
+
prompt_toolkit/application/__pycache__/application.cpython-310.pyc,,
|
| 33 |
+
prompt_toolkit/application/__pycache__/current.cpython-310.pyc,,
|
| 34 |
+
prompt_toolkit/application/__pycache__/dummy.cpython-310.pyc,,
|
| 35 |
+
prompt_toolkit/application/__pycache__/run_in_terminal.cpython-310.pyc,,
|
| 36 |
+
prompt_toolkit/application/application.py,sha256=oziKsiHNmGJQhMj1ODQIkJSeOI3ejRWWQ9XduVHYOAE,63046
|
| 37 |
+
prompt_toolkit/application/current.py,sha256=Y2KYZZEp7f5eKv-CdlRCuqQ0p7ZJ5LhtPhh_uOs1pVs,6205
|
| 38 |
+
prompt_toolkit/application/dummy.py,sha256=BCfThUgz5Eb5fWJSKBVeJaA5kwksw8jJQtN6g61xMXM,1619
|
| 39 |
+
prompt_toolkit/application/run_in_terminal.py,sha256=GygjXOZIp8akJAsSEP5zYYv8KXHR6kG2hhYFWIACuXc,3613
|
| 40 |
+
prompt_toolkit/auto_suggest.py,sha256=qSiaxlaKjLyNEJ8bJN0641gqsIzZ3LB2cOyq88xBQb4,5798
|
| 41 |
+
prompt_toolkit/buffer.py,sha256=VkAbKTJV7441aO4Ei-ozqc8IBlNPEnLGAt5t42tB6jg,74513
|
| 42 |
+
prompt_toolkit/cache.py,sha256=Lo3ewsEIgn-LQBYNni79w74u5LSvvuVYF0e6giEArQg,3823
|
| 43 |
+
prompt_toolkit/clipboard/__init__.py,sha256=yK0LonIfEZRyoXqcgLdh8kjOhechjO-Ej2-C1g3VegQ,439
|
| 44 |
+
prompt_toolkit/clipboard/__pycache__/__init__.cpython-310.pyc,,
|
| 45 |
+
prompt_toolkit/clipboard/__pycache__/base.cpython-310.pyc,,
|
| 46 |
+
prompt_toolkit/clipboard/__pycache__/in_memory.cpython-310.pyc,,
|
| 47 |
+
prompt_toolkit/clipboard/__pycache__/pyperclip.cpython-310.pyc,,
|
| 48 |
+
prompt_toolkit/clipboard/base.py,sha256=rucEv1kKfvZUj6bwGRz04uSSTZie7rvnKUnyVXb2vv4,2515
|
| 49 |
+
prompt_toolkit/clipboard/in_memory.py,sha256=U_iY6UUevkKMfTvir_XMsD0qwuLVKuoTeRdjkZW-A6I,1060
|
| 50 |
+
prompt_toolkit/clipboard/pyperclip.py,sha256=H9HOlyGW0XItvx_Ji64zBQkiQPfLb6DFAw5J5irzK28,1160
|
| 51 |
+
prompt_toolkit/completion/__init__.py,sha256=8Hm2yJ1nqBkaC-R9ugELgjhU32U308V89F6bJG0QDYo,992
|
| 52 |
+
prompt_toolkit/completion/__pycache__/__init__.cpython-310.pyc,,
|
| 53 |
+
prompt_toolkit/completion/__pycache__/base.cpython-310.pyc,,
|
| 54 |
+
prompt_toolkit/completion/__pycache__/deduplicate.cpython-310.pyc,,
|
| 55 |
+
prompt_toolkit/completion/__pycache__/filesystem.cpython-310.pyc,,
|
| 56 |
+
prompt_toolkit/completion/__pycache__/fuzzy_completer.cpython-310.pyc,,
|
| 57 |
+
prompt_toolkit/completion/__pycache__/nested.cpython-310.pyc,,
|
| 58 |
+
prompt_toolkit/completion/__pycache__/word_completer.cpython-310.pyc,,
|
| 59 |
+
prompt_toolkit/completion/base.py,sha256=T7212aScNaGMaSrDIwsJIXeXLIM_eVCIcScNcDPZYwI,16103
|
| 60 |
+
prompt_toolkit/completion/deduplicate.py,sha256=QibqYI23GPjsbyxaxiNoqAbKawzHmfYOlxeW2HPFbTE,1436
|
| 61 |
+
prompt_toolkit/completion/filesystem.py,sha256=Z_RR72e14bVavdWnbxECw23qCt_TWTY9R6DpVqW7vxE,3949
|
| 62 |
+
prompt_toolkit/completion/fuzzy_completer.py,sha256=RnREvA7y6nC7LKGqZUEvtuSm8eXVQYheJTsnhUvRbmM,7738
|
| 63 |
+
prompt_toolkit/completion/nested.py,sha256=dEgb_KckCSQDQ4eblNkONz-qs5njEZVWbtEW3WCEPH4,3854
|
| 64 |
+
prompt_toolkit/completion/word_completer.py,sha256=VF1S7YCxYqI3pKmVXJaD82eMW1ZMq8_zAAIS1XKGU5M,3435
|
| 65 |
+
prompt_toolkit/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 66 |
+
prompt_toolkit/contrib/__pycache__/__init__.cpython-310.pyc,,
|
| 67 |
+
prompt_toolkit/contrib/completers/__init__.py,sha256=qJB_xNFGbhfiDv_zUaox9mkQEGqBYqP_jfByQDb93hA,103
|
| 68 |
+
prompt_toolkit/contrib/completers/__pycache__/__init__.cpython-310.pyc,,
|
| 69 |
+
prompt_toolkit/contrib/completers/__pycache__/system.cpython-310.pyc,,
|
| 70 |
+
prompt_toolkit/contrib/completers/system.py,sha256=0Hc2dziheEx2qNog4YOl-4Tu8Fg5Dx2xjNURTx09BDg,2057
|
| 71 |
+
prompt_toolkit/contrib/regular_languages/__init__.py,sha256=cgMQkakD4FbvLUozDGucRRFOk8yScfcKfqOMpCtvAPo,3279
|
| 72 |
+
prompt_toolkit/contrib/regular_languages/__pycache__/__init__.cpython-310.pyc,,
|
| 73 |
+
prompt_toolkit/contrib/regular_languages/__pycache__/compiler.cpython-310.pyc,,
|
| 74 |
+
prompt_toolkit/contrib/regular_languages/__pycache__/completion.cpython-310.pyc,,
|
| 75 |
+
prompt_toolkit/contrib/regular_languages/__pycache__/lexer.cpython-310.pyc,,
|
| 76 |
+
prompt_toolkit/contrib/regular_languages/__pycache__/regex_parser.cpython-310.pyc,,
|
| 77 |
+
prompt_toolkit/contrib/regular_languages/__pycache__/validation.cpython-310.pyc,,
|
| 78 |
+
prompt_toolkit/contrib/regular_languages/compiler.py,sha256=3tnUJCE2jCcVI63vcpI0kG4KfuqIatSQRb8-F5UCgsI,21948
|
| 79 |
+
prompt_toolkit/contrib/regular_languages/completion.py,sha256=jESF35RaYWj_rnT-OZc_zC9QZXYvPao4JZ8wx7yS3KM,3468
|
| 80 |
+
prompt_toolkit/contrib/regular_languages/lexer.py,sha256=DBgyek9LkfJv6hz24eOaVM--w9Qaw4zIMWusMvGHBts,3415
|
| 81 |
+
prompt_toolkit/contrib/regular_languages/regex_parser.py,sha256=zWGJfQSjomvdj2rD7MPpn2pWOUR7VMv4su5iAV0jzM4,7732
|
| 82 |
+
prompt_toolkit/contrib/regular_languages/validation.py,sha256=4k5wxgUFc_KTOW5PmmZOrWb-Z-HjX8fjjKqul-oR8uc,2059
|
| 83 |
+
prompt_toolkit/contrib/ssh/__init__.py,sha256=UcRG2wc28EEKtFEudoIXz_DFzWKKQjAVSv6cf-ufPiM,180
|
| 84 |
+
prompt_toolkit/contrib/ssh/__pycache__/__init__.cpython-310.pyc,,
|
| 85 |
+
prompt_toolkit/contrib/ssh/__pycache__/server.cpython-310.pyc,,
|
| 86 |
+
prompt_toolkit/contrib/ssh/server.py,sha256=81McNn6r0Cbu9SPceH7fa5QirAnteHmNh1Gk4dFpgvI,6130
|
| 87 |
+
prompt_toolkit/contrib/telnet/__init__.py,sha256=NyUfsmJdafGiUxD9gzYQNlVdHu_ILDH7F57VJw8efUM,104
|
| 88 |
+
prompt_toolkit/contrib/telnet/__pycache__/__init__.cpython-310.pyc,,
|
| 89 |
+
prompt_toolkit/contrib/telnet/__pycache__/log.cpython-310.pyc,,
|
| 90 |
+
prompt_toolkit/contrib/telnet/__pycache__/protocol.cpython-310.pyc,,
|
| 91 |
+
prompt_toolkit/contrib/telnet/__pycache__/server.cpython-310.pyc,,
|
| 92 |
+
prompt_toolkit/contrib/telnet/log.py,sha256=LcFRDyRxoRKSZsVRVpBOrEgsEt_LQLyUHKtgVZklopI,167
|
| 93 |
+
prompt_toolkit/contrib/telnet/protocol.py,sha256=2i-JYfaAse-uFWtNdVEoP_Q-OMbkl3YbUfv_wvaaS3k,5584
|
| 94 |
+
prompt_toolkit/contrib/telnet/server.py,sha256=dKHpEhXkIef_iuvZCbumwotapx6i03t6Gk01zkAoNIU,13477
|
| 95 |
+
prompt_toolkit/cursor_shapes.py,sha256=k5g5yJONGl1ITgy29KX9yzspJvIJ6Jbbwd7WkYC9Z-4,3721
|
| 96 |
+
prompt_toolkit/data_structures.py,sha256=w0BZy6Fpx4se-kAI9Kj8Q7lAKLln8U_Em_ncpqnC1xY,212
|
| 97 |
+
prompt_toolkit/document.py,sha256=vzg3U2Zzd95l1pkZnIFjAA88ygFYSdybhRKwJcxvvr0,40579
|
| 98 |
+
prompt_toolkit/enums.py,sha256=F3q9JmH9vhpMLA2OKKN7RrNQu_YDlNWoPU-0qsTUuAs,358
|
| 99 |
+
prompt_toolkit/eventloop/__init__.py,sha256=pxSkV_zybeoj6Ff3lgNHhbD5ENmBW9mk_XkiyeRL_OY,730
|
| 100 |
+
prompt_toolkit/eventloop/__pycache__/__init__.cpython-310.pyc,,
|
| 101 |
+
prompt_toolkit/eventloop/__pycache__/async_generator.cpython-310.pyc,,
|
| 102 |
+
prompt_toolkit/eventloop/__pycache__/inputhook.cpython-310.pyc,,
|
| 103 |
+
prompt_toolkit/eventloop/__pycache__/utils.cpython-310.pyc,,
|
| 104 |
+
prompt_toolkit/eventloop/__pycache__/win32.cpython-310.pyc,,
|
| 105 |
+
prompt_toolkit/eventloop/async_generator.py,sha256=nozLJR4z2MJKV7Qi0hsknA2mb1Jcp7XJx-AdUEDhDhw,3933
|
| 106 |
+
prompt_toolkit/eventloop/inputhook.py,sha256=LDElZtmg-kLQiItMS8CFPxtLzxV8QzohWHsWUvw3h00,6130
|
| 107 |
+
prompt_toolkit/eventloop/utils.py,sha256=VhYmsDZmRwVXnEPBF_C2LpiW-ranPn6EIXWIuMa6XaU,3200
|
| 108 |
+
prompt_toolkit/eventloop/win32.py,sha256=wrLJVOtOw_tqVOeK6ttNF47Sk2oX342dLN1pxKBLCL4,2286
|
| 109 |
+
prompt_toolkit/filters/__init__.py,sha256=2YSVwf4EnLf1VOXYmb8Dr0WoA93XGGO0iCUIr14KGXQ,1990
|
| 110 |
+
prompt_toolkit/filters/__pycache__/__init__.cpython-310.pyc,,
|
| 111 |
+
prompt_toolkit/filters/__pycache__/app.cpython-310.pyc,,
|
| 112 |
+
prompt_toolkit/filters/__pycache__/base.cpython-310.pyc,,
|
| 113 |
+
prompt_toolkit/filters/__pycache__/cli.cpython-310.pyc,,
|
| 114 |
+
prompt_toolkit/filters/__pycache__/utils.cpython-310.pyc,,
|
| 115 |
+
prompt_toolkit/filters/app.py,sha256=QVJMjR6Zf-BxlmGaUd-WbtEaGlxMKYMFVwj3qcwo7ns,10408
|
| 116 |
+
prompt_toolkit/filters/base.py,sha256=P5qAh9--mTyncYi1R-WK-waqh5j118o_hTj-bBBoEw4,6870
|
| 117 |
+
prompt_toolkit/filters/cli.py,sha256=QGV7JT7-BUXpPXNzBLUcNH3GI69ugFZCDV1nylOjq78,1867
|
| 118 |
+
prompt_toolkit/filters/utils.py,sha256=4nOjHPEd451Pj3qpfg40fq3XSnt1kmq3WoAbhu2NV-8,859
|
| 119 |
+
prompt_toolkit/formatted_text/__init__.py,sha256=aQtNhxOhIa_HmvlNOQ2RGGpplg-KX3sYFJWiXgNfQxY,1509
|
| 120 |
+
prompt_toolkit/formatted_text/__pycache__/__init__.cpython-310.pyc,,
|
| 121 |
+
prompt_toolkit/formatted_text/__pycache__/ansi.cpython-310.pyc,,
|
| 122 |
+
prompt_toolkit/formatted_text/__pycache__/base.cpython-310.pyc,,
|
| 123 |
+
prompt_toolkit/formatted_text/__pycache__/html.cpython-310.pyc,,
|
| 124 |
+
prompt_toolkit/formatted_text/__pycache__/pygments.cpython-310.pyc,,
|
| 125 |
+
prompt_toolkit/formatted_text/__pycache__/utils.cpython-310.pyc,,
|
| 126 |
+
prompt_toolkit/formatted_text/ansi.py,sha256=5uERUQxVrXAQfbTE5R9oWTIqLnDfNmsNpWPtfH5nLSo,9679
|
| 127 |
+
prompt_toolkit/formatted_text/base.py,sha256=X3y5QIPH2IS9LesYzXneELtT4zGpik8gd-UQVh6I2bE,5162
|
| 128 |
+
prompt_toolkit/formatted_text/html.py,sha256=-88VwuuCLRNkzEgK8FJKOHT9NDh939BxH8vGivvILdU,4374
|
| 129 |
+
prompt_toolkit/formatted_text/pygments.py,sha256=sK-eFFzOnD2sgadVLgNkW-xOuTw_uIf8_z06DZ4CA8g,780
|
| 130 |
+
prompt_toolkit/formatted_text/utils.py,sha256=77hogMzZtwQUf8--hkyOVgQP0s0W3XDlYHvRgKHjlbA,3069
|
| 131 |
+
prompt_toolkit/history.py,sha256=S9W9SgL83QftMQANdjdjBMm-yGmeM51_qCRRC1H4Mr8,9441
|
| 132 |
+
prompt_toolkit/input/__init__.py,sha256=7g6kwNanG4Ml12FFdj9E1ivChpXWcfRUMUJzmTQMS7U,273
|
| 133 |
+
prompt_toolkit/input/__pycache__/__init__.cpython-310.pyc,,
|
| 134 |
+
prompt_toolkit/input/__pycache__/ansi_escape_sequences.cpython-310.pyc,,
|
| 135 |
+
prompt_toolkit/input/__pycache__/base.cpython-310.pyc,,
|
| 136 |
+
prompt_toolkit/input/__pycache__/defaults.cpython-310.pyc,,
|
| 137 |
+
prompt_toolkit/input/__pycache__/posix_pipe.cpython-310.pyc,,
|
| 138 |
+
prompt_toolkit/input/__pycache__/posix_utils.cpython-310.pyc,,
|
| 139 |
+
prompt_toolkit/input/__pycache__/typeahead.cpython-310.pyc,,
|
| 140 |
+
prompt_toolkit/input/__pycache__/vt100.cpython-310.pyc,,
|
| 141 |
+
prompt_toolkit/input/__pycache__/vt100_parser.cpython-310.pyc,,
|
| 142 |
+
prompt_toolkit/input/__pycache__/win32.cpython-310.pyc,,
|
| 143 |
+
prompt_toolkit/input/__pycache__/win32_pipe.cpython-310.pyc,,
|
| 144 |
+
prompt_toolkit/input/ansi_escape_sequences.py,sha256=h7714SZgs2z80PZRVxsCfHHJjtEUmWlToVCBtFFvfR4,13663
|
| 145 |
+
prompt_toolkit/input/base.py,sha256=pItwaKXtVZwemrKpoltmRskwoeXnSaBUZ_6iFZBdQf8,4036
|
| 146 |
+
prompt_toolkit/input/defaults.py,sha256=a-vczSh7kngFArLhFsJ2CXNdkx5WQlzilxHLdzGDkFw,2500
|
| 147 |
+
prompt_toolkit/input/posix_pipe.py,sha256=B_JS2-FB6Sk0da9gSH0NnhcUCkp3bw0m1-ogMOHmmcE,3158
|
| 148 |
+
prompt_toolkit/input/posix_utils.py,sha256=ySaEGnt_IwG5nzxcpILgEXC60mbrIAbC3ZZ6kuE9zCw,3973
|
| 149 |
+
prompt_toolkit/input/typeahead.py,sha256=mAaf5_XKTLpao1kw9ORIrhGGEz9gvu4oc-iZKKMQz3k,2545
|
| 150 |
+
prompt_toolkit/input/vt100.py,sha256=soxxSLU7fwp6yn77j5gCYUZroEp7KBKm4a3Zn4vRAsk,10514
|
| 151 |
+
prompt_toolkit/input/vt100_parser.py,sha256=qDrNbsnPukZblfyjgfjCvzMv8xQKRz0M84UvUWq7P44,8407
|
| 152 |
+
prompt_toolkit/input/win32.py,sha256=I4X3MHrJAwL4Dy-EZyCQVjoFVd9HFY4JtI9IehpnrUw,25676
|
| 153 |
+
prompt_toolkit/input/win32_pipe.py,sha256=OvjKHN5xfEoGHLygWwayyeB0RolHL6YHLNeOMK-54LU,4700
|
| 154 |
+
prompt_toolkit/key_binding/__init__.py,sha256=IZWqJLBjQaQMfo0SJTjqJKQH0TZcSNa2Cdln-M4z8JI,447
|
| 155 |
+
prompt_toolkit/key_binding/__pycache__/__init__.cpython-310.pyc,,
|
| 156 |
+
prompt_toolkit/key_binding/__pycache__/defaults.cpython-310.pyc,,
|
| 157 |
+
prompt_toolkit/key_binding/__pycache__/digraphs.cpython-310.pyc,,
|
| 158 |
+
prompt_toolkit/key_binding/__pycache__/emacs_state.cpython-310.pyc,,
|
| 159 |
+
prompt_toolkit/key_binding/__pycache__/key_bindings.cpython-310.pyc,,
|
| 160 |
+
prompt_toolkit/key_binding/__pycache__/key_processor.cpython-310.pyc,,
|
| 161 |
+
prompt_toolkit/key_binding/__pycache__/vi_state.cpython-310.pyc,,
|
| 162 |
+
prompt_toolkit/key_binding/bindings/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 163 |
+
prompt_toolkit/key_binding/bindings/__pycache__/__init__.cpython-310.pyc,,
|
| 164 |
+
prompt_toolkit/key_binding/bindings/__pycache__/auto_suggest.cpython-310.pyc,,
|
| 165 |
+
prompt_toolkit/key_binding/bindings/__pycache__/basic.cpython-310.pyc,,
|
| 166 |
+
prompt_toolkit/key_binding/bindings/__pycache__/completion.cpython-310.pyc,,
|
| 167 |
+
prompt_toolkit/key_binding/bindings/__pycache__/cpr.cpython-310.pyc,,
|
| 168 |
+
prompt_toolkit/key_binding/bindings/__pycache__/emacs.cpython-310.pyc,,
|
| 169 |
+
prompt_toolkit/key_binding/bindings/__pycache__/focus.cpython-310.pyc,,
|
| 170 |
+
prompt_toolkit/key_binding/bindings/__pycache__/mouse.cpython-310.pyc,,
|
| 171 |
+
prompt_toolkit/key_binding/bindings/__pycache__/named_commands.cpython-310.pyc,,
|
| 172 |
+
prompt_toolkit/key_binding/bindings/__pycache__/open_in_editor.cpython-310.pyc,,
|
| 173 |
+
prompt_toolkit/key_binding/bindings/__pycache__/page_navigation.cpython-310.pyc,,
|
| 174 |
+
prompt_toolkit/key_binding/bindings/__pycache__/scroll.cpython-310.pyc,,
|
| 175 |
+
prompt_toolkit/key_binding/bindings/__pycache__/search.cpython-310.pyc,,
|
| 176 |
+
prompt_toolkit/key_binding/bindings/__pycache__/vi.cpython-310.pyc,,
|
| 177 |
+
prompt_toolkit/key_binding/bindings/auto_suggest.py,sha256=4PrJVgIK_Nt2o3RtVtuRm2aFPGrackhuMCBVNtjPj7M,1855
|
| 178 |
+
prompt_toolkit/key_binding/bindings/basic.py,sha256=Fp9mj-RYZlGmAU9UV9wIIEnlxELN7NJ0qakMVH7MuRU,7229
|
| 179 |
+
prompt_toolkit/key_binding/bindings/completion.py,sha256=6nR3WfGe7FDsjq1xTsDazeajkV9KBLpCYQi3klujdLU,6903
|
| 180 |
+
prompt_toolkit/key_binding/bindings/cpr.py,sha256=181XQNZ0-sgL-vV2B67aRitTFHadogvMUh6LWVMUTV0,786
|
| 181 |
+
prompt_toolkit/key_binding/bindings/emacs.py,sha256=trIZUu8e5kJGSaq6Ndb-Exz4NdHV9SjUsfsw_UM8c6o,19634
|
| 182 |
+
prompt_toolkit/key_binding/bindings/focus.py,sha256=LIP4InccUUvD7I4NZrqtY9WjVfO_wJLyrVcoxAw92uU,507
|
| 183 |
+
prompt_toolkit/key_binding/bindings/mouse.py,sha256=6JPr0BqzFfLEVb7Ek_WO0CejUcwq0jIrrNwvSGkHeus,18586
|
| 184 |
+
prompt_toolkit/key_binding/bindings/named_commands.py,sha256=jdkqQ-ltNYC2BgIW1QdG7Qx4mWIod2Ps6C2TpL6NJ-Y,18407
|
| 185 |
+
prompt_toolkit/key_binding/bindings/open_in_editor.py,sha256=bgVmeDmVtHsgzJnc59b-dOZ-nO6WydBYI_7aOWMpe5c,1356
|
| 186 |
+
prompt_toolkit/key_binding/bindings/page_navigation.py,sha256=RPLUEZuZvekErPazex7pK0c6LxeV9LshewBHp012iMI,2392
|
| 187 |
+
prompt_toolkit/key_binding/bindings/scroll.py,sha256=hQeQ0m2AStUKjVNDXfa9DTMw3WS5qzW1n3gU0fkfWFk,5613
|
| 188 |
+
prompt_toolkit/key_binding/bindings/search.py,sha256=rU6VYra1IDzN6mG4mrbGivrZ-hjy_kZcjsKqmdVJKAE,2632
|
| 189 |
+
prompt_toolkit/key_binding/bindings/vi.py,sha256=TSglqzPZU9VMernOvn09GVxObFXpXuyCSiH9i1MpIIo,75602
|
| 190 |
+
prompt_toolkit/key_binding/defaults.py,sha256=JZJTshyBV39cWH2AT7xDP9AXOiyXQpjaI-ckePTi7os,1975
|
| 191 |
+
prompt_toolkit/key_binding/digraphs.py,sha256=rZvh9AdY5Te6bSlIHRQNskJYVIONYahYuu-w9Pti5yM,32785
|
| 192 |
+
prompt_toolkit/key_binding/emacs_state.py,sha256=ZJBWcLTzgtRkUW9UiDuI-SRrnlLsxu3IrTOK0_UQt5Y,884
|
| 193 |
+
prompt_toolkit/key_binding/key_bindings.py,sha256=0QDWvFuct2vAIHK-hrQmEipmiRMBQbWP4JB1PsXVZKY,20927
|
| 194 |
+
prompt_toolkit/key_binding/key_processor.py,sha256=0WLK4dcU8klL2Xna_RKxOpsW7t8ld67Y9Xmto3U-n0E,17555
|
| 195 |
+
prompt_toolkit/key_binding/vi_state.py,sha256=p-JuzwYtWl25tMmfRZ6e7UQWDi7RlXnAggir7ZSi07I,3341
|
| 196 |
+
prompt_toolkit/keys.py,sha256=nDkIqJbm_dRsVjArp7oItGKIFAAnSxcSniSwc1O-BYA,4916
|
| 197 |
+
prompt_toolkit/layout/__init__.py,sha256=gNbniLmlvkWwPE6Kg2ykyZJRTOKsWnHbwUjyO-VFDP8,3603
|
| 198 |
+
prompt_toolkit/layout/__pycache__/__init__.cpython-310.pyc,,
|
| 199 |
+
prompt_toolkit/layout/__pycache__/containers.cpython-310.pyc,,
|
| 200 |
+
prompt_toolkit/layout/__pycache__/controls.cpython-310.pyc,,
|
| 201 |
+
prompt_toolkit/layout/__pycache__/dimension.cpython-310.pyc,,
|
| 202 |
+
prompt_toolkit/layout/__pycache__/dummy.cpython-310.pyc,,
|
| 203 |
+
prompt_toolkit/layout/__pycache__/layout.cpython-310.pyc,,
|
| 204 |
+
prompt_toolkit/layout/__pycache__/margins.cpython-310.pyc,,
|
| 205 |
+
prompt_toolkit/layout/__pycache__/menus.cpython-310.pyc,,
|
| 206 |
+
prompt_toolkit/layout/__pycache__/mouse_handlers.cpython-310.pyc,,
|
| 207 |
+
prompt_toolkit/layout/__pycache__/processors.cpython-310.pyc,,
|
| 208 |
+
prompt_toolkit/layout/__pycache__/screen.cpython-310.pyc,,
|
| 209 |
+
prompt_toolkit/layout/__pycache__/scrollable_pane.cpython-310.pyc,,
|
| 210 |
+
prompt_toolkit/layout/__pycache__/utils.cpython-310.pyc,,
|
| 211 |
+
prompt_toolkit/layout/containers.py,sha256=ZdpJEFJT11_CDWJEV6fDv5w0NmjnG8kpB4s_JMay-_s,99206
|
| 212 |
+
prompt_toolkit/layout/controls.py,sha256=3QCrRYaVXsK1UZXJ2PcssTK2BkOJIdCrHjktYFzx2Sk,35738
|
| 213 |
+
prompt_toolkit/layout/dimension.py,sha256=e1Zbptz3dRcG7khlC3I3DbIhXnFfpLxYOOBoELAiZ20,7052
|
| 214 |
+
prompt_toolkit/layout/dummy.py,sha256=8zB3MwDDd4RpI880WUKhv719tTzy5bXS9gm9zdkBZ10,1047
|
| 215 |
+
prompt_toolkit/layout/layout.py,sha256=VXqWAoL3EviGn4CxtOrFJekMALvl9xff1bTSnE-gXF8,13960
|
| 216 |
+
prompt_toolkit/layout/margins.py,sha256=bt-IvD03uQvmLVYvGZLqPLluR6kUlBRBAGJwCc8F7II,10375
|
| 217 |
+
prompt_toolkit/layout/menus.py,sha256=B4H2oCPF48gLy9cB0vkdGIoH_8gGyj95TDHtfxXRVSw,27195
|
| 218 |
+
prompt_toolkit/layout/mouse_handlers.py,sha256=lwbGSdpn6_pcK7HQWJ6IvHsxTf1_ahBew4pkmtU6zUM,1589
|
| 219 |
+
prompt_toolkit/layout/processors.py,sha256=l70PjZ64bieg7y3caMd1aYDYe4khnDlaZ5EhCTgXPx0,33940
|
| 220 |
+
prompt_toolkit/layout/screen.py,sha256=2PWdPDkQxtJrMSv9oqdZrWa7ChCnC7J4SvfVIithi5E,10113
|
| 221 |
+
prompt_toolkit/layout/scrollable_pane.py,sha256=JQtPfafU61RJt3MzGW2wsw96o1sjJH0g2DSVyO7J6qA,19264
|
| 222 |
+
prompt_toolkit/layout/utils.py,sha256=qot9clyeG3FoplfAS2O6QxmnnA_PDln4-dUJ5Hu76fQ,2371
|
| 223 |
+
prompt_toolkit/lexers/__init__.py,sha256=QldV9b8J2Kb9Exyv2fDss-YRzP07z2FYAhwPN4coWn8,409
|
| 224 |
+
prompt_toolkit/lexers/__pycache__/__init__.cpython-310.pyc,,
|
| 225 |
+
prompt_toolkit/lexers/__pycache__/base.cpython-310.pyc,,
|
| 226 |
+
prompt_toolkit/lexers/__pycache__/pygments.cpython-310.pyc,,
|
| 227 |
+
prompt_toolkit/lexers/base.py,sha256=XdyKLj4rD25CVCqSCfElWE3ppBN1LGQ9fRLPi1oYfl0,2350
|
| 228 |
+
prompt_toolkit/lexers/pygments.py,sha256=it89LjsltZpzlQJPb95GX4GdMu7gq1J1QzWC29lCQi4,11922
|
| 229 |
+
prompt_toolkit/log.py,sha256=6typpL_jnewdUc3j2OoplVLwnP9dSMOkIsJy_sgR9IY,153
|
| 230 |
+
prompt_toolkit/mouse_events.py,sha256=4mUHJbG8WrrQznw7z_jsOrdmldC5ZMRM4gDDUy51pyk,2473
|
| 231 |
+
prompt_toolkit/output/__init__.py,sha256=GVlT-U_W0EuIP-c1Qjyp0DN6Fl2PsCEhFzjUMRHsGWI,280
|
| 232 |
+
prompt_toolkit/output/__pycache__/__init__.cpython-310.pyc,,
|
| 233 |
+
prompt_toolkit/output/__pycache__/base.cpython-310.pyc,,
|
| 234 |
+
prompt_toolkit/output/__pycache__/color_depth.cpython-310.pyc,,
|
| 235 |
+
prompt_toolkit/output/__pycache__/conemu.cpython-310.pyc,,
|
| 236 |
+
prompt_toolkit/output/__pycache__/defaults.cpython-310.pyc,,
|
| 237 |
+
prompt_toolkit/output/__pycache__/flush_stdout.cpython-310.pyc,,
|
| 238 |
+
prompt_toolkit/output/__pycache__/plain_text.cpython-310.pyc,,
|
| 239 |
+
prompt_toolkit/output/__pycache__/vt100.cpython-310.pyc,,
|
| 240 |
+
prompt_toolkit/output/__pycache__/win32.cpython-310.pyc,,
|
| 241 |
+
prompt_toolkit/output/__pycache__/windows10.cpython-310.pyc,,
|
| 242 |
+
prompt_toolkit/output/base.py,sha256=o74Vok7cXLxgHoAaqKHQAGcNZILn5B5g6Z0pUXU6x7s,8348
|
| 243 |
+
prompt_toolkit/output/color_depth.py,sha256=KEFTlxCYTqOvA-VDx4wUb8G6HaYD5Hbf5GKmPZwssCs,1569
|
| 244 |
+
prompt_toolkit/output/conemu.py,sha256=_w2IEFR-mXsaMFINgZITiJNRCS9QowLUxeskPEpz2GE,1865
|
| 245 |
+
prompt_toolkit/output/defaults.py,sha256=72RecTuugrjvfZinbvsFRYDwMcczE9Zw3ttmmiG0Ivg,3689
|
| 246 |
+
prompt_toolkit/output/flush_stdout.py,sha256=ReT0j0IwVJEcth7VJj2zE6UcY0OVz5Ut1rpANnbCyYQ,3236
|
| 247 |
+
prompt_toolkit/output/plain_text.py,sha256=VnjoDmy0pKQoubXXQJQ_MljoDYi1FcLdNZB2KN_TQIs,3296
|
| 248 |
+
prompt_toolkit/output/vt100.py,sha256=JntIb5rIGoRqQMnpxDJ3eSKnB5x1eUNcr3wEpbT0STs,22988
|
| 249 |
+
prompt_toolkit/output/win32.py,sha256=d5LG_3dLLcHomJ_eJiFAcmdSe1VSjgneWR9xg2EHz-M,22622
|
| 250 |
+
prompt_toolkit/output/windows10.py,sha256=Oy2K_-rsA7UHsuLhTgVlD8nzdlWZpm9cOOhyj4g7s6w,4034
|
| 251 |
+
prompt_toolkit/patch_stdout.py,sha256=8gEaQdqykdBczlvp3FrOjDlEG02yeXoYKrDAGqj48Wg,9477
|
| 252 |
+
prompt_toolkit/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 253 |
+
prompt_toolkit/renderer.py,sha256=4PQrc5ithARLg5pqHgQwbvfaoSg73lLDoXYB4IIx12I,29112
|
| 254 |
+
prompt_toolkit/search.py,sha256=6Go_LtBeBlIMkdUCqb-WFCBKLchd70kgtccqP5dyv08,6951
|
| 255 |
+
prompt_toolkit/selection.py,sha256=P6zQOahBqqt1YZmfQ2-V9iJjOo4cxl0bdmU_-0jezJI,1274
|
| 256 |
+
prompt_toolkit/shortcuts/__init__.py,sha256=AOdDyiuu4t2itpHhFcBdvY-Tgzzi1HwQNnu2la3yaXw,949
|
| 257 |
+
prompt_toolkit/shortcuts/__pycache__/__init__.cpython-310.pyc,,
|
| 258 |
+
prompt_toolkit/shortcuts/__pycache__/dialogs.cpython-310.pyc,,
|
| 259 |
+
prompt_toolkit/shortcuts/__pycache__/prompt.cpython-310.pyc,,
|
| 260 |
+
prompt_toolkit/shortcuts/__pycache__/utils.cpython-310.pyc,,
|
| 261 |
+
prompt_toolkit/shortcuts/dialogs.py,sha256=gFibLlbaii8ijuurk9TpbNi5fMTHu99T6m1wfFilbE8,9007
|
| 262 |
+
prompt_toolkit/shortcuts/progress_bar/__init__.py,sha256=QeAssmFBDPCC5VRoObAp4UkebwETP3qS7-na4acstWM,540
|
| 263 |
+
prompt_toolkit/shortcuts/progress_bar/__pycache__/__init__.cpython-310.pyc,,
|
| 264 |
+
prompt_toolkit/shortcuts/progress_bar/__pycache__/base.cpython-310.pyc,,
|
| 265 |
+
prompt_toolkit/shortcuts/progress_bar/__pycache__/formatters.cpython-310.pyc,,
|
| 266 |
+
prompt_toolkit/shortcuts/progress_bar/base.py,sha256=_cqp7coZMFDc7ZoAUL1iz3fL1Dt5hw3hi1HEfBvUpK8,14402
|
| 267 |
+
prompt_toolkit/shortcuts/progress_bar/formatters.py,sha256=VfRADwUm8op-DzoM51UrKI8pSa1T1LAz5q9VMUW2siI,11739
|
| 268 |
+
prompt_toolkit/shortcuts/prompt.py,sha256=IbbpTaV71ER8c920U5cf9CTkGHudfHtWnWuu4A_TjPE,60235
|
| 269 |
+
prompt_toolkit/shortcuts/utils.py,sha256=NNjBY0Brkcb13Gxhh7Yc72_YpDFsQbkIlm7ZXvW3rK0,6950
|
| 270 |
+
prompt_toolkit/styles/__init__.py,sha256=7N1NNE1gTQo5mjT9f7mRwRodkrBoNpT9pmqWK-lrSeY,1640
|
| 271 |
+
prompt_toolkit/styles/__pycache__/__init__.cpython-310.pyc,,
|
| 272 |
+
prompt_toolkit/styles/__pycache__/base.cpython-310.pyc,,
|
| 273 |
+
prompt_toolkit/styles/__pycache__/defaults.cpython-310.pyc,,
|
| 274 |
+
prompt_toolkit/styles/__pycache__/named_colors.cpython-310.pyc,,
|
| 275 |
+
prompt_toolkit/styles/__pycache__/pygments.cpython-310.pyc,,
|
| 276 |
+
prompt_toolkit/styles/__pycache__/style.cpython-310.pyc,,
|
| 277 |
+
prompt_toolkit/styles/__pycache__/style_transformation.cpython-310.pyc,,
|
| 278 |
+
prompt_toolkit/styles/base.py,sha256=9oTmvqg0Rxy9VEVbRxq_4_P_NnPWVr9QedK56kea2Ro,5014
|
| 279 |
+
prompt_toolkit/styles/defaults.py,sha256=TRnP1PeuauYa_Ru1PpJ_ImsfaldvLE1JjmPV8tvfJjs,8699
|
| 280 |
+
prompt_toolkit/styles/named_colors.py,sha256=yZ30oKB-fCRk6RMASYg8q3Uz2zgdfy_YNbuQWYpyYas,4367
|
| 281 |
+
prompt_toolkit/styles/pygments.py,sha256=yWJEcvYCFo1e2EN9IF5HWpxHQ104J0HOJg1LUsSA9oM,1974
|
| 282 |
+
prompt_toolkit/styles/style.py,sha256=ve7MBciSq6cBOXhboC_RLrlrEqQlq5kWn0XgFI6wNVU,13043
|
| 283 |
+
prompt_toolkit/styles/style_transformation.py,sha256=cGaOo-jqhP79QoEHLQxrOZo9QMrxWxtXgfXKsHlx1Jg,12427
|
| 284 |
+
prompt_toolkit/token.py,sha256=do3EnxLrCDVbq47MzJ2vqSYps-CjVKWNCWzCZgdf5Jo,121
|
| 285 |
+
prompt_toolkit/utils.py,sha256=7O8hILpI2VZb0KoC7J-5z1S2aXICf_kwtmRq5xdfDTg,8631
|
| 286 |
+
prompt_toolkit/validation.py,sha256=XTdmExMgaqj-Whym9yYyQxOAaKce97KYyyGXwCxMr-A,5807
|
| 287 |
+
prompt_toolkit/widgets/__init__.py,sha256=RZXj6UzZWFuxOQXc1TwHLIwwZYJU-YBAaV4oLrC2dCA,1218
|
| 288 |
+
prompt_toolkit/widgets/__pycache__/__init__.cpython-310.pyc,,
|
| 289 |
+
prompt_toolkit/widgets/__pycache__/base.cpython-310.pyc,,
|
| 290 |
+
prompt_toolkit/widgets/__pycache__/dialogs.cpython-310.pyc,,
|
| 291 |
+
prompt_toolkit/widgets/__pycache__/menus.cpython-310.pyc,,
|
| 292 |
+
prompt_toolkit/widgets/__pycache__/toolbars.cpython-310.pyc,,
|
| 293 |
+
prompt_toolkit/widgets/base.py,sha256=zdfJ9-kMGRpocEi-GqDIOgt9prxRkN8GB8D1uT0409U,32351
|
| 294 |
+
prompt_toolkit/widgets/dialogs.py,sha256=K2ACcf0rKXwpBQGQcjSTq2aNeSInGmklzZRPnhdtZTc,3380
|
| 295 |
+
prompt_toolkit/widgets/menus.py,sha256=SeX-llaTpF1pVak2lw37mAP0SFDONIRZT5oq23mARg8,13419
|
| 296 |
+
prompt_toolkit/widgets/toolbars.py,sha256=MoxOxaa8Yi3nJvH4G8OCwlNuwx3XWUJ07J0a7D17_w0,12178
|
| 297 |
+
prompt_toolkit/win32_types.py,sha256=3xVjabRA3Q-RN2x3DLqTOrstuYj4_uCq6w2i8t6LZ6E,5551
|
evalkit_cambrian/lib/python3.10/site-packages/prompt_toolkit-3.0.48.dist-info/REQUESTED
ADDED
|
File without changes
|
evalkit_cambrian/lib/python3.10/site-packages/prompt_toolkit-3.0.48.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (75.1.0)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
evalkit_cambrian/lib/python3.10/site-packages/prompt_toolkit-3.0.48.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
prompt_toolkit
|
evalkit_cambrian/lib/python3.10/site-packages/python_dateutil-2.9.0.post0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
dateutil/__init__.py,sha256=Mqam67WO9IkTmUFyI66vS6IoSXTp9G388DadH2LCMLY,620
|
| 2 |
+
dateutil/__pycache__/__init__.cpython-310.pyc,,
|
| 3 |
+
dateutil/__pycache__/_common.cpython-310.pyc,,
|
| 4 |
+
dateutil/__pycache__/_version.cpython-310.pyc,,
|
| 5 |
+
dateutil/__pycache__/easter.cpython-310.pyc,,
|
| 6 |
+
dateutil/__pycache__/relativedelta.cpython-310.pyc,,
|
| 7 |
+
dateutil/__pycache__/rrule.cpython-310.pyc,,
|
| 8 |
+
dateutil/__pycache__/tzwin.cpython-310.pyc,,
|
| 9 |
+
dateutil/__pycache__/utils.cpython-310.pyc,,
|
| 10 |
+
dateutil/_common.py,sha256=77w0yytkrxlYbSn--lDVPUMabUXRR9I3lBv_vQRUqUY,932
|
| 11 |
+
dateutil/_version.py,sha256=BV031OxDDAmy58neUg5yyqLkLaqIw7ibK9As3jiMib0,166
|
| 12 |
+
dateutil/easter.py,sha256=dyBi-lKvimH1u_k6p7Z0JJK72QhqVtVBsqByvpEPKvc,2678
|
| 13 |
+
dateutil/parser/__init__.py,sha256=wWk6GFuxTpjoggCGtgkceJoti4pVjl4_fHQXpNOaSYg,1766
|
| 14 |
+
dateutil/parser/__pycache__/__init__.cpython-310.pyc,,
|
| 15 |
+
dateutil/parser/__pycache__/_parser.cpython-310.pyc,,
|
| 16 |
+
dateutil/parser/__pycache__/isoparser.cpython-310.pyc,,
|
| 17 |
+
dateutil/parser/_parser.py,sha256=7klDdyicksQB_Xgl-3UAmBwzCYor1AIZqklIcT6dH_8,58796
|
| 18 |
+
dateutil/parser/isoparser.py,sha256=8Fy999bnCd1frSdOYuOraWfJTtd5W7qQ51NwNuH_hXM,13233
|
| 19 |
+
dateutil/relativedelta.py,sha256=IY_mglMjoZbYfrvloTY2ce02aiVjPIkiZfqgNTZRfuA,24903
|
| 20 |
+
dateutil/rrule.py,sha256=KJzKlaCd1jEbu4A38ZltslaoAUh9nSbdbOFdjp70Kew,66557
|
| 21 |
+
dateutil/tz/__init__.py,sha256=F-Mz13v6jYseklQf9Te9J6nzcLDmq47gORa61K35_FA,444
|
| 22 |
+
dateutil/tz/__pycache__/__init__.cpython-310.pyc,,
|
| 23 |
+
dateutil/tz/__pycache__/_common.cpython-310.pyc,,
|
| 24 |
+
dateutil/tz/__pycache__/_factories.cpython-310.pyc,,
|
| 25 |
+
dateutil/tz/__pycache__/tz.cpython-310.pyc,,
|
| 26 |
+
dateutil/tz/__pycache__/win.cpython-310.pyc,,
|
| 27 |
+
dateutil/tz/_common.py,sha256=cgzDTANsOXvEc86cYF77EsliuSab8Puwpsl5-bX3_S4,12977
|
| 28 |
+
dateutil/tz/_factories.py,sha256=unb6XQNXrPMveksTCU-Ag8jmVZs4SojoPUcAHpWnrvU,2569
|
| 29 |
+
dateutil/tz/tz.py,sha256=EUnEdMfeThXiY6l4sh9yBabZ63_POzy01zSsh9thn1o,62855
|
| 30 |
+
dateutil/tz/win.py,sha256=xJszWgSwE1xPx_HJj4ZkepyukC_hNy016WMcXhbRaB8,12935
|
| 31 |
+
dateutil/tzwin.py,sha256=7Ar4vdQCnnM0mKR3MUjbIKsZrBVfHgdwsJZc_mGYRew,59
|
| 32 |
+
dateutil/utils.py,sha256=dKCchEw8eObi0loGTx91unBxm_7UGlU3v_FjFMdqwYM,1965
|
| 33 |
+
dateutil/zoneinfo/__init__.py,sha256=KYg0pthCMjcp5MXSEiBJn3nMjZeNZav7rlJw5-tz1S4,5889
|
| 34 |
+
dateutil/zoneinfo/__pycache__/__init__.cpython-310.pyc,,
|
| 35 |
+
dateutil/zoneinfo/__pycache__/rebuild.cpython-310.pyc,,
|
| 36 |
+
dateutil/zoneinfo/dateutil-zoneinfo.tar.gz,sha256=0-pS57bpaN4NiE3xKIGTWW-pW4A9tPkqGCeac5gARHU,156400
|
| 37 |
+
dateutil/zoneinfo/rebuild.py,sha256=MiqYzCIHvNbMH-LdRYLv-4T0EIA7hDKt5GLR0IRTLdI,2392
|
| 38 |
+
python_dateutil-2.9.0.post0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 39 |
+
python_dateutil-2.9.0.post0.dist-info/LICENSE,sha256=ugD1Gg2SgjtaHN4n2LW50jIeZ-2NqbwWPv-W1eF-V34,2889
|
| 40 |
+
python_dateutil-2.9.0.post0.dist-info/METADATA,sha256=qdQ22jIr6AgzL5jYgyWZjofLaTpniplp_rTPrXKabpM,8354
|
| 41 |
+
python_dateutil-2.9.0.post0.dist-info/RECORD,,
|
| 42 |
+
python_dateutil-2.9.0.post0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 43 |
+
python_dateutil-2.9.0.post0.dist-info/WHEEL,sha256=-G_t0oGuE7UD0DrSpVZnq1hHMBV9DD2XkS5v7XpmTnk,110
|
| 44 |
+
python_dateutil-2.9.0.post0.dist-info/top_level.txt,sha256=4tjdWkhRZvF7LA_BYe_L9gB2w_p2a-z5y6ArjaRkot8,9
|
| 45 |
+
python_dateutil-2.9.0.post0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
evalkit_cambrian/lib/python3.10/site-packages/regex-2024.11.6.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
evalkit_cambrian/lib/python3.10/site-packages/regex-2024.11.6.dist-info/METADATA
ADDED
|
@@ -0,0 +1,1060 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: regex
|
| 3 |
+
Version: 2024.11.6
|
| 4 |
+
Summary: Alternative regular expression module, to replace re.
|
| 5 |
+
Home-page: https://github.com/mrabarnett/mrab-regex
|
| 6 |
+
Author: Matthew Barnett
|
| 7 |
+
Author-email: regex@mrabarnett.plus.com
|
| 8 |
+
License: Apache Software License
|
| 9 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 10 |
+
Classifier: Intended Audience :: Developers
|
| 11 |
+
Classifier: License :: OSI Approved :: Apache Software License
|
| 12 |
+
Classifier: Operating System :: OS Independent
|
| 13 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 14 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 15 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 16 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 19 |
+
Classifier: Topic :: Scientific/Engineering :: Information Analysis
|
| 20 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 21 |
+
Classifier: Topic :: Text Processing
|
| 22 |
+
Classifier: Topic :: Text Processing :: General
|
| 23 |
+
Requires-Python: >=3.8
|
| 24 |
+
Description-Content-Type: text/x-rst
|
| 25 |
+
License-File: LICENSE.txt
|
| 26 |
+
|
| 27 |
+
Introduction
|
| 28 |
+
------------
|
| 29 |
+
|
| 30 |
+
This regex implementation is backwards-compatible with the standard 're' module, but offers additional functionality.
|
| 31 |
+
|
| 32 |
+
Python 2
|
| 33 |
+
--------
|
| 34 |
+
|
| 35 |
+
Python 2 is no longer supported. The last release that supported Python 2 was 2021.11.10.
|
| 36 |
+
|
| 37 |
+
PyPy
|
| 38 |
+
----
|
| 39 |
+
|
| 40 |
+
This module is targeted at CPython. It expects that all codepoints are the same width, so it won't behave properly with PyPy outside U+0000..U+007F because PyPy stores strings as UTF-8.
|
| 41 |
+
|
| 42 |
+
Multithreading
|
| 43 |
+
--------------
|
| 44 |
+
|
| 45 |
+
The regex module releases the GIL during matching on instances of the built-in (immutable) string classes, enabling other Python threads to run concurrently. It is also possible to force the regex module to release the GIL during matching by calling the matching methods with the keyword argument ``concurrent=True``. The behaviour is undefined if the string changes during matching, so use it *only* when it is guaranteed that that won't happen.
|
| 46 |
+
|
| 47 |
+
Unicode
|
| 48 |
+
-------
|
| 49 |
+
|
| 50 |
+
This module supports Unicode 16.0.0. Full Unicode case-folding is supported.
|
| 51 |
+
|
| 52 |
+
Flags
|
| 53 |
+
-----
|
| 54 |
+
|
| 55 |
+
There are 2 kinds of flag: scoped and global. Scoped flags can apply to only part of a pattern and can be turned on or off; global flags apply to the entire pattern and can only be turned on.
|
| 56 |
+
|
| 57 |
+
The scoped flags are: ``ASCII (?a)``, ``FULLCASE (?f)``, ``IGNORECASE (?i)``, ``LOCALE (?L)``, ``MULTILINE (?m)``, ``DOTALL (?s)``, ``UNICODE (?u)``, ``VERBOSE (?x)``, ``WORD (?w)``.
|
| 58 |
+
|
| 59 |
+
The global flags are: ``BESTMATCH (?b)``, ``ENHANCEMATCH (?e)``, ``POSIX (?p)``, ``REVERSE (?r)``, ``VERSION0 (?V0)``, ``VERSION1 (?V1)``.
|
| 60 |
+
|
| 61 |
+
If neither the ``ASCII``, ``LOCALE`` nor ``UNICODE`` flag is specified, it will default to ``UNICODE`` if the regex pattern is a Unicode string and ``ASCII`` if it's a bytestring.
|
| 62 |
+
|
| 63 |
+
The ``ENHANCEMATCH`` flag makes fuzzy matching attempt to improve the fit of the next match that it finds.
|
| 64 |
+
|
| 65 |
+
The ``BESTMATCH`` flag makes fuzzy matching search for the best match instead of the next match.
|
| 66 |
+
|
| 67 |
+
Old vs new behaviour
|
| 68 |
+
--------------------
|
| 69 |
+
|
| 70 |
+
In order to be compatible with the re module, this module has 2 behaviours:
|
| 71 |
+
|
| 72 |
+
* **Version 0** behaviour (old behaviour, compatible with the re module):
|
| 73 |
+
|
| 74 |
+
Please note that the re module's behaviour may change over time, and I'll endeavour to match that behaviour in version 0.
|
| 75 |
+
|
| 76 |
+
* Indicated by the ``VERSION0`` flag.
|
| 77 |
+
|
| 78 |
+
* Zero-width matches are not handled correctly in the re module before Python 3.7. The behaviour in those earlier versions is:
|
| 79 |
+
|
| 80 |
+
* ``.split`` won't split a string at a zero-width match.
|
| 81 |
+
|
| 82 |
+
* ``.sub`` will advance by one character after a zero-width match.
|
| 83 |
+
|
| 84 |
+
* Inline flags apply to the entire pattern, and they can't be turned off.
|
| 85 |
+
|
| 86 |
+
* Only simple sets are supported.
|
| 87 |
+
|
| 88 |
+
* Case-insensitive matches in Unicode use simple case-folding by default.
|
| 89 |
+
|
| 90 |
+
* **Version 1** behaviour (new behaviour, possibly different from the re module):
|
| 91 |
+
|
| 92 |
+
* Indicated by the ``VERSION1`` flag.
|
| 93 |
+
|
| 94 |
+
* Zero-width matches are handled correctly.
|
| 95 |
+
|
| 96 |
+
* Inline flags apply to the end of the group or pattern, and they can be turned off.
|
| 97 |
+
|
| 98 |
+
* Nested sets and set operations are supported.
|
| 99 |
+
|
| 100 |
+
* Case-insensitive matches in Unicode use full case-folding by default.
|
| 101 |
+
|
| 102 |
+
If no version is specified, the regex module will default to ``regex.DEFAULT_VERSION``.
|
| 103 |
+
|
| 104 |
+
Case-insensitive matches in Unicode
|
| 105 |
+
-----------------------------------
|
| 106 |
+
|
| 107 |
+
The regex module supports both simple and full case-folding for case-insensitive matches in Unicode. Use of full case-folding can be turned on using the ``FULLCASE`` flag. Please note that this flag affects how the ``IGNORECASE`` flag works; the ``FULLCASE`` flag itself does not turn on case-insensitive matching.
|
| 108 |
+
|
| 109 |
+
Version 0 behaviour: the flag is off by default.
|
| 110 |
+
|
| 111 |
+
Version 1 behaviour: the flag is on by default.
|
| 112 |
+
|
| 113 |
+
Nested sets and set operations
|
| 114 |
+
------------------------------
|
| 115 |
+
|
| 116 |
+
It's not possible to support both simple sets, as used in the re module, and nested sets at the same time because of a difference in the meaning of an unescaped ``"["`` in a set.
|
| 117 |
+
|
| 118 |
+
For example, the pattern ``[[a-z]--[aeiou]]`` is treated in the version 0 behaviour (simple sets, compatible with the re module) as:
|
| 119 |
+
|
| 120 |
+
* Set containing "[" and the letters "a" to "z"
|
| 121 |
+
|
| 122 |
+
* Literal "--"
|
| 123 |
+
|
| 124 |
+
* Set containing letters "a", "e", "i", "o", "u"
|
| 125 |
+
|
| 126 |
+
* Literal "]"
|
| 127 |
+
|
| 128 |
+
but in the version 1 behaviour (nested sets, enhanced behaviour) as:
|
| 129 |
+
|
| 130 |
+
* Set which is:
|
| 131 |
+
|
| 132 |
+
* Set containing the letters "a" to "z"
|
| 133 |
+
|
| 134 |
+
* but excluding:
|
| 135 |
+
|
| 136 |
+
* Set containing the letters "a", "e", "i", "o", "u"
|
| 137 |
+
|
| 138 |
+
Version 0 behaviour: only simple sets are supported.
|
| 139 |
+
|
| 140 |
+
Version 1 behaviour: nested sets and set operations are supported.
|
| 141 |
+
|
| 142 |
+
Notes on named groups
|
| 143 |
+
---------------------
|
| 144 |
+
|
| 145 |
+
All groups have a group number, starting from 1.
|
| 146 |
+
|
| 147 |
+
Groups with the same group name will have the same group number, and groups with a different group name will have a different group number.
|
| 148 |
+
|
| 149 |
+
The same name can be used by more than one group, with later captures 'overwriting' earlier captures. All the captures of the group will be available from the ``captures`` method of the match object.
|
| 150 |
+
|
| 151 |
+
Group numbers will be reused across different branches of a branch reset, eg. ``(?|(first)|(second))`` has only group 1. If groups have different group names then they will, of course, have different group numbers, eg. ``(?|(?P<foo>first)|(?P<bar>second))`` has group 1 ("foo") and group 2 ("bar").
|
| 152 |
+
|
| 153 |
+
In the regex ``(\s+)(?|(?P<foo>[A-Z]+)|(\w+) (?P<foo>[0-9]+)`` there are 2 groups:
|
| 154 |
+
|
| 155 |
+
* ``(\s+)`` is group 1.
|
| 156 |
+
|
| 157 |
+
* ``(?P<foo>[A-Z]+)`` is group 2, also called "foo".
|
| 158 |
+
|
| 159 |
+
* ``(\w+)`` is group 2 because of the branch reset.
|
| 160 |
+
|
| 161 |
+
* ``(?P<foo>[0-9]+)`` is group 2 because it's called "foo".
|
| 162 |
+
|
| 163 |
+
If you want to prevent ``(\w+)`` from being group 2, you need to name it (different name, different group number).
|
| 164 |
+
|
| 165 |
+
Additional features
|
| 166 |
+
-------------------
|
| 167 |
+
|
| 168 |
+
The issue numbers relate to the Python bug tracker, except where listed otherwise.
|
| 169 |
+
|
| 170 |
+
Added ``\p{Horiz_Space}`` and ``\p{Vert_Space}`` (`GitHub issue 477 <https://github.com/mrabarnett/mrab-regex/issues/477#issuecomment-1216779547>`_)
|
| 171 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 172 |
+
|
| 173 |
+
``\p{Horiz_Space}`` or ``\p{H}`` matches horizontal whitespace and ``\p{Vert_Space}`` or ``\p{V}`` matches vertical whitespace.
|
| 174 |
+
|
| 175 |
+
Added support for lookaround in conditional pattern (`Hg issue 163 <https://github.com/mrabarnett/mrab-regex/issues/163>`_)
|
| 176 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 177 |
+
|
| 178 |
+
The test of a conditional pattern can be a lookaround.
|
| 179 |
+
|
| 180 |
+
.. sourcecode:: python
|
| 181 |
+
|
| 182 |
+
>>> regex.match(r'(?(?=\d)\d+|\w+)', '123abc')
|
| 183 |
+
<regex.Match object; span=(0, 3), match='123'>
|
| 184 |
+
>>> regex.match(r'(?(?=\d)\d+|\w+)', 'abc123')
|
| 185 |
+
<regex.Match object; span=(0, 6), match='abc123'>
|
| 186 |
+
|
| 187 |
+
This is not quite the same as putting a lookaround in the first branch of a pair of alternatives.
|
| 188 |
+
|
| 189 |
+
.. sourcecode:: python
|
| 190 |
+
|
| 191 |
+
>>> print(regex.match(r'(?:(?=\d)\d+\b|\w+)', '123abc'))
|
| 192 |
+
<regex.Match object; span=(0, 6), match='123abc'>
|
| 193 |
+
>>> print(regex.match(r'(?(?=\d)\d+\b|\w+)', '123abc'))
|
| 194 |
+
None
|
| 195 |
+
|
| 196 |
+
In the first example, the lookaround matched, but the remainder of the first branch failed to match, and so the second branch was attempted, whereas in the second example, the lookaround matched, and the first branch failed to match, but the second branch was **not** attempted.
|
| 197 |
+
|
| 198 |
+
Added POSIX matching (leftmost longest) (`Hg issue 150 <https://github.com/mrabarnett/mrab-regex/issues/150>`_)
|
| 199 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 200 |
+
|
| 201 |
+
The POSIX standard for regex is to return the leftmost longest match. This can be turned on using the ``POSIX`` flag.
|
| 202 |
+
|
| 203 |
+
.. sourcecode:: python
|
| 204 |
+
|
| 205 |
+
>>> # Normal matching.
|
| 206 |
+
>>> regex.search(r'Mr|Mrs', 'Mrs')
|
| 207 |
+
<regex.Match object; span=(0, 2), match='Mr'>
|
| 208 |
+
>>> regex.search(r'one(self)?(selfsufficient)?', 'oneselfsufficient')
|
| 209 |
+
<regex.Match object; span=(0, 7), match='oneself'>
|
| 210 |
+
>>> # POSIX matching.
|
| 211 |
+
>>> regex.search(r'(?p)Mr|Mrs', 'Mrs')
|
| 212 |
+
<regex.Match object; span=(0, 3), match='Mrs'>
|
| 213 |
+
>>> regex.search(r'(?p)one(self)?(selfsufficient)?', 'oneselfsufficient')
|
| 214 |
+
<regex.Match object; span=(0, 17), match='oneselfsufficient'>
|
| 215 |
+
|
| 216 |
+
Note that it will take longer to find matches because when it finds a match at a certain position, it won't return that immediately, but will keep looking to see if there's another longer match there.
|
| 217 |
+
|
| 218 |
+
Added ``(?(DEFINE)...)`` (`Hg issue 152 <https://github.com/mrabarnett/mrab-regex/issues/152>`_)
|
| 219 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 220 |
+
|
| 221 |
+
If there's no group called "DEFINE", then ... will be ignored except that any groups defined within it can be called and that the normal rules for numbering groups still apply.
|
| 222 |
+
|
| 223 |
+
.. sourcecode:: python
|
| 224 |
+
|
| 225 |
+
>>> regex.search(r'(?(DEFINE)(?P<quant>\d+)(?P<item>\w+))(?&quant) (?&item)', '5 elephants')
|
| 226 |
+
<regex.Match object; span=(0, 11), match='5 elephants'>
|
| 227 |
+
|
| 228 |
+
Added ``(*PRUNE)``, ``(*SKIP)`` and ``(*FAIL)`` (`Hg issue 153 <https://github.com/mrabarnett/mrab-regex/issues/153>`_)
|
| 229 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 230 |
+
|
| 231 |
+
``(*PRUNE)`` discards the backtracking info up to that point. When used in an atomic group or a lookaround, it won't affect the enclosing pattern.
|
| 232 |
+
|
| 233 |
+
``(*SKIP)`` is similar to ``(*PRUNE)``, except that it also sets where in the text the next attempt to match will start. When used in an atomic group or a lookaround, it won't affect the enclosing pattern.
|
| 234 |
+
|
| 235 |
+
``(*FAIL)`` causes immediate backtracking. ``(*F)`` is a permitted abbreviation.
|
| 236 |
+
|
| 237 |
+
Added ``\K`` (`Hg issue 151 <https://github.com/mrabarnett/mrab-regex/issues/151>`_)
|
| 238 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 239 |
+
|
| 240 |
+
Keeps the part of the entire match after the position where ``\K`` occurred; the part before it is discarded.
|
| 241 |
+
|
| 242 |
+
It does not affect what groups return.
|
| 243 |
+
|
| 244 |
+
.. sourcecode:: python
|
| 245 |
+
|
| 246 |
+
>>> m = regex.search(r'(\w\w\K\w\w\w)', 'abcdef')
|
| 247 |
+
>>> m[0]
|
| 248 |
+
'cde'
|
| 249 |
+
>>> m[1]
|
| 250 |
+
'abcde'
|
| 251 |
+
>>>
|
| 252 |
+
>>> m = regex.search(r'(?r)(\w\w\K\w\w\w)', 'abcdef')
|
| 253 |
+
>>> m[0]
|
| 254 |
+
'bc'
|
| 255 |
+
>>> m[1]
|
| 256 |
+
'bcdef'
|
| 257 |
+
|
| 258 |
+
Added capture subscripting for ``expandf`` and ``subf``/``subfn`` (`Hg issue 133 <https://github.com/mrabarnett/mrab-regex/issues/133>`_)
|
| 259 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 260 |
+
|
| 261 |
+
You can use subscripting to get the captures of a repeated group.
|
| 262 |
+
|
| 263 |
+
.. sourcecode:: python
|
| 264 |
+
|
| 265 |
+
>>> m = regex.match(r"(\w)+", "abc")
|
| 266 |
+
>>> m.expandf("{1}")
|
| 267 |
+
'c'
|
| 268 |
+
>>> m.expandf("{1[0]} {1[1]} {1[2]}")
|
| 269 |
+
'a b c'
|
| 270 |
+
>>> m.expandf("{1[-1]} {1[-2]} {1[-3]}")
|
| 271 |
+
'c b a'
|
| 272 |
+
>>>
|
| 273 |
+
>>> m = regex.match(r"(?P<letter>\w)+", "abc")
|
| 274 |
+
>>> m.expandf("{letter}")
|
| 275 |
+
'c'
|
| 276 |
+
>>> m.expandf("{letter[0]} {letter[1]} {letter[2]}")
|
| 277 |
+
'a b c'
|
| 278 |
+
>>> m.expandf("{letter[-1]} {letter[-2]} {letter[-3]}")
|
| 279 |
+
'c b a'
|
| 280 |
+
|
| 281 |
+
Added support for referring to a group by number using ``(?P=...)``
|
| 282 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 283 |
+
|
| 284 |
+
This is in addition to the existing ``\g<...>``.
|
| 285 |
+
|
| 286 |
+
Fixed the handling of locale-sensitive regexes
|
| 287 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 288 |
+
|
| 289 |
+
The ``LOCALE`` flag is intended for legacy code and has limited support. You're still recommended to use Unicode instead.
|
| 290 |
+
|
| 291 |
+
Added partial matches (`Hg issue 102 <https://github.com/mrabarnett/mrab-regex/issues/102>`_)
|
| 292 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 293 |
+
|
| 294 |
+
A partial match is one that matches up to the end of string, but that string has been truncated and you want to know whether a complete match could be possible if the string had not been truncated.
|
| 295 |
+
|
| 296 |
+
Partial matches are supported by ``match``, ``search``, ``fullmatch`` and ``finditer`` with the ``partial`` keyword argument.
|
| 297 |
+
|
| 298 |
+
Match objects have a ``partial`` attribute, which is ``True`` if it's a partial match.
|
| 299 |
+
|
| 300 |
+
For example, if you wanted a user to enter a 4-digit number and check it character by character as it was being entered:
|
| 301 |
+
|
| 302 |
+
.. sourcecode:: python
|
| 303 |
+
|
| 304 |
+
>>> pattern = regex.compile(r'\d{4}')
|
| 305 |
+
|
| 306 |
+
>>> # Initially, nothing has been entered:
|
| 307 |
+
>>> print(pattern.fullmatch('', partial=True))
|
| 308 |
+
<regex.Match object; span=(0, 0), match='', partial=True>
|
| 309 |
+
|
| 310 |
+
>>> # An empty string is OK, but it's only a partial match.
|
| 311 |
+
>>> # The user enters a letter:
|
| 312 |
+
>>> print(pattern.fullmatch('a', partial=True))
|
| 313 |
+
None
|
| 314 |
+
>>> # It'll never match.
|
| 315 |
+
|
| 316 |
+
>>> # The user deletes that and enters a digit:
|
| 317 |
+
>>> print(pattern.fullmatch('1', partial=True))
|
| 318 |
+
<regex.Match object; span=(0, 1), match='1', partial=True>
|
| 319 |
+
>>> # It matches this far, but it's only a partial match.
|
| 320 |
+
|
| 321 |
+
>>> # The user enters 2 more digits:
|
| 322 |
+
>>> print(pattern.fullmatch('123', partial=True))
|
| 323 |
+
<regex.Match object; span=(0, 3), match='123', partial=True>
|
| 324 |
+
>>> # It matches this far, but it's only a partial match.
|
| 325 |
+
|
| 326 |
+
>>> # The user enters another digit:
|
| 327 |
+
>>> print(pattern.fullmatch('1234', partial=True))
|
| 328 |
+
<regex.Match object; span=(0, 4), match='1234'>
|
| 329 |
+
>>> # It's a complete match.
|
| 330 |
+
|
| 331 |
+
>>> # If the user enters another digit:
|
| 332 |
+
>>> print(pattern.fullmatch('12345', partial=True))
|
| 333 |
+
None
|
| 334 |
+
>>> # It's no longer a match.
|
| 335 |
+
|
| 336 |
+
>>> # This is a partial match:
|
| 337 |
+
>>> pattern.match('123', partial=True).partial
|
| 338 |
+
True
|
| 339 |
+
|
| 340 |
+
>>> # This is a complete match:
|
| 341 |
+
>>> pattern.match('1233', partial=True).partial
|
| 342 |
+
False
|
| 343 |
+
|
| 344 |
+
``*`` operator not working correctly with sub() (`Hg issue 106 <https://github.com/mrabarnett/mrab-regex/issues/106>`_)
|
| 345 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 346 |
+
|
| 347 |
+
Sometimes it's not clear how zero-width matches should be handled. For example, should ``.*`` match 0 characters directly after matching >0 characters?
|
| 348 |
+
|
| 349 |
+
.. sourcecode:: python
|
| 350 |
+
|
| 351 |
+
>>> regex.sub('.*', 'x', 'test')
|
| 352 |
+
'xx'
|
| 353 |
+
>>> regex.sub('.*?', '|', 'test')
|
| 354 |
+
'|||||||||'
|
| 355 |
+
|
| 356 |
+
Added ``capturesdict`` (`Hg issue 86 <https://github.com/mrabarnett/mrab-regex/issues/86>`_)
|
| 357 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 358 |
+
|
| 359 |
+
``capturesdict`` is a combination of ``groupdict`` and ``captures``:
|
| 360 |
+
|
| 361 |
+
``groupdict`` returns a dict of the named groups and the last capture of those groups.
|
| 362 |
+
|
| 363 |
+
``captures`` returns a list of all the captures of a group
|
| 364 |
+
|
| 365 |
+
``capturesdict`` returns a dict of the named groups and lists of all the captures of those groups.
|
| 366 |
+
|
| 367 |
+
.. sourcecode:: python
|
| 368 |
+
|
| 369 |
+
>>> m = regex.match(r"(?:(?P<word>\w+) (?P<digits>\d+)\n)+", "one 1\ntwo 2\nthree 3\n")
|
| 370 |
+
>>> m.groupdict()
|
| 371 |
+
{'word': 'three', 'digits': '3'}
|
| 372 |
+
>>> m.captures("word")
|
| 373 |
+
['one', 'two', 'three']
|
| 374 |
+
>>> m.captures("digits")
|
| 375 |
+
['1', '2', '3']
|
| 376 |
+
>>> m.capturesdict()
|
| 377 |
+
{'word': ['one', 'two', 'three'], 'digits': ['1', '2', '3']}
|
| 378 |
+
|
| 379 |
+
Added ``allcaptures`` and ``allspans`` (`Git issue 474 <https://github.com/mrabarnett/mrab-regex/issues/474>`_)
|
| 380 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 381 |
+
|
| 382 |
+
``allcaptures`` returns a list of all the captures of all the groups.
|
| 383 |
+
|
| 384 |
+
``allspans`` returns a list of all the spans of all the captures of all the groups.
|
| 385 |
+
|
| 386 |
+
.. sourcecode:: python
|
| 387 |
+
|
| 388 |
+
>>> m = regex.match(r"(?:(?P<word>\w+) (?P<digits>\d+)\n)+", "one 1\ntwo 2\nthree 3\n")
|
| 389 |
+
>>> m.allcaptures()
|
| 390 |
+
(['one 1\ntwo 2\nthree 3\n'], ['one', 'two', 'three'], ['1', '2', '3'])
|
| 391 |
+
>>> m.allspans()
|
| 392 |
+
([(0, 20)], [(0, 3), (6, 9), (12, 17)], [(4, 5), (10, 11), (18, 19)])
|
| 393 |
+
|
| 394 |
+
Allow duplicate names of groups (`Hg issue 87 <https://github.com/mrabarnett/mrab-regex/issues/87>`_)
|
| 395 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 396 |
+
|
| 397 |
+
Group names can be duplicated.
|
| 398 |
+
|
| 399 |
+
.. sourcecode:: python
|
| 400 |
+
|
| 401 |
+
>>> # With optional groups:
|
| 402 |
+
>>>
|
| 403 |
+
>>> # Both groups capture, the second capture 'overwriting' the first.
|
| 404 |
+
>>> m = regex.match(r"(?P<item>\w+)? or (?P<item>\w+)?", "first or second")
|
| 405 |
+
>>> m.group("item")
|
| 406 |
+
'second'
|
| 407 |
+
>>> m.captures("item")
|
| 408 |
+
['first', 'second']
|
| 409 |
+
>>> # Only the second group captures.
|
| 410 |
+
>>> m = regex.match(r"(?P<item>\w+)? or (?P<item>\w+)?", " or second")
|
| 411 |
+
>>> m.group("item")
|
| 412 |
+
'second'
|
| 413 |
+
>>> m.captures("item")
|
| 414 |
+
['second']
|
| 415 |
+
>>> # Only the first group captures.
|
| 416 |
+
>>> m = regex.match(r"(?P<item>\w+)? or (?P<item>\w+)?", "first or ")
|
| 417 |
+
>>> m.group("item")
|
| 418 |
+
'first'
|
| 419 |
+
>>> m.captures("item")
|
| 420 |
+
['first']
|
| 421 |
+
>>>
|
| 422 |
+
>>> # With mandatory groups:
|
| 423 |
+
>>>
|
| 424 |
+
>>> # Both groups capture, the second capture 'overwriting' the first.
|
| 425 |
+
>>> m = regex.match(r"(?P<item>\w*) or (?P<item>\w*)?", "first or second")
|
| 426 |
+
>>> m.group("item")
|
| 427 |
+
'second'
|
| 428 |
+
>>> m.captures("item")
|
| 429 |
+
['first', 'second']
|
| 430 |
+
>>> # Again, both groups capture, the second capture 'overwriting' the first.
|
| 431 |
+
>>> m = regex.match(r"(?P<item>\w*) or (?P<item>\w*)", " or second")
|
| 432 |
+
>>> m.group("item")
|
| 433 |
+
'second'
|
| 434 |
+
>>> m.captures("item")
|
| 435 |
+
['', 'second']
|
| 436 |
+
>>> # And yet again, both groups capture, the second capture 'overwriting' the first.
|
| 437 |
+
>>> m = regex.match(r"(?P<item>\w*) or (?P<item>\w*)", "first or ")
|
| 438 |
+
>>> m.group("item")
|
| 439 |
+
''
|
| 440 |
+
>>> m.captures("item")
|
| 441 |
+
['first', '']
|
| 442 |
+
|
| 443 |
+
Added ``fullmatch`` (`issue #16203 <https://bugs.python.org/issue16203>`_)
|
| 444 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 445 |
+
|
| 446 |
+
``fullmatch`` behaves like ``match``, except that it must match all of the string.
|
| 447 |
+
|
| 448 |
+
.. sourcecode:: python
|
| 449 |
+
|
| 450 |
+
>>> print(regex.fullmatch(r"abc", "abc").span())
|
| 451 |
+
(0, 3)
|
| 452 |
+
>>> print(regex.fullmatch(r"abc", "abcx"))
|
| 453 |
+
None
|
| 454 |
+
>>> print(regex.fullmatch(r"abc", "abcx", endpos=3).span())
|
| 455 |
+
(0, 3)
|
| 456 |
+
>>> print(regex.fullmatch(r"abc", "xabcy", pos=1, endpos=4).span())
|
| 457 |
+
(1, 4)
|
| 458 |
+
>>>
|
| 459 |
+
>>> regex.match(r"a.*?", "abcd").group(0)
|
| 460 |
+
'a'
|
| 461 |
+
>>> regex.fullmatch(r"a.*?", "abcd").group(0)
|
| 462 |
+
'abcd'
|
| 463 |
+
|
| 464 |
+
Added ``subf`` and ``subfn``
|
| 465 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 466 |
+
|
| 467 |
+
``subf`` and ``subfn`` are alternatives to ``sub`` and ``subn`` respectively. When passed a replacement string, they treat it as a format string.
|
| 468 |
+
|
| 469 |
+
.. sourcecode:: python
|
| 470 |
+
|
| 471 |
+
>>> regex.subf(r"(\w+) (\w+)", "{0} => {2} {1}", "foo bar")
|
| 472 |
+
'foo bar => bar foo'
|
| 473 |
+
>>> regex.subf(r"(?P<word1>\w+) (?P<word2>\w+)", "{word2} {word1}", "foo bar")
|
| 474 |
+
'bar foo'
|
| 475 |
+
|
| 476 |
+
Added ``expandf`` to match object
|
| 477 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 478 |
+
|
| 479 |
+
``expandf`` is an alternative to ``expand``. When passed a replacement string, it treats it as a format string.
|
| 480 |
+
|
| 481 |
+
.. sourcecode:: python
|
| 482 |
+
|
| 483 |
+
>>> m = regex.match(r"(\w+) (\w+)", "foo bar")
|
| 484 |
+
>>> m.expandf("{0} => {2} {1}")
|
| 485 |
+
'foo bar => bar foo'
|
| 486 |
+
>>>
|
| 487 |
+
>>> m = regex.match(r"(?P<word1>\w+) (?P<word2>\w+)", "foo bar")
|
| 488 |
+
>>> m.expandf("{word2} {word1}")
|
| 489 |
+
'bar foo'
|
| 490 |
+
|
| 491 |
+
Detach searched string
|
| 492 |
+
^^^^^^^^^^^^^^^^^^^^^^
|
| 493 |
+
|
| 494 |
+
A match object contains a reference to the string that was searched, via its ``string`` attribute. The ``detach_string`` method will 'detach' that string, making it available for garbage collection, which might save valuable memory if that string is very large.
|
| 495 |
+
|
| 496 |
+
.. sourcecode:: python
|
| 497 |
+
|
| 498 |
+
>>> m = regex.search(r"\w+", "Hello world")
|
| 499 |
+
>>> print(m.group())
|
| 500 |
+
Hello
|
| 501 |
+
>>> print(m.string)
|
| 502 |
+
Hello world
|
| 503 |
+
>>> m.detach_string()
|
| 504 |
+
>>> print(m.group())
|
| 505 |
+
Hello
|
| 506 |
+
>>> print(m.string)
|
| 507 |
+
None
|
| 508 |
+
|
| 509 |
+
Recursive patterns (`Hg issue 27 <https://github.com/mrabarnett/mrab-regex/issues/27>`_)
|
| 510 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 511 |
+
|
| 512 |
+
Recursive and repeated patterns are supported.
|
| 513 |
+
|
| 514 |
+
``(?R)`` or ``(?0)`` tries to match the entire regex recursively. ``(?1)``, ``(?2)``, etc, try to match the relevant group.
|
| 515 |
+
|
| 516 |
+
``(?&name)`` tries to match the named group.
|
| 517 |
+
|
| 518 |
+
.. sourcecode:: python
|
| 519 |
+
|
| 520 |
+
>>> regex.match(r"(Tarzan|Jane) loves (?1)", "Tarzan loves Jane").groups()
|
| 521 |
+
('Tarzan',)
|
| 522 |
+
>>> regex.match(r"(Tarzan|Jane) loves (?1)", "Jane loves Tarzan").groups()
|
| 523 |
+
('Jane',)
|
| 524 |
+
|
| 525 |
+
>>> m = regex.search(r"(\w)(?:(?R)|(\w?))\1", "kayak")
|
| 526 |
+
>>> m.group(0, 1, 2)
|
| 527 |
+
('kayak', 'k', None)
|
| 528 |
+
|
| 529 |
+
The first two examples show how the subpattern within the group is reused, but is _not_ itself a group. In other words, ``"(Tarzan|Jane) loves (?1)"`` is equivalent to ``"(Tarzan|Jane) loves (?:Tarzan|Jane)"``.
|
| 530 |
+
|
| 531 |
+
It's possible to backtrack into a recursed or repeated group.
|
| 532 |
+
|
| 533 |
+
You can't call a group if there is more than one group with that group name or group number (``"ambiguous group reference"``).
|
| 534 |
+
|
| 535 |
+
The alternative forms ``(?P>name)`` and ``(?P&name)`` are also supported.
|
| 536 |
+
|
| 537 |
+
Full Unicode case-folding is supported
|
| 538 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 539 |
+
|
| 540 |
+
In version 1 behaviour, the regex module uses full case-folding when performing case-insensitive matches in Unicode.
|
| 541 |
+
|
| 542 |
+
.. sourcecode:: python
|
| 543 |
+
|
| 544 |
+
>>> regex.match(r"(?iV1)strasse", "stra\N{LATIN SMALL LETTER SHARP S}e").span()
|
| 545 |
+
(0, 6)
|
| 546 |
+
>>> regex.match(r"(?iV1)stra\N{LATIN SMALL LETTER SHARP S}e", "STRASSE").span()
|
| 547 |
+
(0, 7)
|
| 548 |
+
|
| 549 |
+
In version 0 behaviour, it uses simple case-folding for backward compatibility with the re module.
|
| 550 |
+
|
| 551 |
+
Approximate "fuzzy" matching (`Hg issue 12 <https://github.com/mrabarnett/mrab-regex/issues/12>`_, `Hg issue 41 <https://github.com/mrabarnett/mrab-regex/issues/41>`_, `Hg issue 109 <https://github.com/mrabarnett/mrab-regex/issues/109>`_)
|
| 552 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 553 |
+
|
| 554 |
+
Regex usually attempts an exact match, but sometimes an approximate, or "fuzzy", match is needed, for those cases where the text being searched may contain errors in the form of inserted, deleted or substituted characters.
|
| 555 |
+
|
| 556 |
+
A fuzzy regex specifies which types of errors are permitted, and, optionally, either the minimum and maximum or only the maximum permitted number of each type. (You cannot specify only a minimum.)
|
| 557 |
+
|
| 558 |
+
The 3 types of error are:
|
| 559 |
+
|
| 560 |
+
* Insertion, indicated by "i"
|
| 561 |
+
|
| 562 |
+
* Deletion, indicated by "d"
|
| 563 |
+
|
| 564 |
+
* Substitution, indicated by "s"
|
| 565 |
+
|
| 566 |
+
In addition, "e" indicates any type of error.
|
| 567 |
+
|
| 568 |
+
The fuzziness of a regex item is specified between "{" and "}" after the item.
|
| 569 |
+
|
| 570 |
+
Examples:
|
| 571 |
+
|
| 572 |
+
* ``foo`` match "foo" exactly
|
| 573 |
+
|
| 574 |
+
* ``(?:foo){i}`` match "foo", permitting insertions
|
| 575 |
+
|
| 576 |
+
* ``(?:foo){d}`` match "foo", permitting deletions
|
| 577 |
+
|
| 578 |
+
* ``(?:foo){s}`` match "foo", permitting substitutions
|
| 579 |
+
|
| 580 |
+
* ``(?:foo){i,s}`` match "foo", permitting insertions and substitutions
|
| 581 |
+
|
| 582 |
+
* ``(?:foo){e}`` match "foo", permitting errors
|
| 583 |
+
|
| 584 |
+
If a certain type of error is specified, then any type not specified will **not** be permitted.
|
| 585 |
+
|
| 586 |
+
In the following examples I'll omit the item and write only the fuzziness:
|
| 587 |
+
|
| 588 |
+
* ``{d<=3}`` permit at most 3 deletions, but no other types
|
| 589 |
+
|
| 590 |
+
* ``{i<=1,s<=2}`` permit at most 1 insertion and at most 2 substitutions, but no deletions
|
| 591 |
+
|
| 592 |
+
* ``{1<=e<=3}`` permit at least 1 and at most 3 errors
|
| 593 |
+
|
| 594 |
+
* ``{i<=2,d<=2,e<=3}`` permit at most 2 insertions, at most 2 deletions, at most 3 errors in total, but no substitutions
|
| 595 |
+
|
| 596 |
+
It's also possible to state the costs of each type of error and the maximum permitted total cost.
|
| 597 |
+
|
| 598 |
+
Examples:
|
| 599 |
+
|
| 600 |
+
* ``{2i+2d+1s<=4}`` each insertion costs 2, each deletion costs 2, each substitution costs 1, the total cost must not exceed 4
|
| 601 |
+
|
| 602 |
+
* ``{i<=1,d<=1,s<=1,2i+2d+1s<=4}`` at most 1 insertion, at most 1 deletion, at most 1 substitution; each insertion costs 2, each deletion costs 2, each substitution costs 1, the total cost must not exceed 4
|
| 603 |
+
|
| 604 |
+
You can also use "<" instead of "<=" if you want an exclusive minimum or maximum.
|
| 605 |
+
|
| 606 |
+
You can add a test to perform on a character that's substituted or inserted.
|
| 607 |
+
|
| 608 |
+
Examples:
|
| 609 |
+
|
| 610 |
+
* ``{s<=2:[a-z]}`` at most 2 substitutions, which must be in the character set ``[a-z]``.
|
| 611 |
+
|
| 612 |
+
* ``{s<=2,i<=3:\d}`` at most 2 substitutions, at most 3 insertions, which must be digits.
|
| 613 |
+
|
| 614 |
+
By default, fuzzy matching searches for the first match that meets the given constraints. The ``ENHANCEMATCH`` flag will cause it to attempt to improve the fit (i.e. reduce the number of errors) of the match that it has found.
|
| 615 |
+
|
| 616 |
+
The ``BESTMATCH`` flag will make it search for the best match instead.
|
| 617 |
+
|
| 618 |
+
Further examples to note:
|
| 619 |
+
|
| 620 |
+
* ``regex.search("(dog){e}", "cat and dog")[1]`` returns ``"cat"`` because that matches ``"dog"`` with 3 errors (an unlimited number of errors is permitted).
|
| 621 |
+
|
| 622 |
+
* ``regex.search("(dog){e<=1}", "cat and dog")[1]`` returns ``" dog"`` (with a leading space) because that matches ``"dog"`` with 1 error, which is within the limit.
|
| 623 |
+
|
| 624 |
+
* ``regex.search("(?e)(dog){e<=1}", "cat and dog")[1]`` returns ``"dog"`` (without a leading space) because the fuzzy search matches ``" dog"`` with 1 error, which is within the limit, and the ``(?e)`` then makes it attempt a better fit.
|
| 625 |
+
|
| 626 |
+
In the first two examples there are perfect matches later in the string, but in neither case is it the first possible match.
|
| 627 |
+
|
| 628 |
+
The match object has an attribute ``fuzzy_counts`` which gives the total number of substitutions, insertions and deletions.
|
| 629 |
+
|
| 630 |
+
.. sourcecode:: python
|
| 631 |
+
|
| 632 |
+
>>> # A 'raw' fuzzy match:
|
| 633 |
+
>>> regex.fullmatch(r"(?:cats|cat){e<=1}", "cat").fuzzy_counts
|
| 634 |
+
(0, 0, 1)
|
| 635 |
+
>>> # 0 substitutions, 0 insertions, 1 deletion.
|
| 636 |
+
|
| 637 |
+
    >>> # A better match might be possible if the ENHANCEMATCH flag is used:
|
| 638 |
+
>>> regex.fullmatch(r"(?e)(?:cats|cat){e<=1}", "cat").fuzzy_counts
|
| 639 |
+
(0, 0, 0)
|
| 640 |
+
>>> # 0 substitutions, 0 insertions, 0 deletions.
|
| 641 |
+
|
| 642 |
+
The match object also has an attribute ``fuzzy_changes`` which gives a tuple of the positions of the substitutions, insertions and deletions.
|
| 643 |
+
|
| 644 |
+
.. sourcecode:: python
|
| 645 |
+
|
| 646 |
+
>>> m = regex.search('(fuu){i<=2,d<=2,e<=5}', 'anaconda foo bar')
|
| 647 |
+
>>> m
|
| 648 |
+
<regex.Match object; span=(7, 10), match='a f', fuzzy_counts=(0, 2, 2)>
|
| 649 |
+
>>> m.fuzzy_changes
|
| 650 |
+
([], [7, 8], [10, 11])
|
| 651 |
+
|
| 652 |
+
What this means is that if the matched part of the string had been:
|
| 653 |
+
|
| 654 |
+
.. sourcecode:: python
|
| 655 |
+
|
| 656 |
+
'anacondfuuoo bar'
|
| 657 |
+
|
| 658 |
+
it would've been an exact match.
|
| 659 |
+
|
| 660 |
+
However, there were insertions at positions 7 and 8:
|
| 661 |
+
|
| 662 |
+
.. sourcecode:: python
|
| 663 |
+
|
| 664 |
+
'anaconda fuuoo bar'
|
| 665 |
+
^^
|
| 666 |
+
|
| 667 |
+
and deletions at positions 10 and 11:
|
| 668 |
+
|
| 669 |
+
.. sourcecode:: python
|
| 670 |
+
|
| 671 |
+
'anaconda f~~oo bar'
|
| 672 |
+
^^
|
| 673 |
+
|
| 674 |
+
So the actual string was:
|
| 675 |
+
|
| 676 |
+
.. sourcecode:: python
|
| 677 |
+
|
| 678 |
+
'anaconda foo bar'
|
| 679 |
+
|
| 680 |
+
Named lists ``\L<name>`` (`Hg issue 11 <https://github.com/mrabarnett/mrab-regex/issues/11>`_)
|
| 681 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 682 |
+
|
| 683 |
+
There are occasions where you may want to include a list (actually, a set) of options in a regex.
|
| 684 |
+
|
| 685 |
+
One way is to build the pattern like this:
|
| 686 |
+
|
| 687 |
+
.. sourcecode:: python
|
| 688 |
+
|
| 689 |
+
>>> p = regex.compile(r"first|second|third|fourth|fifth")
|
| 690 |
+
|
| 691 |
+
but if the list is large, parsing the resulting regex can take considerable time, and care must also be taken that the strings are properly escaped and properly ordered, for example, "cats" before "cat".
|
| 692 |
+
|
| 693 |
+
The new alternative is to use a named list:
|
| 694 |
+
|
| 695 |
+
.. sourcecode:: python
|
| 696 |
+
|
| 697 |
+
>>> option_set = ["first", "second", "third", "fourth", "fifth"]
|
| 698 |
+
>>> p = regex.compile(r"\L<options>", options=option_set)
|
| 699 |
+
|
| 700 |
+
The order of the items is irrelevant, they are treated as a set. The named lists are available as the ``.named_lists`` attribute of the pattern object:
|
| 701 |
+
|
| 702 |
+
.. sourcecode:: python
|
| 703 |
+
|
| 704 |
+
>>> print(p.named_lists)
|
| 705 |
+
{'options': frozenset({'third', 'first', 'fifth', 'fourth', 'second'})}
|
| 706 |
+
|
| 707 |
+
If there are any unused keyword arguments, ``ValueError`` will be raised unless you tell it otherwise:
|
| 708 |
+
|
| 709 |
+
.. sourcecode:: python
|
| 710 |
+
|
| 711 |
+
>>> option_set = ["first", "second", "third", "fourth", "fifth"]
|
| 712 |
+
>>> p = regex.compile(r"\L<options>", options=option_set, other_options=[])
|
| 713 |
+
Traceback (most recent call last):
|
| 714 |
+
File "<stdin>", line 1, in <module>
|
| 715 |
+
File "C:\Python310\lib\site-packages\regex\regex.py", line 353, in compile
|
| 716 |
+
return _compile(pattern, flags, ignore_unused, kwargs, cache_pattern)
|
| 717 |
+
File "C:\Python310\lib\site-packages\regex\regex.py", line 500, in _compile
|
| 718 |
+
complain_unused_args()
|
| 719 |
+
File "C:\Python310\lib\site-packages\regex\regex.py", line 483, in complain_unused_args
|
| 720 |
+
raise ValueError('unused keyword argument {!a}'.format(any_one))
|
| 721 |
+
ValueError: unused keyword argument 'other_options'
|
| 722 |
+
>>> p = regex.compile(r"\L<options>", options=option_set, other_options=[], ignore_unused=True)
|
| 723 |
+
>>> p = regex.compile(r"\L<options>", options=option_set, other_options=[], ignore_unused=False)
|
| 724 |
+
Traceback (most recent call last):
|
| 725 |
+
File "<stdin>", line 1, in <module>
|
| 726 |
+
File "C:\Python310\lib\site-packages\regex\regex.py", line 353, in compile
|
| 727 |
+
return _compile(pattern, flags, ignore_unused, kwargs, cache_pattern)
|
| 728 |
+
File "C:\Python310\lib\site-packages\regex\regex.py", line 500, in _compile
|
| 729 |
+
complain_unused_args()
|
| 730 |
+
File "C:\Python310\lib\site-packages\regex\regex.py", line 483, in complain_unused_args
|
| 731 |
+
raise ValueError('unused keyword argument {!a}'.format(any_one))
|
| 732 |
+
ValueError: unused keyword argument 'other_options'
|
| 733 |
+
>>>
|
| 734 |
+
|
| 735 |
+
Start and end of word
|
| 736 |
+
^^^^^^^^^^^^^^^^^^^^^
|
| 737 |
+
|
| 738 |
+
``\m`` matches at the start of a word.
|
| 739 |
+
|
| 740 |
+
``\M`` matches at the end of a word.
|
| 741 |
+
|
| 742 |
+
Compare with ``\b``, which matches at the start or end of a word.
|
| 743 |
+
|
| 744 |
+
Unicode line separators
|
| 745 |
+
^^^^^^^^^^^^^^^^^^^^^^^
|
| 746 |
+
|
| 747 |
+
Normally the only line separator is ``\n`` (``\x0A``), but if the ``WORD`` flag is turned on then the line separators are ``\x0D\x0A``, ``\x0A``, ``\x0B``, ``\x0C`` and ``\x0D``, plus ``\x85``, ``\u2028`` and ``\u2029`` when working with Unicode.
|
| 748 |
+
|
| 749 |
+
This affects the regex dot ``"."``, which, with the ``DOTALL`` flag turned off, matches any character except a line separator. It also affects the line anchors ``^`` and ``$`` (in multiline mode).
|
| 750 |
+
|
| 751 |
+
Set operators
|
| 752 |
+
^^^^^^^^^^^^^
|
| 753 |
+
|
| 754 |
+
**Version 1 behaviour only**
|
| 755 |
+
|
| 756 |
+
Set operators have been added, and a set ``[...]`` can include nested sets.
|
| 757 |
+
|
| 758 |
+
The operators, in order of increasing precedence, are:
|
| 759 |
+
|
| 760 |
+
* ``||`` for union ("x||y" means "x or y")
|
| 761 |
+
|
| 762 |
+
* ``~~`` (double tilde) for symmetric difference ("x~~y" means "x or y, but not both")
|
| 763 |
+
|
| 764 |
+
* ``&&`` for intersection ("x&&y" means "x and y")
|
| 765 |
+
|
| 766 |
+
* ``--`` (double dash) for difference ("x--y" means "x but not y")
|
| 767 |
+
|
| 768 |
+
Implicit union, i.e., simple juxtaposition like in ``[ab]``, has the highest precedence. Thus, ``[ab&&cd]`` is the same as ``[[a||b]&&[c||d]]``.
|
| 769 |
+
|
| 770 |
+
Examples:
|
| 771 |
+
|
| 772 |
+
* ``[ab]`` # Set containing 'a' and 'b'
|
| 773 |
+
|
| 774 |
+
* ``[a-z]`` # Set containing 'a' .. 'z'
|
| 775 |
+
|
| 776 |
+
* ``[[a-z]--[qw]]`` # Set containing 'a' .. 'z', but not 'q' or 'w'
|
| 777 |
+
|
| 778 |
+
* ``[a-z--qw]`` # Same as above
|
| 779 |
+
|
| 780 |
+
* ``[\p{L}--QW]`` # Set containing all letters except 'Q' and 'W'
|
| 781 |
+
|
| 782 |
+
* ``[\p{N}--[0-9]]`` # Set containing all numbers except '0' .. '9'
|
| 783 |
+
|
| 784 |
+
* ``[\p{ASCII}&&\p{Letter}]`` # Set containing all characters which are both ASCII and letters
|
| 785 |
+
|
| 786 |
+
regex.escape (`issue #2650 <https://bugs.python.org/issue2650>`_)
|
| 787 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 788 |
+
|
| 789 |
+
regex.escape has an additional keyword parameter ``special_only``. When True, only 'special' regex characters, such as '?', are escaped.
|
| 790 |
+
|
| 791 |
+
.. sourcecode:: python
|
| 792 |
+
|
| 793 |
+
>>> regex.escape("foo!?", special_only=False)
|
| 794 |
+
'foo\\!\\?'
|
| 795 |
+
>>> regex.escape("foo!?", special_only=True)
|
| 796 |
+
'foo!\\?'
|
| 797 |
+
|
| 798 |
+
regex.escape (`Hg issue 249 <https://github.com/mrabarnett/mrab-regex/issues/249>`_)
|
| 799 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 800 |
+
|
| 801 |
+
regex.escape has an additional keyword parameter ``literal_spaces``. When True, spaces are not escaped.
|
| 802 |
+
|
| 803 |
+
.. sourcecode:: python
|
| 804 |
+
|
| 805 |
+
>>> regex.escape("foo bar!?", literal_spaces=False)
|
| 806 |
+
'foo\\ bar!\\?'
|
| 807 |
+
>>> regex.escape("foo bar!?", literal_spaces=True)
|
| 808 |
+
'foo bar!\\?'
|
| 809 |
+
|
| 810 |
+
Repeated captures (`issue #7132 <https://bugs.python.org/issue7132>`_)
|
| 811 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 812 |
+
|
| 813 |
+
A match object has additional methods which return information on all the successful matches of a repeated group. These methods are:
|
| 814 |
+
|
| 815 |
+
* ``matchobject.captures([group1, ...])``
|
| 816 |
+
|
| 817 |
+
* Returns a list of the strings matched in a group or groups. Compare with ``matchobject.group([group1, ...])``.
|
| 818 |
+
|
| 819 |
+
* ``matchobject.starts([group])``
|
| 820 |
+
|
| 821 |
+
* Returns a list of the start positions. Compare with ``matchobject.start([group])``.
|
| 822 |
+
|
| 823 |
+
* ``matchobject.ends([group])``
|
| 824 |
+
|
| 825 |
+
* Returns a list of the end positions. Compare with ``matchobject.end([group])``.
|
| 826 |
+
|
| 827 |
+
* ``matchobject.spans([group])``
|
| 828 |
+
|
| 829 |
+
* Returns a list of the spans. Compare with ``matchobject.span([group])``.
|
| 830 |
+
|
| 831 |
+
.. sourcecode:: python
|
| 832 |
+
|
| 833 |
+
>>> m = regex.search(r"(\w{3})+", "123456789")
|
| 834 |
+
>>> m.group(1)
|
| 835 |
+
'789'
|
| 836 |
+
>>> m.captures(1)
|
| 837 |
+
['123', '456', '789']
|
| 838 |
+
>>> m.start(1)
|
| 839 |
+
6
|
| 840 |
+
>>> m.starts(1)
|
| 841 |
+
[0, 3, 6]
|
| 842 |
+
>>> m.end(1)
|
| 843 |
+
9
|
| 844 |
+
>>> m.ends(1)
|
| 845 |
+
[3, 6, 9]
|
| 846 |
+
>>> m.span(1)
|
| 847 |
+
(6, 9)
|
| 848 |
+
>>> m.spans(1)
|
| 849 |
+
[(0, 3), (3, 6), (6, 9)]
|
| 850 |
+
|
| 851 |
+
Atomic grouping ``(?>...)`` (`issue #433030 <https://bugs.python.org/issue433030>`_)
|
| 852 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 853 |
+
|
| 854 |
+
If the following pattern subsequently fails, then the subpattern as a whole will fail.
|
| 855 |
+
|
| 856 |
+
Possessive quantifiers
|
| 857 |
+
^^^^^^^^^^^^^^^^^^^^^^
|
| 858 |
+
|
| 859 |
+
``(?:...)?+`` ; ``(?:...)*+`` ; ``(?:...)++`` ; ``(?:...){min,max}+``
|
| 860 |
+
|
| 861 |
+
The subpattern is matched up to 'max' times. If the following pattern subsequently fails, then all the repeated subpatterns will fail as a whole. For example, ``(?:...)++`` is equivalent to ``(?>(?:...)+)``.
|
| 862 |
+
|
| 863 |
+
Scoped flags (`issue #433028 <https://bugs.python.org/issue433028>`_)
|
| 864 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 865 |
+
|
| 866 |
+
``(?flags-flags:...)``
|
| 867 |
+
|
| 868 |
+
The flags will apply only to the subpattern. Flags can be turned on or off.
|
| 869 |
+
|
| 870 |
+
Definition of 'word' character (`issue #1693050 <https://bugs.python.org/issue1693050>`_)
|
| 871 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 872 |
+
|
| 873 |
+
The definition of a 'word' character has been expanded for Unicode. It conforms to the Unicode specification at ``http://www.unicode.org/reports/tr29/``.
|
| 874 |
+
|
| 875 |
+
Variable-length lookbehind
|
| 876 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 877 |
+
|
| 878 |
+
A lookbehind can match a variable-length string.
|
| 879 |
+
|
| 880 |
+
Flags argument for regex.split, regex.sub and regex.subn (`issue #3482 <https://bugs.python.org/issue3482>`_)
|
| 881 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 882 |
+
|
| 883 |
+
``regex.split``, ``regex.sub`` and ``regex.subn`` support a 'flags' argument.
|
| 884 |
+
|
| 885 |
+
Pos and endpos arguments for regex.sub and regex.subn
|
| 886 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 887 |
+
|
| 888 |
+
``regex.sub`` and ``regex.subn`` support 'pos' and 'endpos' arguments.
|
| 889 |
+
|
| 890 |
+
'Overlapped' argument for regex.findall and regex.finditer
|
| 891 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 892 |
+
|
| 893 |
+
``regex.findall`` and ``regex.finditer`` support an 'overlapped' flag which permits overlapped matches.
|
| 894 |
+
|
| 895 |
+
Splititer
|
| 896 |
+
^^^^^^^^^
|
| 897 |
+
|
| 898 |
+
``regex.splititer`` has been added. It's a generator equivalent of ``regex.split``.
|
| 899 |
+
|
| 900 |
+
Subscripting match objects for groups
|
| 901 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 902 |
+
|
| 903 |
+
A match object accepts access to the groups via subscripting and slicing:
|
| 904 |
+
|
| 905 |
+
.. sourcecode:: python
|
| 906 |
+
|
| 907 |
+
>>> m = regex.search(r"(?P<before>.*?)(?P<num>\d+)(?P<after>.*)", "pqr123stu")
|
| 908 |
+
>>> print(m["before"])
|
| 909 |
+
pqr
|
| 910 |
+
>>> print(len(m))
|
| 911 |
+
4
|
| 912 |
+
>>> print(m[:])
|
| 913 |
+
('pqr123stu', 'pqr', '123', 'stu')
|
| 914 |
+
|
| 915 |
+
Named groups
|
| 916 |
+
^^^^^^^^^^^^
|
| 917 |
+
|
| 918 |
+
Groups can be named with ``(?<name>...)`` as well as the existing ``(?P<name>...)``.
|
| 919 |
+
|
| 920 |
+
Group references
|
| 921 |
+
^^^^^^^^^^^^^^^^
|
| 922 |
+
|
| 923 |
+
Groups can be referenced within a pattern with ``\g<name>``. This also allows there to be more than 99 groups.
|
| 924 |
+
|
| 925 |
+
Named characters ``\N{name}``
|
| 926 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 927 |
+
|
| 928 |
+
Named characters are supported. Note that only those known by Python's Unicode database will be recognised.
|
| 929 |
+
|
| 930 |
+
Unicode codepoint properties, including scripts and blocks
|
| 931 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 932 |
+
|
| 933 |
+
``\p{property=value}``; ``\P{property=value}``; ``\p{value}`` ; ``\P{value}``
|
| 934 |
+
|
| 935 |
+
Many Unicode properties are supported, including blocks and scripts. ``\p{property=value}`` or ``\p{property:value}`` matches a character whose property ``property`` has value ``value``. The inverse of ``\p{property=value}`` is ``\P{property=value}`` or ``\p{^property=value}``.
|
| 936 |
+
|
| 937 |
+
If the short form ``\p{value}`` is used, the properties are checked in the order: ``General_Category``, ``Script``, ``Block``, binary property:
|
| 938 |
+
|
| 939 |
+
* ``Latin``, the 'Latin' script (``Script=Latin``).
|
| 940 |
+
|
| 941 |
+
* ``BasicLatin``, the 'BasicLatin' block (``Block=BasicLatin``).
|
| 942 |
+
|
| 943 |
+
* ``Alphabetic``, the 'Alphabetic' binary property (``Alphabetic=Yes``).
|
| 944 |
+
|
| 945 |
+
A short form starting with ``Is`` indicates a script or binary property:
|
| 946 |
+
|
| 947 |
+
* ``IsLatin``, the 'Latin' script (``Script=Latin``).
|
| 948 |
+
|
| 949 |
+
* ``IsAlphabetic``, the 'Alphabetic' binary property (``Alphabetic=Yes``).
|
| 950 |
+
|
| 951 |
+
A short form starting with ``In`` indicates a block property:
|
| 952 |
+
|
| 953 |
+
* ``InBasicLatin``, the 'BasicLatin' block (``Block=BasicLatin``).
|
| 954 |
+
|
| 955 |
+
POSIX character classes
|
| 956 |
+
^^^^^^^^^^^^^^^^^^^^^^^
|
| 957 |
+
|
| 958 |
+
``[[:alpha:]]``; ``[[:^alpha:]]``
|
| 959 |
+
|
| 960 |
+
POSIX character classes are supported. These are normally treated as an alternative form of ``\p{...}``.
|
| 961 |
+
|
| 962 |
+
The exceptions are ``alnum``, ``digit``, ``punct`` and ``xdigit``, whose definitions are different from those of Unicode.
|
| 963 |
+
|
| 964 |
+
``[[:alnum:]]`` is equivalent to ``\p{posix_alnum}``.
|
| 965 |
+
|
| 966 |
+
``[[:digit:]]`` is equivalent to ``\p{posix_digit}``.
|
| 967 |
+
|
| 968 |
+
``[[:punct:]]`` is equivalent to ``\p{posix_punct}``.
|
| 969 |
+
|
| 970 |
+
``[[:xdigit:]]`` is equivalent to ``\p{posix_xdigit}``.
|
| 971 |
+
|
| 972 |
+
Search anchor ``\G``
|
| 973 |
+
^^^^^^^^^^^^^^^^^^^^
|
| 974 |
+
|
| 975 |
+
A search anchor has been added. It matches at the position where each search started/continued and can be used for contiguous matches or in negative variable-length lookbehinds to limit how far back the lookbehind goes:
|
| 976 |
+
|
| 977 |
+
.. sourcecode:: python
|
| 978 |
+
|
| 979 |
+
>>> regex.findall(r"\w{2}", "abcd ef")
|
| 980 |
+
['ab', 'cd', 'ef']
|
| 981 |
+
>>> regex.findall(r"\G\w{2}", "abcd ef")
|
| 982 |
+
['ab', 'cd']
|
| 983 |
+
|
| 984 |
+
* The search starts at position 0 and matches 'ab'.
|
| 985 |
+
|
| 986 |
+
* The search continues at position 2 and matches 'cd'.
|
| 987 |
+
|
| 988 |
+
* The search continues at position 4 and fails to match any letters.
|
| 989 |
+
|
| 990 |
+
* The anchor stops the search start position from being advanced, so there are no more results.
|
| 991 |
+
|
| 992 |
+
Reverse searching
|
| 993 |
+
^^^^^^^^^^^^^^^^^
|
| 994 |
+
|
| 995 |
+
Searches can also work backwards:
|
| 996 |
+
|
| 997 |
+
.. sourcecode:: python
|
| 998 |
+
|
| 999 |
+
>>> regex.findall(r".", "abc")
|
| 1000 |
+
['a', 'b', 'c']
|
| 1001 |
+
>>> regex.findall(r"(?r).", "abc")
|
| 1002 |
+
['c', 'b', 'a']
|
| 1003 |
+
|
| 1004 |
+
Note that the result of a reverse search is not necessarily the reverse of a forward search:
|
| 1005 |
+
|
| 1006 |
+
.. sourcecode:: python
|
| 1007 |
+
|
| 1008 |
+
>>> regex.findall(r"..", "abcde")
|
| 1009 |
+
['ab', 'cd']
|
| 1010 |
+
>>> regex.findall(r"(?r)..", "abcde")
|
| 1011 |
+
['de', 'bc']
|
| 1012 |
+
|
| 1013 |
+
Matching a single grapheme ``\X``
|
| 1014 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 1015 |
+
|
| 1016 |
+
The grapheme matcher is supported. It conforms to the Unicode specification at ``http://www.unicode.org/reports/tr29/``.
|
| 1017 |
+
|
| 1018 |
+
Branch reset ``(?|...|...)``
|
| 1019 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 1020 |
+
|
| 1021 |
+
Group numbers will be reused across the alternatives, but groups with different names will have different group numbers.
|
| 1022 |
+
|
| 1023 |
+
.. sourcecode:: python
|
| 1024 |
+
|
| 1025 |
+
>>> regex.match(r"(?|(first)|(second))", "first").groups()
|
| 1026 |
+
('first',)
|
| 1027 |
+
>>> regex.match(r"(?|(first)|(second))", "second").groups()
|
| 1028 |
+
('second',)
|
| 1029 |
+
|
| 1030 |
+
Note that there is only one group.
|
| 1031 |
+
|
| 1032 |
+
Default Unicode word boundary
|
| 1033 |
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| 1034 |
+
|
| 1035 |
+
The ``WORD`` flag changes the definition of a 'word boundary' to that of a default Unicode word boundary. This applies to ``\b`` and ``\B``.
|
| 1036 |
+
|
| 1037 |
+
Timeout
|
| 1038 |
+
^^^^^^^
|
| 1039 |
+
|
| 1040 |
+
The matching methods and functions support timeouts. The timeout (in seconds) applies to the entire operation:
|
| 1041 |
+
|
| 1042 |
+
.. sourcecode:: python
|
| 1043 |
+
|
| 1044 |
+
>>> from time import sleep
|
| 1045 |
+
>>>
|
| 1046 |
+
>>> def fast_replace(m):
|
| 1047 |
+
... return 'X'
|
| 1048 |
+
...
|
| 1049 |
+
>>> def slow_replace(m):
|
| 1050 |
+
... sleep(0.5)
|
| 1051 |
+
... return 'X'
|
| 1052 |
+
...
|
| 1053 |
+
>>> regex.sub(r'[a-z]', fast_replace, 'abcde', timeout=2)
|
| 1054 |
+
'XXXXX'
|
| 1055 |
+
>>> regex.sub(r'[a-z]', slow_replace, 'abcde', timeout=2)
|
| 1056 |
+
Traceback (most recent call last):
|
| 1057 |
+
File "<stdin>", line 1, in <module>
|
| 1058 |
+
File "C:\Python310\lib\site-packages\regex\regex.py", line 278, in sub
|
| 1059 |
+
return pat.sub(repl, string, count, pos, endpos, concurrent, timeout)
|
| 1060 |
+
TimeoutError: regex timed out
|
evalkit_cambrian/lib/python3.10/site-packages/stack_data/__init__.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .core import Source, FrameInfo, markers_from_ranges, Options, LINE_GAP, Line, Variable, RangeInLine, \
|
| 2 |
+
RepeatedFrames, MarkerInLine, style_with_executing_node, BlankLineRange, BlankLines
|
| 3 |
+
from .formatting import Formatter
|
| 4 |
+
from .serializing import Serializer
|
| 5 |
+
|
| 6 |
+
# Expose the package version. ``version.py`` is generated at build time from
# the git tag, so it does not exist in a plain source checkout; in that case
# fall back to a placeholder rather than failing to import the package.
try:
    from .version import __version__
except ImportError:
    # version.py is auto-generated with the git tag when building
    __version__ = "???"
|
evalkit_cambrian/lib/python3.10/site-packages/stack_data/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (667 Bytes). View file
|
|
|