Add files using upload-large-folder tool
Browse files
- download_datasets.py +68 -0
- venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/INSTALLER +1 -0
- venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA +202 -0
- venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD +29 -0
- venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL +5 -0
- venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/top_level.txt +1 -0
- venv/lib/python3.12/site-packages/h11/__init__.py +62 -0
- venv/lib/python3.12/site-packages/h11/_abnf.py +132 -0
- venv/lib/python3.12/site-packages/h11/_connection.py +659 -0
- venv/lib/python3.12/site-packages/h11/_events.py +369 -0
- venv/lib/python3.12/site-packages/h11/_headers.py +282 -0
- venv/lib/python3.12/site-packages/h11/_readers.py +250 -0
- venv/lib/python3.12/site-packages/h11/_receivebuffer.py +153 -0
- venv/lib/python3.12/site-packages/h11/_state.py +365 -0
- venv/lib/python3.12/site-packages/h11/_util.py +135 -0
- venv/lib/python3.12/site-packages/h11/_version.py +16 -0
- venv/lib/python3.12/site-packages/h11/_writers.py +145 -0
- venv/lib/python3.12/site-packages/h11/py.typed +1 -0
- venv/lib/python3.12/site-packages/idna-3.13.dist-info/INSTALLER +1 -0
- venv/lib/python3.12/site-packages/idna-3.13.dist-info/METADATA +204 -0
- venv/lib/python3.12/site-packages/idna-3.13.dist-info/RECORD +22 -0
- venv/lib/python3.12/site-packages/idna-3.13.dist-info/WHEEL +4 -0
- venv/lib/python3.12/site-packages/idna/__init__.py +45 -0
- venv/lib/python3.12/site-packages/pydantic-2.13.4.dist-info/INSTALLER +1 -0
- venv/lib/python3.12/site-packages/pydantic-2.13.4.dist-info/METADATA +0 -0
- venv/lib/python3.12/site-packages/pydantic-2.13.4.dist-info/RECORD +217 -0
- venv/lib/python3.12/site-packages/pydantic-2.13.4.dist-info/WHEEL +4 -0
- venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/INSTALLER +1 -0
- venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/METADATA +173 -0
- venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/RECORD +13 -0
- venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/WHEEL +5 -0
- venv/lib/python3.12/site-packages/pydantic_core/__init__.py +171 -0
- venv/lib/python3.12/site-packages/pydantic_core/_pydantic_core.pyi +1056 -0
- venv/lib/python3.12/site-packages/pydantic_core/core_schema.py +0 -0
- venv/lib/python3.12/site-packages/pydantic_core/py.typed +0 -0
- venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/INSTALLER +1 -0
- venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/METADATA +72 -0
- venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/RECORD +7 -0
- venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/WHEEL +4 -0
- venv/lib/python3.12/site-packages/typing_inspection/__init__.py +0 -0
- venv/lib/python3.12/site-packages/typing_inspection/introspection.py +587 -0
- venv/lib/python3.12/site-packages/typing_inspection/py.typed +0 -0
- venv/lib/python3.12/site-packages/typing_inspection/typing_objects.py +607 -0
- venv/lib/python3.12/site-packages/typing_inspection/typing_objects.pyi +417 -0
- venv/lib/python3.12/site-packages/websockets-16.0.dist-info/INSTALLER +1 -0
- venv/lib/python3.12/site-packages/websockets-16.0.dist-info/METADATA +179 -0
- venv/lib/python3.12/site-packages/websockets-16.0.dist-info/RECORD +108 -0
- venv/lib/python3.12/site-packages/websockets-16.0.dist-info/WHEEL +7 -0
- venv/lib/python3.12/site-packages/websockets-16.0.dist-info/entry_points.txt +2 -0
- venv/lib/python3.12/site-packages/websockets-16.0.dist-info/top_level.txt +1 -0
download_datasets.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datasets import load_dataset
|
| 2 |
+
import os
|
| 3 |
+
# List of datasets to download
|
| 4 |
+
datasets_to_download = [
|
| 5 |
+
'arc_challenge', # arc-challenge-indic
|
| 6 |
+
'audio_evals', # audiollm-evals
|
| 7 |
+
'boolq_indic', # boolq-indic
|
| 8 |
+
'contextual_asr_benchmark', # contextual_asr_benchmark
|
| 9 |
+
'gsm8k_indic', # gsm8k-indic
|
| 10 |
+
'mmlu_indic', # mmlu-indic
|
| 11 |
+
'olmOCR_Bench_English', # olmOCR-Bench-English
|
| 12 |
+
'samvaad_hi_v1', # samvaad-hi-v1
|
| 13 |
+
'sarvam_dub_benchmark_set', # sarvam-dub-benchmark-set
|
| 14 |
+
'trivia_qa_indic_mcq', # trivia-qa-indic-mcq
|
| 15 |
+
'tts_general_benchmark', # tts-general-benchmark
|
| 16 |
+
'tts_robustness_benchmark' # tts-robustness-benchmark
|
| 17 |
+
]
|
| 18 |
+
# Create directory for datasets
|
| 19 |
+
os.makedirs('datasets', exist_ok=True)
|
| 20 |
+
# Download each dataset
|
| 21 |
+
for dataset_name in datasets_to_download:
|
| 22 |
+
try:
|
| 23 |
+
print(f"Downloading {dataset_name}...")
|
| 24 |
+
dataset = load_dataset(dataset_name)
|
| 25 |
+
|
| 26 |
+
# Save to disk
|
| 27 |
+
output_path = f'datasets/{dataset_name}'
|
| 28 |
+
if isinstance(dataset, dict):
|
| 29 |
+
# If it's a dictionary (train/test/validation splits)
|
| 30 |
+
for split_name, split_data in dataset.items():
|
| 31 |
+
split_path = f'{output_path}_{split_name}'
|
| 32 |
+
split_data.save_to_disk(split_path)
|
| 33 |
+
print(f" Saved {split_name} split to {split_path}")
|
| 34 |
+
else:
|
| 35 |
+
# If it's a single dataset
|
| 36 |
+
dataset.save_to_disk(output_path)
|
| 37 |
+
print(f" Saved to {output_path}")
|
| 38 |
+
|
| 39 |
+
except Exception as e:
|
| 40 |
+
print(f" Failed to download {dataset_name}: {str(e)}")
|
| 41 |
+
print(f" Trying alternative names...")
|
| 42 |
+
|
| 43 |
+
# Try some common variations
|
| 44 |
+
alternatives = [
|
| 45 |
+
dataset_name.replace('_indic', ''),
|
| 46 |
+
dataset_name.replace('_indic', '_indian'),
|
| 47 |
+
dataset_name.replace('_benchmark', '_benchmarks'),
|
| 48 |
+
dataset_name.replace('_general', '_general_bench'),
|
| 49 |
+
dataset_name.replace('_robustness', '_robust'),
|
| 50 |
+
]
|
| 51 |
+
|
| 52 |
+
for alt_name in alternatives:
|
| 53 |
+
try:
|
| 54 |
+
print(f" Trying: {alt_name}")
|
| 55 |
+
dataset = load_dataset(alt_name)
|
| 56 |
+
output_path = f'datasets/{alt_name}'
|
| 57 |
+
if isinstance(dataset, dict):
|
| 58 |
+
for split_name, split_data in dataset.items():
|
| 59 |
+
split_path = f'{output_path}_{split_name}'
|
| 60 |
+
split_data.save_to_disk(split_path)
|
| 61 |
+
print(f" Saved {split_name} split to {split_path}")
|
| 62 |
+
else:
|
| 63 |
+
dataset.save_to_disk(output_path)
|
| 64 |
+
print(f" Saved to {output_path}")
|
| 65 |
+
break
|
| 66 |
+
except:
|
| 67 |
+
continue
|
| 68 |
+
print("Dataset download complete!")
|
venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,202 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.4
|
| 2 |
+
Name: h11
|
| 3 |
+
Version: 0.16.0
|
| 4 |
+
Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1
|
| 5 |
+
Home-page: https://github.com/python-hyper/h11
|
| 6 |
+
Author: Nathaniel J. Smith
|
| 7 |
+
Author-email: njs@pobox.com
|
| 8 |
+
License: MIT
|
| 9 |
+
Classifier: Development Status :: 3 - Alpha
|
| 10 |
+
Classifier: Intended Audience :: Developers
|
| 11 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 12 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 13 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 14 |
+
Classifier: Programming Language :: Python :: 3
|
| 15 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 16 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 21 |
+
Classifier: Topic :: Internet :: WWW/HTTP
|
| 22 |
+
Classifier: Topic :: System :: Networking
|
| 23 |
+
Requires-Python: >=3.8
|
| 24 |
+
License-File: LICENSE.txt
|
| 25 |
+
Dynamic: author
|
| 26 |
+
Dynamic: author-email
|
| 27 |
+
Dynamic: classifier
|
| 28 |
+
Dynamic: description
|
| 29 |
+
Dynamic: home-page
|
| 30 |
+
Dynamic: license
|
| 31 |
+
Dynamic: license-file
|
| 32 |
+
Dynamic: requires-python
|
| 33 |
+
Dynamic: summary
|
| 34 |
+
|
| 35 |
+
h11
|
| 36 |
+
===
|
| 37 |
+
|
| 38 |
+
.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master
|
| 39 |
+
:target: https://travis-ci.org/python-hyper/h11
|
| 40 |
+
:alt: Automated test status
|
| 41 |
+
|
| 42 |
+
.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg
|
| 43 |
+
:target: https://codecov.io/gh/python-hyper/h11
|
| 44 |
+
:alt: Test coverage
|
| 45 |
+
|
| 46 |
+
.. image:: https://readthedocs.org/projects/h11/badge/?version=latest
|
| 47 |
+
:target: http://h11.readthedocs.io/en/latest/?badge=latest
|
| 48 |
+
:alt: Documentation Status
|
| 49 |
+
|
| 50 |
+
This is a little HTTP/1.1 library written from scratch in Python,
|
| 51 |
+
heavily inspired by `hyper-h2 <https://hyper-h2.readthedocs.io/>`_.
|
| 52 |
+
|
| 53 |
+
It's a "bring-your-own-I/O" library; h11 contains no IO code
|
| 54 |
+
whatsoever. This means you can hook h11 up to your favorite network
|
| 55 |
+
API, and that could be anything you want: synchronous, threaded,
|
| 56 |
+
asynchronous, or your own implementation of `RFC 6214
|
| 57 |
+
<https://tools.ietf.org/html/rfc6214>`_ -- h11 won't judge you.
|
| 58 |
+
(Compare this to the current state of the art, where every time a `new
|
| 59 |
+
network API <https://trio.readthedocs.io/>`_ comes along then someone
|
| 60 |
+
gets to start over reimplementing the entire HTTP protocol from
|
| 61 |
+
scratch.) Cory Benfield made an `excellent blog post describing the
|
| 62 |
+
benefits of this approach
|
| 63 |
+
<https://lukasa.co.uk/2015/10/The_New_Hyper/>`_, or if you like video
|
| 64 |
+
then here's his `PyCon 2016 talk on the same theme
|
| 65 |
+
<https://www.youtube.com/watch?v=7cC3_jGwl_U>`_.
|
| 66 |
+
|
| 67 |
+
This also means that h11 is not immediately useful out of the box:
|
| 68 |
+
it's a toolkit for building programs that speak HTTP, not something
|
| 69 |
+
that could directly replace ``requests`` or ``twisted.web`` or
|
| 70 |
+
whatever. But h11 makes it much easier to implement something like
|
| 71 |
+
``requests`` or ``twisted.web``.
|
| 72 |
+
|
| 73 |
+
At a high level, working with h11 goes like this:
|
| 74 |
+
|
| 75 |
+
1) First, create an ``h11.Connection`` object to track the state of a
|
| 76 |
+
single HTTP/1.1 connection.
|
| 77 |
+
|
| 78 |
+
2) When you read data off the network, pass it to
|
| 79 |
+
``conn.receive_data(...)``; you'll get back a list of objects
|
| 80 |
+
representing high-level HTTP "events".
|
| 81 |
+
|
| 82 |
+
3) When you want to send a high-level HTTP event, create the
|
| 83 |
+
corresponding "event" object and pass it to ``conn.send(...)``;
|
| 84 |
+
this will give you back some bytes that you can then push out
|
| 85 |
+
through the network.
|
| 86 |
+
|
| 87 |
+
For example, a client might instantiate and then send a
|
| 88 |
+
``h11.Request`` object, then zero or more ``h11.Data`` objects for the
|
| 89 |
+
request body (e.g., if this is a POST), and then a
|
| 90 |
+
``h11.EndOfMessage`` to indicate the end of the message. Then the
|
| 91 |
+
server would then send back a ``h11.Response``, some ``h11.Data``, and
|
| 92 |
+
its own ``h11.EndOfMessage``. If either side violates the protocol,
|
| 93 |
+
you'll get a ``h11.ProtocolError`` exception.
|
| 94 |
+
|
| 95 |
+
h11 is suitable for implementing both servers and clients, and has a
|
| 96 |
+
pleasantly symmetric API: the events you send as a client are exactly
|
| 97 |
+
the ones that you receive as a server and vice-versa.
|
| 98 |
+
|
| 99 |
+
`Here's an example of a tiny HTTP client
|
| 100 |
+
<https://github.com/python-hyper/h11/blob/master/examples/basic-client.py>`_
|
| 101 |
+
|
| 102 |
+
It also has `a fine manual <https://h11.readthedocs.io/>`_.
|
| 103 |
+
|
| 104 |
+
FAQ
|
| 105 |
+
---
|
| 106 |
+
|
| 107 |
+
*Whyyyyy?*
|
| 108 |
+
|
| 109 |
+
I wanted to play with HTTP in `Curio
|
| 110 |
+
<https://curio.readthedocs.io/en/latest/tutorial.html>`__ and `Trio
|
| 111 |
+
<https://trio.readthedocs.io>`__, which at the time didn't have any
|
| 112 |
+
HTTP libraries. So I thought, no big deal, Python has, like, a dozen
|
| 113 |
+
different implementations of HTTP, surely I can find one that's
|
| 114 |
+
reusable. I didn't find one, but I did find Cory's call-to-arms
|
| 115 |
+
blog-post. So I figured, well, fine, if I have to implement HTTP from
|
| 116 |
+
scratch, at least I can make sure no-one *else* has to ever again.
|
| 117 |
+
|
| 118 |
+
*Should I use it?*
|
| 119 |
+
|
| 120 |
+
Maybe. You should be aware that it's a very young project. But, it's
|
| 121 |
+
feature complete and has an exhaustive test-suite and complete docs,
|
| 122 |
+
so the next step is for people to try using it and see how it goes
|
| 123 |
+
:-). If you do then please let us know -- if nothing else we'll want
|
| 124 |
+
to talk to you before making any incompatible changes!
|
| 125 |
+
|
| 126 |
+
*What are the features/limitations?*
|
| 127 |
+
|
| 128 |
+
Roughly speaking, it's trying to be a robust, complete, and non-hacky
|
| 129 |
+
implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230:
|
| 130 |
+
HTTP/1.1 Message Syntax and Routing
|
| 131 |
+
<https://tools.ietf.org/html/rfc7230>`_. That is, it mostly focuses on
|
| 132 |
+
implementing HTTP at the level of taking bytes on and off the wire,
|
| 133 |
+
and the headers related to that, and tries to be anal about spec
|
| 134 |
+
conformance. It doesn't know about higher-level concerns like URL
|
| 135 |
+
routing, conditional GETs, cross-origin cookie policies, or content
|
| 136 |
+
negotiation. But it does know how to take care of framing,
|
| 137 |
+
cross-version differences in keep-alive handling, and the "obsolete
|
| 138 |
+
line folding" rule, so you can focus your energies on the hard /
|
| 139 |
+
interesting parts for your application, and it tries to support the
|
| 140 |
+
full specification in the sense that any useful HTTP/1.1 conformant
|
| 141 |
+
application should be able to use h11.
|
| 142 |
+
|
| 143 |
+
It's pure Python, and has no dependencies outside of the standard
|
| 144 |
+
library.
|
| 145 |
+
|
| 146 |
+
It has a test suite with 100.0% coverage for both statements and
|
| 147 |
+
branches.
|
| 148 |
+
|
| 149 |
+
Currently it supports Python 3 (testing on 3.8-3.12) and PyPy 3.
|
| 150 |
+
The last Python 2-compatible version was h11 0.11.x.
|
| 151 |
+
(Originally it had a Cython wrapper for `http-parser
|
| 152 |
+
<https://github.com/nodejs/http-parser>`_ and a beautiful nested state
|
| 153 |
+
machine implemented with ``yield from`` to postprocess the output. But
|
| 154 |
+
I had to take these out -- the new *parser* needs fewer lines-of-code
|
| 155 |
+
than the old *parser wrapper*, is written in pure Python, uses no
|
| 156 |
+
exotic language syntax, and has more features. It's sad, really; that
|
| 157 |
+
old state machine was really slick. I just need a few sentences here
|
| 158 |
+
to mourn that.)
|
| 159 |
+
|
| 160 |
+
I don't know how fast it is. I haven't benchmarked or profiled it yet,
|
| 161 |
+
so it's probably got a few pointless hot spots, and I've been trying
|
| 162 |
+
to err on the side of simplicity and robustness instead of
|
| 163 |
+
micro-optimization. But at the architectural level I tried hard to
|
| 164 |
+
avoid fundamentally bad decisions, e.g., I believe that all the
|
| 165 |
+
parsing algorithms remain linear-time even in the face of pathological
|
| 166 |
+
input like slowloris, and there are no byte-by-byte loops. (I also
|
| 167 |
+
believe that it maintains bounded memory usage in the face of
|
| 168 |
+
arbitrary/pathological input.)
|
| 169 |
+
|
| 170 |
+
The whole library is ~800 lines-of-code. You can read and understand
|
| 171 |
+
the whole thing in less than an hour. Most of the energy invested in
|
| 172 |
+
this so far has been spent on trying to keep things simple by
|
| 173 |
+
minimizing special-cases and ad hoc state manipulation; even though it
|
| 174 |
+
is now quite small and simple, I'm still annoyed that I haven't
|
| 175 |
+
figured out how to make it even smaller and simpler. (Unfortunately,
|
| 176 |
+
HTTP does not lend itself to simplicity.)
|
| 177 |
+
|
| 178 |
+
The API is ~feature complete and I don't expect the general outlines
|
| 179 |
+
to change much, but you can't judge an API's ergonomics until you
|
| 180 |
+
actually document and use it, so I'd expect some changes in the
|
| 181 |
+
details.
|
| 182 |
+
|
| 183 |
+
*How do I try it?*
|
| 184 |
+
|
| 185 |
+
.. code-block:: sh
|
| 186 |
+
|
| 187 |
+
$ pip install h11
|
| 188 |
+
$ git clone git@github.com:python-hyper/h11
|
| 189 |
+
$ cd h11/examples
|
| 190 |
+
$ python basic-client.py
|
| 191 |
+
|
| 192 |
+
and go from there.
|
| 193 |
+
|
| 194 |
+
*License?*
|
| 195 |
+
|
| 196 |
+
MIT
|
| 197 |
+
|
| 198 |
+
*Code of conduct?*
|
| 199 |
+
|
| 200 |
+
Contributors are requested to follow our `code of conduct
|
| 201 |
+
<https://github.com/python-hyper/h11/blob/master/CODE_OF_CONDUCT.md>`_ in
|
| 202 |
+
all project spaces.
|
venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
h11-0.16.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
h11-0.16.0.dist-info/METADATA,sha256=KPMmCYrAn8unm48YD5YIfIQf4kViFct7hyqcfVzRnWQ,8348
|
| 3 |
+
h11-0.16.0.dist-info/RECORD,,
|
| 4 |
+
h11-0.16.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
|
| 5 |
+
h11-0.16.0.dist-info/licenses/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124
|
| 6 |
+
h11-0.16.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4
|
| 7 |
+
h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507
|
| 8 |
+
h11/__pycache__/__init__.cpython-312.pyc,,
|
| 9 |
+
h11/__pycache__/_abnf.cpython-312.pyc,,
|
| 10 |
+
h11/__pycache__/_connection.cpython-312.pyc,,
|
| 11 |
+
h11/__pycache__/_events.cpython-312.pyc,,
|
| 12 |
+
h11/__pycache__/_headers.cpython-312.pyc,,
|
| 13 |
+
h11/__pycache__/_readers.cpython-312.pyc,,
|
| 14 |
+
h11/__pycache__/_receivebuffer.cpython-312.pyc,,
|
| 15 |
+
h11/__pycache__/_state.cpython-312.pyc,,
|
| 16 |
+
h11/__pycache__/_util.cpython-312.pyc,,
|
| 17 |
+
h11/__pycache__/_version.cpython-312.pyc,,
|
| 18 |
+
h11/__pycache__/_writers.cpython-312.pyc,,
|
| 19 |
+
h11/_abnf.py,sha256=ybixr0xsupnkA6GFAyMubuXF6Tc1lb_hF890NgCsfNc,4815
|
| 20 |
+
h11/_connection.py,sha256=k9YRVf6koZqbttBW36xSWaJpWdZwa-xQVU9AHEo9DuI,26863
|
| 21 |
+
h11/_events.py,sha256=I97aXoal1Wu7dkL548BANBUCkOIbe-x5CioYA9IBY14,11792
|
| 22 |
+
h11/_headers.py,sha256=P7D-lBNxHwdLZPLimmYwrPG-9ZkjElvvJZJdZAgSP-4,10412
|
| 23 |
+
h11/_readers.py,sha256=a4RypORUCC3d0q_kxPuBIM7jTD8iLt5X91TH0FsduN4,8590
|
| 24 |
+
h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252
|
| 25 |
+
h11/_state.py,sha256=_5LG_BGR8FCcFQeBPH-TMHgm_-B-EUcWCnQof_9XjFE,13231
|
| 26 |
+
h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888
|
| 27 |
+
h11/_version.py,sha256=GVSsbPSPDcOuF6ptfIiXnVJoaEm3ygXbMnqlr_Giahw,686
|
| 28 |
+
h11/_writers.py,sha256=oFKm6PtjeHfbj4RLX7VB7KDc1gIY53gXG3_HR9ltmTA,5081
|
| 29 |
+
h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7
|
venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (78.1.0)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
h11
|
venv/lib/python3.12/site-packages/h11/__init__.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Vendored copy of h11's package entry point.  This module only re-exports
# the public API; all behavior lives in the h11._* submodules.
#
# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230),
# containing no networking code at all, loosely modelled on hyper-h2's generic
# implementation of HTTP/2 (and in particular the h2.connection.H2Connection
# class). There's still a bunch of subtle details you need to get right if you
# want to make this actually useful, because it doesn't implement all the
# semantics to check that what you're asking to write to the wire is sensible,
# but at least it gets you out of dealing with the wire itself.

from h11._connection import Connection, NEED_DATA, PAUSED
from h11._events import (
    ConnectionClosed,
    Data,
    EndOfMessage,
    Event,
    InformationalResponse,
    Request,
    Response,
)
from h11._state import (
    CLIENT,
    CLOSED,
    DONE,
    ERROR,
    IDLE,
    MIGHT_SWITCH_PROTOCOL,
    MUST_CLOSE,
    SEND_BODY,
    SEND_RESPONSE,
    SERVER,
    SWITCHED_PROTOCOL,
)
from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError
from h11._version import __version__

# Product token for User-Agent / Server headers, e.g. "python-h11/0.16.0".
PRODUCT_ID = "python-h11/" + __version__


# Explicit public API.  NOTE(review): MIGHT_SWITCH_PROTOCOL is imported above
# but not listed here — matches upstream h11; kept as-is in this vendored copy.
__all__ = (
    "Connection",
    "NEED_DATA",
    "PAUSED",
    "ConnectionClosed",
    "Data",
    "EndOfMessage",
    "Event",
    "InformationalResponse",
    "Request",
    "Response",
    "CLIENT",
    "CLOSED",
    "DONE",
    "ERROR",
    "IDLE",
    "MUST_CLOSE",
    "SEND_BODY",
    "SEND_RESPONSE",
    "SERVER",
    "SWITCHED_PROTOCOL",
    "ProtocolError",
    "LocalProtocolError",
    "RemoteProtocolError",
)
venv/lib/python3.12/site-packages/h11/_abnf.py
ADDED
|
@@ -0,0 +1,132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Vendored copy of h11's ABNF grammar fragments (RFC 7230 / RFC 5234).

Regex source strings for HTTP/1.1 message parsing.  Definition order matters:
later patterns are assembled from earlier ones via ``.format(**globals())``,
so each name must exist before it is interpolated.  Patterns are native
strings here and are encoded to bytes where they are finally compiled.
"""

# We use native strings for all the re patterns, to take advantage of string
# formatting, and then convert to bytestrings when compiling the final re
# objects.

# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace
#  OWS            = *( SP / HTAB )
#                 ; optional whitespace
OWS = r"[ \t]*"

# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators
#   token          = 1*tchar
#
#   tchar          = "!" / "#" / "$" / "%" / "&" / "'" / "*"
#                  / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
#                  / DIGIT / ALPHA
#                  ; any VCHAR, except delimiters
token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+"

# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields
#  field-name     = token
field_name = token

# The standard says:
#
#  field-value    = *( field-content / obs-fold )
#  field-content  = field-vchar [ 1*( SP / HTAB ) field-vchar ]
#  field-vchar    = VCHAR / obs-text
#  obs-fold       = CRLF 1*( SP / HTAB )
#                 ; obsolete line folding
#                 ; see Section 3.2.4
#
# https://tools.ietf.org/html/rfc5234#appendix-B.1
#
#   VCHAR          =  %x21-7E
#                  ; visible (printing) characters
#
# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string
#   obs-text       = %x80-FF
#
# However, the standard definition of field-content is WRONG! It disallows
# fields containing a single visible character surrounded by whitespace,
# e.g. "foo a bar".
#
# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189
#
# So our definition of field_content attempts to fix it up...
#
# Also, we allow lots of control characters, because apparently people assume
# that they're legal in practice (e.g., google analytics makes cookies with
# \x01 in them!):
#   https://github.com/python-hyper/h11/issues/57
# We still don't allow NUL or whitespace, because those are often treated as
# meta-characters and letting them through can lead to nasty issues like SSRF.
vchar = r"[\x21-\x7e]"
vchar_or_obs_text = r"[^\x00\s]"
field_vchar = vchar_or_obs_text
field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals())

# We handle obs-fold at a different level, and our fixed-up field_content
# already grows to swallow the whole value, so ? instead of *
field_value = r"({field_content})?".format(**globals())

#  header-field   = field-name ":" OWS field-value OWS
header_field = (
    r"(?P<field_name>{field_name})"
    r":"
    r"{OWS}"
    r"(?P<field_value>{field_value})"
    r"{OWS}".format(**globals())
)

# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line
#
#   request-line   = method SP request-target SP HTTP-version CRLF
#   method         = token
#   HTTP-version   = HTTP-name "/" DIGIT "." DIGIT
#   HTTP-name      = %x48.54.54.50 ; "HTTP", case-sensitive
#
# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full
# URL, host+port (for connect), or even "*", but in any case we are guaranteed
# that it contists of the visible printing characters.
method = token
request_target = r"{vchar}+".format(**globals())
http_version = r"HTTP/(?P<http_version>[0-9]\.[0-9])"
request_line = (
    r"(?P<method>{method})"
    r" "
    r"(?P<target>{request_target})"
    r" "
    r"{http_version}".format(**globals())
)

# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line
#
#   status-line = HTTP-version SP status-code SP reason-phrase CRLF
#   status-code    = 3DIGIT
#   reason-phrase  = *( HTAB / SP / VCHAR / obs-text )
status_code = r"[0-9]{3}"
reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals())
status_line = (
    r"{http_version}"
    r" "
    r"(?P<status_code>{status_code})"
    # However, there are apparently a few too many servers out there that just
    # leave out the reason phrase:
    #   https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036
    #   https://github.com/seanmonstar/httparse/issues/29
    # so make it optional. ?: is a non-capturing group.
    r"(?: (?P<reason>{reason_phrase}))?".format(**globals())
)

HEXDIG = r"[0-9A-Fa-f]"
# Actually
#
#      chunk-size     = 1*HEXDIG
#
# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20
chunk_size = r"({HEXDIG}){{1,20}}".format(**globals())
# Actually
#
#     chunk-ext      = *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
#
# but we aren't parsing the things so we don't really care.
chunk_ext = r";.*"
chunk_header = (
    r"(?P<chunk_size>{chunk_size})"
    r"(?P<chunk_ext>{chunk_ext})?"
    r"{OWS}\r\n".format(
        **globals()
    )  # Even though the specification does not allow for extra whitespaces,
    # we are lenient with trailing whitespaces because some servers on the wild use it.
)
venv/lib/python3.12/site-packages/h11/_connection.py
ADDED
|
@@ -0,0 +1,659 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This contains the main Connection class. Everything in h11 revolves around
|
| 2 |
+
# this.
|
| 3 |
+
from typing import (
|
| 4 |
+
Any,
|
| 5 |
+
Callable,
|
| 6 |
+
cast,
|
| 7 |
+
Dict,
|
| 8 |
+
List,
|
| 9 |
+
Optional,
|
| 10 |
+
overload,
|
| 11 |
+
Tuple,
|
| 12 |
+
Type,
|
| 13 |
+
Union,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
from ._events import (
|
| 17 |
+
ConnectionClosed,
|
| 18 |
+
Data,
|
| 19 |
+
EndOfMessage,
|
| 20 |
+
Event,
|
| 21 |
+
InformationalResponse,
|
| 22 |
+
Request,
|
| 23 |
+
Response,
|
| 24 |
+
)
|
| 25 |
+
from ._headers import get_comma_header, has_expect_100_continue, set_comma_header
|
| 26 |
+
from ._readers import READERS, ReadersType
|
| 27 |
+
from ._receivebuffer import ReceiveBuffer
|
| 28 |
+
from ._state import (
|
| 29 |
+
_SWITCH_CONNECT,
|
| 30 |
+
_SWITCH_UPGRADE,
|
| 31 |
+
CLIENT,
|
| 32 |
+
ConnectionState,
|
| 33 |
+
DONE,
|
| 34 |
+
ERROR,
|
| 35 |
+
MIGHT_SWITCH_PROTOCOL,
|
| 36 |
+
SEND_BODY,
|
| 37 |
+
SERVER,
|
| 38 |
+
SWITCHED_PROTOCOL,
|
| 39 |
+
)
|
| 40 |
+
from ._util import ( # Import the internal things we need
|
| 41 |
+
LocalProtocolError,
|
| 42 |
+
RemoteProtocolError,
|
| 43 |
+
Sentinel,
|
| 44 |
+
)
|
| 45 |
+
from ._writers import WRITERS, WritersType
|
| 46 |
+
|
| 47 |
+
# Everything in __all__ gets re-exported as part of the h11 public API.
|
| 48 |
+
__all__ = ["Connection", "NEED_DATA", "PAUSED"]
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class NEED_DATA(Sentinel, metaclass=Sentinel):
    # Sentinel returned by Connection.next_event() when more bytes must be
    # fed to receive_data() before another event can be parsed.
    pass
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class PAUSED(Sentinel, metaclass=Sentinel):
    # Sentinel returned by Connection.next_event() when we are not currently
    # in a state that can process incoming data (see the flow-control docs).
    pass
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
# If we ever have this much buffered without it making a complete parseable
# event, we error out. The only time we really buffer is when reading the
# request/response line + headers together, so this is effectively the limit on
# the size of that.
#
# Some precedents for defaults:
# - node.js: 80 * 1024
# - tomcat: 8 * 1024
# - IIS: 16 * 1024
# - Apache: <8 KiB per line>
DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
# RFC 7230's rules for connection lifecycles:
|
| 73 |
+
# - If either side says they want to close the connection, then the connection
|
| 74 |
+
# must close.
|
| 75 |
+
# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close
|
| 76 |
+
# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive
|
| 77 |
+
# (and even this is a mess -- e.g. if you're implementing a proxy then
|
| 78 |
+
# sending Connection: keep-alive is forbidden).
|
| 79 |
+
#
|
| 80 |
+
# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So
|
| 81 |
+
# our rule is:
|
| 82 |
+
# - If someone says Connection: close, we will close
|
| 83 |
+
# - If someone uses HTTP/1.0, we will close.
|
| 84 |
+
def _keep_alive(event: Union[Request, Response]) -> bool:
    """Return True if the connection may stay open after this event.

    We close whenever either a ``Connection: close`` token is present or the
    peer speaks HTTP/1.0 or older (we don't do HTTP/1.0 keep-alive).
    """
    connection_tokens = get_comma_header(event.headers, b"connection")
    if b"close" in connection_tokens:
        return False
    # Events without an http_version attribute are treated as HTTP/1.1.
    http_version = getattr(event, "http_version", b"1.1")
    return http_version >= b"1.1"
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def _body_framing(
    request_method: bytes, event: Union[Request, Response]
) -> Tuple[str, Union[Tuple[()], Tuple[int]]]:
    """Determine how the body attached to *event* is framed on the wire.

    Called when we enter SEND_BODY to figure out framing information for
    this body.

    Returns one of:

        ("content-length", (count,))
        ("chunked", ())
        ("http/1.0", ())

    which are (lookup key, *args) for constructing body reader/writer
    objects.

    Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3
    """
    # These are the only two events that can trigger a SEND_BODY state:
    assert type(event) in (Request, Response)

    # Step 1: some responses always have an empty body, regardless of what the
    # headers say.
    if type(event) is Response:
        if (
            event.status_code in (204, 304)
            or request_method == b"HEAD"
            or (request_method == b"CONNECT" and 200 <= event.status_code < 300)
        ):
            return ("content-length", (0,))
        # Section 3.3.3 also lists another case -- responses with status_code
        # < 200. For us these are InformationalResponses, not Responses, so
        # they can't get into this function in the first place.
        assert event.status_code >= 200

    # Step 2: check for Transfer-Encoding (T-E beats C-L):
    transfer_encodings = get_comma_header(event.headers, b"transfer-encoding")
    if transfer_encodings:
        # Header validation elsewhere guarantees "chunked" is the only value.
        assert transfer_encodings == [b"chunked"]
        return ("chunked", ())

    # Step 3: check for Content-Length
    content_lengths = get_comma_header(event.headers, b"content-length")
    if content_lengths:
        return ("content-length", (int(content_lengths[0]),))

    # Step 4: no applicable headers; fallback/default depends on type.
    # Requests default to an empty body; responses fall back to
    # read-until-EOF (HTTP/1.0 style) framing.
    if type(event) is Request:
        return ("content-length", (0,))
    else:
        return ("http/1.0", ())
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
################################################################
|
| 145 |
+
#
|
| 146 |
+
# The main Connection class
|
| 147 |
+
#
|
| 148 |
+
################################################################
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
class Connection:
|
| 152 |
+
"""An object encapsulating the state of an HTTP connection.
|
| 153 |
+
|
| 154 |
+
Args:
|
| 155 |
+
our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If
|
| 156 |
+
you're implementing a server, pass :data:`h11.SERVER`.
|
| 157 |
+
|
| 158 |
+
max_incomplete_event_size (int):
|
| 159 |
+
The maximum number of bytes we're willing to buffer of an
|
| 160 |
+
incomplete event. In practice this mostly sets a limit on the
|
| 161 |
+
maximum size of the request/response line + headers. If this is
|
| 162 |
+
exceeded, then :meth:`next_event` will raise
|
| 163 |
+
:exc:`RemoteProtocolError`.
|
| 164 |
+
|
| 165 |
+
"""
|
| 166 |
+
|
| 167 |
+
    def __init__(
        self,
        our_role: Type[Sentinel],
        max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE,
    ) -> None:
        """Set up the initial per-connection state for the given role."""
        self._max_incomplete_event_size = max_incomplete_event_size
        # State and role tracking
        if our_role not in (CLIENT, SERVER):
            raise ValueError(f"expected CLIENT or SERVER, not {our_role!r}")
        self.our_role = our_role
        self.their_role: Type[Sentinel]
        if our_role is CLIENT:
            self.their_role = SERVER
        else:
            self.their_role = CLIENT
        self._cstate = ConnectionState()

        # Callables for converting data->events or vice-versa given the
        # current state
        self._writer = self._get_io_object(self.our_role, None, WRITERS)
        self._reader = self._get_io_object(self.their_role, None, READERS)

        # Holds any unprocessed received data
        self._receive_buffer = ReceiveBuffer()
        # If this is true, then it indicates that the incoming connection was
        # closed *after* the end of whatever's in self._receive_buffer:
        self._receive_buffer_closed = False

        # Extra bits of state that don't fit into the state machine.
        #
        # These two are only used to interpret framing headers for figuring
        # out how to read/write response bodies. their_http_version is also
        # made available as a convenient public API.
        self.their_http_version: Optional[bytes] = None
        self._request_method: Optional[bytes] = None
        # This is pure flow-control and doesn't at all affect the set of legal
        # transitions, so no need to bother ConnectionState with it:
        self.client_is_waiting_for_100_continue = False
|
| 205 |
+
|
| 206 |
+
@property
|
| 207 |
+
def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]:
|
| 208 |
+
"""A dictionary like::
|
| 209 |
+
|
| 210 |
+
{CLIENT: <client state>, SERVER: <server state>}
|
| 211 |
+
|
| 212 |
+
See :ref:`state-machine` for details.
|
| 213 |
+
|
| 214 |
+
"""
|
| 215 |
+
return dict(self._cstate.states)
|
| 216 |
+
|
| 217 |
+
@property
|
| 218 |
+
def our_state(self) -> Type[Sentinel]:
|
| 219 |
+
"""The current state of whichever role we are playing. See
|
| 220 |
+
:ref:`state-machine` for details.
|
| 221 |
+
"""
|
| 222 |
+
return self._cstate.states[self.our_role]
|
| 223 |
+
|
| 224 |
+
@property
|
| 225 |
+
def their_state(self) -> Type[Sentinel]:
|
| 226 |
+
"""The current state of whichever role we are NOT playing. See
|
| 227 |
+
:ref:`state-machine` for details.
|
| 228 |
+
"""
|
| 229 |
+
return self._cstate.states[self.their_role]
|
| 230 |
+
|
| 231 |
+
@property
|
| 232 |
+
def they_are_waiting_for_100_continue(self) -> bool:
|
| 233 |
+
return self.their_role is CLIENT and self.client_is_waiting_for_100_continue
|
| 234 |
+
|
| 235 |
+
    def start_next_cycle(self) -> None:
        """Attempt to reset our connection state for a new request/response
        cycle.

        If both client and server are in :data:`DONE` state, then resets them
        both to :data:`IDLE` state in preparation for a new request/response
        cycle on this same connection. Otherwise, raises a
        :exc:`LocalProtocolError`.

        See :ref:`keepalive-and-pipelining`.

        """
        old_states = dict(self._cstate.states)
        # Raises LocalProtocolError unless both sides are DONE and keep-alive
        # is still enabled.
        self._cstate.start_next_cycle()
        self._request_method = None
        # self.their_http_version gets left alone, since it presumably lasts
        # beyond a single request/response cycle
        assert not self.client_is_waiting_for_100_continue
        self._respond_to_state_changes(old_states)
|
| 254 |
+
|
| 255 |
+
def _process_error(self, role: Type[Sentinel]) -> None:
|
| 256 |
+
old_states = dict(self._cstate.states)
|
| 257 |
+
self._cstate.process_error(role)
|
| 258 |
+
self._respond_to_state_changes(old_states)
|
| 259 |
+
|
| 260 |
+
def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]:
|
| 261 |
+
if type(event) is InformationalResponse and event.status_code == 101:
|
| 262 |
+
return _SWITCH_UPGRADE
|
| 263 |
+
if type(event) is Response:
|
| 264 |
+
if (
|
| 265 |
+
_SWITCH_CONNECT in self._cstate.pending_switch_proposals
|
| 266 |
+
and 200 <= event.status_code < 300
|
| 267 |
+
):
|
| 268 |
+
return _SWITCH_CONNECT
|
| 269 |
+
return None
|
| 270 |
+
|
| 271 |
+
    # All events go through here
    def _process_event(self, role: Type[Sentinel], event: Event) -> None:
        """Run *event* through the state machine and apply its side effects."""
        # First, pass the event through the state machine to make sure it
        # succeeds.
        old_states = dict(self._cstate.states)
        if role is CLIENT and type(event) is Request:
            if event.method == b"CONNECT":
                self._cstate.process_client_switch_proposal(_SWITCH_CONNECT)
            if get_comma_header(event.headers, b"upgrade"):
                self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE)
        server_switch_event = None
        if role is SERVER:
            server_switch_event = self._server_switch_event(event)
        # May raise; if so we never reach the bookkeeping below.
        self._cstate.process_event(role, type(event), server_switch_event)

        # Then perform the updates triggered by it.

        if type(event) is Request:
            self._request_method = event.method

        if role is self.their_role and type(event) in (
            Request,
            Response,
            InformationalResponse,
        ):
            event = cast(Union[Request, Response, InformationalResponse], event)
            self.their_http_version = event.http_version

        # Keep alive handling
        #
        # RFC 7230 doesn't really say what one should do if Connection: close
        # shows up on a 1xx InformationalResponse. I think the idea is that
        # this is not supposed to happen. In any case, if it does happen, we
        # ignore it.
        if type(event) in (Request, Response) and not _keep_alive(
            cast(Union[Request, Response], event)
        ):
            self._cstate.process_keep_alive_disabled()

        # 100-continue
        if type(event) is Request and has_expect_100_continue(event):
            self.client_is_waiting_for_100_continue = True
        if type(event) in (InformationalResponse, Response):
            self.client_is_waiting_for_100_continue = False
        if role is CLIENT and type(event) in (Data, EndOfMessage):
            self.client_is_waiting_for_100_continue = False

        self._respond_to_state_changes(old_states, event)
|
| 319 |
+
|
| 320 |
+
    def _get_io_object(
        self,
        role: Type[Sentinel],
        event: Optional[Event],
        io_dict: Union[ReadersType, WritersType],
    ) -> Optional[Callable[..., Any]]:
        """Look up the reader/writer callable for *role*'s current state.

        *event* may be None; it's only used when entering SEND_BODY, where
        the framing headers on the Request/Response decide which body
        reader/writer to build.
        """
        state = self._cstate.states[role]
        if state is SEND_BODY:
            # Special case: the io_dict has a dict of reader/writer factories
            # that depend on the request/response framing.
            framing_type, args = _body_framing(
                cast(bytes, self._request_method), cast(Union[Request, Response], event)
            )
            return io_dict[SEND_BODY][framing_type](*args)  # type: ignore[index]
        else:
            # General case: the io_dict just has the appropriate reader/writer
            # for this state
            return io_dict.get((role, state))  # type: ignore[return-value]
|
| 339 |
+
|
| 340 |
+
# This must be called after any action that might have caused
|
| 341 |
+
# self._cstate.states to change.
|
| 342 |
+
def _respond_to_state_changes(
|
| 343 |
+
self,
|
| 344 |
+
old_states: Dict[Type[Sentinel], Type[Sentinel]],
|
| 345 |
+
event: Optional[Event] = None,
|
| 346 |
+
) -> None:
|
| 347 |
+
# Update reader/writer
|
| 348 |
+
if self.our_state != old_states[self.our_role]:
|
| 349 |
+
self._writer = self._get_io_object(self.our_role, event, WRITERS)
|
| 350 |
+
if self.their_state != old_states[self.their_role]:
|
| 351 |
+
self._reader = self._get_io_object(self.their_role, event, READERS)
|
| 352 |
+
|
| 353 |
+
@property
|
| 354 |
+
def trailing_data(self) -> Tuple[bytes, bool]:
|
| 355 |
+
"""Data that has been received, but not yet processed, represented as
|
| 356 |
+
a tuple with two elements, where the first is a byte-string containing
|
| 357 |
+
the unprocessed data itself, and the second is a bool that is True if
|
| 358 |
+
the receive connection was closed.
|
| 359 |
+
|
| 360 |
+
See :ref:`switching-protocols` for discussion of why you'd want this.
|
| 361 |
+
"""
|
| 362 |
+
return (bytes(self._receive_buffer), self._receive_buffer_closed)
|
| 363 |
+
|
| 364 |
+
def receive_data(self, data: bytes) -> None:
|
| 365 |
+
"""Add data to our internal receive buffer.
|
| 366 |
+
|
| 367 |
+
This does not actually do any processing on the data, just stores
|
| 368 |
+
it. To trigger processing, you have to call :meth:`next_event`.
|
| 369 |
+
|
| 370 |
+
Args:
|
| 371 |
+
data (:term:`bytes-like object`):
|
| 372 |
+
The new data that was just received.
|
| 373 |
+
|
| 374 |
+
Special case: If *data* is an empty byte-string like ``b""``,
|
| 375 |
+
then this indicates that the remote side has closed the
|
| 376 |
+
connection (end of file). Normally this is convenient, because
|
| 377 |
+
standard Python APIs like :meth:`file.read` or
|
| 378 |
+
:meth:`socket.recv` use ``b""`` to indicate end-of-file, while
|
| 379 |
+
other failures to read are indicated using other mechanisms
|
| 380 |
+
like raising :exc:`TimeoutError`. When using such an API you
|
| 381 |
+
can just blindly pass through whatever you get from ``read``
|
| 382 |
+
to :meth:`receive_data`, and everything will work.
|
| 383 |
+
|
| 384 |
+
But, if you have an API where reading an empty string is a
|
| 385 |
+
valid non-EOF condition, then you need to be aware of this and
|
| 386 |
+
make sure to check for such strings and avoid passing them to
|
| 387 |
+
:meth:`receive_data`.
|
| 388 |
+
|
| 389 |
+
Returns:
|
| 390 |
+
Nothing, but after calling this you should call :meth:`next_event`
|
| 391 |
+
to parse the newly received data.
|
| 392 |
+
|
| 393 |
+
Raises:
|
| 394 |
+
RuntimeError:
|
| 395 |
+
Raised if you pass an empty *data*, indicating EOF, and then
|
| 396 |
+
pass a non-empty *data*, indicating more data that somehow
|
| 397 |
+
arrived after the EOF.
|
| 398 |
+
|
| 399 |
+
(Calling ``receive_data(b"")`` multiple times is fine,
|
| 400 |
+
and equivalent to calling it once.)
|
| 401 |
+
|
| 402 |
+
"""
|
| 403 |
+
if data:
|
| 404 |
+
if self._receive_buffer_closed:
|
| 405 |
+
raise RuntimeError("received close, then received more data?")
|
| 406 |
+
self._receive_buffer += data
|
| 407 |
+
else:
|
| 408 |
+
self._receive_buffer_closed = True
|
| 409 |
+
|
| 410 |
+
    def _extract_next_receive_event(
        self,
    ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]:
        """Try to parse one event from the receive buffer, without any of the
        error handling or buffer-limit checks that :meth:`next_event` adds."""
        state = self.their_state
        # We don't pause immediately when they enter DONE, because even in
        # DONE state we can still process a ConnectionClosed() event. But
        # if we have data in our buffer, then we definitely aren't getting
        # a ConnectionClosed() immediately and we need to pause.
        if state is DONE and self._receive_buffer:
            return PAUSED
        if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL:
            return PAUSED
        assert self._reader is not None
        event = self._reader(self._receive_buffer)
        if event is None:
            if not self._receive_buffer and self._receive_buffer_closed:
                # In some unusual cases (basically just HTTP/1.0 bodies), EOF
                # triggers an actual protocol event; in that case, we want to
                # return that event, and then the state will change and we'll
                # get called again to generate the actual ConnectionClosed().
                if hasattr(self._reader, "read_eof"):
                    event = self._reader.read_eof()
                else:
                    event = ConnectionClosed()
        if event is None:
            event = NEED_DATA
        return event  # type: ignore[no-any-return]
|
| 437 |
+
|
| 438 |
+
    def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]:
        """Parse the next event out of our receive buffer, update our internal
        state, and return it.

        This is a mutating operation -- think of it like calling :func:`next`
        on an iterator.

        Returns:
            : One of three things:

            1) An event object -- see :ref:`events`.

            2) The special constant :data:`NEED_DATA`, which indicates that
               you need to read more data from your socket and pass it to
               :meth:`receive_data` before this method will be able to return
               any more events.

            3) The special constant :data:`PAUSED`, which indicates that we
               are not in a state where we can process incoming data (usually
               because the peer has finished their part of the current
               request/response cycle, and you have not yet called
               :meth:`start_next_cycle`). See :ref:`flow-control` for details.

        Raises:
            RemoteProtocolError:
                The peer has misbehaved. You should close the connection
                (possibly after sending some kind of 4xx response).

        Once this method returns :class:`ConnectionClosed` once, then all
        subsequent calls will also return :class:`ConnectionClosed`.

        If this method raises any exception besides :exc:`RemoteProtocolError`
        then that's a bug -- if it happens please file a bug report!

        If this method raises any exception then it also sets
        :attr:`Connection.their_state` to :data:`ERROR` -- see
        :ref:`error-handling` for discussion.

        """

        if self.their_state is ERROR:
            raise RemoteProtocolError("Can't receive data when peer state is ERROR")
        try:
            event = self._extract_next_receive_event()
            if event not in [NEED_DATA, PAUSED]:
                self._process_event(self.their_role, cast(Event, event))
            if event is NEED_DATA:
                if len(self._receive_buffer) > self._max_incomplete_event_size:
                    # 431 is "Request header fields too large" which is pretty
                    # much the only situation where we can get here
                    raise RemoteProtocolError(
                        "Receive buffer too long", error_status_hint=431
                    )
                if self._receive_buffer_closed:
                    # We're still trying to complete some event, but that's
                    # never going to happen because no more data is coming
                    raise RemoteProtocolError("peer unexpectedly closed connection")
            return event
        except BaseException as exc:
            # Any failure poisons the peer's state; local protocol errors are
            # re-raised as remote ones since the *peer* caused them here.
            self._process_error(self.their_role)
            if isinstance(exc, LocalProtocolError):
                exc._reraise_as_remote_protocol_error()
            else:
                raise
|
| 502 |
+
|
| 503 |
+
@overload
|
| 504 |
+
def send(self, event: ConnectionClosed) -> None:
|
| 505 |
+
...
|
| 506 |
+
|
| 507 |
+
@overload
|
| 508 |
+
def send(
|
| 509 |
+
self, event: Union[Request, InformationalResponse, Response, Data, EndOfMessage]
|
| 510 |
+
) -> bytes:
|
| 511 |
+
...
|
| 512 |
+
|
| 513 |
+
@overload
|
| 514 |
+
def send(self, event: Event) -> Optional[bytes]:
|
| 515 |
+
...
|
| 516 |
+
|
| 517 |
+
def send(self, event: Event) -> Optional[bytes]:
|
| 518 |
+
"""Convert a high-level event into bytes that can be sent to the peer,
|
| 519 |
+
while updating our internal state machine.
|
| 520 |
+
|
| 521 |
+
Args:
|
| 522 |
+
event: The :ref:`event <events>` to send.
|
| 523 |
+
|
| 524 |
+
Returns:
|
| 525 |
+
If ``type(event) is ConnectionClosed``, then returns
|
| 526 |
+
``None``. Otherwise, returns a :term:`bytes-like object`.
|
| 527 |
+
|
| 528 |
+
Raises:
|
| 529 |
+
LocalProtocolError:
|
| 530 |
+
Sending this event at this time would violate our
|
| 531 |
+
understanding of the HTTP/1.1 protocol.
|
| 532 |
+
|
| 533 |
+
If this method raises any exception then it also sets
|
| 534 |
+
:attr:`Connection.our_state` to :data:`ERROR` -- see
|
| 535 |
+
:ref:`error-handling` for discussion.
|
| 536 |
+
|
| 537 |
+
"""
|
| 538 |
+
data_list = self.send_with_data_passthrough(event)
|
| 539 |
+
if data_list is None:
|
| 540 |
+
return None
|
| 541 |
+
else:
|
| 542 |
+
return b"".join(data_list)
|
| 543 |
+
|
| 544 |
+
def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]:
|
| 545 |
+
"""Identical to :meth:`send`, except that in situations where
|
| 546 |
+
:meth:`send` returns a single :term:`bytes-like object`, this instead
|
| 547 |
+
returns a list of them -- and when sending a :class:`Data` event, this
|
| 548 |
+
list is guaranteed to contain the exact object you passed in as
|
| 549 |
+
:attr:`Data.data`. See :ref:`sendfile` for discussion.
|
| 550 |
+
|
| 551 |
+
"""
|
| 552 |
+
if self.our_state is ERROR:
|
| 553 |
+
raise LocalProtocolError("Can't send data when our state is ERROR")
|
| 554 |
+
try:
|
| 555 |
+
if type(event) is Response:
|
| 556 |
+
event = self._clean_up_response_headers_for_sending(event)
|
| 557 |
+
# We want to call _process_event before calling the writer,
|
| 558 |
+
# because if someone tries to do something invalid then this will
|
| 559 |
+
# give a sensible error message, while our writers all just assume
|
| 560 |
+
# they will only receive valid events. But, _process_event might
|
| 561 |
+
# change self._writer. So we have to do a little dance:
|
| 562 |
+
writer = self._writer
|
| 563 |
+
self._process_event(self.our_role, event)
|
| 564 |
+
if type(event) is ConnectionClosed:
|
| 565 |
+
return None
|
| 566 |
+
else:
|
| 567 |
+
# In any situation where writer is None, process_event should
|
| 568 |
+
# have raised ProtocolError
|
| 569 |
+
assert writer is not None
|
| 570 |
+
data_list: List[bytes] = []
|
| 571 |
+
writer(event, data_list.append)
|
| 572 |
+
return data_list
|
| 573 |
+
except:
|
| 574 |
+
self._process_error(self.our_role)
|
| 575 |
+
raise
|
| 576 |
+
|
| 577 |
+
    def send_failed(self) -> None:
        """Notify the state machine that we failed to send the data it gave
        us.

        This causes :attr:`Connection.our_state` to immediately become
        :data:`ERROR` -- see :ref:`error-handling` for discussion.

        """
        self._process_error(self.our_role)
|
| 586 |
+
|
| 587 |
+
# When sending a Response, we take responsibility for a few things:
|
| 588 |
+
#
|
| 589 |
+
# - Sometimes you MUST set Connection: close. We take care of those
|
| 590 |
+
# times. (You can also set it yourself if you want, and if you do then
|
| 591 |
+
# we'll respect that and close the connection at the right time. But you
|
| 592 |
+
# don't have to worry about that unless you want to.)
|
| 593 |
+
#
|
| 594 |
+
# - The user has to set Content-Length if they want it. Otherwise, for
|
| 595 |
+
# responses that have bodies (e.g. not HEAD), then we will automatically
|
| 596 |
+
# select the right mechanism for streaming a body of unknown length,
|
| 597 |
+
# which depends on depending on the peer's HTTP version.
|
| 598 |
+
#
|
| 599 |
+
# This function's *only* responsibility is making sure headers are set up
|
| 600 |
+
# right -- everything downstream just looks at the headers. There are no
|
| 601 |
+
# side channels.
|
| 602 |
+
def _clean_up_response_headers_for_sending(self, response: Response) -> Response:
    """Return a new Response whose framing headers are made consistent.

    Picks the body-framing mechanism (Content-Length / chunked / close)
    based on the request method and the peer's HTTP version, and forces
    ``Connection: close`` when keep-alive is impossible. Only headers are
    touched -- everything downstream reads its decisions from the headers.
    """
    assert type(response) is Response

    headers = response.headers
    need_close = False

    # HEAD requests need some special handling: they always act like they
    # have Content-Length: 0, and that's how _body_framing treats
    # them. But their headers are supposed to match what we would send if
    # the request was a GET. (Technically there is one deviation allowed:
    # we're allowed to leave out the framing headers -- see
    # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as
    # easy to get them right.)
    method_for_choosing_headers = cast(bytes, self._request_method)
    if method_for_choosing_headers == b"HEAD":
        method_for_choosing_headers = b"GET"
    framing_type, _ = _body_framing(method_for_choosing_headers, response)
    if framing_type in ("chunked", "http/1.0"):
        # This response has a body of unknown length.
        # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked
        # If our peer is HTTP/1.0, we use no framing headers, and close the
        # connection afterwards.
        #
        # Make sure to clear Content-Length (in principle user could have
        # set both and then we ignored Content-Length b/c
        # Transfer-Encoding overwrote it -- this would be naughty of them,
        # but the HTTP spec says that if our peer does this then we have
        # to fix it instead of erroring out, so we'll accord the user the
        # same respect).
        headers = set_comma_header(headers, b"content-length", [])
        if self.their_http_version is None or self.their_http_version < b"1.1":
            # Either we never got a valid request and are sending back an
            # error (their_http_version is None), so we assume the worst;
            # or else we did get a valid HTTP/1.0 request, so we know that
            # they don't understand chunked encoding.
            headers = set_comma_header(headers, b"transfer-encoding", [])
            # This is actually redundant ATM, since currently we
            # unconditionally disable keep-alive when talking to HTTP/1.0
            # peers. But let's be defensive just in case we add
            # Connection: keep-alive support later:
            if self._request_method != b"HEAD":
                need_close = True
        else:
            headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"])

    if not self._cstate.keep_alive or need_close:
        # Make sure Connection: close is set
        connection = set(get_comma_header(headers, b"connection"))
        connection.discard(b"keep-alive")
        connection.add(b"close")
        headers = set_comma_header(headers, b"connection", sorted(connection))

    # Response is frozen, so build a fresh event carrying the fixed headers.
    return Response(
        headers=headers,
        status_code=response.status_code,
        http_version=response.http_version,
        reason=response.reason,
    )
|
venv/lib/python3.12/site-packages/h11/_events.py
ADDED
|
@@ -0,0 +1,369 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# High level events that make up HTTP/1.1 conversations. Loosely inspired by
|
| 2 |
+
# the corresponding events in hyper-h2:
|
| 3 |
+
#
|
| 4 |
+
# http://python-hyper.org/h2/en/stable/api.html#events
|
| 5 |
+
#
|
| 6 |
+
# Don't subclass these. Stuff will break.
|
| 7 |
+
|
| 8 |
+
import re
|
| 9 |
+
from abc import ABC
|
| 10 |
+
from dataclasses import dataclass
|
| 11 |
+
from typing import List, Tuple, Union
|
| 12 |
+
|
| 13 |
+
from ._abnf import method, request_target
|
| 14 |
+
from ._headers import Headers, normalize_and_validate
|
| 15 |
+
from ._util import bytesify, LocalProtocolError, validate
|
| 16 |
+
|
| 17 |
+
# Everything in __all__ gets re-exported as part of the h11 public API.
|
| 18 |
+
__all__ = [
|
| 19 |
+
"Event",
|
| 20 |
+
"Request",
|
| 21 |
+
"InformationalResponse",
|
| 22 |
+
"Response",
|
| 23 |
+
"Data",
|
| 24 |
+
"EndOfMessage",
|
| 25 |
+
"ConnectionClosed",
|
| 26 |
+
]
|
| 27 |
+
|
| 28 |
+
method_re = re.compile(method.encode("ascii"))
|
| 29 |
+
request_target_re = re.compile(request_target.encode("ascii"))
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class Event(ABC):
    """
    Base class for h11 events.
    """

    # No per-instance state at this level; each concrete subclass declares
    # its own __slots__ for its fields.
    __slots__ = ()
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
@dataclass(init=False, frozen=True)
class Request(Event):
    """The beginning of an HTTP request.

    Fields:

    .. attribute:: method

       An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte
       string. :term:`Bytes-like objects <bytes-like object>` and native
       strings containing only ascii characters will be automatically
       converted to byte strings.

    .. attribute:: target

       The target of an HTTP request, e.g. ``b"/index.html"``, or one of the
       more exotic formats described in `RFC 7230, section 5.3
       <https://tools.ietf.org/html/rfc7230#section-5.3>`_. Always a byte
       string. :term:`Bytes-like objects <bytes-like object>` and native
       strings containing only ascii characters will be automatically
       converted to byte strings.

    .. attribute:: headers

       Request headers, represented as a list of (name, value) pairs. See
       :ref:`the header normalization rules <headers-format>` for details.

    .. attribute:: http_version

       The HTTP protocol version, represented as a byte string like
       ``b"1.1"``. See :ref:`the HTTP version normalization rules
       <http_version-format>` for details.

    """

    __slots__ = ("method", "headers", "target", "http_version")

    method: bytes
    headers: Headers
    target: bytes
    http_version: bytes

    def __init__(
        self,
        *,
        method: Union[bytes, str],
        headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
        target: Union[bytes, str],
        http_version: Union[bytes, str] = b"1.1",
        _parsed: bool = False,
    ) -> None:
        """Normalize and validate the fields; ``_parsed=True`` skips the
        bytesify/regex steps for data that the parser already checked.
        """
        super().__init__()
        # Frozen dataclass: assignment must go through object.__setattr__.
        if isinstance(headers, Headers):
            object.__setattr__(self, "headers", headers)
        else:
            object.__setattr__(
                self, "headers", normalize_and_validate(headers, _parsed=_parsed)
            )
        if not _parsed:
            object.__setattr__(self, "method", bytesify(method))
            object.__setattr__(self, "target", bytesify(target))
            object.__setattr__(self, "http_version", bytesify(http_version))
        else:
            object.__setattr__(self, "method", method)
            object.__setattr__(self, "target", target)
            object.__setattr__(self, "http_version", http_version)

        # "A server MUST respond with a 400 (Bad Request) status code to any
        # HTTP/1.1 request message that lacks a Host header field and to any
        # request message that contains more than one Host header field or a
        # Host header field with an invalid field-value."
        # -- https://tools.ietf.org/html/rfc7230#section-5.4
        host_count = 0
        for name, value in self.headers:
            if name == b"host":
                host_count += 1
        if self.http_version == b"1.1" and host_count == 0:
            raise LocalProtocolError("Missing mandatory Host: header")
        if host_count > 1:
            raise LocalProtocolError("Found multiple Host: headers")

        validate(method_re, self.method, "Illegal method characters")
        validate(request_target_re, self.target, "Illegal target characters")

    # This is an unhashable type.
    __hash__ = None  # type: ignore
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
@dataclass(init=False, frozen=True)
class _ResponseBase(Event):
    """Shared implementation for :class:`InformationalResponse` and
    :class:`Response`: normalizes headers/reason/http_version, coerces the
    status code to a plain int, then calls ``__post_init__`` so subclasses
    can enforce their own status-code ranges.
    """

    __slots__ = ("headers", "http_version", "reason", "status_code")

    headers: Headers
    http_version: bytes
    reason: bytes
    status_code: int

    def __init__(
        self,
        *,
        headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
        status_code: int,
        http_version: Union[bytes, str] = b"1.1",
        reason: Union[bytes, str] = b"",
        _parsed: bool = False,
    ) -> None:
        super().__init__()
        # Frozen dataclass: assignment must go through object.__setattr__.
        if isinstance(headers, Headers):
            object.__setattr__(self, "headers", headers)
        else:
            object.__setattr__(
                self, "headers", normalize_and_validate(headers, _parsed=_parsed)
            )
        if not _parsed:
            object.__setattr__(self, "reason", bytesify(reason))
            object.__setattr__(self, "http_version", bytesify(http_version))
            if not isinstance(status_code, int):
                raise LocalProtocolError("status code must be integer")
            # Because IntEnum objects are instances of int, but aren't
            # duck-compatible (sigh), see gh-72.
            object.__setattr__(self, "status_code", int(status_code))
        else:
            # Parser-supplied fields are already validated bytes/int.
            object.__setattr__(self, "reason", reason)
            object.__setattr__(self, "http_version", http_version)
            object.__setattr__(self, "status_code", status_code)

        # Subclass hook for status-code range validation.
        self.__post_init__()

    def __post_init__(self) -> None:
        pass

    # This is an unhashable type.
    __hash__ = None  # type: ignore
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
@dataclass(init=False, frozen=True)
class InformationalResponse(_ResponseBase):
    """An HTTP informational (1xx) response.

    Fields:

    .. attribute:: status_code

       The status code of this response, as an integer. For an
       :class:`InformationalResponse`, this is always in the range [100,
       200).

    .. attribute:: headers

       Request headers, represented as a list of (name, value) pairs. See
       :ref:`the header normalization rules <headers-format>` for
       details.

    .. attribute:: http_version

       The HTTP protocol version, represented as a byte string like
       ``b"1.1"``. See :ref:`the HTTP version normalization rules
       <http_version-format>` for details.

    .. attribute:: reason

       The reason phrase of this response, as a byte string. For example:
       ``b"OK"``, or ``b"Not Found"``.

    """

    def __post_init__(self) -> None:
        # Guard clause: only 1xx codes are legal here.
        if 100 <= self.status_code < 200:
            return
        raise LocalProtocolError(
            "InformationalResponse status_code should be in range "
            "[100, 200), not {}".format(self.status_code)
        )

    # This is an unhashable type.
    __hash__ = None  # type: ignore
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
@dataclass(init=False, frozen=True)
class Response(_ResponseBase):
    """The beginning of an HTTP response.

    Fields:

    .. attribute:: status_code

       The status code of this response, as an integer. For a
       :class:`Response`, this is always in the range [200,
       1000).

    .. attribute:: headers

       Request headers, represented as a list of (name, value) pairs. See
       :ref:`the header normalization rules <headers-format>` for details.

    .. attribute:: http_version

       The HTTP protocol version, represented as a byte string like
       ``b"1.1"``. See :ref:`the HTTP version normalization rules
       <http_version-format>` for details.

    .. attribute:: reason

       The reason phrase of this response, as a byte string. For example:
       ``b"OK"``, or ``b"Not Found"``.

    """

    def __post_init__(self) -> None:
        # Guard clause: final responses must carry a code in [200, 1000).
        if 200 <= self.status_code < 1000:
            return
        raise LocalProtocolError(
            "Response status_code should be in range [200, 1000), not {}".format(
                self.status_code
            )
        )

    # This is an unhashable type.
    __hash__ = None  # type: ignore
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
@dataclass(init=False, frozen=True)
class Data(Event):
    """Part of an HTTP message body.

    Fields:

    .. attribute:: data

       A :term:`bytes-like object` containing part of a message body. Or, if
       using the ``combine=False`` argument to :meth:`Connection.send`, then
       any object that your socket writing code knows what to do with, and for
       which calling :func:`len` returns the number of bytes that will be
       written -- see :ref:`sendfile` for details.

    .. attribute:: chunk_start

       A marker that indicates whether this data object is from the start of a
       chunked transfer encoding chunk. This field is ignored when a Data
       event is provided to :meth:`Connection.send`: it is only valid on
       events emitted from :meth:`Connection.next_event`. You probably
       shouldn't use this attribute at all; see
       :ref:`chunk-delimiters-are-bad` for details.

    .. attribute:: chunk_end

       A marker that indicates whether this data object is the last for a
       given chunked transfer encoding chunk. This field is ignored when a
       Data event is provided to :meth:`Connection.send`: it is only valid
       on events emitted from :meth:`Connection.next_event`. You probably
       shouldn't use this attribute at all; see
       :ref:`chunk-delimiters-are-bad` for details.

    """

    __slots__ = ("data", "chunk_start", "chunk_end")

    data: bytes
    chunk_start: bool
    chunk_end: bool

    def __init__(
        self, data: bytes, chunk_start: bool = False, chunk_end: bool = False
    ) -> None:
        # Frozen dataclass: assignment must go through object.__setattr__.
        object.__setattr__(self, "data", data)
        object.__setattr__(self, "chunk_start", chunk_start)
        object.__setattr__(self, "chunk_end", chunk_end)

    # This is an unhashable type.
    __hash__ = None  # type: ignore
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that
|
| 311 |
+
# are forbidden to be sent in a trailer, since processing them as if they were
|
| 312 |
+
# present in the header section might bypass external security filters."
|
| 313 |
+
# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part
|
| 314 |
+
# Unfortunately, the list of forbidden fields is long and vague :-/
|
| 315 |
+
@dataclass(init=False, frozen=True)
class EndOfMessage(Event):
    """The end of an HTTP message.

    Fields:

    .. attribute:: headers

       Default value: ``[]``

       Any trailing headers attached to this message, represented as a list of
       (name, value) pairs. See :ref:`the header normalization rules
       <headers-format>` for details.

       Must be empty unless ``Transfer-Encoding: chunked`` is in use.

    """

    __slots__ = ("headers",)

    headers: Headers

    def __init__(
        self,
        *,
        headers: Union[
            Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None
        ] = None,
        _parsed: bool = False,
    ) -> None:
        super().__init__()
        # Reduce the three accepted input shapes to a single Headers object.
        if headers is None:
            normalized = Headers([])
        elif isinstance(headers, Headers):
            normalized = headers
        else:
            normalized = normalize_and_validate(headers, _parsed=_parsed)

        # Frozen dataclass: assignment must go through object.__setattr__.
        object.__setattr__(self, "headers", normalized)

    # This is an unhashable type.
    __hash__ = None  # type: ignore
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
@dataclass(frozen=True)
class ConnectionClosed(Event):
    """This event indicates that the sender has closed their outgoing
    connection.

    Note that this does not necessarily mean that they can't *receive* further
    data, because TCP connections are composed of two one-way channels which
    can be closed independently. See :ref:`closing` for details.

    No fields.
    """

    pass
|
venv/lib/python3.12/site-packages/h11/_headers.py
ADDED
|
@@ -0,0 +1,282 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union
|
| 3 |
+
|
| 4 |
+
from ._abnf import field_name, field_value
|
| 5 |
+
from ._util import bytesify, LocalProtocolError, validate
|
| 6 |
+
|
| 7 |
+
if TYPE_CHECKING:
|
| 8 |
+
from ._events import Request
|
| 9 |
+
|
| 10 |
+
try:
|
| 11 |
+
from typing import Literal
|
| 12 |
+
except ImportError:
|
| 13 |
+
from typing_extensions import Literal # type: ignore
|
| 14 |
+
|
| 15 |
+
CONTENT_LENGTH_MAX_DIGITS = 20 # allow up to 1 billion TB - 1
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
# Facts
|
| 19 |
+
# -----
|
| 20 |
+
#
|
| 21 |
+
# Headers are:
|
| 22 |
+
# keys: case-insensitive ascii
|
| 23 |
+
# values: mixture of ascii and raw bytes
|
| 24 |
+
#
|
| 25 |
+
# "Historically, HTTP has allowed field content with text in the ISO-8859-1
|
| 26 |
+
# charset [ISO-8859-1], supporting other charsets only through use of
|
| 27 |
+
# [RFC2047] encoding. In practice, most HTTP header field values use only a
|
| 28 |
+
# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD
|
| 29 |
+
# limit their field values to US-ASCII octets. A recipient SHOULD treat other
|
| 30 |
+
# octets in field content (obs-text) as opaque data."
|
| 31 |
+
# And it deprecates all non-ascii values
|
| 32 |
+
#
|
| 33 |
+
# Leading/trailing whitespace in header names is forbidden
|
| 34 |
+
#
|
| 35 |
+
# Values get leading/trailing whitespace stripped
|
| 36 |
+
#
|
| 37 |
+
# Content-Disposition actually needs to contain unicode semantically; to
|
| 38 |
+
# accomplish this it has a terrifically weird way of encoding the filename
|
| 39 |
+
# itself as ascii (and even this still has lots of cross-browser
|
| 40 |
+
# incompatibilities)
|
| 41 |
+
#
|
| 42 |
+
# Order is important:
|
| 43 |
+
# "a proxy MUST NOT change the order of these field values when forwarding a
|
| 44 |
+
# message"
|
| 45 |
+
# (and there are several headers where the order indicates a preference)
|
| 46 |
+
#
|
| 47 |
+
# Multiple occurrences of the same header:
|
| 48 |
+
# "A sender MUST NOT generate multiple header fields with the same field name
|
| 49 |
+
# in a message unless either the entire field value for that header field is
|
| 50 |
+
# defined as a comma-separated list [or the header is Set-Cookie which gets a
|
| 51 |
+
# special exception]" - RFC 7230. (cookies are in RFC 6265)
|
| 52 |
+
#
|
| 53 |
+
# So every header aside from Set-Cookie can be merged by b", ".join if it
|
| 54 |
+
# occurs repeatedly. But, of course, they can't necessarily be split by
|
| 55 |
+
# .split(b","), because quoting.
|
| 56 |
+
#
|
| 57 |
+
# Given all this mess (case insensitive, duplicates allowed, order is
|
| 58 |
+
# important, ...), there doesn't appear to be any standard way to handle
|
| 59 |
+
# headers in Python -- they're almost like dicts, but... actually just
|
| 60 |
+
# aren't. For now we punt and just use a super simple representation: headers
|
| 61 |
+
# are a list of pairs
|
| 62 |
+
#
|
| 63 |
+
# [(name1, value1), (name2, value2), ...]
|
| 64 |
+
#
|
| 65 |
+
# where all entries are bytestrings, names are lowercase and have no
|
| 66 |
+
# leading/trailing whitespace, and values are bytestrings with no
|
| 67 |
+
# leading/trailing whitespace. Searching and updating are done via naive O(n)
|
| 68 |
+
# methods.
|
| 69 |
+
#
|
| 70 |
+
# Maybe a dict-of-lists would be better?
|
| 71 |
+
|
| 72 |
+
_content_length_re = re.compile(rb"[0-9]+")
|
| 73 |
+
_field_name_re = re.compile(field_name.encode("ascii"))
|
| 74 |
+
_field_value_re = re.compile(field_value.encode("ascii"))
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class Headers(Sequence[Tuple[bytes, bytes]]):
    """
    A list-like interface that allows iterating over headers as byte-pairs
    of (lowercased-name, value).

    Internally we actually store the representation as three-tuples,
    including both the raw original casing, in order to preserve casing
    over-the-wire, and the lowercased name, for case-insensitive comparisons.

        r = Request(
            method="GET",
            target="/",
            headers=[("Host", "example.org"), ("Connection", "keep-alive")],
            http_version="1.1",
        )
        assert r.headers == [
            (b"host", b"example.org"),
            (b"connection", b"keep-alive")
        ]
        assert r.headers.raw_items() == [
            (b"Host", b"example.org"),
            (b"Connection", b"keep-alive")
        ]
    """

    # Slots keep per-instance memory small; header objects are created for
    # every message.
    __slots__ = "_full_items"

    def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None:
        # Each entry is a (raw_name, lowercased_name, value) triple.
        self._full_items = full_items

    def __bool__(self) -> bool:
        return bool(self._full_items)

    def __eq__(self, other: object) -> bool:
        # Compare as plain (lowercased_name, value) lists, so equality
        # ignores the preserved raw casing.
        return list(self) == list(other)  # type: ignore

    def __len__(self) -> int:
        return len(self._full_items)

    def __repr__(self) -> str:
        return "<Headers(%s)>" % repr(list(self))

    def __getitem__(self, idx: int) -> Tuple[bytes, bytes]:  # type: ignore[override]
        # Expose only the (lowercased_name, value) pair; original casing is
        # available via raw_items().
        _, name, value = self._full_items[idx]
        return (name, value)

    def raw_items(self) -> List[Tuple[bytes, bytes]]:
        # (raw_name, value) pairs with the original over-the-wire casing.
        return [(raw_name, value) for raw_name, _, value in self._full_items]
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
HeaderTypes = Union[
|
| 128 |
+
List[Tuple[bytes, bytes]],
|
| 129 |
+
List[Tuple[bytes, str]],
|
| 130 |
+
List[Tuple[str, bytes]],
|
| 131 |
+
List[Tuple[str, str]],
|
| 132 |
+
]
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
@overload
def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers:
    ...


@overload
def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers:
    ...


@overload
def normalize_and_validate(
    headers: Union[Headers, HeaderTypes], _parsed: bool = False
) -> Headers:
    ...


def normalize_and_validate(
    headers: Union[Headers, HeaderTypes], _parsed: bool = False
) -> Headers:
    """Lowercase, validate, and de-conflict a raw header list.

    Returns a :class:`Headers` of (raw_name, lowercased_name, value)
    triples. Raises ``LocalProtocolError`` on malformed names/values,
    conflicting Content-Length headers, or repeated/unsupported
    Transfer-Encoding headers.
    """
    new_headers = []
    seen_content_length = None
    saw_transfer_encoding = False
    for name, value in headers:
        # For headers coming out of the parser, we can safely skip some steps,
        # because it always returns bytes and has already run these regexes
        # over the data:
        if not _parsed:
            name = bytesify(name)
            value = bytesify(value)
            validate(_field_name_re, name, "Illegal header name {!r}", name)
            validate(_field_value_re, value, "Illegal header value {!r}", value)
        assert isinstance(name, bytes)
        assert isinstance(value, bytes)

        raw_name = name
        name = name.lower()
        if name == b"content-length":
            # Multiple Content-Length values are tolerated only if they all
            # agree (so "5, 5" is treated like "5").
            lengths = {length.strip() for length in value.split(b",")}
            if len(lengths) != 1:
                raise LocalProtocolError("conflicting Content-Length headers")
            value = lengths.pop()
            validate(_content_length_re, value, "bad Content-Length")
            # Cap the digit count (CONTENT_LENGTH_MAX_DIGITS) to bound the
            # size of the integer we later parse.
            if len(value) > CONTENT_LENGTH_MAX_DIGITS:
                raise LocalProtocolError("bad Content-Length")
            if seen_content_length is None:
                seen_content_length = value
                new_headers.append((raw_name, name, value))
            elif seen_content_length != value:
                raise LocalProtocolError("conflicting Content-Length headers")
        elif name == b"transfer-encoding":
            # "A server that receives a request message with a transfer coding
            # it does not understand SHOULD respond with 501 (Not
            # Implemented)."
            # https://tools.ietf.org/html/rfc7230#section-3.3.1
            if saw_transfer_encoding:
                raise LocalProtocolError(
                    "multiple Transfer-Encoding headers", error_status_hint=501
                )
            # "All transfer-coding names are case-insensitive"
            # -- https://tools.ietf.org/html/rfc7230#section-4
            value = value.lower()
            if value != b"chunked":
                raise LocalProtocolError(
                    "Only Transfer-Encoding: chunked is supported",
                    error_status_hint=501,
                )
            saw_transfer_encoding = True
            new_headers.append((raw_name, name, value))
        else:
            new_headers.append((raw_name, name, value))
    return Headers(new_headers)
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
def get_comma_header(headers: Headers, name: bytes) -> List[bytes]:
    # Should only be used for headers whose value is a list of
    # comma-separated, case-insensitive values; `name` must be lower-case
    # bytes.
    #
    # Connection: meets these criteria (including case insensitivity).
    #
    # Content-Length: technically is just a single value (1*DIGIT), but the
    # standard makes reference to implementations that do multiple values, and
    # using this doesn't hurt. Ditto, case insensitivity doesn't change things
    # either way.
    #
    # Transfer-Encoding: is more complex (allows for quoted strings), so
    # splitting on , is actually wrong. For example, this is legal:
    #
    #    Transfer-Encoding: foo; options="1,2", chunked
    #
    # and should be parsed as
    #
    #    foo; options="1,2"
    #    chunked
    #
    # but this naive split mangles the quoted part. That's okay here, though:
    # the only thing we ever do with Transfer-Encoding is reject values that
    # aren't exactly "chunked", and both parses get rejected identically.
    #
    # Expect: the only legal value is the literal string
    # "100-continue". Splitting on commas is harmless. Case insensitive.
    #
    values: List[bytes] = []
    for _, lowered_name, raw_value in headers._full_items:
        if lowered_name != name:
            continue
        for piece in raw_value.lower().split(b","):
            piece = piece.strip()
            if piece:
                values.append(piece)
    return values
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers:
    """Return a new Headers object with every `name` header replaced.

    All existing occurrences of `name` (lower-case bytes) are dropped, and one
    header per entry of `new_values` is appended at the end.
    """
    # Note that when we store the header we use title casing for the header
    # names, in order to match the conventional HTTP header style.
    #
    # Simply calling `.title()` is a blunt approach, but it's correct
    # here given the cases where we're using `set_comma_header`...
    #
    # Connection, Content-Length, Transfer-Encoding.
    kept: List[Tuple[bytes, bytes]] = [
        (raw_name, raw_value)
        for raw_name, lowered_name, raw_value in headers._full_items
        if lowered_name != name
    ]
    kept.extend((name.title(), value) for value in new_values)
    return normalize_and_validate(kept)
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
def has_expect_100_continue(request: "Request") -> bool:
    """Return True iff `request` carries an effective `Expect: 100-continue`."""
    # https://tools.ietf.org/html/rfc7231#section-5.1.1
    # "A server that receives a 100-continue expectation in an HTTP/1.0 request
    # MUST ignore that expectation."
    if request.http_version >= b"1.1":
        return b"100-continue" in get_comma_header(request.headers, b"expect")
    return False
|
venv/lib/python3.12/site-packages/h11/_readers.py
ADDED
|
@@ -0,0 +1,250 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Code to read HTTP data
|
| 2 |
+
#
|
| 3 |
+
# Strategy: each reader is a callable which takes a ReceiveBuffer object, and
|
| 4 |
+
# either:
|
| 5 |
+
# 1) consumes some of it and returns an Event
|
| 6 |
+
# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate()
|
| 7 |
+
# and it might raise a LocalProtocolError, so simpler just to always use
|
| 8 |
+
# this)
|
| 9 |
+
# 3) returns None, meaning "I need more data"
|
| 10 |
+
#
|
| 11 |
+
# If they have a .read_eof attribute, then this will be called if an EOF is
|
| 12 |
+
# received -- but this is optional. Either way, the actual ConnectionClosed
|
| 13 |
+
# event will be generated afterwards.
|
| 14 |
+
#
|
| 15 |
+
# READERS is a dict describing how to pick a reader. It maps states to either:
|
| 16 |
+
# - a reader
|
| 17 |
+
# - or, for body readers, a dict of per-framing reader factories
|
| 18 |
+
|
| 19 |
+
import re
|
| 20 |
+
from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union
|
| 21 |
+
|
| 22 |
+
from ._abnf import chunk_header, header_field, request_line, status_line
|
| 23 |
+
from ._events import Data, EndOfMessage, InformationalResponse, Request, Response
|
| 24 |
+
from ._receivebuffer import ReceiveBuffer
|
| 25 |
+
from ._state import (
|
| 26 |
+
CLIENT,
|
| 27 |
+
CLOSED,
|
| 28 |
+
DONE,
|
| 29 |
+
IDLE,
|
| 30 |
+
MUST_CLOSE,
|
| 31 |
+
SEND_BODY,
|
| 32 |
+
SEND_RESPONSE,
|
| 33 |
+
SERVER,
|
| 34 |
+
)
|
| 35 |
+
from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate
|
| 36 |
+
|
| 37 |
+
__all__ = ["READERS"]
|
| 38 |
+
|
| 39 |
+
# Precompiled patterns: `header_field` is the ABNF-derived pattern imported
# from ._abnf; obs_fold_re matches the leading whitespace that marks an
# obsolete line fold (header continuation line).
header_field_re = re.compile(header_field.encode("ascii"))
obs_fold_re = re.compile(rb"[ \t]+")
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]:
    """Merge obsolete line folds into the preceding header line.

    A line starting with space/tab continues the previous header; the fold is
    replaced with a single b" ". Raises LocalProtocolError if the very first
    line is a continuation.
    """
    it = iter(lines)
    # `last` holds the most recent non-continuation line; it is only yielded
    # once we know no further continuation lines will be appended to it.
    last: Optional[bytes] = None
    for line in it:
        match = obs_fold_re.match(line)
        if match:
            if last is None:
                raise LocalProtocolError("continuation line at start of headers")
            if not isinstance(last, bytearray):
                # Cast to a mutable type, avoiding copy on append to ensure O(n) time
                last = bytearray(last)
            last += b" "
            last += line[match.end() :]
        else:
            if last is not None:
                yield last
            last = line
    # Flush the final pending line.
    if last is not None:
        yield last
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def _decode_header_lines(
    lines: Iterable[bytes],
) -> Iterable[Tuple[bytes, bytes]]:
    """Yield (field_name, field_value) pairs after unfolding and validating."""
    for line in _obsolete_line_fold(lines):
        # validate() raises LocalProtocolError on lines that don't match
        # the header-field grammar.
        matches = validate(header_field_re, line, "illegal header line: {!r}", line)
        yield (matches["field_name"], matches["field_value"])
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
# Pattern for the HTTP request line, from the ABNF grammar in ._abnf.
request_line_re = re.compile(request_line.encode("ascii"))
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]:
    """Try to parse a complete request line + header block out of `buf`.

    Returns a Request event, or None if the buffer does not yet hold a full
    header block. Raises LocalProtocolError on malformed input.
    """
    lines = buf.maybe_extract_lines()
    if lines is None:
        # Incomplete data so far -- but reject early if what's buffered
        # cannot possibly start a legal request line.
        if buf.is_next_line_obviously_invalid_request_line():
            raise LocalProtocolError("illegal request line")
        return None
    if not lines:
        # A bare blank line with no request line before it.
        raise LocalProtocolError("no request line received")
    matches = validate(
        request_line_re, lines[0], "illegal request line: {!r}", lines[0]
    )
    return Request(
        headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches
    )
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
# Pattern for the HTTP status line, from the ABNF grammar in ._abnf.
status_line_re = re.compile(status_line.encode("ascii"))
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def maybe_read_from_SEND_RESPONSE_server(
    buf: ReceiveBuffer,
) -> Union[InformationalResponse, Response, None]:
    """Try to parse a complete status line + header block out of `buf`.

    Returns an InformationalResponse (status < 200) or Response event, or
    None if more data is needed. Raises LocalProtocolError on malformed input.
    """
    lines = buf.maybe_extract_lines()
    if lines is None:
        if buf.is_next_line_obviously_invalid_request_line():
            # NOTE(review): message says "request line" although this is the
            # response-parsing path -- looks copied from the client reader;
            # confirm before changing, in case callers match on the text.
            raise LocalProtocolError("illegal request line")
        return None
    if not lines:
        raise LocalProtocolError("no response line received")
    matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0])
    # Missing http_version / reason groups are defaulted, not rejected.
    http_version = (
        b"1.1" if matches["http_version"] is None else matches["http_version"]
    )
    reason = b"" if matches["reason"] is None else matches["reason"]
    status_code = int(matches["status_code"])
    # 1xx responses become InformationalResponse events; everything else
    # becomes a Response event.
    class_: Union[Type[InformationalResponse], Type[Response]] = (
        InformationalResponse if status_code < 200 else Response
    )
    return class_(
        headers=list(_decode_header_lines(lines[1:])),
        _parsed=True,
        status_code=status_code,
        reason=reason,
        http_version=http_version,
    )
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class ContentLengthReader:
    """Body reader for messages framed by a Content-Length header."""

    def __init__(self, length: int) -> None:
        # Total byte count promised by Content-Length, and how much of it
        # we have not yet handed out.
        self._total = length
        self._left = length

    def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]:
        if not self._left:
            # Entire declared body already delivered.
            return EndOfMessage()
        chunk = buf.maybe_extract_at_most(self._left)
        if chunk is None:
            return None
        self._left -= len(chunk)
        return Data(data=chunk)

    def read_eof(self) -> NoReturn:
        # Connection closed mid-body: report how far we got.
        received = self._total - self._left
        raise RemoteProtocolError(
            "peer closed connection without sending complete message body "
            "(received {} bytes, expected {})".format(received, self._total)
        )
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
# Pattern for a chunked-encoding chunk header line, from ._abnf.
chunk_header_re = re.compile(chunk_header.encode("ascii"))
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
class ChunkedReader:
    """Body reader for Transfer-Encoding: chunked framing.

    Tracks three phases via instance state: inside a chunk's payload
    (_bytes_in_chunk > 0), discarding a chunk's trailing CRLF
    (_bytes_to_discard non-empty), and reading the trailer header block
    after the terminating 0-size chunk (_reading_trailer).
    """

    def __init__(self) -> None:
        # Payload bytes still unread from the current chunk.
        self._bytes_in_chunk = 0
        # After reading a chunk, we have to throw away the trailing \r\n.
        # This tracks the bytes that we need to match and throw away.
        self._bytes_to_discard = b""
        # True once the 0-size terminating chunk has been seen.
        self._reading_trailer = False

    def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]:
        if self._reading_trailer:
            lines = buf.maybe_extract_lines()
            if lines is None:
                return None
            # Trailer headers (possibly empty) end the message.
            return EndOfMessage(headers=list(_decode_header_lines(lines)))
        if self._bytes_to_discard:
            data = buf.maybe_extract_at_most(len(self._bytes_to_discard))
            if data is None:
                return None
            if data != self._bytes_to_discard[: len(data)]:
                # NOTE(review): this rejects remote misbehavior but raises
                # LocalProtocolError -- presumably re-classified by a caller;
                # confirm against the connection layer.
                raise LocalProtocolError(
                    f"malformed chunk footer: {data!r} (expected {self._bytes_to_discard!r})"
                )
            self._bytes_to_discard = self._bytes_to_discard[len(data) :]
            if self._bytes_to_discard:
                return None
            # else, fall through and read some more
        assert self._bytes_to_discard == b""
        if self._bytes_in_chunk == 0:
            # We need to refill our chunk count
            chunk_header = buf.maybe_extract_next_line()
            if chunk_header is None:
                return None
            matches = validate(
                chunk_header_re,
                chunk_header,
                "illegal chunk header: {!r}",
                chunk_header,
            )
            # XX FIXME: we discard chunk extensions. Does anyone care?
            self._bytes_in_chunk = int(matches["chunk_size"], base=16)
            if self._bytes_in_chunk == 0:
                # Terminating chunk: switch to trailer mode and recurse
                # (at most once) to try parsing the trailer immediately.
                self._reading_trailer = True
                return self(buf)
            chunk_start = True
        else:
            chunk_start = False
        assert self._bytes_in_chunk > 0
        data = buf.maybe_extract_at_most(self._bytes_in_chunk)
        if data is None:
            return None
        self._bytes_in_chunk -= len(data)
        if self._bytes_in_chunk == 0:
            # The chunk payload is followed by a CRLF we must consume next.
            self._bytes_to_discard = b"\r\n"
            chunk_end = True
        else:
            chunk_end = False
        return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end)

    def read_eof(self) -> NoReturn:
        raise RemoteProtocolError(
            "peer closed connection without sending complete message body "
            "(incomplete chunked read)"
        )
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
class Http10Reader:
    """Body reader for HTTP/1.0-style framing: the body runs until EOF."""

    def __call__(self, buf: ReceiveBuffer) -> Optional[Data]:
        # Drain whatever is buffered; there is no length to respect.
        chunk = buf.maybe_extract_at_most(999999999)
        return None if chunk is None else Data(data=chunk)

    def read_eof(self) -> EndOfMessage:
        # EOF is the normal end-of-body signal for this framing.
        return EndOfMessage()
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
def expect_nothing(buf: ReceiveBuffer) -> None:
    """Reader for states in which the peer is not allowed to send anything."""
    if not buf:
        return None
    raise LocalProtocolError("Got data when expecting EOF")
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
# Keys are either a (role, state) pair or the SEND_BODY sentinel; values are
# either a reader callable or (for SEND_BODY) a dict of reader factories.
ReadersType = Dict[
    Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]],
    Union[Callable[..., Any], Dict[str, Callable[..., Any]]],
]

READERS: ReadersType = {
    (CLIENT, IDLE): maybe_read_from_IDLE_client,
    (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server,
    (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server,
    # In these states the peer must not send anything at all.
    (CLIENT, DONE): expect_nothing,
    (CLIENT, MUST_CLOSE): expect_nothing,
    (CLIENT, CLOSED): expect_nothing,
    (SERVER, DONE): expect_nothing,
    (SERVER, MUST_CLOSE): expect_nothing,
    (SERVER, CLOSED): expect_nothing,
    # Body readers are chosen per framing kind; these are factories, not
    # readers -- they get instantiated per message.
    SEND_BODY: {
        "chunked": ChunkedReader,
        "content-length": ContentLengthReader,
        "http/1.0": Http10Reader,
    },
}
|
venv/lib/python3.12/site-packages/h11/_receivebuffer.py
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
import sys
|
| 3 |
+
from typing import List, Optional, Union
|
| 4 |
+
|
| 5 |
+
__all__ = ["ReceiveBuffer"]
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
# Operations we want to support:
|
| 9 |
+
# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable),
|
| 10 |
+
# or wait until there is one
|
| 11 |
+
# - read at-most-N bytes
|
| 12 |
+
# Goals:
|
| 13 |
+
# - on average, do this fast
|
| 14 |
+
# - worst case, do this in O(n) where n is the number of bytes processed
|
| 15 |
+
# Plan:
|
| 16 |
+
# - store bytearray, offset, how far we've searched for a separator token
|
| 17 |
+
# - use the how-far-we've-searched data to avoid rescanning
|
| 18 |
+
# - while doing a stream of uninterrupted processing, advance offset instead
|
| 19 |
+
# of constantly copying
|
| 20 |
+
# WARNING:
|
| 21 |
+
# - I haven't benchmarked or profiled any of this yet.
|
| 22 |
+
#
|
| 23 |
+
# Note that starting in Python 3.4, deleting the initial n bytes from a
|
| 24 |
+
# bytearray is amortized O(n), thanks to some excellent work by Antoine
|
| 25 |
+
# Martin:
|
| 26 |
+
#
|
| 27 |
+
# https://bugs.python.org/issue19087
|
| 28 |
+
#
|
| 29 |
+
# This means that if we only supported 3.4+, we could get rid of the code here
|
| 30 |
+
# involving self._start and self.compress, because it's doing exactly the same
|
| 31 |
+
# thing that bytearray now does internally.
|
| 32 |
+
#
|
| 33 |
+
# BUT unfortunately, we still support 2.7, and reading short segments out of a
|
| 34 |
+
# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually
|
| 35 |
+
# delete this code. Yet:
|
| 36 |
+
#
|
| 37 |
+
# https://pythonclock.org/
|
| 38 |
+
#
|
| 39 |
+
# (Two things to double-check first though: make sure PyPy also has the
|
| 40 |
+
# optimization, and benchmark to make sure it's a win, since we do have a
|
| 41 |
+
# slightly clever thing where we delay calling compress() until we've
|
| 42 |
+
# processed a whole event, which could in theory be slightly more efficient
|
| 43 |
+
# than the internal bytearray support.)
|
| 44 |
+
# Matches the blank line terminating a header block: b"\n\n" or b"\n\r\n".
blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class ReceiveBuffer:
    """Incrementally-filled byte buffer with line/chunk extraction helpers.

    Caches how far previous separator searches got, so a failed search is
    never repeated over the same bytes (keeps total work O(n)).
    """

    def __init__(self) -> None:
        self._data = bytearray()
        # High-water marks for how far we've already scanned for b"\r\n"
        # (next_line) and for a blank line (multiple_lines).
        self._next_line_search = 0
        self._multiple_lines_search = 0

    def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer":
        # Append newly-received bytes: buf += data
        self._data += byteslike
        return self

    def __bool__(self) -> bool:
        # True iff any unprocessed bytes remain.
        return bool(len(self))

    def __len__(self) -> int:
        return len(self._data)

    # for @property unprocessed_data
    def __bytes__(self) -> bytes:
        return bytes(self._data)

    def _extract(self, count: int) -> bytearray:
        # extracting an initial slice of the data buffer and return it
        out = self._data[:count]
        del self._data[:count]

        # The buffer contents shifted, so the cached search positions
        # are stale and must be reset.
        self._next_line_search = 0
        self._multiple_lines_search = 0

        return out

    def maybe_extract_at_most(self, count: int) -> Optional[bytearray]:
        """
        Extract a fixed number of bytes from the buffer.

        Returns None when the buffer is empty (or count is 0); otherwise
        returns up to `count` bytes.
        """
        out = self._data[:count]
        if not out:
            return None

        return self._extract(count)

    def maybe_extract_next_line(self) -> Optional[bytearray]:
        """
        Extract the first line, if it is completed in the buffer.

        The returned bytearray includes the b"\\r\\n" terminator.
        """
        # Only search in buffer space that we've not already looked at.
        # Back off one byte: if the buffer previously ended with b"\r", the
        # matching b"\n" may only have arrived in a later chunk.
        search_start_index = max(0, self._next_line_search - 1)
        partial_idx = self._data.find(b"\r\n", search_start_index)

        if partial_idx == -1:
            self._next_line_search = len(self._data)
            return None

        # + 2 is to compensate len(b"\r\n")
        idx = partial_idx + 2

        return self._extract(idx)

    def maybe_extract_lines(self) -> Optional[List[bytearray]]:
        """
        Extract everything up to the first blank line, and return a list of lines.

        Returns None if no blank line has arrived yet; returns [] for an
        immediately-blank header block. Terminators are stripped.
        """
        # Handle the case where we have an immediate empty line.
        if self._data[:1] == b"\n":
            self._extract(1)
            return []

        if self._data[:2] == b"\r\n":
            self._extract(2)
            return []

        # Only search in buffer space that we've not already looked at.
        match = blank_line_regex.search(self._data, self._multiple_lines_search)
        if match is None:
            # Remember how far we got, backing off 2 bytes in case the
            # (up to 3-byte) blank-line separator straddles chunk boundaries.
            self._multiple_lines_search = max(0, len(self._data) - 2)
            return None

        # Truncate the buffer and return it.
        idx = match.span(0)[-1]
        out = self._extract(idx)
        lines = out.split(b"\n")

        # Strip trailing b"\r" in place (lines are bytearrays, so no copy).
        for line in lines:
            if line.endswith(b"\r"):
                del line[-1]

        # The separator guarantees two trailing empty entries; drop them.
        assert lines[-2] == lines[-1] == b""

        del lines[-2:]

        return lines

    # In theory we should wait until `\r\n` before starting to validate
    # incoming data. However it's interesting to detect (very) invalid data
    # early given they might not even contain `\r\n` at all (hence only
    # timeout will get rid of them).
    # This is not a 100% effective detection but more of a cheap sanity check
    # allowing for early abort in some useful cases.
    # This is especially interesting when peer is messing up with HTTPS and
    # sent us a TLS stream where we were expecting plain HTTP given all
    # versions of TLS so far start handshake with a 0x16 message type code.
    def is_next_line_obviously_invalid_request_line(self) -> bool:
        try:
            # HTTP header line must not contain non-printable characters
            # and should not start with a space
            return self._data[0] < 0x21
        except IndexError:
            # Empty buffer: nothing to judge yet.
            return False
|
venv/lib/python3.12/site-packages/h11/_state.py
ADDED
|
@@ -0,0 +1,365 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
################################################################
|
| 2 |
+
# The core state machine
|
| 3 |
+
################################################################
|
| 4 |
+
#
|
| 5 |
+
# Rule 1: everything that affects the state machine and state transitions must
|
| 6 |
+
# live here in this file. As much as possible goes into the table-based
|
| 7 |
+
# representation, but for the bits that don't quite fit, the actual code and
|
| 8 |
+
# state must nonetheless live here.
|
| 9 |
+
#
|
| 10 |
+
# Rule 2: this file does not know about what role we're playing; it only knows
|
| 11 |
+
# about HTTP request/response cycles in the abstract. This ensures that we
|
| 12 |
+
# don't cheat and apply different rules to local and remote parties.
|
| 13 |
+
#
|
| 14 |
+
#
|
| 15 |
+
# Theory of operation
|
| 16 |
+
# ===================
|
| 17 |
+
#
|
| 18 |
+
# Possibly the simplest way to think about this is that we actually have 5
|
| 19 |
+
# different state machines here. Yes, 5. These are:
|
| 20 |
+
#
|
| 21 |
+
# 1) The client state, with its complicated automaton (see the docs)
|
| 22 |
+
# 2) The server state, with its complicated automaton (see the docs)
|
| 23 |
+
# 3) The keep-alive state, with possible states {True, False}
|
| 24 |
+
# 4) The SWITCH_CONNECT state, with possible states {False, True}
|
| 25 |
+
# 5) The SWITCH_UPGRADE state, with possible states {False, True}
|
| 26 |
+
#
|
| 27 |
+
# For (3)-(5), the first state listed is the initial state.
|
| 28 |
+
#
|
| 29 |
+
# (1)-(3) are stored explicitly in member variables. The last
|
| 30 |
+
# two are stored implicitly in the pending_switch_proposals set as:
|
| 31 |
+
# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals)
|
| 32 |
+
# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals)
|
| 33 |
+
#
|
| 34 |
+
# And each of these machines has two different kinds of transitions:
|
| 35 |
+
#
|
| 36 |
+
# a) Event-triggered
|
| 37 |
+
# b) State-triggered
|
| 38 |
+
#
|
| 39 |
+
# Event triggered is the obvious thing that you'd think it is: some event
|
| 40 |
+
# happens, and if it's the right event at the right time then a transition
|
| 41 |
+
# happens. But there are somewhat complicated rules for which machines can
|
| 42 |
+
# "see" which events. (As a rule of thumb, if a machine "sees" an event, this
|
| 43 |
+
# means two things: the event can affect the machine, and if the machine is
|
| 44 |
+
# not in a state where it expects that event then it's an error.) These rules
|
| 45 |
+
# are:
|
| 46 |
+
#
|
| 47 |
+
# 1) The client machine sees all h11.events objects emitted by the client.
|
| 48 |
+
#
|
| 49 |
+
# 2) The server machine sees all h11.events objects emitted by the server.
|
| 50 |
+
#
|
| 51 |
+
# It also sees the client's Request event.
|
| 52 |
+
#
|
| 53 |
+
# And sometimes, server events are annotated with a _SWITCH_* event. For
|
| 54 |
+
# example, we can have a (Response, _SWITCH_CONNECT) event, which is
|
| 55 |
+
# different from a regular Response event.
|
| 56 |
+
#
|
| 57 |
+
# 3) The keep-alive machine sees the process_keep_alive_disabled() event
|
| 58 |
+
# (which is derived from Request/Response events), and this event
|
| 59 |
+
# transitions it from True -> False, or from False -> False. There's no way
|
| 60 |
+
# to transition back.
|
| 61 |
+
#
|
| 62 |
+
# 4&5) The _SWITCH_* machines transition from False->True when we get a
|
| 63 |
+
# Request that proposes the relevant type of switch (via
|
| 64 |
+
# process_client_switch_proposals), and they go from True->False when we
|
| 65 |
+
# get a Response that has no _SWITCH_* annotation.
|
| 66 |
+
#
|
| 67 |
+
# So that's event-triggered transitions.
|
| 68 |
+
#
|
| 69 |
+
# State-triggered transitions are less standard. What they do here is couple
|
| 70 |
+
# the machines together. The way this works is, when certain *joint*
|
| 71 |
+
# configurations of states are achieved, then we automatically transition to a
|
| 72 |
+
# new *joint* state. So, for example, if we're ever in a joint state with
|
| 73 |
+
#
|
| 74 |
+
# client: DONE
|
| 75 |
+
# keep-alive: False
|
| 76 |
+
#
|
| 77 |
+
# then the client state immediately transitions to:
|
| 78 |
+
#
|
| 79 |
+
# client: MUST_CLOSE
|
| 80 |
+
#
|
| 81 |
+
# This is fundamentally different from an event-based transition, because it
|
| 82 |
+
# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state
|
| 83 |
+
# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive
|
| 84 |
+
# transitioned True -> False. Either way, once this precondition is satisfied,
|
| 85 |
+
# this transition is immediately triggered.
|
| 86 |
+
#
|
| 87 |
+
# What if two conflicting state-based transitions get enabled at the same
|
| 88 |
+
# time? In practice there's only one case where this arises (client DONE ->
|
| 89 |
+
# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by
|
| 90 |
+
# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition.
|
| 91 |
+
#
|
| 92 |
+
# Implementation
|
| 93 |
+
# --------------
|
| 94 |
+
#
|
| 95 |
+
# The event-triggered transitions for the server and client machines are all
|
| 96 |
+
# stored explicitly in a table. Ditto for the state-triggered transitions that
|
| 97 |
+
# involve just the server and client state.
|
| 98 |
+
#
|
| 99 |
+
# The transitions for the other machines, and the state-triggered transitions
|
| 100 |
+
# that involve the other machines, are written out as explicit Python code.
|
| 101 |
+
#
|
| 102 |
+
# It'd be nice if there were some cleaner way to do all this. This isn't
|
| 103 |
+
# *too* terrible, but I feel like it could probably be better.
|
| 104 |
+
#
|
| 105 |
+
# WARNING
|
| 106 |
+
# -------
|
| 107 |
+
#
|
| 108 |
+
# The script that generates the state machine diagrams for the docs knows how
|
| 109 |
+
# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS
|
| 110 |
+
# tables. But it can't automatically read the transitions that are written
|
| 111 |
+
# directly in Python code. So if you touch those, you need to also update the
|
| 112 |
+
# script to keep it in sync!
|
| 113 |
+
from typing import cast, Dict, Optional, Set, Tuple, Type, Union
|
| 114 |
+
|
| 115 |
+
from ._events import *
|
| 116 |
+
from ._util import LocalProtocolError, Sentinel
|
| 117 |
+
|
| 118 |
+
# Everything in __all__ gets re-exported as part of the h11 public API.
# (The _SWITCH_* sentinels below are deliberately kept private.)
__all__ = [
    "CLIENT",
    "SERVER",
    "IDLE",
    "SEND_RESPONSE",
    "SEND_BODY",
    "DONE",
    "MUST_CLOSE",
    "CLOSED",
    "MIGHT_SWITCH_PROTOCOL",
    "SWITCHED_PROTOCOL",
    "ERROR",
]
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
# Roles: which party is speaking.
class CLIENT(Sentinel, metaclass=Sentinel):
    pass


class SERVER(Sentinel, metaclass=Sentinel):
    pass


# States
class IDLE(Sentinel, metaclass=Sentinel):
    pass


class SEND_RESPONSE(Sentinel, metaclass=Sentinel):
    pass


class SEND_BODY(Sentinel, metaclass=Sentinel):
    pass


class DONE(Sentinel, metaclass=Sentinel):
    pass


class MUST_CLOSE(Sentinel, metaclass=Sentinel):
    pass


class CLOSED(Sentinel, metaclass=Sentinel):
    pass


class ERROR(Sentinel, metaclass=Sentinel):
    pass


# Switch types
class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel):
    pass


class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel):
    pass


# Private annotations attached to server events that accept a proposed
# protocol switch (Upgrade / CONNECT); not part of the public API.
class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel):
    pass


class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel):
    pass
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
# role -> {current state -> {event (or (event, switch-annotation)) -> next state}}.
# An event absent from a state's dict is a protocol error in that state.
EventTransitionType = Dict[
    Type[Sentinel],
    Dict[
        Type[Sentinel],
        Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]],
    ],
]

EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = {
    CLIENT: {
        IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED},
        SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE},
        DONE: {ConnectionClosed: CLOSED},
        MUST_CLOSE: {ConnectionClosed: CLOSED},
        CLOSED: {ConnectionClosed: CLOSED},
        # Terminal / handed-off states accept no further events.
        MIGHT_SWITCH_PROTOCOL: {},
        SWITCHED_PROTOCOL: {},
        ERROR: {},
    },
    SERVER: {
        IDLE: {
            ConnectionClosed: CLOSED,
            Response: SEND_BODY,
            # Special case: server sees client Request events, in this form
            (Request, CLIENT): SEND_RESPONSE,
        },
        SEND_RESPONSE: {
            InformationalResponse: SEND_RESPONSE,
            Response: SEND_BODY,
            # Switch-annotated events divert into SWITCHED_PROTOCOL.
            (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL,
            (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL,
        },
        SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE},
        DONE: {ConnectionClosed: CLOSED},
        MUST_CLOSE: {ConnectionClosed: CLOSED},
        CLOSED: {ConnectionClosed: CLOSED},
        SWITCHED_PROTOCOL: {},
        ERROR: {},
    },
}
|
| 228 |
+
|
| 229 |
+
# (client state, server state) joint precondition -> per-role new state.
StateTransitionType = Dict[
    Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]]
]

# NB: there are also some special-case state-triggered transitions hard-coded
# into _fire_state_triggered_transitions below.
STATE_TRIGGERED_TRANSITIONS: StateTransitionType = {
    # (Client state, Server state) -> new states
    # Protocol negotiation
    (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL},
    # Socket shutdown: once one side is closed or errored, the other side
    # that has finished (or never started) its message must close too.
    (CLOSED, DONE): {SERVER: MUST_CLOSE},
    (CLOSED, IDLE): {SERVER: MUST_CLOSE},
    (ERROR, DONE): {SERVER: MUST_CLOSE},
    (DONE, CLOSED): {CLIENT: MUST_CLOSE},
    (IDLE, CLOSED): {CLIENT: MUST_CLOSE},
    (DONE, ERROR): {CLIENT: MUST_CLOSE},
}
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
class ConnectionState:
    """Joint client/server state machine for a single HTTP/1.1 connection.

    Keeps one state per role (CLIENT / SERVER) plus a couple of flags that
    don't fit the tabular model, and applies both event-triggered and
    state-triggered transitions until a fixed point is reached.
    """

    def __init__(self) -> None:
        # Extra bits of state that don't quite fit into the state model.

        # If this is False then it enables the automatic DONE -> MUST_CLOSE
        # transition. Don't set this directly; call .keep_alive_disabled()
        self.keep_alive = True

        # This is a subset of {UPGRADE, CONNECT}, containing the proposals
        # made by the client for switching protocols.
        self.pending_switch_proposals: Set[Type[Sentinel]] = set()

        self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE}

    def process_error(self, role: Type[Sentinel]) -> None:
        """Force *role* into the ERROR state and propagate consequences."""
        self.states[role] = ERROR
        self._fire_state_triggered_transitions()

    def process_keep_alive_disabled(self) -> None:
        """Record that keep-alive is off, enabling DONE -> MUST_CLOSE."""
        self.keep_alive = False
        self._fire_state_triggered_transitions()

    def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None:
        """Record a client protocol-switch proposal (upgrade or CONNECT)."""
        self.pending_switch_proposals.add(switch_event)
        self._fire_state_triggered_transitions()

    def process_event(
        self,
        role: Type[Sentinel],
        event_type: Type[Event],
        server_switch_event: Optional[Type[Sentinel]] = None,
    ) -> None:
        """Apply the transition for *event_type* on behalf of *role*.

        *server_switch_event*, if given, marks a server response that accepts
        a previously proposed protocol switch. Raises LocalProtocolError if
        the event is illegal in the current state or if the switch was never
        proposed.
        """
        _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type
        if server_switch_event is not None:
            assert role is SERVER
            if server_switch_event not in self.pending_switch_proposals:
                raise LocalProtocolError(
                    "Received server _SWITCH_UPGRADE event without a pending proposal"
                )
            _event_type = (event_type, server_switch_event)
        if server_switch_event is None and _event_type is Response:
            # A plain Response implicitly rejects any pending switch proposals.
            self.pending_switch_proposals = set()
        self._fire_event_triggered_transitions(role, _event_type)
        # Special case: the server state does get to see Request
        # events.
        if _event_type is Request:
            assert role is CLIENT
            self._fire_event_triggered_transitions(SERVER, (Request, CLIENT))
        self._fire_state_triggered_transitions()

    def _fire_event_triggered_transitions(
        self,
        role: Type[Sentinel],
        event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]],
    ) -> None:
        # Look the transition up in the table; a missing entry means the
        # event is illegal in this state.
        state = self.states[role]
        try:
            new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type]
        except KeyError:
            event_type = cast(Type[Event], event_type)
            raise LocalProtocolError(
                "can't handle event type {} when role={} and state={}".format(
                    event_type.__name__, role, self.states[role]
                )
            ) from None
        self.states[role] = new_state

    def _fire_state_triggered_transitions(self) -> None:
        # We apply these rules repeatedly until converging on a fixed point
        while True:
            start_states = dict(self.states)

            # It could happen that both these special-case transitions are
            # enabled at the same time:
            #
            #   DONE -> MIGHT_SWITCH_PROTOCOL
            #   DONE -> MUST_CLOSE
            #
            # For example, this will always be true of a HTTP/1.0 client
            # requesting CONNECT. If this happens, the protocol switch takes
            # priority. From there the client will either go to
            # SWITCHED_PROTOCOL, in which case it's none of our business when
            # they close the connection, or else the server will deny the
            # request, in which case the client will go back to DONE and then
            # from there to MUST_CLOSE.
            if self.pending_switch_proposals:
                if self.states[CLIENT] is DONE:
                    self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL

            if not self.pending_switch_proposals:
                if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL:
                    self.states[CLIENT] = DONE

            if not self.keep_alive:
                for role in (CLIENT, SERVER):
                    if self.states[role] is DONE:
                        self.states[role] = MUST_CLOSE

            # Tabular state-triggered transitions
            joint_state = (self.states[CLIENT], self.states[SERVER])
            changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {})
            self.states.update(changes)

            if self.states == start_states:
                # Fixed point reached
                return

    def start_next_cycle(self) -> None:
        """Reset DONE/DONE back to IDLE/IDLE so the connection can be reused.

        Raises LocalProtocolError if either side is not in DONE.
        """
        if self.states != {CLIENT: DONE, SERVER: DONE}:
            raise LocalProtocolError(
                f"not in a reusable state. self.states={self.states}"
            )
        # Can't reach DONE/DONE with any of these active, but still, let's be
        # sure.
        assert self.keep_alive
        assert not self.pending_switch_proposals
        self.states = {CLIENT: IDLE, SERVER: IDLE}
|
venv/lib/python3.12/site-packages/h11/_util.py
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union
|
| 2 |
+
|
| 3 |
+
__all__ = [
|
| 4 |
+
"ProtocolError",
|
| 5 |
+
"LocalProtocolError",
|
| 6 |
+
"RemoteProtocolError",
|
| 7 |
+
"validate",
|
| 8 |
+
"bytesify",
|
| 9 |
+
]
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class ProtocolError(Exception):
    """Exception indicating a violation of the HTTP/1.1 protocol.

    This is an abstract base class with two concrete subclasses:
    :exc:`LocalProtocolError`, raised when *you* attempt something that
    HTTP/1.1 forbids, and :exc:`RemoteProtocolError`, raised when the remote
    peer attempts something that HTTP/1.1 forbids. See :ref:`error-handling`
    for details.

    Beyond normal :exc:`Exception` behavior, it carries one extra attribute:

    .. attribute:: error_status_hint

       A suggested HTTP status code that a server might use if this error
       occurred while handling a request.

       For a :exc:`RemoteProtocolError`, this is useful as a suggestion for
       how to respond to a misbehaving peer when implementing a server.

       For a :exc:`LocalProtocolError`, it can be read as how your peer
       might have responded to *you* had h11 allowed you to continue.

       The default is 400 Bad Request, a generic catch-all for protocol
       violations.

    """

    def __init__(self, msg: str, error_status_hint: int = 400) -> None:
        # Refuse direct instantiation -- only the concrete subclasses are
        # meaningful to raise or catch.
        if type(self) is ProtocolError:
            raise TypeError("tried to directly instantiate ProtocolError")
        self.error_status_hint = error_status_hint
        Exception.__init__(self, msg)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
# Strategy: there are a number of public APIs where a LocalProtocolError can
|
| 49 |
+
# be raised (send(), all the different event constructors, ...), and only one
|
| 50 |
+
# public API where RemoteProtocolError can be raised
|
| 51 |
+
# (receive_data()). Therefore we always raise LocalProtocolError internally,
|
| 52 |
+
# and then receive_data will translate this into a RemoteProtocolError.
|
| 53 |
+
#
|
| 54 |
+
# Internally:
|
| 55 |
+
# LocalProtocolError is the generic "ProtocolError".
|
| 56 |
+
# Externally:
|
| 57 |
+
# LocalProtocolError is for local errors and RemoteProtocolError is for
|
| 58 |
+
# remote errors.
|
| 59 |
+
class LocalProtocolError(ProtocolError):
    """Raised when the local side misuses the API or violates HTTP/1.1.

    Internally h11 always raises this; ``receive_data()`` translates it into
    a :exc:`RemoteProtocolError` when the fault lies with the peer.
    """

    def _reraise_as_remote_protocol_error(self) -> NoReturn:
        # After catching a LocalProtocolError, use this method to re-raise it
        # as a RemoteProtocolError. This method must be called from inside an
        # except: block.
        #
        # An easy way to get an equivalent RemoteProtocolError is just to
        # modify 'self' in place.
        self.__class__ = RemoteProtocolError  # type: ignore
        # But the re-raising is somewhat non-trivial -- you might think that
        # now that we've modified the in-flight exception object, that just
        # doing 'raise' to re-raise it would be enough. But it turns out that
        # this doesn't work, because Python tracks the exception type
        # (exc_info[0]) separately from the exception object (exc_info[1]),
        # and we only modified the latter. So we really do need to re-raise
        # the new type explicitly.
        # On py3, the traceback is part of the exception object, so our
        # in-place modification preserved it and we can just re-raise:
        raise self
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class RemoteProtocolError(ProtocolError):
    """Raised (from ``receive_data()``) when the remote peer violates HTTP/1.1."""
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def validate(
    regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any
) -> Dict[str, bytes]:
    """Full-match *data* against *regex* and return its named groups.

    Raises LocalProtocolError (carrying *msg*, formatted with *format_args*
    when any are supplied) if the data does not match.
    """
    m = regex.fullmatch(data)
    if m is None:
        if format_args:
            msg = msg.format(*format_args)
        raise LocalProtocolError(msg)
    return m.groupdict()
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
# Sentinel values
|
| 96 |
+
#
|
| 97 |
+
# - Inherit identity-based comparison and hashing from object
|
| 98 |
+
# - Have a nice repr
|
| 99 |
+
# - Have a *bonus property*: type(sentinel) is sentinel
|
| 100 |
+
#
|
| 101 |
+
# The bonus property is useful if you want to take the return value from
|
| 102 |
+
# next_event() and do some sort of dispatch based on type(event).
|
| 103 |
+
|
| 104 |
+
_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel")


class Sentinel(type):
    """Metaclass for sentinel values.

    Sentinels:
    - inherit identity-based comparison and hashing from object
    - have a nice repr (just their name)
    - have the *bonus property* that ``type(sentinel) is sentinel``, which is
      handy when dispatching on ``type(event)`` for values returned by
      ``next_event()``.
    """

    def __new__(
        cls: Type[_T_Sentinel],
        name: str,
        bases: Tuple[type, ...],
        namespace: Dict[str, Any],
        **kwds: Any
    ) -> _T_Sentinel:
        # Sentinels must subclass Sentinel directly -- exactly one level deep.
        assert bases == (Sentinel,)
        v = super().__new__(cls, name, bases, namespace, **kwds)
        # Make the class its own type, so that type(sentinel) is sentinel.
        v.__class__ = v  # type: ignore
        return v

    def __repr__(self) -> str:
        return self.__name__
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
# Used for methods, request targets, HTTP versions, header names, and header
|
| 125 |
+
# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always
|
| 126 |
+
# returns bytes.
|
| 127 |
+
def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes:
    """Coerce *s* (an ascii str or any bytes-like object) to ``bytes``.

    Used for methods, request targets, HTTP versions, header names, and
    header values. Plain ints are rejected explicitly, since ``bytes(n)``
    would otherwise silently build an n-byte zero buffer.
    """
    if type(s) is bytes:
        # Fast path: already exactly bytes, return as-is.
        return s
    value = s.encode("ascii") if isinstance(s, str) else s
    if isinstance(value, int):
        raise TypeError("expected bytes-like object, not int")
    return bytes(value)
|
venv/lib/python3.12/site-packages/h11/_version.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file must be kept very simple, because it is consumed from several
|
| 2 |
+
# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.
|
| 3 |
+
|
| 4 |
+
# We use a simple scheme:
|
| 5 |
+
# 1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
|
| 6 |
+
# where the +dev versions are never released into the wild, they're just what
|
| 7 |
+
# we stick into the VCS in between releases.
|
| 8 |
+
#
|
| 9 |
+
# This is compatible with PEP 440:
|
| 10 |
+
# http://legacy.python.org/dev/peps/pep-0440/
|
| 11 |
+
# via the use of the "local suffix" "+dev", which is disallowed on index
|
| 12 |
+
# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
|
| 13 |
+
# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
|
| 14 |
+
# 1.0.0.)
|
| 15 |
+
|
| 16 |
+
__version__ = "0.16.0"
|
venv/lib/python3.12/site-packages/h11/_writers.py
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Code to write HTTP data
|
| 2 |
+
#
|
| 3 |
+
# Strategy: each writer takes an event + a write-some-bytes function, which is
|
| 4 |
+
# called with the bytes to send.
|
| 5 |
+
#
|
| 6 |
+
# WRITERS is a dict describing how to pick a writer. It maps states to either:
|
| 7 |
+
# - a writer
|
| 8 |
+
# - or, for body writers, a dict of framing-dependent writer factories
|
| 9 |
+
|
| 10 |
+
from typing import Any, Callable, Dict, List, Tuple, Type, Union
|
| 11 |
+
|
| 12 |
+
from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response
|
| 13 |
+
from ._headers import Headers
|
| 14 |
+
from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER
|
| 15 |
+
from ._util import LocalProtocolError, Sentinel
|
| 16 |
+
|
| 17 |
+
__all__ = ["WRITERS"]
|
| 18 |
+
|
| 19 |
+
Writer = Callable[[bytes], Any]
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def write_headers(headers: Headers, write: Writer) -> None:
    """Emit all header lines followed by the terminating blank line.

    Host is emitted first: "Since the Host field-value is critical
    information for handling a request, a user agent SHOULD generate Host
    as the first header field following the request-line." - RFC 7230
    """
    raw_items = headers._full_items
    # First pass emits only Host header(s); second pass emits everything else.
    for emit_host in (True, False):
        for raw_name, name, value in raw_items:
            if (name == b"host") == emit_host:
                write(b"%s: %s\r\n" % (raw_name, value))
    write(b"\r\n")
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def write_request(request: Request, write: Writer) -> None:
    """Serialize the request line and headers of *request*.

    Raises LocalProtocolError for any HTTP version other than 1.1, since
    that is the only version this writer knows how to emit.
    """
    if request.http_version != b"1.1":
        raise LocalProtocolError("I only send HTTP/1.1")
    request_line = b"%s %s HTTP/1.1\r\n" % (request.method, request.target)
    write(request_line)
    write_headers(request.headers, write)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
# Shared between InformationalResponse and Response
|
| 44 |
+
# Shared between InformationalResponse and Response
def write_any_response(
    response: Union[InformationalResponse, Response], write: Writer
) -> None:
    """Serialize the status line and headers of *response*.

    Only HTTP/1.1 is supported; anything else raises LocalProtocolError.
    """
    if response.http_version != b"1.1":
        raise LocalProtocolError("I only send HTTP/1.1")
    # We don't bother sending ascii status messages like "OK"; they're
    # optional and ignored by the protocol. (But the space after the numeric
    # status code is mandatory.)
    #
    # XX FIXME: could at least make an effort to pull out the status message
    # from stdlib's http.HTTPStatus table. Or maybe just steal their enums
    # (either by import or copy/paste). We already accept them as status codes
    # since they're of type IntEnum < int.
    status_bytes = str(response.status_code).encode("ascii")
    write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason))
    write_headers(response.headers, write)
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class BodyWriter:
    """Base class for body writers: dispatches events to framing hooks."""

    def __call__(self, event: Event, write: Writer) -> None:
        # Dispatch on the exact event type; anything else is a caller bug.
        event_kind = type(event)
        if event_kind is Data:
            self.send_data(event.data, write)
        elif event_kind is EndOfMessage:
            self.send_eom(event.headers, write)
        else:  # pragma: no cover
            assert False

    def send_data(self, data: bytes, write: Writer) -> None:
        # Hook: emit one chunk of body data. Default is a no-op.
        pass

    def send_eom(self, headers: Headers, write: Writer) -> None:
        # Hook: emit end-of-message framing (and trailers, if supported).
        pass
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
#
|
| 79 |
+
# These are all careful not to do anything to 'data' except call len(data) and
|
| 80 |
+
# write(data). This allows us to transparently pass-through funny objects,
|
| 81 |
+
# like placeholder objects referring to files on disk that will be sent via
|
| 82 |
+
# sendfile(2).
|
| 83 |
+
#
|
| 84 |
+
#
# These are all careful not to do anything to 'data' except call len(data) and
# write(data). This allows us to transparently pass-through funny objects,
# like placeholder objects referring to files on disk that will be sent via
# sendfile(2).
#
class ContentLengthWriter(BodyWriter):
    """Body writer for Content-Length framing: exactly ``length`` bytes."""

    def __init__(self, length: int) -> None:
        # Bytes of body still owed; counts down as data is sent.
        self._length = length

    def send_data(self, data: bytes, write: Writer) -> None:
        self._length -= len(data)
        if self._length < 0:
            raise LocalProtocolError("Too much data for declared Content-Length")
        write(data)

    def send_eom(self, headers: Headers, write: Writer) -> None:
        # The declared length must be fully consumed, and Content-Length
        # framing has no way to carry trailing headers.
        if self._length != 0:
            raise LocalProtocolError("Too little data for declared Content-Length")
        if headers:
            raise LocalProtocolError("Content-Length and trailers don't mix")
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class ChunkedWriter(BodyWriter):
    """Body writer for chunked transfer-encoding framing."""

    def send_data(self, data: bytes, write: Writer) -> None:
        # A zero-length chunk would read as the end-of-body marker, so
        # empty writes are suppressed entirely rather than encoded.
        if len(data) == 0:
            return
        write(b"%x\r\n" % len(data))
        write(data)
        write(b"\r\n")

    def send_eom(self, headers: Headers, write: Writer) -> None:
        # Terminal zero-length chunk, then trailers (or just the blank line).
        write(b"0\r\n")
        write_headers(headers, write)
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
class Http10Writer(BodyWriter):
    """Body writer for HTTP/1.0-style framing: body runs until connection close."""

    def send_data(self, data: bytes, write: Writer) -> None:
        write(data)

    def send_eom(self, headers: Headers, write: Writer) -> None:
        if headers:
            raise LocalProtocolError("can't send trailers to HTTP/1.0 client")
        # no need to close the socket ourselves, that will be taken care of by
        # Connection: close machinery
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
# Maps either a (role, state) pair to a head writer, or the SEND_BODY state
# to a dict of framing-name -> BodyWriter subclass.
WritersType = Dict[
    Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]],
    Union[
        Dict[str, Type[BodyWriter]],
        Callable[[Union[InformationalResponse, Response], Writer], None],
        Callable[[Request, Writer], None],
    ],
]

WRITERS: WritersType = {
    (CLIENT, IDLE): write_request,
    (SERVER, IDLE): write_any_response,
    (SERVER, SEND_RESPONSE): write_any_response,
    SEND_BODY: {
        "chunked": ChunkedWriter,
        "content-length": ContentLengthWriter,
        "http/1.0": Http10Writer,
    },
}
|
venv/lib/python3.12/site-packages/h11/py.typed
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
Marker
|
venv/lib/python3.12/site-packages/idna-3.13.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
venv/lib/python3.12/site-packages/idna-3.13.dist-info/METADATA
ADDED
|
@@ -0,0 +1,204 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.4
|
| 2 |
+
Name: idna
|
| 3 |
+
Version: 3.13
|
| 4 |
+
Summary: Internationalized Domain Names in Applications (IDNA)
|
| 5 |
+
Author-email: Kim Davies <kim+pypi@gumleaf.org>
|
| 6 |
+
Requires-Python: >=3.8
|
| 7 |
+
Description-Content-Type: text/x-rst
|
| 8 |
+
License-Expression: BSD-3-Clause
|
| 9 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 10 |
+
Classifier: Intended Audience :: Developers
|
| 11 |
+
Classifier: Intended Audience :: System Administrators
|
| 12 |
+
Classifier: Operating System :: OS Independent
|
| 13 |
+
Classifier: Programming Language :: Python
|
| 14 |
+
Classifier: Programming Language :: Python :: 3
|
| 15 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 16 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.14
|
| 23 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 24 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 25 |
+
Classifier: Topic :: Internet :: Name Service (DNS)
|
| 26 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 27 |
+
Classifier: Topic :: Utilities
|
| 28 |
+
License-File: LICENSE.md
|
| 29 |
+
Requires-Dist: ruff >= 0.6.2 ; extra == "all"
|
| 30 |
+
Requires-Dist: mypy >= 1.11.2 ; extra == "all"
|
| 31 |
+
Requires-Dist: pytest >= 8.3.2 ; extra == "all"
|
| 32 |
+
Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst
|
| 33 |
+
Project-URL: Issue tracker, https://github.com/kjd/idna/issues
|
| 34 |
+
Project-URL: Source, https://github.com/kjd/idna
|
| 35 |
+
Provides-Extra: all
|
| 36 |
+
|
| 37 |
+
Internationalized Domain Names in Applications (IDNA)
|
| 38 |
+
=====================================================
|
| 39 |
+
|
| 40 |
+
Support for `Internationalized Domain Names in
|
| 41 |
+
Applications (IDNA) <https://tools.ietf.org/html/rfc5891>`_
|
| 42 |
+
and `Unicode IDNA Compatibility Processing
|
| 43 |
+
<https://unicode.org/reports/tr46/>`_.
|
| 44 |
+
|
| 45 |
+
The latest versions of these standards supplied here provide
|
| 46 |
+
more comprehensive language coverage and reduce the potential of
|
| 47 |
+
allowing domains with known security vulnerabilities. This library
|
| 48 |
+
is a suitable replacement for the “encodings.idna”
|
| 49 |
+
module that comes with the Python standard library, but which
|
| 50 |
+
only supports an older superseded IDNA specification from 2003.
|
| 51 |
+
|
| 52 |
+
Basic functions are simply executed:
|
| 53 |
+
|
| 54 |
+
.. code-block:: pycon
|
| 55 |
+
|
| 56 |
+
>>> import idna
|
| 57 |
+
>>> idna.encode('ドメイン.テスト')
|
| 58 |
+
b'xn--eckwd4c7c.xn--zckzah'
|
| 59 |
+
>>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
|
| 60 |
+
ドメイン.テスト
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
Installation
|
| 64 |
+
------------
|
| 65 |
+
|
| 66 |
+
This package is available for installation from PyPI via the
|
| 67 |
+
typical mechanisms, such as:
|
| 68 |
+
|
| 69 |
+
.. code-block:: bash
|
| 70 |
+
|
| 71 |
+
$ python3 -m pip install idna
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
Usage
|
| 75 |
+
-----
|
| 76 |
+
|
| 77 |
+
For typical usage, the ``encode`` and ``decode`` functions will take a
|
| 78 |
+
domain name argument and perform a conversion to ASCII-compatible encoding
|
| 79 |
+
(known as A-labels), or to Unicode strings (known as U-labels)
|
| 80 |
+
respectively.
|
| 81 |
+
|
| 82 |
+
.. code-block:: pycon
|
| 83 |
+
|
| 84 |
+
>>> import idna
|
| 85 |
+
>>> idna.encode('ドメイン.テスト')
|
| 86 |
+
b'xn--eckwd4c7c.xn--zckzah'
|
| 87 |
+
>>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
|
| 88 |
+
ドメイン.テスト
|
| 89 |
+
|
| 90 |
+
Conversions can be applied at a per-label basis using the ``ulabel`` or
|
| 91 |
+
``alabel`` functions if necessary:
|
| 92 |
+
|
| 93 |
+
.. code-block:: pycon
|
| 94 |
+
|
| 95 |
+
>>> idna.alabel('测试')
|
| 96 |
+
b'xn--0zwm56d'
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
Compatibility Mapping (UTS #46)
|
| 100 |
+
+++++++++++++++++++++++++++++++
|
| 101 |
+
|
| 102 |
+
This library provides support for `Unicode IDNA Compatibility
|
| 103 |
+
Processing <https://unicode.org/reports/tr46/>`_ which normalizes input from
|
| 104 |
+
different potential ways a user may input a domain prior to performing the IDNA
|
| 105 |
+
conversion operations. This functionality, known as a
|
| 106 |
+
`mapping <https://tools.ietf.org/html/rfc5895>`_, is considered by the
|
| 107 |
+
specification to be a local user-interface issue distinct from IDNA
|
| 108 |
+
conversion functionality.
|
| 109 |
+
|
| 110 |
+
For example, “Königsgäßchen” is not a permissible label as *LATIN
|
| 111 |
+
CAPITAL LETTER K* is not allowed (nor are capital letters in general).
|
| 112 |
+
UTS 46 will convert this into lower case prior to applying the IDNA
|
| 113 |
+
conversion.
|
| 114 |
+
|
| 115 |
+
.. code-block:: pycon
|
| 116 |
+
|
| 117 |
+
>>> import idna
|
| 118 |
+
>>> idna.encode('Königsgäßchen')
|
| 119 |
+
...
|
| 120 |
+
idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed
|
| 121 |
+
>>> idna.encode('Königsgäßchen', uts46=True)
|
| 122 |
+
b'xn--knigsgchen-b4a3dun'
|
| 123 |
+
>>> print(idna.decode('xn--knigsgchen-b4a3dun'))
|
| 124 |
+
königsgäßchen
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
Exceptions
|
| 128 |
+
----------
|
| 129 |
+
|
| 130 |
+
All errors raised during the conversion following the specification
|
| 131 |
+
should raise an exception derived from the ``idna.IDNAError`` base
|
| 132 |
+
class.
|
| 133 |
+
|
| 134 |
+
More specific exceptions that may be generated as ``idna.IDNABidiError``
|
| 135 |
+
when the error reflects an illegal combination of left-to-right and
|
| 136 |
+
right-to-left characters in a label; ``idna.InvalidCodepoint`` when
|
| 137 |
+
a specific codepoint is an illegal character in an IDN label (i.e.
|
| 138 |
+
INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is
|
| 139 |
+
illegal based on its position in the string (i.e. it is CONTEXTO or CONTEXTJ
|
| 140 |
+
but the contextual requirements are not satisfied.)
|
| 141 |
+
|
| 142 |
+
Building and Diagnostics
|
| 143 |
+
------------------------
|
| 144 |
+
|
| 145 |
+
The IDNA and UTS 46 functionality relies upon pre-calculated lookup
|
| 146 |
+
tables for performance. These tables are derived from computing against
|
| 147 |
+
eligibility criteria in the respective standards using the command-line
|
| 148 |
+
script ``tools/idna-data``.
|
| 149 |
+
|
| 150 |
+
This tool will fetch relevant codepoint data from the Unicode repository
|
| 151 |
+
and perform the required calculations to identify eligibility. There are
|
| 152 |
+
three main modes:
|
| 153 |
+
|
| 154 |
+
* ``idna-data make-libdata``. Generates ``idnadata.py`` and
|
| 155 |
+
``uts46data.py``, the pre-calculated lookup tables used for IDNA and
|
| 156 |
+
UTS 46 conversions. Implementers who wish to track this library against
|
| 157 |
+
a different Unicode version may use this tool to manually generate a
|
| 158 |
+
different version of the ``idnadata.py`` and ``uts46data.py`` files.
|
| 159 |
+
|
| 160 |
+
* ``idna-data make-table``. Generate a table of the IDNA disposition
|
| 161 |
+
(e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix
|
| 162 |
+
B.1 of RFC 5892 and the pre-computed tables published by `IANA
|
| 163 |
+
<https://www.iana.org/>`_.
|
| 164 |
+
|
| 165 |
+
* ``idna-data U+0061``. Prints debugging output on the various
|
| 166 |
+
properties associated with an individual Unicode codepoint (in this
|
| 167 |
+
case, U+0061), that are used to assess the IDNA and UTS 46 status of a
|
| 168 |
+
codepoint. This is helpful in debugging or analysis.
|
| 169 |
+
|
| 170 |
+
The tool accepts a number of arguments, described using ``idna-data -h``.
|
| 171 |
+
Most notably, the ``--version`` argument allows the specification
|
| 172 |
+
of the version of Unicode to be used in computing the table data. For
|
| 173 |
+
example, ``idna-data --version 9.0.0 make-libdata`` will generate
|
| 174 |
+
library data against Unicode 9.0.0.
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
Additional Notes
|
| 178 |
+
----------------
|
| 179 |
+
|
| 180 |
+
* **Packages**. The latest tagged release version is published in the
|
| 181 |
+
`Python Package Index <https://pypi.org/project/idna/>`_.
|
| 182 |
+
|
| 183 |
+
* **Version support**. This library supports Python 3.8 and higher.
|
| 184 |
+
As this library serves as a low-level toolkit for a variety of
|
| 185 |
+
applications, many of which strive for broad compatibility with older
|
| 186 |
+
Python versions, there is no rush to remove older interpreter support.
|
| 187 |
+
Support for older versions are likely to be removed from new releases
|
| 188 |
+
as automated tests can no longer easily be run, i.e. once the Python
|
| 189 |
+
version is officially end-of-life.
|
| 190 |
+
|
| 191 |
+
* **Testing**. The library has a test suite based on each rule of the
|
| 192 |
+
IDNA specification, as well as tests that are provided as part of the
|
| 193 |
+
Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing
|
| 194 |
+
<https://unicode.org/reports/tr46/>`_.
|
| 195 |
+
|
| 196 |
+
* **Emoji**. It is an occasional request to support emoji domains in
|
| 197 |
+
this library. Encoding of symbols like emoji is expressly prohibited by
|
| 198 |
+
the IDNA technical standard, and emoji domains are broadly phased
|
| 199 |
+
out across the domain industry due to associated security risks.
|
| 200 |
+
|
| 201 |
+
* **Transitional processing**. Unicode 16.0.0 removed transitional
|
| 202 |
+
processing so the `transitional` argument for the encode() method
|
| 203 |
+
no longer has any effect and will be removed at a later date.
|
| 204 |
+
|
venv/lib/python3.12/site-packages/idna-3.13.dist-info/RECORD
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
idna-3.13.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
idna-3.13.dist-info/METADATA,sha256=aCJPP9iIHtp9NZSENIrzP52rgjQxw0-PWNfHGJ7ffQA,8033
|
| 3 |
+
idna-3.13.dist-info/RECORD,,
|
| 4 |
+
idna-3.13.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
|
| 5 |
+
idna-3.13.dist-info/licenses/LICENSE.md,sha256=GppPDj1HmickDd1ZqRN6ZqtKD539yMphiMwL_YUYfwQ,1541
|
| 6 |
+
idna/__init__.py,sha256=MPqNDLZbXqGaNdXxAFhiqFPKEQXju2jNQhCey6-5eJM,868
|
| 7 |
+
idna/__pycache__/__init__.cpython-312.pyc,,
|
| 8 |
+
idna/__pycache__/codec.cpython-312.pyc,,
|
| 9 |
+
idna/__pycache__/compat.cpython-312.pyc,,
|
| 10 |
+
idna/__pycache__/core.cpython-312.pyc,,
|
| 11 |
+
idna/__pycache__/idnadata.cpython-312.pyc,,
|
| 12 |
+
idna/__pycache__/intranges.cpython-312.pyc,,
|
| 13 |
+
idna/__pycache__/package_data.cpython-312.pyc,,
|
| 14 |
+
idna/__pycache__/uts46data.cpython-312.pyc,,
|
| 15 |
+
idna/codec.py,sha256=M2SGWN7cs_6B32QmKTyTN6xQGZeYQgQ2wiX3_DR6loE,3438
|
| 16 |
+
idna/compat.py,sha256=RzLy6QQCdl9784aFhb2EX9EKGCJjg0P3PilGdeXXcx8,316
|
| 17 |
+
idna/core.py,sha256=0_28DR8hKT_pJKcmbeIQiiO1rHOpS3kJTtp9pyrk-4k,13374
|
| 18 |
+
idna/idnadata.py,sha256=EYkhqhOze0Whw1pIMum_-qEHM-8utUwc_IBoIUWW8Go,92613
|
| 19 |
+
idna/intranges.py,sha256=amUtkdhYcQG8Zr-CoMM_kVRacxkivC1WgxN1b63KKdU,1898
|
| 20 |
+
idna/package_data.py,sha256=Nc4yldCrxxvai27WL76JL60jhw0ajOoboFGsm5L8FxM,21
|
| 21 |
+
idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 22 |
+
idna/uts46data.py,sha256=JAacELsMTouJI-U6jk_UcKuAM4K7ffGCYpbDLux8iVk,202713
|
venv/lib/python3.12/site-packages/idna-3.13.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: flit 3.12.0
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
venv/lib/python3.12/site-packages/idna/__init__.py
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .core import (
|
| 2 |
+
IDNABidiError,
|
| 3 |
+
IDNAError,
|
| 4 |
+
InvalidCodepoint,
|
| 5 |
+
InvalidCodepointContext,
|
| 6 |
+
alabel,
|
| 7 |
+
check_bidi,
|
| 8 |
+
check_hyphen_ok,
|
| 9 |
+
check_initial_combiner,
|
| 10 |
+
check_label,
|
| 11 |
+
check_nfc,
|
| 12 |
+
decode,
|
| 13 |
+
encode,
|
| 14 |
+
ulabel,
|
| 15 |
+
uts46_remap,
|
| 16 |
+
valid_contextj,
|
| 17 |
+
valid_contexto,
|
| 18 |
+
valid_label_length,
|
| 19 |
+
valid_string_length,
|
| 20 |
+
)
|
| 21 |
+
from .intranges import intranges_contain
|
| 22 |
+
from .package_data import __version__
|
| 23 |
+
|
| 24 |
+
__all__ = [
|
| 25 |
+
"__version__",
|
| 26 |
+
"IDNABidiError",
|
| 27 |
+
"IDNAError",
|
| 28 |
+
"InvalidCodepoint",
|
| 29 |
+
"InvalidCodepointContext",
|
| 30 |
+
"alabel",
|
| 31 |
+
"check_bidi",
|
| 32 |
+
"check_hyphen_ok",
|
| 33 |
+
"check_initial_combiner",
|
| 34 |
+
"check_label",
|
| 35 |
+
"check_nfc",
|
| 36 |
+
"decode",
|
| 37 |
+
"encode",
|
| 38 |
+
"intranges_contain",
|
| 39 |
+
"ulabel",
|
| 40 |
+
"uts46_remap",
|
| 41 |
+
"valid_contextj",
|
| 42 |
+
"valid_contexto",
|
| 43 |
+
"valid_label_length",
|
| 44 |
+
"valid_string_length",
|
| 45 |
+
]
|
venv/lib/python3.12/site-packages/pydantic-2.13.4.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
venv/lib/python3.12/site-packages/pydantic-2.13.4.dist-info/METADATA
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
venv/lib/python3.12/site-packages/pydantic-2.13.4.dist-info/RECORD
ADDED
|
@@ -0,0 +1,217 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
pydantic-2.13.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
pydantic-2.13.4.dist-info/METADATA,sha256=0eVsNBJJJRL7a1mp38Jat68qggRYXNe8oABRMCMTM5Q,109397
|
| 3 |
+
pydantic-2.13.4.dist-info/RECORD,,
|
| 4 |
+
pydantic-2.13.4.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
|
| 5 |
+
pydantic-2.13.4.dist-info/licenses/LICENSE,sha256=qeGG88oWte74QxjnpwFyE1GgDLe4rjpDlLZ7SeNSnvM,1129
|
| 6 |
+
pydantic/__init__.py,sha256=5iEnJ4wHv1OEzdKQPzaKaZKfO4pSQAC65ODrYI6_S8Y,15812
|
| 7 |
+
pydantic/__pycache__/__init__.cpython-312.pyc,,
|
| 8 |
+
pydantic/__pycache__/_migration.cpython-312.pyc,,
|
| 9 |
+
pydantic/__pycache__/alias_generators.cpython-312.pyc,,
|
| 10 |
+
pydantic/__pycache__/aliases.cpython-312.pyc,,
|
| 11 |
+
pydantic/__pycache__/annotated_handlers.cpython-312.pyc,,
|
| 12 |
+
pydantic/__pycache__/class_validators.cpython-312.pyc,,
|
| 13 |
+
pydantic/__pycache__/color.cpython-312.pyc,,
|
| 14 |
+
pydantic/__pycache__/config.cpython-312.pyc,,
|
| 15 |
+
pydantic/__pycache__/dataclasses.cpython-312.pyc,,
|
| 16 |
+
pydantic/__pycache__/datetime_parse.cpython-312.pyc,,
|
| 17 |
+
pydantic/__pycache__/decorator.cpython-312.pyc,,
|
| 18 |
+
pydantic/__pycache__/env_settings.cpython-312.pyc,,
|
| 19 |
+
pydantic/__pycache__/error_wrappers.cpython-312.pyc,,
|
| 20 |
+
pydantic/__pycache__/errors.cpython-312.pyc,,
|
| 21 |
+
pydantic/__pycache__/fields.cpython-312.pyc,,
|
| 22 |
+
pydantic/__pycache__/functional_serializers.cpython-312.pyc,,
|
| 23 |
+
pydantic/__pycache__/functional_validators.cpython-312.pyc,,
|
| 24 |
+
pydantic/__pycache__/generics.cpython-312.pyc,,
|
| 25 |
+
pydantic/__pycache__/json.cpython-312.pyc,,
|
| 26 |
+
pydantic/__pycache__/json_schema.cpython-312.pyc,,
|
| 27 |
+
pydantic/__pycache__/main.cpython-312.pyc,,
|
| 28 |
+
pydantic/__pycache__/mypy.cpython-312.pyc,,
|
| 29 |
+
pydantic/__pycache__/networks.cpython-312.pyc,,
|
| 30 |
+
pydantic/__pycache__/parse.cpython-312.pyc,,
|
| 31 |
+
pydantic/__pycache__/root_model.cpython-312.pyc,,
|
| 32 |
+
pydantic/__pycache__/schema.cpython-312.pyc,,
|
| 33 |
+
pydantic/__pycache__/tools.cpython-312.pyc,,
|
| 34 |
+
pydantic/__pycache__/type_adapter.cpython-312.pyc,,
|
| 35 |
+
pydantic/__pycache__/types.cpython-312.pyc,,
|
| 36 |
+
pydantic/__pycache__/typing.cpython-312.pyc,,
|
| 37 |
+
pydantic/__pycache__/utils.cpython-312.pyc,,
|
| 38 |
+
pydantic/__pycache__/validate_call_decorator.cpython-312.pyc,,
|
| 39 |
+
pydantic/__pycache__/validators.cpython-312.pyc,,
|
| 40 |
+
pydantic/__pycache__/version.cpython-312.pyc,,
|
| 41 |
+
pydantic/__pycache__/warnings.cpython-312.pyc,,
|
| 42 |
+
pydantic/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 43 |
+
pydantic/_internal/__pycache__/__init__.cpython-312.pyc,,
|
| 44 |
+
pydantic/_internal/__pycache__/_config.cpython-312.pyc,,
|
| 45 |
+
pydantic/_internal/__pycache__/_core_metadata.cpython-312.pyc,,
|
| 46 |
+
pydantic/_internal/__pycache__/_core_utils.cpython-312.pyc,,
|
| 47 |
+
pydantic/_internal/__pycache__/_dataclasses.cpython-312.pyc,,
|
| 48 |
+
pydantic/_internal/__pycache__/_decorators.cpython-312.pyc,,
|
| 49 |
+
pydantic/_internal/__pycache__/_decorators_v1.cpython-312.pyc,,
|
| 50 |
+
pydantic/_internal/__pycache__/_discriminated_union.cpython-312.pyc,,
|
| 51 |
+
pydantic/_internal/__pycache__/_docs_extraction.cpython-312.pyc,,
|
| 52 |
+
pydantic/_internal/__pycache__/_fields.cpython-312.pyc,,
|
| 53 |
+
pydantic/_internal/__pycache__/_forward_ref.cpython-312.pyc,,
|
| 54 |
+
pydantic/_internal/__pycache__/_generate_schema.cpython-312.pyc,,
|
| 55 |
+
pydantic/_internal/__pycache__/_generics.cpython-312.pyc,,
|
| 56 |
+
pydantic/_internal/__pycache__/_git.cpython-312.pyc,,
|
| 57 |
+
pydantic/_internal/__pycache__/_import_utils.cpython-312.pyc,,
|
| 58 |
+
pydantic/_internal/__pycache__/_internal_dataclass.cpython-312.pyc,,
|
| 59 |
+
pydantic/_internal/__pycache__/_known_annotated_metadata.cpython-312.pyc,,
|
| 60 |
+
pydantic/_internal/__pycache__/_mock_val_ser.cpython-312.pyc,,
|
| 61 |
+
pydantic/_internal/__pycache__/_model_construction.cpython-312.pyc,,
|
| 62 |
+
pydantic/_internal/__pycache__/_namespace_utils.cpython-312.pyc,,
|
| 63 |
+
pydantic/_internal/__pycache__/_repr.cpython-312.pyc,,
|
| 64 |
+
pydantic/_internal/__pycache__/_schema_gather.cpython-312.pyc,,
|
| 65 |
+
pydantic/_internal/__pycache__/_schema_generation_shared.cpython-312.pyc,,
|
| 66 |
+
pydantic/_internal/__pycache__/_serializers.cpython-312.pyc,,
|
| 67 |
+
pydantic/_internal/__pycache__/_signature.cpython-312.pyc,,
|
| 68 |
+
pydantic/_internal/__pycache__/_typing_extra.cpython-312.pyc,,
|
| 69 |
+
pydantic/_internal/__pycache__/_utils.cpython-312.pyc,,
|
| 70 |
+
pydantic/_internal/__pycache__/_validate_call.cpython-312.pyc,,
|
| 71 |
+
pydantic/_internal/__pycache__/_validators.cpython-312.pyc,,
|
| 72 |
+
pydantic/_internal/_config.py,sha256=Rzys1Joffn4JczElcYDqsZLRgBgHn2lYWqDR55oASPA,14839
|
| 73 |
+
pydantic/_internal/_core_metadata.py,sha256=Y_g2t3i7uluK-wXCZvzJfRFMPUM23aBYLfae4FzBPy0,5162
|
| 74 |
+
pydantic/_internal/_core_utils.py,sha256=1jru4VbJ0x63R6dtVcuOI-dKQTC_d_lSnJWEBQzGNEQ,6487
|
| 75 |
+
pydantic/_internal/_dataclasses.py,sha256=Zgqcm1WaJLBwTQQC5mGKNowjlTgX3mfX_J5e2vd24lM,13188
|
| 76 |
+
pydantic/_internal/_decorators.py,sha256=RDEG_Jau5NiJcfO0xgdT7EOgsU1LgWIYlX7wN5rYtVs,33620
|
| 77 |
+
pydantic/_internal/_decorators_v1.py,sha256=tfdfdpQKY4R2XCOwqHbZeoQMur6VNigRrfhudXBHx38,6185
|
| 78 |
+
pydantic/_internal/_discriminated_union.py,sha256=JLx_MVLep7Mxl1zbpdNZjvHDcz-J3OEW6WcdV184dcM,26255
|
| 79 |
+
pydantic/_internal/_docs_extraction.py,sha256=fyznSAHh5AzohnXZStV0HvH-nRbavNHPyg-knx-S_EE,4127
|
| 80 |
+
pydantic/_internal/_fields.py,sha256=hXeb-zodGwTDvG9OK1um18P64b7cqHb0GOngP4jrgcY,31557
|
| 81 |
+
pydantic/_internal/_forward_ref.py,sha256=5n3Y7-3AKLn8_FS3Yc7KutLiPUhyXmAtkEZOaFnonwM,611
|
| 82 |
+
pydantic/_internal/_generate_schema.py,sha256=PjogUawIXmf8LuLNe9seJph7WLW4MJ7-GBsXwCsQC9Q,136348
|
| 83 |
+
pydantic/_internal/_generics.py,sha256=CXjcInlvci8VejaWn1f39kv0AcfJL0R523qRLFZVD-s,23393
|
| 84 |
+
pydantic/_internal/_git.py,sha256=IwPh3DPfa2Xq3rBuB9Nx8luR2A1i69QdeTfWWXIuCVg,809
|
| 85 |
+
pydantic/_internal/_import_utils.py,sha256=TRhxD5OuY6CUosioBdBcJUs0om7IIONiZdYAV7zQ8jM,402
|
| 86 |
+
pydantic/_internal/_internal_dataclass.py,sha256=_bedc1XbuuygRGiLZqkUkwwFpQaoR1hKLlR501nyySY,144
|
| 87 |
+
pydantic/_internal/_known_annotated_metadata.py,sha256=PynQIFQ61__4Gcrzn0D5ENllg7jPq_cxoLTmuFQBY88,16805
|
| 88 |
+
pydantic/_internal/_mock_val_ser.py,sha256=wmRRFSBvqfcLbI41PsFliB4u2AZ3mJpZeiERbD3xKTo,8885
|
| 89 |
+
pydantic/_internal/_model_construction.py,sha256=JoKmY4JrDBu3nG_tCIrJgtJJE1uq6v29TvTz5ElHE5g,38928
|
| 90 |
+
pydantic/_internal/_namespace_utils.py,sha256=hl3-TRAr82U2jTyPP3t-QqsvKLirxtkLfNfrN-fp0x8,12878
|
| 91 |
+
pydantic/_internal/_repr.py,sha256=jQfnJuyDxQpSRNhG29II9PX8e4Nv2qWZrEw2lqih3UE,5172
|
| 92 |
+
pydantic/_internal/_schema_gather.py,sha256=8nJ-uM6Y4z6xpasnGonEMubtNVX_mxeeRDFmd_qMVLA,9052
|
| 93 |
+
pydantic/_internal/_schema_generation_shared.py,sha256=F_rbQbrkoomgxsskdHpP0jUJ7TCfe0BADAEkq6CJ4nM,4842
|
| 94 |
+
pydantic/_internal/_serializers.py,sha256=YIWvSmAR5fnbGSWCOQduWt1yB4ZQY42eAruc-enrb6c,1491
|
| 95 |
+
pydantic/_internal/_signature.py,sha256=i_b6wtluiVWZRh1ZY8UvB2UZziP1KjqSXZgC-HxwOT0,6808
|
| 96 |
+
pydantic/_internal/_typing_extra.py,sha256=dDxqF46lzuqCoKLrAH_k95EDbayEeKb2lHjuTJ5OBoY,31574
|
| 97 |
+
pydantic/_internal/_utils.py,sha256=gN48BsR-FDrJDibCmo69ttQg67WbuFrdy_1NQL3cvLI,15959
|
| 98 |
+
pydantic/_internal/_validate_call.py,sha256=OD_BspHaL9FKzZ9XrndhiEuMnjF3SRIJUHtwv6yUffU,5366
|
| 99 |
+
pydantic/_internal/_validators.py,sha256=7GTjXXWFMLib4dxQ-HeaiHlAZiR2B2G8byCYMGrmQ48,20563
|
| 100 |
+
pydantic/_migration.py,sha256=VF73LRCUz3Irb5xVt13jb3NAcXVnEF6T1-J0OLfeZ5A,12160
|
| 101 |
+
pydantic/alias_generators.py,sha256=KM1n3u4JfLSBl1UuYg3hoYHzXJD-yvgrnq8u1ccwh_A,2124
|
| 102 |
+
pydantic/aliases.py,sha256=vhCHyoSWnX-EJ-wWb5qj4xyRssgGWnTQfzQp4GSZ9ug,4937
|
| 103 |
+
pydantic/annotated_handlers.py,sha256=WfyFSqwoEIFXBh7T73PycKloI1DiX45GWi0-JOsCR4Y,4407
|
| 104 |
+
pydantic/class_validators.py,sha256=i_V3j-PYdGLSLmj_IJZekTRjunO8SIVz8LMlquPyP7E,148
|
| 105 |
+
pydantic/color.py,sha256=AzqGfVQHF92_ZctDcue0DM4yTp2P6tekkwRINTWrLIo,21481
|
| 106 |
+
pydantic/config.py,sha256=o1P67FMWIQG-_RfGtKislkHwo4pXm_6jfsR41P88v78,44533
|
| 107 |
+
pydantic/dataclasses.py,sha256=4X9We0jj1KLwBtvYSkAFXNon46zrpBmRZANf4LfwbXg,18963
|
| 108 |
+
pydantic/datetime_parse.py,sha256=QC-WgMxMr_wQ_mNXUS7AVf-2hLEhvvsPY1PQyhSGOdk,150
|
| 109 |
+
pydantic/decorator.py,sha256=YX-jUApu5AKaVWKPoaV-n-4l7UbS69GEt9Ra3hszmKI,145
|
| 110 |
+
pydantic/deprecated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 111 |
+
pydantic/deprecated/__pycache__/__init__.cpython-312.pyc,,
|
| 112 |
+
pydantic/deprecated/__pycache__/class_validators.cpython-312.pyc,,
|
| 113 |
+
pydantic/deprecated/__pycache__/config.cpython-312.pyc,,
|
| 114 |
+
pydantic/deprecated/__pycache__/copy_internals.cpython-312.pyc,,
|
| 115 |
+
pydantic/deprecated/__pycache__/decorator.cpython-312.pyc,,
|
| 116 |
+
pydantic/deprecated/__pycache__/json.cpython-312.pyc,,
|
| 117 |
+
pydantic/deprecated/__pycache__/parse.cpython-312.pyc,,
|
| 118 |
+
pydantic/deprecated/__pycache__/tools.cpython-312.pyc,,
|
| 119 |
+
pydantic/deprecated/class_validators.py,sha256=n0jYQOcb5YQiw0b7YXyi7NPiYdV7ujWR4KyZumjTPok,10281
|
| 120 |
+
pydantic/deprecated/config.py,sha256=k_lsVk57paxLJOcBueH07cu1OgEgWdVBxm6lfaC3CCU,2663
|
| 121 |
+
pydantic/deprecated/copy_internals.py,sha256=Ghd-vkMd5EYCCgyCGtPKO58np9cEKBQC6qkBeIEFI2g,7618
|
| 122 |
+
pydantic/deprecated/decorator.py,sha256=TBm6bJ7wJsNih_8Wq5IzDcwP32m9_vfxs96desLuk00,10845
|
| 123 |
+
pydantic/deprecated/json.py,sha256=HlWCG35RRrxyzuTS6LTQiZBwRhmDZWmeqQH8rLW6wA8,4657
|
| 124 |
+
pydantic/deprecated/parse.py,sha256=Gzd6b_g8zJXcuE7QRq5adhx_EMJahXfcpXCF0RgrqqI,2511
|
| 125 |
+
pydantic/deprecated/tools.py,sha256=Nrm9oFRZWp8-jlfvPgJILEsywp4YzZD52XIGPDLxHcI,3330
|
| 126 |
+
pydantic/env_settings.py,sha256=6IHeeWEqlUPRUv3V-AXiF_W91fg2Jw_M3O0l34J_eyA,148
|
| 127 |
+
pydantic/error_wrappers.py,sha256=RK6mqATc9yMD-KBD9IJS9HpKCprWHd8wo84Bnm-3fR8,150
|
| 128 |
+
pydantic/errors.py,sha256=DrECPCWhSYrQ8Ba4O8hKzIAM2i9GBHTXWALzaFDpLf4,6013
|
| 129 |
+
pydantic/experimental/__init__.py,sha256=QT7rKYdDsCiTJ9GEjmsQdWHScwpKrrNkGq6vqONP6RQ,104
|
| 130 |
+
pydantic/experimental/__pycache__/__init__.cpython-312.pyc,,
|
| 131 |
+
pydantic/experimental/__pycache__/arguments_schema.cpython-312.pyc,,
|
| 132 |
+
pydantic/experimental/__pycache__/missing_sentinel.cpython-312.pyc,,
|
| 133 |
+
pydantic/experimental/__pycache__/pipeline.cpython-312.pyc,,
|
| 134 |
+
pydantic/experimental/arguments_schema.py,sha256=EFnjX_ulp-tPyUjQX5pmQtug1OFL_Acc8bcMbLd-fVY,1866
|
| 135 |
+
pydantic/experimental/missing_sentinel.py,sha256=hQejgtF00wUuQMni9429evg-eXyIwpKvjsD8ofqfj-w,127
|
| 136 |
+
pydantic/experimental/pipeline.py,sha256=auoW6l6g1FC41LciPmsI1M6ncf00Szde9B3C-yvb9mI,23956
|
| 137 |
+
pydantic/fields.py,sha256=a8ZhJfI8FD6TQDD7-cWMm1ZXlQva0wMdT2Ey5VvVe-M,82023
|
| 138 |
+
pydantic/functional_serializers.py,sha256=zwRAjZusORtEbtxpVU20kg8FpemyZz4Fq6wJk1mpwYQ,18117
|
| 139 |
+
pydantic/functional_validators.py,sha256=7p-4jvP__9jZyvcdXiide5pAa_JlM7fJ1BgkqbjxSWM,31724
|
| 140 |
+
pydantic/generics.py,sha256=0ZqZ9O9annIj_3mGBRqps4htey3b5lV1-d2tUxPMMnA,144
|
| 141 |
+
pydantic/json.py,sha256=ZH8RkI7h4Bz-zp8OdTAxbJUoVvcoU-jhMdRZ0B-k0xc,140
|
| 142 |
+
pydantic/json_schema.py,sha256=da3hQ9vQPLEhPsrDnUlijfQ1fuCCViGVnUzOrAZLgDs,125955
|
| 143 |
+
pydantic/main.py,sha256=NbhCz-ku8wDgYLQMBi75Ov7hywdc8LewA3oUNIZ8JVQ,85334
|
| 144 |
+
pydantic/mypy.py,sha256=sYmmZrL_GvoYSLcBuPRRDfSfgQWehkzw_ZvNkwDY2ME,60971
|
| 145 |
+
pydantic/networks.py,sha256=gCB96gt0G7tiVDhVnJfpKr1ARL5qkH-SPZkuNkmG2O4,42102
|
| 146 |
+
pydantic/parse.py,sha256=wkd82dgtvWtD895U_I6E1htqMlGhBSYEV39cuBSeo3A,141
|
| 147 |
+
pydantic/plugin/__init__.py,sha256=a7Tw366U6K3kltCCNZY76nc9ss-7uGGQ40TXad9OypQ,7333
|
| 148 |
+
pydantic/plugin/__pycache__/__init__.cpython-312.pyc,,
|
| 149 |
+
pydantic/plugin/__pycache__/_loader.cpython-312.pyc,,
|
| 150 |
+
pydantic/plugin/__pycache__/_schema_validator.cpython-312.pyc,,
|
| 151 |
+
pydantic/plugin/_loader.py,sha256=hAjgSljoIhGx3AVpIpuqw5SPttBNNeGBSTrqSMnNiJk,2213
|
| 152 |
+
pydantic/plugin/_schema_validator.py,sha256=5M5Ic1bZnjhNDxtRDVKbRPTQ6po6QuKMY7MguMkHeW0,5445
|
| 153 |
+
pydantic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 154 |
+
pydantic/root_model.py,sha256=cOMoeWdp536KF85uRIcW-oz1O7d5GRCpx9HTS4-1uf8,6394
|
| 155 |
+
pydantic/schema.py,sha256=Vqqjvq_LnapVknebUd3Bp_J1p2gXZZnZRgL48bVEG7o,142
|
| 156 |
+
pydantic/tools.py,sha256=iHQpd8SJ5DCTtPV5atAV06T89bjSaMFeZZ2LX9lasZY,141
|
| 157 |
+
pydantic/type_adapter.py,sha256=T05g8WQczBsVU_35RdKxgjIi7Y7LIip67pF3NI7X4GE,36123
|
| 158 |
+
pydantic/types.py,sha256=fpYcGnAncK4QjaFm3jZtqYwxJYzB7rJJPfAl89emzyQ,105961
|
| 159 |
+
pydantic/typing.py,sha256=P7feA35MwTcLsR1uL7db0S-oydBxobmXa55YDoBgajQ,138
|
| 160 |
+
pydantic/utils.py,sha256=15nR2QpqTBFlQV4TNtTItMyTJx_fbyV-gPmIEY1Gooc,141
|
| 161 |
+
pydantic/v1/__init__.py,sha256=SxQPklgBs4XHJwE6BZ9qoewYoGiNyYUnmHzEFCZbfnI,2946
|
| 162 |
+
pydantic/v1/__pycache__/__init__.cpython-312.pyc,,
|
| 163 |
+
pydantic/v1/__pycache__/_hypothesis_plugin.cpython-312.pyc,,
|
| 164 |
+
pydantic/v1/__pycache__/annotated_types.cpython-312.pyc,,
|
| 165 |
+
pydantic/v1/__pycache__/class_validators.cpython-312.pyc,,
|
| 166 |
+
pydantic/v1/__pycache__/color.cpython-312.pyc,,
|
| 167 |
+
pydantic/v1/__pycache__/config.cpython-312.pyc,,
|
| 168 |
+
pydantic/v1/__pycache__/dataclasses.cpython-312.pyc,,
|
| 169 |
+
pydantic/v1/__pycache__/datetime_parse.cpython-312.pyc,,
|
| 170 |
+
pydantic/v1/__pycache__/decorator.cpython-312.pyc,,
|
| 171 |
+
pydantic/v1/__pycache__/env_settings.cpython-312.pyc,,
|
| 172 |
+
pydantic/v1/__pycache__/error_wrappers.cpython-312.pyc,,
|
| 173 |
+
pydantic/v1/__pycache__/errors.cpython-312.pyc,,
|
| 174 |
+
pydantic/v1/__pycache__/fields.cpython-312.pyc,,
|
| 175 |
+
pydantic/v1/__pycache__/generics.cpython-312.pyc,,
|
| 176 |
+
pydantic/v1/__pycache__/json.cpython-312.pyc,,
|
| 177 |
+
pydantic/v1/__pycache__/main.cpython-312.pyc,,
|
| 178 |
+
pydantic/v1/__pycache__/mypy.cpython-312.pyc,,
|
| 179 |
+
pydantic/v1/__pycache__/networks.cpython-312.pyc,,
|
| 180 |
+
pydantic/v1/__pycache__/parse.cpython-312.pyc,,
|
| 181 |
+
pydantic/v1/__pycache__/schema.cpython-312.pyc,,
|
| 182 |
+
pydantic/v1/__pycache__/tools.cpython-312.pyc,,
|
| 183 |
+
pydantic/v1/__pycache__/types.cpython-312.pyc,,
|
| 184 |
+
pydantic/v1/__pycache__/typing.cpython-312.pyc,,
|
| 185 |
+
pydantic/v1/__pycache__/utils.cpython-312.pyc,,
|
| 186 |
+
pydantic/v1/__pycache__/validators.cpython-312.pyc,,
|
| 187 |
+
pydantic/v1/__pycache__/version.cpython-312.pyc,,
|
| 188 |
+
pydantic/v1/_hypothesis_plugin.py,sha256=5ES5xWuw1FQAsymLezy8QgnVz0ZpVfU3jkmT74H27VQ,14847
|
| 189 |
+
pydantic/v1/annotated_types.py,sha256=uk2NAAxqiNELKjiHhyhxKaIOh8F1lYW_LzrW3X7oZBc,3157
|
| 190 |
+
pydantic/v1/class_validators.py,sha256=ULOaIUgYUDBsHL7EEVEarcM-UubKUggoN8hSbDonsFE,14672
|
| 191 |
+
pydantic/v1/color.py,sha256=iZABLYp6OVoo2AFkP9Ipri_wSc6-Kklu8YuhSartd5g,16844
|
| 192 |
+
pydantic/v1/config.py,sha256=a6P0Wer9x4cbwKW7Xv8poSUqM4WP-RLWwX6YMpYq9AA,6532
|
| 193 |
+
pydantic/v1/dataclasses.py,sha256=784cqvInbwIPWr9usfpX3ch7z4t3J2tTK6N067_wk1o,18172
|
| 194 |
+
pydantic/v1/datetime_parse.py,sha256=4Qy1kQpq3rNVZJeIHeSPDpuS2Bvhp1KPtzJG1xu-H00,7724
|
| 195 |
+
pydantic/v1/decorator.py,sha256=zaaxxxoWPCm818D1bs0yhapRjXm32V8G0ZHWCdM1uXA,10339
|
| 196 |
+
pydantic/v1/env_settings.py,sha256=A9VXwtRl02AY-jH0C0ouy5VNw3fi6F_pkzuHDjgAAOM,14105
|
| 197 |
+
pydantic/v1/error_wrappers.py,sha256=6625Mfw9qkC2NwitB_JFAWe8B-Xv6zBU7rL9k28tfyo,5196
|
| 198 |
+
pydantic/v1/errors.py,sha256=mIwPED5vGM5Q5v4C4Z1JPldTRH-omvEylH6ksMhOmPw,17726
|
| 199 |
+
pydantic/v1/fields.py,sha256=VqWJCriUNiEyptXroDVJ501JpVA0en2VANcksqXL2b8,50649
|
| 200 |
+
pydantic/v1/generics.py,sha256=YzyKTZN6x5Q1RGJ3WQ9jN-uwHJxL3W4qoZqwcZXqxWg,17829
|
| 201 |
+
pydantic/v1/json.py,sha256=WQ5Hy_hIpfdR3YS8k6N2E6KMJzsdbBi_ldWOPJaV81M,3390
|
| 202 |
+
pydantic/v1/main.py,sha256=vRB1TbpkzPN3P5ijJlc-cjNuO-HciNOpC4b8K3zZnfc,45697
|
| 203 |
+
pydantic/v1/mypy.py,sha256=Cl8XRfCmIcVE3j5AEU52C8iDh8lcX__D3hz2jIWxMAs,38860
|
| 204 |
+
pydantic/v1/networks.py,sha256=HYNtKAfOmOnKJpsDg1g6SIkj9WPhU_-i8l5e2JKBpG4,22124
|
| 205 |
+
pydantic/v1/parse.py,sha256=BJtdqiZRtav9VRFCmOxoY-KImQmjPy-A_NoojiFUZxY,1821
|
| 206 |
+
pydantic/v1/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 207 |
+
pydantic/v1/schema.py,sha256=aqBuA--cq8gAVkim5BJPFASHzOZ8dFtmFX_fNGr6ip4,47801
|
| 208 |
+
pydantic/v1/tools.py,sha256=1lDdXHk0jL5uP3u5RCYAvUAlGClgAO-45lkq9j7fyBA,2881
|
| 209 |
+
pydantic/v1/types.py,sha256=Bzl-RcnitPBHnqwwj9iv7JjHuN1GpnWH24dKkF3l9e8,35455
|
| 210 |
+
pydantic/v1/typing.py,sha256=ovwtLpEZCbnghZaHfSNJupzetzHNkLXjn_66kgTnIV4,20102
|
| 211 |
+
pydantic/v1/utils.py,sha256=1PqOIlz6OVWwGds3HBKlw4Et6asFou0UUpAto7jFOCs,26014
|
| 212 |
+
pydantic/v1/validators.py,sha256=lyUkn1MWhHxlCX5ZfEgFj_CAHojoiPcaQeMdEM9XviU,22187
|
| 213 |
+
pydantic/v1/version.py,sha256=YpHWOQKtGoxfyikzGrcmXJVKUVYB9EBdoCR994QxSnE,1039
|
| 214 |
+
pydantic/validate_call_decorator.py,sha256=VLAi4hoFpjC-1eL0HixYaaWaEFO6htGcDctgsqa5VII,4416
|
| 215 |
+
pydantic/validators.py,sha256=pwbIJXVb1CV2mAE4w_EGfNj7DwzsKaWw_tTL6cviTus,146
|
| 216 |
+
pydantic/version.py,sha256=T5rziwDPrMjfaU0X--5fBdGVvh94QznNQ7QjD4LfgmA,3985
|
| 217 |
+
pydantic/warnings.py,sha256=3QyQo6lN35cO7OXDbFEXWcNaPlGCRaregVZA-G-lZwI,4822
|
venv/lib/python3.12/site-packages/pydantic-2.13.4.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: hatchling 1.29.0
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/METADATA
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.4
|
| 2 |
+
Name: pydantic_core
|
| 3 |
+
Version: 2.46.4
|
| 4 |
+
Classifier: Development Status :: 3 - Alpha
|
| 5 |
+
Classifier: Programming Language :: Python
|
| 6 |
+
Classifier: Programming Language :: Python :: 3
|
| 7 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 8 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 9 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 10 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 11 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 12 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 13 |
+
Classifier: Programming Language :: Python :: 3.14
|
| 14 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 15 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 16 |
+
Classifier: Programming Language :: Python :: Implementation :: GraalPy
|
| 17 |
+
Classifier: Programming Language :: Rust
|
| 18 |
+
Classifier: Framework :: Pydantic
|
| 19 |
+
Classifier: Intended Audience :: Developers
|
| 20 |
+
Classifier: Intended Audience :: Information Technology
|
| 21 |
+
Classifier: Operating System :: POSIX :: Linux
|
| 22 |
+
Classifier: Operating System :: Microsoft :: Windows
|
| 23 |
+
Classifier: Operating System :: MacOS
|
| 24 |
+
Classifier: Typing :: Typed
|
| 25 |
+
Requires-Dist: typing-extensions>=4.14.1
|
| 26 |
+
License-File: LICENSE
|
| 27 |
+
Summary: Core functionality for Pydantic validation and serialization
|
| 28 |
+
Home-Page: https://github.com/pydantic/pydantic
|
| 29 |
+
Author-email: Samuel Colvin <s@muelcolvin.com>, Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, David Montague <david@pydantic.dev>, David Hewitt <mail@davidhewitt.dev>, Sydney Runkle <sydneymarierunkle@gmail.com>, Victorien Plot <contact@vctrn.dev>
|
| 30 |
+
License-Expression: MIT
|
| 31 |
+
Requires-Python: >=3.9
|
| 32 |
+
Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM
|
| 33 |
+
Project-URL: Funding, https://github.com/sponsors/samuelcolvin
|
| 34 |
+
Project-URL: Homepage, https://github.com/pydantic
|
| 35 |
+
Project-URL: Source, https://github.com/pydantic/pydantic/tree/main/pydantic-core
|
| 36 |
+
|
| 37 |
+
# pydantic-core
|
| 38 |
+
|
| 39 |
+
[](https://github.com/pydantic/pydantic-core/actions?query=event%3Apush+branch%3Amain+workflow%3Aci)
|
| 40 |
+
[](https://codecov.io/gh/pydantic/pydantic-core)
|
| 41 |
+
[](https://pypi.python.org/pypi/pydantic-core)
|
| 42 |
+
[](https://github.com/pydantic/pydantic-core)
|
| 43 |
+
[](https://github.com/pydantic/pydantic-core/blob/main/LICENSE)
|
| 44 |
+
|
| 45 |
+
This package provides the core functionality for [pydantic](https://docs.pydantic.dev) validation and serialization.
|
| 46 |
+
|
| 47 |
+
Pydantic-core is currently around 17x faster than pydantic V1.
|
| 48 |
+
See [`tests/benchmarks/`](./tests/benchmarks/) for details.
|
| 49 |
+
|
| 50 |
+
## Example of direct usage
|
| 51 |
+
|
| 52 |
+
*NOTE: You should not need to use pydantic-core directly; instead, use pydantic, which in turn uses pydantic-core.*
|
| 53 |
+
|
| 54 |
+
```py
|
| 55 |
+
from pydantic_core import SchemaValidator, ValidationError
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
v = SchemaValidator(
|
| 59 |
+
{
|
| 60 |
+
'type': 'typed-dict',
|
| 61 |
+
'fields': {
|
| 62 |
+
'name': {
|
| 63 |
+
'type': 'typed-dict-field',
|
| 64 |
+
'schema': {
|
| 65 |
+
'type': 'str',
|
| 66 |
+
},
|
| 67 |
+
},
|
| 68 |
+
'age': {
|
| 69 |
+
'type': 'typed-dict-field',
|
| 70 |
+
'schema': {
|
| 71 |
+
'type': 'int',
|
| 72 |
+
'ge': 18,
|
| 73 |
+
},
|
| 74 |
+
},
|
| 75 |
+
'is_developer': {
|
| 76 |
+
'type': 'typed-dict-field',
|
| 77 |
+
'schema': {
|
| 78 |
+
'type': 'default',
|
| 79 |
+
'schema': {'type': 'bool'},
|
| 80 |
+
'default': True,
|
| 81 |
+
},
|
| 82 |
+
},
|
| 83 |
+
},
|
| 84 |
+
}
|
| 85 |
+
)
|
| 86 |
+
|
| 87 |
+
r1 = v.validate_python({'name': 'Samuel', 'age': 35})
|
| 88 |
+
assert r1 == {'name': 'Samuel', 'age': 35, 'is_developer': True}
|
| 89 |
+
|
| 90 |
+
# pydantic-core can also validate JSON directly
|
| 91 |
+
r2 = v.validate_json('{"name": "Samuel", "age": 35}')
|
| 92 |
+
assert r1 == r2
|
| 93 |
+
|
| 94 |
+
try:
|
| 95 |
+
v.validate_python({'name': 'Samuel', 'age': 11})
|
| 96 |
+
except ValidationError as e:
|
| 97 |
+
print(e)
|
| 98 |
+
"""
|
| 99 |
+
1 validation error for model
|
| 100 |
+
age
|
| 101 |
+
Input should be greater than or equal to 18
|
| 102 |
+
[type=greater_than_equal, context={ge: 18}, input_value=11, input_type=int]
|
| 103 |
+
"""
|
| 104 |
+
```
|
| 105 |
+
|
| 106 |
+
## Getting Started
|
| 107 |
+
|
| 108 |
+
### Prerequisites
|
| 109 |
+
|
| 110 |
+
You'll need:
|
| 111 |
+
|
| 112 |
+
1. **[Rust](https://rustup.rs/)** - Rust stable (or nightly for coverage)
|
| 113 |
+
2. **[uv](https://docs.astral.sh/uv/getting-started/installation/)** - Fast Python package manager (will install Python 3.9+ automatically)
|
| 114 |
+
3. **[git](https://git-scm.com/)** - For version control
|
| 115 |
+
4. **[make](https://www.gnu.org/software/make/)** - For running development commands (or use `nmake` on Windows)
|
| 116 |
+
|
| 117 |
+
### Quick Start
|
| 118 |
+
|
| 119 |
+
```bash
|
| 120 |
+
# Clone the repository (or from your fork)
|
| 121 |
+
git clone git@github.com:pydantic/pydantic-core.git
|
| 122 |
+
cd pydantic-core
|
| 123 |
+
|
| 124 |
+
# Install all dependencies using uv, setup pre-commit hooks, and build the development version
|
| 125 |
+
make install
|
| 126 |
+
```
|
| 127 |
+
|
| 128 |
+
Verify your installation by running:
|
| 129 |
+
|
| 130 |
+
```bash
|
| 131 |
+
make
|
| 132 |
+
```
|
| 133 |
+
|
| 134 |
+
This runs a full development cycle: formatting, building, linting, and testing
|
| 135 |
+
|
| 136 |
+
### Development Commands
|
| 137 |
+
|
| 138 |
+
Run `make help` to see all available commands, or use these common ones:
|
| 139 |
+
|
| 140 |
+
```bash
|
| 141 |
+
make build-dev # to build the package during development
|
| 142 |
+
make build-prod # to perform an optimised build for benchmarking
|
| 143 |
+
make test # to run the tests
|
| 144 |
+
make testcov # to run the tests and generate a coverage report
|
| 145 |
+
make lint # to run the linter
|
| 146 |
+
make format # to format python and rust code
|
| 147 |
+
make all # to run to run build-dev + format + lint + test
|
| 148 |
+
```
|
| 149 |
+
|
| 150 |
+
### Useful Resources
|
| 151 |
+
|
| 152 |
+
* [`python/pydantic_core/_pydantic_core.pyi`](./python/pydantic_core/_pydantic_core.pyi) - Python API types
|
| 153 |
+
* [`python/pydantic_core/core_schema.py`](./python/pydantic_core/core_schema.py) - Core schema definitions
|
| 154 |
+
* [`tests/`](./tests) - Comprehensive usage examples
|
| 155 |
+
|
| 156 |
+
## Profiling
|
| 157 |
+
|
| 158 |
+
It's possible to profile the code using the [`flamegraph` utility from `flamegraph-rs`](https://github.com/flamegraph-rs/flamegraph). (Tested on Linux.) You can install this with `cargo install flamegraph`.
|
| 159 |
+
|
| 160 |
+
Run `make build-profiling` to install a release build with debugging symbols included (needed for profiling).
|
| 161 |
+
|
| 162 |
+
Once that is built, you can profile pytest benchmarks with (e.g.):
|
| 163 |
+
|
| 164 |
+
```bash
|
| 165 |
+
flamegraph -- pytest tests/benchmarks/test_micro_benchmarks.py -k test_list_of_ints_core_py --benchmark-enable
|
| 166 |
+
```
|
| 167 |
+
|
| 168 |
+
The `flamegraph` command will produce an interactive SVG at `flamegraph.svg`.
|
| 169 |
+
|
| 170 |
+
## Releasing
|
| 171 |
+
|
| 172 |
+
TBC (needs to be integrated into `pydantic` repository release process).
|
| 173 |
+
|
venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/RECORD
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
pydantic_core-2.46.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
pydantic_core-2.46.4.dist-info/METADATA,sha256=E1-osviUc-kZHHB4whZOPsbIknphwjecL93MB_EfO64,6573
|
| 3 |
+
pydantic_core-2.46.4.dist-info/RECORD,,
|
| 4 |
+
pydantic_core-2.46.4.dist-info/WHEEL,sha256=F6wS_EyyTApAlVDO0HV37reS5C0BQB3Ecd85S9pKlHg,147
|
| 5 |
+
pydantic_core-2.46.4.dist-info/licenses/LICENSE,sha256=Kv3TDVS01itvSIprzBVG6E7FBh8T9CCcA9ASNIeDeVo,1080
|
| 6 |
+
pydantic_core-2.46.4.dist-info/sboms/pydantic-core.cyclonedx.json,sha256=NGcCBzR-kL8sAQzWHddfazQNFGtlI8BXw9W64--sX84,125376
|
| 7 |
+
pydantic_core/__init__.py,sha256=nK1ikrdSVK9gapcKrpv_blrp8LCAic1jrK-jkbYHlNI,5115
|
| 8 |
+
pydantic_core/__pycache__/__init__.cpython-312.pyc,,
|
| 9 |
+
pydantic_core/__pycache__/core_schema.cpython-312.pyc,,
|
| 10 |
+
pydantic_core/_pydantic_core.cpython-312-x86_64-linux-gnu.so,sha256=ezsh6Ob8ci-mkTqJPR-CloVCE1sBGfg8fO3jX8ccSi8,4752552
|
| 11 |
+
pydantic_core/_pydantic_core.pyi,sha256=H_lG8iKiv3Bpfd3rixY7xzs67gE-s0eiAqJaSbnF8Ss,45932
|
| 12 |
+
pydantic_core/core_schema.py,sha256=1PM31yf_kGpm_H7GLnNFRkIPAQGPL9zh6rf2SxohVxg,155574
|
| 13 |
+
pydantic_core/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: maturin (1.13.1)
|
| 3 |
+
Root-Is-Purelib: false
|
| 4 |
+
Tag: cp312-cp312-manylinux_2_17_x86_64
|
| 5 |
+
Tag: cp312-cp312-manylinux2014_x86_64
|
venv/lib/python3.12/site-packages/pydantic_core/__init__.py
ADDED
|
@@ -0,0 +1,171 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import sys as _sys
|
| 4 |
+
from typing import Any as _Any
|
| 5 |
+
|
| 6 |
+
from typing_extensions import Sentinel
|
| 7 |
+
|
| 8 |
+
from ._pydantic_core import (
|
| 9 |
+
ArgsKwargs,
|
| 10 |
+
MultiHostUrl,
|
| 11 |
+
PydanticCustomError,
|
| 12 |
+
PydanticKnownError,
|
| 13 |
+
PydanticOmit,
|
| 14 |
+
PydanticSerializationError,
|
| 15 |
+
PydanticSerializationUnexpectedValue,
|
| 16 |
+
PydanticUndefined,
|
| 17 |
+
PydanticUndefinedType,
|
| 18 |
+
PydanticUseDefault,
|
| 19 |
+
SchemaError,
|
| 20 |
+
SchemaSerializer,
|
| 21 |
+
SchemaValidator,
|
| 22 |
+
Some,
|
| 23 |
+
TzInfo,
|
| 24 |
+
Url,
|
| 25 |
+
ValidationError,
|
| 26 |
+
__version__,
|
| 27 |
+
from_json,
|
| 28 |
+
to_json,
|
| 29 |
+
to_jsonable_python,
|
| 30 |
+
)
|
| 31 |
+
from .core_schema import CoreConfig, CoreSchema, CoreSchemaType, ErrorType
|
| 32 |
+
|
| 33 |
+
if _sys.version_info < (3, 11):
|
| 34 |
+
from typing_extensions import NotRequired as _NotRequired
|
| 35 |
+
else:
|
| 36 |
+
from typing import NotRequired as _NotRequired
|
| 37 |
+
|
| 38 |
+
if _sys.version_info < (3, 12):
|
| 39 |
+
from typing_extensions import TypedDict as _TypedDict
|
| 40 |
+
else:
|
| 41 |
+
from typing import TypedDict as _TypedDict
|
| 42 |
+
|
| 43 |
+
__all__ = [
|
| 44 |
+
'__version__',
|
| 45 |
+
'UNSET',
|
| 46 |
+
'CoreConfig',
|
| 47 |
+
'CoreSchema',
|
| 48 |
+
'CoreSchemaType',
|
| 49 |
+
'SchemaValidator',
|
| 50 |
+
'SchemaSerializer',
|
| 51 |
+
'Some',
|
| 52 |
+
'Url',
|
| 53 |
+
'MultiHostUrl',
|
| 54 |
+
'ArgsKwargs',
|
| 55 |
+
'PydanticUndefined',
|
| 56 |
+
'PydanticUndefinedType',
|
| 57 |
+
'SchemaError',
|
| 58 |
+
'ErrorDetails',
|
| 59 |
+
'InitErrorDetails',
|
| 60 |
+
'ValidationError',
|
| 61 |
+
'PydanticCustomError',
|
| 62 |
+
'PydanticKnownError',
|
| 63 |
+
'PydanticOmit',
|
| 64 |
+
'PydanticUseDefault',
|
| 65 |
+
'PydanticSerializationError',
|
| 66 |
+
'PydanticSerializationUnexpectedValue',
|
| 67 |
+
'TzInfo',
|
| 68 |
+
'to_json',
|
| 69 |
+
'from_json',
|
| 70 |
+
'to_jsonable_python',
|
| 71 |
+
]
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class ErrorDetails(_TypedDict):
|
| 75 |
+
type: str
|
| 76 |
+
"""
|
| 77 |
+
The type of error that occurred, this is an identifier designed for
|
| 78 |
+
programmatic use that will change rarely or never.
|
| 79 |
+
|
| 80 |
+
`type` is unique for each error message, and can hence be used as an identifier to build custom error messages.
|
| 81 |
+
"""
|
| 82 |
+
loc: tuple[int | str, ...]
|
| 83 |
+
"""Tuple of strings and ints identifying where in the schema the error occurred."""
|
| 84 |
+
msg: str
|
| 85 |
+
"""A human readable error message."""
|
| 86 |
+
input: _Any
|
| 87 |
+
"""The input data at this `loc` that caused the error."""
|
| 88 |
+
ctx: _NotRequired[dict[str, _Any]]
|
| 89 |
+
"""
|
| 90 |
+
Values which are required to render the error message, and could hence be useful in rendering custom error messages.
|
| 91 |
+
Also useful for passing custom error data forward.
|
| 92 |
+
"""
|
| 93 |
+
url: _NotRequired[str]
|
| 94 |
+
"""
|
| 95 |
+
The documentation URL giving information about the error. No URL is available if
|
| 96 |
+
a [`PydanticCustomError`][pydantic_core.PydanticCustomError] is used.
|
| 97 |
+
"""
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
class InitErrorDetails(_TypedDict):
|
| 101 |
+
type: str | PydanticCustomError
|
| 102 |
+
"""The type of error that occurred, this should be a "slug" identifier that changes rarely or never."""
|
| 103 |
+
loc: _NotRequired[tuple[int | str, ...]]
|
| 104 |
+
"""Tuple of strings and ints identifying where in the schema the error occurred."""
|
| 105 |
+
input: _Any
|
| 106 |
+
"""The input data at this `loc` that caused the error."""
|
| 107 |
+
ctx: _NotRequired[dict[str, _Any]]
|
| 108 |
+
"""
|
| 109 |
+
Values which are required to render the error message, and could hence be useful in rendering custom error messages.
|
| 110 |
+
Also useful for passing custom error data forward.
|
| 111 |
+
"""
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
class ErrorTypeInfo(_TypedDict):
|
| 115 |
+
"""
|
| 116 |
+
Gives information about errors.
|
| 117 |
+
"""
|
| 118 |
+
|
| 119 |
+
type: ErrorType
|
| 120 |
+
"""The type of error that occurred, this should be a "slug" identifier that changes rarely or never."""
|
| 121 |
+
message_template_python: str
|
| 122 |
+
"""String template to render a human readable error message from using context, when the input is Python."""
|
| 123 |
+
example_message_python: str
|
| 124 |
+
"""Example of a human readable error message, when the input is Python."""
|
| 125 |
+
message_template_json: _NotRequired[str]
|
| 126 |
+
"""String template to render a human readable error message from using context, when the input is JSON data."""
|
| 127 |
+
example_message_json: _NotRequired[str]
|
| 128 |
+
"""Example of a human readable error message, when the input is JSON data."""
|
| 129 |
+
example_context: dict[str, _Any] | None
|
| 130 |
+
"""Example of context values."""
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
class MultiHostHost(_TypedDict):
|
| 134 |
+
"""
|
| 135 |
+
A host part of a multi-host URL.
|
| 136 |
+
"""
|
| 137 |
+
|
| 138 |
+
username: str | None
|
| 139 |
+
"""The username part of this host, or `None`."""
|
| 140 |
+
password: str | None
|
| 141 |
+
"""The password part of this host, or `None`."""
|
| 142 |
+
host: str | None
|
| 143 |
+
"""The host part of this host, or `None`."""
|
| 144 |
+
port: int | None
|
| 145 |
+
"""The port part of this host, or `None`."""
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
MISSING = Sentinel('MISSING')
|
| 149 |
+
"""A singleton indicating a field value was not provided during validation.
|
| 150 |
+
|
| 151 |
+
This singleton can be used a default value, as an alternative to `None` when it has
|
| 152 |
+
an explicit meaning. During serialization, any field with `MISSING` as a value is excluded
|
| 153 |
+
from the output.
|
| 154 |
+
|
| 155 |
+
Example:
|
| 156 |
+
```python
|
| 157 |
+
from pydantic import BaseModel
|
| 158 |
+
|
| 159 |
+
from pydantic_core import MISSING
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
class Configuration(BaseModel):
|
| 163 |
+
timeout: int | None | MISSING = MISSING
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
# configuration defaults, stored somewhere else:
|
| 167 |
+
defaults = {'timeout': 200}
|
| 168 |
+
|
| 169 |
+
conf = Configuration.model_validate({...})
|
| 170 |
+
timeout = conf.timeout if timeout.timeout is not MISSING else defaults['timeout']
|
| 171 |
+
"""
|
venv/lib/python3.12/site-packages/pydantic_core/_pydantic_core.pyi
ADDED
|
@@ -0,0 +1,1056 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import datetime
|
| 2 |
+
from collections.abc import Mapping
|
| 3 |
+
from typing import Any, Callable, Generic, Literal, TypeVar, final
|
| 4 |
+
|
| 5 |
+
from _typeshed import SupportsAllComparisons
|
| 6 |
+
from typing_extensions import LiteralString, Self, TypeAlias
|
| 7 |
+
|
| 8 |
+
from pydantic_core import ErrorDetails, ErrorTypeInfo, InitErrorDetails, MultiHostHost
|
| 9 |
+
from pydantic_core.core_schema import CoreConfig, CoreSchema, ErrorType, ExtraBehavior
|
| 10 |
+
|
| 11 |
+
__all__ = [
|
| 12 |
+
'__version__',
|
| 13 |
+
'build_profile',
|
| 14 |
+
'build_info',
|
| 15 |
+
'_recursion_limit',
|
| 16 |
+
'ArgsKwargs',
|
| 17 |
+
'SchemaValidator',
|
| 18 |
+
'SchemaSerializer',
|
| 19 |
+
'Url',
|
| 20 |
+
'MultiHostUrl',
|
| 21 |
+
'SchemaError',
|
| 22 |
+
'ValidationError',
|
| 23 |
+
'PydanticCustomError',
|
| 24 |
+
'PydanticKnownError',
|
| 25 |
+
'PydanticOmit',
|
| 26 |
+
'PydanticUseDefault',
|
| 27 |
+
'PydanticSerializationError',
|
| 28 |
+
'PydanticSerializationUnexpectedValue',
|
| 29 |
+
'PydanticUndefined',
|
| 30 |
+
'PydanticUndefinedType',
|
| 31 |
+
'Some',
|
| 32 |
+
'to_json',
|
| 33 |
+
'from_json',
|
| 34 |
+
'to_jsonable_python',
|
| 35 |
+
'list_all_errors',
|
| 36 |
+
'TzInfo',
|
| 37 |
+
]
|
| 38 |
+
__version__: str
|
| 39 |
+
build_profile: str
|
| 40 |
+
build_info: str
|
| 41 |
+
_recursion_limit: int
|
| 42 |
+
|
| 43 |
+
_T = TypeVar('_T', default=Any, covariant=True)
|
| 44 |
+
|
| 45 |
+
_StringInput: TypeAlias = 'dict[str, _StringInput]'
|
| 46 |
+
|
| 47 |
+
@final
|
| 48 |
+
class Some(Generic[_T]):
|
| 49 |
+
"""
|
| 50 |
+
Similar to Rust's [`Option::Some`](https://doc.rust-lang.org/std/option/enum.Option.html) type, this
|
| 51 |
+
identifies a value as being present, and provides a way to access it.
|
| 52 |
+
|
| 53 |
+
Generally used in a union with `None` to different between "some value which could be None" and no value.
|
| 54 |
+
"""
|
| 55 |
+
|
| 56 |
+
__match_args__ = ('value',)
|
| 57 |
+
|
| 58 |
+
@property
|
| 59 |
+
def value(self) -> _T:
|
| 60 |
+
"""
|
| 61 |
+
Returns the value wrapped by `Some`.
|
| 62 |
+
"""
|
| 63 |
+
@classmethod
|
| 64 |
+
def __class_getitem__(cls, item: Any, /) -> type[Self]: ...
|
| 65 |
+
|
| 66 |
+
@final
|
| 67 |
+
class SchemaValidator:
|
| 68 |
+
"""
|
| 69 |
+
`SchemaValidator` is the Python wrapper for `pydantic-core`'s Rust validation logic, internally it owns one
|
| 70 |
+
`CombinedValidator` which may in turn own more `CombinedValidator`s which make up the full schema validator.
|
| 71 |
+
"""
|
| 72 |
+
|
| 73 |
+
# note: pyo3 currently supports __new__, but not __init__, though we include __init__ stubs
|
| 74 |
+
# and docstrings here (and in the following classes) for documentation purposes
|
| 75 |
+
|
| 76 |
+
def __init__(self, schema: CoreSchema, config: CoreConfig | None = None, _use_prebuilt: bool = True) -> None:
|
| 77 |
+
"""Initializes the `SchemaValidator`.
|
| 78 |
+
|
| 79 |
+
Arguments:
|
| 80 |
+
schema: The `CoreSchema` to use for validation.
|
| 81 |
+
config: Optionally a [`CoreConfig`][pydantic_core.core_schema.CoreConfig] to configure validation.
|
| 82 |
+
_use_prebuilt: Whether to use pre-built validators (False during rebuilds to avoid stale references).
|
| 83 |
+
"""
|
| 84 |
+
|
| 85 |
+
def __new__(cls, schema: CoreSchema, config: CoreConfig | None = None, _use_prebuilt: bool = True) -> Self: ...
|
| 86 |
+
@property
|
| 87 |
+
def title(self) -> str:
|
| 88 |
+
"""
|
| 89 |
+
The title of the schema, as used in the heading of [`ValidationError.__str__()`][pydantic_core.ValidationError].
|
| 90 |
+
"""
|
| 91 |
+
def validate_python(
|
| 92 |
+
self,
|
| 93 |
+
input: Any,
|
| 94 |
+
*,
|
| 95 |
+
strict: bool | None = None,
|
| 96 |
+
extra: ExtraBehavior | None = None,
|
| 97 |
+
from_attributes: bool | None = None,
|
| 98 |
+
context: Any | None = None,
|
| 99 |
+
self_instance: Any | None = None,
|
| 100 |
+
allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False,
|
| 101 |
+
by_alias: bool | None = None,
|
| 102 |
+
by_name: bool | None = None,
|
| 103 |
+
) -> Any:
|
| 104 |
+
"""
|
| 105 |
+
Validate a Python object against the schema and return the validated object.
|
| 106 |
+
|
| 107 |
+
Arguments:
|
| 108 |
+
input: The Python object to validate.
|
| 109 |
+
strict: Whether to validate the object in strict mode.
|
| 110 |
+
If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used.
|
| 111 |
+
extra: Whether to ignore, allow, or forbid extra data during model validation.
|
| 112 |
+
If `None`, the value of [`CoreConfig.extra_fields_behavior`][pydantic_core.core_schema.CoreConfig] is used.
|
| 113 |
+
from_attributes: Whether to validate objects as inputs to models by extracting attributes.
|
| 114 |
+
If `None`, the value of [`CoreConfig.from_attributes`][pydantic_core.core_schema.CoreConfig] is used.
|
| 115 |
+
context: The context to use for validation, this is passed to functional validators as
|
| 116 |
+
[`info.context`][pydantic_core.core_schema.ValidationInfo.context].
|
| 117 |
+
self_instance: An instance of a model set attributes on from validation, this is used when running
|
| 118 |
+
validation from the `__init__` method of a model.
|
| 119 |
+
allow_partial: Whether to allow partial validation; if `True` errors in the last element of sequences
|
| 120 |
+
and mappings are ignored.
|
| 121 |
+
`'trailing-strings'` means any final unfinished JSON string is included in the result.
|
| 122 |
+
by_alias: Whether to use the field's alias when validating against the provided input data.
|
| 123 |
+
by_name: Whether to use the field's name when validating against the provided input data.
|
| 124 |
+
|
| 125 |
+
Raises:
|
| 126 |
+
ValidationError: If validation fails.
|
| 127 |
+
Exception: Other error types maybe raised if internal errors occur.
|
| 128 |
+
|
| 129 |
+
Returns:
|
| 130 |
+
The validated object.
|
| 131 |
+
"""
|
| 132 |
+
def isinstance_python(
|
| 133 |
+
self,
|
| 134 |
+
input: Any,
|
| 135 |
+
*,
|
| 136 |
+
strict: bool | None = None,
|
| 137 |
+
extra: ExtraBehavior | None = None,
|
| 138 |
+
from_attributes: bool | None = None,
|
| 139 |
+
context: Any | None = None,
|
| 140 |
+
self_instance: Any | None = None,
|
| 141 |
+
by_alias: bool | None = None,
|
| 142 |
+
by_name: bool | None = None,
|
| 143 |
+
) -> bool:
|
| 144 |
+
"""
|
| 145 |
+
Similar to [`validate_python()`][pydantic_core.SchemaValidator.validate_python] but returns a boolean.
|
| 146 |
+
|
| 147 |
+
Arguments match `validate_python()`. This method will not raise `ValidationError`s but will raise internal
|
| 148 |
+
errors.
|
| 149 |
+
|
| 150 |
+
Returns:
|
| 151 |
+
`True` if validation succeeds, `False` if validation fails.
|
| 152 |
+
"""
|
| 153 |
+
def validate_json(
|
| 154 |
+
self,
|
| 155 |
+
input: str | bytes | bytearray,
|
| 156 |
+
*,
|
| 157 |
+
strict: bool | None = None,
|
| 158 |
+
extra: ExtraBehavior | None = None,
|
| 159 |
+
context: Any | None = None,
|
| 160 |
+
self_instance: Any | None = None,
|
| 161 |
+
allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False,
|
| 162 |
+
by_alias: bool | None = None,
|
| 163 |
+
by_name: bool | None = None,
|
| 164 |
+
) -> Any:
|
| 165 |
+
"""
|
| 166 |
+
Validate JSON data directly against the schema and return the validated Python object.
|
| 167 |
+
|
| 168 |
+
This method should be significantly faster than `validate_python(json.loads(json_data))` as it avoids the
|
| 169 |
+
need to create intermediate Python objects
|
| 170 |
+
|
| 171 |
+
It also handles constructing the correct Python type even in strict mode, where
|
| 172 |
+
`validate_python(json.loads(json_data))` would fail validation.
|
| 173 |
+
|
| 174 |
+
Arguments:
|
| 175 |
+
input: The JSON data to validate.
|
| 176 |
+
strict: Whether to validate the object in strict mode.
|
| 177 |
+
If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used.
|
| 178 |
+
extra: Whether to ignore, allow, or forbid extra data during model validation.
|
| 179 |
+
If `None`, the value of [`CoreConfig.extra_fields_behavior`][pydantic_core.core_schema.CoreConfig] is used.
|
| 180 |
+
context: The context to use for validation, this is passed to functional validators as
|
| 181 |
+
[`info.context`][pydantic_core.core_schema.ValidationInfo.context].
|
| 182 |
+
self_instance: An instance of a model set attributes on from validation.
|
| 183 |
+
allow_partial: Whether to allow partial validation; if `True` incomplete JSON will be parsed successfully
|
| 184 |
+
and errors in the last element of sequences and mappings are ignored.
|
| 185 |
+
`'trailing-strings'` means any final unfinished JSON string is included in the result.
|
| 186 |
+
by_alias: Whether to use the field's alias when validating against the provided input data.
|
| 187 |
+
by_name: Whether to use the field's name when validating against the provided input data.
|
| 188 |
+
|
| 189 |
+
Raises:
|
| 190 |
+
ValidationError: If validation fails or if the JSON data is invalid.
|
| 191 |
+
Exception: Other error types maybe raised if internal errors occur.
|
| 192 |
+
|
| 193 |
+
Returns:
|
| 194 |
+
The validated Python object.
|
| 195 |
+
"""
|
| 196 |
+
def validate_strings(
|
| 197 |
+
self,
|
| 198 |
+
input: _StringInput,
|
| 199 |
+
*,
|
| 200 |
+
strict: bool | None = None,
|
| 201 |
+
extra: ExtraBehavior | None = None,
|
| 202 |
+
context: Any | None = None,
|
| 203 |
+
allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False,
|
| 204 |
+
by_alias: bool | None = None,
|
| 205 |
+
by_name: bool | None = None,
|
| 206 |
+
) -> Any:
|
| 207 |
+
"""
|
| 208 |
+
Validate a string against the schema and return the validated Python object.
|
| 209 |
+
|
| 210 |
+
This is similar to `validate_json` but applies to scenarios where the input will be a string but not
|
| 211 |
+
JSON data, e.g. URL fragments, query parameters, etc.
|
| 212 |
+
|
| 213 |
+
Arguments:
|
| 214 |
+
input: The input as a string, or bytes/bytearray if `strict=False`.
|
| 215 |
+
strict: Whether to validate the object in strict mode.
|
| 216 |
+
If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used.
|
| 217 |
+
extra: Whether to ignore, allow, or forbid extra data during model validation.
|
| 218 |
+
If `None`, the value of [`CoreConfig.extra_fields_behavior`][pydantic_core.core_schema.CoreConfig] is used.
|
| 219 |
+
context: The context to use for validation, this is passed to functional validators as
|
| 220 |
+
[`info.context`][pydantic_core.core_schema.ValidationInfo.context].
|
| 221 |
+
allow_partial: Whether to allow partial validation; if `True` errors in the last element of sequences
|
| 222 |
+
and mappings are ignored.
|
| 223 |
+
`'trailing-strings'` means any final unfinished JSON string is included in the result.
|
| 224 |
+
by_alias: Whether to use the field's alias when validating against the provided input data.
|
| 225 |
+
by_name: Whether to use the field's name when validating against the provided input data.
|
| 226 |
+
|
| 227 |
+
Raises:
|
| 228 |
+
ValidationError: If validation fails or if the JSON data is invalid.
|
| 229 |
+
Exception: Other error types maybe raised if internal errors occur.
|
| 230 |
+
|
| 231 |
+
Returns:
|
| 232 |
+
The validated Python object.
|
| 233 |
+
"""
|
| 234 |
+
def validate_assignment(
|
| 235 |
+
self,
|
| 236 |
+
obj: Any,
|
| 237 |
+
field_name: str,
|
| 238 |
+
field_value: Any,
|
| 239 |
+
*,
|
| 240 |
+
strict: bool | None = None,
|
| 241 |
+
extra: ExtraBehavior | None = None,
|
| 242 |
+
from_attributes: bool | None = None,
|
| 243 |
+
context: Any | None = None,
|
| 244 |
+
by_alias: bool | None = None,
|
| 245 |
+
by_name: bool | None = None,
|
| 246 |
+
) -> dict[str, Any] | tuple[dict[str, Any], dict[str, Any] | None, set[str]]:
|
| 247 |
+
"""
|
| 248 |
+
Validate an assignment to a field on a model.
|
| 249 |
+
|
| 250 |
+
Arguments:
|
| 251 |
+
obj: The model instance being assigned to.
|
| 252 |
+
field_name: The name of the field to validate assignment for.
|
| 253 |
+
field_value: The value to assign to the field.
|
| 254 |
+
strict: Whether to validate the object in strict mode.
|
| 255 |
+
If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used.
|
| 256 |
+
extra: Whether to ignore, allow, or forbid extra data during model validation.
|
| 257 |
+
If `None`, the value of [`CoreConfig.extra_fields_behavior`][pydantic_core.core_schema.CoreConfig] is used.
|
| 258 |
+
from_attributes: Whether to validate objects as inputs to models by extracting attributes.
|
| 259 |
+
If `None`, the value of [`CoreConfig.from_attributes`][pydantic_core.core_schema.CoreConfig] is used.
|
| 260 |
+
context: The context to use for validation, this is passed to functional validators as
|
| 261 |
+
[`info.context`][pydantic_core.core_schema.ValidationInfo.context].
|
| 262 |
+
by_alias: Whether to use the field's alias when validating against the provided input data.
|
| 263 |
+
by_name: Whether to use the field's name when validating against the provided input data.
|
| 264 |
+
|
| 265 |
+
Raises:
|
| 266 |
+
ValidationError: If validation fails.
|
| 267 |
+
Exception: Other error types maybe raised if internal errors occur.
|
| 268 |
+
|
| 269 |
+
Returns:
|
| 270 |
+
Either the model dict or a tuple of `(model_data, model_extra, fields_set)`
|
| 271 |
+
"""
|
| 272 |
+
def get_default_value(self, *, strict: bool | None = None, context: Any = None) -> Some | None:
|
| 273 |
+
"""
|
| 274 |
+
Get the default value for the schema, including running default value validation.
|
| 275 |
+
|
| 276 |
+
Arguments:
|
| 277 |
+
strict: Whether to validate the default value in strict mode.
|
| 278 |
+
If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used.
|
| 279 |
+
context: The context to use for validation, this is passed to functional validators as
|
| 280 |
+
[`info.context`][pydantic_core.core_schema.ValidationInfo.context].
|
| 281 |
+
|
| 282 |
+
Raises:
|
| 283 |
+
ValidationError: If validation fails.
|
| 284 |
+
Exception: Other error types maybe raised if internal errors occur.
|
| 285 |
+
|
| 286 |
+
Returns:
|
| 287 |
+
`None` if the schema has no default value, otherwise a [`Some`][pydantic_core.Some] containing the default.
|
| 288 |
+
"""
|
| 289 |
+
|
| 290 |
+
# In reality, `bool` should be replaced by `Literal[True]` but mypy fails to correctly apply bidirectional type inference
|
| 291 |
+
# (e.g. when using `{'a': {'b': True}}`).
|
| 292 |
+
_IncEx: TypeAlias = set[int] | set[str] | Mapping[int, _IncEx | bool] | Mapping[str, _IncEx | bool]
|
| 293 |
+
|
| 294 |
+
@final
|
| 295 |
+
class SchemaSerializer:
|
| 296 |
+
"""
|
| 297 |
+
`SchemaSerializer` is the Python wrapper for `pydantic-core`'s Rust serialization logic, internally it owns one
|
| 298 |
+
`CombinedSerializer` which may in turn own more `CombinedSerializer`s which make up the full schema serializer.
|
| 299 |
+
"""
|
| 300 |
+
|
| 301 |
+
def __init__(self, schema: CoreSchema, config: CoreConfig | None = None, _use_prebuilt: bool = True) -> None:
|
| 302 |
+
"""Initializes the `SchemaSerializer`.
|
| 303 |
+
|
| 304 |
+
Arguments:
|
| 305 |
+
schema: The `CoreSchema` to use for serialization.
|
| 306 |
+
config: Optionally a [`CoreConfig`][pydantic_core.core_schema.CoreConfig] to to configure serialization.
|
| 307 |
+
_use_prebuilt: Whether to use pre-built validators (False during rebuilds to avoid stale references).
|
| 308 |
+
"""
|
| 309 |
+
|
| 310 |
+
def __new__(cls, schema: CoreSchema, config: CoreConfig | None = None, _use_prebuilt: bool = True) -> Self: ...
|
| 311 |
+
def to_python(
|
| 312 |
+
self,
|
| 313 |
+
value: Any,
|
| 314 |
+
*,
|
| 315 |
+
mode: str | None = None,
|
| 316 |
+
include: _IncEx | None = None,
|
| 317 |
+
exclude: _IncEx | None = None,
|
| 318 |
+
by_alias: bool | None = None,
|
| 319 |
+
exclude_unset: bool = False,
|
| 320 |
+
exclude_defaults: bool = False,
|
| 321 |
+
exclude_none: bool = False,
|
| 322 |
+
exclude_computed_fields: bool = False,
|
| 323 |
+
round_trip: bool = False,
|
| 324 |
+
warnings: bool | Literal['none', 'warn', 'error'] = True,
|
| 325 |
+
fallback: Callable[[Any], Any] | None = None,
|
| 326 |
+
serialize_as_any: bool = False,
|
| 327 |
+
polymorphic_serialization: bool | None = None,
|
| 328 |
+
context: Any | None = None,
|
| 329 |
+
) -> Any:
|
| 330 |
+
"""
|
| 331 |
+
Serialize/marshal a Python object to a Python object including transforming and filtering data.
|
| 332 |
+
|
| 333 |
+
Arguments:
|
| 334 |
+
value: The Python object to serialize.
|
| 335 |
+
mode: The serialization mode to use, either `'python'` or `'json'`, defaults to `'python'`. In JSON mode,
|
| 336 |
+
all values are converted to JSON compatible types, e.g. `None`, `int`, `float`, `str`, `list`, `dict`.
|
| 337 |
+
include: A set of fields to include, if `None` all fields are included.
|
| 338 |
+
exclude: A set of fields to exclude, if `None` no fields are excluded.
|
| 339 |
+
by_alias: Whether to use the alias names of fields.
|
| 340 |
+
exclude_unset: Whether to exclude fields that are not set,
|
| 341 |
+
e.g. are not included in `__pydantic_fields_set__`.
|
| 342 |
+
exclude_defaults: Whether to exclude fields that are equal to their default value.
|
| 343 |
+
exclude_none: Whether to exclude fields that have a value of `None`.
|
| 344 |
+
exclude_computed_fields: Whether to exclude computed fields.
|
| 345 |
+
round_trip: Whether to enable serialization and validation round-trip support.
|
| 346 |
+
warnings: How to handle invalid fields. False/"none" ignores them, True/"warn" logs errors,
|
| 347 |
+
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
|
| 348 |
+
fallback: A function to call when an unknown value is encountered,
|
| 349 |
+
if `None` a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
|
| 350 |
+
serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
|
| 351 |
+
polymorphic_serialization: Whether to use model and dataclass polymorphic serialization for this call.
|
| 352 |
+
context: The context to use for serialization, this is passed to functional serializers as
|
| 353 |
+
[`info.context`][pydantic_core.core_schema.SerializationInfo.context].
|
| 354 |
+
|
| 355 |
+
Raises:
|
| 356 |
+
PydanticSerializationError: If serialization fails and no `fallback` function is provided.
|
| 357 |
+
|
| 358 |
+
Returns:
|
| 359 |
+
The serialized Python object.
|
| 360 |
+
"""
|
| 361 |
+
def to_json(
|
| 362 |
+
self,
|
| 363 |
+
value: Any,
|
| 364 |
+
*,
|
| 365 |
+
indent: int | None = None,
|
| 366 |
+
ensure_ascii: bool = False,
|
| 367 |
+
include: _IncEx | None = None,
|
| 368 |
+
exclude: _IncEx | None = None,
|
| 369 |
+
by_alias: bool | None = None,
|
| 370 |
+
exclude_unset: bool = False,
|
| 371 |
+
exclude_defaults: bool = False,
|
| 372 |
+
exclude_none: bool = False,
|
| 373 |
+
exclude_computed_fields: bool = False,
|
| 374 |
+
round_trip: bool = False,
|
| 375 |
+
warnings: bool | Literal['none', 'warn', 'error'] = True,
|
| 376 |
+
fallback: Callable[[Any], Any] | None = None,
|
| 377 |
+
serialize_as_any: bool = False,
|
| 378 |
+
polymorphic_serialization: bool | None = None,
|
| 379 |
+
context: Any | None = None,
|
| 380 |
+
) -> bytes:
|
| 381 |
+
"""
|
| 382 |
+
Serialize a Python object to JSON including transforming and filtering data.
|
| 383 |
+
|
| 384 |
+
Arguments:
|
| 385 |
+
value: The Python object to serialize.
|
| 386 |
+
indent: If `None`, the JSON will be compact, otherwise it will be pretty-printed with the indent provided.
|
| 387 |
+
ensure_ascii: If `True`, the output is guaranteed to have all incoming non-ASCII characters escaped.
|
| 388 |
+
If `False` (the default), these characters will be output as-is.
|
| 389 |
+
include: A set of fields to include, if `None` all fields are included.
|
| 390 |
+
exclude: A set of fields to exclude, if `None` no fields are excluded.
|
| 391 |
+
by_alias: Whether to use the alias names of fields.
|
| 392 |
+
exclude_unset: Whether to exclude fields that are not set,
|
| 393 |
+
e.g. are not included in `__pydantic_fields_set__`.
|
| 394 |
+
exclude_defaults: Whether to exclude fields that are equal to their default value.
|
| 395 |
+
exclude_none: Whether to exclude fields that have a value of `None`.
|
| 396 |
+
exclude_computed_fields: Whether to exclude computed fields.
|
| 397 |
+
round_trip: Whether to enable serialization and validation round-trip support.
|
| 398 |
+
warnings: How to handle invalid fields. False/"none" ignores them, True/"warn" logs errors,
|
| 399 |
+
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
|
| 400 |
+
fallback: A function to call when an unknown value is encountered,
|
| 401 |
+
if `None` a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
|
| 402 |
+
serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
|
| 403 |
+
polymorphic_serialization: Whether to use model and dataclass polymorphic serialization for this call.
|
| 404 |
+
context: The context to use for serialization, this is passed to functional serializers as
|
| 405 |
+
[`info.context`][pydantic_core.core_schema.SerializationInfo.context].
|
| 406 |
+
|
| 407 |
+
Raises:
|
| 408 |
+
PydanticSerializationError: If serialization fails and no `fallback` function is provided.
|
| 409 |
+
|
| 410 |
+
Returns:
|
| 411 |
+
JSON bytes.
|
| 412 |
+
"""
|
| 413 |
+
|
| 414 |
+
def to_json(
    value: Any,
    *,
    indent: int | None = None,
    ensure_ascii: bool = False,
    include: _IncEx | None = None,
    exclude: _IncEx | None = None,
    # Note: In Pydantic 2.11, the default value of `by_alias` on `SchemaSerializer` was changed from `True` to `None`,
    # to be consistent with the Pydantic "dump" methods. However, the default of `True` was kept here for
    # backwards compatibility. In Pydantic V3, `by_alias` is expected to default to `True` everywhere:
    by_alias: bool = True,
    exclude_none: bool = False,
    round_trip: bool = False,
    timedelta_mode: Literal['iso8601', 'float'] = 'iso8601',
    temporal_mode: Literal['iso8601', 'seconds', 'milliseconds'] = 'iso8601',
    bytes_mode: Literal['utf8', 'base64', 'hex'] = 'utf8',
    inf_nan_mode: Literal['null', 'constants', 'strings'] = 'constants',
    serialize_unknown: bool = False,
    fallback: Callable[[Any], Any] | None = None,
    serialize_as_any: bool = False,
    polymorphic_serialization: bool | None = None,
    context: Any | None = None,
) -> bytes:
    """
    Serialize a Python object to JSON including transforming and filtering data.

    This is effectively a standalone version of [`SchemaSerializer.to_json`][pydantic_core.SchemaSerializer.to_json].

    Arguments:
        value: The Python object to serialize.
        indent: If `None`, the JSON will be compact, otherwise it will be pretty-printed with the indent provided.
        ensure_ascii: If `True`, the output is guaranteed to have all incoming non-ASCII characters escaped.
            If `False` (the default), these characters will be output as-is.
        include: A set of fields to include, if `None` all fields are included.
        exclude: A set of fields to exclude, if `None` no fields are excluded.
        by_alias: Whether to use the alias names of fields.
        exclude_none: Whether to exclude fields that have a value of `None`.
        round_trip: Whether to enable serialization and validation round-trip support.
        timedelta_mode: How to serialize `timedelta` objects, either `'iso8601'` or `'float'`.
        temporal_mode: How to serialize datetime-like objects (`datetime`, `date`, `time`), either `'iso8601'`,
            `'seconds'`, or `'milliseconds'`. `iso8601` returns an ISO 8601 string; `seconds` returns the Unix
            timestamp in seconds as a float; `milliseconds` returns the Unix timestamp in milliseconds as a float.
        bytes_mode: How to serialize `bytes` objects, either `'utf8'`, `'base64'`, or `'hex'`.
        inf_nan_mode: How to serialize `Infinity`, `-Infinity` and `NaN` values, either `'null'`, `'constants'`, or `'strings'`.
        serialize_unknown: Attempt to serialize unknown types, `str(value)` will be used, if that fails
            `"<Unserializable {value_type} object>"` will be used.
        fallback: A function to call when an unknown value is encountered,
            if `None` a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
        serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
        polymorphic_serialization: Whether to use model and dataclass polymorphic serialization for this call.
        context: The context to use for serialization, this is passed to functional serializers as
            [`info.context`][pydantic_core.core_schema.SerializationInfo.context].

    Raises:
        PydanticSerializationError: If serialization fails and no `fallback` function is provided.

    Returns:
        JSON bytes.
    """
|
| 473 |
+
|
| 474 |
+
def from_json(
    data: str | bytes | bytearray,
    *,
    allow_inf_nan: bool = True,
    cache_strings: bool | Literal['all', 'keys', 'none'] = True,
    allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False,
) -> Any:
    """
    Deserialize JSON data to a Python object.

    This is effectively a faster version of `json.loads()`, with some extra functionality.

    Arguments:
        data: The JSON data to deserialize.
        allow_inf_nan: Whether to allow `Infinity`, `-Infinity` and `NaN` values as `json.loads()` does by default.
        cache_strings: Whether to cache strings to avoid constructing new Python objects,
            this should have a significant impact on performance while increasing memory usage slightly,
            `all/True` means cache all strings, `keys` means cache only dict keys, `none/False` means no caching.
        allow_partial: Whether to allow partial deserialization, if `True` JSON data is returned if the end of the
            input is reached before the full object is deserialized, e.g. `["aa", "bb", "c` would return `['aa', 'bb']`.
            `'trailing-strings'` means any final unfinished JSON string is included in the result.

    Raises:
        ValueError: If deserialization fails.

    Returns:
        The deserialized Python object.
    """
|
| 502 |
+
|
| 503 |
+
def to_jsonable_python(
    value: Any,
    *,
    include: _IncEx | None = None,
    exclude: _IncEx | None = None,
    # Note: In Pydantic 2.11, the default value of `by_alias` on `SchemaSerializer` was changed from `True` to `None`,
    # to be consistent with the Pydantic "dump" methods. However, the default of `True` was kept here for
    # backwards compatibility. In Pydantic V3, `by_alias` is expected to default to `True` everywhere:
    by_alias: bool = True,
    exclude_none: bool = False,
    round_trip: bool = False,
    timedelta_mode: Literal['iso8601', 'float'] = 'iso8601',
    temporal_mode: Literal['iso8601', 'seconds', 'milliseconds'] = 'iso8601',
    bytes_mode: Literal['utf8', 'base64', 'hex'] = 'utf8',
    inf_nan_mode: Literal['null', 'constants', 'strings'] = 'constants',
    serialize_unknown: bool = False,
    fallback: Callable[[Any], Any] | None = None,
    serialize_as_any: bool = False,
    polymorphic_serialization: bool | None = None,
    context: Any | None = None,
) -> Any:
    """
    Serialize/marshal a Python object to a JSON-serializable Python object including transforming and filtering data.

    This is effectively a standalone version of
    [`SchemaSerializer.to_python(mode='json')`][pydantic_core.SchemaSerializer.to_python].

    Args:
        value: The Python object to serialize.
        include: A set of fields to include, if `None` all fields are included.
        exclude: A set of fields to exclude, if `None` no fields are excluded.
        by_alias: Whether to use the alias names of fields.
        exclude_none: Whether to exclude fields that have a value of `None`.
        round_trip: Whether to enable serialization and validation round-trip support.
        timedelta_mode: How to serialize `timedelta` objects, either `'iso8601'` or `'float'`.
        temporal_mode: How to serialize datetime-like objects (`datetime`, `date`, `time`), either `'iso8601'`,
            `'seconds'`, or `'milliseconds'`. `iso8601` returns an ISO 8601 string; `seconds` returns the Unix
            timestamp in seconds as a float; `milliseconds` returns the Unix timestamp in milliseconds as a float.
        bytes_mode: How to serialize `bytes` objects, either `'utf8'`, `'base64'`, or `'hex'`.
        inf_nan_mode: How to serialize `Infinity`, `-Infinity` and `NaN` values, either `'null'`, `'constants'`, or `'strings'`.
        serialize_unknown: Attempt to serialize unknown types, `str(value)` will be used, if that fails
            `"<Unserializable {value_type} object>"` will be used.
        fallback: A function to call when an unknown value is encountered,
            if `None` a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
        serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
        polymorphic_serialization: Whether to use model and dataclass polymorphic serialization for this call.
        context: The context to use for serialization, this is passed to functional serializers as
            [`info.context`][pydantic_core.core_schema.SerializationInfo.context].

    Raises:
        PydanticSerializationError: If serialization fails and no `fallback` function is provided.

    Returns:
        The serialized Python object.
    """
|
| 558 |
+
|
| 559 |
+
class Url(SupportsAllComparisons):
    """
    A URL type, internal logic uses the [url rust crate](https://docs.rs/url/latest/url/) originally developed
    by Mozilla.
    """

    def __init__(self, url: str) -> None: ...
    def __new__(cls, url: str) -> Self: ...
    @property
    def scheme(self) -> str: ...
    @property
    def username(self) -> str | None: ...
    @property
    def password(self) -> str | None: ...
    @property
    def host(self) -> str | None: ...
    def unicode_host(self) -> str | None: ...
    @property
    def port(self) -> int | None: ...
    @property
    def path(self) -> str | None: ...
    @property
    def query(self) -> str | None: ...
    def query_params(self) -> list[tuple[str, str]]: ...
    @property
    def fragment(self) -> str | None: ...
    def unicode_string(self) -> str: ...
    def __repr__(self) -> str: ...
    def __str__(self) -> str: ...
    # NOTE(review): annotated as returning `str`, unlike `MultiHostUrl.__deepcopy__`
    # which returns `Self` — looks inconsistent; confirm against upstream stubs.
    def __deepcopy__(self, memo: dict) -> str: ...
    @classmethod
    def build(
        cls,
        *,
        scheme: str,
        username: str | None = None,
        password: str | None = None,
        host: str,
        port: int | None = None,
        path: str | None = None,
        query: str | None = None,
        fragment: str | None = None,
    ) -> Self: ...
|
| 602 |
+
|
| 603 |
+
class MultiHostUrl(SupportsAllComparisons):
    """
    A URL type with support for multiple hosts, as used by some databases for DSNs, e.g. `https://foo.com,bar.com/path`.

    Internal URL logic uses the [url rust crate](https://docs.rs/url/latest/url/) originally developed
    by Mozilla.
    """

    def __init__(self, url: str) -> None: ...
    def __new__(cls, url: str) -> Self: ...
    @property
    def scheme(self) -> str: ...
    @property
    def path(self) -> str | None: ...
    @property
    def query(self) -> str | None: ...
    def query_params(self) -> list[tuple[str, str]]: ...
    @property
    def fragment(self) -> str | None: ...
    def hosts(self) -> list[MultiHostHost]: ...
    def unicode_string(self) -> str: ...
    def __repr__(self) -> str: ...
    def __str__(self) -> str: ...
    def __deepcopy__(self, memo: dict) -> Self: ...
    @classmethod
    def build(
        cls,
        *,
        scheme: str,
        hosts: list[MultiHostHost] | None = None,
        username: str | None = None,
        password: str | None = None,
        host: str | None = None,
        port: int | None = None,
        path: str | None = None,
        query: str | None = None,
        fragment: str | None = None,
    ) -> Self: ...
|
| 641 |
+
|
| 642 |
+
@final
class SchemaError(Exception):
    """
    Information about errors that occur while building a [`SchemaValidator`][pydantic_core.SchemaValidator]
    or [`SchemaSerializer`][pydantic_core.SchemaSerializer].
    """

    def error_count(self) -> int:
        """
        Returns:
            The number of errors in the schema.
        """
    def errors(self) -> list[ErrorDetails]:
        """
        Returns:
            A list of [`ErrorDetails`][pydantic_core.ErrorDetails] for each error in the schema.
        """
|
| 659 |
+
|
| 660 |
+
class ValidationError(ValueError):
|
| 661 |
+
"""
|
| 662 |
+
`ValidationError` is the exception raised by `pydantic-core` when validation fails, it contains a list of errors
|
| 663 |
+
which detail why validation failed.
|
| 664 |
+
"""
|
| 665 |
+
@classmethod
|
| 666 |
+
def from_exception_data(
|
| 667 |
+
cls,
|
| 668 |
+
title: str,
|
| 669 |
+
line_errors: list[InitErrorDetails],
|
| 670 |
+
input_type: Literal['python', 'json'] = 'python',
|
| 671 |
+
hide_input: bool = False,
|
| 672 |
+
) -> Self:
|
| 673 |
+
"""
|
| 674 |
+
Python constructor for a Validation Error.
|
| 675 |
+
|
| 676 |
+
Arguments:
|
| 677 |
+
title: The title of the error, as used in the heading of `str(validation_error)`
|
| 678 |
+
line_errors: A list of [`InitErrorDetails`][pydantic_core.InitErrorDetails] which contain information
|
| 679 |
+
about errors that occurred during validation.
|
| 680 |
+
input_type: Whether the error is for a Python object or JSON.
|
| 681 |
+
hide_input: Whether to hide the input value in the error message.
|
| 682 |
+
"""
|
| 683 |
+
@property
|
| 684 |
+
def title(self) -> str:
|
| 685 |
+
"""
|
| 686 |
+
The title of the error, as used in the heading of `str(validation_error)`.
|
| 687 |
+
"""
|
| 688 |
+
def error_count(self) -> int:
|
| 689 |
+
"""
|
| 690 |
+
Returns:
|
| 691 |
+
The number of errors in the validation error.
|
| 692 |
+
"""
|
| 693 |
+
def errors(
|
| 694 |
+
self, *, include_url: bool = True, include_context: bool = True, include_input: bool = True
|
| 695 |
+
) -> list[ErrorDetails]:
|
| 696 |
+
"""
|
| 697 |
+
Details about each error in the validation error.
|
| 698 |
+
|
| 699 |
+
Args:
|
| 700 |
+
include_url: Whether to include a URL to documentation on the error each error.
|
| 701 |
+
include_context: Whether to include the context of each error.
|
| 702 |
+
include_input: Whether to include the input value of each error.
|
| 703 |
+
|
| 704 |
+
Returns:
|
| 705 |
+
A list of [`ErrorDetails`][pydantic_core.ErrorDetails] for each error in the validation error.
|
| 706 |
+
"""
|
| 707 |
+
def json(
|
| 708 |
+
self,
|
| 709 |
+
*,
|
| 710 |
+
indent: int | None = None,
|
| 711 |
+
include_url: bool = True,
|
| 712 |
+
include_context: bool = True,
|
| 713 |
+
include_input: bool = True,
|
| 714 |
+
) -> str:
|
| 715 |
+
"""
|
| 716 |
+
Same as [`errors()`][pydantic_core.ValidationError.errors] but returns a JSON string.
|
| 717 |
+
|
| 718 |
+
Args:
|
| 719 |
+
indent: The number of spaces to indent the JSON by, or `None` for no indentation - compact JSON.
|
| 720 |
+
include_url: Whether to include a URL to documentation on the error each error.
|
| 721 |
+
include_context: Whether to include the context of each error.
|
| 722 |
+
include_input: Whether to include the input value of each error.
|
| 723 |
+
|
| 724 |
+
Returns:
|
| 725 |
+
a JSON string.
|
| 726 |
+
"""
|
| 727 |
+
|
| 728 |
+
def __repr__(self) -> str:
|
| 729 |
+
"""
|
| 730 |
+
A string representation of the validation error.
|
| 731 |
+
|
| 732 |
+
Whether or not documentation URLs are included in the repr is controlled by the
|
| 733 |
+
environment variable `PYDANTIC_ERRORS_INCLUDE_URL` being set to `1` or
|
| 734 |
+
`true`; by default, URLs are shown.
|
| 735 |
+
|
| 736 |
+
Due to implementation details, this environment variable can only be set once,
|
| 737 |
+
before the first validation error is created.
|
| 738 |
+
"""
|
| 739 |
+
|
| 740 |
+
class PydanticCustomError(ValueError):
|
| 741 |
+
"""A custom exception providing flexible error handling for Pydantic validators.
|
| 742 |
+
|
| 743 |
+
You can raise this error in custom validators when you'd like flexibility in regards to the error type, message, and context.
|
| 744 |
+
|
| 745 |
+
Example:
|
| 746 |
+
```py
|
| 747 |
+
from pydantic_core import PydanticCustomError
|
| 748 |
+
|
| 749 |
+
def custom_validator(v) -> None:
|
| 750 |
+
if v <= 10:
|
| 751 |
+
raise PydanticCustomError('custom_value_error', 'Value must be greater than {value}', {'value': 10, 'extra_context': 'extra_data'})
|
| 752 |
+
return v
|
| 753 |
+
```
|
| 754 |
+
|
| 755 |
+
Arguments:
|
| 756 |
+
error_type: The error type.
|
| 757 |
+
message_template: The message template.
|
| 758 |
+
context: The data to inject into the message template.
|
| 759 |
+
"""
|
| 760 |
+
|
| 761 |
+
def __init__(
|
| 762 |
+
self, error_type: LiteralString, message_template: LiteralString, context: dict[str, Any] | None = None, /
|
| 763 |
+
) -> None: ...
|
| 764 |
+
@property
|
| 765 |
+
def context(self) -> dict[str, Any] | None:
|
| 766 |
+
"""Values which are required to render the error message, and could hence be useful in passing error data forward."""
|
| 767 |
+
|
| 768 |
+
@property
|
| 769 |
+
def type(self) -> str:
|
| 770 |
+
"""The error type associated with the error. For consistency with Pydantic, this is typically a snake_case string."""
|
| 771 |
+
|
| 772 |
+
@property
|
| 773 |
+
def message_template(self) -> str:
|
| 774 |
+
"""The message template associated with the error. This is a string that can be formatted with context variables in `{curly_braces}`."""
|
| 775 |
+
|
| 776 |
+
def message(self) -> str:
|
| 777 |
+
"""The formatted message associated with the error. This presents as the message template with context variables appropriately injected."""
|
| 778 |
+
|
| 779 |
+
@final
class PydanticKnownError(ValueError):
    """A helper class for raising exceptions that mimic Pydantic's built-in exceptions, with more flexibility in regards to context.

    Unlike [`PydanticCustomError`][pydantic_core.PydanticCustomError], the `error_type` argument must be a known `ErrorType`.

    Example:
        ```py
        from pydantic_core import PydanticKnownError

        def custom_validator(v) -> None:
            if v <= 10:
                raise PydanticKnownError('greater_than', {'gt': 10})
            return v
        ```

    Arguments:
        error_type: The error type.
        context: The data to inject into the message template.
    """

    def __init__(self, error_type: ErrorType, context: dict[str, Any] | None = None, /) -> None: ...
    @property
    def context(self) -> dict[str, Any] | None:
        """Values which are required to render the error message, and could hence be useful in passing error data forward."""

    @property
    def type(self) -> ErrorType:
        """The type of the error."""

    @property
    def message_template(self) -> str:
        """The message template associated with the provided error type. This is a string that can be formatted with context variables in `{curly_braces}`."""

    def message(self) -> str:
        """The formatted message associated with the error. This presents as the message template with context variables appropriately injected."""
|
| 815 |
+
|
| 816 |
+
@final
|
| 817 |
+
class PydanticOmit(Exception):
|
| 818 |
+
"""An exception to signal that a field should be omitted from a generated result.
|
| 819 |
+
|
| 820 |
+
This could span from omitting a field from a JSON Schema to omitting a field from a serialized result.
|
| 821 |
+
Upcoming: more robust support for using PydanticOmit in custom serializers is still in development.
|
| 822 |
+
Right now, this is primarily used in the JSON Schema generation process.
|
| 823 |
+
|
| 824 |
+
Example:
|
| 825 |
+
```py
|
| 826 |
+
from typing import Callable
|
| 827 |
+
|
| 828 |
+
from pydantic_core import PydanticOmit
|
| 829 |
+
|
| 830 |
+
from pydantic import BaseModel
|
| 831 |
+
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
|
| 832 |
+
|
| 833 |
+
|
| 834 |
+
class MyGenerateJsonSchema(GenerateJsonSchema):
|
| 835 |
+
def handle_invalid_for_json_schema(self, schema, error_info) -> JsonSchemaValue:
|
| 836 |
+
raise PydanticOmit
|
| 837 |
+
|
| 838 |
+
|
| 839 |
+
class Predicate(BaseModel):
|
| 840 |
+
name: str = 'no-op'
|
| 841 |
+
func: Callable = lambda x: x
|
| 842 |
+
|
| 843 |
+
|
| 844 |
+
instance_example = Predicate()
|
| 845 |
+
|
| 846 |
+
validation_schema = instance_example.model_json_schema(schema_generator=MyGenerateJsonSchema, mode='validation')
|
| 847 |
+
print(validation_schema)
|
| 848 |
+
'''
|
| 849 |
+
{'properties': {'name': {'default': 'no-op', 'title': 'Name', 'type': 'string'}}, 'title': 'Predicate', 'type': 'object'}
|
| 850 |
+
'''
|
| 851 |
+
```
|
| 852 |
+
|
| 853 |
+
For a more in depth example / explanation, see the [customizing JSON schema](../concepts/json_schema.md#customizing-the-json-schema-generation-process) docs.
|
| 854 |
+
"""
|
| 855 |
+
|
| 856 |
+
def __new__(cls) -> Self: ...
|
| 857 |
+
|
| 858 |
+
@final
|
| 859 |
+
class PydanticUseDefault(Exception):
|
| 860 |
+
"""An exception to signal that standard validation either failed or should be skipped, and the default value should be used instead.
|
| 861 |
+
|
| 862 |
+
This warning can be raised in custom validation functions to redirect the flow of validation.
|
| 863 |
+
|
| 864 |
+
Example:
|
| 865 |
+
```py
|
| 866 |
+
from pydantic_core import PydanticUseDefault
|
| 867 |
+
from datetime import datetime
|
| 868 |
+
from pydantic import BaseModel, field_validator
|
| 869 |
+
|
| 870 |
+
|
| 871 |
+
class Event(BaseModel):
|
| 872 |
+
name: str = 'meeting'
|
| 873 |
+
time: datetime
|
| 874 |
+
|
| 875 |
+
@field_validator('name', mode='plain')
|
| 876 |
+
def name_must_be_present(cls, v) -> str:
|
| 877 |
+
if not v or not isinstance(v, str):
|
| 878 |
+
raise PydanticUseDefault()
|
| 879 |
+
return v
|
| 880 |
+
|
| 881 |
+
|
| 882 |
+
event1 = Event(name='party', time=datetime(2024, 1, 1, 12, 0, 0))
|
| 883 |
+
print(repr(event1))
|
| 884 |
+
# > Event(name='party', time=datetime.datetime(2024, 1, 1, 12, 0))
|
| 885 |
+
event2 = Event(time=datetime(2024, 1, 1, 12, 0, 0))
|
| 886 |
+
print(repr(event2))
|
| 887 |
+
# > Event(name='meeting', time=datetime.datetime(2024, 1, 1, 12, 0))
|
| 888 |
+
```
|
| 889 |
+
|
| 890 |
+
For an additional example, see the [validating partial json data](../concepts/json.md#partial-json-parsing) section of the Pydantic documentation.
|
| 891 |
+
"""
|
| 892 |
+
|
| 893 |
+
def __new__(cls) -> Self: ...
|
| 894 |
+
|
| 895 |
+
@final
class PydanticSerializationError(ValueError):
    """An error raised when an issue occurs during serialization.

    In custom serializers, this error can be used to indicate that serialization has failed.

    Arguments:
        message: The message associated with the error.
    """

    def __init__(self, message: str, /) -> None: ...
|
| 906 |
+
|
| 907 |
+
@final
class PydanticSerializationUnexpectedValue(ValueError):
    """An error raised when an unexpected value is encountered during serialization.

    This error is often caught and coerced into a warning, as `pydantic-core` generally makes a best attempt
    at serializing values, in contrast with validation where errors are eagerly raised.

    Example:
        ```py
        from pydantic import BaseModel, field_serializer
        from pydantic_core import PydanticSerializationUnexpectedValue

        class BasicPoint(BaseModel):
            x: int
            y: int

            @field_serializer('*')
            def serialize(self, v):
                if not isinstance(v, int):
                    raise PydanticSerializationUnexpectedValue(f'Expected type `int`, got {type(v)} with value {v}')
                return v

        point = BasicPoint(x=1, y=2)
        # some sort of mutation
        point.x = 'a'

        print(point.model_dump())
        '''
        UserWarning: Pydantic serializer warnings:
        PydanticSerializationUnexpectedValue(Expected type `int`, got <class 'str'> with value a)
        return self.__pydantic_serializer__.to_python(
        {'x': 'a', 'y': 2}
        '''
        ```

    This is often used internally in `pydantic-core` when unexpected types are encountered during serialization,
    but it can also be used by users in custom serializers, as seen above.

    Arguments:
        message: The message associated with the unexpected value.
    """

    def __init__(self, message: str, /) -> None: ...
|
| 950 |
+
|
| 951 |
+
@final
|
| 952 |
+
class ArgsKwargs:
|
| 953 |
+
"""A construct used to store arguments and keyword arguments for a function call.
|
| 954 |
+
|
| 955 |
+
This data structure is generally used to store information for core schemas associated with functions (like in an arguments schema).
|
| 956 |
+
This data structure is also currently used for some validation against dataclasses.
|
| 957 |
+
|
| 958 |
+
Example:
|
| 959 |
+
```py
|
| 960 |
+
from pydantic.dataclasses import dataclass
|
| 961 |
+
from pydantic import model_validator
|
| 962 |
+
|
| 963 |
+
|
| 964 |
+
@dataclass
|
| 965 |
+
class Model:
|
| 966 |
+
a: int
|
| 967 |
+
b: int
|
| 968 |
+
|
| 969 |
+
@model_validator(mode="before")
|
| 970 |
+
@classmethod
|
| 971 |
+
def no_op_validator(cls, values):
|
| 972 |
+
print(values)
|
| 973 |
+
return values
|
| 974 |
+
|
| 975 |
+
Model(1, b=2)
|
| 976 |
+
#> ArgsKwargs((1,), {"b": 2})
|
| 977 |
+
|
| 978 |
+
Model(1, 2)
|
| 979 |
+
#> ArgsKwargs((1, 2), {})
|
| 980 |
+
|
| 981 |
+
Model(a=1, b=2)
|
| 982 |
+
#> ArgsKwargs((), {"a": 1, "b": 2})
|
| 983 |
+
```
|
| 984 |
+
"""
|
| 985 |
+
|
| 986 |
+
def __init__(self, args: tuple[Any, ...], kwargs: dict[str, Any] | None = None) -> None:
|
| 987 |
+
"""Initializes the `ArgsKwargs`.
|
| 988 |
+
|
| 989 |
+
Arguments:
|
| 990 |
+
args: The arguments (inherently ordered) for a function call.
|
| 991 |
+
kwargs: The keyword arguments for a function call
|
| 992 |
+
"""
|
| 993 |
+
|
| 994 |
+
def __new__(cls, args: tuple[Any, ...], kwargs: dict[str, Any] | None = None) -> Self: ...
|
| 995 |
+
@property
|
| 996 |
+
def args(self) -> tuple[Any, ...]:
|
| 997 |
+
"""The arguments (inherently ordered) for a function call."""
|
| 998 |
+
|
| 999 |
+
@property
|
| 1000 |
+
def kwargs(self) -> dict[str, Any] | None:
|
| 1001 |
+
"""The keyword arguments for a function call."""
|
| 1002 |
+
|
| 1003 |
+
@final
|
| 1004 |
+
class PydanticUndefinedType:
|
| 1005 |
+
"""A type used as a sentinel for undefined values."""
|
| 1006 |
+
|
| 1007 |
+
def __copy__(self) -> Self: ...
|
| 1008 |
+
def __deepcopy__(self, memo: Any) -> Self: ...
|
| 1009 |
+
|
| 1010 |
+
PydanticUndefined: PydanticUndefinedType
|
| 1011 |
+
|
| 1012 |
+
def list_all_errors() -> list[ErrorTypeInfo]:
|
| 1013 |
+
"""
|
| 1014 |
+
Get information about all built-in errors.
|
| 1015 |
+
|
| 1016 |
+
Returns:
|
| 1017 |
+
A list of `ErrorTypeInfo` typed dicts.
|
| 1018 |
+
"""
|
| 1019 |
+
@final
|
| 1020 |
+
class TzInfo(datetime.tzinfo):
|
| 1021 |
+
"""An `pydantic-core` implementation of the abstract [`datetime.tzinfo`][] class."""
|
| 1022 |
+
|
| 1023 |
+
def __init__(self, seconds: float = 0.0) -> None:
|
| 1024 |
+
"""Initializes the `TzInfo`.
|
| 1025 |
+
|
| 1026 |
+
Arguments:
|
| 1027 |
+
seconds: The offset from UTC in seconds. Defaults to 0.0 (UTC).
|
| 1028 |
+
"""
|
| 1029 |
+
|
| 1030 |
+
def __new__(cls, seconds: float = 0.0) -> Self: ...
|
| 1031 |
+
|
| 1032 |
+
# Docstrings for attributes sourced from the abstract base class, [`datetime.tzinfo`](https://docs.python.org/3/library/datetime.html#datetime.tzinfo).
|
| 1033 |
+
|
| 1034 |
+
def tzname(self, dt: datetime.datetime | None) -> str | None:
|
| 1035 |
+
"""Return the time zone name corresponding to the [`datetime`][datetime.datetime] object _dt_, as a string.
|
| 1036 |
+
|
| 1037 |
+
For more info, see [`tzinfo.tzname`][datetime.tzinfo.tzname].
|
| 1038 |
+
"""
|
| 1039 |
+
|
| 1040 |
+
def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None:
|
| 1041 |
+
"""Return offset of local time from UTC, as a [`timedelta`][datetime.timedelta] object that is positive east of UTC. If local time is west of UTC, this should be negative.
|
| 1042 |
+
|
| 1043 |
+
More info can be found at [`tzinfo.utcoffset`][datetime.tzinfo.utcoffset].
|
| 1044 |
+
"""
|
| 1045 |
+
|
| 1046 |
+
def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None:
|
| 1047 |
+
"""Return the daylight saving time (DST) adjustment, as a [`timedelta`][datetime.timedelta] object or `None` if DST information isn’t known.
|
| 1048 |
+
|
| 1049 |
+
More info can be found at[`tzinfo.dst`][datetime.tzinfo.dst]."""
|
| 1050 |
+
|
| 1051 |
+
def fromutc(self, dt: datetime.datetime) -> datetime.datetime:
|
| 1052 |
+
"""Adjust the date and time data associated datetime object _dt_, returning an equivalent datetime in self’s local time.
|
| 1053 |
+
|
| 1054 |
+
More info can be found at [`tzinfo.fromutc`][datetime.tzinfo.fromutc]."""
|
| 1055 |
+
|
| 1056 |
+
def __deepcopy__(self, _memo: dict[Any, Any]) -> TzInfo: ...
|
venv/lib/python3.12/site-packages/pydantic_core/core_schema.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
venv/lib/python3.12/site-packages/pydantic_core/py.typed
ADDED
|
File without changes
|
venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.4
|
| 2 |
+
Name: typing_extensions
|
| 3 |
+
Version: 4.15.0
|
| 4 |
+
Summary: Backported and Experimental Type Hints for Python 3.9+
|
| 5 |
+
Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing
|
| 6 |
+
Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" <levkivskyi@gmail.com>
|
| 7 |
+
Requires-Python: >=3.9
|
| 8 |
+
Description-Content-Type: text/markdown
|
| 9 |
+
License-Expression: PSF-2.0
|
| 10 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 11 |
+
Classifier: Environment :: Console
|
| 12 |
+
Classifier: Intended Audience :: Developers
|
| 13 |
+
Classifier: Operating System :: OS Independent
|
| 14 |
+
Classifier: Programming Language :: Python :: 3
|
| 15 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 16 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.14
|
| 22 |
+
Classifier: Topic :: Software Development
|
| 23 |
+
License-File: LICENSE
|
| 24 |
+
Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues
|
| 25 |
+
Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md
|
| 26 |
+
Project-URL: Documentation, https://typing-extensions.readthedocs.io/
|
| 27 |
+
Project-URL: Home, https://github.com/python/typing_extensions
|
| 28 |
+
Project-URL: Q & A, https://github.com/python/typing/discussions
|
| 29 |
+
Project-URL: Repository, https://github.com/python/typing_extensions
|
| 30 |
+
|
| 31 |
+
# Typing Extensions
|
| 32 |
+
|
| 33 |
+
[](https://gitter.im/python/typing)
|
| 34 |
+
|
| 35 |
+
[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) –
|
| 36 |
+
[PyPI](https://pypi.org/project/typing-extensions/)
|
| 37 |
+
|
| 38 |
+
## Overview
|
| 39 |
+
|
| 40 |
+
The `typing_extensions` module serves two related purposes:
|
| 41 |
+
|
| 42 |
+
- Enable use of new type system features on older Python versions. For example,
|
| 43 |
+
`typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows
|
| 44 |
+
users on previous Python versions to use it too.
|
| 45 |
+
- Enable experimentation with new type system PEPs before they are accepted and
|
| 46 |
+
added to the `typing` module.
|
| 47 |
+
|
| 48 |
+
`typing_extensions` is treated specially by static type checkers such as
|
| 49 |
+
mypy and pyright. Objects defined in `typing_extensions` are treated the same
|
| 50 |
+
way as equivalent forms in `typing`.
|
| 51 |
+
|
| 52 |
+
`typing_extensions` uses
|
| 53 |
+
[Semantic Versioning](https://semver.org/). The
|
| 54 |
+
major version will be incremented only for backwards-incompatible changes.
|
| 55 |
+
Therefore, it's safe to depend
|
| 56 |
+
on `typing_extensions` like this: `typing_extensions ~=x.y`,
|
| 57 |
+
where `x.y` is the first version that includes all features you need.
|
| 58 |
+
[This](https://packaging.python.org/en/latest/specifications/version-specifiers/#compatible-release)
|
| 59 |
+
is equivalent to `typing_extensions >=x.y, <(x+1)`. Do not depend on `~= x.y.z`
|
| 60 |
+
unless you really know what you're doing; that defeats the purpose of
|
| 61 |
+
semantic versioning.
|
| 62 |
+
|
| 63 |
+
## Included items
|
| 64 |
+
|
| 65 |
+
See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a
|
| 66 |
+
complete listing of module contents.
|
| 67 |
+
|
| 68 |
+
## Contributing
|
| 69 |
+
|
| 70 |
+
See [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)
|
| 71 |
+
for how to contribute to `typing_extensions`.
|
| 72 |
+
|
venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__pycache__/typing_extensions.cpython-312.pyc,,
|
| 2 |
+
typing_extensions-4.15.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 3 |
+
typing_extensions-4.15.0.dist-info/METADATA,sha256=wTg3j-jxiTSsmd4GBTXFPsbBOu7WXpTDJkHafuMZKnI,3259
|
| 4 |
+
typing_extensions-4.15.0.dist-info/RECORD,,
|
| 5 |
+
typing_extensions-4.15.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
|
| 6 |
+
typing_extensions-4.15.0.dist-info/licenses/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936
|
| 7 |
+
typing_extensions.py,sha256=Qz0R0XDTok0usGXrwb_oSM6n49fOaFZ6tSvqLUwvftg,160429
|
venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: flit 3.12.0
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
venv/lib/python3.12/site-packages/typing_inspection/__init__.py
ADDED
|
File without changes
|
venv/lib/python3.12/site-packages/typing_inspection/introspection.py
ADDED
|
@@ -0,0 +1,587 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""High-level introspection utilities, used to inspect type annotations."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import sys
|
| 6 |
+
import types
|
| 7 |
+
from collections.abc import Generator
|
| 8 |
+
from dataclasses import InitVar
|
| 9 |
+
from enum import Enum, IntEnum, auto
|
| 10 |
+
from typing import Any, Literal, NamedTuple, cast
|
| 11 |
+
|
| 12 |
+
from typing_extensions import TypeAlias, assert_never, get_args, get_origin
|
| 13 |
+
|
| 14 |
+
from . import typing_objects
|
| 15 |
+
|
| 16 |
+
__all__ = (
|
| 17 |
+
'AnnotationSource',
|
| 18 |
+
'ForbiddenQualifier',
|
| 19 |
+
'InspectedAnnotation',
|
| 20 |
+
'Qualifier',
|
| 21 |
+
'get_literal_values',
|
| 22 |
+
'inspect_annotation',
|
| 23 |
+
'is_union_origin',
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
if sys.version_info >= (3, 14) or sys.version_info < (3, 10):
|
| 27 |
+
|
| 28 |
+
def is_union_origin(obj: Any, /) -> bool:
|
| 29 |
+
"""Return whether the provided origin is the union form.
|
| 30 |
+
|
| 31 |
+
```pycon
|
| 32 |
+
>>> is_union_origin(typing.Union)
|
| 33 |
+
True
|
| 34 |
+
>>> is_union_origin(get_origin(int | str))
|
| 35 |
+
True
|
| 36 |
+
>>> is_union_origin(types.UnionType)
|
| 37 |
+
True
|
| 38 |
+
```
|
| 39 |
+
|
| 40 |
+
!!! note
|
| 41 |
+
Since Python 3.14, both `Union[<t1>, <t2>, ...]` and `<t1> | <t2> | ...` forms create instances
|
| 42 |
+
of the same [`typing.Union`][] class. As such, it is recommended to not use this function
|
| 43 |
+
anymore (provided that you only support Python 3.14 or greater), and instead use the
|
| 44 |
+
[`typing_objects.is_union()`][typing_inspection.typing_objects.is_union] function directly:
|
| 45 |
+
|
| 46 |
+
```python
|
| 47 |
+
from typing import Union, get_origin
|
| 48 |
+
|
| 49 |
+
from typing_inspection import typing_objects
|
| 50 |
+
|
| 51 |
+
typ = int | str # Or Union[int, str]
|
| 52 |
+
origin = get_origin(typ)
|
| 53 |
+
if typing_objects.is_union(origin):
|
| 54 |
+
...
|
| 55 |
+
```
|
| 56 |
+
"""
|
| 57 |
+
return typing_objects.is_union(obj)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
else:
|
| 61 |
+
|
| 62 |
+
def is_union_origin(obj: Any, /) -> bool:
|
| 63 |
+
"""Return whether the provided origin is the union form.
|
| 64 |
+
|
| 65 |
+
```pycon
|
| 66 |
+
>>> is_union_origin(typing.Union)
|
| 67 |
+
True
|
| 68 |
+
>>> is_union_origin(get_origin(int | str))
|
| 69 |
+
True
|
| 70 |
+
>>> is_union_origin(types.UnionType)
|
| 71 |
+
True
|
| 72 |
+
```
|
| 73 |
+
|
| 74 |
+
!!! note
|
| 75 |
+
Since Python 3.14, both `Union[<t1>, <t2>, ...]` and `<t1> | <t2> | ...` forms create instances
|
| 76 |
+
of the same [`typing.Union`][] class. As such, it is recommended to not use this function
|
| 77 |
+
anymore (provided that you only support Python 3.14 or greater), and instead use the
|
| 78 |
+
[`typing_objects.is_union()`][typing_inspection.typing_objects.is_union] function directly:
|
| 79 |
+
|
| 80 |
+
```python
|
| 81 |
+
from typing import Union, get_origin
|
| 82 |
+
|
| 83 |
+
from typing_inspection import typing_objects
|
| 84 |
+
|
| 85 |
+
typ = int | str # Or Union[int, str]
|
| 86 |
+
origin = get_origin(typ)
|
| 87 |
+
if typing_objects.is_union(origin):
|
| 88 |
+
...
|
| 89 |
+
```
|
| 90 |
+
"""
|
| 91 |
+
return typing_objects.is_union(obj) or obj is types.UnionType
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def _literal_type_check(value: Any, /) -> None:
|
| 95 |
+
"""Type check the provided literal value against the legal parameters."""
|
| 96 |
+
if (
|
| 97 |
+
not isinstance(value, (int, bytes, str, bool, Enum, typing_objects.NoneType))
|
| 98 |
+
and value is not typing_objects.NoneType
|
| 99 |
+
):
|
| 100 |
+
raise TypeError(f'{value} is not a valid literal value, must be one of: int, bytes, str, Enum, None.')
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def get_literal_values(
|
| 104 |
+
annotation: Any,
|
| 105 |
+
/,
|
| 106 |
+
*,
|
| 107 |
+
type_check: bool = False,
|
| 108 |
+
unpack_type_aliases: Literal['skip', 'lenient', 'eager'] = 'eager',
|
| 109 |
+
) -> Generator[Any]:
|
| 110 |
+
"""Yield the values contained in the provided [`Literal`][typing.Literal] [special form][].
|
| 111 |
+
|
| 112 |
+
Args:
|
| 113 |
+
annotation: The [`Literal`][typing.Literal] [special form][] to unpack.
|
| 114 |
+
type_check: Whether to check if the literal values are [legal parameters][literal-legal-parameters].
|
| 115 |
+
Raises a [`TypeError`][] otherwise.
|
| 116 |
+
unpack_type_aliases: What to do when encountering [PEP 695](https://peps.python.org/pep-0695/)
|
| 117 |
+
[type aliases][type-aliases]. Can be one of:
|
| 118 |
+
|
| 119 |
+
- `'skip'`: Do not try to parse type aliases. Note that this can lead to incorrect results:
|
| 120 |
+
```pycon
|
| 121 |
+
>>> type MyAlias = Literal[1, 2]
|
| 122 |
+
>>> list(get_literal_values(Literal[MyAlias, 3], unpack_type_aliases="skip"))
|
| 123 |
+
[MyAlias, 3]
|
| 124 |
+
```
|
| 125 |
+
|
| 126 |
+
- `'lenient'`: Try to parse type aliases, and fallback to `'skip'` if the type alias can't be inspected
|
| 127 |
+
(because of an undefined forward reference).
|
| 128 |
+
|
| 129 |
+
- `'eager'`: Parse type aliases and raise any encountered [`NameError`][] exceptions (the default):
|
| 130 |
+
```pycon
|
| 131 |
+
>>> type MyAlias = Literal[1, 2]
|
| 132 |
+
>>> list(get_literal_values(Literal[MyAlias, 3], unpack_type_aliases="eager"))
|
| 133 |
+
[1, 2, 3]
|
| 134 |
+
```
|
| 135 |
+
|
| 136 |
+
Note:
|
| 137 |
+
While `None` is [equivalent to][none] `type(None)`, the runtime implementation of [`Literal`][typing.Literal]
|
| 138 |
+
does not de-duplicate them. This function makes sure this de-duplication is applied:
|
| 139 |
+
|
| 140 |
+
```pycon
|
| 141 |
+
>>> list(get_literal_values(Literal[NoneType, None]))
|
| 142 |
+
[None]
|
| 143 |
+
```
|
| 144 |
+
|
| 145 |
+
Example:
|
| 146 |
+
```pycon
|
| 147 |
+
>>> type Ints = Literal[1, 2]
|
| 148 |
+
>>> list(get_literal_values(Literal[1, Ints], unpack_type_alias="skip"))
|
| 149 |
+
["a", Ints]
|
| 150 |
+
>>> list(get_literal_values(Literal[1, Ints]))
|
| 151 |
+
[1, 2]
|
| 152 |
+
>>> list(get_literal_values(Literal[1.0], type_check=True))
|
| 153 |
+
Traceback (most recent call last):
|
| 154 |
+
...
|
| 155 |
+
TypeError: 1.0 is not a valid literal value, must be one of: int, bytes, str, Enum, None.
|
| 156 |
+
```
|
| 157 |
+
"""
|
| 158 |
+
# `literal` is guaranteed to be a `Literal[...]` special form, so use
|
| 159 |
+
# `__args__` directly instead of calling `get_args()`.
|
| 160 |
+
|
| 161 |
+
if unpack_type_aliases == 'skip':
|
| 162 |
+
_has_none = False
|
| 163 |
+
# `Literal` parameters are already deduplicated, no need to do it ourselves.
|
| 164 |
+
# (we only check for `None` and `NoneType`, which should be considered as duplicates).
|
| 165 |
+
for arg in annotation.__args__:
|
| 166 |
+
if type_check:
|
| 167 |
+
_literal_type_check(arg)
|
| 168 |
+
if arg is None or arg is typing_objects.NoneType:
|
| 169 |
+
if not _has_none:
|
| 170 |
+
yield None
|
| 171 |
+
_has_none = True
|
| 172 |
+
else:
|
| 173 |
+
yield arg
|
| 174 |
+
else:
|
| 175 |
+
# We'll need to manually deduplicate parameters, see the `Literal` implementation in `typing`.
|
| 176 |
+
values_and_type: list[tuple[Any, type[Any]]] = []
|
| 177 |
+
|
| 178 |
+
for arg in annotation.__args__:
|
| 179 |
+
# Note: we could also check for generic aliases with a type alias as an origin.
|
| 180 |
+
# However, it is very unlikely that this happens as type variables can't appear in
|
| 181 |
+
# `Literal` forms, so the only valid (but unnecessary) use case would be something like:
|
| 182 |
+
# `type Test[T] = Literal['a']` (and then use `Test[SomeType]`).
|
| 183 |
+
if typing_objects.is_typealiastype(arg):
|
| 184 |
+
try:
|
| 185 |
+
alias_value = arg.__value__
|
| 186 |
+
except NameError:
|
| 187 |
+
if unpack_type_aliases == 'eager':
|
| 188 |
+
raise
|
| 189 |
+
# unpack_type_aliases == "lenient":
|
| 190 |
+
if type_check:
|
| 191 |
+
_literal_type_check(arg)
|
| 192 |
+
values_and_type.append((arg, type(arg)))
|
| 193 |
+
else:
|
| 194 |
+
sub_args = get_literal_values(
|
| 195 |
+
alias_value, type_check=type_check, unpack_type_aliases=unpack_type_aliases
|
| 196 |
+
)
|
| 197 |
+
values_and_type.extend((a, type(a)) for a in sub_args) # pyright: ignore[reportUnknownArgumentType]
|
| 198 |
+
else:
|
| 199 |
+
if type_check:
|
| 200 |
+
_literal_type_check(arg)
|
| 201 |
+
if arg is typing_objects.NoneType:
|
| 202 |
+
values_and_type.append((None, typing_objects.NoneType))
|
| 203 |
+
else:
|
| 204 |
+
values_and_type.append((arg, type(arg))) # pyright: ignore[reportUnknownArgumentType]
|
| 205 |
+
|
| 206 |
+
try:
|
| 207 |
+
dct = dict.fromkeys(values_and_type)
|
| 208 |
+
except TypeError:
|
| 209 |
+
# Unhashable parameters, the Python implementation allows them
|
| 210 |
+
yield from (p for p, _ in values_and_type)
|
| 211 |
+
else:
|
| 212 |
+
yield from (p for p, _ in dct)
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
Qualifier: TypeAlias = Literal['required', 'not_required', 'read_only', 'class_var', 'init_var', 'final']
|
| 216 |
+
"""A [type qualifier][]."""
|
| 217 |
+
|
| 218 |
+
_all_qualifiers: set[Qualifier] = set(get_args(Qualifier))
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
# TODO at some point, we could switch to an enum flag, so that multiple sources
|
| 222 |
+
# can be combined. However, is there a need for this?
|
| 223 |
+
class AnnotationSource(IntEnum):
|
| 224 |
+
# TODO if/when https://peps.python.org/pep-0767/ is accepted, add 'read_only'
|
| 225 |
+
# to CLASS and NAMED_TUPLE (even though for named tuples it is redundant).
|
| 226 |
+
|
| 227 |
+
"""The source of an annotation, e.g. a class or a function.
|
| 228 |
+
|
| 229 |
+
Depending on the source, different [type qualifiers][type qualifier] may be (dis)allowed.
|
| 230 |
+
"""
|
| 231 |
+
|
| 232 |
+
ASSIGNMENT_OR_VARIABLE = auto()
|
| 233 |
+
"""An annotation used in an assignment or variable annotation:
|
| 234 |
+
|
| 235 |
+
```python
|
| 236 |
+
x: Final[int] = 1
|
| 237 |
+
y: Final[str]
|
| 238 |
+
```
|
| 239 |
+
|
| 240 |
+
**Allowed type qualifiers:** [`Final`][typing.Final].
|
| 241 |
+
"""
|
| 242 |
+
|
| 243 |
+
CLASS = auto()
|
| 244 |
+
"""An annotation used in the body of a class:
|
| 245 |
+
|
| 246 |
+
```python
|
| 247 |
+
class Test:
|
| 248 |
+
x: Final[int] = 1
|
| 249 |
+
y: ClassVar[str]
|
| 250 |
+
```
|
| 251 |
+
|
| 252 |
+
**Allowed type qualifiers:** [`ClassVar`][typing.ClassVar], [`Final`][typing.Final].
|
| 253 |
+
"""
|
| 254 |
+
|
| 255 |
+
DATACLASS = auto()
|
| 256 |
+
"""An annotation used in the body of a dataclass:
|
| 257 |
+
|
| 258 |
+
```python
|
| 259 |
+
@dataclass
|
| 260 |
+
class Test:
|
| 261 |
+
x: Final[int] = 1
|
| 262 |
+
y: InitVar[str] = 'test'
|
| 263 |
+
```
|
| 264 |
+
|
| 265 |
+
**Allowed type qualifiers:** [`ClassVar`][typing.ClassVar], [`Final`][typing.Final], [`InitVar`][dataclasses.InitVar].
|
| 266 |
+
""" # noqa: E501
|
| 267 |
+
|
| 268 |
+
TYPED_DICT = auto()
|
| 269 |
+
"""An annotation used in the body of a [`TypedDict`][typing.TypedDict]:
|
| 270 |
+
|
| 271 |
+
```python
|
| 272 |
+
class TD(TypedDict):
|
| 273 |
+
x: Required[ReadOnly[int]]
|
| 274 |
+
y: ReadOnly[NotRequired[str]]
|
| 275 |
+
```
|
| 276 |
+
|
| 277 |
+
**Allowed type qualifiers:** [`ReadOnly`][typing.ReadOnly], [`Required`][typing.Required],
|
| 278 |
+
[`NotRequired`][typing.NotRequired].
|
| 279 |
+
"""
|
| 280 |
+
|
| 281 |
+
NAMED_TUPLE = auto()
|
| 282 |
+
"""An annotation used in the body of a [`NamedTuple`][typing.NamedTuple].
|
| 283 |
+
|
| 284 |
+
```python
|
| 285 |
+
class NT(NamedTuple):
|
| 286 |
+
x: int
|
| 287 |
+
y: str
|
| 288 |
+
```
|
| 289 |
+
|
| 290 |
+
**Allowed type qualifiers:** none.
|
| 291 |
+
"""
|
| 292 |
+
|
| 293 |
+
FUNCTION = auto()
|
| 294 |
+
"""An annotation used in a function, either for a parameter or the return value.
|
| 295 |
+
|
| 296 |
+
```python
|
| 297 |
+
def func(a: int) -> str:
|
| 298 |
+
...
|
| 299 |
+
```
|
| 300 |
+
|
| 301 |
+
**Allowed type qualifiers:** none.
|
| 302 |
+
"""
|
| 303 |
+
|
| 304 |
+
ANY = auto()
|
| 305 |
+
"""An annotation that might come from any source.
|
| 306 |
+
|
| 307 |
+
**Allowed type qualifiers:** all.
|
| 308 |
+
"""
|
| 309 |
+
|
| 310 |
+
BARE = auto()
|
| 311 |
+
"""An annotation that is inspected as is.
|
| 312 |
+
|
| 313 |
+
**Allowed type qualifiers:** none.
|
| 314 |
+
"""
|
| 315 |
+
|
| 316 |
+
@property
|
| 317 |
+
def allowed_qualifiers(self) -> set[Qualifier]:
|
| 318 |
+
"""The allowed [type qualifiers][type qualifier] for this annotation source."""
|
| 319 |
+
# TODO use a match statement when Python 3.9 support is dropped.
|
| 320 |
+
if self is AnnotationSource.ASSIGNMENT_OR_VARIABLE:
|
| 321 |
+
return {'final'}
|
| 322 |
+
elif self is AnnotationSource.CLASS:
|
| 323 |
+
return {'final', 'class_var'}
|
| 324 |
+
elif self is AnnotationSource.DATACLASS:
|
| 325 |
+
return {'final', 'class_var', 'init_var'}
|
| 326 |
+
elif self is AnnotationSource.TYPED_DICT:
|
| 327 |
+
return {'required', 'not_required', 'read_only'}
|
| 328 |
+
elif self in (AnnotationSource.NAMED_TUPLE, AnnotationSource.FUNCTION, AnnotationSource.BARE):
|
| 329 |
+
return set()
|
| 330 |
+
elif self is AnnotationSource.ANY:
|
| 331 |
+
return _all_qualifiers
|
| 332 |
+
else: # pragma: no cover
|
| 333 |
+
assert_never(self)
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
class ForbiddenQualifier(Exception):
|
| 337 |
+
"""The provided [type qualifier][] is forbidden."""
|
| 338 |
+
|
| 339 |
+
qualifier: Qualifier
|
| 340 |
+
"""The forbidden qualifier."""
|
| 341 |
+
|
| 342 |
+
def __init__(self, qualifier: Qualifier, /) -> None:
|
| 343 |
+
self.qualifier = qualifier
|
| 344 |
+
|
| 345 |
+
|
| 346 |
+
class _UnknownTypeEnum(Enum):
|
| 347 |
+
UNKNOWN = auto()
|
| 348 |
+
|
| 349 |
+
def __str__(self) -> str:
|
| 350 |
+
return 'UNKNOWN'
|
| 351 |
+
|
| 352 |
+
def __repr__(self) -> str:
|
| 353 |
+
return '<UNKNOWN>'
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
UNKNOWN = _UnknownTypeEnum.UNKNOWN
|
| 357 |
+
"""A sentinel value used when no [type expression][] is present."""
|
| 358 |
+
|
| 359 |
+
_UnkownType: TypeAlias = Literal[_UnknownTypeEnum.UNKNOWN]
|
| 360 |
+
"""The type of the [`UNKNOWN`][typing_inspection.introspection.UNKNOWN] sentinel value."""
|
| 361 |
+
|
| 362 |
+
|
| 363 |
+
class InspectedAnnotation(NamedTuple):
|
| 364 |
+
"""The result of the inspected annotation."""
|
| 365 |
+
|
| 366 |
+
type: Any | _UnkownType
|
| 367 |
+
"""The final [type expression][], with [type qualifiers][type qualifier] and annotated metadata stripped.
|
| 368 |
+
|
| 369 |
+
If no type expression is available, the [`UNKNOWN`][typing_inspection.introspection.UNKNOWN] sentinel
|
| 370 |
+
value is used instead. This is the case when a [type qualifier][] is used with no type annotation:
|
| 371 |
+
|
| 372 |
+
```python
|
| 373 |
+
ID: Final = 1
|
| 374 |
+
|
| 375 |
+
class C:
|
| 376 |
+
x: ClassVar = 'test'
|
| 377 |
+
```
|
| 378 |
+
"""
|
| 379 |
+
|
| 380 |
+
qualifiers: set[Qualifier]
|
| 381 |
+
"""The [type qualifiers][type qualifier] present on the annotation."""
|
| 382 |
+
|
| 383 |
+
metadata: list[Any]
|
| 384 |
+
"""The annotated metadata."""
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
def inspect_annotation(  # noqa: PLR0915
    annotation: Any,
    /,
    *,
    annotation_source: AnnotationSource,
    unpack_type_aliases: Literal['skip', 'lenient', 'eager'] = 'skip',
) -> InspectedAnnotation:
    """Inspect an [annotation expression][], extracting any [type qualifier][] and metadata.

    An [annotation expression][] is a [type expression][] optionally surrounded by one or more
    [type qualifiers][type qualifier] or by [`Annotated`][typing.Annotated]. This function will:

    - Unwrap the type expression, keeping track of the type qualifiers.
    - Unwrap [`Annotated`][typing.Annotated] forms, keeping track of the annotated metadata.

    Args:
        annotation: The annotation expression to be inspected.
        annotation_source: The source of the annotation. Depending on the source (e.g. a class), different type
            qualifiers may be (dis)allowed. To allow any type qualifier, use
            [`AnnotationSource.ANY`][typing_inspection.introspection.AnnotationSource.ANY].
        unpack_type_aliases: What to do when encountering [PEP 695](https://peps.python.org/pep-0695/)
            [type aliases][type-aliases]. Can be one of:

            - `'skip'`: Do not try to parse type aliases (the default):

              ```pycon
              >>> type MyInt = Annotated[int, 'meta']
              >>> inspect_annotation(MyInt, annotation_source=AnnotationSource.BARE, unpack_type_aliases='skip')
              InspectedAnnotation(type=MyInt, qualifiers={}, metadata=[])
              ```

            - `'lenient'`: Try to parse type aliases, and fallback to `'skip'` if the type alias
              can't be inspected (because of an undefined forward reference):

              ```pycon
              >>> type MyInt = Annotated[Undefined, 'meta']
              >>> inspect_annotation(MyInt, annotation_source=AnnotationSource.BARE, unpack_type_aliases='lenient')
              InspectedAnnotation(type=MyInt, qualifiers={}, metadata=[])
              >>> Undefined = int
              >>> inspect_annotation(MyInt, annotation_source=AnnotationSource.BARE, unpack_type_aliases='lenient')
              InspectedAnnotation(type=int, qualifiers={}, metadata=['meta'])
              ```

            - `'eager'`: Parse type aliases and raise any encountered [`NameError`][] exceptions.

    Returns:
        The result of the inspected annotation, where the type expression, used qualifiers and metadata is stored.

    Example:
        ```pycon
        >>> inspect_annotation(
        ...     Final[Annotated[ClassVar[Annotated[int, 'meta_1']], 'meta_2']],
        ...     annotation_source=AnnotationSource.CLASS,
        ... )
        ...
        InspectedAnnotation(type=int, qualifiers={'class_var', 'final'}, metadata=['meta_1', 'meta_2'])
        ```
    """
    allowed_qualifiers = annotation_source.allowed_qualifiers
    qualifiers: set[Qualifier] = set()
    metadata: list[Any] = []

    # Repeatedly peel `Annotated` forms and subscripted qualifiers (outermost first)
    # until only the bare type expression (or a bare qualifier) remains:
    while True:
        annotation, _meta = _unpack_annotated(annotation, unpack_type_aliases=unpack_type_aliases)
        if _meta:
            # Outer metadata was unpacked first, so prepend it to preserve ordering:
            metadata = _meta + metadata
            continue

        origin = get_origin(annotation)
        if origin is not None:
            if typing_objects.is_classvar(origin):
                if 'class_var' not in allowed_qualifiers:
                    raise ForbiddenQualifier('class_var')
                qualifiers.add('class_var')
                annotation = annotation.__args__[0]
            elif typing_objects.is_final(origin):
                if 'final' not in allowed_qualifiers:
                    raise ForbiddenQualifier('final')
                qualifiers.add('final')
                annotation = annotation.__args__[0]
            elif typing_objects.is_required(origin):
                if 'required' not in allowed_qualifiers:
                    raise ForbiddenQualifier('required')
                qualifiers.add('required')
                annotation = annotation.__args__[0]
            elif typing_objects.is_notrequired(origin):
                if 'not_required' not in allowed_qualifiers:
                    raise ForbiddenQualifier('not_required')
                qualifiers.add('not_required')
                annotation = annotation.__args__[0]
            elif typing_objects.is_readonly(origin):
                if 'read_only' not in allowed_qualifiers:
                    # Fix: previously raised with 'not_required', misreporting the forbidden qualifier.
                    raise ForbiddenQualifier('read_only')
                qualifiers.add('read_only')
                annotation = annotation.__args__[0]
            else:
                # origin is not None but not a type qualifier nor `Annotated` (e.g. `list[int]`):
                break
        elif isinstance(annotation, InitVar):
            if 'init_var' not in allowed_qualifiers:
                raise ForbiddenQualifier('init_var')
            qualifiers.add('init_var')
            annotation = cast(Any, annotation.type)
        else:
            break

    # `Final`, `ClassVar` and `InitVar` are type qualifiers allowed to be used as a bare annotation:
    if typing_objects.is_final(annotation):
        if 'final' not in allowed_qualifiers:
            raise ForbiddenQualifier('final')
        qualifiers.add('final')
        annotation = UNKNOWN
    elif typing_objects.is_classvar(annotation):
        if 'class_var' not in allowed_qualifiers:
            raise ForbiddenQualifier('class_var')
        qualifiers.add('class_var')
        annotation = UNKNOWN
    elif annotation is InitVar:
        if 'init_var' not in allowed_qualifiers:
            raise ForbiddenQualifier('init_var')
        qualifiers.add('init_var')
        annotation = UNKNOWN

    return InspectedAnnotation(annotation, qualifiers, metadata)
|
| 509 |
+
|
| 510 |
+
|
| 511 |
+
def _unpack_annotated_inner(
    annotation: Any, unpack_type_aliases: Literal['lenient', 'eager'], check_annotated: bool
) -> tuple[Any, list[Any]]:
    """Recursively unpack `Annotated` forms and PEP 695 type aliases.

    Returns a `(type, metadata)` tuple. When no `Annotated` metadata is found (directly or
    through a type alias), the annotation is returned unchanged with an empty metadata list.
    `check_annotated=False` skips the top-level `Annotated` check when the caller already
    knows the argument cannot be an `Annotated` form.
    """
    origin = get_origin(annotation)
    if check_annotated and typing_objects.is_annotated(origin):
        annotated_type = annotation.__origin__
        metadata = list(annotation.__metadata__)

        # The annotated type might be a PEP 695 type alias, so we need to recursively
        # unpack it. Because Python already flattens `Annotated[Annotated[<type>, ...], ...]` forms,
        # we can skip the `is_annotated()` check in the next call:
        annotated_type, sub_meta = _unpack_annotated_inner(
            annotated_type, unpack_type_aliases=unpack_type_aliases, check_annotated=False
        )
        # Inner metadata comes first, matching `Annotated` flattening semantics:
        metadata = sub_meta + metadata
        return annotated_type, metadata
    elif typing_objects.is_typealiastype(annotation):
        try:
            value = annotation.__value__
        except NameError:
            # Undefined forward reference in the alias value: 'lenient' falls back to
            # returning the alias untouched, 'eager' propagates the error.
            if unpack_type_aliases == 'eager':
                raise
        else:
            typ, metadata = _unpack_annotated_inner(
                value, unpack_type_aliases=unpack_type_aliases, check_annotated=True
            )
            if metadata:
                # Having metadata means the type alias' `__value__` was an `Annotated` form
                # (or, recursively, a type alias to an `Annotated` form). It is important to check
                # for this, as we don't want to unpack other type aliases (e.g. `type MyInt = int`).
                return typ, metadata
            return annotation, []
    elif typing_objects.is_typealiastype(origin):
        # When parameterized, PEP 695 type aliases become generic aliases
        # (e.g. with `type MyList[T] = Annotated[list[T], ...]`, `MyList[int]`
        # is a generic alias).
        try:
            value = origin.__value__
        except NameError:
            if unpack_type_aliases == 'eager':
                raise
        else:
            # While Python already handles type variable replacement for simple `Annotated` forms,
            # we need to manually apply the same logic for PEP 695 type aliases:
            # - With `MyList = Annotated[list[T], ...]`, `MyList[int] == Annotated[list[int], ...]`
            # - With `type MyList[T] = Annotated[list[T], ...]`, `MyList[int].__value__ == Annotated[list[T], ...]`.

            try:
                # To do so, we emulate the parameterization of the value with the arguments:
                # with `type MyList[T] = Annotated[list[T], ...]`, to emulate `MyList[int]`,
                # we do `Annotated[list[T], ...][int]` (which gives `Annotated[list[T], ...]`):
                value = value[annotation.__args__]
            except TypeError:
                # Might happen if the type alias is parameterized, but its value doesn't have any
                # type variables, e.g. `type MyInt[T] = int`.
                pass
            typ, metadata = _unpack_annotated_inner(
                value, unpack_type_aliases=unpack_type_aliases, check_annotated=True
            )
            if metadata:
                return typ, metadata
            return annotation, []

    return annotation, []
|
| 575 |
+
|
| 576 |
+
|
| 577 |
+
# This could eventually be made public:
def _unpack_annotated(
    annotation: Any, /, *, unpack_type_aliases: Literal['skip', 'lenient', 'eager'] = 'eager'
) -> tuple[Any, list[Any]]:
    """Split an annotation into its inner type and any `Annotated` metadata."""
    if unpack_type_aliases != 'skip':
        return _unpack_annotated_inner(annotation, unpack_type_aliases=unpack_type_aliases, check_annotated=True)
    # 'skip' mode only peels a top-level `Annotated` form, never type aliases:
    if not typing_objects.is_annotated(get_origin(annotation)):
        return annotation, []
    return annotation.__origin__, list(annotation.__metadata__)
|
venv/lib/python3.12/site-packages/typing_inspection/py.typed
ADDED
|
File without changes
|
venv/lib/python3.12/site-packages/typing_inspection/typing_objects.py
ADDED
|
@@ -0,0 +1,607 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Low-level introspection utilities for [`typing`][] members.
|
| 2 |
+
|
| 3 |
+
The provided functions in this module check against both the [`typing`][] and [`typing_extensions`][]
|
| 4 |
+
variants, if they exists and are different.
|
| 5 |
+
"""
|
| 6 |
+
# ruff: noqa: UP006
|
| 7 |
+
|
| 8 |
+
import collections.abc
|
| 9 |
+
import contextlib
|
| 10 |
+
import re
|
| 11 |
+
import sys
|
| 12 |
+
import typing
|
| 13 |
+
import warnings
|
| 14 |
+
from textwrap import dedent
|
| 15 |
+
from types import FunctionType, GenericAlias
|
| 16 |
+
from typing import Any, Final
|
| 17 |
+
|
| 18 |
+
import typing_extensions
|
| 19 |
+
from typing_extensions import LiteralString, TypeAliasType, TypeIs, deprecated
|
| 20 |
+
|
| 21 |
+
__all__ = (
|
| 22 |
+
'DEPRECATED_ALIASES',
|
| 23 |
+
'NoneType',
|
| 24 |
+
'is_annotated',
|
| 25 |
+
'is_any',
|
| 26 |
+
'is_classvar',
|
| 27 |
+
'is_concatenate',
|
| 28 |
+
'is_deprecated',
|
| 29 |
+
'is_final',
|
| 30 |
+
'is_forwardref',
|
| 31 |
+
'is_generic',
|
| 32 |
+
'is_literal',
|
| 33 |
+
'is_literalstring',
|
| 34 |
+
'is_namedtuple',
|
| 35 |
+
'is_never',
|
| 36 |
+
'is_newtype',
|
| 37 |
+
'is_nodefault',
|
| 38 |
+
'is_noextraitems',
|
| 39 |
+
'is_noreturn',
|
| 40 |
+
'is_notrequired',
|
| 41 |
+
'is_paramspec',
|
| 42 |
+
'is_paramspecargs',
|
| 43 |
+
'is_paramspeckwargs',
|
| 44 |
+
'is_readonly',
|
| 45 |
+
'is_required',
|
| 46 |
+
'is_self',
|
| 47 |
+
'is_typealias',
|
| 48 |
+
'is_typealiastype',
|
| 49 |
+
'is_typeguard',
|
| 50 |
+
'is_typeis',
|
| 51 |
+
'is_typevar',
|
| 52 |
+
'is_typevartuple',
|
| 53 |
+
'is_union',
|
| 54 |
+
'is_unpack',
|
| 55 |
+
)
|
| 56 |
+
|
| 57 |
+
_IS_PY310 = sys.version_info[:2] == (3, 10)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _compile_identity_check_function(member: LiteralString, function_name: LiteralString) -> FunctionType:
    """Create a function checking that the function argument is the (unparameterized) typing `member`.

    The generated check covers both the `typing` and `typing_extensions` variants of `member`,
    which may be distinct objects depending on the Python version. For instance, on Python 3.9:

    ```pycon
    >>> from typing import Literal as t_Literal
    >>> from typing_extensions import Literal as te_Literal, get_origin

    >>> t_Literal is te_Literal
    False
    >>> get_origin(t_Literal[1])
    typing.Literal
    >>> get_origin(te_Literal[1])
    typing_extensions.Literal
    ```
    """
    # Collect one identity comparison per module that provides `member`, skipping the
    # `typing_extensions` one when it merely re-exports the `typing` object:
    comparisons: list[str] = []
    if hasattr(typing, member):
        comparisons.append(f'obj is typing.{member}')
    if hasattr(typing_extensions, member) and getattr(typing_extensions, member) is not getattr(typing, member, None):
        comparisons.append(f'obj is typing_extensions.{member}')
    check_code = ' or '.join(comparisons) or 'False'

    func_code = dedent(f"""
    def {function_name}(obj: Any, /) -> bool:
        return {check_code}
    """)

    namespace: dict[str, Any] = {}
    exec(func_code, {'Any': Any, 'typing': typing, 'typing_extensions': typing_extensions}, namespace)
    return namespace[function_name]
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def _compile_isinstance_check_function(member: LiteralString, function_name: LiteralString) -> FunctionType:
    """Create a function checking that the function argument is an instance of the typing `member`.

    The generated check covers both the `typing` and `typing_extensions` variants of `member`,
    which may be distinct classes depending on the Python version.
    """
    # Collect the dotted path of each module-specific class, skipping the
    # `typing_extensions` one when it merely re-exports the `typing` class:
    class_paths: list[str] = []
    if hasattr(typing, member):
        class_paths.append(f'typing.{member}')
    if hasattr(typing_extensions, member) and getattr(typing_extensions, member) is not getattr(typing, member, None):
        class_paths.append(f'typing_extensions.{member}')

    if not class_paths:
        check_code = 'False'
    elif len(class_paths) == 1:
        check_code = f'isinstance(obj, {class_paths[0]})'
    else:
        check_code = f'isinstance(obj, ({class_paths[0]}, {class_paths[1]}))'

    func_code = dedent(f"""
    def {function_name}(obj: Any, /) -> 'TypeIs[{member}]':
        return {check_code}
    """)

    namespace: dict[str, Any] = {}
    exec(func_code, {'Any': Any, 'typing': typing, 'typing_extensions': typing_extensions}, namespace)
    return namespace[function_name]
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
try:
    # `types.NoneType` only exists on Python 3.10+:
    from types import NoneType
except ImportError:
    NoneType = type(None)
|
| 141 |
+
|
| 142 |
+
# Keep this ordered, as per `typing.__all__`:
|
| 143 |
+
|
| 144 |
+
is_annotated = _compile_identity_check_function('Annotated', 'is_annotated')
|
| 145 |
+
is_annotated.__doc__ = """
|
| 146 |
+
Return whether the argument is the [`Annotated`][typing.Annotated] [special form][].
|
| 147 |
+
|
| 148 |
+
```pycon
|
| 149 |
+
>>> is_annotated(Annotated)
|
| 150 |
+
True
|
| 151 |
+
>>> is_annotated(Annotated[int, ...])
|
| 152 |
+
False
|
| 153 |
+
```
|
| 154 |
+
"""
|
| 155 |
+
|
| 156 |
+
is_any = _compile_identity_check_function('Any', 'is_any')
|
| 157 |
+
is_any.__doc__ = """
|
| 158 |
+
Return whether the argument is the [`Any`][typing.Any] [special form][].
|
| 159 |
+
|
| 160 |
+
```pycon
|
| 161 |
+
>>> is_any(Any)
|
| 162 |
+
True
|
| 163 |
+
```
|
| 164 |
+
"""
|
| 165 |
+
|
| 166 |
+
is_classvar = _compile_identity_check_function('ClassVar', 'is_classvar')
# Fix: the doctest output line was wrongly written as a `>>>` prompt (`>>> False`).
is_classvar.__doc__ = """
Return whether the argument is the [`ClassVar`][typing.ClassVar] [type qualifier][].

```pycon
>>> is_classvar(ClassVar)
True
>>> is_classvar(ClassVar[int])
False
```
"""
|
| 177 |
+
|
| 178 |
+
is_concatenate = _compile_identity_check_function('Concatenate', 'is_concatenate')
|
| 179 |
+
is_concatenate.__doc__ = """
|
| 180 |
+
Return whether the argument is the [`Concatenate`][typing.Concatenate] [special form][].
|
| 181 |
+
|
| 182 |
+
```pycon
|
| 183 |
+
>>> is_concatenate(Concatenate)
|
| 184 |
+
True
|
| 185 |
+
>>> is_concatenate(Concatenate[int, P])
|
| 186 |
+
False
|
| 187 |
+
```
|
| 188 |
+
"""
|
| 189 |
+
|
| 190 |
+
is_final = _compile_identity_check_function('Final', 'is_final')
|
| 191 |
+
is_final.__doc__ = """
|
| 192 |
+
Return whether the argument is the [`Final`][typing.Final] [type qualifier][].
|
| 193 |
+
|
| 194 |
+
```pycon
|
| 195 |
+
>>> is_final(Final)
|
| 196 |
+
True
|
| 197 |
+
>>> is_final(Final[int])
|
| 198 |
+
False
|
| 199 |
+
```
|
| 200 |
+
"""
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
# Unlikely to have a different version in `typing-extensions`, but keep it consistent.
|
| 204 |
+
# Also note that starting in 3.14, this is an alias to `annotationlib.ForwardRef`, but
|
| 205 |
+
# accessing it from `typing` doesn't seem to be deprecated.
|
| 206 |
+
is_forwardref = _compile_isinstance_check_function('ForwardRef', 'is_forwardref')
|
| 207 |
+
is_forwardref.__doc__ = """
|
| 208 |
+
Return whether the argument is an instance of [`ForwardRef`][typing.ForwardRef].
|
| 209 |
+
|
| 210 |
+
```pycon
|
| 211 |
+
>>> is_forwardref(ForwardRef('T'))
|
| 212 |
+
True
|
| 213 |
+
```
|
| 214 |
+
"""
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
is_generic = _compile_identity_check_function('Generic', 'is_generic')
|
| 218 |
+
is_generic.__doc__ = """
|
| 219 |
+
Return whether the argument is the [`Generic`][typing.Generic] [special form][].
|
| 220 |
+
|
| 221 |
+
```pycon
|
| 222 |
+
>>> is_generic(Generic)
|
| 223 |
+
True
|
| 224 |
+
>>> is_generic(Generic[T])
|
| 225 |
+
False
|
| 226 |
+
```
|
| 227 |
+
"""
|
| 228 |
+
|
| 229 |
+
is_literal = _compile_identity_check_function('Literal', 'is_literal')
|
| 230 |
+
is_literal.__doc__ = """
|
| 231 |
+
Return whether the argument is the [`Literal`][typing.Literal] [special form][].
|
| 232 |
+
|
| 233 |
+
```pycon
|
| 234 |
+
>>> is_literal(Literal)
|
| 235 |
+
True
|
| 236 |
+
>>> is_literal(Literal["a"])
|
| 237 |
+
False
|
| 238 |
+
```
|
| 239 |
+
"""
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
# `get_origin(Optional[int]) is Union`, so `is_optional()` isn't implemented.
|
| 243 |
+
|
| 244 |
+
is_paramspec = _compile_isinstance_check_function('ParamSpec', 'is_paramspec')
|
| 245 |
+
is_paramspec.__doc__ = """
|
| 246 |
+
Return whether the argument is an instance of [`ParamSpec`][typing.ParamSpec].
|
| 247 |
+
|
| 248 |
+
```pycon
|
| 249 |
+
>>> P = ParamSpec('P')
|
| 250 |
+
>>> is_paramspec(P)
|
| 251 |
+
True
|
| 252 |
+
```
|
| 253 |
+
"""
|
| 254 |
+
|
| 255 |
+
# Protocol?
|
| 256 |
+
|
| 257 |
+
is_typevar = _compile_isinstance_check_function('TypeVar', 'is_typevar')
|
| 258 |
+
is_typevar.__doc__ = """
|
| 259 |
+
Return whether the argument is an instance of [`TypeVar`][typing.TypeVar].
|
| 260 |
+
|
| 261 |
+
```pycon
|
| 262 |
+
>>> T = TypeVar('T')
|
| 263 |
+
>>> is_typevar(T)
|
| 264 |
+
True
|
| 265 |
+
```
|
| 266 |
+
"""
|
| 267 |
+
|
| 268 |
+
is_typevartuple = _compile_isinstance_check_function('TypeVarTuple', 'is_typevartuple')
|
| 269 |
+
is_typevartuple.__doc__ = """
|
| 270 |
+
Return whether the argument is an instance of [`TypeVarTuple`][typing.TypeVarTuple].
|
| 271 |
+
|
| 272 |
+
```pycon
|
| 273 |
+
>>> Ts = TypeVarTuple('Ts')
|
| 274 |
+
>>> is_typevartuple(Ts)
|
| 275 |
+
True
|
| 276 |
+
```
|
| 277 |
+
"""
|
| 278 |
+
|
| 279 |
+
is_union = _compile_identity_check_function('Union', 'is_union')
|
| 280 |
+
is_union.__doc__ = """
|
| 281 |
+
Return whether the argument is the [`Union`][typing.Union] [special form][].
|
| 282 |
+
|
| 283 |
+
This function can also be used to check for the [`Optional`][typing.Optional] [special form][],
|
| 284 |
+
as at runtime, `Optional[int]` is equivalent to `Union[int, None]`.
|
| 285 |
+
|
| 286 |
+
```pycon
|
| 287 |
+
>>> is_union(Union)
|
| 288 |
+
True
|
| 289 |
+
>>> is_union(Union[int, str])
|
| 290 |
+
False
|
| 291 |
+
```
|
| 292 |
+
|
| 293 |
+
!!! warning
|
| 294 |
+
This does not check for unions using the [new syntax][types-union] (e.g. `int | str`).
|
| 295 |
+
"""
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
def is_namedtuple(obj: Any, /) -> bool:
    """Return whether the argument is a named tuple type.

    This includes [`NamedTuple`][typing.NamedTuple] subclasses and classes created from the
    [`collections.namedtuple`][] factory function.

    ```pycon
    >>> class User(NamedTuple):
    ...     name: str
    ...
    >>> is_namedtuple(User)
    True
    >>> City = collections.namedtuple('City', [])
    >>> is_namedtuple(City)
    True
    >>> is_namedtuple(NamedTuple)
    False
    ```
    """
    if not isinstance(obj, type):
        return False
    # Named tuple classes are `tuple` subclasses carrying the generated `_fields` attribute:
    return issubclass(obj, tuple) and hasattr(obj, '_fields')  # pyright: ignore[reportUnknownArgumentType]
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
# TypedDict?
|
| 321 |
+
|
| 322 |
+
# BinaryIO? IO? TextIO?
|
| 323 |
+
|
| 324 |
+
is_literalstring = _compile_identity_check_function('LiteralString', 'is_literalstring')
|
| 325 |
+
is_literalstring.__doc__ = """
|
| 326 |
+
Return whether the argument is the [`LiteralString`][typing.LiteralString] [special form][].
|
| 327 |
+
|
| 328 |
+
```pycon
|
| 329 |
+
>>> is_literalstring(LiteralString)
|
| 330 |
+
True
|
| 331 |
+
```
|
| 332 |
+
"""
|
| 333 |
+
|
| 334 |
+
is_never = _compile_identity_check_function('Never', 'is_never')
|
| 335 |
+
is_never.__doc__ = """
|
| 336 |
+
Return whether the argument is the [`Never`][typing.Never] [special form][].
|
| 337 |
+
|
| 338 |
+
```pycon
|
| 339 |
+
>>> is_never(Never)
|
| 340 |
+
True
|
| 341 |
+
```
|
| 342 |
+
"""
|
| 343 |
+
|
| 344 |
+
if sys.version_info >= (3, 10):
    is_newtype = _compile_isinstance_check_function('NewType', 'is_newtype')
else:  # Prior to Python 3.10, `NewType` is a plain function, so `isinstance()` can't be used.

    def is_newtype(obj: Any, /) -> bool:
        # NewType-created callables carry a `__supertype__` attribute:
        return hasattr(obj, '__supertype__')


is_newtype.__doc__ = """
Return whether the argument is a [`NewType`][typing.NewType].

```pycon
>>> UserId = NewType("UserId", int)
>>> is_newtype(UserId)
True
```
"""
|
| 361 |
+
|
| 362 |
+
is_nodefault = _compile_identity_check_function('NoDefault', 'is_nodefault')
|
| 363 |
+
is_nodefault.__doc__ = """
|
| 364 |
+
Return whether the argument is the [`NoDefault`][typing.NoDefault] sentinel object.
|
| 365 |
+
|
| 366 |
+
```pycon
|
| 367 |
+
>>> is_nodefault(NoDefault)
|
| 368 |
+
True
|
| 369 |
+
```
|
| 370 |
+
"""
|
| 371 |
+
|
| 372 |
+
is_noextraitems = _compile_identity_check_function('NoExtraItems', 'is_noextraitems')
|
| 373 |
+
is_noextraitems.__doc__ = """
|
| 374 |
+
Return whether the argument is the `NoExtraItems` sentinel object.
|
| 375 |
+
|
| 376 |
+
```pycon
|
| 377 |
+
>>> is_noextraitems(NoExtraItems)
|
| 378 |
+
True
|
| 379 |
+
```
|
| 380 |
+
"""
|
| 381 |
+
|
| 382 |
+
is_noreturn = _compile_identity_check_function('NoReturn', 'is_noreturn')
|
| 383 |
+
is_noreturn.__doc__ = """
|
| 384 |
+
Return whether the argument is the [`NoReturn`][typing.NoReturn] [special form][].
|
| 385 |
+
|
| 386 |
+
```pycon
|
| 387 |
+
>>> is_noreturn(NoReturn)
|
| 388 |
+
True
|
| 389 |
+
>>> is_noreturn(Never)
|
| 390 |
+
False
|
| 391 |
+
```
|
| 392 |
+
"""
|
| 393 |
+
|
| 394 |
+
is_notrequired = _compile_identity_check_function('NotRequired', 'is_notrequired')
|
| 395 |
+
is_notrequired.__doc__ = """
|
| 396 |
+
Return whether the argument is the [`NotRequired`][typing.NotRequired] [special form][].
|
| 397 |
+
|
| 398 |
+
```pycon
|
| 399 |
+
>>> is_notrequired(NotRequired)
|
| 400 |
+
True
|
| 401 |
+
```
|
| 402 |
+
"""
|
| 403 |
+
|
| 404 |
+
is_paramspecargs = _compile_isinstance_check_function('ParamSpecArgs', 'is_paramspecargs')
|
| 405 |
+
is_paramspecargs.__doc__ = """
|
| 406 |
+
Return whether the argument is an instance of [`ParamSpecArgs`][typing.ParamSpecArgs].
|
| 407 |
+
|
| 408 |
+
```pycon
|
| 409 |
+
>>> P = ParamSpec('P')
|
| 410 |
+
>>> is_paramspecargs(P.args)
|
| 411 |
+
True
|
| 412 |
+
```
|
| 413 |
+
"""
|
| 414 |
+
|
| 415 |
+
is_paramspeckwargs = _compile_isinstance_check_function('ParamSpecKwargs', 'is_paramspeckwargs')
|
| 416 |
+
is_paramspeckwargs.__doc__ = """
|
| 417 |
+
Return whether the argument is an instance of [`ParamSpecKwargs`][typing.ParamSpecKwargs].
|
| 418 |
+
|
| 419 |
+
```pycon
|
| 420 |
+
>>> P = ParamSpec('P')
|
| 421 |
+
>>> is_paramspeckwargs(P.kwargs)
|
| 422 |
+
True
|
| 423 |
+
```
|
| 424 |
+
"""
|
| 425 |
+
|
| 426 |
+
is_readonly = _compile_identity_check_function('ReadOnly', 'is_readonly')
|
| 427 |
+
is_readonly.__doc__ = """
|
| 428 |
+
Return whether the argument is the [`ReadOnly`][typing.ReadOnly] [special form][].
|
| 429 |
+
|
| 430 |
+
```pycon
|
| 431 |
+
>>> is_readonly(ReadOnly)
|
| 432 |
+
True
|
| 433 |
+
```
|
| 434 |
+
"""
|
| 435 |
+
|
| 436 |
+
is_required = _compile_identity_check_function('Required', 'is_required')
|
| 437 |
+
is_required.__doc__ = """
|
| 438 |
+
Return whether the argument is the [`Required`][typing.Required] [special form][].
|
| 439 |
+
|
| 440 |
+
```pycon
|
| 441 |
+
>>> is_required(Required)
|
| 442 |
+
True
|
| 443 |
+
```
|
| 444 |
+
"""
|
| 445 |
+
|
| 446 |
+
is_self = _compile_identity_check_function('Self', 'is_self')
|
| 447 |
+
is_self.__doc__ = """
|
| 448 |
+
Return whether the argument is the [`Self`][typing.Self] [special form][].
|
| 449 |
+
|
| 450 |
+
```pycon
|
| 451 |
+
>>> is_self(Self)
|
| 452 |
+
True
|
| 453 |
+
```
|
| 454 |
+
"""
|
| 455 |
+
|
| 456 |
+
# TYPE_CHECKING?
|
| 457 |
+
|
| 458 |
+
is_typealias = _compile_identity_check_function('TypeAlias', 'is_typealias')
|
| 459 |
+
is_typealias.__doc__ = """
|
| 460 |
+
Return whether the argument is the [`TypeAlias`][typing.TypeAlias] [special form][].
|
| 461 |
+
|
| 462 |
+
```pycon
|
| 463 |
+
>>> is_typealias(TypeAlias)
|
| 464 |
+
True
|
| 465 |
+
```
|
| 466 |
+
"""
|
| 467 |
+
|
| 468 |
+
is_typeguard = _compile_identity_check_function('TypeGuard', 'is_typeguard')
|
| 469 |
+
is_typeguard.__doc__ = """
|
| 470 |
+
Return whether the argument is the [`TypeGuard`][typing.TypeGuard] [special form][].
|
| 471 |
+
|
| 472 |
+
```pycon
|
| 473 |
+
>>> is_typeguard(TypeGuard)
|
| 474 |
+
True
|
| 475 |
+
```
|
| 476 |
+
"""
|
| 477 |
+
|
| 478 |
+
is_typeis = _compile_identity_check_function('TypeIs', 'is_typeis')
|
| 479 |
+
is_typeis.__doc__ = """
|
| 480 |
+
Return whether the argument is the [`TypeIs`][typing.TypeIs] [special form][].
|
| 481 |
+
|
| 482 |
+
```pycon
|
| 483 |
+
>>> is_typeis(TypeIs)
|
| 484 |
+
True
|
| 485 |
+
```
|
| 486 |
+
"""
|
| 487 |
+
|
| 488 |
+
_is_typealiastype_inner = _compile_isinstance_check_function('TypeAliasType', '_is_typealiastype_inner')


if _IS_PY310:
    # Parameterized PEP 695 type aliases are instances of `types.GenericAlias` in typing_extensions>=4.13.0.
    # On Python 3.10, with `Alias[int]` being such an instance of `GenericAlias`,
    # `isinstance(Alias[int], TypeAliasType)` returns `True`.
    # See https://github.com/python/cpython/issues/89828.
    def is_typealiastype(obj: Any, /) -> 'TypeIs[TypeAliasType]':
        return type(obj) is not GenericAlias and _is_typealiastype_inner(obj)
else:
    is_typealiastype = _compile_isinstance_check_function('TypeAliasType', 'is_typealiastype')

# Fix: the doctest example had a stray trailing colon (`>>> is_typealiastype(MyStr):`).
is_typealiastype.__doc__ = """
Return whether the argument is a [`TypeAliasType`][typing.TypeAliasType] instance.

```pycon
>>> type MyInt = int
>>> is_typealiastype(MyInt)
True
>>> MyStr = TypeAliasType("MyStr", str)
>>> is_typealiastype(MyStr)
True
>>> type MyList[T] = list[T]
>>> is_typealiastype(MyList[int])
False
```
"""
|
| 516 |
+
|
| 517 |
+
is_unpack = _compile_identity_check_function('Unpack', 'is_unpack')
|
| 518 |
+
is_unpack.__doc__ = """
|
| 519 |
+
Return whether the argument is the [`Unpack`][typing.Unpack] [special form][].
|
| 520 |
+
|
| 521 |
+
```pycon
|
| 522 |
+
>>> is_unpack(Unpack)
|
| 523 |
+
True
|
| 524 |
+
>>> is_unpack(Unpack[Ts])
|
| 525 |
+
False
|
| 526 |
+
```
|
| 527 |
+
"""
|
| 528 |
+
|
| 529 |
+
|
| 530 |
+
if sys.version_info >= (3, 13):
|
| 531 |
+
|
| 532 |
+
def is_deprecated(obj: Any, /) -> 'TypeIs[deprecated]':
|
| 533 |
+
return isinstance(obj, (warnings.deprecated, typing_extensions.deprecated))
|
| 534 |
+
|
| 535 |
+
else:
|
| 536 |
+
|
| 537 |
+
def is_deprecated(obj: Any, /) -> 'TypeIs[deprecated]':
|
| 538 |
+
return isinstance(obj, typing_extensions.deprecated)
|
| 539 |
+
|
| 540 |
+
|
| 541 |
+
is_deprecated.__doc__ = """
|
| 542 |
+
Return whether the argument is a [`deprecated`][warnings.deprecated] instance.
|
| 543 |
+
|
| 544 |
+
This also includes the [`typing_extensions` backport][typing_extensions.deprecated].
|
| 545 |
+
|
| 546 |
+
```pycon
|
| 547 |
+
>>> is_deprecated(warnings.deprecated('message'))
|
| 548 |
+
True
|
| 549 |
+
>>> is_deprecated(typing_extensions.deprecated('message'))
|
| 550 |
+
True
|
| 551 |
+
```
|
| 552 |
+
"""
|
| 553 |
+
|
| 554 |
+
|
| 555 |
+
# Aliases defined in the `typing` module using `typing._SpecialGenericAlias` (itself aliased as `alias()`):
|
| 556 |
+
DEPRECATED_ALIASES: Final[dict[Any, type[Any]]] = {
|
| 557 |
+
typing.Hashable: collections.abc.Hashable,
|
| 558 |
+
typing.Awaitable: collections.abc.Awaitable,
|
| 559 |
+
typing.Coroutine: collections.abc.Coroutine,
|
| 560 |
+
typing.AsyncIterable: collections.abc.AsyncIterable,
|
| 561 |
+
typing.AsyncIterator: collections.abc.AsyncIterator,
|
| 562 |
+
typing.Iterable: collections.abc.Iterable,
|
| 563 |
+
typing.Iterator: collections.abc.Iterator,
|
| 564 |
+
typing.Reversible: collections.abc.Reversible,
|
| 565 |
+
typing.Sized: collections.abc.Sized,
|
| 566 |
+
typing.Container: collections.abc.Container,
|
| 567 |
+
typing.Collection: collections.abc.Collection,
|
| 568 |
+
# type ignore reason: https://github.com/python/typeshed/issues/6257:
|
| 569 |
+
typing.Callable: collections.abc.Callable, # pyright: ignore[reportAssignmentType, reportUnknownMemberType]
|
| 570 |
+
typing.AbstractSet: collections.abc.Set,
|
| 571 |
+
typing.MutableSet: collections.abc.MutableSet,
|
| 572 |
+
typing.Mapping: collections.abc.Mapping,
|
| 573 |
+
typing.MutableMapping: collections.abc.MutableMapping,
|
| 574 |
+
typing.Sequence: collections.abc.Sequence,
|
| 575 |
+
typing.MutableSequence: collections.abc.MutableSequence,
|
| 576 |
+
typing.Tuple: tuple,
|
| 577 |
+
typing.List: list,
|
| 578 |
+
typing.Deque: collections.deque,
|
| 579 |
+
typing.Set: set,
|
| 580 |
+
typing.FrozenSet: frozenset,
|
| 581 |
+
typing.MappingView: collections.abc.MappingView,
|
| 582 |
+
typing.KeysView: collections.abc.KeysView,
|
| 583 |
+
typing.ItemsView: collections.abc.ItemsView,
|
| 584 |
+
typing.ValuesView: collections.abc.ValuesView,
|
| 585 |
+
typing.Dict: dict,
|
| 586 |
+
typing.DefaultDict: collections.defaultdict,
|
| 587 |
+
typing.OrderedDict: collections.OrderedDict,
|
| 588 |
+
typing.Counter: collections.Counter,
|
| 589 |
+
typing.ChainMap: collections.ChainMap,
|
| 590 |
+
typing.Generator: collections.abc.Generator,
|
| 591 |
+
typing.AsyncGenerator: collections.abc.AsyncGenerator,
|
| 592 |
+
typing.Type: type,
|
| 593 |
+
# Defined in `typing.__getattr__`:
|
| 594 |
+
typing.Pattern: re.Pattern,
|
| 595 |
+
typing.Match: re.Match,
|
| 596 |
+
typing.ContextManager: contextlib.AbstractContextManager,
|
| 597 |
+
typing.AsyncContextManager: contextlib.AbstractAsyncContextManager,
|
| 598 |
+
# Skipped: `ByteString` (deprecated, removed in 3.14)
|
| 599 |
+
}
|
| 600 |
+
"""A mapping between the deprecated typing aliases to their replacement, as per [PEP 585](https://peps.python.org/pep-0585/)."""
|
| 601 |
+
|
| 602 |
+
|
| 603 |
+
# Add the `typing_extensions` aliases:
|
| 604 |
+
for alias, target in list(DEPRECATED_ALIASES.items()):
|
| 605 |
+
# Use `alias.__name__` when we drop support for Python 3.9
|
| 606 |
+
if (te_alias := getattr(typing_extensions, alias._name, None)) is not None:
|
| 607 |
+
DEPRECATED_ALIASES[te_alias] = target
|
venv/lib/python3.12/site-packages/typing_inspection/typing_objects.pyi
ADDED
|
@@ -0,0 +1,417 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Stub file generated using:
|
| 2 |
+
# `stubgen --inspect-mode --include-docstrings -m typing_inspection.typing_objects`
|
| 3 |
+
# (manual edits need to be applied).
|
| 4 |
+
"""Low-level introspection utilities for [`typing`][] members.
|
| 5 |
+
|
| 6 |
+
The provided functions in this module check against both the [`typing`][] and [`typing_extensions`][]
|
| 7 |
+
variants, if they exists and are different.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import sys
|
| 11 |
+
from typing import Any, Final, ForwardRef, NewType, TypeVar
|
| 12 |
+
|
| 13 |
+
from typing_extensions import ParamSpec, ParamSpecArgs, ParamSpecKwargs, TypeAliasType, TypeIs, TypeVarTuple, deprecated
|
| 14 |
+
|
| 15 |
+
__all__ = [
|
| 16 |
+
'DEPRECATED_ALIASES',
|
| 17 |
+
'NoneType',
|
| 18 |
+
'is_annotated',
|
| 19 |
+
'is_any',
|
| 20 |
+
'is_classvar',
|
| 21 |
+
'is_concatenate',
|
| 22 |
+
'is_deprecated',
|
| 23 |
+
'is_final',
|
| 24 |
+
'is_generic',
|
| 25 |
+
'is_literal',
|
| 26 |
+
'is_literalstring',
|
| 27 |
+
'is_namedtuple',
|
| 28 |
+
'is_never',
|
| 29 |
+
'is_newtype',
|
| 30 |
+
'is_nodefault',
|
| 31 |
+
'is_noextraitems',
|
| 32 |
+
'is_noreturn',
|
| 33 |
+
'is_notrequired',
|
| 34 |
+
'is_paramspec',
|
| 35 |
+
'is_paramspecargs',
|
| 36 |
+
'is_paramspeckwargs',
|
| 37 |
+
'is_readonly',
|
| 38 |
+
'is_required',
|
| 39 |
+
'is_self',
|
| 40 |
+
'is_typealias',
|
| 41 |
+
'is_typealiastype',
|
| 42 |
+
'is_typeguard',
|
| 43 |
+
'is_typeis',
|
| 44 |
+
'is_typevar',
|
| 45 |
+
'is_typevartuple',
|
| 46 |
+
'is_union',
|
| 47 |
+
'is_unpack',
|
| 48 |
+
]
|
| 49 |
+
|
| 50 |
+
if sys.version_info >= (3, 10):
|
| 51 |
+
from types import NoneType
|
| 52 |
+
else:
|
| 53 |
+
NoneType = type(None)
|
| 54 |
+
|
| 55 |
+
def is_annotated(obj: Any, /) -> bool:
|
| 56 |
+
"""
|
| 57 |
+
Return whether the argument is the [`Annotated`][typing.Annotated] [special form][].
|
| 58 |
+
|
| 59 |
+
```pycon
|
| 60 |
+
>>> is_annotated(Annotated)
|
| 61 |
+
True
|
| 62 |
+
>>> is_annotated(Annotated[int, ...])
|
| 63 |
+
False
|
| 64 |
+
```
|
| 65 |
+
"""
|
| 66 |
+
|
| 67 |
+
def is_any(obj: Any, /) -> bool:
|
| 68 |
+
"""
|
| 69 |
+
Return whether the argument is the [`Any`][typing.Any] [special form][].
|
| 70 |
+
|
| 71 |
+
```pycon
|
| 72 |
+
>>> is_any(Any)
|
| 73 |
+
True
|
| 74 |
+
```
|
| 75 |
+
"""
|
| 76 |
+
|
| 77 |
+
def is_classvar(obj: Any, /) -> bool:
|
| 78 |
+
"""
|
| 79 |
+
Return whether the argument is the [`ClassVar`][typing.ClassVar] [type qualifier][].
|
| 80 |
+
|
| 81 |
+
```pycon
|
| 82 |
+
>>> is_classvar(ClassVar)
|
| 83 |
+
True
|
| 84 |
+
>>> is_classvar(ClassVar[int])
|
| 85 |
+
>>> False
|
| 86 |
+
```
|
| 87 |
+
"""
|
| 88 |
+
|
| 89 |
+
def is_concatenate(obj: Any, /) -> bool:
|
| 90 |
+
"""
|
| 91 |
+
Return whether the argument is the [`Concatenate`][typing.Concatenate] [special form][].
|
| 92 |
+
|
| 93 |
+
```pycon
|
| 94 |
+
>>> is_concatenate(Concatenate)
|
| 95 |
+
True
|
| 96 |
+
>>> is_concatenate(Concatenate[int, P])
|
| 97 |
+
False
|
| 98 |
+
```
|
| 99 |
+
"""
|
| 100 |
+
|
| 101 |
+
def is_final(obj: Any, /) -> bool:
|
| 102 |
+
"""
|
| 103 |
+
Return whether the argument is the [`Final`][typing.Final] [type qualifier][].
|
| 104 |
+
|
| 105 |
+
```pycon
|
| 106 |
+
>>> is_final(Final)
|
| 107 |
+
True
|
| 108 |
+
>>> is_final(Final[int])
|
| 109 |
+
False
|
| 110 |
+
```
|
| 111 |
+
"""
|
| 112 |
+
|
| 113 |
+
def is_forwardref(obj: Any, /) -> TypeIs[ForwardRef]:
|
| 114 |
+
"""
|
| 115 |
+
Return whether the argument is an instance of [`ForwardRef`][typing.ForwardRef].
|
| 116 |
+
|
| 117 |
+
```pycon
|
| 118 |
+
>>> is_forwardref(ForwardRef('T'))
|
| 119 |
+
True
|
| 120 |
+
```
|
| 121 |
+
"""
|
| 122 |
+
|
| 123 |
+
def is_generic(obj: Any, /) -> bool:
|
| 124 |
+
"""
|
| 125 |
+
Return whether the argument is the [`Generic`][typing.Generic] [special form][].
|
| 126 |
+
|
| 127 |
+
```pycon
|
| 128 |
+
>>> is_generic(Generic)
|
| 129 |
+
True
|
| 130 |
+
>>> is_generic(Generic[T])
|
| 131 |
+
False
|
| 132 |
+
```
|
| 133 |
+
"""
|
| 134 |
+
|
| 135 |
+
def is_literal(obj: Any, /) -> bool:
|
| 136 |
+
"""
|
| 137 |
+
Return whether the argument is the [`Literal`][typing.Literal] [special form][].
|
| 138 |
+
|
| 139 |
+
```pycon
|
| 140 |
+
>>> is_literal(Literal)
|
| 141 |
+
True
|
| 142 |
+
>>> is_literal(Literal["a"])
|
| 143 |
+
False
|
| 144 |
+
```
|
| 145 |
+
"""
|
| 146 |
+
|
| 147 |
+
def is_paramspec(obj: Any, /) -> TypeIs[ParamSpec]:
|
| 148 |
+
"""
|
| 149 |
+
Return whether the argument is an instance of [`ParamSpec`][typing.ParamSpec].
|
| 150 |
+
|
| 151 |
+
```pycon
|
| 152 |
+
>>> P = ParamSpec('P')
|
| 153 |
+
>>> is_paramspec(P)
|
| 154 |
+
True
|
| 155 |
+
```
|
| 156 |
+
"""
|
| 157 |
+
|
| 158 |
+
def is_typevar(obj: Any, /) -> TypeIs[TypeVar]:
|
| 159 |
+
"""
|
| 160 |
+
Return whether the argument is an instance of [`TypeVar`][typing.TypeVar].
|
| 161 |
+
|
| 162 |
+
```pycon
|
| 163 |
+
>>> T = TypeVar('T')
|
| 164 |
+
>>> is_typevar(T)
|
| 165 |
+
True
|
| 166 |
+
```
|
| 167 |
+
"""
|
| 168 |
+
|
| 169 |
+
def is_typevartuple(obj: Any, /) -> TypeIs[TypeVarTuple]:
|
| 170 |
+
"""
|
| 171 |
+
Return whether the argument is an instance of [`TypeVarTuple`][typing.TypeVarTuple].
|
| 172 |
+
|
| 173 |
+
```pycon
|
| 174 |
+
>>> Ts = TypeVarTuple('Ts')
|
| 175 |
+
>>> is_typevartuple(Ts)
|
| 176 |
+
True
|
| 177 |
+
```
|
| 178 |
+
"""
|
| 179 |
+
|
| 180 |
+
def is_union(obj: Any, /) -> bool:
|
| 181 |
+
"""
|
| 182 |
+
Return whether the argument is the [`Union`][typing.Union] [special form][].
|
| 183 |
+
|
| 184 |
+
This function can also be used to check for the [`Optional`][typing.Optional] [special form][],
|
| 185 |
+
as at runtime, `Optional[int]` is equivalent to `Union[int, None]`.
|
| 186 |
+
|
| 187 |
+
```pycon
|
| 188 |
+
>>> is_union(Union)
|
| 189 |
+
True
|
| 190 |
+
>>> is_union(Union[int, str])
|
| 191 |
+
False
|
| 192 |
+
```
|
| 193 |
+
|
| 194 |
+
!!! warning
|
| 195 |
+
This does not check for unions using the [new syntax][types-union] (e.g. `int | str`).
|
| 196 |
+
"""
|
| 197 |
+
|
| 198 |
+
def is_namedtuple(obj: Any, /) -> bool:
|
| 199 |
+
"""Return whether the argument is a named tuple type.
|
| 200 |
+
|
| 201 |
+
This includes [`NamedTuple`][typing.NamedTuple] subclasses and classes created from the
|
| 202 |
+
[`collections.namedtuple`][] factory function.
|
| 203 |
+
|
| 204 |
+
```pycon
|
| 205 |
+
>>> class User(NamedTuple):
|
| 206 |
+
... name: str
|
| 207 |
+
...
|
| 208 |
+
>>> is_namedtuple(User)
|
| 209 |
+
True
|
| 210 |
+
>>> City = collections.namedtuple('City', [])
|
| 211 |
+
>>> is_namedtuple(City)
|
| 212 |
+
True
|
| 213 |
+
>>> is_namedtuple(NamedTuple)
|
| 214 |
+
False
|
| 215 |
+
```
|
| 216 |
+
"""
|
| 217 |
+
|
| 218 |
+
def is_literalstring(obj: Any, /) -> bool:
|
| 219 |
+
"""
|
| 220 |
+
Return whether the argument is the [`LiteralString`][typing.LiteralString] [special form][].
|
| 221 |
+
|
| 222 |
+
```pycon
|
| 223 |
+
>>> is_literalstring(LiteralString)
|
| 224 |
+
True
|
| 225 |
+
```
|
| 226 |
+
"""
|
| 227 |
+
|
| 228 |
+
def is_never(obj: Any, /) -> bool:
|
| 229 |
+
"""
|
| 230 |
+
Return whether the argument is the [`Never`][typing.Never] [special form][].
|
| 231 |
+
|
| 232 |
+
```pycon
|
| 233 |
+
>>> is_never(Never)
|
| 234 |
+
True
|
| 235 |
+
```
|
| 236 |
+
"""
|
| 237 |
+
|
| 238 |
+
def is_newtype(obj: Any, /) -> TypeIs[NewType]:
|
| 239 |
+
"""
|
| 240 |
+
Return whether the argument is a [`NewType`][typing.NewType].
|
| 241 |
+
|
| 242 |
+
```pycon
|
| 243 |
+
>>> UserId = NewType("UserId", int)
|
| 244 |
+
>>> is_newtype(UserId)
|
| 245 |
+
True
|
| 246 |
+
```
|
| 247 |
+
"""
|
| 248 |
+
|
| 249 |
+
def is_nodefault(obj: Any, /) -> bool:
|
| 250 |
+
"""
|
| 251 |
+
Return whether the argument is the [`NoDefault`][typing.NoDefault] sentinel object.
|
| 252 |
+
|
| 253 |
+
```pycon
|
| 254 |
+
>>> is_nodefault(NoDefault)
|
| 255 |
+
True
|
| 256 |
+
```
|
| 257 |
+
"""
|
| 258 |
+
|
| 259 |
+
def is_noextraitems(obj: Any, /) -> bool:
|
| 260 |
+
"""
|
| 261 |
+
Return whether the argument is the `NoExtraItems` sentinel object.
|
| 262 |
+
|
| 263 |
+
```pycon
|
| 264 |
+
>>> is_noextraitems(NoExtraItems)
|
| 265 |
+
True
|
| 266 |
+
```
|
| 267 |
+
"""
|
| 268 |
+
|
| 269 |
+
def is_noreturn(obj: Any, /) -> bool:
|
| 270 |
+
"""
|
| 271 |
+
Return whether the argument is the [`NoReturn`][typing.NoReturn] [special form][].
|
| 272 |
+
|
| 273 |
+
```pycon
|
| 274 |
+
>>> is_noreturn(NoReturn)
|
| 275 |
+
True
|
| 276 |
+
>>> is_noreturn(Never)
|
| 277 |
+
False
|
| 278 |
+
```
|
| 279 |
+
"""
|
| 280 |
+
|
| 281 |
+
def is_notrequired(obj: Any, /) -> bool:
|
| 282 |
+
"""
|
| 283 |
+
Return whether the argument is the [`NotRequired`][typing.NotRequired] [special form][].
|
| 284 |
+
|
| 285 |
+
```pycon
|
| 286 |
+
>>> is_notrequired(NotRequired)
|
| 287 |
+
True
|
| 288 |
+
```
|
| 289 |
+
"""
|
| 290 |
+
|
| 291 |
+
def is_paramspecargs(obj: Any, /) -> TypeIs[ParamSpecArgs]:
|
| 292 |
+
"""
|
| 293 |
+
Return whether the argument is an instance of [`ParamSpecArgs`][typing.ParamSpecArgs].
|
| 294 |
+
|
| 295 |
+
```pycon
|
| 296 |
+
>>> P = ParamSpec('P')
|
| 297 |
+
>>> is_paramspecargs(P.args)
|
| 298 |
+
True
|
| 299 |
+
```
|
| 300 |
+
"""
|
| 301 |
+
|
| 302 |
+
def is_paramspeckwargs(obj: Any, /) -> TypeIs[ParamSpecKwargs]:
|
| 303 |
+
"""
|
| 304 |
+
Return whether the argument is an instance of [`ParamSpecKwargs`][typing.ParamSpecKwargs].
|
| 305 |
+
|
| 306 |
+
```pycon
|
| 307 |
+
>>> P = ParamSpec('P')
|
| 308 |
+
>>> is_paramspeckwargs(P.kwargs)
|
| 309 |
+
True
|
| 310 |
+
```
|
| 311 |
+
"""
|
| 312 |
+
|
| 313 |
+
def is_readonly(obj: Any, /) -> bool:
|
| 314 |
+
"""
|
| 315 |
+
Return whether the argument is the [`ReadOnly`][typing.ReadOnly] [special form][].
|
| 316 |
+
|
| 317 |
+
```pycon
|
| 318 |
+
>>> is_readonly(ReadOnly)
|
| 319 |
+
True
|
| 320 |
+
```
|
| 321 |
+
"""
|
| 322 |
+
|
| 323 |
+
def is_required(obj: Any, /) -> bool:
|
| 324 |
+
"""
|
| 325 |
+
Return whether the argument is the [`Required`][typing.Required] [special form][].
|
| 326 |
+
|
| 327 |
+
```pycon
|
| 328 |
+
>>> is_required(Required)
|
| 329 |
+
True
|
| 330 |
+
```
|
| 331 |
+
"""
|
| 332 |
+
|
| 333 |
+
def is_self(obj: Any, /) -> bool:
|
| 334 |
+
"""
|
| 335 |
+
Return whether the argument is the [`Self`][typing.Self] [special form][].
|
| 336 |
+
|
| 337 |
+
```pycon
|
| 338 |
+
>>> is_self(Self)
|
| 339 |
+
True
|
| 340 |
+
```
|
| 341 |
+
"""
|
| 342 |
+
|
| 343 |
+
def is_typealias(obj: Any, /) -> bool:
|
| 344 |
+
"""
|
| 345 |
+
Return whether the argument is the [`TypeAlias`][typing.TypeAlias] [special form][].
|
| 346 |
+
|
| 347 |
+
```pycon
|
| 348 |
+
>>> is_typealias(TypeAlias)
|
| 349 |
+
True
|
| 350 |
+
```
|
| 351 |
+
"""
|
| 352 |
+
|
| 353 |
+
def is_typeguard(obj: Any, /) -> bool:
|
| 354 |
+
"""
|
| 355 |
+
Return whether the argument is the [`TypeGuard`][typing.TypeGuard] [special form][].
|
| 356 |
+
|
| 357 |
+
```pycon
|
| 358 |
+
>>> is_typeguard(TypeGuard)
|
| 359 |
+
True
|
| 360 |
+
```
|
| 361 |
+
"""
|
| 362 |
+
|
| 363 |
+
def is_typeis(obj: Any, /) -> bool:
|
| 364 |
+
"""
|
| 365 |
+
Return whether the argument is the [`TypeIs`][typing.TypeIs] [special form][].
|
| 366 |
+
|
| 367 |
+
```pycon
|
| 368 |
+
>>> is_typeis(TypeIs)
|
| 369 |
+
True
|
| 370 |
+
```
|
| 371 |
+
"""
|
| 372 |
+
|
| 373 |
+
def is_typealiastype(obj: Any, /) -> TypeIs[TypeAliasType]:
|
| 374 |
+
"""
|
| 375 |
+
Return whether the argument is a [`TypeAliasType`][typing.TypeAliasType] instance.
|
| 376 |
+
|
| 377 |
+
```pycon
|
| 378 |
+
>>> type MyInt = int
|
| 379 |
+
>>> is_typealiastype(MyInt)
|
| 380 |
+
True
|
| 381 |
+
>>> MyStr = TypeAliasType("MyStr", str)
|
| 382 |
+
>>> is_typealiastype(MyStr):
|
| 383 |
+
True
|
| 384 |
+
>>> type MyList[T] = list[T]
|
| 385 |
+
>>> is_typealiastype(MyList[int])
|
| 386 |
+
False
|
| 387 |
+
```
|
| 388 |
+
"""
|
| 389 |
+
|
| 390 |
+
def is_unpack(obj: Any, /) -> bool:
|
| 391 |
+
"""
|
| 392 |
+
Return whether the argument is the [`Unpack`][typing.Unpack] [special form][].
|
| 393 |
+
|
| 394 |
+
```pycon
|
| 395 |
+
>>> is_unpack(Unpack)
|
| 396 |
+
True
|
| 397 |
+
>>> is_unpack(Unpack[Ts])
|
| 398 |
+
False
|
| 399 |
+
```
|
| 400 |
+
"""
|
| 401 |
+
|
| 402 |
+
def is_deprecated(obj: Any, /) -> TypeIs[deprecated]:
|
| 403 |
+
"""
|
| 404 |
+
Return whether the argument is a [`deprecated`][warnings.deprecated] instance.
|
| 405 |
+
|
| 406 |
+
This also includes the [`typing_extensions` backport][typing_extensions.deprecated].
|
| 407 |
+
|
| 408 |
+
```pycon
|
| 409 |
+
>>> is_deprecated(warnings.deprecated('message'))
|
| 410 |
+
True
|
| 411 |
+
>>> is_deprecated(typing_extensions.deprecated('deprecated'))
|
| 412 |
+
True
|
| 413 |
+
```
|
| 414 |
+
"""
|
| 415 |
+
|
| 416 |
+
DEPRECATED_ALIASES: Final[dict[Any, type[Any]]]
|
| 417 |
+
"""A mapping between the deprecated typing aliases to their replacement, as per [PEP 585](https://peps.python.org/pep-0585/)."""
|
venv/lib/python3.12/site-packages/websockets-16.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
venv/lib/python3.12/site-packages/websockets-16.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,179 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.4
|
| 2 |
+
Name: websockets
|
| 3 |
+
Version: 16.0
|
| 4 |
+
Summary: An implementation of the WebSocket Protocol (RFC 6455 & 7692)
|
| 5 |
+
Author-email: Aymeric Augustin <aymeric.augustin@m4x.org>
|
| 6 |
+
License-Expression: BSD-3-Clause
|
| 7 |
+
Project-URL: Homepage, https://github.com/python-websockets/websockets
|
| 8 |
+
Project-URL: Changelog, https://websockets.readthedocs.io/en/stable/project/changelog.html
|
| 9 |
+
Project-URL: Documentation, https://websockets.readthedocs.io/
|
| 10 |
+
Project-URL: Funding, https://tidelift.com/subscription/pkg/pypi-websockets?utm_source=pypi-websockets&utm_medium=referral&utm_campaign=readme
|
| 11 |
+
Project-URL: Tracker, https://github.com/python-websockets/websockets/issues
|
| 12 |
+
Keywords: WebSocket
|
| 13 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 14 |
+
Classifier: Environment :: Web Environment
|
| 15 |
+
Classifier: Intended Audience :: Developers
|
| 16 |
+
Classifier: Operating System :: OS Independent
|
| 17 |
+
Classifier: Programming Language :: Python
|
| 18 |
+
Classifier: Programming Language :: Python :: 3
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.14
|
| 24 |
+
Requires-Python: >=3.10
|
| 25 |
+
Description-Content-Type: text/x-rst
|
| 26 |
+
License-File: LICENSE
|
| 27 |
+
Dynamic: description
|
| 28 |
+
Dynamic: description-content-type
|
| 29 |
+
Dynamic: license-file
|
| 30 |
+
|
| 31 |
+
.. image:: logo/horizontal.svg
|
| 32 |
+
:width: 480px
|
| 33 |
+
:alt: websockets
|
| 34 |
+
|
| 35 |
+
|licence| |version| |pyversions| |tests| |docs| |openssf|
|
| 36 |
+
|
| 37 |
+
.. |licence| image:: https://img.shields.io/pypi/l/websockets.svg
|
| 38 |
+
:target: https://pypi.python.org/pypi/websockets
|
| 39 |
+
|
| 40 |
+
.. |version| image:: https://img.shields.io/pypi/v/websockets.svg
|
| 41 |
+
:target: https://pypi.python.org/pypi/websockets
|
| 42 |
+
|
| 43 |
+
.. |pyversions| image:: https://img.shields.io/pypi/pyversions/websockets.svg
|
| 44 |
+
:target: https://pypi.python.org/pypi/websockets
|
| 45 |
+
|
| 46 |
+
.. |tests| image:: https://img.shields.io/github/checks-status/python-websockets/websockets/main?label=tests
|
| 47 |
+
:target: https://github.com/python-websockets/websockets/actions/workflows/tests.yml
|
| 48 |
+
|
| 49 |
+
.. |docs| image:: https://img.shields.io/readthedocs/websockets.svg
|
| 50 |
+
:target: https://websockets.readthedocs.io/
|
| 51 |
+
|
| 52 |
+
.. |openssf| image:: https://bestpractices.coreinfrastructure.org/projects/6475/badge
|
| 53 |
+
:target: https://bestpractices.coreinfrastructure.org/projects/6475
|
| 54 |
+
|
| 55 |
+
What is ``websockets``?
|
| 56 |
+
-----------------------
|
| 57 |
+
|
| 58 |
+
websockets is a library for building WebSocket_ servers and clients in Python
|
| 59 |
+
with a focus on correctness, simplicity, robustness, and performance.
|
| 60 |
+
|
| 61 |
+
.. _WebSocket: https://developer.mozilla.org/en-US/docs/Web/API/WebSockets_API
|
| 62 |
+
|
| 63 |
+
Built on top of ``asyncio``, Python's standard asynchronous I/O framework, the
|
| 64 |
+
default implementation provides an elegant coroutine-based API.
|
| 65 |
+
|
| 66 |
+
An implementation on top of ``threading`` and a Sans-I/O implementation are also
|
| 67 |
+
available.
|
| 68 |
+
|
| 69 |
+
`Documentation is available on Read the Docs. <https://websockets.readthedocs.io/>`_
|
| 70 |
+
|
| 71 |
+
.. copy-pasted because GitHub doesn't support the include directive
|
| 72 |
+
|
| 73 |
+
Here's an echo server with the ``asyncio`` API:
|
| 74 |
+
|
| 75 |
+
.. code:: python
|
| 76 |
+
|
| 77 |
+
#!/usr/bin/env python
|
| 78 |
+
|
| 79 |
+
import asyncio
|
| 80 |
+
from websockets.asyncio.server import serve
|
| 81 |
+
|
| 82 |
+
async def echo(websocket):
|
| 83 |
+
async for message in websocket:
|
| 84 |
+
await websocket.send(message)
|
| 85 |
+
|
| 86 |
+
async def main():
|
| 87 |
+
async with serve(echo, "localhost", 8765) as server:
|
| 88 |
+
await server.serve_forever()
|
| 89 |
+
|
| 90 |
+
asyncio.run(main())
|
| 91 |
+
|
| 92 |
+
Here's how a client sends and receives messages with the ``threading`` API:
|
| 93 |
+
|
| 94 |
+
.. code:: python
|
| 95 |
+
|
| 96 |
+
#!/usr/bin/env python
|
| 97 |
+
|
| 98 |
+
from websockets.sync.client import connect
|
| 99 |
+
|
| 100 |
+
def hello():
|
| 101 |
+
with connect("ws://localhost:8765") as websocket:
|
| 102 |
+
websocket.send("Hello world!")
|
| 103 |
+
message = websocket.recv()
|
| 104 |
+
print(f"Received: {message}")
|
| 105 |
+
|
| 106 |
+
hello()
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
Does that look good?
|
| 110 |
+
|
| 111 |
+
`Get started with the tutorial! <https://websockets.readthedocs.io/en/stable/intro/index.html>`_
|
| 112 |
+
|
| 113 |
+
Why should I use ``websockets``?
|
| 114 |
+
--------------------------------
|
| 115 |
+
|
| 116 |
+
The development of ``websockets`` is shaped by four principles:
|
| 117 |
+
|
| 118 |
+
1. **Correctness**: ``websockets`` is heavily tested for compliance with
|
| 119 |
+
:rfc:`6455`. Continuous integration fails under 100% branch coverage.
|
| 120 |
+
|
| 121 |
+
2. **Simplicity**: all you need to understand is ``msg = await ws.recv()`` and
|
| 122 |
+
``await ws.send(msg)``. ``websockets`` takes care of managing connections
|
| 123 |
+
so you can focus on your application.
|
| 124 |
+
|
| 125 |
+
3. **Robustness**: ``websockets`` is built for production. For example, it was
|
| 126 |
+
the only library to `handle backpressure correctly`_ before the issue
|
| 127 |
+
became widely known in the Python community.
|
| 128 |
+
|
| 129 |
+
4. **Performance**: memory usage is optimized and configurable. A C extension
|
| 130 |
+
accelerates expensive operations. It's pre-compiled for Linux, macOS and
|
| 131 |
+
Windows and packaged in the wheel format for each system and Python version.
|
| 132 |
+
|
| 133 |
+
Documentation is a first class concern in the project. Head over to `Read the
|
| 134 |
+
Docs`_ and see for yourself.
|
| 135 |
+
|
| 136 |
+
.. _Read the Docs: https://websockets.readthedocs.io/
|
| 137 |
+
.. _handle backpressure correctly: https://vorpus.org/blog/some-thoughts-on-asynchronous-api-design-in-a-post-asyncawait-world/#websocket-servers
|
| 138 |
+
|
| 139 |
+
Why shouldn't I use ``websockets``?
|
| 140 |
+
-----------------------------------
|
| 141 |
+
|
| 142 |
+
* If you prefer callbacks over coroutines: ``websockets`` was created to
|
| 143 |
+
provide the best coroutine-based API to manage WebSocket connections in
|
| 144 |
+
Python. Pick another library for a callback-based API.
|
| 145 |
+
|
| 146 |
+
* If you're looking for a mixed HTTP / WebSocket library: ``websockets`` aims
|
| 147 |
+
at being an excellent implementation of :rfc:`6455`: The WebSocket Protocol
|
| 148 |
+
and :rfc:`7692`: Compression Extensions for WebSocket. Its support for HTTP
|
| 149 |
+
is minimal — just enough for an HTTP health check.
|
| 150 |
+
|
| 151 |
+
If you want to do both in the same server, look at HTTP + WebSocket servers
|
| 152 |
+
that build on top of ``websockets`` to support WebSocket connections, like
|
| 153 |
+
uvicorn_ or Sanic_.
|
| 154 |
+
|
| 155 |
+
.. _uvicorn: https://www.uvicorn.org/
|
| 156 |
+
.. _Sanic: https://sanic.dev/en/
|
| 157 |
+
|
| 158 |
+
What else?
|
| 159 |
+
----------
|
| 160 |
+
|
| 161 |
+
Bug reports, patches and suggestions are welcome!
|
| 162 |
+
|
| 163 |
+
To report a security vulnerability, please use the `Tidelift security
|
| 164 |
+
contact`_. Tidelift will coordinate the fix and disclosure.
|
| 165 |
+
|
| 166 |
+
.. _Tidelift security contact: https://tidelift.com/security
|
| 167 |
+
|
| 168 |
+
For anything else, please open an issue_ or send a `pull request`_.
|
| 169 |
+
|
| 170 |
+
.. _issue: https://github.com/python-websockets/websockets/issues/new
|
| 171 |
+
.. _pull request: https://github.com/python-websockets/websockets/compare/
|
| 172 |
+
|
| 173 |
+
Participants must uphold the `Contributor Covenant code of conduct`_.
|
| 174 |
+
|
| 175 |
+
.. _Contributor Covenant code of conduct: https://github.com/python-websockets/websockets/blob/main/CODE_OF_CONDUCT.md
|
| 176 |
+
|
| 177 |
+
``websockets`` is released under the `BSD license`_.
|
| 178 |
+
|
| 179 |
+
.. _BSD license: https://github.com/python-websockets/websockets/blob/main/LICENSE
|
venv/lib/python3.12/site-packages/websockets-16.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../../bin/websockets,sha256=D_3MTHmaV-G5O6izycwRWvfNbD2MzYGNaRjlCgvBImo,242
|
| 2 |
+
websockets-16.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 3 |
+
websockets-16.0.dist-info/METADATA,sha256=JcDvWo8DVSw5uoDAFbk9N8fJXuRJvnrcLXVBFyBjwN8,6799
|
| 4 |
+
websockets-16.0.dist-info/RECORD,,
|
| 5 |
+
websockets-16.0.dist-info/WHEEL,sha256=mX4U4odf6w47aVjwZUmTYd1MF9BbrhVLKlaWSvZwHEk,186
|
| 6 |
+
websockets-16.0.dist-info/entry_points.txt,sha256=Dnhn4dm5EsI4ZMAsHldGF6CwBXZrGXnR7cnK2-XR7zY,51
|
| 7 |
+
websockets-16.0.dist-info/licenses/LICENSE,sha256=PWoMBQ2L7FL6utUC5F-yW9ArytvXDeo01Ee2oP9Obag,1514
|
| 8 |
+
websockets-16.0.dist-info/top_level.txt,sha256=CMpdKklxKsvZgCgyltxUWOHibZXZ1uYIVpca9xsQ8Hk,11
|
| 9 |
+
websockets/__init__.py,sha256=AC2Hq92uSc_WOo9_xvITpGshJ7Dy0Md5m2_ywsdSt_Y,7058
|
| 10 |
+
websockets/__main__.py,sha256=wu5N2wk8mvBgyvr2ghmQf4prezAe0_i-p123VVreyYc,62
|
| 11 |
+
websockets/__pycache__/__init__.cpython-312.pyc,,
|
| 12 |
+
websockets/__pycache__/__main__.cpython-312.pyc,,
|
| 13 |
+
websockets/__pycache__/auth.cpython-312.pyc,,
|
| 14 |
+
websockets/__pycache__/cli.cpython-312.pyc,,
|
| 15 |
+
websockets/__pycache__/client.cpython-312.pyc,,
|
| 16 |
+
websockets/__pycache__/connection.cpython-312.pyc,,
|
| 17 |
+
websockets/__pycache__/datastructures.cpython-312.pyc,,
|
| 18 |
+
websockets/__pycache__/exceptions.cpython-312.pyc,,
|
| 19 |
+
websockets/__pycache__/frames.cpython-312.pyc,,
|
| 20 |
+
websockets/__pycache__/headers.cpython-312.pyc,,
|
| 21 |
+
websockets/__pycache__/http.cpython-312.pyc,,
|
| 22 |
+
websockets/__pycache__/http11.cpython-312.pyc,,
|
| 23 |
+
websockets/__pycache__/imports.cpython-312.pyc,,
|
| 24 |
+
websockets/__pycache__/protocol.cpython-312.pyc,,
|
| 25 |
+
websockets/__pycache__/proxy.cpython-312.pyc,,
|
| 26 |
+
websockets/__pycache__/server.cpython-312.pyc,,
|
| 27 |
+
websockets/__pycache__/streams.cpython-312.pyc,,
|
| 28 |
+
websockets/__pycache__/typing.cpython-312.pyc,,
|
| 29 |
+
websockets/__pycache__/uri.cpython-312.pyc,,
|
| 30 |
+
websockets/__pycache__/utils.cpython-312.pyc,,
|
| 31 |
+
websockets/__pycache__/version.cpython-312.pyc,,
|
| 32 |
+
websockets/asyncio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 33 |
+
websockets/asyncio/__pycache__/__init__.cpython-312.pyc,,
|
| 34 |
+
websockets/asyncio/__pycache__/async_timeout.cpython-312.pyc,,
|
| 35 |
+
websockets/asyncio/__pycache__/client.cpython-312.pyc,,
|
| 36 |
+
websockets/asyncio/__pycache__/compatibility.cpython-312.pyc,,
|
| 37 |
+
websockets/asyncio/__pycache__/connection.cpython-312.pyc,,
|
| 38 |
+
websockets/asyncio/__pycache__/messages.cpython-312.pyc,,
|
| 39 |
+
websockets/asyncio/__pycache__/router.cpython-312.pyc,,
|
| 40 |
+
websockets/asyncio/__pycache__/server.cpython-312.pyc,,
|
| 41 |
+
websockets/asyncio/async_timeout.py,sha256=N-6Mubyiaoh66PAXGvCzhgxCM-7V2XiRnH32Xi6J6TE,8971
|
| 42 |
+
websockets/asyncio/client.py,sha256=e4xlgtzb3v29M2vN-UDiyoUtThg--d5GqKg3lt2pDdE,30850
|
| 43 |
+
websockets/asyncio/compatibility.py,sha256=gkenDDhzNbm6_iXV5Edvbvp6uHZYdrTvGNjt8P_JtyQ,786
|
| 44 |
+
websockets/asyncio/connection.py,sha256=87RdVURijJk8V-ShWAWfTEyhW5Z1YUXKV8ezUzxt5L0,49099
|
| 45 |
+
websockets/asyncio/messages.py,sha256=u2M5WKY9xPyw8G3nKoXfdO5K41hrTnf4MdizVHzgdM4,11129
|
| 46 |
+
websockets/asyncio/router.py,sha256=S-69vszK-SqUCcZbXXPOnux-eH2fTHYC2JNh7tOtmmA,7520
|
| 47 |
+
websockets/asyncio/server.py,sha256=wQ9oBc0WBOIzbXKDYJ8UhXRTeoXrSfLu6CWCrUl-vck,37941
|
| 48 |
+
websockets/auth.py,sha256=U_Jwmn59ZRQ6EecpOvMizQCG_ZbAvgUf1ik7haZRC3c,568
|
| 49 |
+
websockets/cli.py,sha256=YnegH59z93JxSVIGiXiWhR3ktgI6k1_pf_BRLanxKrQ,5336
|
| 50 |
+
websockets/client.py,sha256=fljI5k5oQ-Sfm53MCoyTlr2jFtOOIuO13H9bbtpBPes,13789
|
| 51 |
+
websockets/connection.py,sha256=OLiMVkNd25_86sB8Q7CrCwBoXy9nA0OCgdgLRA8WUR8,323
|
| 52 |
+
websockets/datastructures.py,sha256=Uq2CpjmXak9_pPWcOqh36rzJMo8eCi2lVPTFWDvK5sA,5518
|
| 53 |
+
websockets/exceptions.py,sha256=bgaMdqQGGZosAEULeCB30XW2YnwomWa3c8YOrEfeOoY,12859
|
| 54 |
+
websockets/extensions/__init__.py,sha256=QkZsxaJVllVSp1uhdD5uPGibdbx_091GrVVfS5LXcpw,98
|
| 55 |
+
websockets/extensions/__pycache__/__init__.cpython-312.pyc,,
|
| 56 |
+
websockets/extensions/__pycache__/base.cpython-312.pyc,,
|
| 57 |
+
websockets/extensions/__pycache__/permessage_deflate.cpython-312.pyc,,
|
| 58 |
+
websockets/extensions/base.py,sha256=JNfyk543C7VuPH0QOobiqKoGrzjJILje6sz5ILvOPl4,2903
|
| 59 |
+
websockets/extensions/permessage_deflate.py,sha256=AkuhkAKFo5lqJQMXnckbSs9b2KBBrOFsE1DHIcbLL3k,25770
|
| 60 |
+
websockets/frames.py,sha256=5IK4GZpl8ukr0bZ_UA_jjjztK09yYQAl9m5NVmGLiK0,12889
|
| 61 |
+
websockets/headers.py,sha256=yQnPljVZwV1_V-pOSRKNLG_u827wFC1h72cciojcQ8M,16046
|
| 62 |
+
websockets/http.py,sha256=T1tNLmbkFCneXQ6qepBmsVVDXyP9i500IVzTJTeBMR4,659
|
| 63 |
+
websockets/http11.py,sha256=T8ai5BcBGkV0n9It63oDeNpmtQMyg8Cpav5rf_yT0r4,15619
|
| 64 |
+
websockets/imports.py,sha256=T_B9TUmHoceKMQ-PNphdQQAH2XdxAxwSQNeQEgqILkE,2795
|
| 65 |
+
websockets/legacy/__init__.py,sha256=wQ5zRIENGUS_5eKNAX9CRE7x1TwKapKimrQFFWN9Sxs,276
|
| 66 |
+
websockets/legacy/__pycache__/__init__.cpython-312.pyc,,
|
| 67 |
+
websockets/legacy/__pycache__/auth.cpython-312.pyc,,
|
| 68 |
+
websockets/legacy/__pycache__/client.cpython-312.pyc,,
|
| 69 |
+
websockets/legacy/__pycache__/exceptions.cpython-312.pyc,,
|
| 70 |
+
websockets/legacy/__pycache__/framing.cpython-312.pyc,,
|
| 71 |
+
websockets/legacy/__pycache__/handshake.cpython-312.pyc,,
|
| 72 |
+
websockets/legacy/__pycache__/http.cpython-312.pyc,,
|
| 73 |
+
websockets/legacy/__pycache__/protocol.cpython-312.pyc,,
|
| 74 |
+
websockets/legacy/__pycache__/server.cpython-312.pyc,,
|
| 75 |
+
websockets/legacy/auth.py,sha256=DcQcCSeVeP93JcH8vFWE0HIJL-X-f23LZ0DsJpav1So,6531
|
| 76 |
+
websockets/legacy/client.py,sha256=fV2mbiU9rciXhJfAEKVSm0GztJDUbDpRQ-K5EMbkuQ0,26815
|
| 77 |
+
websockets/legacy/exceptions.py,sha256=ViEjpoT09fzx_Zqf0aNGDVtRDNjXaOw0gdCta3LkjFc,1924
|
| 78 |
+
websockets/legacy/framing.py,sha256=r9P1wiXv_1XuAVQw8SOPkuE9d4eZ0r_JowAkz9-WV4w,6366
|
| 79 |
+
websockets/legacy/handshake.py,sha256=2Nzr5AN2xvDC5EdNP-kB3lOcrAaUNlYuj_-hr_jv7pM,5285
|
| 80 |
+
websockets/legacy/http.py,sha256=cOCQmDWhIKQmm8UWGXPW7CDZg03wjogCsb0LP9oetNQ,7061
|
| 81 |
+
websockets/legacy/protocol.py,sha256=ajtVXDb-lEm9BN0NF3iEaTI_b1q5fBCKTB9wvUoGOxY,63632
|
| 82 |
+
websockets/legacy/server.py,sha256=7mwY-yD0ljNF93oPYumTWD7OIVbCWtaEOw1FFJBhIAM,45251
|
| 83 |
+
websockets/protocol.py,sha256=vTqjPIg2HmO-bSxsczuEmWMxPTxPXU1hmVUjqnahV44,27247
|
| 84 |
+
websockets/proxy.py,sha256=oFrbEYtasYWv-WDcniObD9nBR5Q5qkHpyCVLngx7WMQ,4969
|
| 85 |
+
websockets/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 86 |
+
websockets/server.py,sha256=E4SWBA8WZRmAOpsUm-oCqacBGZre9e0iDmDIrfpV21Q,21790
|
| 87 |
+
websockets/speedups.c,sha256=u_dncR4M38EX6He_fzb1TY6D3Hke67ZpoHLLhZZ0hvQ,5920
|
| 88 |
+
websockets/speedups.cpython-312-x86_64-linux-gnu.so,sha256=F8FiVerlQi_Z0YSsuY_ASEHvWcddXkyyRa3ylkV80B0,38048
|
| 89 |
+
websockets/speedups.pyi,sha256=unjvBNg-uW4c7z-9OW4WiSzZk_QH2bLEcjYAMuoSgBI,102
|
| 90 |
+
websockets/streams.py,sha256=pXqga7ttjuF6lChWYiWLSfUlt3FCaQpEX1ae_jvcCeQ,4071
|
| 91 |
+
websockets/sync/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 92 |
+
websockets/sync/__pycache__/__init__.cpython-312.pyc,,
|
| 93 |
+
websockets/sync/__pycache__/client.cpython-312.pyc,,
|
| 94 |
+
websockets/sync/__pycache__/connection.cpython-312.pyc,,
|
| 95 |
+
websockets/sync/__pycache__/messages.cpython-312.pyc,,
|
| 96 |
+
websockets/sync/__pycache__/router.cpython-312.pyc,,
|
| 97 |
+
websockets/sync/__pycache__/server.cpython-312.pyc,,
|
| 98 |
+
websockets/sync/__pycache__/utils.cpython-312.pyc,,
|
| 99 |
+
websockets/sync/client.py,sha256=_2Erytw1f3f9O_u2jLtS1oNV4HsHUi_h3lGvT9ZEaDQ,22108
|
| 100 |
+
websockets/sync/connection.py,sha256=1pJYEMRHLWIN7538vJcIeFVnvSXVrD0n1xrfX7wDNSc,41868
|
| 101 |
+
websockets/sync/messages.py,sha256=yZV1zhY07ZD0vRF5b1yDa7ug0rbA5UDOCCCQmWwAcds,12858
|
| 102 |
+
websockets/sync/router.py,sha256=BqKSAKNZYtRWiOxol9qYeyfgyXRrMNJ6FrTTZLNcXMg,7172
|
| 103 |
+
websockets/sync/server.py,sha256=s07HNK_2s1kLN62Uqc77uvND0z7C0YTXGePsCiBtXaE,27655
|
| 104 |
+
websockets/sync/utils.py,sha256=TtW-ncYFvJmiSW2gO86ngE2BVsnnBdL-4H88kWNDYbg,1107
|
| 105 |
+
websockets/typing.py,sha256=A6xh4m65pRzKAbuOs0kFuGhL4DWIIko-ppS4wvJVc0Q,1946
|
| 106 |
+
websockets/uri.py,sha256=2fFMw-AbKJ5HVHNCuw1Rx1MnkCkNWRpogxWhhM30EU4,3125
|
| 107 |
+
websockets/utils.py,sha256=AwhS4UmlbKv7meAaR7WNbUqD5JFoStOP1bAyo9sRMus,1197
|
| 108 |
+
websockets/version.py,sha256=IhaztWxysdY-pd-0nOubnnPduvySSvdoBwrQdJKtZ2g,3202
|
venv/lib/python3.12/site-packages/websockets-16.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (80.9.0)
|
| 3 |
+
Root-Is-Purelib: false
|
| 4 |
+
Tag: cp312-cp312-manylinux_2_5_x86_64
|
| 5 |
+
Tag: cp312-cp312-manylinux1_x86_64
|
| 6 |
+
Tag: cp312-cp312-manylinux_2_28_x86_64
|
| 7 |
+
|
venv/lib/python3.12/site-packages/websockets-16.0.dist-info/entry_points.txt
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[console_scripts]
|
| 2 |
+
websockets = websockets.cli:main
|
venv/lib/python3.12/site-packages/websockets-16.0.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
websockets
|