koichi12 commited on
Commit
ed5792f
·
verified ·
1 Parent(s): 15cdb41

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. .venv/lib/python3.11/site-packages/openai-1.61.1.dist-info/INSTALLER +1 -0
  3. .venv/lib/python3.11/site-packages/openai-1.61.1.dist-info/RECORD +817 -0
  4. .venv/lib/python3.11/site-packages/openai-1.61.1.dist-info/WHEEL +4 -0
  5. .venv/lib/python3.11/site-packages/openai-1.61.1.dist-info/entry_points.txt +2 -0
  6. .venv/lib/python3.11/site-packages/watchfiles/_rust_notify.cpython-311-x86_64-linux-gnu.so +3 -0
  7. .venv/lib/python3.11/site-packages/websockets/__init__.py +214 -0
  8. .venv/lib/python3.11/site-packages/websockets/__pycache__/__init__.cpython-311.pyc +0 -0
  9. .venv/lib/python3.11/site-packages/websockets/__pycache__/__main__.cpython-311.pyc +0 -0
  10. .venv/lib/python3.11/site-packages/websockets/__pycache__/auth.cpython-311.pyc +0 -0
  11. .venv/lib/python3.11/site-packages/websockets/__pycache__/client.cpython-311.pyc +0 -0
  12. .venv/lib/python3.11/site-packages/websockets/__pycache__/connection.cpython-311.pyc +0 -0
  13. .venv/lib/python3.11/site-packages/websockets/__pycache__/datastructures.cpython-311.pyc +0 -0
  14. .venv/lib/python3.11/site-packages/websockets/__pycache__/exceptions.cpython-311.pyc +0 -0
  15. .venv/lib/python3.11/site-packages/websockets/__pycache__/frames.cpython-311.pyc +0 -0
  16. .venv/lib/python3.11/site-packages/websockets/__pycache__/headers.cpython-311.pyc +0 -0
  17. .venv/lib/python3.11/site-packages/websockets/__pycache__/http.cpython-311.pyc +0 -0
  18. .venv/lib/python3.11/site-packages/websockets/__pycache__/http11.cpython-311.pyc +0 -0
  19. .venv/lib/python3.11/site-packages/websockets/__pycache__/imports.cpython-311.pyc +0 -0
  20. .venv/lib/python3.11/site-packages/websockets/__pycache__/protocol.cpython-311.pyc +0 -0
  21. .venv/lib/python3.11/site-packages/websockets/__pycache__/server.cpython-311.pyc +0 -0
  22. .venv/lib/python3.11/site-packages/websockets/__pycache__/streams.cpython-311.pyc +0 -0
  23. .venv/lib/python3.11/site-packages/websockets/__pycache__/typing.cpython-311.pyc +0 -0
  24. .venv/lib/python3.11/site-packages/websockets/__pycache__/uri.cpython-311.pyc +0 -0
  25. .venv/lib/python3.11/site-packages/websockets/__pycache__/utils.cpython-311.pyc +0 -0
  26. .venv/lib/python3.11/site-packages/websockets/__pycache__/version.cpython-311.pyc +0 -0
  27. .venv/lib/python3.11/site-packages/websockets/asyncio/__init__.py +0 -0
  28. .venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/__init__.cpython-311.pyc +0 -0
  29. .venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/async_timeout.cpython-311.pyc +0 -0
  30. .venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/client.cpython-311.pyc +0 -0
  31. .venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/compatibility.cpython-311.pyc +0 -0
  32. .venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/connection.cpython-311.pyc +0 -0
  33. .venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/messages.cpython-311.pyc +0 -0
  34. .venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/server.cpython-311.pyc +0 -0
  35. .venv/lib/python3.11/site-packages/websockets/asyncio/async_timeout.py +282 -0
  36. .venv/lib/python3.11/site-packages/websockets/asyncio/client.py +567 -0
  37. .venv/lib/python3.11/site-packages/websockets/asyncio/compatibility.py +30 -0
  38. .venv/lib/python3.11/site-packages/websockets/asyncio/connection.py +1214 -0
  39. .venv/lib/python3.11/site-packages/websockets/asyncio/messages.py +296 -0
  40. .venv/lib/python3.11/site-packages/websockets/asyncio/server.py +978 -0
  41. .venv/lib/python3.11/site-packages/websockets/extensions/__init__.py +4 -0
  42. .venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/__init__.cpython-311.pyc +0 -0
  43. .venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/base.cpython-311.pyc +0 -0
  44. .venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/permessage_deflate.cpython-311.pyc +0 -0
  45. .venv/lib/python3.11/site-packages/websockets/extensions/base.py +123 -0
  46. .venv/lib/python3.11/site-packages/websockets/extensions/permessage_deflate.py +697 -0
  47. .venv/lib/python3.11/site-packages/websockets/legacy/__init__.py +11 -0
  48. .venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/__init__.cpython-311.pyc +0 -0
  49. .venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/auth.cpython-311.pyc +0 -0
  50. .venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/client.cpython-311.pyc +0 -0
.gitattributes CHANGED
@@ -208,3 +208,4 @@ tuning-competition-baseline/.venv/lib/python3.11/site-packages/torch/_inductor/_
208
  .venv/lib/python3.11/site-packages/__pycache__/typing_extensions.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
209
  .venv/lib/python3.11/site-packages/__pycache__/pynvml.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
210
  .venv/lib/python3.11/site-packages/rpds/rpds.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
 
 
208
  .venv/lib/python3.11/site-packages/__pycache__/typing_extensions.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
209
  .venv/lib/python3.11/site-packages/__pycache__/pynvml.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
210
  .venv/lib/python3.11/site-packages/rpds/rpds.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
211
+ .venv/lib/python3.11/site-packages/watchfiles/_rust_notify.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
.venv/lib/python3.11/site-packages/openai-1.61.1.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
.venv/lib/python3.11/site-packages/openai-1.61.1.dist-info/RECORD ADDED
@@ -0,0 +1,817 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ../../../bin/openai,sha256=gtBl2bXXpyuWMzBGAvlsfQz2tciM9xZEoJCIm-7tb68,227
2
+ openai-1.61.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
3
+ openai-1.61.1.dist-info/METADATA,sha256=rODiteTbY_VyL7NtyJ4RG4zqAFnrg9L0jjEjzDuh7u0,27598
4
+ openai-1.61.1.dist-info/RECORD,,
5
+ openai-1.61.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
6
+ openai-1.61.1.dist-info/entry_points.txt,sha256=kAYhQEmziJwsKs5raYAIOvJ2LWmbz5dulEXOzsY71ro,43
7
+ openai-1.61.1.dist-info/licenses/LICENSE,sha256=1xHtN7sZrnJJr40JO4_G6nWP01VLkqxhUAwa08wOP7k,11336
8
+ openai/__init__.py,sha256=UZfk6nnPAGguY3XX7QQfqa4kZjzvFEGp-TUyxrBcTlI,10296
9
+ openai/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30
10
+ openai/__pycache__/__init__.cpython-311.pyc,,
11
+ openai/__pycache__/__main__.cpython-311.pyc,,
12
+ openai/__pycache__/_base_client.cpython-311.pyc,,
13
+ openai/__pycache__/_client.cpython-311.pyc,,
14
+ openai/__pycache__/_compat.cpython-311.pyc,,
15
+ openai/__pycache__/_constants.cpython-311.pyc,,
16
+ openai/__pycache__/_exceptions.cpython-311.pyc,,
17
+ openai/__pycache__/_files.cpython-311.pyc,,
18
+ openai/__pycache__/_legacy_response.cpython-311.pyc,,
19
+ openai/__pycache__/_models.cpython-311.pyc,,
20
+ openai/__pycache__/_module_client.cpython-311.pyc,,
21
+ openai/__pycache__/_qs.cpython-311.pyc,,
22
+ openai/__pycache__/_resource.cpython-311.pyc,,
23
+ openai/__pycache__/_response.cpython-311.pyc,,
24
+ openai/__pycache__/_streaming.cpython-311.pyc,,
25
+ openai/__pycache__/_types.cpython-311.pyc,,
26
+ openai/__pycache__/_version.cpython-311.pyc,,
27
+ openai/__pycache__/pagination.cpython-311.pyc,,
28
+ openai/__pycache__/version.cpython-311.pyc,,
29
+ openai/_base_client.py,sha256=dp8TJR8ZBuS0RbjnNKVkZC--tbstwz33Q_P_UB7dKCE,69238
30
+ openai/_client.py,sha256=FJRGkrdpHAFV2TOs04tO5uyKCA-cudlk4BlvCX3KI3Q,23355
31
+ openai/_compat.py,sha256=Mtzi28qOK99ZBPcGcQqdjoUFk2MzzpqjaafjuwQ4NO0,6982
32
+ openai/_constants.py,sha256=WmCwgT4tGmFsSrltb26f3bM8ftUyFYkzh32Ny5yl-So,467
33
+ openai/_exceptions.py,sha256=2BEuXwqce9z7X6lWLLXRqg1vOay_q-OdLz9lcj6Pluw,4798
34
+ openai/_extras/__init__.py,sha256=LZbJLZ7aFHRcI7uiY4-wFQTdMp-BF6FER1QMhKVFkWk,107
35
+ openai/_extras/__pycache__/__init__.cpython-311.pyc,,
36
+ openai/_extras/__pycache__/_common.cpython-311.pyc,,
37
+ openai/_extras/__pycache__/numpy_proxy.cpython-311.pyc,,
38
+ openai/_extras/__pycache__/pandas_proxy.cpython-311.pyc,,
39
+ openai/_extras/_common.py,sha256=NWWtgbdJsO3hQGQxaXGfVk0LjeIE5AFZ8VS_795hhMc,364
40
+ openai/_extras/numpy_proxy.py,sha256=hwZXa_JBAPD5taRhor1tGxK26g5IaK52JclQDl-dky0,799
41
+ openai/_extras/pandas_proxy.py,sha256=NCEt1Dqwc_0H85YdsWPDE3lPDJtYnBT8G-gJE_BCeEc,637
42
+ openai/_files.py,sha256=WEf6hxJN1u3pVkdnPCpinhxCUnOV2olt4J6vLoJ_k48,3616
43
+ openai/_legacy_response.py,sha256=Ovp62-lhxDVdL4nqI6qb73rF3yeZKv1ZZEEqQzgr634,16238
44
+ openai/_models.py,sha256=ARFMTesMqckhYnwq6ZYK-C6R91pP85xk_kI3aYSaAGM,30413
45
+ openai/_module_client.py,sha256=gF_2bbdosIwUt29sQgrQRJOgNREvXF-IDxe4XKGhHjY,2523
46
+ openai/_qs.py,sha256=AOkSz4rHtK4YI3ZU_kzea-zpwBUgEY8WniGmTPyEimc,4846
47
+ openai/_resource.py,sha256=IQihFzFLhGOiGSlT2dO1ESWSTg2XypgbtAldtGdTOqU,1100
48
+ openai/_response.py,sha256=3HxbumVKhz09cWpm2lZ0v5wvBU7cLq9PTjVzX9IfnAk,29511
49
+ openai/_streaming.py,sha256=t1UZrg53fVJB5Rs6k2sT9PBbvjp-IGrQzUq_5nlxKG4,13102
50
+ openai/_types.py,sha256=GxKqy9_2_AUqbaRROzqhCJ47a7c-q_T6Bu8kV9a2qhA,6242
51
+ openai/_utils/__init__.py,sha256=WnJrKMH-HJifY1H9sSTocSjuVSm4s2W_2QnIm3-wxZI,2222
52
+ openai/_utils/__pycache__/__init__.cpython-311.pyc,,
53
+ openai/_utils/__pycache__/_logs.cpython-311.pyc,,
54
+ openai/_utils/__pycache__/_proxy.cpython-311.pyc,,
55
+ openai/_utils/__pycache__/_reflection.cpython-311.pyc,,
56
+ openai/_utils/__pycache__/_streams.cpython-311.pyc,,
57
+ openai/_utils/__pycache__/_sync.cpython-311.pyc,,
58
+ openai/_utils/__pycache__/_transform.cpython-311.pyc,,
59
+ openai/_utils/__pycache__/_typing.cpython-311.pyc,,
60
+ openai/_utils/__pycache__/_utils.cpython-311.pyc,,
61
+ openai/_utils/_logs.py,sha256=IC5iwPflwelNpJEpWsvK3up-pol5hR8k_VL9fSukk_Y,1351
62
+ openai/_utils/_proxy.py,sha256=z3zsateHtb0EARTWKk8QZNHfPkqJbqwd1lM993LBwGE,1902
63
+ openai/_utils/_reflection.py,sha256=aTXm-W0Kww4PJo5LPkUnQ92N-2UvrK1-D67cJVBlIgw,1426
64
+ openai/_utils/_streams.py,sha256=SMC90diFFecpEg_zgDRVbdR3hSEIgVVij4taD-noMLM,289
65
+ openai/_utils/_sync.py,sha256=03JeD-UR_e2O8dJEtD-v4zcyhlEpFkrcH8bgrSJMrxI,2437
66
+ openai/_utils/_transform.py,sha256=Dkkyr7OveGmOolepcvXmVJWE3kqim4b0nM0h7yWbgeY,13468
67
+ openai/_utils/_typing.py,sha256=nTJz0jcrQbEgxwy4TtAkNxuU0QHHlmc6mQtA6vIR8tg,4501
68
+ openai/_utils/_utils.py,sha256=MiRKO6s2cFkNzeBUwBc7x1MQiH_3s2-uG1WYySqwveg,12419
69
+ openai/_version.py,sha256=gBzGr1hPE-MY5AhEzNrBvfZxNuDn-jXNWP9Etq_jH_k,159
70
+ openai/cli/__init__.py,sha256=soGgtqyomgddl92H0KJRqHqGuaXIaghq86qkzLuVp7U,31
71
+ openai/cli/__pycache__/__init__.cpython-311.pyc,,
72
+ openai/cli/__pycache__/_cli.cpython-311.pyc,,
73
+ openai/cli/__pycache__/_errors.cpython-311.pyc,,
74
+ openai/cli/__pycache__/_models.cpython-311.pyc,,
75
+ openai/cli/__pycache__/_progress.cpython-311.pyc,,
76
+ openai/cli/__pycache__/_utils.cpython-311.pyc,,
77
+ openai/cli/_api/__init__.py,sha256=cj92MZq-9_1PQM8A4TQVsqKn5mcTDAGxHllJ0UvJOPE,58
78
+ openai/cli/_api/__pycache__/__init__.cpython-311.pyc,,
79
+ openai/cli/_api/__pycache__/_main.cpython-311.pyc,,
80
+ openai/cli/_api/__pycache__/audio.cpython-311.pyc,,
81
+ openai/cli/_api/__pycache__/completions.cpython-311.pyc,,
82
+ openai/cli/_api/__pycache__/files.cpython-311.pyc,,
83
+ openai/cli/_api/__pycache__/image.cpython-311.pyc,,
84
+ openai/cli/_api/__pycache__/models.cpython-311.pyc,,
85
+ openai/cli/_api/_main.py,sha256=5yyfLURqCEaAN8B61gHaqVAaYgtyb9Xq0ncQ3P2BAh0,451
86
+ openai/cli/_api/audio.py,sha256=IPbABMwryQ0CQTF4gi6VS3hJi6qFjoyj6IDV2ZoPT6A,3787
87
+ openai/cli/_api/chat/__init__.py,sha256=MhFUQH9F6QCtbPMlbsU_DWTd7wc5DSCZ7Wy3FBGVij0,300
88
+ openai/cli/_api/chat/__pycache__/__init__.cpython-311.pyc,,
89
+ openai/cli/_api/chat/__pycache__/completions.cpython-311.pyc,,
90
+ openai/cli/_api/chat/completions.py,sha256=DbR8wmXxI6-09g-dv394uHUwEcxjb4-MyXQn5JFmSLg,5536
91
+ openai/cli/_api/completions.py,sha256=ysOmnbXpFz3VB5N_5USPdObiYew62vEn6rMtNFwTJGQ,6412
92
+ openai/cli/_api/files.py,sha256=6nKXFnsC2QE0bGnVUAG7BTLSu6K1_MhPE0ZJACmzgRY,2345
93
+ openai/cli/_api/image.py,sha256=ovBExdn8oUK9ImOpsPafesfAlmcftLP2p7d37hcUtKU,5062
94
+ openai/cli/_api/models.py,sha256=pGmIGZToj3raGGpKvPSq_EVUR-dqg4Vi0PNfZH98D2E,1295
95
+ openai/cli/_cli.py,sha256=o6zWCnq84u-DIGZuR9YoOUxTGTpx-oCU5mgAKDi555c,6779
96
+ openai/cli/_errors.py,sha256=nejlu1HnOyAIr2n7uqpFtWn8XclWj_9N8FwgfT3BPK8,471
97
+ openai/cli/_models.py,sha256=tgsldjG216KpwgAZ5pS0sV02FQvONDJU2ElA4kCCiIU,491
98
+ openai/cli/_progress.py,sha256=aMLssU9jh-LoqRYH3608jNos7r6vZKnHTRlHxFznzv4,1406
99
+ openai/cli/_tools/__init__.py,sha256=cj92MZq-9_1PQM8A4TQVsqKn5mcTDAGxHllJ0UvJOPE,58
100
+ openai/cli/_tools/__pycache__/__init__.cpython-311.pyc,,
101
+ openai/cli/_tools/__pycache__/_main.cpython-311.pyc,,
102
+ openai/cli/_tools/__pycache__/fine_tunes.cpython-311.pyc,,
103
+ openai/cli/_tools/__pycache__/migrate.cpython-311.pyc,,
104
+ openai/cli/_tools/_main.py,sha256=pakjEXHRHqYlTml-RxV7fNrRtRXzmZBinoPi1AJipFY,467
105
+ openai/cli/_tools/fine_tunes.py,sha256=RQgYMzifk6S7Y1I1K6huqco2QxmXa7gVUlHl6SrKTSU,1543
106
+ openai/cli/_tools/migrate.py,sha256=o-iomzhtC6N6X5H5GDlgQ_QOaIovE2YA9oHc_tIAUj8,4497
107
+ openai/cli/_utils.py,sha256=oiTc9MnxQh_zxAZ1OIHPkoDpCll0NF9ZgkdFHz4T-Bs,848
108
+ openai/lib/.keep,sha256=wuNrz-5SXo3jJaJOJgz4vFHM41YH_g20F5cRQo0vLes,224
109
+ openai/lib/__init__.py,sha256=BMTfMnlbugMgDA1STDIAlx4bI4t4l_8bQmJxd0th0n8,126
110
+ openai/lib/__pycache__/__init__.cpython-311.pyc,,
111
+ openai/lib/__pycache__/_old_api.cpython-311.pyc,,
112
+ openai/lib/__pycache__/_pydantic.cpython-311.pyc,,
113
+ openai/lib/__pycache__/_tools.cpython-311.pyc,,
114
+ openai/lib/__pycache__/_validators.cpython-311.pyc,,
115
+ openai/lib/__pycache__/azure.cpython-311.pyc,,
116
+ openai/lib/_old_api.py,sha256=XZnXBrEKuTd70iJirj5mGW35fZoqruJobbBTq6bvg10,1947
117
+ openai/lib/_parsing/__init__.py,sha256=wS3BYvMGj9TqiPqOe3rO1sleaAJqHVuCaQuCE5rZIUw,539
118
+ openai/lib/_parsing/__pycache__/__init__.cpython-311.pyc,,
119
+ openai/lib/_parsing/__pycache__/_completions.cpython-311.pyc,,
120
+ openai/lib/_parsing/_completions.py,sha256=4pgJokd0iWSbvs5gKguEM5IMxpnGcz1EPXyMvPo6zEE,9126
121
+ openai/lib/_pydantic.py,sha256=MF-M_S4atYolma-qpAMUBgGp1nUDJY6bxnzQEtYId1U,5617
122
+ openai/lib/_tools.py,sha256=xrzM7jNgehZGsRQ9kSgn1q33z9cHrgf0b8UMo5wrTFw,1501
123
+ openai/lib/_validators.py,sha256=cXJXFuaAl7jeJcYHXXnFa4NHGtHs-_zt3Zs1VVCmQo4,35288
124
+ openai/lib/azure.py,sha256=8rGDip2BVCTvZnvaq_fT8pGQZ3479-JP6oL9WtI5NpM,23563
125
+ openai/lib/streaming/__init__.py,sha256=kD3LpjsqU7caDQDhB-YjTUl9qqbb5sPnGGSI2yQYC70,379
126
+ openai/lib/streaming/__pycache__/__init__.cpython-311.pyc,,
127
+ openai/lib/streaming/__pycache__/_assistants.cpython-311.pyc,,
128
+ openai/lib/streaming/__pycache__/_deltas.cpython-311.pyc,,
129
+ openai/lib/streaming/_assistants.py,sha256=LUWSinmYopQIkQ5xSg73b6BWbkRkQS5JvX62w_V9xSw,40692
130
+ openai/lib/streaming/_deltas.py,sha256=I7B_AznXZwlBmE8Puau7ayTQUx6hMIEVE8FYTQm2fjs,2502
131
+ openai/lib/streaming/chat/__init__.py,sha256=7krL_atOvvpQkY_byWSglSfDsMs5hdoxHmz4Ulq7lcc,1305
132
+ openai/lib/streaming/chat/__pycache__/__init__.cpython-311.pyc,,
133
+ openai/lib/streaming/chat/__pycache__/_completions.cpython-311.pyc,,
134
+ openai/lib/streaming/chat/__pycache__/_events.cpython-311.pyc,,
135
+ openai/lib/streaming/chat/__pycache__/_types.cpython-311.pyc,,
136
+ openai/lib/streaming/chat/_completions.py,sha256=icXzr6TwaQvOOEZHRLIfw106YVUT9mLGjQt6QJ1ObKI,29944
137
+ openai/lib/streaming/chat/_events.py,sha256=lstVmM6YR2Cs9drikzrY9JCZn9Nbfym0aKIPtNpxL6w,2618
138
+ openai/lib/streaming/chat/_types.py,sha256=-SYVBNhGkOUoJ-8dotxpCRqPJpfyOQ8hwR2_HrsQCRI,739
139
+ openai/pagination.py,sha256=B9ejXEAR_hYGLHfqb9xEEsE0u5dCUMjvplOce5dpY7M,2760
140
+ openai/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
141
+ openai/resources/__init__.py,sha256=eYonVyf6AAmk-b8JYSYmo5EEMv89ovxiAY5A83ti8J8,4533
142
+ openai/resources/__pycache__/__init__.cpython-311.pyc,,
143
+ openai/resources/__pycache__/batches.cpython-311.pyc,,
144
+ openai/resources/__pycache__/completions.cpython-311.pyc,,
145
+ openai/resources/__pycache__/embeddings.cpython-311.pyc,,
146
+ openai/resources/__pycache__/files.cpython-311.pyc,,
147
+ openai/resources/__pycache__/images.cpython-311.pyc,,
148
+ openai/resources/__pycache__/models.cpython-311.pyc,,
149
+ openai/resources/__pycache__/moderations.cpython-311.pyc,,
150
+ openai/resources/audio/__init__.py,sha256=YM7FHvPKVlj_v6EIgfpUQsb6q4hS2hVQ3gfkgic0sP0,1687
151
+ openai/resources/audio/__pycache__/__init__.cpython-311.pyc,,
152
+ openai/resources/audio/__pycache__/audio.cpython-311.pyc,,
153
+ openai/resources/audio/__pycache__/speech.cpython-311.pyc,,
154
+ openai/resources/audio/__pycache__/transcriptions.cpython-311.pyc,,
155
+ openai/resources/audio/__pycache__/translations.cpython-311.pyc,,
156
+ openai/resources/audio/audio.py,sha256=nEIB4q7a1MSYdQkcYH2O6jB-_rNCMDCBJyUuqOL67CI,5491
157
+ openai/resources/audio/speech.py,sha256=8vr4mg4dPwFiyo7lhKMJN7Vv7TIrvmUhmAZ5kHPzxyo,8989
158
+ openai/resources/audio/transcriptions.py,sha256=AWJMuDpydq0bYW2XzzUu9DZxMaNX2vX_lJR_Voq0oJg,18523
159
+ openai/resources/audio/translations.py,sha256=lbTvAqMyZJCQHgh3U1uWcRxtBmRyMdpCI3NDvs__EdQ,15703
160
+ openai/resources/batches.py,sha256=nqozyuGcU4yr9h2vpW14d33OgB8UDTy24qXLY5-IlAc,20273
161
+ openai/resources/beta/__init__.py,sha256=nXoV4P8WCrbEZuNMtptbIuy_LqlVafY9lJ2qfW35GFc,1636
162
+ openai/resources/beta/__pycache__/__init__.cpython-311.pyc,,
163
+ openai/resources/beta/__pycache__/assistants.cpython-311.pyc,,
164
+ openai/resources/beta/__pycache__/beta.cpython-311.pyc,,
165
+ openai/resources/beta/assistants.py,sha256=JuPSeOuGJYfAyQpgtvpp6EnkJWUrA-mlBtqmv3brKQY,40934
166
+ openai/resources/beta/beta.py,sha256=I7epCNm03AMq0FV4f6x7CkSiQ5aeTcIKPa-flpNHNBA,6594
167
+ openai/resources/beta/chat/__init__.py,sha256=d_fpyFMAG3iRAPIXANPfRG4HtEm6U_uMUYep7Skj2uY,263
168
+ openai/resources/beta/chat/__pycache__/__init__.cpython-311.pyc,,
169
+ openai/resources/beta/chat/__pycache__/chat.cpython-311.pyc,,
170
+ openai/resources/beta/chat/__pycache__/completions.cpython-311.pyc,,
171
+ openai/resources/beta/chat/chat.py,sha256=sNvU8Fi_o3dWkD_X4Mobafv9XWBP6Y2dJxng-NdFXUs,597
172
+ openai/resources/beta/chat/completions.py,sha256=1tBYk0pUllhKGLslbFnlA8F_gfzSEw4hT-biQGFDKbw,28574
173
+ openai/resources/beta/realtime/__init__.py,sha256=0TBjHlLRsG-hudbiE8f-EXETNkDRAxqkCVAgODiUnYo,862
174
+ openai/resources/beta/realtime/__pycache__/__init__.cpython-311.pyc,,
175
+ openai/resources/beta/realtime/__pycache__/realtime.cpython-311.pyc,,
176
+ openai/resources/beta/realtime/__pycache__/sessions.cpython-311.pyc,,
177
+ openai/resources/beta/realtime/realtime.py,sha256=jLnqGtLHA87ytxKzuaakGAR-t7rxjifBpA5XWYqaNkI,37474
178
+ openai/resources/beta/realtime/sessions.py,sha256=fTEdpx9UlQTTaU9UjYcI4jSkVC9yKUEmTcJEz6kvQT8,17107
179
+ openai/resources/beta/threads/__init__.py,sha256=fQ_qdUVSfouVS5h47DlTb5mamChT4K-v-siPuuAB6do,1177
180
+ openai/resources/beta/threads/__pycache__/__init__.cpython-311.pyc,,
181
+ openai/resources/beta/threads/__pycache__/messages.cpython-311.pyc,,
182
+ openai/resources/beta/threads/__pycache__/threads.cpython-311.pyc,,
183
+ openai/resources/beta/threads/messages.py,sha256=tg8MeQIxlrCkNV1qNvkwMI3yY_TTrs6osEurOVcdVMs,27774
184
+ openai/resources/beta/threads/runs/__init__.py,sha256=2FfDaqwmJJCd-IVpY_CrzWcFvw0KFyQ3cm5jnTfI-DQ,771
185
+ openai/resources/beta/threads/runs/__pycache__/__init__.cpython-311.pyc,,
186
+ openai/resources/beta/threads/runs/__pycache__/runs.cpython-311.pyc,,
187
+ openai/resources/beta/threads/runs/__pycache__/steps.cpython-311.pyc,,
188
+ openai/resources/beta/threads/runs/runs.py,sha256=2I9u1dEh-BN5l2JdzsoByLQza9mwJjd5Sx8ScA5dxcs,143384
189
+ openai/resources/beta/threads/runs/steps.py,sha256=9kq7LOhNaP5Kr00FTZ4NnFHqzY2k5T5h3NrTsT9HSRg,15809
190
+ openai/resources/beta/threads/threads.py,sha256=4IhQQX2snKztbV9_CFolSa80_MAyOjoap4mO_I_w1fw,95608
191
+ openai/resources/beta/vector_stores/__init__.py,sha256=11Xn1vhgndWiI0defJHv31vmbtbDgh2GwZT3gX8GgHk,1296
192
+ openai/resources/beta/vector_stores/__pycache__/__init__.cpython-311.pyc,,
193
+ openai/resources/beta/vector_stores/__pycache__/file_batches.cpython-311.pyc,,
194
+ openai/resources/beta/vector_stores/__pycache__/files.cpython-311.pyc,,
195
+ openai/resources/beta/vector_stores/__pycache__/vector_stores.cpython-311.pyc,,
196
+ openai/resources/beta/vector_stores/file_batches.py,sha256=O__hUBO3OjSCSmTx2-chulx4S1yvp66NjOPL8k5bQWY,31977
197
+ openai/resources/beta/vector_stores/files.py,sha256=guB5HOwoMBqiAZQfhU2Z8I3oCWGJXNWzXPMkij0F8Qo,29716
198
+ openai/resources/beta/vector_stores/vector_stores.py,sha256=ROgkpMibvFO2vrNctkAvteMW6FtUyXQURDLaxRfei6Q,29318
199
+ openai/resources/chat/__init__.py,sha256=8Q9ODRo1wIpFa34VaNwuaWFmxqFxagDtUhIAkQNvxEU,849
200
+ openai/resources/chat/__pycache__/__init__.cpython-311.pyc,,
201
+ openai/resources/chat/__pycache__/chat.cpython-311.pyc,,
202
+ openai/resources/chat/__pycache__/completions.cpython-311.pyc,,
203
+ openai/resources/chat/chat.py,sha256=9ln8TL1kqIccplWXnTGtclLoaG9GIjF0ZREX6GYXfPw,3352
204
+ openai/resources/chat/completions.py,sha256=oxeao7t9fXFr9I43NyCpv3n3impXGjDZcsFz38iks4k,100314
205
+ openai/resources/completions.py,sha256=t_nL57mr_PXp96JJ-i_sdMeOCJjTHiZNbOEtiumVRo8,59452
206
+ openai/resources/embeddings.py,sha256=CS574dgW65M_uYXc4VzI9VPS4S-3Tr2rtKB56NXFI38,11843
207
+ openai/resources/files.py,sha256=oLj947TTax4bQqT2N7w5sfqZ4uoKk7w-MsJQ07Xytqg,30085
208
+ openai/resources/fine_tuning/__init__.py,sha256=s6uoq7gM4gwoywdOOZQkPeYiSbUl-OwpeuMhwJJk0lc,837
209
+ openai/resources/fine_tuning/__pycache__/__init__.cpython-311.pyc,,
210
+ openai/resources/fine_tuning/__pycache__/fine_tuning.cpython-311.pyc,,
211
+ openai/resources/fine_tuning/fine_tuning.py,sha256=0Llezl8H-0WszRtorvJ9vY304oQbC8YhfUTpeLbyBdE,3386
212
+ openai/resources/fine_tuning/jobs/__init__.py,sha256=_smlrwijZOCcsDWqKnofLxQM2QLucZzXgboL9zJBPHw,849
213
+ openai/resources/fine_tuning/jobs/__pycache__/__init__.cpython-311.pyc,,
214
+ openai/resources/fine_tuning/jobs/__pycache__/checkpoints.cpython-311.pyc,,
215
+ openai/resources/fine_tuning/jobs/__pycache__/jobs.cpython-311.pyc,,
216
+ openai/resources/fine_tuning/jobs/checkpoints.py,sha256=Z6p_IBzmVu3oRldxLKVKGVm1E8Xf7UUnItSnV7PJI9Y,7466
217
+ openai/resources/fine_tuning/jobs/jobs.py,sha256=LuFNBcQkBK2b2xd-_CeIQZlJjYWCQAoR7vyIFzC_z7U,29384
218
+ openai/resources/images.py,sha256=QtIW5FFmcfuWHTqlKYeCFVyh32VghdK7pXeVeu0-dds,25626
219
+ openai/resources/models.py,sha256=CzLpB5Oj1x7U6eNKOcK0Z7M-NjEIpZvdWQLDAyIm7wM,11232
220
+ openai/resources/moderations.py,sha256=gQcfE7pifgFS1GqkHXkaVneQmN7Wwu5oTF_0SwyRlBs,7797
221
+ openai/resources/uploads/__init__.py,sha256=HmY3WQgvUI2bN3CjfWHWQOk7UUC6Ozna97_lHhrrRSA,810
222
+ openai/resources/uploads/__pycache__/__init__.cpython-311.pyc,,
223
+ openai/resources/uploads/__pycache__/parts.cpython-311.pyc,,
224
+ openai/resources/uploads/__pycache__/uploads.cpython-311.pyc,,
225
+ openai/resources/uploads/parts.py,sha256=_ldj9IM_c4xWdFKtqqzHtuQ4Ass1S8lOwJoIG23qu6Y,8142
226
+ openai/resources/uploads/uploads.py,sha256=4hYxsFX10JFTp6sQZpOnMGPie7hKbcVvQ-sQg07sqKY,24910
227
+ openai/types/__init__.py,sha256=icvtLB2GSv5bgO1VNkgesL89Ta_jzFeWU7FfIa_zOcQ,3203
228
+ openai/types/__pycache__/__init__.cpython-311.pyc,,
229
+ openai/types/__pycache__/audio_model.cpython-311.pyc,,
230
+ openai/types/__pycache__/audio_response_format.cpython-311.pyc,,
231
+ openai/types/__pycache__/batch.cpython-311.pyc,,
232
+ openai/types/__pycache__/batch_create_params.cpython-311.pyc,,
233
+ openai/types/__pycache__/batch_error.cpython-311.pyc,,
234
+ openai/types/__pycache__/batch_list_params.cpython-311.pyc,,
235
+ openai/types/__pycache__/batch_request_counts.cpython-311.pyc,,
236
+ openai/types/__pycache__/chat_model.cpython-311.pyc,,
237
+ openai/types/__pycache__/completion.cpython-311.pyc,,
238
+ openai/types/__pycache__/completion_choice.cpython-311.pyc,,
239
+ openai/types/__pycache__/completion_create_params.cpython-311.pyc,,
240
+ openai/types/__pycache__/completion_usage.cpython-311.pyc,,
241
+ openai/types/__pycache__/create_embedding_response.cpython-311.pyc,,
242
+ openai/types/__pycache__/embedding.cpython-311.pyc,,
243
+ openai/types/__pycache__/embedding_create_params.cpython-311.pyc,,
244
+ openai/types/__pycache__/embedding_model.cpython-311.pyc,,
245
+ openai/types/__pycache__/file_content.cpython-311.pyc,,
246
+ openai/types/__pycache__/file_create_params.cpython-311.pyc,,
247
+ openai/types/__pycache__/file_deleted.cpython-311.pyc,,
248
+ openai/types/__pycache__/file_list_params.cpython-311.pyc,,
249
+ openai/types/__pycache__/file_object.cpython-311.pyc,,
250
+ openai/types/__pycache__/file_purpose.cpython-311.pyc,,
251
+ openai/types/__pycache__/image.cpython-311.pyc,,
252
+ openai/types/__pycache__/image_create_variation_params.cpython-311.pyc,,
253
+ openai/types/__pycache__/image_edit_params.cpython-311.pyc,,
254
+ openai/types/__pycache__/image_generate_params.cpython-311.pyc,,
255
+ openai/types/__pycache__/image_model.cpython-311.pyc,,
256
+ openai/types/__pycache__/images_response.cpython-311.pyc,,
257
+ openai/types/__pycache__/model.cpython-311.pyc,,
258
+ openai/types/__pycache__/model_deleted.cpython-311.pyc,,
259
+ openai/types/__pycache__/moderation.cpython-311.pyc,,
260
+ openai/types/__pycache__/moderation_create_params.cpython-311.pyc,,
261
+ openai/types/__pycache__/moderation_create_response.cpython-311.pyc,,
262
+ openai/types/__pycache__/moderation_image_url_input_param.cpython-311.pyc,,
263
+ openai/types/__pycache__/moderation_model.cpython-311.pyc,,
264
+ openai/types/__pycache__/moderation_multi_modal_input_param.cpython-311.pyc,,
265
+ openai/types/__pycache__/moderation_text_input_param.cpython-311.pyc,,
266
+ openai/types/__pycache__/upload.cpython-311.pyc,,
267
+ openai/types/__pycache__/upload_complete_params.cpython-311.pyc,,
268
+ openai/types/__pycache__/upload_create_params.cpython-311.pyc,,
269
+ openai/types/__pycache__/websocket_connection_options.cpython-311.pyc,,
270
+ openai/types/audio/__init__.py,sha256=sR9_rMb-gO0stG4ozTq6XJs714C_BfjB3KCgFvyhXVA,1050
271
+ openai/types/audio/__pycache__/__init__.cpython-311.pyc,,
272
+ openai/types/audio/__pycache__/speech_create_params.cpython-311.pyc,,
273
+ openai/types/audio/__pycache__/speech_model.cpython-311.pyc,,
274
+ openai/types/audio/__pycache__/transcription.cpython-311.pyc,,
275
+ openai/types/audio/__pycache__/transcription_create_params.cpython-311.pyc,,
276
+ openai/types/audio/__pycache__/transcription_create_response.cpython-311.pyc,,
277
+ openai/types/audio/__pycache__/transcription_segment.cpython-311.pyc,,
278
+ openai/types/audio/__pycache__/transcription_verbose.cpython-311.pyc,,
279
+ openai/types/audio/__pycache__/transcription_word.cpython-311.pyc,,
280
+ openai/types/audio/__pycache__/translation.cpython-311.pyc,,
281
+ openai/types/audio/__pycache__/translation_create_params.cpython-311.pyc,,
282
+ openai/types/audio/__pycache__/translation_create_response.cpython-311.pyc,,
283
+ openai/types/audio/__pycache__/translation_verbose.cpython-311.pyc,,
284
+ openai/types/audio/speech_create_params.py,sha256=mRqj_hlLuq6iAXEPy-hppWsdo4dtLvO2L2eoUaPjrx4,1347
285
+ openai/types/audio/speech_model.py,sha256=RUimvc__LYAxwEEmfrf-lj18O3EWrU1OlWZXEXN2AKY,218
286
+ openai/types/audio/transcription.py,sha256=FP9QMwwwdqgvP3xY9P-40gBiFmMwFKxXM5yv5x8xPVk,230
287
+ openai/types/audio/transcription_create_params.py,sha256=u6a507HF_jhv164rQut0e9pC6AvHnMfGFEPgal708gI,2276
288
+ openai/types/audio/transcription_create_response.py,sha256=-PLGH8he9EdJtvBXV-ZrE31CLVnk4bc0VQ1ixRoN8Ck,378
289
+ openai/types/audio/transcription_segment.py,sha256=-pPAGolwIIXUBMic-H5U7aR0u_Aq-pipSA4xTtn_viA,1153
290
+ openai/types/audio/transcription_verbose.py,sha256=QkQBIdpvsubHjSvmvTb5ryo8Yzog3ZMvv4HZukEsjxI,760
291
+ openai/types/audio/transcription_word.py,sha256=sNDdtjoqIiba6qKsD_lI2Ffs1Lr7qP9HyS59AFh5cTc,368
292
+ openai/types/audio/translation.py,sha256=5l-Zk9Cg7AZti-TTn2-4ydsoZj2zdvDwyzzVjVp9W0g,194
293
+ openai/types/audio/translation_create_params.py,sha256=lFQEh5IRG5XT-Z3TV7FDSNbIRqAt6yA3EsSvSsb0wsU,1585
294
+ openai/types/audio/translation_create_response.py,sha256=x6H0yjTbZR3vd3d7LdABcn9nrMDNdeMjepcjW1oUfVc,362
295
+ openai/types/audio/translation_verbose.py,sha256=lGB5FqkV-ne__aaGbMTFbEciJ-Sl3wBhlKmETmtROT8,615
296
+ openai/types/audio_model.py,sha256=pxBVwf1HGd6mW-_jd-TDVMRZtTvvCUn_rL8Pt1BXzuo,208
297
+ openai/types/audio_response_format.py,sha256=EEItnQdwXinG8bOe1We2039Z7lp2Z8wSXXvTlFlkXzM,259
298
+ openai/types/batch.py,sha256=FuGQ-x8kK6VMyYIQeP5gu_LEmfzXMCht5ySHdFfJQnE,2880
299
+ openai/types/batch_create_params.py,sha256=1pEYBpbPBdRnp7imv5TrGb7MGkpIFF7r1HcMurPFSpg,1782
300
+ openai/types/batch_error.py,sha256=Xxl-gYm0jerpYyI-mKSSVxRMQRubkoLUiOP9U3v72EM,622
301
+ openai/types/batch_list_params.py,sha256=X1_sfRspuIMSDyXWVh0YnJ9vJLeOOH66TrvgEHueC84,705
302
+ openai/types/batch_request_counts.py,sha256=GHHrJKdJwJ3foBa1j9v5Vece_zzkdXXXgOcne8W1E30,409
303
+ openai/types/beta/__init__.py,sha256=vtfIT3jsS-5tql68Kr58KYl8uCzos3H2SGuH9h1x-Ts,3606
304
+ openai/types/beta/__pycache__/__init__.cpython-311.pyc,,
305
+ openai/types/beta/__pycache__/assistant.cpython-311.pyc,,
306
+ openai/types/beta/__pycache__/assistant_create_params.cpython-311.pyc,,
307
+ openai/types/beta/__pycache__/assistant_deleted.cpython-311.pyc,,
308
+ openai/types/beta/__pycache__/assistant_list_params.cpython-311.pyc,,
309
+ openai/types/beta/__pycache__/assistant_response_format_option.cpython-311.pyc,,
310
+ openai/types/beta/__pycache__/assistant_response_format_option_param.cpython-311.pyc,,
311
+ openai/types/beta/__pycache__/assistant_stream_event.cpython-311.pyc,,
312
+ openai/types/beta/__pycache__/assistant_tool.cpython-311.pyc,,
313
+ openai/types/beta/__pycache__/assistant_tool_choice.cpython-311.pyc,,
314
+ openai/types/beta/__pycache__/assistant_tool_choice_function.cpython-311.pyc,,
315
+ openai/types/beta/__pycache__/assistant_tool_choice_function_param.cpython-311.pyc,,
316
+ openai/types/beta/__pycache__/assistant_tool_choice_option.cpython-311.pyc,,
317
+ openai/types/beta/__pycache__/assistant_tool_choice_option_param.cpython-311.pyc,,
318
+ openai/types/beta/__pycache__/assistant_tool_choice_param.cpython-311.pyc,,
319
+ openai/types/beta/__pycache__/assistant_tool_param.cpython-311.pyc,,
320
+ openai/types/beta/__pycache__/assistant_update_params.cpython-311.pyc,,
321
+ openai/types/beta/__pycache__/auto_file_chunking_strategy_param.cpython-311.pyc,,
322
+ openai/types/beta/__pycache__/code_interpreter_tool.cpython-311.pyc,,
323
+ openai/types/beta/__pycache__/code_interpreter_tool_param.cpython-311.pyc,,
324
+ openai/types/beta/__pycache__/file_chunking_strategy.cpython-311.pyc,,
325
+ openai/types/beta/__pycache__/file_chunking_strategy_param.cpython-311.pyc,,
326
+ openai/types/beta/__pycache__/file_search_tool.cpython-311.pyc,,
327
+ openai/types/beta/__pycache__/file_search_tool_param.cpython-311.pyc,,
328
+ openai/types/beta/__pycache__/function_tool.cpython-311.pyc,,
329
+ openai/types/beta/__pycache__/function_tool_param.cpython-311.pyc,,
330
+ openai/types/beta/__pycache__/other_file_chunking_strategy_object.cpython-311.pyc,,
331
+ openai/types/beta/__pycache__/static_file_chunking_strategy.cpython-311.pyc,,
332
+ openai/types/beta/__pycache__/static_file_chunking_strategy_object.cpython-311.pyc,,
333
+ openai/types/beta/__pycache__/static_file_chunking_strategy_object_param.cpython-311.pyc,,
334
+ openai/types/beta/__pycache__/static_file_chunking_strategy_param.cpython-311.pyc,,
335
+ openai/types/beta/__pycache__/thread.cpython-311.pyc,,
336
+ openai/types/beta/__pycache__/thread_create_and_run_params.cpython-311.pyc,,
337
+ openai/types/beta/__pycache__/thread_create_params.cpython-311.pyc,,
338
+ openai/types/beta/__pycache__/thread_deleted.cpython-311.pyc,,
339
+ openai/types/beta/__pycache__/thread_update_params.cpython-311.pyc,,
340
+ openai/types/beta/__pycache__/vector_store.cpython-311.pyc,,
341
+ openai/types/beta/__pycache__/vector_store_create_params.cpython-311.pyc,,
342
+ openai/types/beta/__pycache__/vector_store_deleted.cpython-311.pyc,,
343
+ openai/types/beta/__pycache__/vector_store_list_params.cpython-311.pyc,,
344
+ openai/types/beta/__pycache__/vector_store_update_params.cpython-311.pyc,,
345
+ openai/types/beta/assistant.py,sha256=_OgFKmjaMXM2yNOTFTcCj5qVo_-F9p7uiEXJnYbB0XE,5054
346
+ openai/types/beta/assistant_create_params.py,sha256=owmfhehcYbDlT9D5Uek76QPXOT8-vIUBF2v7pwYII4Y,6189
347
+ openai/types/beta/assistant_deleted.py,sha256=bTTUl5FPHTBI5nRm7d0sGuR9VCSBDZ-IbOn9G_IpmJQ,301
348
+ openai/types/beta/assistant_list_params.py,sha256=yW-lj6AUkG0IRZQKre0veEr9p4VMN-9YdELFMYs74Cw,1222
349
+ openai/types/beta/assistant_response_format_option.py,sha256=yNeoAWxM-_8Sjmwqu8exqyKRFhVZIKeTypetPY55VFA,561
350
+ openai/types/beta/assistant_response_format_option_param.py,sha256=dyPMhwRSLBZ0ltpxiD7KM-9X6BzWnbGeG-nT_3SenuQ,628
351
+ openai/types/beta/assistant_stream_event.py,sha256=vP4LDqYWzSKGcZ1JAfyNw7YqC__XsVPe0nqZ2qdn93E,6930
352
+ openai/types/beta/assistant_tool.py,sha256=_0FC7Db4Ctq_0yLaKJ93zNTB5HthuJWEAHx3fadDRlw,506
353
+ openai/types/beta/assistant_tool_choice.py,sha256=Hy4HIfPQCkWD8VruHHicuTkomNwljGHviQHk36prKhg,544
354
+ openai/types/beta/assistant_tool_choice_function.py,sha256=aYMlVrZdX2JxmehDlyGALRK2PIEkO7VFEfsvY3VH6T4,270
355
+ openai/types/beta/assistant_tool_choice_function_param.py,sha256=-O38277LhSaqOVhTp0haHP0ZnVTLpEBvcLJa5MRo7wE,355
356
+ openai/types/beta/assistant_tool_choice_option.py,sha256=jrXMd_IYIQ1pt8Lkc-KrPd4CR3lR8sFV4m7_lpG8A4Y,362
357
+ openai/types/beta/assistant_tool_choice_option_param.py,sha256=VcatO5Nej9e5eqfrwetG4uM1vFoewnBEcFz47IxAK2E,424
358
+ openai/types/beta/assistant_tool_choice_param.py,sha256=NOWx9SzZEwYaHeAyFZTQlG3pmogMNXzjPJDGQUlbv7Q,572
359
+ openai/types/beta/assistant_tool_param.py,sha256=6DcaU3nMjurur2VkVIYcCaRAY1QLQscXXjCd0ZHHGho,501
360
+ openai/types/beta/assistant_update_params.py,sha256=2PgobUjbgyaZyBdel0nwmArSrWcI95oJYjdVu4gEeqo,4809
361
+ openai/types/beta/auto_file_chunking_strategy_param.py,sha256=hbBtARkJXSJE7_4RqC-ZR3NiztUp9S4WuG3s3W0GpqY,351
362
+ openai/types/beta/chat/__init__.py,sha256=OKfJYcKb4NObdiRObqJV_dOyDQ8feXekDUge2o_4pXQ,122
363
+ openai/types/beta/chat/__pycache__/__init__.cpython-311.pyc,,
364
+ openai/types/beta/code_interpreter_tool.py,sha256=7mgQc9OtD_ZUnZeNhoobMFcmmvtZPFCNYGB-PEnNnfs,333
365
+ openai/types/beta/code_interpreter_tool_param.py,sha256=X6mwzFyZx1RCKEYbBCPs4kh_tZkxFxydPMK4yFNJkLs,389
366
+ openai/types/beta/file_chunking_strategy.py,sha256=6nRvYetBl_BHgN8biTyTut-tw8G13YttgxSKtJsJLeM,560
367
+ openai/types/beta/file_chunking_strategy_param.py,sha256=mOFh18BKAGkzVTrWv_3Iphzbs-EbT6hq-jChCA4HgAE,517
368
+ openai/types/beta/file_search_tool.py,sha256=5aNU8RZj-UNdmuqqpjCXNaa1pI9GzSP5qCPtvVSJ1oQ,1769
369
+ openai/types/beta/file_search_tool_param.py,sha256=o6sWPrzRYY8wtNaVuF8h3D1sAQV3N0L3dbdiiaMisW0,1765
370
+ openai/types/beta/function_tool.py,sha256=oYGJfcfPpUohKw2ikgshDjOI1HXCK-5pAWyegYNezeU,397
371
+ openai/types/beta/function_tool_param.py,sha256=hCclpGO4Re-TxiGy_QxX75g1kcN6_ElubicO6SdJ_YI,471
372
+ openai/types/beta/other_file_chunking_strategy_object.py,sha256=hJz1OeSkvvcWJVftPfvz2pB5ujdawWEEa3v38E6tt7g,311
373
+ openai/types/beta/realtime/__init__.py,sha256=x0IcUtawQQLINQlbg-ZA3ywr5-otkVWr1Z-Hy6msdtI,6410
374
+ openai/types/beta/realtime/__pycache__/__init__.cpython-311.pyc,,
375
+ openai/types/beta/realtime/__pycache__/conversation_created_event.cpython-311.pyc,,
376
+ openai/types/beta/realtime/__pycache__/conversation_item.cpython-311.pyc,,
377
+ openai/types/beta/realtime/__pycache__/conversation_item_content.cpython-311.pyc,,
378
+ openai/types/beta/realtime/__pycache__/conversation_item_content_param.cpython-311.pyc,,
379
+ openai/types/beta/realtime/__pycache__/conversation_item_create_event.cpython-311.pyc,,
380
+ openai/types/beta/realtime/__pycache__/conversation_item_create_event_param.cpython-311.pyc,,
381
+ openai/types/beta/realtime/__pycache__/conversation_item_created_event.cpython-311.pyc,,
382
+ openai/types/beta/realtime/__pycache__/conversation_item_delete_event.cpython-311.pyc,,
383
+ openai/types/beta/realtime/__pycache__/conversation_item_delete_event_param.cpython-311.pyc,,
384
+ openai/types/beta/realtime/__pycache__/conversation_item_deleted_event.cpython-311.pyc,,
385
+ openai/types/beta/realtime/__pycache__/conversation_item_input_audio_transcription_completed_event.cpython-311.pyc,,
386
+ openai/types/beta/realtime/__pycache__/conversation_item_input_audio_transcription_failed_event.cpython-311.pyc,,
387
+ openai/types/beta/realtime/__pycache__/conversation_item_param.cpython-311.pyc,,
388
+ openai/types/beta/realtime/__pycache__/conversation_item_truncate_event.cpython-311.pyc,,
389
+ openai/types/beta/realtime/__pycache__/conversation_item_truncate_event_param.cpython-311.pyc,,
390
+ openai/types/beta/realtime/__pycache__/conversation_item_truncated_event.cpython-311.pyc,,
391
+ openai/types/beta/realtime/__pycache__/conversation_item_with_reference.cpython-311.pyc,,
392
+ openai/types/beta/realtime/__pycache__/conversation_item_with_reference_param.cpython-311.pyc,,
393
+ openai/types/beta/realtime/__pycache__/error_event.cpython-311.pyc,,
394
+ openai/types/beta/realtime/__pycache__/input_audio_buffer_append_event.cpython-311.pyc,,
395
+ openai/types/beta/realtime/__pycache__/input_audio_buffer_append_event_param.cpython-311.pyc,,
396
+ openai/types/beta/realtime/__pycache__/input_audio_buffer_clear_event.cpython-311.pyc,,
397
+ openai/types/beta/realtime/__pycache__/input_audio_buffer_clear_event_param.cpython-311.pyc,,
398
+ openai/types/beta/realtime/__pycache__/input_audio_buffer_cleared_event.cpython-311.pyc,,
399
+ openai/types/beta/realtime/__pycache__/input_audio_buffer_commit_event.cpython-311.pyc,,
400
+ openai/types/beta/realtime/__pycache__/input_audio_buffer_commit_event_param.cpython-311.pyc,,
401
+ openai/types/beta/realtime/__pycache__/input_audio_buffer_committed_event.cpython-311.pyc,,
402
+ openai/types/beta/realtime/__pycache__/input_audio_buffer_speech_started_event.cpython-311.pyc,,
403
+ openai/types/beta/realtime/__pycache__/input_audio_buffer_speech_stopped_event.cpython-311.pyc,,
404
+ openai/types/beta/realtime/__pycache__/rate_limits_updated_event.cpython-311.pyc,,
405
+ openai/types/beta/realtime/__pycache__/realtime_client_event.cpython-311.pyc,,
406
+ openai/types/beta/realtime/__pycache__/realtime_client_event_param.cpython-311.pyc,,
407
+ openai/types/beta/realtime/__pycache__/realtime_connect_params.cpython-311.pyc,,
408
+ openai/types/beta/realtime/__pycache__/realtime_response.cpython-311.pyc,,
409
+ openai/types/beta/realtime/__pycache__/realtime_response_status.cpython-311.pyc,,
410
+ openai/types/beta/realtime/__pycache__/realtime_response_usage.cpython-311.pyc,,
411
+ openai/types/beta/realtime/__pycache__/realtime_server_event.cpython-311.pyc,,
412
+ openai/types/beta/realtime/__pycache__/response_audio_delta_event.cpython-311.pyc,,
413
+ openai/types/beta/realtime/__pycache__/response_audio_done_event.cpython-311.pyc,,
414
+ openai/types/beta/realtime/__pycache__/response_audio_transcript_delta_event.cpython-311.pyc,,
415
+ openai/types/beta/realtime/__pycache__/response_audio_transcript_done_event.cpython-311.pyc,,
416
+ openai/types/beta/realtime/__pycache__/response_cancel_event.cpython-311.pyc,,
417
+ openai/types/beta/realtime/__pycache__/response_cancel_event_param.cpython-311.pyc,,
418
+ openai/types/beta/realtime/__pycache__/response_content_part_added_event.cpython-311.pyc,,
419
+ openai/types/beta/realtime/__pycache__/response_content_part_done_event.cpython-311.pyc,,
420
+ openai/types/beta/realtime/__pycache__/response_create_event.cpython-311.pyc,,
421
+ openai/types/beta/realtime/__pycache__/response_create_event_param.cpython-311.pyc,,
422
+ openai/types/beta/realtime/__pycache__/response_created_event.cpython-311.pyc,,
423
+ openai/types/beta/realtime/__pycache__/response_done_event.cpython-311.pyc,,
424
+ openai/types/beta/realtime/__pycache__/response_function_call_arguments_delta_event.cpython-311.pyc,,
425
+ openai/types/beta/realtime/__pycache__/response_function_call_arguments_done_event.cpython-311.pyc,,
426
+ openai/types/beta/realtime/__pycache__/response_output_item_added_event.cpython-311.pyc,,
427
+ openai/types/beta/realtime/__pycache__/response_output_item_done_event.cpython-311.pyc,,
428
+ openai/types/beta/realtime/__pycache__/response_text_delta_event.cpython-311.pyc,,
429
+ openai/types/beta/realtime/__pycache__/response_text_done_event.cpython-311.pyc,,
430
+ openai/types/beta/realtime/__pycache__/session.cpython-311.pyc,,
431
+ openai/types/beta/realtime/__pycache__/session_create_params.cpython-311.pyc,,
432
+ openai/types/beta/realtime/__pycache__/session_create_response.cpython-311.pyc,,
433
+ openai/types/beta/realtime/__pycache__/session_created_event.cpython-311.pyc,,
434
+ openai/types/beta/realtime/__pycache__/session_update_event.cpython-311.pyc,,
435
+ openai/types/beta/realtime/__pycache__/session_update_event_param.cpython-311.pyc,,
436
+ openai/types/beta/realtime/__pycache__/session_updated_event.cpython-311.pyc,,
437
+ openai/types/beta/realtime/conversation_created_event.py,sha256=U4-nesN8rAep2_25E2DrkXUMafQejj3NE_0llXKj5Y8,752
438
+ openai/types/beta/realtime/conversation_item.py,sha256=av6WCjWVuRxBjccmxv4j26cd3TCKURj2a7cf8uS3P3s,2297
439
+ openai/types/beta/realtime/conversation_item_content.py,sha256=dj0XAEPqj4UPVb3E2nIgb8bZBA-PRNK-E7o3des6wmw,1005
440
+ openai/types/beta/realtime/conversation_item_content_param.py,sha256=CKEwY9j6ApnvfsLKrdkEFfOW1CtxUWyY9OL-rIMUNaw,927
441
+ openai/types/beta/realtime/conversation_item_create_event.py,sha256=jYXYdmqJh_znzcAgDuCxJXo5shf-t_DwmsyFkaDVnAE,1081
442
+ openai/types/beta/realtime/conversation_item_create_event_param.py,sha256=vxTag6TrOLu1bf46F3mUmRkl5dd1Kb6bUp65gBDVmhM,1101
443
+ openai/types/beta/realtime/conversation_item_created_event.py,sha256=DIeG7YQ5HdKrnbnorklB1Zfsz42yRdPKDOx5TPzfvw0,722
444
+ openai/types/beta/realtime/conversation_item_delete_event.py,sha256=p-O6R1Ku5pxZvaxhSi4YTPqLXS1SHhdLGgJuPQyPcHY,549
445
+ openai/types/beta/realtime/conversation_item_delete_event_param.py,sha256=a17h8Hd8MxUbXT6NQg8YpTr1ICt1ztRecpfukHw4g34,569
446
+ openai/types/beta/realtime/conversation_item_deleted_event.py,sha256=uWHSqX5ig550romSdhtROwrdQmdeN31Oz1Vpr9IuQFI,492
447
+ openai/types/beta/realtime/conversation_item_input_audio_transcription_completed_event.py,sha256=7tX1hI3g0SbrXGHcaC_Y1xAzhsoziReYwlqyA8ycB3E,764
448
+ openai/types/beta/realtime/conversation_item_input_audio_transcription_failed_event.py,sha256=xYNSBIyERQJ4P-5YoFF1VptfPa8JnJ0sWaH6LGsPow0,1077
449
+ openai/types/beta/realtime/conversation_item_param.py,sha256=x12A5-yjNWodFNJEnbHKY1WJzSzX9s7EQr2c5FuYKBQ,2177
450
+ openai/types/beta/realtime/conversation_item_truncate_event.py,sha256=1c2_BamaTkgD26eyGZJU5xwbz7lRHupqU2HqcK0VniI,943
451
+ openai/types/beta/realtime/conversation_item_truncate_event_param.py,sha256=hSnVOSMMtLf16nn4ISHkevYCfEsiN9kNcgxXRtHa8Kc,983
452
+ openai/types/beta/realtime/conversation_item_truncated_event.py,sha256=K4S35U85J-UNRba9nkm-7G1ReZu8gA8Sa1z0-Vlozc0,704
453
+ openai/types/beta/realtime/conversation_item_with_reference.py,sha256=WF4r7-aw9Z6m6aNEy_fe9aHq8W-YxhwgU65PnLAQTgw,2564
454
+ openai/types/beta/realtime/conversation_item_with_reference_param.py,sha256=yPM2TL7pMhz5UfJ37_FTn1H6r2WRbdxkAaW5jGCMfh8,2444
455
+ openai/types/beta/realtime/error_event.py,sha256=goNkorKXUHKiYVsVunEsnaRa6_3dsDKVtrxXQtzZCmk,877
456
+ openai/types/beta/realtime/input_audio_buffer_append_event.py,sha256=lTKWd_WFbtDAy6AdaCjeQYBV0dgHuVNNt_PbrtPB8tg,662
457
+ openai/types/beta/realtime/input_audio_buffer_append_event_param.py,sha256=XmN2bE6jBRrkKGVPJdnPjJql5dqMPqwbmFnxo-z22JE,682
458
+ openai/types/beta/realtime/input_audio_buffer_clear_event.py,sha256=7AfCQfMxZQ-UoQXF9edYKw5GcTELPcfvvJWWpuLS41c,489
459
+ openai/types/beta/realtime/input_audio_buffer_clear_event_param.py,sha256=y-zfWqJsh1n6r2i0MgLDpnNC4g1dq3GCS66Twfkng38,499
460
+ openai/types/beta/realtime/input_audio_buffer_cleared_event.py,sha256=j9gpm7aGVmrUt48wqtvBMN8NOgtvqHciegjXjOnWm7A,429
461
+ openai/types/beta/realtime/input_audio_buffer_commit_event.py,sha256=SLZR2xxRd6uO3IQL6-LuozkjROXiGyblKoHYQjwXk4I,493
462
+ openai/types/beta/realtime/input_audio_buffer_commit_event_param.py,sha256=B8agXC-rUl-D-RijJ5MeTLgw43qVYzmf2_2oAVokhLY,503
463
+ openai/types/beta/realtime/input_audio_buffer_committed_event.py,sha256=wXMxuXLw1jmT4e-FmTp6rSxcSc_4l55zO3gT7jI1Mp4,628
464
+ openai/types/beta/realtime/input_audio_buffer_speech_started_event.py,sha256=NVp60RUsLFtte9Ilknmu_5lRk2dZp_1fXCgGHd4EvSM,861
465
+ openai/types/beta/realtime/input_audio_buffer_speech_stopped_event.py,sha256=gszRuYQtAW8upIhd7CJZ7pxboDk-K7sqidjqxgf47q4,779
466
+ openai/types/beta/realtime/rate_limits_updated_event.py,sha256=kBnf_p-49Q_LNdJsj0R1Szi8R4TGYAAJ_KifLuuyFZw,949
467
+ openai/types/beta/realtime/realtime_client_event.py,sha256=TD_qJi1hNgvurWTUzG-xb27thuvUT2-2AK_pouAY3vc,1249
468
+ openai/types/beta/realtime/realtime_client_event_param.py,sha256=qNStVbW_imzF0F8qfEHHE07AZoPIQLvjcTw9mXu4mFY,1294
469
+ openai/types/beta/realtime/realtime_connect_params.py,sha256=AvTypkFCYmDn9qMeektVqij6cqzgovr3PpgpMalJoJ4,290
470
+ openai/types/beta/realtime/realtime_response.py,sha256=ETEAGWXW8uheyaFFzM3YEtxkff_ZdFJrD23cgcs1D0w,3527
471
+ openai/types/beta/realtime/realtime_response_status.py,sha256=gU-59Pr_58TRfMZqFzdCloc53e1qOnU4aaHY3yURUK8,1326
472
+ openai/types/beta/realtime/realtime_response_usage.py,sha256=6XOFjCjPWioHoICZ0Q8KXuUzktQugx6WuTz0O5UvzZg,1541
473
+ openai/types/beta/realtime/realtime_server_event.py,sha256=j8s9jdl5cARv3fVM5jEjo04f83FmNELPRS_lq5Ao_Q0,3512
474
+ openai/types/beta/realtime/response_audio_delta_event.py,sha256=UjbnK4u_WSNTOColZj8SmJgHnAc2H8iRXD76ZnPbz7E,742
475
+ openai/types/beta/realtime/response_audio_done_event.py,sha256=1XEWBPh1JiOgyr6V03mRt_3sLm0YFUq5ft1AhfFlNEg,679
476
+ openai/types/beta/realtime/response_audio_transcript_delta_event.py,sha256=HEVNQ_R2_Nyo6BvNvsliMnN__b17eVd2Jx5udRHg0Hg,773
477
+ openai/types/beta/realtime/response_audio_transcript_done_event.py,sha256=Cn5l4mJnKK3LeSN9qFL4LLqs1WOWg4kt1SaYThB-5c0,787
478
+ openai/types/beta/realtime/response_cancel_event.py,sha256=EKx8IZUISJHdl-_3tCdHtz2BINQ85Tq_ocadnsEGPSk,637
479
+ openai/types/beta/realtime/response_cancel_event_param.py,sha256=nidzBL83liHwyImiNGiz9Ad0V34EtFAQDw1utqcF6ns,630
480
+ openai/types/beta/realtime/response_content_part_added_event.py,sha256=a8-rm1NAwX685fk7GdT6Xi0Yr-JfeAkyUr94-RoFe34,1232
481
+ openai/types/beta/realtime/response_content_part_done_event.py,sha256=jO2TZygxPabbnEG9E1AfNP-JYJv1QtCMnCzgcZ_3n18,1190
482
+ openai/types/beta/realtime/response_create_event.py,sha256=QYKbNiinBokra-M-Uxbs27UgoM27Vs3utxHG-i9k6cE,4753
483
+ openai/types/beta/realtime/response_create_event_param.py,sha256=OfgOLB5A4GfSZZasagobXjoQd2n0S7-PkBfP6aYfFw8,4605
484
+ openai/types/beta/realtime/response_created_event.py,sha256=zZtHx-1YjehXxX6aNE88SFINDaKOBzpzejo6sTNjq9g,506
485
+ openai/types/beta/realtime/response_done_event.py,sha256=_yUPoECCli89iHLtV3NQkXQOW6Lc1JlxVPFw04ziBGY,494
486
+ openai/types/beta/realtime/response_function_call_arguments_delta_event.py,sha256=Yh2mQZDucfnTLiO8LRyG9r7zeS1sjwLcMF1JPMdTFJc,793
487
+ openai/types/beta/realtime/response_function_call_arguments_done_event.py,sha256=kxSPK6nbNWL6pxveY7zaNGgCkCXqyBFJPVYJrw9cbOw,793
488
+ openai/types/beta/realtime/response_output_item_added_event.py,sha256=-_BZjvAqcgv3NIz-EMhvYMxIwvcXTt68FVNp0pw09dI,713
489
+ openai/types/beta/realtime/response_output_item_done_event.py,sha256=0ClNVMZmeIxKghlEid9VGoWiZ97wp00hIdNnev4qBD8,709
490
+ openai/types/beta/realtime/response_text_delta_event.py,sha256=B1yyuc6iMOMoG5Wh6W5KoQNYtVD1vEm2cKqHnl2CuFQ,721
491
+ openai/types/beta/realtime/response_text_done_event.py,sha256=mPgVG6nWxwkZ3aZOX-JkVF7CpaWP5-bvtbxFrr4fK7g,724
492
+ openai/types/beta/realtime/session.py,sha256=_P7MVR1_tpsEZY52fOuMSa-108tihJ4xNjcnTuR7Jj0,5598
493
+ openai/types/beta/realtime/session_create_params.py,sha256=5iknrovjuShYJrZZ0UKp4VjY5O-aiq4GPSljo4ut2-w,6015
494
+ openai/types/beta/realtime/session_create_response.py,sha256=LI1L0HiwH0ZWva90zKYNtMFlagYOb0h5ik-RRtBRdEM,5360
495
+ openai/types/beta/realtime/session_created_event.py,sha256=rTElnBlE7z1htmkdmpdPN4q_dUYS6Su4BkmsqO65hUc,489
496
+ openai/types/beta/realtime/session_update_event.py,sha256=h2ODbMFAXhBtcl9LZ3yrVNOQf8jdPEDuUIv1dIGV5Zo,6752
497
+ openai/types/beta/realtime/session_update_event_param.py,sha256=tGl42AzaaP5WgJ1Rqs0C1tX7nY7_UTJ_dLJnBT2jWro,6437
498
+ openai/types/beta/realtime/session_updated_event.py,sha256=HyR-Pz3U9finVO-bUCvnmeqsANw-fceNvVqEIF6ey10,489
499
+ openai/types/beta/static_file_chunking_strategy.py,sha256=nHaLv70q1rencY2u8mqS7mW7X7enzHrc-zM9mg22dHw,597
500
+ openai/types/beta/static_file_chunking_strategy_object.py,sha256=aOPxudte299F0j3bzniXcKJ7j-w4ZfQpgFHTa3CFyZ8,425
501
+ openai/types/beta/static_file_chunking_strategy_object_param.py,sha256=OwAOs1PT2ygBm4RpzHVVsr-93-Uqjg_IcCoNhtEPT7I,508
502
+ openai/types/beta/static_file_chunking_strategy_param.py,sha256=kCMmgyOxO0XIF2wjCWjUXtyn9S6q_7mNmyUCauqrjsg,692
503
+ openai/types/beta/thread.py,sha256=RrArSK1-_prQY_YBexgD_SU87y_k2rmRq_tti66i7s4,2132
504
+ openai/types/beta/thread_create_and_run_params.py,sha256=ZDwMQjoR3zLLZN7wnkBwYoxJbbrnSez7m8I5DuE38TM,13605
505
+ openai/types/beta/thread_create_params.py,sha256=HNw9iQMRoYJYXAdrqLMyl6LAK60aX7BoGTRnGwcmTww,5252
506
+ openai/types/beta/thread_deleted.py,sha256=MaYG_jZIjSiB9h_ZBiTtpMsRSwFKkCY83ziM5GO_oUk,292
507
+ openai/types/beta/thread_update_params.py,sha256=FXMPLWIBCmWJnZ3Ktdn8PkSvyA4_Tx0HHzVovBs_lOU,1877
508
+ openai/types/beta/threads/__init__.py,sha256=0WsJo0tXp08CgayozR7Tqc3b8sqzotWzvBun19CEIWc,3066
509
+ openai/types/beta/threads/__pycache__/__init__.cpython-311.pyc,,
510
+ openai/types/beta/threads/__pycache__/annotation.cpython-311.pyc,,
511
+ openai/types/beta/threads/__pycache__/annotation_delta.cpython-311.pyc,,
512
+ openai/types/beta/threads/__pycache__/file_citation_annotation.cpython-311.pyc,,
513
+ openai/types/beta/threads/__pycache__/file_citation_delta_annotation.cpython-311.pyc,,
514
+ openai/types/beta/threads/__pycache__/file_path_annotation.cpython-311.pyc,,
515
+ openai/types/beta/threads/__pycache__/file_path_delta_annotation.cpython-311.pyc,,
516
+ openai/types/beta/threads/__pycache__/image_file.cpython-311.pyc,,
517
+ openai/types/beta/threads/__pycache__/image_file_content_block.cpython-311.pyc,,
518
+ openai/types/beta/threads/__pycache__/image_file_content_block_param.cpython-311.pyc,,
519
+ openai/types/beta/threads/__pycache__/image_file_delta.cpython-311.pyc,,
520
+ openai/types/beta/threads/__pycache__/image_file_delta_block.cpython-311.pyc,,
521
+ openai/types/beta/threads/__pycache__/image_file_param.cpython-311.pyc,,
522
+ openai/types/beta/threads/__pycache__/image_url.cpython-311.pyc,,
523
+ openai/types/beta/threads/__pycache__/image_url_content_block.cpython-311.pyc,,
524
+ openai/types/beta/threads/__pycache__/image_url_content_block_param.cpython-311.pyc,,
525
+ openai/types/beta/threads/__pycache__/image_url_delta.cpython-311.pyc,,
526
+ openai/types/beta/threads/__pycache__/image_url_delta_block.cpython-311.pyc,,
527
+ openai/types/beta/threads/__pycache__/image_url_param.cpython-311.pyc,,
528
+ openai/types/beta/threads/__pycache__/message.cpython-311.pyc,,
529
+ openai/types/beta/threads/__pycache__/message_content.cpython-311.pyc,,
530
+ openai/types/beta/threads/__pycache__/message_content_delta.cpython-311.pyc,,
531
+ openai/types/beta/threads/__pycache__/message_content_part_param.cpython-311.pyc,,
532
+ openai/types/beta/threads/__pycache__/message_create_params.cpython-311.pyc,,
533
+ openai/types/beta/threads/__pycache__/message_deleted.cpython-311.pyc,,
534
+ openai/types/beta/threads/__pycache__/message_delta.cpython-311.pyc,,
535
+ openai/types/beta/threads/__pycache__/message_delta_event.cpython-311.pyc,,
536
+ openai/types/beta/threads/__pycache__/message_list_params.cpython-311.pyc,,
537
+ openai/types/beta/threads/__pycache__/message_update_params.cpython-311.pyc,,
538
+ openai/types/beta/threads/__pycache__/refusal_content_block.cpython-311.pyc,,
539
+ openai/types/beta/threads/__pycache__/refusal_delta_block.cpython-311.pyc,,
540
+ openai/types/beta/threads/__pycache__/required_action_function_tool_call.cpython-311.pyc,,
541
+ openai/types/beta/threads/__pycache__/run.cpython-311.pyc,,
542
+ openai/types/beta/threads/__pycache__/run_create_params.cpython-311.pyc,,
543
+ openai/types/beta/threads/__pycache__/run_list_params.cpython-311.pyc,,
544
+ openai/types/beta/threads/__pycache__/run_status.cpython-311.pyc,,
545
+ openai/types/beta/threads/__pycache__/run_submit_tool_outputs_params.cpython-311.pyc,,
546
+ openai/types/beta/threads/__pycache__/run_update_params.cpython-311.pyc,,
547
+ openai/types/beta/threads/__pycache__/text.cpython-311.pyc,,
548
+ openai/types/beta/threads/__pycache__/text_content_block.cpython-311.pyc,,
549
+ openai/types/beta/threads/__pycache__/text_content_block_param.cpython-311.pyc,,
550
+ openai/types/beta/threads/__pycache__/text_delta.cpython-311.pyc,,
551
+ openai/types/beta/threads/__pycache__/text_delta_block.cpython-311.pyc,,
552
+ openai/types/beta/threads/annotation.py,sha256=Ce3Y0mSodmYRkoqyhtyIdep6WfWew6KJJgtrENOnfek,462
553
+ openai/types/beta/threads/annotation_delta.py,sha256=iNsE-1Gn1yU0TlTHoxqKbOvPRUxWuXsF72qY_mMnWGY,510
554
+ openai/types/beta/threads/file_citation_annotation.py,sha256=0Rs1Sr-eCLQpLsu8-WwHG7kv5Ihud4kiHO1NL7xHO0s,595
555
+ openai/types/beta/threads/file_citation_delta_annotation.py,sha256=R87tcXkJ0RiH5UJo0Qknwk7X_c4qF1qvGsu2spOPx-I,873
556
+ openai/types/beta/threads/file_path_annotation.py,sha256=hNc4ebprJynqMG1yk0gLvgzTpjtVzgEbXriMZftkgew,552
557
+ openai/types/beta/threads/file_path_delta_annotation.py,sha256=RW9dgDF9Ggf357fPZ-vUu2ge3U-Hf11DVTr-ecklsBY,755
558
+ openai/types/beta/threads/image_file.py,sha256=QVXLiplb-CigZqdMZtXlmebXKt6tF74kI-3vHxe_qUE,707
559
+ openai/types/beta/threads/image_file_content_block.py,sha256=31I5trSERP2qLZpJ4ugZtIyta4DDoBhBvxkM4LovL3w,363
560
+ openai/types/beta/threads/image_file_content_block_param.py,sha256=3ryZ6AV-DLwWYVP2XSK11UHkvutTUollxn6z8BZ4rSA,445
561
+ openai/types/beta/threads/image_file_delta.py,sha256=nUJoSuP-3YyqqwBsmPJ0AqiQydz2FymVDCXQVkNYwOk,734
562
+ openai/types/beta/threads/image_file_delta_block.py,sha256=XJ2YVX_cq0OiNcGbNmXO0_dca1IvPockOvvoM7pDvbI,492
563
+ openai/types/beta/threads/image_file_param.py,sha256=BaKD31JPxQ5CjRfZ_0RcOG3lDTZeW_k85XCvwyctD54,717
564
+ openai/types/beta/threads/image_url.py,sha256=EzEK-CYoO0YyqFmejIPu7pMfTEgMmp5NFscsRd2pCos,592
565
+ openai/types/beta/threads/image_url_content_block.py,sha256=_sg3BWrtVGw-8XtAh15Rs4co6NCBB9Y3zCp_XOAz4U8,365
566
+ openai/types/beta/threads/image_url_content_block_param.py,sha256=RWzo5KkBiwvgJSviZl6JUlsfv3VQKIFr6cp9lhkLu8E,447
567
+ openai/types/beta/threads/image_url_delta.py,sha256=MXCp-OmuNT4njbWA9DWAbocP7pD3VpdcUy2wgeOjwm4,582
568
+ openai/types/beta/threads/image_url_delta_block.py,sha256=Jjdfub4g9ceNKF8GuuTIghOmYba2vEeX3320mg5PWIA,484
569
+ openai/types/beta/threads/image_url_param.py,sha256=VRLaxZf-wxnvAOcKGwyF_o6KEvwktBfE3B6KmYE5LZo,602
570
+ openai/types/beta/threads/message.py,sha256=vk5lEpeA_aykADtn9GB8sLye7TByWZmV3ghauCh2s3c,3414
571
+ openai/types/beta/threads/message_content.py,sha256=b8IC_EG28hcXk28z09EABfJwPkYZ7U-lTp_9ykdoxvU,630
572
+ openai/types/beta/threads/message_content_delta.py,sha256=o4Edlx9BtdH2Z4OMwGWWXex8wiijknNRihJ-wu8PDUQ,615
573
+ openai/types/beta/threads/message_content_part_param.py,sha256=RXrnoDP2-UMQHoR2jJvaT3JHrCeffLi6WzXzH05cDGI,550
574
+ openai/types/beta/threads/message_create_params.py,sha256=7fXlNyqy7tzuLgMsCYfJegL2sZcjKwYNLihwteODyg0,2083
575
+ openai/types/beta/threads/message_deleted.py,sha256=DNnrSfGZ3kWEazmo4mVTdLhiKlIHxs-D8Ef5sNdHY1o,303
576
+ openai/types/beta/threads/message_delta.py,sha256=-kaRyvnIA8Yr2QV5jKRn15BU2Ni068a_WtWJ4PqlLfE,570
577
+ openai/types/beta/threads/message_delta_event.py,sha256=7SpE4Dd3Lrc_cm97SzBwZzGGhfLqiFViDeTRQz-5YmQ,579
578
+ openai/types/beta/threads/message_list_params.py,sha256=iuwzDccnViooUxHlq-WoE1FEJArNy5-zrYCoaNgVS8k,1296
579
+ openai/types/beta/threads/message_update_params.py,sha256=XNCSLfRkk531F8mNbUB9bRYcCzJfW8NiFQ9c0Aq75Dk,757
580
+ openai/types/beta/threads/refusal_content_block.py,sha256=qB9jrS2Wv9UQ7XXaIVKe62dTAU1WOnN3qenR_E43mhg,310
581
+ openai/types/beta/threads/refusal_delta_block.py,sha256=ZhgFC8KqA9LIwo_CQIX-w3VVg3Vj0h71xC1Hh1bwmnU,423
582
+ openai/types/beta/threads/required_action_function_tool_call.py,sha256=XsR4OBbxI-RWteLvhcLEDBan6eUUGvhLORFRKjPbsLg,888
583
+ openai/types/beta/threads/run.py,sha256=erWl8z0MiFq9_dbFb_HN6AHdUru_H3NFM97OTZjBECE,8337
584
+ openai/types/beta/threads/run_create_params.py,sha256=Wr-wBUdt5JIHQkJMYk6KM2G8PqxgC28KuJbV7fq9C8E,9875
585
+ openai/types/beta/threads/run_list_params.py,sha256=TgepSLrupUUtuQV2kbVcoGH1YA0FVUX9ESkszKuwyHY,1210
586
+ openai/types/beta/threads/run_status.py,sha256=OU1hzoyYXaRJ3lupX4YcZ-HZkTpctNE4tzAcp6X8Q9U,351
587
+ openai/types/beta/threads/run_submit_tool_outputs_params.py,sha256=cKiyD374BsZN_Oih5o5n5gOf_DYsxErVrbgxveNhmPI,1643
588
+ openai/types/beta/threads/run_update_params.py,sha256=sVjkl6ayjU75Tk8t69r6xgIg80OlTikyRdS0sa2Gavg,749
589
+ openai/types/beta/threads/runs/__init__.py,sha256=mg_roY9yL1bClJ8isizkQgHOAkN17iSdVr2m65iyBrs,1653
590
+ openai/types/beta/threads/runs/__pycache__/__init__.cpython-311.pyc,,
591
+ openai/types/beta/threads/runs/__pycache__/code_interpreter_logs.cpython-311.pyc,,
592
+ openai/types/beta/threads/runs/__pycache__/code_interpreter_output_image.cpython-311.pyc,,
593
+ openai/types/beta/threads/runs/__pycache__/code_interpreter_tool_call.cpython-311.pyc,,
594
+ openai/types/beta/threads/runs/__pycache__/code_interpreter_tool_call_delta.cpython-311.pyc,,
595
+ openai/types/beta/threads/runs/__pycache__/file_search_tool_call.cpython-311.pyc,,
596
+ openai/types/beta/threads/runs/__pycache__/file_search_tool_call_delta.cpython-311.pyc,,
597
+ openai/types/beta/threads/runs/__pycache__/function_tool_call.cpython-311.pyc,,
598
+ openai/types/beta/threads/runs/__pycache__/function_tool_call_delta.cpython-311.pyc,,
599
+ openai/types/beta/threads/runs/__pycache__/message_creation_step_details.cpython-311.pyc,,
600
+ openai/types/beta/threads/runs/__pycache__/run_step.cpython-311.pyc,,
601
+ openai/types/beta/threads/runs/__pycache__/run_step_delta.cpython-311.pyc,,
602
+ openai/types/beta/threads/runs/__pycache__/run_step_delta_event.cpython-311.pyc,,
603
+ openai/types/beta/threads/runs/__pycache__/run_step_delta_message_delta.cpython-311.pyc,,
604
+ openai/types/beta/threads/runs/__pycache__/run_step_include.cpython-311.pyc,,
605
+ openai/types/beta/threads/runs/__pycache__/step_list_params.cpython-311.pyc,,
606
+ openai/types/beta/threads/runs/__pycache__/step_retrieve_params.cpython-311.pyc,,
607
+ openai/types/beta/threads/runs/__pycache__/tool_call.cpython-311.pyc,,
608
+ openai/types/beta/threads/runs/__pycache__/tool_call_delta.cpython-311.pyc,,
609
+ openai/types/beta/threads/runs/__pycache__/tool_call_delta_object.cpython-311.pyc,,
610
+ openai/types/beta/threads/runs/__pycache__/tool_calls_step_details.cpython-311.pyc,,
611
+ openai/types/beta/threads/runs/code_interpreter_logs.py,sha256=7wXZpUE9I-oZJ0K3mFG0Nwmfm2bKGiSpWJyBeo7txwo,482
612
+ openai/types/beta/threads/runs/code_interpreter_output_image.py,sha256=8o99k0ZHMHpqH0taXkOkYR9WaDUpCN-G0Ifd5XsJpb8,613
613
+ openai/types/beta/threads/runs/code_interpreter_tool_call.py,sha256=ekiIuH1kVCN51hCzY3AYr5i3_a4vlgUiZHJ59pl17oY,1810
614
+ openai/types/beta/threads/runs/code_interpreter_tool_call_delta.py,sha256=Qr2cen-bKyXTW2NDEUHnmJRE0jY-nkLcnO4NzCbBPDo,1479
615
+ openai/types/beta/threads/runs/file_search_tool_call.py,sha256=XBgsM_USVr3ZrwTZx4L1-YG94Qv8c8GXI19ZHtDrZq8,1897
616
+ openai/types/beta/threads/runs/file_search_tool_call_delta.py,sha256=Gx8c7GSgGYuOvGadcAr3ZIspEFMZS3e2OY7vBo_MYnM,655
617
+ openai/types/beta/threads/runs/function_tool_call.py,sha256=aOq5yOtKOi6C5Q1FIQRxqtJJR1AcSW_K5PvRiKISNCI,920
618
+ openai/types/beta/threads/runs/function_tool_call_delta.py,sha256=VFRtCJkj4PHX97upM1cXpJAk9-JvJSgyngie06fBIjQ,1076
619
+ openai/types/beta/threads/runs/message_creation_step_details.py,sha256=tRFMNF2Rf4DekVliUKkoujItiOjjAE9EG9bbxJvpVPA,506
620
+ openai/types/beta/threads/runs/run_step.py,sha256=zTSlNBowJx507-oo6QJ7A30BFXdUt9k3lTZ4o34L1wI,3589
621
+ openai/types/beta/threads/runs/run_step_delta.py,sha256=FNYDTddRrTO3PT_fgi7AsJ1PeMtyWsVzcxoihjbBzAw,663
622
+ openai/types/beta/threads/runs/run_step_delta_event.py,sha256=rkDyvHSXt-hc1LngB41f9vglkn6t03kS62bsn0iGaxU,585
623
+ openai/types/beta/threads/runs/run_step_delta_message_delta.py,sha256=UIo6oPH8STLjPHiWL-A4CtKfYe49uptvIAHWNnZ3Ums,564
624
+ openai/types/beta/threads/runs/run_step_include.py,sha256=u-9Cw1hruRiWr70f_hw4XG0w1cwOAYfRJYKva2dEacs,264
625
+ openai/types/beta/threads/runs/step_list_params.py,sha256=zorF5juogCzLMsZLjzMZTs_iIBcPj9WUur5HcrXuH8M,1752
626
+ openai/types/beta/threads/runs/step_retrieve_params.py,sha256=aJ7l8RDJLPyEmqjfO4XsTV54VZOOqyb_gKSUvqp33ZI,815
627
+ openai/types/beta/threads/runs/tool_call.py,sha256=1rwq4IbLgjQAQ-ORXYkNpmJyi9SREDnqA57nJbj_NiU,537
628
+ openai/types/beta/threads/runs/tool_call_delta.py,sha256=t5wF8ndW3z99lHF981FL-IN5xXBS9p7eonH9bxvKu_c,600
629
+ openai/types/beta/threads/runs/tool_call_delta_object.py,sha256=eK20VsIswEyT48XbkGu60HUrE7OD3fhpn1fbXrVauM4,615
630
+ openai/types/beta/threads/runs/tool_calls_step_details.py,sha256=bDa-yybVF3a8H6VqhDGmFZMkpn-0gtPQM2jWWsmUvYo,574
631
+ openai/types/beta/threads/text.py,sha256=9gjmDCqoptnxQ8Jhym87pECyd6m1lB3daCxKNzSFp4Y,319
632
+ openai/types/beta/threads/text_content_block.py,sha256=pdGlKYM1IF9PjTvxjxo1oDg1XeGCFdJdl0kJVpZ7jIs,319
633
+ openai/types/beta/threads/text_content_block_param.py,sha256=feQr0muF845tc1q3FJrzgYOhXeuKLU3x1x5DGFTN2Q0,407
634
+ openai/types/beta/threads/text_delta.py,sha256=2EFeQCkg_cc8nYEJ6BtYAA3_TqgMTbmEXoMvLjzaB34,389
635
+ openai/types/beta/threads/text_delta_block.py,sha256=pkHkVBgNsmHi9JURzs5ayPqxQXSkex3F0jH0MqJXik0,448
636
+ openai/types/beta/vector_store.py,sha256=f5U_tHU60wkk71eHJRdRWHyjnWkqwCatLaYnAECnL2k,2472
637
+ openai/types/beta/vector_store_create_params.py,sha256=Jug3p3SuOPJZCmSSfq-gL6U_wVF0nB7FN9kSQeDa0pc,1725
638
+ openai/types/beta/vector_store_deleted.py,sha256=Yq0E1orRLShseLwZ1deiBdDEUgEw_tcYVxGYa5gbIrM,308
639
+ openai/types/beta/vector_store_list_params.py,sha256=KeSeQaEdqO2EiPEVtq1Nun-uRRdkfwW0P8aHeCmL5zA,1226
640
+ openai/types/beta/vector_store_update_params.py,sha256=kIh9UumXwf326NyCiFOcDWwbb-k5mNkWhemvdR1p1T0,1241
641
+ openai/types/beta/vector_stores/__init__.py,sha256=gXfm8V5Ad0iueaC_VoHDUQvSdwSfBzk2cQNwZldvY0s,671
642
+ openai/types/beta/vector_stores/__pycache__/__init__.cpython-311.pyc,,
643
+ openai/types/beta/vector_stores/__pycache__/file_batch_create_params.cpython-311.pyc,,
644
+ openai/types/beta/vector_stores/__pycache__/file_batch_list_files_params.cpython-311.pyc,,
645
+ openai/types/beta/vector_stores/__pycache__/file_create_params.cpython-311.pyc,,
646
+ openai/types/beta/vector_stores/__pycache__/file_list_params.cpython-311.pyc,,
647
+ openai/types/beta/vector_stores/__pycache__/vector_store_file.cpython-311.pyc,,
648
+ openai/types/beta/vector_stores/__pycache__/vector_store_file_batch.cpython-311.pyc,,
649
+ openai/types/beta/vector_stores/__pycache__/vector_store_file_deleted.cpython-311.pyc,,
650
+ openai/types/beta/vector_stores/file_batch_create_params.py,sha256=lV4t5kikvEhl431RZgGDyQdFKTl-zXI-Q7YnbM0Qmv8,798
651
+ openai/types/beta/vector_stores/file_batch_list_files_params.py,sha256=FPpQvCQI2skyLB8YCuwdCj7RbO9ba1UjaHAtvrWxAbs,1451
652
+ openai/types/beta/vector_stores/file_create_params.py,sha256=kwSqe-le2UaYrcXGPxlP41QhH2OGvLXBbntAGlmK288,748
653
+ openai/types/beta/vector_stores/file_list_params.py,sha256=AIzmNH1oFuy-qlpRhj9eXu9yyTA-2z_IppLYFclMtZw,1385
654
+ openai/types/beta/vector_stores/vector_store_file.py,sha256=X8aQg4jYlK7iQumxn7B-eammIKVjUbu4lapPeq9jDWo,1788
655
+ openai/types/beta/vector_stores/vector_store_file_batch.py,sha256=ubvj8z95EOdRGAp0rgI94g5uFQx0ob8hLgwOWHKda4E,1457
656
+ openai/types/beta/vector_stores/vector_store_file_deleted.py,sha256=37J7oL2WYCgOd7Rhg2jX6IavaZT63vgUf3u6LC6C3Hs,322
657
+ openai/types/chat/__init__.py,sha256=coi_C98uX9XhThMVJ0GgjPVpzOYOMgj-ZmCWulEE3EA,3849
658
+ openai/types/chat/__pycache__/__init__.cpython-311.pyc,,
659
+ openai/types/chat/__pycache__/chat_completion.cpython-311.pyc,,
660
+ openai/types/chat/__pycache__/chat_completion_assistant_message_param.cpython-311.pyc,,
661
+ openai/types/chat/__pycache__/chat_completion_audio.cpython-311.pyc,,
662
+ openai/types/chat/__pycache__/chat_completion_audio_param.cpython-311.pyc,,
663
+ openai/types/chat/__pycache__/chat_completion_chunk.cpython-311.pyc,,
664
+ openai/types/chat/__pycache__/chat_completion_content_part_image_param.cpython-311.pyc,,
665
+ openai/types/chat/__pycache__/chat_completion_content_part_input_audio_param.cpython-311.pyc,,
666
+ openai/types/chat/__pycache__/chat_completion_content_part_param.cpython-311.pyc,,
667
+ openai/types/chat/__pycache__/chat_completion_content_part_refusal_param.cpython-311.pyc,,
668
+ openai/types/chat/__pycache__/chat_completion_content_part_text_param.cpython-311.pyc,,
669
+ openai/types/chat/__pycache__/chat_completion_developer_message_param.cpython-311.pyc,,
670
+ openai/types/chat/__pycache__/chat_completion_function_call_option_param.cpython-311.pyc,,
671
+ openai/types/chat/__pycache__/chat_completion_function_message_param.cpython-311.pyc,,
672
+ openai/types/chat/__pycache__/chat_completion_message.cpython-311.pyc,,
673
+ openai/types/chat/__pycache__/chat_completion_message_param.cpython-311.pyc,,
674
+ openai/types/chat/__pycache__/chat_completion_message_tool_call.cpython-311.pyc,,
675
+ openai/types/chat/__pycache__/chat_completion_message_tool_call_param.cpython-311.pyc,,
676
+ openai/types/chat/__pycache__/chat_completion_modality.cpython-311.pyc,,
677
+ openai/types/chat/__pycache__/chat_completion_named_tool_choice_param.cpython-311.pyc,,
678
+ openai/types/chat/__pycache__/chat_completion_prediction_content_param.cpython-311.pyc,,
679
+ openai/types/chat/__pycache__/chat_completion_reasoning_effort.cpython-311.pyc,,
680
+ openai/types/chat/__pycache__/chat_completion_role.cpython-311.pyc,,
681
+ openai/types/chat/__pycache__/chat_completion_stream_options_param.cpython-311.pyc,,
682
+ openai/types/chat/__pycache__/chat_completion_system_message_param.cpython-311.pyc,,
683
+ openai/types/chat/__pycache__/chat_completion_token_logprob.cpython-311.pyc,,
684
+ openai/types/chat/__pycache__/chat_completion_tool_choice_option_param.cpython-311.pyc,,
685
+ openai/types/chat/__pycache__/chat_completion_tool_message_param.cpython-311.pyc,,
686
+ openai/types/chat/__pycache__/chat_completion_tool_param.cpython-311.pyc,,
687
+ openai/types/chat/__pycache__/chat_completion_user_message_param.cpython-311.pyc,,
688
+ openai/types/chat/__pycache__/completion_create_params.cpython-311.pyc,,
689
+ openai/types/chat/__pycache__/parsed_chat_completion.cpython-311.pyc,,
690
+ openai/types/chat/__pycache__/parsed_function_tool_call.cpython-311.pyc,,
691
+ openai/types/chat/chat_completion.py,sha256=iQ2kNt1pW80lnWkpbyr0QhXks_TmFBSPlI5HWPS2BwQ,2586
692
+ openai/types/chat/chat_completion_assistant_message_param.py,sha256=E6ZrsjEN_JHOHO-wC7Uk90Fa7Qz7bfgx8jea0z6g30s,2421
693
+ openai/types/chat/chat_completion_audio.py,sha256=vzWeaAAAbomkvbFksXQu6qpw1RVJiuFytJZswO6h6vI,656
694
+ openai/types/chat/chat_completion_audio_param.py,sha256=MnY4PNK8-OOaODkHNhBbSbzH4HmqykKvwftsOjVpOAE,801
695
+ openai/types/chat/chat_completion_chunk.py,sha256=5HGpTKt0mi8BTu-tkjl7WUPmTgpjI_TyYrVwC40jELE,4942
696
+ openai/types/chat/chat_completion_content_part_image_param.py,sha256=Gqv98qyD8jB81THZp49c8v2tHrId_iQp4NzciT9SKI0,797
697
+ openai/types/chat/chat_completion_content_part_input_audio_param.py,sha256=r1EXNEtjJo5oJ9AnP3omaJzACE1gSfdmob5Q0HKsOm4,704
698
+ openai/types/chat/chat_completion_content_part_param.py,sha256=7lCk-fZB5iT5keHLWw9eM-Hd5jsnPh2IIHICIUpoEXk,686
699
+ openai/types/chat/chat_completion_content_part_refusal_param.py,sha256=TV1vu-IgrvKa5IBlPSIdBxUaW8g1zDhMOOBOEmhU2w0,467
700
+ openai/types/chat/chat_completion_content_part_text_param.py,sha256=4IpiXMKM9AuTyop5PRptPBbBhh9s93xy2vjg4Yw6NIw,429
701
+ openai/types/chat/chat_completion_developer_message_param.py,sha256=OCFKdTWkff94VtgY7AaDUUFiZLT8LBn7WWxjbcIq2OM,830
702
+ openai/types/chat/chat_completion_function_call_option_param.py,sha256=M-IqWHyBLkvYBcwFxxp4ydCIxbPDaMlNl4bik9UoFd4,365
703
+ openai/types/chat/chat_completion_function_message_param.py,sha256=jIaZbBHHbt4v4xHCIyvYtYLst_X4jOznRjYNcTf0MF0,591
704
+ openai/types/chat/chat_completion_message.py,sha256=AH7JpjgKfphxBRJyI4PhwHCMREy_-D-a4_4u4NHjSfc,1674
705
+ openai/types/chat/chat_completion_message_param.py,sha256=aLrz_cX_CYymFdW9cMIPZpv0Z4zM50RECV3SH6QNZsc,1019
706
+ openai/types/chat/chat_completion_message_tool_call.py,sha256=XlIe2vhSYvrt8o8Yol5AQqnacI1xHqpEIV26G4oNrZY,900
707
+ openai/types/chat/chat_completion_message_tool_call_param.py,sha256=XNhuUpGr5qwVTo0K8YavJwleHYSdwN_urK51eKlqC24,1009
708
+ openai/types/chat/chat_completion_modality.py,sha256=8Ga0kruwJc43WD2OIqNudn7KrVRTPDQaalVkh_8bp9I,236
709
+ openai/types/chat/chat_completion_named_tool_choice_param.py,sha256=JsxfSJYpOmF7zIreQ0JrXRSLp07OGCBSycRRcF6OZmg,569
710
+ openai/types/chat/chat_completion_prediction_content_param.py,sha256=Xw4K_4F379LsXENOpZvREDn55cCnbmZ69xa4fw9w3bg,868
711
+ openai/types/chat/chat_completion_reasoning_effort.py,sha256=Bs4xRaukXpM-_NW-QSKKnUyIPDw1ffSqnWaHU-rMdIE,258
712
+ openai/types/chat/chat_completion_role.py,sha256=LW6-tqXaqpD7H53PiSXrjvIo6g4RfHhWityDm6Nfvig,275
713
+ openai/types/chat/chat_completion_stream_options_param.py,sha256=7-R2mYh7dbtX9qDOL3UkeyVH6FNWC_4aTCLtHYObMbs,628
714
+ openai/types/chat/chat_completion_system_message_param.py,sha256=WYtzmsNP8ZI3Ie8cd-oU7RuNoaBF6-bBR3mOzST9hMw,815
715
+ openai/types/chat/chat_completion_token_logprob.py,sha256=6-ipUFfsXMf5L7FDFi127NaVkDtmEooVgGBF6Ts965A,1769
716
+ openai/types/chat/chat_completion_tool_choice_option_param.py,sha256=ef71WSM9HMQhIQUocRgVJUVW-bSRwK2_1NjFSB5TPiI,472
717
+ openai/types/chat/chat_completion_tool_message_param.py,sha256=5K7jfKpwTuKNi1PTFabq_LHH-7wun8CUsLDh90U8zQE,730
718
+ openai/types/chat/chat_completion_tool_param.py,sha256=J9r2TAWygkIBDInWEKx29gBE0wiCgc7HpXFyQhxSkAU,503
719
+ openai/types/chat/chat_completion_user_message_param.py,sha256=mik-MRkwb543C5FSJ52LtTkeA2E_HdLUgtoHEdO73XQ,792
720
+ openai/types/chat/completion_create_params.py,sha256=l_3Xnl7NKnFGaMXDe4m9o1JEAQnqgWqoo_HTVBof-wM,14071
721
+ openai/types/chat/parsed_chat_completion.py,sha256=KwcwCtj0yexl6gB7yuOnyETRW-uUvNRYbVzPMkwCe5Q,1437
722
+ openai/types/chat/parsed_function_tool_call.py,sha256=hJzcKOpzf1tnXC6RGbPhaeCawq8EFdnLK_MfRITkW1U,920
723
+ openai/types/chat_model.py,sha256=cWRfyQj8VNe0UNeRMxptrURQFTChmAd25Nn4NUZ7c60,1148
724
+ openai/types/completion.py,sha256=yuYVEVkJcMVUINNLglkxOJqCx097HKCYFeJun3Js73A,1172
725
+ openai/types/completion_choice.py,sha256=PUk77T3Cp34UJSXoMfSzTKGWDK0rQQwq84X_PSlOUJo,965
726
+ openai/types/completion_create_params.py,sha256=TWNRWlGAcvirzY3Piy6AeYKyNxG7ktmtwjS27Q4bTi8,7535
727
+ openai/types/completion_usage.py,sha256=uf5n0vzlCkGAU67BBn_h7yhjd_G4OHpQbJnvzz0eO2A,1735
728
+ openai/types/create_embedding_response.py,sha256=lTAu_Pym76kFljDnnDRoDB2GNQSzWmwwlqf5ff7FNPM,798
729
+ openai/types/embedding.py,sha256=2pV6RTSf5UV6E86Xeud5ZwmjQjMS93m_4LrQ0GN3fho,637
730
+ openai/types/embedding_create_params.py,sha256=8I17QIDql3rbN7PsUDm0mtvJArIZfTrc2eqw1Ip11yE,1938
731
+ openai/types/embedding_model.py,sha256=0dDL87len4vZ4DR6eCp7JZJCJpgwWphRmJhMK3Se8f4,281
732
+ openai/types/file_content.py,sha256=qLlM4J8kgu1BfrtlmYftPsQVCJu4VqYeiS1T28u8EQ8,184
733
+ openai/types/file_create_params.py,sha256=N1I3rER1se27usx46fhkvdtn-blJ6Y9ECT7Wwzve37Q,913
734
+ openai/types/file_deleted.py,sha256=H_r9U7XthT5xHAo_4ay1EGGkc21eURt8MkkIBRYiQcw,277
735
+ openai/types/file_list_params.py,sha256=TmmqvM7droAJ49YlgpeFzrhPv5uVkSZDxqlG6hhumPo,960
736
+ openai/types/file_object.py,sha256=ESuRYCTLbDtHxyuhzybKTF_TztIcq_F7TzCTQ6JToE0,1309
737
+ openai/types/file_purpose.py,sha256=o1TzR-41XsNsQ0791GTGPe3DLkU9FEODucKdP6Q6sPc,243
738
+ openai/types/fine_tuning/__init__.py,sha256=SZvjq_22oY9E4zcnrvVd0ul9U4sk_IBeOd0MsNALu5s,806
739
+ openai/types/fine_tuning/__pycache__/__init__.cpython-311.pyc,,
740
+ openai/types/fine_tuning/__pycache__/fine_tuning_job.cpython-311.pyc,,
741
+ openai/types/fine_tuning/__pycache__/fine_tuning_job_event.cpython-311.pyc,,
742
+ openai/types/fine_tuning/__pycache__/fine_tuning_job_integration.cpython-311.pyc,,
743
+ openai/types/fine_tuning/__pycache__/fine_tuning_job_wandb_integration.cpython-311.pyc,,
744
+ openai/types/fine_tuning/__pycache__/fine_tuning_job_wandb_integration_object.cpython-311.pyc,,
745
+ openai/types/fine_tuning/__pycache__/job_create_params.cpython-311.pyc,,
746
+ openai/types/fine_tuning/__pycache__/job_list_events_params.cpython-311.pyc,,
747
+ openai/types/fine_tuning/__pycache__/job_list_params.cpython-311.pyc,,
748
+ openai/types/fine_tuning/fine_tuning_job.py,sha256=bu-afb1RZqgNmpUQ7MoXymTjFs3i5JSsBLMV4TKHhi8,6473
749
+ openai/types/fine_tuning/fine_tuning_job_event.py,sha256=POxSD7-WxAtJV2KuEpA9EmZi7W_u0PikOUtUzxIXii4,854
750
+ openai/types/fine_tuning/fine_tuning_job_integration.py,sha256=c3Uy7RMVJ32Xlat-6s9eG-5vZLl4w66COXc0B3pWk4g,242
751
+ openai/types/fine_tuning/fine_tuning_job_wandb_integration.py,sha256=YnBeiz14UuhUSpnD0KBj5V143qLvJbDIMcUVWOCBLXY,1026
752
+ openai/types/fine_tuning/fine_tuning_job_wandb_integration_object.py,sha256=7vEc2uEV2c_DENBjhq0Qy5X8B-rzxsKvGECjnvF1Wdw,804
753
+ openai/types/fine_tuning/job_create_params.py,sha256=TwQlyQrZfxrgqD7nmJDWE8pwklsdUUmkYaitvB7LY34,7222
754
+ openai/types/fine_tuning/job_list_events_params.py,sha256=4xOED4H2ky2mI9sIDytjmfJz5bNAdNWb70WIb_0bBWs,400
755
+ openai/types/fine_tuning/job_list_params.py,sha256=yjxaEnESVTRpJ9ItvjKq30KcD_xz_trqKMIxG2eAriE,396
756
+ openai/types/fine_tuning/jobs/__init__.py,sha256=nuWhOUsmsoVKTKMU35kknmr8sfpTF-kkIzyuOlRbJj0,295
757
+ openai/types/fine_tuning/jobs/__pycache__/__init__.cpython-311.pyc,,
758
+ openai/types/fine_tuning/jobs/__pycache__/checkpoint_list_params.cpython-311.pyc,,
759
+ openai/types/fine_tuning/jobs/__pycache__/fine_tuning_job_checkpoint.cpython-311.pyc,,
760
+ openai/types/fine_tuning/jobs/checkpoint_list_params.py,sha256=XoDLkkKCWmf5an5rnoVEpNK8mtQHq1fHw9EqmezfrXM,415
761
+ openai/types/fine_tuning/jobs/fine_tuning_job_checkpoint.py,sha256=Z_sUhebJY9nWSssZU7QoOJwe5sez76sCAuVeSO63XhY,1347
762
+ openai/types/image.py,sha256=9No-8GHesOUbjchemY1jqtMwh_s22oBmLVFlLn2KoQo,607
763
+ openai/types/image_create_variation_params.py,sha256=PvvPvHXvz0etrRrzVIyvRjvDvNbjGspPu85hOq2fLII,1477
764
+ openai/types/image_edit_params.py,sha256=cxpBybs5peY0DJMTWHgoIx3dWIXj0Y0YmvgxrjGmWjo,1837
765
+ openai/types/image_generate_params.py,sha256=bD2AEIetbt37YDp65vEFfGxkLndOFCwhzJol1I63wfA,2132
766
+ openai/types/image_model.py,sha256=W4YchkhJT2wZdlNDUpVkEKg8zdDDfp9S3oTf4D8Wr8g,219
767
+ openai/types/images_response.py,sha256=EJ4qxYZ8CPGh2SZdRsyw6I0FnUvlgwxwc4NgPovJrvk,274
768
+ openai/types/model.py,sha256=DMw8KwQx8B6S6sAI038D0xdzkmYdY5-r0oMhCUG4l6w,532
769
+ openai/types/model_deleted.py,sha256=tXZybg03DunoOSYvwhT7zKj7KTN42R0VEs_-3PRliMo,229
770
+ openai/types/moderation.py,sha256=6CZmxhZiafnT50gKa7BeybrTSoYfCAk7wvD5CQHvBP0,6789
771
+ openai/types/moderation_create_params.py,sha256=EaZ2cej25g5WbRB2kIY7JFCXQPKSQQ95iyoUAAelGr4,992
772
+ openai/types/moderation_create_response.py,sha256=e6SVfWX2_JX25Za0C6KojcnbMTtDB2A7cjUm6cFMKcs,484
773
+ openai/types/moderation_image_url_input_param.py,sha256=t1r9WD3c-CK2Al1lpB4-DjfzLFSwgETR0g8nsRdoL0Y,622
774
+ openai/types/moderation_model.py,sha256=BFeqSyel2My2WKC6MCa_mAIHJx4uXU3-p8UNudJANeM,319
775
+ openai/types/moderation_multi_modal_input_param.py,sha256=RFdiEPsakWIscutX896ir5_rnEA2TLX5xQkjO5QR2vs,483
776
+ openai/types/moderation_text_input_param.py,sha256=ardCbBcdaULf8bkFuzkSKukV9enrINSjNWvb7m0LjZg,406
777
+ openai/types/shared/__init__.py,sha256=ezIePSVicJwxPmQ_5I2C0rO4xIqb1zWjg2sDxk_SYSU,594
778
+ openai/types/shared/__pycache__/__init__.cpython-311.pyc,,
779
+ openai/types/shared/__pycache__/error_object.cpython-311.pyc,,
780
+ openai/types/shared/__pycache__/function_definition.cpython-311.pyc,,
781
+ openai/types/shared/__pycache__/function_parameters.cpython-311.pyc,,
782
+ openai/types/shared/__pycache__/metadata.cpython-311.pyc,,
783
+ openai/types/shared/__pycache__/response_format_json_object.cpython-311.pyc,,
784
+ openai/types/shared/__pycache__/response_format_json_schema.cpython-311.pyc,,
785
+ openai/types/shared/__pycache__/response_format_text.cpython-311.pyc,,
786
+ openai/types/shared/error_object.py,sha256=G7SGPZ9Qw3gewTKbi3fK69eM6L2Ur0C2D57N8iEapJA,305
787
+ openai/types/shared/function_definition.py,sha256=8a5uHoIKrkrwTgfwTyE9ly4PgsZ3iLA_yRUAjubTb7Y,1447
788
+ openai/types/shared/function_parameters.py,sha256=Dkc_pm98zCKyouQmYrl934cK8ZWX7heY_IIyunW8x7c,236
789
+ openai/types/shared/metadata.py,sha256=DC0SFof2EeVvFK0EsmQH8W5b_HnpI_bdp47s51E5LKw,213
790
+ openai/types/shared/response_format_json_object.py,sha256=15KTCXJ0o1W4c5V1vAcOQAx-u0eoIfAjxrHLoN3NuE4,344
791
+ openai/types/shared/response_format_json_schema.py,sha256=rZS7diOPeqK48O_R6OYMJ6AtSGy_88PKTxzha6_56Fo,1399
792
+ openai/types/shared/response_format_text.py,sha256=GX0u_40OLmDdSyawDrUcUk4jcrz1qWsKmmAMP4AD7hc,318
793
+ openai/types/shared_params/__init__.py,sha256=C4z-UCQf4cDJss3uSoSXXFNaW9t23wdEJTd5vWBIhKU,541
794
+ openai/types/shared_params/__pycache__/__init__.cpython-311.pyc,,
795
+ openai/types/shared_params/__pycache__/function_definition.cpython-311.pyc,,
796
+ openai/types/shared_params/__pycache__/function_parameters.cpython-311.pyc,,
797
+ openai/types/shared_params/__pycache__/metadata.cpython-311.pyc,,
798
+ openai/types/shared_params/__pycache__/response_format_json_object.cpython-311.pyc,,
799
+ openai/types/shared_params/__pycache__/response_format_json_schema.cpython-311.pyc,,
800
+ openai/types/shared_params/__pycache__/response_format_text.cpython-311.pyc,,
801
+ openai/types/shared_params/function_definition.py,sha256=ciMXqn1tFXnp1tg9weJW0uvtyvMLrnph3WXMg4IG1Vk,1482
802
+ openai/types/shared_params/function_parameters.py,sha256=UvxKz_3b9b5ECwWr8RFrIH511htbU2JZsp9Z9BMkF-o,272
803
+ openai/types/shared_params/metadata.py,sha256=YCb9eFyy17EuLwtVHjUBUjW2FU8SbWp4NV-aEr_it54,249
804
+ openai/types/shared_params/response_format_json_object.py,sha256=QT4uJCK7RzN3HK17eGjEo36jLKOIBBNGjiX-zIa9iT4,390
805
+ openai/types/shared_params/response_format_json_schema.py,sha256=Uu2ioeSbI64bm-jJ61OY8Lr3PpofTR4d2LNBcaYxlec,1360
806
+ openai/types/shared_params/response_format_text.py,sha256=SjHeZAfgM1-HXAoKLrkiH-VZEnQ73XPTk_RgtJmEbU4,364
807
+ openai/types/upload.py,sha256=orjmcr2glbIgp2_BtMNq7gbHamrvG0UeM9pPXOnlfuM,1207
808
+ openai/types/upload_complete_params.py,sha256=7On-iVAlA9p_nksLSFPBPR4QbB0xEtAW-skyh7S9gR0,504
809
+ openai/types/upload_create_params.py,sha256=ZiZr1yC6g2VqL7KEnw7lhE4kZvU-F3DfTAc2TPk-XBo,889
810
+ openai/types/uploads/__init__.py,sha256=fDsmd3L0nIWbFldbViOLvcQavsFA4SL3jsXDfAueAck,242
811
+ openai/types/uploads/__pycache__/__init__.cpython-311.pyc,,
812
+ openai/types/uploads/__pycache__/part_create_params.cpython-311.pyc,,
813
+ openai/types/uploads/__pycache__/upload_part.cpython-311.pyc,,
814
+ openai/types/uploads/part_create_params.py,sha256=pBByUzngaj70ov1knoSo_gpeBjaWP9D5EdiHwiG4G7U,362
815
+ openai/types/uploads/upload_part.py,sha256=U9953cr9lJJLWEfhTiwHphRzLKARq3gWAWqrjxbhTR4,590
816
+ openai/types/websocket_connection_options.py,sha256=4cAWpv1KKp_9pvnez7pGYzO3s8zh1WvX2xpBhpe-96k,1840
817
+ openai/version.py,sha256=cjbXKO8Ut3aiv4YlQnugff7AdC48MpSndcx96q88Yb8,62
.venv/lib/python3.11/site-packages/openai-1.61.1.dist-info/WHEEL ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.27.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
.venv/lib/python3.11/site-packages/openai-1.61.1.dist-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ [console_scripts]
2
+ openai = openai.cli:main
.venv/lib/python3.11/site-packages/watchfiles/_rust_notify.cpython-311-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ffacd5aea8c801b16de58fb2202a5df9471fc8fada302c8e71e613059c9eb42d
3
+ size 1091064
.venv/lib/python3.11/site-packages/websockets/__init__.py ADDED
@@ -0,0 +1,214 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import typing
4
+
5
+ from .imports import lazy_import
6
+ from .version import version as __version__ # noqa: F401
7
+
8
+
9
+ __all__ = [
10
+ # .asyncio.client
11
+ "connect",
12
+ "unix_connect",
13
+ "ClientConnection",
14
+ # .asyncio.server
15
+ "basic_auth",
16
+ "broadcast",
17
+ "serve",
18
+ "unix_serve",
19
+ "ServerConnection",
20
+ "Server",
21
+ # .client
22
+ "ClientProtocol",
23
+ # .datastructures
24
+ "Headers",
25
+ "HeadersLike",
26
+ "MultipleValuesError",
27
+ # .exceptions
28
+ "ConcurrencyError",
29
+ "ConnectionClosed",
30
+ "ConnectionClosedError",
31
+ "ConnectionClosedOK",
32
+ "DuplicateParameter",
33
+ "InvalidHandshake",
34
+ "InvalidHeader",
35
+ "InvalidHeaderFormat",
36
+ "InvalidHeaderValue",
37
+ "InvalidMessage",
38
+ "InvalidOrigin",
39
+ "InvalidParameterName",
40
+ "InvalidParameterValue",
41
+ "InvalidState",
42
+ "InvalidStatus",
43
+ "InvalidUpgrade",
44
+ "InvalidURI",
45
+ "NegotiationError",
46
+ "PayloadTooBig",
47
+ "ProtocolError",
48
+ "SecurityError",
49
+ "WebSocketException",
50
+ # .frames
51
+ "Close",
52
+ "CloseCode",
53
+ "Frame",
54
+ "Opcode",
55
+ # .http11
56
+ "Request",
57
+ "Response",
58
+ # .protocol
59
+ "Protocol",
60
+ "Side",
61
+ "State",
62
+ # .server
63
+ "ServerProtocol",
64
+ # .typing
65
+ "Data",
66
+ "ExtensionName",
67
+ "ExtensionParameter",
68
+ "LoggerLike",
69
+ "StatusLike",
70
+ "Origin",
71
+ "Subprotocol",
72
+ ]
73
+
74
+ # When type checking, import non-deprecated aliases eagerly. Else, import on demand.
75
+ if typing.TYPE_CHECKING:
76
+ from .asyncio.client import ClientConnection, connect, unix_connect
77
+ from .asyncio.server import (
78
+ Server,
79
+ ServerConnection,
80
+ basic_auth,
81
+ broadcast,
82
+ serve,
83
+ unix_serve,
84
+ )
85
+ from .client import ClientProtocol
86
+ from .datastructures import Headers, HeadersLike, MultipleValuesError
87
+ from .exceptions import (
88
+ ConcurrencyError,
89
+ ConnectionClosed,
90
+ ConnectionClosedError,
91
+ ConnectionClosedOK,
92
+ DuplicateParameter,
93
+ InvalidHandshake,
94
+ InvalidHeader,
95
+ InvalidHeaderFormat,
96
+ InvalidHeaderValue,
97
+ InvalidMessage,
98
+ InvalidOrigin,
99
+ InvalidParameterName,
100
+ InvalidParameterValue,
101
+ InvalidState,
102
+ InvalidStatus,
103
+ InvalidUpgrade,
104
+ InvalidURI,
105
+ NegotiationError,
106
+ PayloadTooBig,
107
+ ProtocolError,
108
+ SecurityError,
109
+ WebSocketException,
110
+ )
111
+ from .frames import Close, CloseCode, Frame, Opcode
112
+ from .http11 import Request, Response
113
+ from .protocol import Protocol, Side, State
114
+ from .server import ServerProtocol
115
+ from .typing import (
116
+ Data,
117
+ ExtensionName,
118
+ ExtensionParameter,
119
+ LoggerLike,
120
+ Origin,
121
+ StatusLike,
122
+ Subprotocol,
123
+ )
124
+ else:
125
+ lazy_import(
126
+ globals(),
127
+ aliases={
128
+ # .asyncio.client
129
+ "connect": ".asyncio.client",
130
+ "unix_connect": ".asyncio.client",
131
+ "ClientConnection": ".asyncio.client",
132
+ # .asyncio.server
133
+ "basic_auth": ".asyncio.server",
134
+ "broadcast": ".asyncio.server",
135
+ "serve": ".asyncio.server",
136
+ "unix_serve": ".asyncio.server",
137
+ "ServerConnection": ".asyncio.server",
138
+ "Server": ".asyncio.server",
139
+ # .client
140
+ "ClientProtocol": ".client",
141
+ # .datastructures
142
+ "Headers": ".datastructures",
143
+ "HeadersLike": ".datastructures",
144
+ "MultipleValuesError": ".datastructures",
145
+ # .exceptions
146
+ "ConcurrencyError": ".exceptions",
147
+ "ConnectionClosed": ".exceptions",
148
+ "ConnectionClosedError": ".exceptions",
149
+ "ConnectionClosedOK": ".exceptions",
150
+ "DuplicateParameter": ".exceptions",
151
+ "InvalidHandshake": ".exceptions",
152
+ "InvalidHeader": ".exceptions",
153
+ "InvalidHeaderFormat": ".exceptions",
154
+ "InvalidHeaderValue": ".exceptions",
155
+ "InvalidMessage": ".exceptions",
156
+ "InvalidOrigin": ".exceptions",
157
+ "InvalidParameterName": ".exceptions",
158
+ "InvalidParameterValue": ".exceptions",
159
+ "InvalidState": ".exceptions",
160
+ "InvalidStatus": ".exceptions",
161
+ "InvalidUpgrade": ".exceptions",
162
+ "InvalidURI": ".exceptions",
163
+ "NegotiationError": ".exceptions",
164
+ "PayloadTooBig": ".exceptions",
165
+ "ProtocolError": ".exceptions",
166
+ "SecurityError": ".exceptions",
167
+ "WebSocketException": ".exceptions",
168
+ # .frames
169
+ "Close": ".frames",
170
+ "CloseCode": ".frames",
171
+ "Frame": ".frames",
172
+ "Opcode": ".frames",
173
+ # .http11
174
+ "Request": ".http11",
175
+ "Response": ".http11",
176
+ # .protocol
177
+ "Protocol": ".protocol",
178
+ "Side": ".protocol",
179
+ "State": ".protocol",
180
+ # .server
181
+ "ServerProtocol": ".server",
182
+ # .typing
183
+ "Data": ".typing",
184
+ "ExtensionName": ".typing",
185
+ "ExtensionParameter": ".typing",
186
+ "LoggerLike": ".typing",
187
+ "Origin": ".typing",
188
+ "StatusLike": ".typing",
189
+ "Subprotocol": ".typing",
190
+ },
191
+ deprecated_aliases={
192
+ # deprecated in 9.0 - 2021-09-01
193
+ "framing": ".legacy",
194
+ "handshake": ".legacy",
195
+ "parse_uri": ".uri",
196
+ "WebSocketURI": ".uri",
197
+ # deprecated in 14.0 - 2024-11-09
198
+ # .legacy.auth
199
+ "BasicAuthWebSocketServerProtocol": ".legacy.auth",
200
+ "basic_auth_protocol_factory": ".legacy.auth",
201
+ # .legacy.client
202
+ "WebSocketClientProtocol": ".legacy.client",
203
+ # .legacy.exceptions
204
+ "AbortHandshake": ".legacy.exceptions",
205
+ "InvalidStatusCode": ".legacy.exceptions",
206
+ "RedirectHandshake": ".legacy.exceptions",
207
+ "WebSocketProtocolError": ".legacy.exceptions",
208
+ # .legacy.protocol
209
+ "WebSocketCommonProtocol": ".legacy.protocol",
210
+ # .legacy.server
211
+ "WebSocketServer": ".legacy.server",
212
+ "WebSocketServerProtocol": ".legacy.server",
213
+ },
214
+ )
.venv/lib/python3.11/site-packages/websockets/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (4.76 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/__main__.cpython-311.pyc ADDED
Binary file (6.84 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/auth.cpython-311.pyc ADDED
Binary file (927 Bytes). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/client.cpython-311.pyc ADDED
Binary file (18.2 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/connection.cpython-311.pyc ADDED
Binary file (607 Bytes). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/datastructures.cpython-311.pyc ADDED
Binary file (10.1 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/exceptions.cpython-311.pyc ADDED
Binary file (17.7 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/frames.cpython-311.pyc ADDED
Binary file (16.6 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/headers.cpython-311.pyc ADDED
Binary file (19.9 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/http.cpython-311.pyc ADDED
Binary file (1.06 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/http11.cpython-311.pyc ADDED
Binary file (16.7 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/imports.cpython-311.pyc ADDED
Binary file (3.88 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/protocol.cpython-311.pyc ADDED
Binary file (26.2 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/server.cpython-311.pyc ADDED
Binary file (25.3 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/streams.cpython-311.pyc ADDED
Binary file (5.74 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/typing.cpython-311.pyc ADDED
Binary file (1.37 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/uri.cpython-311.pyc ADDED
Binary file (4.32 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/utils.cpython-311.pyc ADDED
Binary file (2.46 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/__pycache__/version.cpython-311.pyc ADDED
Binary file (3.34 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/asyncio/__init__.py ADDED
File without changes
.venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (191 Bytes). View file
 
.venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/async_timeout.cpython-311.pyc ADDED
Binary file (11.2 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/client.cpython-311.pyc ADDED
Binary file (24.2 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/compatibility.cpython-311.pyc ADDED
Binary file (1.24 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/connection.cpython-311.pyc ADDED
Binary file (55.3 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/messages.cpython-311.pyc ADDED
Binary file (13.1 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/asyncio/__pycache__/server.cpython-311.pyc ADDED
Binary file (41.1 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/asyncio/async_timeout.py ADDED
@@ -0,0 +1,282 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # From https://github.com/aio-libs/async-timeout/blob/master/async_timeout/__init__.py
2
+ # Licensed under the Apache License (Apache-2.0)
3
+
4
+ import asyncio
5
+ import enum
6
+ import sys
7
+ import warnings
8
+ from types import TracebackType
9
+ from typing import Optional, Type
10
+
11
+
12
+ if sys.version_info >= (3, 11):
13
+ from typing import final
14
+ else:
15
+ # From https://github.com/python/typing_extensions/blob/main/src/typing_extensions.py
16
+ # Licensed under the Python Software Foundation License (PSF-2.0)
17
+
18
+ # @final exists in 3.8+, but we backport it for all versions
19
+ # before 3.11 to keep support for the __final__ attribute.
20
+ # See https://bugs.python.org/issue46342
21
+ def final(f):
22
+ """This decorator can be used to indicate to type checkers that
23
+ the decorated method cannot be overridden, and decorated class
24
+ cannot be subclassed. For example:
25
+
26
+ class Base:
27
+ @final
28
+ def done(self) -> None:
29
+ ...
30
+ class Sub(Base):
31
+ def done(self) -> None: # Error reported by type checker
32
+ ...
33
+ @final
34
+ class Leaf:
35
+ ...
36
+ class Other(Leaf): # Error reported by type checker
37
+ ...
38
+
39
+ There is no runtime checking of these properties. The decorator
40
+ sets the ``__final__`` attribute to ``True`` on the decorated object
41
+ to allow runtime introspection.
42
+ """
43
+ try:
44
+ f.__final__ = True
45
+ except (AttributeError, TypeError):
46
+ # Skip the attribute silently if it is not writable.
47
+ # AttributeError happens if the object has __slots__ or a
48
+ # read-only property, TypeError if it's a builtin class.
49
+ pass
50
+ return f
51
+
52
+ # End https://github.com/python/typing_extensions/blob/main/src/typing_extensions.py
53
+
54
+
55
+ if sys.version_info >= (3, 11):
56
+
57
+ def _uncancel_task(task: "asyncio.Task[object]") -> None:
58
+ task.uncancel()
59
+
60
+ else:
61
+
62
+ def _uncancel_task(task: "asyncio.Task[object]") -> None:
63
+ pass
64
+
65
+
66
+ __version__ = "4.0.3"
67
+
68
+
69
+ __all__ = ("timeout", "timeout_at", "Timeout")
70
+
71
+
72
+ def timeout(delay: Optional[float]) -> "Timeout":
73
+ """timeout context manager.
74
+
75
+ Useful in cases when you want to apply timeout logic around block
76
+ of code or in cases when asyncio.wait_for is not suitable. For example:
77
+
78
+ >>> async with timeout(0.001):
79
+ ... async with aiohttp.get('https://github.com') as r:
80
+ ... await r.text()
81
+
82
+
83
+ delay - value in seconds or None to disable timeout logic
84
+ """
85
+ loop = asyncio.get_running_loop()
86
+ if delay is not None:
87
+ deadline = loop.time() + delay # type: Optional[float]
88
+ else:
89
+ deadline = None
90
+ return Timeout(deadline, loop)
91
+
92
+
93
def timeout_at(deadline: Optional[float]) -> "Timeout":
    """Schedule the timeout at absolute time.

    deadline argument points on the time in the same clock system
    as loop.time().

    Please note: it is not POSIX time but a time with
    undefined starting base, e.g. the time of the system power on.

    >>> async with timeout_at(loop.time() + 10):
    ...     async with aiohttp.get('https://github.com') as r:
    ...         await r.text()


    """
    return Timeout(deadline, asyncio.get_running_loop())
110
+
111
+
112
class _State(enum.Enum):
    # Lifecycle of a Timeout: created -> entered -> (timed out | exited).
    INIT = "INIT"
    ENTER = "ENTER"
    TIMEOUT = "TIMEOUT"
    EXIT = "EXIT"
117
+
118
+
119
@final
class Timeout:
    # Internal class -- don't instantiate it directly; use the timeout()
    # and timeout_at() public factories instead.
    #
    # Implementation note: `async with timeout()` is preferred over
    # `with timeout()`. Technically the Timeout class doesn't need to be
    # async at all, but `async with` makes it explicit that the context
    # manager belongs in async code, which avoids many silly misusages.
    #
    # TimeoutError is raised immediately when scheduled if the deadline
    # is already passed, so that we time out as soon as possible instead
    # of waiting for the next await expression.

    __slots__ = ("_deadline", "_loop", "_state", "_timeout_handler", "_task")

    def __init__(
        self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
    ) -> None:
        self._loop = loop
        self._state = _State.INIT

        self._task: Optional["asyncio.Task[object]"] = None
        self._timeout_handler: Optional[asyncio.Handle] = None
        if deadline is None:
            self._deadline: Optional[float] = None
        else:
            # update() records the deadline; scheduling happens on enter.
            self.update(deadline)

    def __enter__(self) -> "Timeout":
        warnings.warn(
            "with timeout() is deprecated, use async with timeout() instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self._do_enter()
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        self._do_exit(exc_type)
        return None

    async def __aenter__(self) -> "Timeout":
        self._do_enter()
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        self._do_exit(exc_type)
        return None

    @property
    def expired(self) -> bool:
        """Is timeout expired during execution?"""
        return self._state == _State.TIMEOUT

    @property
    def deadline(self) -> Optional[float]:
        return self._deadline

    def reject(self) -> None:
        """Reject scheduled timeout if any."""
        # "cancel" might be a better name, but task.cancel() raises
        # CancelledError in the asyncio world, so "reject" avoids confusion.
        if self._state not in (_State.INIT, _State.ENTER):
            raise RuntimeError(f"invalid state {self._state.value}")
        self._reject()

    def _reject(self) -> None:
        # Drop the task reference and unschedule any pending callback.
        self._task = None
        if self._timeout_handler is not None:
            self._timeout_handler.cancel()
            self._timeout_handler = None

    def shift(self, delay: float) -> None:
        """Advance timeout on delay seconds.

        The delay can be negative.

        Raise RuntimeError if shift is called when deadline is not scheduled
        """
        deadline = self._deadline
        if deadline is None:
            raise RuntimeError("cannot shift timeout if deadline is not scheduled")
        self.update(deadline + delay)

    def update(self, deadline: float) -> None:
        """Set deadline to absolute value.

        deadline argument points on the time in the same clock system
        as loop.time().

        If new deadline is in the past the timeout is raised immediately.

        Please note: it is not POSIX time but a time with
        undefined starting base, e.g. the time of the system power on.
        """
        if self._state == _State.EXIT:
            raise RuntimeError("cannot reschedule after exit from context manager")
        if self._state == _State.TIMEOUT:
            raise RuntimeError("cannot reschedule expired timeout")
        if self._timeout_handler is not None:
            self._timeout_handler.cancel()
        self._deadline = deadline
        # Before entering the context manager only record the deadline;
        # the callback is scheduled by _do_enter().
        if self._state != _State.INIT:
            self._reschedule()

    def _reschedule(self) -> None:
        assert self._state == _State.ENTER
        deadline = self._deadline
        if deadline is None:
            return

        if self._timeout_handler is not None:
            self._timeout_handler.cancel()

        self._task = asyncio.current_task()
        # Fire as soon as possible when the deadline is already behind us.
        if deadline <= self._loop.time():
            self._timeout_handler = self._loop.call_soon(self._on_timeout)
        else:
            self._timeout_handler = self._loop.call_at(deadline, self._on_timeout)

    def _do_enter(self) -> None:
        if self._state != _State.INIT:
            raise RuntimeError(f"invalid state {self._state.value}")
        self._state = _State.ENTER
        self._reschedule()

    def _do_exit(self, exc_type: Optional[Type[BaseException]]) -> None:
        if exc_type is asyncio.CancelledError and self._state == _State.TIMEOUT:
            # Our callback cancelled the wrapped task; translate that
            # cancellation into TimeoutError for the caller.
            assert self._task is not None
            _uncancel_task(self._task)
            self._timeout_handler = None
            self._task = None
            raise asyncio.TimeoutError
        # timeout has not expired
        self._state = _State.EXIT
        self._reject()
        return None

    def _on_timeout(self) -> None:
        assert self._task is not None
        self._task.cancel()
        self._state = _State.TIMEOUT
        # drop the reference early
        self._timeout_handler = None
280
+
281
+
282
+ # End https://github.com/aio-libs/async-timeout/blob/master/async_timeout/__init__.py
.venv/lib/python3.11/site-packages/websockets/asyncio/client.py ADDED
@@ -0,0 +1,567 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ import logging
5
+ import os
6
+ import traceback
7
+ import urllib.parse
8
+ from collections.abc import AsyncIterator, Generator, Sequence
9
+ from types import TracebackType
10
+ from typing import Any, Callable
11
+
12
+ from ..client import ClientProtocol, backoff
13
+ from ..datastructures import HeadersLike
14
+ from ..exceptions import InvalidMessage, InvalidStatus, SecurityError
15
+ from ..extensions.base import ClientExtensionFactory
16
+ from ..extensions.permessage_deflate import enable_client_permessage_deflate
17
+ from ..headers import validate_subprotocols
18
+ from ..http11 import USER_AGENT, Response
19
+ from ..protocol import CONNECTING, Event
20
+ from ..typing import LoggerLike, Origin, Subprotocol
21
+ from ..uri import WebSocketURI, parse_uri
22
+ from .compatibility import TimeoutError, asyncio_timeout
23
+ from .connection import Connection
24
+
25
+
26
+ __all__ = ["connect", "unix_connect", "ClientConnection"]
27
+
28
+ MAX_REDIRECTS = int(os.environ.get("WEBSOCKETS_MAX_REDIRECTS", "10"))
29
+
30
+
31
class ClientConnection(Connection):
    """
    :mod:`asyncio` implementation of a WebSocket client connection.

    :class:`ClientConnection` provides :meth:`recv` and :meth:`send` coroutines
    for receiving and sending messages.

    It supports asynchronous iteration to receive messages::

        async for message in websocket:
            await process(message)

    The iterator exits normally when the connection is closed with close code
    1000 (OK) or 1001 (going away) or without a close code. It raises a
    :exc:`~websockets.exceptions.ConnectionClosedError` when the connection is
    closed with any other code.

    The ``ping_interval``, ``ping_timeout``, ``close_timeout``, ``max_queue``,
    and ``write_limit`` arguments have the same meaning as in :func:`connect`.

    Args:
        protocol: Sans-I/O connection.

    """

    def __init__(
        self,
        protocol: ClientProtocol,
        *,
        ping_interval: float | None = 20,
        ping_timeout: float | None = 20,
        close_timeout: float | None = 10,
        max_queue: int | None | tuple[int | None, int | None] = 16,
        write_limit: int | tuple[int, int | None] = 2**15,
    ) -> None:
        self.protocol: ClientProtocol
        super().__init__(
            protocol,
            ping_interval=ping_interval,
            ping_timeout=ping_timeout,
            close_timeout=close_timeout,
            max_queue=max_queue,
            write_limit=write_limit,
        )
        # Resolved as soon as the HTTP handshake response is received.
        self.response_rcvd: asyncio.Future[None] = self.loop.create_future()

    async def handshake(
        self,
        additional_headers: HeadersLike | None = None,
        user_agent_header: str | None = USER_AGENT,
    ) -> None:
        """
        Perform the opening handshake.

        """
        async with self.send_context(expected_state=CONNECTING):
            self.request = self.protocol.connect()
            if additional_headers is not None:
                self.request.headers.update(additional_headers)
            if user_agent_header:
                self.request.headers["User-Agent"] = user_agent_header
            self.protocol.send_request(self.request)

        # Wait until either the response arrives or the connection drops.
        await asyncio.wait(
            [self.response_rcvd, self.connection_lost_waiter],
            return_when=asyncio.FIRST_COMPLETED,
        )

        # self.protocol.handshake_exc is set when the connection is lost before
        # receiving a response, when the response cannot be parsed, or when the
        # response fails the handshake.

        if self.protocol.handshake_exc is not None:
            raise self.protocol.handshake_exc

    def process_event(self, event: Event) -> None:
        """
        Process one incoming event.

        """
        if self.response is None:
            # First event: the handshake response.
            assert isinstance(event, Response)
            self.response = event
            self.response_rcvd.set_result(None)
        else:
            # Subsequent events: frames.
            super().process_event(event)
119
+
120
+
121
+ def process_exception(exc: Exception) -> Exception | None:
122
+ """
123
+ Determine whether a connection error is retryable or fatal.
124
+
125
+ When reconnecting automatically with ``async for ... in connect(...)``, if a
126
+ connection attempt fails, :func:`process_exception` is called to determine
127
+ whether to retry connecting or to raise the exception.
128
+
129
+ This function defines the default behavior, which is to retry on:
130
+
131
+ * :exc:`EOFError`, :exc:`OSError`, :exc:`asyncio.TimeoutError`: network
132
+ errors;
133
+ * :exc:`~websockets.exceptions.InvalidStatus` when the status code is 500,
134
+ 502, 503, or 504: server or proxy errors.
135
+
136
+ All other exceptions are considered fatal.
137
+
138
+ You can change this behavior with the ``process_exception`` argument of
139
+ :func:`connect`.
140
+
141
+ Return :obj:`None` if the exception is retryable i.e. when the error could
142
+ be transient and trying to reconnect with the same parameters could succeed.
143
+ The exception will be logged at the ``INFO`` level.
144
+
145
+ Return an exception, either ``exc`` or a new exception, if the exception is
146
+ fatal i.e. when trying to reconnect will most likely produce the same error.
147
+ That exception will be raised, breaking out of the retry loop.
148
+
149
+ """
150
+ if isinstance(exc, (OSError, asyncio.TimeoutError)):
151
+ return None
152
+ if isinstance(exc, InvalidMessage) and isinstance(exc.__cause__, EOFError):
153
+ return None
154
+ if isinstance(exc, InvalidStatus) and exc.response.status_code in [
155
+ 500, # Internal Server Error
156
+ 502, # Bad Gateway
157
+ 503, # Service Unavailable
158
+ 504, # Gateway Timeout
159
+ ]:
160
+ return None
161
+ return exc
162
+
163
+
164
+ # This is spelled in lower case because it's exposed as a callable in the API.
165
# This is spelled in lower case because it's exposed as a callable in the API.
class connect:
    """
    Connect to the WebSocket server at ``uri``.

    This coroutine returns a :class:`ClientConnection` instance, which you can
    use to send and receive messages.

    :func:`connect` may be used as an asynchronous context manager::

        from websockets.asyncio.client import connect

        async with connect(...) as websocket:
            ...

    The connection is closed automatically when exiting the context.

    :func:`connect` can be used as an infinite asynchronous iterator to
    reconnect automatically on errors::

        async for websocket in connect(...):
            try:
                ...
            except websockets.exceptions.ConnectionClosed:
                continue

    If the connection fails with a transient error, it is retried with
    exponential backoff. If it fails with a fatal error, the exception is
    raised, breaking out of the loop.

    The connection is closed automatically after each iteration of the loop.

    Args:
        uri: URI of the WebSocket server.
        origin: Value of the ``Origin`` header, for servers that require it.
        extensions: List of supported extensions, in order in which they
            should be negotiated and run.
        subprotocols: List of supported subprotocols, in order of decreasing
            preference.
        additional_headers (HeadersLike | None): Arbitrary HTTP headers to add
            to the handshake request.
        user_agent_header: Value of the ``User-Agent`` request header.
            It defaults to ``"Python/x.y.z websockets/X.Y"``.
            Setting it to :obj:`None` removes the header.
        compression: The "permessage-deflate" extension is enabled by default.
            Set ``compression`` to :obj:`None` to disable it. See the
            :doc:`compression guide <../../topics/compression>` for details.
        process_exception: When reconnecting automatically, tell whether an
            error is transient or fatal. The default behavior is defined by
            :func:`process_exception`. Refer to its documentation for details.
        open_timeout: Timeout for opening the connection in seconds.
            :obj:`None` disables the timeout.
        ping_interval: Interval between keepalive pings in seconds.
            :obj:`None` disables keepalive.
        ping_timeout: Timeout for keepalive pings in seconds.
            :obj:`None` disables timeouts.
        close_timeout: Timeout for closing the connection in seconds.
            :obj:`None` disables the timeout.
        max_size: Maximum size of incoming messages in bytes.
            :obj:`None` disables the limit.
        max_queue: High-water mark of the buffer where frames are received.
            It defaults to 16 frames. The low-water mark defaults to ``max_queue
            // 4``. You may pass a ``(high, low)`` tuple to set the high-water
            and low-water marks. If you want to disable flow control entirely,
            you may set it to ``None``, although that's a bad idea.
        write_limit: High-water mark of write buffer in bytes. It is passed to
            :meth:`~asyncio.WriteTransport.set_write_buffer_limits`. It defaults
            to 32 KiB. You may pass a ``(high, low)`` tuple to set the
            high-water and low-water marks.
        logger: Logger for this client.
            It defaults to ``logging.getLogger("websockets.client")``.
            See the :doc:`logging guide <../../topics/logging>` for details.
        create_connection: Factory for the :class:`ClientConnection` managing
            the connection. Set it to a wrapper or a subclass to customize
            connection handling.

    Any other keyword arguments are passed to the event loop's
    :meth:`~asyncio.loop.create_connection` method.

    For example:

    * You can set ``ssl`` to a :class:`~ssl.SSLContext` to enforce TLS settings.
      When connecting to a ``wss://`` URI, if ``ssl`` isn't provided, a TLS
      context is created with :func:`~ssl.create_default_context`.

    * You can set ``server_hostname`` to override the host name from ``uri`` in
      the TLS handshake.

    * You can set ``host`` and ``port`` to connect to a different host and port
      from those found in ``uri``. This only changes the destination of the TCP
      connection. The host name from ``uri`` is still used in the TLS handshake
      for secure connections and in the ``Host`` header.

    * You can set ``sock`` to provide a preexisting TCP socket. You may call
      :func:`socket.create_connection` (not to be confused with the event loop's
      :meth:`~asyncio.loop.create_connection` method) to create a suitable
      client socket and customize it.

    Raises:
        InvalidURI: If ``uri`` isn't a valid WebSocket URI.
        OSError: If the TCP connection fails.
        InvalidHandshake: If the opening handshake fails.
        TimeoutError: If the opening handshake times out.

    """

    def __init__(
        self,
        uri: str,
        *,
        # WebSocket
        origin: Origin | None = None,
        extensions: Sequence[ClientExtensionFactory] | None = None,
        subprotocols: Sequence[Subprotocol] | None = None,
        additional_headers: HeadersLike | None = None,
        user_agent_header: str | None = USER_AGENT,
        compression: str | None = "deflate",
        process_exception: Callable[[Exception], Exception | None] = process_exception,
        # Timeouts
        open_timeout: float | None = 10,
        ping_interval: float | None = 20,
        ping_timeout: float | None = 20,
        close_timeout: float | None = 10,
        # Limits
        max_size: int | None = 2**20,
        max_queue: int | None | tuple[int | None, int | None] = 16,
        write_limit: int | tuple[int, int | None] = 2**15,
        # Logging
        logger: LoggerLike | None = None,
        # Escape hatch for advanced customization
        create_connection: type[ClientConnection] | None = None,
        # Other keyword arguments are passed to loop.create_connection
        **kwargs: Any,
    ) -> None:
        self.uri = uri

        if subprotocols is not None:
            validate_subprotocols(subprotocols)

        if compression == "deflate":
            extensions = enable_client_permessage_deflate(extensions)
        elif compression is not None:
            raise ValueError(f"unsupported compression: {compression}")

        if logger is None:
            logger = logging.getLogger("websockets.client")

        if create_connection is None:
            create_connection = ClientConnection

        def protocol_factory(wsuri: WebSocketURI) -> ClientConnection:
            # This is a protocol in the Sans-I/O implementation of websockets.
            protocol = ClientProtocol(
                wsuri,
                origin=origin,
                extensions=extensions,
                subprotocols=subprotocols,
                max_size=max_size,
                logger=logger,
            )
            # This is a connection in websockets and a protocol in asyncio.
            return create_connection(
                protocol,
                ping_interval=ping_interval,
                ping_timeout=ping_timeout,
                close_timeout=close_timeout,
                max_queue=max_queue,
                write_limit=write_limit,
            )

        self.protocol_factory = protocol_factory
        self.handshake_args = (
            additional_headers,
            user_agent_header,
        )
        self.process_exception = process_exception
        self.open_timeout = open_timeout
        self.logger = logger
        self.connection_kwargs = kwargs

    async def create_connection(self) -> ClientConnection:
        """Create TCP or Unix connection."""
        loop = asyncio.get_running_loop()

        wsuri = parse_uri(self.uri)
        kwargs = self.connection_kwargs.copy()

        def factory() -> ClientConnection:
            return self.protocol_factory(wsuri)

        # Validate the ssl argument against the URI scheme.
        if wsuri.secure:
            kwargs.setdefault("ssl", True)
            kwargs.setdefault("server_hostname", wsuri.host)
            if kwargs.get("ssl") is None:
                raise ValueError("ssl=None is incompatible with a wss:// URI")
        else:
            if kwargs.get("ssl") is not None:
                raise ValueError("ssl argument is incompatible with a ws:// URI")

        if kwargs.pop("unix", False):
            _, connection = await loop.create_unix_connection(factory, **kwargs)
        else:
            if kwargs.get("sock") is None:
                kwargs.setdefault("host", wsuri.host)
                kwargs.setdefault("port", wsuri.port)
            _, connection = await loop.create_connection(factory, **kwargs)
        return connection

    def process_redirect(self, exc: Exception) -> Exception | str:
        """
        Determine whether a connection error is a redirect that can be followed.

        Return the new URI if it's a valid redirect. Else, return an exception.

        """
        if not (
            isinstance(exc, InvalidStatus)
            and exc.response.status_code
            in [
                300,  # Multiple Choices
                301,  # Moved Permanently
                302,  # Found
                303,  # See Other
                307,  # Temporary Redirect
                308,  # Permanent Redirect
            ]
            and "Location" in exc.response.headers
        ):
            return exc

        old_wsuri = parse_uri(self.uri)
        new_uri = urllib.parse.urljoin(self.uri, exc.response.headers["Location"])
        new_wsuri = parse_uri(new_uri)

        # If connect() received a socket, it is closed and cannot be reused.
        if self.connection_kwargs.get("sock") is not None:
            return ValueError(
                f"cannot follow redirect to {new_uri} with a preexisting socket"
            )

        # TLS downgrade is forbidden.
        if old_wsuri.secure and not new_wsuri.secure:
            return SecurityError(f"cannot follow redirect to non-secure URI {new_uri}")

        # Apply restrictions to cross-origin redirects.
        if (
            old_wsuri.secure != new_wsuri.secure
            or old_wsuri.host != new_wsuri.host
            or old_wsuri.port != new_wsuri.port
        ):
            # Cross-origin redirects on Unix sockets don't quite make sense.
            if self.connection_kwargs.get("unix", False):
                return ValueError(
                    f"cannot follow cross-origin redirect to {new_uri} "
                    f"with a Unix socket"
                )

            # Cross-origin redirects when host and port are overridden are ill-defined.
            if (
                self.connection_kwargs.get("host") is not None
                or self.connection_kwargs.get("port") is not None
            ):
                return ValueError(
                    f"cannot follow cross-origin redirect to {new_uri} "
                    f"with an explicit host or port"
                )

        return new_uri

    # ... = await connect(...)

    def __await__(self) -> Generator[Any, None, ClientConnection]:
        # Create a suitable iterator by calling __await__ on a coroutine.
        return self.__await_impl__().__await__()

    async def __await_impl__(self) -> ClientConnection:
        try:
            async with asyncio_timeout(self.open_timeout):
                for _ in range(MAX_REDIRECTS):
                    self.connection = await self.create_connection()
                    try:
                        await self.connection.handshake(*self.handshake_args)
                    except asyncio.CancelledError:
                        self.connection.transport.abort()
                        raise
                    except Exception as exc:
                        # Always close the connection even though keep-alive is
                        # the default in HTTP/1.1 because create_connection ties
                        # opening the network connection with initializing the
                        # protocol. In the current design of connect(), there is
                        # no easy way to reuse the network connection that works
                        # in every case nor to reinitialize the protocol.
                        self.connection.transport.abort()

                        uri_or_exc = self.process_redirect(exc)
                        # Response is a valid redirect; follow it.
                        if isinstance(uri_or_exc, str):
                            self.uri = uri_or_exc
                            continue
                        # Response isn't a valid redirect; raise the exception.
                        if uri_or_exc is exc:
                            raise
                        else:
                            raise uri_or_exc from exc

                    else:
                        self.connection.start_keepalive()
                        return self.connection
                else:
                    raise SecurityError(f"more than {MAX_REDIRECTS} redirects")

        except TimeoutError:
            # Re-raise exception with an informative error message.
            raise TimeoutError("timed out during handshake") from None

    # ... = yield from connect(...) - remove when dropping Python < 3.10

    __iter__ = __await__

    # async with connect(...) as ...: ...

    async def __aenter__(self) -> ClientConnection:
        return await self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        await self.connection.close()

    # async for ... in connect(...):

    async def __aiter__(self) -> AsyncIterator[ClientConnection]:
        delays: Generator[float] | None = None
        while True:
            try:
                async with self as protocol:
                    yield protocol
            except Exception as exc:
                # Determine whether the exception is retryable or fatal.
                # The API of process_exception is "return an exception or None";
                # "raise an exception" is also supported because it's a frequent
                # mistake. It isn't documented in order to keep the API simple.
                try:
                    new_exc = self.process_exception(exc)
                except Exception as raised_exc:
                    new_exc = raised_exc

                # The connection failed with a fatal error.
                # Raise the exception and exit the loop.
                if new_exc is exc:
                    raise
                if new_exc is not None:
                    raise new_exc from exc

                # The connection failed with a retryable error.
                # Start or continue backoff and reconnect.
                if delays is None:
                    delays = backoff()
                delay = next(delays)
                self.logger.info(
                    "connect failed; reconnecting in %.1f seconds: %s",
                    delay,
                    # Remove first argument when dropping Python 3.9.
                    traceback.format_exception_only(type(exc), exc)[0].strip(),
                )
                await asyncio.sleep(delay)
                continue

            else:
                # The connection succeeded. Reset backoff.
                delays = None
539
+
540
+
541
def unix_connect(
    path: str | None = None,
    uri: str | None = None,
    **kwargs: Any,
) -> connect:
    """
    Connect to a WebSocket server listening on a Unix socket.

    This function accepts the same keyword arguments as :func:`connect`.

    It's only available on Unix.

    It's mainly useful for debugging servers listening on Unix sockets.

    Args:
        path: File system path to the Unix socket.
        uri: URI of the WebSocket server. ``uri`` defaults to
            ``ws://localhost/`` or, when a ``ssl`` argument is provided, to
            ``wss://localhost/``.

    """
    if uri is None:
        # Pick the scheme that matches whether TLS is configured.
        uri = "ws://localhost/" if kwargs.get("ssl") is None else "wss://localhost/"
    return connect(uri=uri, unix=True, path=path, **kwargs)
.venv/lib/python3.11/site-packages/websockets/asyncio/compatibility.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import sys
4
+
5
+
6
__all__ = ["TimeoutError", "aiter", "anext", "asyncio_timeout", "asyncio_timeout_at"]


if sys.version_info[:2] >= (3, 11):
    # Python 3.11+: everything we need ships with the language; re-export
    # the builtins and asyncio's native timeout context managers.
    TimeoutError = TimeoutError
    aiter = aiter
    anext = anext
    from asyncio import (
        timeout as asyncio_timeout,  # noqa: F401
        timeout_at as asyncio_timeout_at,  # noqa: F401
    )

else:  # Python < 3.11
    from asyncio import TimeoutError

    def aiter(async_iterable):
        # Equivalent of the aiter() builtin added in Python 3.10.
        return type(async_iterable).__aiter__(async_iterable)

    async def anext(async_iterator):
        # Equivalent of the anext() builtin added in Python 3.10.
        return await type(async_iterator).__anext__(async_iterator)

    # Fall back to the vendored copy of async-timeout.
    from .async_timeout import (
        timeout as asyncio_timeout,  # noqa: F401
        timeout_at as asyncio_timeout_at,  # noqa: F401
    )
+ )
.venv/lib/python3.11/site-packages/websockets/asyncio/connection.py ADDED
@@ -0,0 +1,1214 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ import collections
5
+ import contextlib
6
+ import logging
7
+ import random
8
+ import struct
9
+ import sys
10
+ import traceback
11
+ import uuid
12
+ from collections.abc import AsyncIterable, AsyncIterator, Awaitable, Iterable, Mapping
13
+ from types import TracebackType
14
+ from typing import Any, cast
15
+
16
+ from ..exceptions import (
17
+ ConcurrencyError,
18
+ ConnectionClosed,
19
+ ConnectionClosedOK,
20
+ ProtocolError,
21
+ )
22
+ from ..frames import DATA_OPCODES, BytesLike, CloseCode, Frame, Opcode
23
+ from ..http11 import Request, Response
24
+ from ..protocol import CLOSED, OPEN, Event, Protocol, State
25
+ from ..typing import Data, LoggerLike, Subprotocol
26
+ from .compatibility import (
27
+ TimeoutError,
28
+ aiter,
29
+ anext,
30
+ asyncio_timeout,
31
+ asyncio_timeout_at,
32
+ )
33
+ from .messages import Assembler
34
+
35
+
36
+ __all__ = ["Connection"]
37
+
38
+
39
+ class Connection(asyncio.Protocol):
40
+ """
41
+ :mod:`asyncio` implementation of a WebSocket connection.
42
+
43
+ :class:`Connection` provides APIs shared between WebSocket servers and
44
+ clients.
45
+
46
+ You shouldn't use it directly. Instead, use
47
+ :class:`~websockets.asyncio.client.ClientConnection` or
48
+ :class:`~websockets.asyncio.server.ServerConnection`.
49
+
50
+ """
51
+
52
def __init__(
    self,
    protocol: Protocol,
    *,
    ping_interval: float | None = 20,
    ping_timeout: float | None = 20,
    close_timeout: float | None = 10,
    max_queue: int | None | tuple[int | None, int | None] = 16,
    write_limit: int | tuple[int, int | None] = 2**15,
) -> None:
    """
    Initialize shared connection state around a Sans-I/O ``protocol``.

    Keyword-only tuning parameters mirror the public API of the client and
    server connection classes; shorthand scalar values for ``max_queue``
    and ``write_limit`` are normalized to ``(high, low)`` pairs.

    """
    self.protocol = protocol
    self.ping_interval = ping_interval
    self.ping_timeout = ping_timeout
    self.close_timeout = close_timeout
    # Normalize shorthand values to (high, low) watermark pairs.
    self.max_queue = (
        (max_queue, None)
        if max_queue is None or isinstance(max_queue, int)
        else max_queue
    )
    self.write_limit = (
        (write_limit, None) if isinstance(write_limit, int) else write_limit
    )

    # Inject a reference to this instance into the protocol's logger so
    # log records can identify which connection they belong to.
    self.protocol.logger = logging.LoggerAdapter(
        self.protocol.logger,
        {"websocket": self},
    )

    # Copy attributes from the protocol for convenience.
    self.id: uuid.UUID = self.protocol.id
    # Unique identifier of the connection. Useful in logs.
    self.logger: LoggerLike = self.protocol.logger
    # Logger for this connection.
    self.debug = self.protocol.debug

    # HTTP handshake request and response.
    self.request: Request | None = None
    self.response: Response | None = None

    # Event loop running this connection.
    self.loop = asyncio.get_running_loop()

    # Assembler turning frames into messages and serializing reads.
    # NOTE: initialized later, in connection_made().
    self.recv_messages: Assembler

    # Deadline for the closing handshake, on the event loop's clock.
    self.close_deadline: float | None = None

    # Guards against interleaving another send inside a fragmented message.
    self.fragmented_send_waiter: asyncio.Future[None] | None = None

    # Mapping of ping IDs to (pong waiter, send timestamp) pairs, kept in
    # chronological order so one pong can acknowledge earlier pings too.
    self.pong_waiters: dict[bytes, tuple[asyncio.Future[float], float]] = {}

    self.latency: float = 0
    # Round-trip time of the connection in seconds, measured by the
    # ping/pong exchange; 0 until the first measurement completes.

    # Task that sends keepalive pings. None when ping_interval is None.
    self.keepalive_task: asyncio.Task[None] | None = None

    # Exception raised while reading from the connection, chained to
    # ConnectionClosed to show why the TCP connection dropped.
    self.recv_exc: BaseException | None = None

    # Completed when the TCP connection is closed and the WebSocket
    # connection state becomes CLOSED.
    self.connection_lost_waiter: asyncio.Future[None] = self.loop.create_future()

    # Flow control state, adapted from asyncio.FlowControlMixin.
    self.paused: bool = False
    self.drain_waiters: collections.deque[asyncio.Future[None]] = (
        collections.deque()
    )
136
+
137
+ # Public attributes
138
+
139
@property
def local_address(self) -> Any:
    """
    Local address of the connection.

    For IPv4 connections, this is a ``(host, port)`` tuple.

    The format of the address depends on the address family.
    See :meth:`~socket.socket.getsockname`.

    """
    return self.transport.get_extra_info("sockname")
151
+
152
@property
def remote_address(self) -> Any:
    """
    Remote address of the connection.

    For IPv4 connections, this is a ``(host, port)`` tuple.

    The format of the address depends on the address family.
    See :meth:`~socket.socket.getpeername`.

    """
    return self.transport.get_extra_info("peername")
164
+
165
@property
def state(self) -> State:
    """
    State of the WebSocket connection, defined in :rfc:`6455`.

    This attribute is provided for completeness. Typical applications
    shouldn't check its value. Instead, they should call :meth:`~recv` or
    :meth:`send` and handle :exc:`~websockets.exceptions.ConnectionClosed`
    exceptions.

    """
    return self.protocol.state
177
+
178
@property
def subprotocol(self) -> Subprotocol | None:
    """
    Subprotocol negotiated during the opening handshake.

    :obj:`None` if no subprotocol was negotiated.

    """
    return self.protocol.subprotocol
187
+
188
@property
def close_code(self) -> int | None:
    """
    WebSocket close code received or sent, defined in :rfc:`6455`.

    :obj:`None` while the connection isn't closed.

    This attribute is provided for completeness. Typical applications
    shouldn't check its value. Instead, they should inspect attributes
    of :exc:`~websockets.exceptions.ConnectionClosed` exceptions.

    """
    return self.protocol.close_code
199
+
200
@property
def close_reason(self) -> str | None:
    """
    WebSocket close reason received or sent, defined in :rfc:`6455`.

    :obj:`None` while the connection isn't closed.

    This attribute is provided for completeness. Typical applications
    shouldn't check its value. Instead, they should inspect attributes
    of :exc:`~websockets.exceptions.ConnectionClosed` exceptions.

    """
    return self.protocol.close_reason
211
+
212
+ # Public methods
213
+
214
async def __aenter__(self) -> Connection:
    # Entering ``async with connection:`` yields the connection itself.
    return self
216
+
217
async def __aexit__(
    self,
    exc_type: type[BaseException] | None,
    exc_value: BaseException | None,
    traceback: TracebackType | None,
) -> None:
    # Close normally (1000) on a clean exit; report 1011 (internal error)
    # when the ``async with`` body raised.
    if exc_type is None:
        await self.close()
    else:
        await self.close(CloseCode.INTERNAL_ERROR)
227
+
228
async def __aiter__(self) -> AsyncIterator[Data]:
    """
    Iterate on incoming messages.

    The iterator calls :meth:`recv` and yields messages asynchronously in an
    infinite loop.

    It exits when the connection is closed normally. It raises a
    :exc:`~websockets.exceptions.ConnectionClosedError` exception after a
    protocol error or a network failure.

    """
    # ConnectionClosedOK terminates the loop quietly; any other
    # ConnectionClosed subclass propagates to the caller.
    try:
        while True:
            yield await self.recv()
    except ConnectionClosedOK:
        return
245
+
246
async def recv(self, decode: bool | None = None) -> Data:
    """
    Receive the next message.

    When the connection is closed, :meth:`recv` raises
    :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it raises
    :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal closure
    and :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol
    error or a network failure. This is how you detect the end of the
    message stream.

    Canceling :meth:`recv` is safe. There's no risk of losing data. The next
    invocation of :meth:`recv` will return the next message. This makes it
    possible to enforce a timeout by wrapping :meth:`recv` in
    :func:`~asyncio.timeout` or :func:`~asyncio.wait_for`.

    When the message is fragmented, :meth:`recv` waits until all fragments
    are received, reassembles them, and returns the whole message.

    Args:
        decode: Set ``decode=False`` to get a :class:`bytes` even for Text_
            frames (skips UTF-8 decoding); set ``decode=True`` to force
            UTF-8 decoding of Binary_ frames into :class:`str`. Leave it
            as :obj:`None` for the default type per frame opcode.

    .. _Text: https://datatracker.ietf.org/doc/html/rfc6455#section-5.6
    .. _Binary: https://datatracker.ietf.org/doc/html/rfc6455#section-5.6

    Returns:
        A string (:class:`str`) for a Text_ frame or a bytestring
        (:class:`bytes`) for a Binary_ frame.

    Raises:
        ConnectionClosed: When the connection is closed.
        ConcurrencyError: If two coroutines call :meth:`recv` or
            :meth:`recv_streaming` concurrently.

    """
    try:
        return await self.recv_messages.get(decode)
    except EOFError:
        # End of the message stream: fall through to raise close_exc.
        pass
    except ConcurrencyError:
        raise ConcurrencyError(
            "cannot call recv while another coroutine "
            "is already running recv or recv_streaming"
        ) from None
    except UnicodeDecodeError as exc:
        # Invalid UTF-8 in a Text frame: fail the connection with 1007,
        # then fall through to raise close_exc.
        async with self.send_context():
            self.protocol.fail(
                CloseCode.INVALID_DATA,
                f"{exc.reason} at position {exc.start}",
            )

    # Wait for the protocol state to be CLOSED before accessing close_exc.
    await asyncio.shield(self.connection_lost_waiter)
    raise self.protocol.close_exc from self.recv_exc
314
+
315
async def recv_streaming(self, decode: bool | None = None) -> AsyncIterator[Data]:
    """
    Receive the next message frame by frame.

    This method is designed for receiving fragmented messages. It returns an
    asynchronous iterator that yields each fragment as it is received. This
    iterator must be fully consumed. Else, future calls to :meth:`recv` or
    :meth:`recv_streaming` will raise
    :exc:`~websockets.exceptions.ConcurrencyError`, making the connection
    unusable.

    :meth:`recv_streaming` raises the same exceptions as :meth:`recv`.

    Canceling :meth:`recv_streaming` before receiving the first frame is
    safe. Canceling it after receiving one or more frames leaves the
    iterator in a partially consumed state, making the connection unusable.
    Instead, you should close the connection with :meth:`close`.

    Args:
        decode: Set ``decode=False`` to yield :class:`bytes` fragments even
            for Text_ frames; set ``decode=True`` to force UTF-8 decoding
            of Binary_ frames into :class:`str` fragments. Leave it as
            :obj:`None` for the default type per frame opcode.

    .. _Text: https://datatracker.ietf.org/doc/html/rfc6455#section-5.6
    .. _Binary: https://datatracker.ietf.org/doc/html/rfc6455#section-5.6

    Returns:
        An iterator of strings (:class:`str`) for a Text_ frame or
        bytestrings (:class:`bytes`) for a Binary_ frame.

    Raises:
        ConnectionClosed: When the connection is closed.
        ConcurrencyError: If two coroutines call :meth:`recv` or
            :meth:`recv_streaming` concurrently.

    """
    try:
        async for frame in self.recv_messages.get_iter(decode):
            yield frame
        return
    except EOFError:
        # End of the message stream: fall through to raise close_exc.
        pass
    except ConcurrencyError:
        raise ConcurrencyError(
            "cannot call recv_streaming while another coroutine "
            "is already running recv or recv_streaming"
        ) from None
    except UnicodeDecodeError as exc:
        # Invalid UTF-8 in a Text frame: fail the connection with 1007,
        # then fall through to raise close_exc.
        async with self.send_context():
            self.protocol.fail(
                CloseCode.INVALID_DATA,
                f"{exc.reason} at position {exc.start}",
            )

    # Wait for the protocol state to be CLOSED before accessing close_exc.
    await asyncio.shield(self.connection_lost_waiter)
    raise self.protocol.close_exc from self.recv_exc
382
+
383
async def send(
    self,
    message: Data | Iterable[Data] | AsyncIterable[Data],
    text: bool | None = None,
) -> None:
    """
    Send a message.

    A string (:class:`str`) is sent as a Text_ frame. A bytestring or
    bytes-like object (:class:`bytes`, :class:`bytearray`, or
    :class:`memoryview`) is sent as a Binary_ frame.

    .. _Text: https://datatracker.ietf.org/doc/html/rfc6455#section-5.6
    .. _Binary: https://datatracker.ietf.org/doc/html/rfc6455#section-5.6

    You may override this behavior with the ``text`` argument:
    ``text=True`` sends a bytes-like object in a Text_ frame (useful when
    the payload is already UTF-8 encoded), while ``text=False`` sends a
    string in a Binary_ frame.

    :meth:`send` also accepts an iterable or an asynchronous iterable of
    strings, bytestrings, or bytes-like objects to enable fragmentation_.
    Each item is treated as a message fragment and sent in its own frame.
    All items must be of the same type, or else :meth:`send` will raise a
    :exc:`TypeError` and the connection will be closed.

    .. _fragmentation: https://datatracker.ietf.org/doc/html/rfc6455#section-5.4

    :meth:`send` rejects dict-like objects because this is often an error.
    (If you really want to send the keys of a dict-like object as fragments,
    call its :meth:`~dict.keys` method and pass the result to :meth:`send`.)

    Canceling :meth:`send` is discouraged; close the connection with
    :meth:`close` instead. Stopping in the middle of a fragmented message
    causes a protocol error and the connection is closed.

    When the connection is closed, :meth:`send` raises
    :exc:`~websockets.exceptions.ConnectionClosed`.

    Args:
        message: Message to send.

    Raises:
        ConnectionClosed: When the connection is closed.
        TypeError: If ``message`` doesn't have a supported type.

    """
    # While sending a fragmented message, prevent sending other messages
    # until all fragments are sent.
    while self.fragmented_send_waiter is not None:
        await asyncio.shield(self.fragmented_send_waiter)

    # Unfragmented message -- this case must be handled first because
    # strings and bytes-like objects are iterable.

    if isinstance(message, str):
        async with self.send_context():
            if text is False:
                self.protocol.send_binary(message.encode())
            else:
                self.protocol.send_text(message.encode())

    elif isinstance(message, BytesLike):
        async with self.send_context():
            if text is True:
                self.protocol.send_text(message)
            else:
                self.protocol.send_binary(message)

    # Catch a common mistake -- passing a dict to send().

    elif isinstance(message, Mapping):
        raise TypeError("data is a dict-like object")

    # Fragmented message -- regular iterator.

    elif isinstance(message, Iterable):
        fragments = iter(message)
        try:
            fragment = next(fragments)
        except StopIteration:
            # Empty iterable: nothing to send.
            return

        assert self.fragmented_send_waiter is None
        self.fragmented_send_waiter = self.loop.create_future()
        try:
            # First fragment sets the frame type for the whole message.
            if isinstance(fragment, str):
                async with self.send_context():
                    if text is False:
                        self.protocol.send_binary(fragment.encode(), fin=False)
                    else:
                        self.protocol.send_text(fragment.encode(), fin=False)
                expect_str = True
            elif isinstance(fragment, BytesLike):
                async with self.send_context():
                    if text is True:
                        self.protocol.send_text(fragment, fin=False)
                    else:
                        self.protocol.send_binary(fragment, fin=False)
                expect_str = False
            else:
                raise TypeError("iterable must contain bytes or str")

            # Middle fragments must match the first fragment's type.
            for fragment in fragments:
                if isinstance(fragment, str) and expect_str:
                    async with self.send_context():
                        self.protocol.send_continuation(fragment.encode(), fin=False)
                elif isinstance(fragment, BytesLike) and not expect_str:
                    async with self.send_context():
                        self.protocol.send_continuation(fragment, fin=False)
                else:
                    raise TypeError("iterable must contain uniform types")

            # Final, empty fragment closes the message.
            async with self.send_context():
                self.protocol.send_continuation(b"", fin=True)

        except Exception:
            # We're half-way through a fragmented message and we can't
            # complete it. This makes the connection unusable.
            async with self.send_context():
                self.protocol.fail(
                    CloseCode.INTERNAL_ERROR,
                    "error in fragmented message",
                )
            raise

        finally:
            # Release coroutines waiting to send the next message.
            self.fragmented_send_waiter.set_result(None)
            self.fragmented_send_waiter = None

    # Fragmented message -- async iterator.

    elif isinstance(message, AsyncIterable):
        fragments_aiter = aiter(message)
        try:
            fragment = await anext(fragments_aiter)
        except StopAsyncIteration:
            # Empty async iterable: nothing to send.
            return

        assert self.fragmented_send_waiter is None
        self.fragmented_send_waiter = self.loop.create_future()
        try:
            # First fragment sets the frame type for the whole message.
            if isinstance(fragment, str):
                if text is False:
                    async with self.send_context():
                        self.protocol.send_binary(fragment.encode(), fin=False)
                else:
                    async with self.send_context():
                        self.protocol.send_text(fragment.encode(), fin=False)
                expect_str = True
            elif isinstance(fragment, BytesLike):
                if text is True:
                    async with self.send_context():
                        self.protocol.send_text(fragment, fin=False)
                else:
                    async with self.send_context():
                        self.protocol.send_binary(fragment, fin=False)
                expect_str = False
            else:
                raise TypeError("async iterable must contain bytes or str")

            # Middle fragments must match the first fragment's type.
            async for fragment in fragments_aiter:
                if isinstance(fragment, str) and expect_str:
                    async with self.send_context():
                        self.protocol.send_continuation(fragment.encode(), fin=False)
                elif isinstance(fragment, BytesLike) and not expect_str:
                    async with self.send_context():
                        self.protocol.send_continuation(fragment, fin=False)
                else:
                    raise TypeError("async iterable must contain uniform types")

            # Final, empty fragment closes the message.
            async with self.send_context():
                self.protocol.send_continuation(b"", fin=True)

        except Exception:
            # We're half-way through a fragmented message and we can't
            # complete it. This makes the connection unusable.
            async with self.send_context():
                self.protocol.fail(
                    CloseCode.INTERNAL_ERROR,
                    "error in fragmented message",
                )
            raise

        finally:
            # Release coroutines waiting to send the next message.
            self.fragmented_send_waiter.set_result(None)
            self.fragmented_send_waiter = None

    else:
        raise TypeError("data must be str, bytes, iterable, or async iterable")
598
+
599
async def close(self, code: int = 1000, reason: str = "") -> None:
    """
    Perform the closing handshake.

    :meth:`close` waits for the other end to complete the handshake and
    for the TCP connection to terminate.

    :meth:`close` is idempotent: it doesn't do anything once the
    connection is closed.

    Args:
        code: WebSocket close code.
        reason: WebSocket close reason.

    """
    try:
        # The context manager takes care of waiting for the TCP connection
        # to terminate after calling a method that sends a close frame.
        async with self.send_context():
            if self.fragmented_send_waiter is None:
                self.protocol.send_close(code, reason)
            else:
                # Closing in the middle of a fragmented message is a
                # protocol violation; fail with 1011 instead.
                self.protocol.fail(
                    CloseCode.INTERNAL_ERROR,
                    "close during fragmented message",
                )
    except ConnectionClosed:
        # The connection is already closed, which was the goal anyway.
        pass
629
+
630
async def wait_closed(self) -> None:
    """
    Wait until the connection is closed.

    :meth:`wait_closed` waits for the closing handshake to complete and for
    the TCP connection to terminate.

    """
    # Shield the shared future so cancelling this coroutine doesn't
    # cancel the future other waiters rely on.
    await asyncio.shield(self.connection_lost_waiter)
639
+
640
async def ping(self, data: Data | None = None) -> Awaitable[float]:
    """
    Send a Ping_.

    .. _Ping: https://datatracker.ietf.org/doc/html/rfc6455#section-5.5.2

    A ping may serve as a keepalive or as a check that the remote endpoint
    received all messages up to this point.

    Args:
        data: Payload of the ping. A :class:`str` will be encoded to UTF-8.
            If ``data`` is :obj:`None`, the payload is four random bytes.

    Returns:
        A future that will be completed when the corresponding pong is
        received. You can ignore it if you don't intend to wait. The result
        of the future is the latency of the connection in seconds.

        ::

            pong_waiter = await ws.ping()
            # only if you want to wait for the corresponding pong
            latency = await pong_waiter

    Raises:
        ConnectionClosed: When the connection is closed.
        ConcurrencyError: If another ping was sent with the same data and
            the corresponding pong wasn't received yet.

    """
    # Normalize the payload to bytes.
    if isinstance(data, BytesLike):
        data = bytes(data)
    elif isinstance(data, str):
        data = data.encode()
    elif data is not None:
        raise TypeError("data must be str or bytes-like")

    async with self.send_context():
        # Protect against duplicates if a payload is explicitly set.
        if data in self.pong_waiters:
            raise ConcurrencyError("already waiting for a pong with the same data")

        # Generate a unique random payload otherwise.
        while data is None or data in self.pong_waiters:
            data = struct.pack("!I", random.getrandbits(32))

        pong_waiter = self.loop.create_future()
        # The event loop's default clock is time.monotonic(). Its resolution
        # is a bit low on Windows (~16ms). This is improved in Python 3.13.
        ping_timestamp = self.loop.time()
        self.pong_waiters[data] = (pong_waiter, ping_timestamp)
        self.protocol.send_ping(data)
        return pong_waiter
693
+
694
async def pong(self, data: Data = b"") -> None:
    """
    Send a Pong_.

    .. _Pong: https://datatracker.ietf.org/doc/html/rfc6455#section-5.5.3

    An unsolicited pong may serve as a unidirectional heartbeat.

    Args:
        data: Payload of the pong. A :class:`str` will be encoded to UTF-8.

    Raises:
        ConnectionClosed: When the connection is closed.

    """
    # Normalize the payload to bytes.
    if isinstance(data, BytesLike):
        data = bytes(data)
    elif isinstance(data, str):
        data = data.encode()
    else:
        raise TypeError("data must be str or bytes-like")

    async with self.send_context():
        self.protocol.send_pong(data)
718
+
719
+ # Private methods
720
+
721
def process_event(self, event: Event) -> None:
    """
    Process one incoming event.

    This method is overridden in subclasses to handle the handshake.

    """
    # After the handshake, every event is a frame.
    assert isinstance(event, Frame)

    # Data frames feed the message assembler.
    if event.opcode in DATA_OPCODES:
        self.recv_messages.put(event)

    # Pong frames resolve pending ping waiters.
    if event.opcode is Opcode.PONG:
        self.acknowledge_pings(bytes(event.data))
734
+
735
def acknowledge_pings(self, data: bytes) -> None:
    """
    Acknowledge pings when receiving a pong.

    """
    # Ignore unsolicited pong.
    if data not in self.pong_waiters:
        return

    pong_timestamp = self.loop.time()

    # Sending a pong for only the most recent ping is legal; in that case
    # acknowledge all earlier pings as well, in chronological order.
    ping_id = None
    acked_ids = []
    for ping_id, (waiter, ping_timestamp) in self.pong_waiters.items():
        acked_ids.append(ping_id)
        latency = pong_timestamp - ping_timestamp
        if not waiter.done():
            waiter.set_result(latency)
        if ping_id == data:
            # Only the matching ping updates the connection latency.
            self.latency = latency
            break
    else:
        raise AssertionError("solicited pong not found in pings")

    # Remove acknowledged pings from self.pong_waiters.
    for ping_id in acked_ids:
        del self.pong_waiters[ping_id]
764
+
765
def abort_pings(self) -> None:
    """
    Raise ConnectionClosed in pending pings.

    They'll never receive a pong once the connection is closed.

    """
    assert self.protocol.state is CLOSED
    close_exc = self.protocol.close_exc

    for waiter, _sent_at in self.pong_waiters.values():
        if not waiter.done():
            waiter.set_exception(close_exc)
        # If the exception is never retrieved, it will be logged when the
        # future is garbage-collected, which is confusing for users. The
        # future is already done, so cancel() changes nothing except that
        # it suppresses that log message.
        waiter.cancel()

    self.pong_waiters.clear()
785
+
786
async def keepalive(self) -> None:
    """
    Send a Ping frame and wait for a Pong frame at regular intervals.

    """
    assert self.ping_interval is not None
    latency = 0.0
    try:
        while True:
            # If self.ping_timeout > latency > self.ping_interval, pings
            # will be sent immediately after receiving pongs. The period
            # will be longer than self.ping_interval.
            await asyncio.sleep(self.ping_interval - latency)

            self.logger.debug("% sending keepalive ping")
            pong_waiter = await self.ping()

            if self.ping_timeout is not None:
                try:
                    async with asyncio_timeout(self.ping_timeout):
                        # connection_lost cancels keepalive immediately
                        # after setting a ConnectionClosed exception on
                        # pong_waiter. A CancelledError is raised here,
                        # not a ConnectionClosed exception.
                        latency = await pong_waiter
                    self.logger.debug("% received keepalive pong")
                except asyncio.TimeoutError:
                    if self.debug:
                        self.logger.debug("- timed out waiting for keepalive pong")
                    async with self.send_context():
                        self.protocol.fail(
                            CloseCode.INTERNAL_ERROR,
                            "keepalive ping timeout",
                        )
                    raise AssertionError(
                        "send_context() should wait for connection_lost(), "
                        "which cancels keepalive()"
                    )
    except Exception:
        # Includes CancelledError? NOTE(review): upstream style is to let
        # cancellation propagate; CancelledError is not an Exception on
        # Python 3.8+, so this handler doesn't swallow it.
        self.logger.error("keepalive ping failed", exc_info=True)
826
+
827
def start_keepalive(self) -> None:
    """
    Run :meth:`keepalive` in a task, unless keepalive is disabled.

    """
    # ping_interval=None disables the keepalive mechanism entirely.
    if self.ping_interval is not None:
        self.keepalive_task = self.loop.create_task(self.keepalive())
834
+
835
@contextlib.asynccontextmanager
async def send_context(
    self,
    *,
    expected_state: State = OPEN,  # CONNECTING during the opening handshake
) -> AsyncIterator[None]:
    """
    Create a context for writing to the connection from user code.

    On entry, :meth:`send_context` checks that the connection is open; on
    exit, it writes outgoing data to the socket::

        async with self.send_context():
            self.protocol.send_text(message.encode())

    When the connection isn't open on entry, when the connection is expected
    to close on exit, or when an unexpected error happens, terminating the
    connection, :meth:`send_context` waits until the connection is closed
    then raises :exc:`~websockets.exceptions.ConnectionClosed`.

    """
    # Should we wait until the connection is closed?
    wait_for_close = False
    # Should we close the transport and raise ConnectionClosed?
    raise_close_exc = False
    # What exception should we chain ConnectionClosed to?
    original_exc: BaseException | None = None

    if self.protocol.state is expected_state:
        # Let the caller interact with the protocol.
        try:
            yield
        except (ProtocolError, ConcurrencyError):
            # The protocol state wasn't changed. Exit immediately.
            raise
        except Exception as exc:
            self.logger.error("unexpected internal error", exc_info=True)
            # This branch should never run. It's a safety net in case of
            # bugs. Since we don't know what happened, we will close the
            # connection and raise the exception to the caller.
            wait_for_close = False
            raise_close_exc = True
            original_exc = exc
        else:
            # Check if the connection is expected to close soon.
            if self.protocol.close_expected():
                wait_for_close = True
                # If the connection is expected to close soon, set the
                # close deadline based on the close timeout.
                # Since we tested earlier that protocol.state was OPEN
                # (or CONNECTING), self.close_deadline is still None.
                if self.close_timeout is not None:
                    assert self.close_deadline is None
                    self.close_deadline = self.loop.time() + self.close_timeout
            # Write outgoing data to the socket and enforce flow control.
            try:
                self.send_data()
                await self.drain()
            except Exception as exc:
                if self.debug:
                    self.logger.debug("! error while sending data", exc_info=True)
                # While the only expected exception here is OSError,
                # other exceptions would be treated identically.
                wait_for_close = False
                raise_close_exc = True
                original_exc = exc

    else:  # self.protocol.state is not expected_state
        # Minor layering violation: we assume that the connection
        # will be closing soon if it isn't in the expected state.
        wait_for_close = True
        # Calculate close_deadline if it wasn't set yet.
        if self.close_timeout is not None:
            if self.close_deadline is None:
                self.close_deadline = self.loop.time() + self.close_timeout
        raise_close_exc = True

    # If the connection is expected to close soon and the close timeout
    # elapses, close the socket to terminate the connection.
    if wait_for_close:
        try:
            async with asyncio_timeout_at(self.close_deadline):
                await asyncio.shield(self.connection_lost_waiter)
        except TimeoutError:
            # There's no risk to overwrite another error because
            # original_exc is never set when wait_for_close is True.
            assert original_exc is None
            original_exc = TimeoutError("timed out while closing connection")
            # Set recv_exc before closing the transport in order to get
            # proper exception reporting.
            raise_close_exc = True
            self.set_recv_exc(original_exc)

    # If an error occurred, close the transport to terminate the connection
    # and raise an exception.
    if raise_close_exc:
        self.transport.abort()
        # Wait for the protocol state to be CLOSED before accessing close_exc.
        await asyncio.shield(self.connection_lost_waiter)
        raise self.protocol.close_exc from original_exc
935
+
936
+ def send_data(self) -> None:
937
+ """
938
+ Send outgoing data.
939
+
940
+ Raises:
941
+ OSError: When a socket operations fails.
942
+
943
+ """
944
+ for data in self.protocol.data_to_send():
945
+ if data:
946
+ self.transport.write(data)
947
+ else:
948
+ # Half-close the TCP connection when possible i.e. no TLS.
949
+ if self.transport.can_write_eof():
950
+ if self.debug:
951
+ self.logger.debug("x half-closing TCP connection")
952
+ # write_eof() doesn't document which exceptions it raises.
953
+ # OSError is plausible. uvloop can raise RuntimeError here.
954
+ try:
955
+ self.transport.write_eof()
956
+ except (OSError, RuntimeError): # pragma: no cover
957
+ pass
958
+ # Else, close the TCP connection.
959
+ else: # pragma: no cover
960
+ if self.debug:
961
+ self.logger.debug("x closing TCP connection")
962
+ self.transport.close()
963
+
964
+ def set_recv_exc(self, exc: BaseException | None) -> None:
965
+ """
966
+ Set recv_exc, if not set yet.
967
+
968
+ """
969
+ if self.recv_exc is None:
970
+ self.recv_exc = exc
971
+
972
+ # asyncio.Protocol methods
973
+
974
+ # Connection callbacks
975
+
976
+ def connection_made(self, transport: asyncio.BaseTransport) -> None:
977
+ transport = cast(asyncio.Transport, transport)
978
+ self.recv_messages = Assembler(
979
+ *self.max_queue,
980
+ pause=transport.pause_reading,
981
+ resume=transport.resume_reading,
982
+ )
983
+ transport.set_write_buffer_limits(*self.write_limit)
984
+ self.transport = transport
985
+
986
+ def connection_lost(self, exc: Exception | None) -> None:
987
+ # Calling protocol.receive_eof() is safe because it's idempotent.
988
+ # This guarantees that the protocol state becomes CLOSED.
989
+ self.protocol.receive_eof()
990
+ assert self.protocol.state is CLOSED
991
+
992
+ self.set_recv_exc(exc)
993
+
994
+ # Abort recv() and pending pings with a ConnectionClosed exception.
995
+ self.recv_messages.close()
996
+ self.abort_pings()
997
+
998
+ if self.keepalive_task is not None:
999
+ self.keepalive_task.cancel()
1000
+
1001
+ # If self.connection_lost_waiter isn't pending, that's a bug, because:
1002
+ # - it's set only here in connection_lost() which is called only once;
1003
+ # - it must never be canceled.
1004
+ self.connection_lost_waiter.set_result(None)
1005
+
1006
+ # Adapted from asyncio.streams.FlowControlMixin
1007
+ if self.paused: # pragma: no cover
1008
+ self.paused = False
1009
+ for waiter in self.drain_waiters:
1010
+ if not waiter.done():
1011
+ if exc is None:
1012
+ waiter.set_result(None)
1013
+ else:
1014
+ waiter.set_exception(exc)
1015
+
1016
+ # Flow control callbacks
1017
+
1018
+ def pause_writing(self) -> None: # pragma: no cover
1019
+ # Adapted from asyncio.streams.FlowControlMixin
1020
+ assert not self.paused
1021
+ self.paused = True
1022
+
1023
+ def resume_writing(self) -> None: # pragma: no cover
1024
+ # Adapted from asyncio.streams.FlowControlMixin
1025
+ assert self.paused
1026
+ self.paused = False
1027
+ for waiter in self.drain_waiters:
1028
+ if not waiter.done():
1029
+ waiter.set_result(None)
1030
+
1031
+ async def drain(self) -> None: # pragma: no cover
1032
+ # We don't check if the connection is closed because we call drain()
1033
+ # immediately after write() and write() would fail in that case.
1034
+
1035
+ # Adapted from asyncio.streams.StreamWriter
1036
+ # Yield to the event loop so that connection_lost() may be called.
1037
+ if self.transport.is_closing():
1038
+ await asyncio.sleep(0)
1039
+
1040
+ # Adapted from asyncio.streams.FlowControlMixin
1041
+ if self.paused:
1042
+ waiter = self.loop.create_future()
1043
+ self.drain_waiters.append(waiter)
1044
+ try:
1045
+ await waiter
1046
+ finally:
1047
+ self.drain_waiters.remove(waiter)
1048
+
1049
+ # Streaming protocol callbacks
1050
+
1051
+ def data_received(self, data: bytes) -> None:
1052
+ # Feed incoming data to the protocol.
1053
+ self.protocol.receive_data(data)
1054
+
1055
+ # This isn't expected to raise an exception.
1056
+ events = self.protocol.events_received()
1057
+
1058
+ # Write outgoing data to the transport.
1059
+ try:
1060
+ self.send_data()
1061
+ except Exception as exc:
1062
+ if self.debug:
1063
+ self.logger.debug("! error while sending data", exc_info=True)
1064
+ self.set_recv_exc(exc)
1065
+
1066
+ if self.protocol.close_expected():
1067
+ # If the connection is expected to close soon, set the
1068
+ # close deadline based on the close timeout.
1069
+ if self.close_timeout is not None:
1070
+ if self.close_deadline is None:
1071
+ self.close_deadline = self.loop.time() + self.close_timeout
1072
+
1073
+ for event in events:
1074
+ # This isn't expected to raise an exception.
1075
+ self.process_event(event)
1076
+
1077
+ def eof_received(self) -> None:
1078
+ # Feed the end of the data stream to the connection.
1079
+ self.protocol.receive_eof()
1080
+
1081
+ # This isn't expected to raise an exception.
1082
+ events = self.protocol.events_received()
1083
+
1084
+ # There is no error handling because send_data() can only write
1085
+ # the end of the data stream here and it shouldn't raise errors.
1086
+ self.send_data()
1087
+
1088
+ # This code path is triggered when receiving an HTTP response
1089
+ # without a Content-Length header. This is the only case where
1090
+ # reading until EOF generates an event; all other events have
1091
+ # a known length. Ignore for coverage measurement because tests
1092
+ # are in test_client.py rather than test_connection.py.
1093
+ for event in events: # pragma: no cover
1094
+ # This isn't expected to raise an exception.
1095
+ self.process_event(event)
1096
+
1097
+ # The WebSocket protocol has its own closing handshake: endpoints close
1098
+ # the TCP or TLS connection after sending and receiving a close frame.
1099
+ # As a consequence, they never need to write after receiving EOF, so
1100
+ # there's no reason to keep the transport open by returning True.
1101
+ # Besides, that doesn't work on TLS connections.
1102
+
1103
+
1104
+ # broadcast() is defined in the connection module even though it's primarily
1105
+ # used by servers and documented in the server module because it works with
1106
+ # client connections too and because it's easier to test together with the
1107
+ # Connection class.
1108
+
1109
+
1110
+ def broadcast(
1111
+ connections: Iterable[Connection],
1112
+ message: Data,
1113
+ raise_exceptions: bool = False,
1114
+ ) -> None:
1115
+ """
1116
+ Broadcast a message to several WebSocket connections.
1117
+
1118
+ A string (:class:`str`) is sent as a Text_ frame. A bytestring or bytes-like
1119
+ object (:class:`bytes`, :class:`bytearray`, or :class:`memoryview`) is sent
1120
+ as a Binary_ frame.
1121
+
1122
+ .. _Text: https://datatracker.ietf.org/doc/html/rfc6455#section-5.6
1123
+ .. _Binary: https://datatracker.ietf.org/doc/html/rfc6455#section-5.6
1124
+
1125
+ :func:`broadcast` pushes the message synchronously to all connections even
1126
+ if their write buffers are overflowing. There's no backpressure.
1127
+
1128
+ If you broadcast messages faster than a connection can handle them, messages
1129
+ will pile up in its write buffer until the connection times out. Keep
1130
+ ``ping_interval`` and ``ping_timeout`` low to prevent excessive memory usage
1131
+ from slow connections.
1132
+
1133
+ Unlike :meth:`~websockets.asyncio.connection.Connection.send`,
1134
+ :func:`broadcast` doesn't support sending fragmented messages. Indeed,
1135
+ fragmentation is useful for sending large messages without buffering them in
1136
+ memory, while :func:`broadcast` buffers one copy per connection as fast as
1137
+ possible.
1138
+
1139
+ :func:`broadcast` skips connections that aren't open in order to avoid
1140
+ errors on connections where the closing handshake is in progress.
1141
+
1142
+ :func:`broadcast` ignores failures to write the message on some connections.
1143
+ It continues writing to other connections. On Python 3.11 and above, you may
1144
+ set ``raise_exceptions`` to :obj:`True` to record failures and raise all
1145
+ exceptions in a :pep:`654` :exc:`ExceptionGroup`.
1146
+
1147
+ While :func:`broadcast` makes more sense for servers, it works identically
1148
+ with clients, if you have a use case for opening connections to many servers
1149
+ and broadcasting a message to them.
1150
+
1151
+ Args:
1152
+ websockets: WebSocket connections to which the message will be sent.
1153
+ message: Message to send.
1154
+ raise_exceptions: Whether to raise an exception in case of failures.
1155
+
1156
+ Raises:
1157
+ TypeError: If ``message`` doesn't have a supported type.
1158
+
1159
+ """
1160
+ if isinstance(message, str):
1161
+ send_method = "send_text"
1162
+ message = message.encode()
1163
+ elif isinstance(message, BytesLike):
1164
+ send_method = "send_binary"
1165
+ else:
1166
+ raise TypeError("data must be str or bytes")
1167
+
1168
+ if raise_exceptions:
1169
+ if sys.version_info[:2] < (3, 11): # pragma: no cover
1170
+ raise ValueError("raise_exceptions requires at least Python 3.11")
1171
+ exceptions: list[Exception] = []
1172
+
1173
+ for connection in connections:
1174
+ exception: Exception
1175
+
1176
+ if connection.protocol.state is not OPEN:
1177
+ continue
1178
+
1179
+ if connection.fragmented_send_waiter is not None:
1180
+ if raise_exceptions:
1181
+ exception = ConcurrencyError("sending a fragmented message")
1182
+ exceptions.append(exception)
1183
+ else:
1184
+ connection.logger.warning(
1185
+ "skipped broadcast: sending a fragmented message",
1186
+ )
1187
+ continue
1188
+
1189
+ try:
1190
+ # Call connection.protocol.send_text or send_binary.
1191
+ # Either way, message is already converted to bytes.
1192
+ getattr(connection.protocol, send_method)(message)
1193
+ connection.send_data()
1194
+ except Exception as write_exception:
1195
+ if raise_exceptions:
1196
+ exception = RuntimeError("failed to write message")
1197
+ exception.__cause__ = write_exception
1198
+ exceptions.append(exception)
1199
+ else:
1200
+ connection.logger.warning(
1201
+ "skipped broadcast: failed to write message: %s",
1202
+ traceback.format_exception_only(
1203
+ # Remove first argument when dropping Python 3.9.
1204
+ type(write_exception),
1205
+ write_exception,
1206
+ )[0].strip(),
1207
+ )
1208
+
1209
+ if raise_exceptions and exceptions:
1210
+ raise ExceptionGroup("skipped broadcast", exceptions)
1211
+
1212
+
1213
+ # Pretend that broadcast is actually defined in the server module.
1214
+ broadcast.__module__ = "websockets.asyncio.server"
.venv/lib/python3.11/site-packages/websockets/asyncio/messages.py ADDED
@@ -0,0 +1,296 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ import codecs
5
+ import collections
6
+ from collections.abc import AsyncIterator, Iterable
7
+ from typing import Any, Callable, Generic, TypeVar
8
+
9
+ from ..exceptions import ConcurrencyError
10
+ from ..frames import OP_BINARY, OP_CONT, OP_TEXT, Frame
11
+ from ..typing import Data
12
+
13
+
14
+ __all__ = ["Assembler"]
15
+
16
+ UTF8Decoder = codecs.getincrementaldecoder("utf-8")
17
+
18
+ T = TypeVar("T")
19
+
20
+
21
+ class SimpleQueue(Generic[T]):
22
+ """
23
+ Simplified version of :class:`asyncio.Queue`.
24
+
25
+ Provides only the subset of functionality needed by :class:`Assembler`.
26
+
27
+ """
28
+
29
+ def __init__(self) -> None:
30
+ self.loop = asyncio.get_running_loop()
31
+ self.get_waiter: asyncio.Future[None] | None = None
32
+ self.queue: collections.deque[T] = collections.deque()
33
+
34
+ def __len__(self) -> int:
35
+ return len(self.queue)
36
+
37
+ def put(self, item: T) -> None:
38
+ """Put an item into the queue without waiting."""
39
+ self.queue.append(item)
40
+ if self.get_waiter is not None and not self.get_waiter.done():
41
+ self.get_waiter.set_result(None)
42
+
43
+ async def get(self, block: bool = True) -> T:
44
+ """Remove and return an item from the queue, waiting if necessary."""
45
+ if not self.queue:
46
+ if not block:
47
+ raise EOFError("stream of frames ended")
48
+ assert self.get_waiter is None, "cannot call get() concurrently"
49
+ self.get_waiter = self.loop.create_future()
50
+ try:
51
+ await self.get_waiter
52
+ finally:
53
+ self.get_waiter.cancel()
54
+ self.get_waiter = None
55
+ return self.queue.popleft()
56
+
57
+ def reset(self, items: Iterable[T]) -> None:
58
+ """Put back items into an empty, idle queue."""
59
+ assert self.get_waiter is None, "cannot reset() while get() is running"
60
+ assert not self.queue, "cannot reset() while queue isn't empty"
61
+ self.queue.extend(items)
62
+
63
+ def abort(self) -> None:
64
+ """Close the queue, raising EOFError in get() if necessary."""
65
+ if self.get_waiter is not None and not self.get_waiter.done():
66
+ self.get_waiter.set_exception(EOFError("stream of frames ended"))
67
+
68
+
69
+ class Assembler:
70
+ """
71
+ Assemble messages from frames.
72
+
73
+ :class:`Assembler` expects only data frames. The stream of frames must
74
+ respect the protocol; if it doesn't, the behavior is undefined.
75
+
76
+ Args:
77
+ pause: Called when the buffer of frames goes above the high water mark;
78
+ should pause reading from the network.
79
+ resume: Called when the buffer of frames goes below the low water mark;
80
+ should resume reading from the network.
81
+
82
+ """
83
+
84
+ # coverage reports incorrectly: "line NN didn't jump to the function exit"
85
+ def __init__( # pragma: no cover
86
+ self,
87
+ high: int | None = None,
88
+ low: int | None = None,
89
+ pause: Callable[[], Any] = lambda: None,
90
+ resume: Callable[[], Any] = lambda: None,
91
+ ) -> None:
92
+ # Queue of incoming frames.
93
+ self.frames: SimpleQueue[Frame] = SimpleQueue()
94
+
95
+ # We cannot put a hard limit on the size of the queue because a single
96
+ # call to Protocol.data_received() could produce thousands of frames,
97
+ # which must be buffered. Instead, we pause reading when the buffer goes
98
+ # above the high limit and we resume when it goes under the low limit.
99
+ if high is not None and low is None:
100
+ low = high // 4
101
+ if high is None and low is not None:
102
+ high = low * 4
103
+ if high is not None and low is not None:
104
+ if low < 0:
105
+ raise ValueError("low must be positive or equal to zero")
106
+ if high < low:
107
+ raise ValueError("high must be greater than or equal to low")
108
+ self.high, self.low = high, low
109
+ self.pause = pause
110
+ self.resume = resume
111
+ self.paused = False
112
+
113
+ # This flag prevents concurrent calls to get() by user code.
114
+ self.get_in_progress = False
115
+
116
+ # This flag marks the end of the connection.
117
+ self.closed = False
118
+
119
+ async def get(self, decode: bool | None = None) -> Data:
120
+ """
121
+ Read the next message.
122
+
123
+ :meth:`get` returns a single :class:`str` or :class:`bytes`.
124
+
125
+ If the message is fragmented, :meth:`get` waits until the last frame is
126
+ received, then it reassembles the message and returns it. To receive
127
+ messages frame by frame, use :meth:`get_iter` instead.
128
+
129
+ Args:
130
+ decode: :obj:`False` disables UTF-8 decoding of text frames and
131
+ returns :class:`bytes`. :obj:`True` forces UTF-8 decoding of
132
+ binary frames and returns :class:`str`.
133
+
134
+ Raises:
135
+ EOFError: If the stream of frames has ended.
136
+ UnicodeDecodeError: If a text frame contains invalid UTF-8.
137
+ ConcurrencyError: If two coroutines run :meth:`get` or
138
+ :meth:`get_iter` concurrently.
139
+
140
+ """
141
+ if self.get_in_progress:
142
+ raise ConcurrencyError("get() or get_iter() is already running")
143
+ self.get_in_progress = True
144
+
145
+ # Locking with get_in_progress prevents concurrent execution
146
+ # until get() fetches a complete message or is cancelled.
147
+
148
+ try:
149
+ # First frame
150
+ frame = await self.frames.get(not self.closed)
151
+ self.maybe_resume()
152
+ assert frame.opcode is OP_TEXT or frame.opcode is OP_BINARY
153
+ if decode is None:
154
+ decode = frame.opcode is OP_TEXT
155
+ frames = [frame]
156
+
157
+ # Following frames, for fragmented messages
158
+ while not frame.fin:
159
+ try:
160
+ frame = await self.frames.get(not self.closed)
161
+ except asyncio.CancelledError:
162
+ # Put frames already received back into the queue
163
+ # so that future calls to get() can return them.
164
+ self.frames.reset(frames)
165
+ raise
166
+ self.maybe_resume()
167
+ assert frame.opcode is OP_CONT
168
+ frames.append(frame)
169
+
170
+ finally:
171
+ self.get_in_progress = False
172
+
173
+ data = b"".join(frame.data for frame in frames)
174
+ if decode:
175
+ return data.decode()
176
+ else:
177
+ return data
178
+
179
+ async def get_iter(self, decode: bool | None = None) -> AsyncIterator[Data]:
180
+ """
181
+ Stream the next message.
182
+
183
+ Iterating the return value of :meth:`get_iter` asynchronously yields a
184
+ :class:`str` or :class:`bytes` for each frame in the message.
185
+
186
+ The iterator must be fully consumed before calling :meth:`get_iter` or
187
+ :meth:`get` again. Else, :exc:`ConcurrencyError` is raised.
188
+
189
+ This method only makes sense for fragmented messages. If messages aren't
190
+ fragmented, use :meth:`get` instead.
191
+
192
+ Args:
193
+ decode: :obj:`False` disables UTF-8 decoding of text frames and
194
+ returns :class:`bytes`. :obj:`True` forces UTF-8 decoding of
195
+ binary frames and returns :class:`str`.
196
+
197
+ Raises:
198
+ EOFError: If the stream of frames has ended.
199
+ UnicodeDecodeError: If a text frame contains invalid UTF-8.
200
+ ConcurrencyError: If two coroutines run :meth:`get` or
201
+ :meth:`get_iter` concurrently.
202
+
203
+ """
204
+ if self.get_in_progress:
205
+ raise ConcurrencyError("get() or get_iter() is already running")
206
+ self.get_in_progress = True
207
+
208
+ # Locking with get_in_progress prevents concurrent execution
209
+ # until get_iter() fetches a complete message or is cancelled.
210
+
211
+ # If get_iter() raises an exception e.g. in decoder.decode(),
212
+ # get_in_progress remains set and the connection becomes unusable.
213
+
214
+ # First frame
215
+ try:
216
+ frame = await self.frames.get(not self.closed)
217
+ except asyncio.CancelledError:
218
+ self.get_in_progress = False
219
+ raise
220
+ self.maybe_resume()
221
+ assert frame.opcode is OP_TEXT or frame.opcode is OP_BINARY
222
+ if decode is None:
223
+ decode = frame.opcode is OP_TEXT
224
+ if decode:
225
+ decoder = UTF8Decoder()
226
+ yield decoder.decode(frame.data, frame.fin)
227
+ else:
228
+ yield frame.data
229
+
230
+ # Following frames, for fragmented messages
231
+ while not frame.fin:
232
+ # We cannot handle asyncio.CancelledError because we don't buffer
233
+ # previous fragments — we're streaming them. Canceling get_iter()
234
+ # here will leave the assembler in a stuck state. Future calls to
235
+ # get() or get_iter() will raise ConcurrencyError.
236
+ frame = await self.frames.get(not self.closed)
237
+ self.maybe_resume()
238
+ assert frame.opcode is OP_CONT
239
+ if decode:
240
+ yield decoder.decode(frame.data, frame.fin)
241
+ else:
242
+ yield frame.data
243
+
244
+ self.get_in_progress = False
245
+
246
+ def put(self, frame: Frame) -> None:
247
+ """
248
+ Add ``frame`` to the next message.
249
+
250
+ Raises:
251
+ EOFError: If the stream of frames has ended.
252
+
253
+ """
254
+ if self.closed:
255
+ raise EOFError("stream of frames ended")
256
+
257
+ self.frames.put(frame)
258
+ self.maybe_pause()
259
+
260
+ def maybe_pause(self) -> None:
261
+ """Pause the writer if queue is above the high water mark."""
262
+ # Skip if flow control is disabled
263
+ if self.high is None:
264
+ return
265
+
266
+ # Check for "> high" to support high = 0
267
+ if len(self.frames) > self.high and not self.paused:
268
+ self.paused = True
269
+ self.pause()
270
+
271
+ def maybe_resume(self) -> None:
272
+ """Resume the writer if queue is below the low water mark."""
273
+ # Skip if flow control is disabled
274
+ if self.low is None:
275
+ return
276
+
277
+ # Check for "<= low" to support low = 0
278
+ if len(self.frames) <= self.low and self.paused:
279
+ self.paused = False
280
+ self.resume()
281
+
282
+ def close(self) -> None:
283
+ """
284
+ End the stream of frames.
285
+
286
+ Calling :meth:`close` concurrently with :meth:`get`, :meth:`get_iter`,
287
+ or :meth:`put` is safe. They will raise :exc:`EOFError`.
288
+
289
+ """
290
+ if self.closed:
291
+ return
292
+
293
+ self.closed = True
294
+
295
+ # Unblock get() or get_iter().
296
+ self.frames.abort()
.venv/lib/python3.11/site-packages/websockets/asyncio/server.py ADDED
@@ -0,0 +1,978 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ import hmac
5
+ import http
6
+ import logging
7
+ import re
8
+ import socket
9
+ import sys
10
+ from collections.abc import Awaitable, Generator, Iterable, Sequence
11
+ from types import TracebackType
12
+ from typing import Any, Callable, cast
13
+
14
+ from ..exceptions import InvalidHeader
15
+ from ..extensions.base import ServerExtensionFactory
16
+ from ..extensions.permessage_deflate import enable_server_permessage_deflate
17
+ from ..frames import CloseCode
18
+ from ..headers import (
19
+ build_www_authenticate_basic,
20
+ parse_authorization_basic,
21
+ validate_subprotocols,
22
+ )
23
+ from ..http11 import SERVER, Request, Response
24
+ from ..protocol import CONNECTING, OPEN, Event
25
+ from ..server import ServerProtocol
26
+ from ..typing import LoggerLike, Origin, StatusLike, Subprotocol
27
+ from .compatibility import asyncio_timeout
28
+ from .connection import Connection, broadcast
29
+
30
+
31
+ __all__ = [
32
+ "broadcast",
33
+ "serve",
34
+ "unix_serve",
35
+ "ServerConnection",
36
+ "Server",
37
+ "basic_auth",
38
+ ]
39
+
40
+
41
+ class ServerConnection(Connection):
42
+ """
43
+ :mod:`asyncio` implementation of a WebSocket server connection.
44
+
45
+ :class:`ServerConnection` provides :meth:`recv` and :meth:`send` methods for
46
+ receiving and sending messages.
47
+
48
+ It supports asynchronous iteration to receive messages::
49
+
50
+ async for message in websocket:
51
+ await process(message)
52
+
53
+ The iterator exits normally when the connection is closed with close code
54
+ 1000 (OK) or 1001 (going away) or without a close code. It raises a
55
+ :exc:`~websockets.exceptions.ConnectionClosedError` when the connection is
56
+ closed with any other code.
57
+
58
+ The ``ping_interval``, ``ping_timeout``, ``close_timeout``, ``max_queue``,
59
+ and ``write_limit`` arguments have the same meaning as in :func:`serve`.
60
+
61
+ Args:
62
+ protocol: Sans-I/O connection.
63
+ server: Server that manages this connection.
64
+
65
+ """
66
+
67
+ def __init__(
68
+ self,
69
+ protocol: ServerProtocol,
70
+ server: Server,
71
+ *,
72
+ ping_interval: float | None = 20,
73
+ ping_timeout: float | None = 20,
74
+ close_timeout: float | None = 10,
75
+ max_queue: int | None | tuple[int | None, int | None] = 16,
76
+ write_limit: int | tuple[int, int | None] = 2**15,
77
+ ) -> None:
78
+ self.protocol: ServerProtocol
79
+ super().__init__(
80
+ protocol,
81
+ ping_interval=ping_interval,
82
+ ping_timeout=ping_timeout,
83
+ close_timeout=close_timeout,
84
+ max_queue=max_queue,
85
+ write_limit=write_limit,
86
+ )
87
+ self.server = server
88
+ self.request_rcvd: asyncio.Future[None] = self.loop.create_future()
89
+ self.username: str # see basic_auth()
90
+
91
+ def respond(self, status: StatusLike, text: str) -> Response:
92
+ """
93
+ Create a plain text HTTP response.
94
+
95
+ ``process_request`` and ``process_response`` may call this method to
96
+ return an HTTP response instead of performing the WebSocket opening
97
+ handshake.
98
+
99
+ You can modify the response before returning it, for example by changing
100
+ HTTP headers.
101
+
102
+ Args:
103
+ status: HTTP status code.
104
+ text: HTTP response body; it will be encoded to UTF-8.
105
+
106
+ Returns:
107
+ HTTP response to send to the client.
108
+
109
+ """
110
+ return self.protocol.reject(status, text)
111
+
112
+ async def handshake(
113
+ self,
114
+ process_request: (
115
+ Callable[
116
+ [ServerConnection, Request],
117
+ Awaitable[Response | None] | Response | None,
118
+ ]
119
+ | None
120
+ ) = None,
121
+ process_response: (
122
+ Callable[
123
+ [ServerConnection, Request, Response],
124
+ Awaitable[Response | None] | Response | None,
125
+ ]
126
+ | None
127
+ ) = None,
128
+ server_header: str | None = SERVER,
129
+ ) -> None:
130
+ """
131
+ Perform the opening handshake.
132
+
133
+ """
134
+ await asyncio.wait(
135
+ [self.request_rcvd, self.connection_lost_waiter],
136
+ return_when=asyncio.FIRST_COMPLETED,
137
+ )
138
+
139
+ if self.request is not None:
140
+ async with self.send_context(expected_state=CONNECTING):
141
+ response = None
142
+
143
+ if process_request is not None:
144
+ try:
145
+ response = process_request(self, self.request)
146
+ if isinstance(response, Awaitable):
147
+ response = await response
148
+ except Exception as exc:
149
+ self.protocol.handshake_exc = exc
150
+ response = self.protocol.reject(
151
+ http.HTTPStatus.INTERNAL_SERVER_ERROR,
152
+ (
153
+ "Failed to open a WebSocket connection.\n"
154
+ "See server log for more information.\n"
155
+ ),
156
+ )
157
+
158
+ if response is None:
159
+ if self.server.is_serving():
160
+ self.response = self.protocol.accept(self.request)
161
+ else:
162
+ self.response = self.protocol.reject(
163
+ http.HTTPStatus.SERVICE_UNAVAILABLE,
164
+ "Server is shutting down.\n",
165
+ )
166
+ else:
167
+ assert isinstance(response, Response) # help mypy
168
+ self.response = response
169
+
170
+ if server_header:
171
+ self.response.headers["Server"] = server_header
172
+
173
+ response = None
174
+
175
+ if process_response is not None:
176
+ try:
177
+ response = process_response(self, self.request, self.response)
178
+ if isinstance(response, Awaitable):
179
+ response = await response
180
+ except Exception as exc:
181
+ self.protocol.handshake_exc = exc
182
+ response = self.protocol.reject(
183
+ http.HTTPStatus.INTERNAL_SERVER_ERROR,
184
+ (
185
+ "Failed to open a WebSocket connection.\n"
186
+ "See server log for more information.\n"
187
+ ),
188
+ )
189
+
190
+ if response is not None:
191
+ assert isinstance(response, Response) # help mypy
192
+ self.response = response
193
+
194
+ self.protocol.send_response(self.response)
195
+
196
+ # self.protocol.handshake_exc is set when the connection is lost before
197
+ # receiving a request, when the request cannot be parsed, or when the
198
+ # handshake fails, including when process_request or process_response
199
+ # raises an exception.
200
+
201
+ # It isn't set when process_request or process_response sends an HTTP
202
+ # response that rejects the handshake.
203
+
204
+ if self.protocol.handshake_exc is not None:
205
+ raise self.protocol.handshake_exc
206
+
207
+ def process_event(self, event: Event) -> None:
208
+ """
209
+ Process one incoming event.
210
+
211
+ """
212
+ # First event - handshake request.
213
+ if self.request is None:
214
+ assert isinstance(event, Request)
215
+ self.request = event
216
+ self.request_rcvd.set_result(None)
217
+ # Later events - frames.
218
+ else:
219
+ super().process_event(event)
220
+
221
+ def connection_made(self, transport: asyncio.BaseTransport) -> None:
222
+ super().connection_made(transport)
223
+ self.server.start_connection_handler(self)
224
+
225
+
226
+ class Server:
227
+ """
228
+ WebSocket server returned by :func:`serve`.
229
+
230
+ This class mirrors the API of :class:`asyncio.Server`.
231
+
232
+ It keeps track of WebSocket connections in order to close them properly
233
+ when shutting down.
234
+
235
+ Args:
236
+ handler: Connection handler. It receives the WebSocket connection,
237
+ which is a :class:`ServerConnection`, in argument.
238
+ process_request: Intercept the request during the opening handshake.
239
+ Return an HTTP response to force the response. Return :obj:`None` to
240
+ continue normally. When you force an HTTP 101 Continue response, the
241
+ handshake is successful. Else, the connection is aborted.
242
+ ``process_request`` may be a function or a coroutine.
243
+ process_response: Intercept the response during the opening handshake.
244
+ Modify the response or return a new HTTP response to force the
245
+ response. Return :obj:`None` to continue normally. When you force an
246
+ HTTP 101 Continue response, the handshake is successful. Else, the
247
+ connection is aborted. ``process_response`` may be a function or a
248
+ coroutine.
249
+ server_header: Value of the ``Server`` response header.
250
+ It defaults to ``"Python/x.y.z websockets/X.Y"``. Setting it to
251
+ :obj:`None` removes the header.
252
+ open_timeout: Timeout for opening connections in seconds.
253
+ :obj:`None` disables the timeout.
254
+ logger: Logger for this server.
255
+ It defaults to ``logging.getLogger("websockets.server")``.
256
+ See the :doc:`logging guide <../../topics/logging>` for details.
257
+
258
+ """
259
+
260
+ def __init__(
261
+ self,
262
+ handler: Callable[[ServerConnection], Awaitable[None]],
263
+ *,
264
+ process_request: (
265
+ Callable[
266
+ [ServerConnection, Request],
267
+ Awaitable[Response | None] | Response | None,
268
+ ]
269
+ | None
270
+ ) = None,
271
+ process_response: (
272
+ Callable[
273
+ [ServerConnection, Request, Response],
274
+ Awaitable[Response | None] | Response | None,
275
+ ]
276
+ | None
277
+ ) = None,
278
+ server_header: str | None = SERVER,
279
+ open_timeout: float | None = 10,
280
+ logger: LoggerLike | None = None,
281
+ ) -> None:
282
+ self.loop = asyncio.get_running_loop()
283
+ self.handler = handler
284
+ self.process_request = process_request
285
+ self.process_response = process_response
286
+ self.server_header = server_header
287
+ self.open_timeout = open_timeout
288
+ if logger is None:
289
+ logger = logging.getLogger("websockets.server")
290
+ self.logger = logger
291
+
292
+ # Keep track of active connections.
293
+ self.handlers: dict[ServerConnection, asyncio.Task[None]] = {}
294
+
295
+ # Task responsible for closing the server and terminating connections.
296
+ self.close_task: asyncio.Task[None] | None = None
297
+
298
+ # Completed when the server is closed and connections are terminated.
299
+ self.closed_waiter: asyncio.Future[None] = self.loop.create_future()
300
+
301
+ @property
302
+ def connections(self) -> set[ServerConnection]:
303
+ """
304
+ Set of active connections.
305
+
306
+ This property contains all connections that completed the opening
307
+ handshake successfully and didn't start the closing handshake yet.
308
+ It can be useful in combination with :func:`~broadcast`.
309
+
310
+ """
311
+ return {connection for connection in self.handlers if connection.state is OPEN}
312
+
313
+ def wrap(self, server: asyncio.Server) -> None:
314
+ """
315
+ Attach to a given :class:`asyncio.Server`.
316
+
317
+ Since :meth:`~asyncio.loop.create_server` doesn't support injecting a
318
+ custom ``Server`` class, the easiest solution that doesn't rely on
319
+ private :mod:`asyncio` APIs is to:
320
+
321
+ - instantiate a :class:`Server`
322
+ - give the protocol factory a reference to that instance
323
+ - call :meth:`~asyncio.loop.create_server` with the factory
324
+ - attach the resulting :class:`asyncio.Server` with this method
325
+
326
+ """
327
+ self.server = server
328
+ for sock in server.sockets:
329
+ if sock.family == socket.AF_INET:
330
+ name = "%s:%d" % sock.getsockname()
331
+ elif sock.family == socket.AF_INET6:
332
+ name = "[%s]:%d" % sock.getsockname()[:2]
333
+ elif sock.family == socket.AF_UNIX:
334
+ name = sock.getsockname()
335
+ # In the unlikely event that someone runs websockets over a
336
+ # protocol other than IP or Unix sockets, avoid crashing.
337
+ else: # pragma: no cover
338
+ name = str(sock.getsockname())
339
+ self.logger.info("server listening on %s", name)
340
+
341
+ async def conn_handler(self, connection: ServerConnection) -> None:
342
+ """
343
+ Handle the lifecycle of a WebSocket connection.
344
+
345
+ Since this method doesn't have a caller that can handle exceptions,
346
+ it attempts to log relevant ones.
347
+
348
+ It guarantees that the TCP connection is closed before exiting.
349
+
350
+ """
351
+ try:
352
+ async with asyncio_timeout(self.open_timeout):
353
+ try:
354
+ await connection.handshake(
355
+ self.process_request,
356
+ self.process_response,
357
+ self.server_header,
358
+ )
359
+ except asyncio.CancelledError:
360
+ connection.transport.abort()
361
+ raise
362
+ except Exception:
363
+ connection.logger.error("opening handshake failed", exc_info=True)
364
+ connection.transport.abort()
365
+ return
366
+
367
+ if connection.protocol.state is not OPEN:
368
+ # process_request or process_response rejected the handshake.
369
+ connection.transport.abort()
370
+ return
371
+
372
+ try:
373
+ connection.start_keepalive()
374
+ await self.handler(connection)
375
+ except Exception:
376
+ connection.logger.error("connection handler failed", exc_info=True)
377
+ await connection.close(CloseCode.INTERNAL_ERROR)
378
+ else:
379
+ await connection.close()
380
+
381
+ except TimeoutError:
382
+ # When the opening handshake times out, there's nothing to log.
383
+ pass
384
+
385
+ except Exception: # pragma: no cover
386
+ # Don't leak connections on unexpected errors.
387
+ connection.transport.abort()
388
+
389
+ finally:
390
+ # Registration is tied to the lifecycle of conn_handler() because
391
+ # the server waits for connection handlers to terminate, even if
392
+ # all connections are already closed.
393
+ del self.handlers[connection]
394
+
395
+ def start_connection_handler(self, connection: ServerConnection) -> None:
396
+ """
397
+ Register a connection with this server.
398
+
399
+ """
400
+ # The connection must be registered in self.handlers immediately.
401
+ # If it was registered in conn_handler(), a race condition could
402
+ # happen when closing the server after scheduling conn_handler()
403
+ # but before it starts executing.
404
+ self.handlers[connection] = self.loop.create_task(self.conn_handler(connection))
405
+
406
+ def close(self, close_connections: bool = True) -> None:
407
+ """
408
+ Close the server.
409
+
410
+ * Close the underlying :class:`asyncio.Server`.
411
+ * When ``close_connections`` is :obj:`True`, which is the default,
412
+ close existing connections. Specifically:
413
+
414
+ * Reject opening WebSocket connections with an HTTP 503 (service
415
+ unavailable) error. This happens when the server accepted the TCP
416
+ connection but didn't complete the opening handshake before closing.
417
+ * Close open WebSocket connections with close code 1001 (going away).
418
+
419
+ * Wait until all connection handlers terminate.
420
+
421
+ :meth:`close` is idempotent.
422
+
423
+ """
424
+ if self.close_task is None:
425
+ self.close_task = self.get_loop().create_task(
426
+ self._close(close_connections)
427
+ )
428
+
429
+ async def _close(self, close_connections: bool) -> None:
430
+ """
431
+ Implementation of :meth:`close`.
432
+
433
+ This calls :meth:`~asyncio.Server.close` on the underlying
434
+ :class:`asyncio.Server` object to stop accepting new connections and
435
+ then closes open connections with close code 1001.
436
+
437
+ """
438
+ self.logger.info("server closing")
439
+
440
+ # Stop accepting new connections.
441
+ self.server.close()
442
+
443
+ # Wait until all accepted connections reach connection_made() and call
444
+ # register(). See https://github.com/python/cpython/issues/79033 for
445
+ # details. This workaround can be removed when dropping Python < 3.11.
446
+ await asyncio.sleep(0)
447
+
448
+ if close_connections:
449
+ # Close OPEN connections with close code 1001. After server.close(),
450
+ # handshake() closes OPENING connections with an HTTP 503 error.
451
+ close_tasks = [
452
+ asyncio.create_task(connection.close(1001))
453
+ for connection in self.handlers
454
+ if connection.protocol.state is not CONNECTING
455
+ ]
456
+ # asyncio.wait doesn't accept an empty first argument.
457
+ if close_tasks:
458
+ await asyncio.wait(close_tasks)
459
+
460
+ # Wait until all TCP connections are closed.
461
+ await self.server.wait_closed()
462
+
463
+ # Wait until all connection handlers terminate.
464
+ # asyncio.wait doesn't accept an empty first argument.
465
+ if self.handlers:
466
+ await asyncio.wait(self.handlers.values())
467
+
468
+ # Tell wait_closed() to return.
469
+ self.closed_waiter.set_result(None)
470
+
471
+ self.logger.info("server closed")
472
+
473
+ async def wait_closed(self) -> None:
474
+ """
475
+ Wait until the server is closed.
476
+
477
+ When :meth:`wait_closed` returns, all TCP connections are closed and
478
+ all connection handlers have returned.
479
+
480
+ To ensure a fast shutdown, a connection handler should always be
481
+ awaiting at least one of:
482
+
483
+ * :meth:`~ServerConnection.recv`: when the connection is closed,
484
+ it raises :exc:`~websockets.exceptions.ConnectionClosedOK`;
485
+ * :meth:`~ServerConnection.wait_closed`: when the connection is
486
+ closed, it returns.
487
+
488
+ Then the connection handler is immediately notified of the shutdown;
489
+ it can clean up and exit.
490
+
491
+ """
492
+ await asyncio.shield(self.closed_waiter)
493
+
494
+ def get_loop(self) -> asyncio.AbstractEventLoop:
495
+ """
496
+ See :meth:`asyncio.Server.get_loop`.
497
+
498
+ """
499
+ return self.server.get_loop()
500
+
501
+ def is_serving(self) -> bool: # pragma: no cover
502
+ """
503
+ See :meth:`asyncio.Server.is_serving`.
504
+
505
+ """
506
+ return self.server.is_serving()
507
+
508
+ async def start_serving(self) -> None: # pragma: no cover
509
+ """
510
+ See :meth:`asyncio.Server.start_serving`.
511
+
512
+ Typical use::
513
+
514
+ server = await serve(..., start_serving=False)
515
+ # perform additional setup here...
516
+ # ... then start the server
517
+ await server.start_serving()
518
+
519
+ """
520
+ await self.server.start_serving()
521
+
522
+ async def serve_forever(self) -> None: # pragma: no cover
523
+ """
524
+ See :meth:`asyncio.Server.serve_forever`.
525
+
526
+ Typical use::
527
+
528
+ server = await serve(...)
529
+ # this coroutine doesn't return
530
+ # canceling it stops the server
531
+ await server.serve_forever()
532
+
533
+ This is an alternative to using :func:`serve` as an asynchronous context
534
+ manager. Shutdown is triggered by canceling :meth:`serve_forever`
535
+ instead of exiting a :func:`serve` context.
536
+
537
+ """
538
+ await self.server.serve_forever()
539
+
540
+ @property
541
+ def sockets(self) -> Iterable[socket.socket]:
542
+ """
543
+ See :attr:`asyncio.Server.sockets`.
544
+
545
+ """
546
+ return self.server.sockets
547
+
548
+ async def __aenter__(self) -> Server: # pragma: no cover
549
+ return self
550
+
551
+ async def __aexit__(
552
+ self,
553
+ exc_type: type[BaseException] | None,
554
+ exc_value: BaseException | None,
555
+ traceback: TracebackType | None,
556
+ ) -> None: # pragma: no cover
557
+ self.close()
558
+ await self.wait_closed()
559
+
560
+
561
+ # This is spelled in lower case because it's exposed as a callable in the API.
562
+ class serve:
563
+ """
564
+ Create a WebSocket server listening on ``host`` and ``port``.
565
+
566
+ Whenever a client connects, the server creates a :class:`ServerConnection`,
567
+ performs the opening handshake, and delegates to the ``handler`` coroutine.
568
+
569
+ The handler receives the :class:`ServerConnection` instance, which you can
570
+ use to send and receive messages.
571
+
572
+ Once the handler completes, either normally or with an exception, the server
573
+ performs the closing handshake and closes the connection.
574
+
575
+ This coroutine returns a :class:`Server` whose API mirrors
576
+ :class:`asyncio.Server`. Treat it as an asynchronous context manager to
577
+ ensure that the server will be closed::
578
+
579
+ from websockets.asyncio.server import serve
580
+
581
+ def handler(websocket):
582
+ ...
583
+
584
+ # set this future to exit the server
585
+ stop = asyncio.get_running_loop().create_future()
586
+
587
+ async with serve(handler, host, port):
588
+ await stop
589
+
590
+ Alternatively, call :meth:`~Server.serve_forever` to serve requests and
591
+ cancel it to stop the server::
592
+
593
+ server = await serve(handler, host, port)
594
+ await server.serve_forever()
595
+
596
+ Args:
597
+ handler: Connection handler. It receives the WebSocket connection,
598
+ which is a :class:`ServerConnection`, in argument.
599
+ host: Network interfaces the server binds to.
600
+ See :meth:`~asyncio.loop.create_server` for details.
601
+ port: TCP port the server listens on.
602
+ See :meth:`~asyncio.loop.create_server` for details.
603
+ origins: Acceptable values of the ``Origin`` header, for defending
604
+ against Cross-Site WebSocket Hijacking attacks. Values can be
605
+ :class:`str` to test for an exact match or regular expressions
606
+ compiled by :func:`re.compile` to test against a pattern. Include
607
+ :obj:`None` in the list if the lack of an origin is acceptable.
608
+ extensions: List of supported extensions, in order in which they
609
+ should be negotiated and run.
610
+ subprotocols: List of supported subprotocols, in order of decreasing
611
+ preference.
612
+ select_subprotocol: Callback for selecting a subprotocol among
613
+ those supported by the client and the server. It receives a
614
+ :class:`ServerConnection` (not a
615
+ :class:`~websockets.server.ServerProtocol`!) instance and a list of
616
+ subprotocols offered by the client. Other than the first argument,
617
+ it has the same behavior as the
618
+ :meth:`ServerProtocol.select_subprotocol
619
+ <websockets.server.ServerProtocol.select_subprotocol>` method.
620
+ process_request: Intercept the request during the opening handshake.
621
+ Return an HTTP response to force the response or :obj:`None` to
622
+ continue normally. When you force an HTTP 101 Continue response, the
623
+ handshake is successful. Else, the connection is aborted.
624
+ ``process_request`` may be a function or a coroutine.
625
+ process_response: Intercept the response during the opening handshake.
626
+ Return an HTTP response to force the response or :obj:`None` to
627
+ continue normally. When you force an HTTP 101 Continue response, the
628
+ handshake is successful. Else, the connection is aborted.
629
+ ``process_response`` may be a function or a coroutine.
630
+ server_header: Value of the ``Server`` response header.
631
+ It defaults to ``"Python/x.y.z websockets/X.Y"``. Setting it to
632
+ :obj:`None` removes the header.
633
+ compression: The "permessage-deflate" extension is enabled by default.
634
+ Set ``compression`` to :obj:`None` to disable it. See the
635
+ :doc:`compression guide <../../topics/compression>` for details.
636
+ open_timeout: Timeout for opening connections in seconds.
637
+ :obj:`None` disables the timeout.
638
+ ping_interval: Interval between keepalive pings in seconds.
639
+ :obj:`None` disables keepalive.
640
+ ping_timeout: Timeout for keepalive pings in seconds.
641
+ :obj:`None` disables timeouts.
642
+ close_timeout: Timeout for closing connections in seconds.
643
+ :obj:`None` disables the timeout.
644
+ max_size: Maximum size of incoming messages in bytes.
645
+ :obj:`None` disables the limit.
646
+ max_queue: High-water mark of the buffer where frames are received.
647
+ It defaults to 16 frames. The low-water mark defaults to ``max_queue
648
+ // 4``. You may pass a ``(high, low)`` tuple to set the high-water
649
+ and low-water marks. If you want to disable flow control entirely,
650
+ you may set it to ``None``, although that's a bad idea.
651
+ write_limit: High-water mark of write buffer in bytes. It is passed to
652
+ :meth:`~asyncio.WriteTransport.set_write_buffer_limits`. It defaults
653
+ to 32 KiB. You may pass a ``(high, low)`` tuple to set the
654
+ high-water and low-water marks.
655
+ logger: Logger for this server.
656
+ It defaults to ``logging.getLogger("websockets.server")``. See the
657
+ :doc:`logging guide <../../topics/logging>` for details.
658
+ create_connection: Factory for the :class:`ServerConnection` managing
659
+ the connection. Set it to a wrapper or a subclass to customize
660
+ connection handling.
661
+
662
+ Any other keyword arguments are passed to the event loop's
663
+ :meth:`~asyncio.loop.create_server` method.
664
+
665
+ For example:
666
+
667
+ * You can set ``ssl`` to a :class:`~ssl.SSLContext` to enable TLS.
668
+
669
+ * You can set ``sock`` to provide a preexisting TCP socket. You may call
670
+ :func:`socket.create_server` (not to be confused with the event loop's
671
+ :meth:`~asyncio.loop.create_server` method) to create a suitable server
672
+ socket and customize it.
673
+
674
+ * You can set ``start_serving`` to ``False`` to start accepting connections
675
+ only after you call :meth:`~Server.start_serving()` or
676
+ :meth:`~Server.serve_forever()`.
677
+
678
+ """
679
+
680
+ def __init__(
681
+ self,
682
+ handler: Callable[[ServerConnection], Awaitable[None]],
683
+ host: str | None = None,
684
+ port: int | None = None,
685
+ *,
686
+ # WebSocket
687
+ origins: Sequence[Origin | re.Pattern[str] | None] | None = None,
688
+ extensions: Sequence[ServerExtensionFactory] | None = None,
689
+ subprotocols: Sequence[Subprotocol] | None = None,
690
+ select_subprotocol: (
691
+ Callable[
692
+ [ServerConnection, Sequence[Subprotocol]],
693
+ Subprotocol | None,
694
+ ]
695
+ | None
696
+ ) = None,
697
+ process_request: (
698
+ Callable[
699
+ [ServerConnection, Request],
700
+ Awaitable[Response | None] | Response | None,
701
+ ]
702
+ | None
703
+ ) = None,
704
+ process_response: (
705
+ Callable[
706
+ [ServerConnection, Request, Response],
707
+ Awaitable[Response | None] | Response | None,
708
+ ]
709
+ | None
710
+ ) = None,
711
+ server_header: str | None = SERVER,
712
+ compression: str | None = "deflate",
713
+ # Timeouts
714
+ open_timeout: float | None = 10,
715
+ ping_interval: float | None = 20,
716
+ ping_timeout: float | None = 20,
717
+ close_timeout: float | None = 10,
718
+ # Limits
719
+ max_size: int | None = 2**20,
720
+ max_queue: int | None | tuple[int | None, int | None] = 16,
721
+ write_limit: int | tuple[int, int | None] = 2**15,
722
+ # Logging
723
+ logger: LoggerLike | None = None,
724
+ # Escape hatch for advanced customization
725
+ create_connection: type[ServerConnection] | None = None,
726
+ # Other keyword arguments are passed to loop.create_server
727
+ **kwargs: Any,
728
+ ) -> None:
729
+ if subprotocols is not None:
730
+ validate_subprotocols(subprotocols)
731
+
732
+ if compression == "deflate":
733
+ extensions = enable_server_permessage_deflate(extensions)
734
+ elif compression is not None:
735
+ raise ValueError(f"unsupported compression: {compression}")
736
+
737
+ if create_connection is None:
738
+ create_connection = ServerConnection
739
+
740
+ self.server = Server(
741
+ handler,
742
+ process_request=process_request,
743
+ process_response=process_response,
744
+ server_header=server_header,
745
+ open_timeout=open_timeout,
746
+ logger=logger,
747
+ )
748
+
749
+ if kwargs.get("ssl") is not None:
750
+ kwargs.setdefault("ssl_handshake_timeout", open_timeout)
751
+ if sys.version_info[:2] >= (3, 11): # pragma: no branch
752
+ kwargs.setdefault("ssl_shutdown_timeout", close_timeout)
753
+
754
+ def factory() -> ServerConnection:
755
+ """
756
+ Create an asyncio protocol for managing a WebSocket connection.
757
+
758
+ """
759
+ # Create a closure to give select_subprotocol access to connection.
760
+ protocol_select_subprotocol: (
761
+ Callable[
762
+ [ServerProtocol, Sequence[Subprotocol]],
763
+ Subprotocol | None,
764
+ ]
765
+ | None
766
+ ) = None
767
+ if select_subprotocol is not None:
768
+
769
+ def protocol_select_subprotocol(
770
+ protocol: ServerProtocol,
771
+ subprotocols: Sequence[Subprotocol],
772
+ ) -> Subprotocol | None:
773
+ # mypy doesn't know that select_subprotocol is immutable.
774
+ assert select_subprotocol is not None
775
+ # Ensure this function is only used in the intended context.
776
+ assert protocol is connection.protocol
777
+ return select_subprotocol(connection, subprotocols)
778
+
779
+ # This is a protocol in the Sans-I/O implementation of websockets.
780
+ protocol = ServerProtocol(
781
+ origins=origins,
782
+ extensions=extensions,
783
+ subprotocols=subprotocols,
784
+ select_subprotocol=protocol_select_subprotocol,
785
+ max_size=max_size,
786
+ logger=logger,
787
+ )
788
+ # This is a connection in websockets and a protocol in asyncio.
789
+ connection = create_connection(
790
+ protocol,
791
+ self.server,
792
+ ping_interval=ping_interval,
793
+ ping_timeout=ping_timeout,
794
+ close_timeout=close_timeout,
795
+ max_queue=max_queue,
796
+ write_limit=write_limit,
797
+ )
798
+ return connection
799
+
800
+ loop = asyncio.get_running_loop()
801
+ if kwargs.pop("unix", False):
802
+ self.create_server = loop.create_unix_server(factory, **kwargs)
803
+ else:
804
+ # mypy cannot tell that kwargs must provide sock when port is None.
805
+ self.create_server = loop.create_server(factory, host, port, **kwargs) # type: ignore[arg-type]
806
+
807
+ # async with serve(...) as ...: ...
808
+
809
+ async def __aenter__(self) -> Server:
810
+ return await self
811
+
812
+ async def __aexit__(
813
+ self,
814
+ exc_type: type[BaseException] | None,
815
+ exc_value: BaseException | None,
816
+ traceback: TracebackType | None,
817
+ ) -> None:
818
+ self.server.close()
819
+ await self.server.wait_closed()
820
+
821
+ # ... = await serve(...)
822
+
823
+ def __await__(self) -> Generator[Any, None, Server]:
824
+ # Create a suitable iterator by calling __await__ on a coroutine.
825
+ return self.__await_impl__().__await__()
826
+
827
+ async def __await_impl__(self) -> Server:
828
+ server = await self.create_server
829
+ self.server.wrap(server)
830
+ return self.server
831
+
832
+ # ... = yield from serve(...) - remove when dropping Python < 3.10
833
+
834
+ __iter__ = __await__
835
+
836
+
837
+ def unix_serve(
838
+ handler: Callable[[ServerConnection], Awaitable[None]],
839
+ path: str | None = None,
840
+ **kwargs: Any,
841
+ ) -> Awaitable[Server]:
842
+ """
843
+ Create a WebSocket server listening on a Unix socket.
844
+
845
+ This function is identical to :func:`serve`, except the ``host`` and
846
+ ``port`` arguments are replaced by ``path``. It's only available on Unix.
847
+
848
+ It's useful for deploying a server behind a reverse proxy such as nginx.
849
+
850
+ Args:
851
+ handler: Connection handler. It receives the WebSocket connection,
852
+ which is a :class:`ServerConnection`, in argument.
853
+ path: File system path to the Unix socket.
854
+
855
+ """
856
+ return serve(handler, unix=True, path=path, **kwargs)
857
+
858
+
859
+ def is_credentials(credentials: Any) -> bool:
860
+ try:
861
+ username, password = credentials
862
+ except (TypeError, ValueError):
863
+ return False
864
+ else:
865
+ return isinstance(username, str) and isinstance(password, str)
866
+
867
+
868
+ def basic_auth(
869
+ realm: str = "",
870
+ credentials: tuple[str, str] | Iterable[tuple[str, str]] | None = None,
871
+ check_credentials: Callable[[str, str], Awaitable[bool] | bool] | None = None,
872
+ ) -> Callable[[ServerConnection, Request], Awaitable[Response | None]]:
873
+ """
874
+ Factory for ``process_request`` to enforce HTTP Basic Authentication.
875
+
876
+ :func:`basic_auth` is designed to integrate with :func:`serve` as follows::
877
+
878
+ from websockets.asyncio.server import basic_auth, serve
879
+
880
+ async with serve(
881
+ ...,
882
+ process_request=basic_auth(
883
+ realm="my dev server",
884
+ credentials=("hello", "iloveyou"),
885
+ ),
886
+ ):
887
+
888
+ If authentication succeeds, the connection's ``username`` attribute is set.
889
+ If it fails, the server responds with an HTTP 401 Unauthorized status.
890
+
891
+ One of ``credentials`` or ``check_credentials`` must be provided; not both.
892
+
893
+ Args:
894
+ realm: Scope of protection. It should contain only ASCII characters
895
+ because the encoding of non-ASCII characters is undefined. Refer to
896
+ section 2.2 of :rfc:`7235` for details.
897
+ credentials: Hard coded authorized credentials. It can be a
898
+ ``(username, password)`` pair or a list of such pairs.
899
+ check_credentials: Function or coroutine that verifies credentials.
900
+ It receives ``username`` and ``password`` arguments and returns
901
+ whether they're valid.
902
+ Raises:
903
+ TypeError: If ``credentials`` or ``check_credentials`` is wrong.
904
+ ValueError: If ``credentials`` and ``check_credentials`` are both
905
+ provided or both not provided.
906
+
907
+ """
908
+ if (credentials is None) == (check_credentials is None):
909
+ raise ValueError("provide either credentials or check_credentials")
910
+
911
+ if credentials is not None:
912
+ if is_credentials(credentials):
913
+ credentials_list = [cast(tuple[str, str], credentials)]
914
+ elif isinstance(credentials, Iterable):
915
+ credentials_list = list(cast(Iterable[tuple[str, str]], credentials))
916
+ if not all(is_credentials(item) for item in credentials_list):
917
+ raise TypeError(f"invalid credentials argument: {credentials}")
918
+ else:
919
+ raise TypeError(f"invalid credentials argument: {credentials}")
920
+
921
+ credentials_dict = dict(credentials_list)
922
+
923
+ def check_credentials(username: str, password: str) -> bool:
924
+ try:
925
+ expected_password = credentials_dict[username]
926
+ except KeyError:
927
+ return False
928
+ return hmac.compare_digest(expected_password, password)
929
+
930
+ assert check_credentials is not None # help mypy
931
+
932
+ async def process_request(
933
+ connection: ServerConnection,
934
+ request: Request,
935
+ ) -> Response | None:
936
+ """
937
+ Perform HTTP Basic Authentication.
938
+
939
+ If it succeeds, set the connection's ``username`` attribute and return
940
+ :obj:`None`. If it fails, return an HTTP 401 Unauthorized responss.
941
+
942
+ """
943
+ try:
944
+ authorization = request.headers["Authorization"]
945
+ except KeyError:
946
+ response = connection.respond(
947
+ http.HTTPStatus.UNAUTHORIZED,
948
+ "Missing credentials\n",
949
+ )
950
+ response.headers["WWW-Authenticate"] = build_www_authenticate_basic(realm)
951
+ return response
952
+
953
+ try:
954
+ username, password = parse_authorization_basic(authorization)
955
+ except InvalidHeader:
956
+ response = connection.respond(
957
+ http.HTTPStatus.UNAUTHORIZED,
958
+ "Unsupported credentials\n",
959
+ )
960
+ response.headers["WWW-Authenticate"] = build_www_authenticate_basic(realm)
961
+ return response
962
+
963
+ valid_credentials = check_credentials(username, password)
964
+ if isinstance(valid_credentials, Awaitable):
965
+ valid_credentials = await valid_credentials
966
+
967
+ if not valid_credentials:
968
+ response = connection.respond(
969
+ http.HTTPStatus.UNAUTHORIZED,
970
+ "Invalid credentials\n",
971
+ )
972
+ response.headers["WWW-Authenticate"] = build_www_authenticate_basic(realm)
973
+ return response
974
+
975
+ connection.username = username
976
+ return None
977
+
978
+ return process_request
.venv/lib/python3.11/site-packages/websockets/extensions/__init__.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ from .base import *
2
+
3
+
4
+ __all__ = ["Extension", "ClientExtensionFactory", "ServerExtensionFactory"]
.venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (313 Bytes). View file
 
.venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/base.cpython-311.pyc ADDED
Binary file (4.33 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/permessage_deflate.cpython-311.pyc ADDED
Binary file (20.1 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/extensions/base.py ADDED
@@ -0,0 +1,123 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from collections.abc import Sequence
4
+
5
+ from ..frames import Frame
6
+ from ..typing import ExtensionName, ExtensionParameter
7
+
8
+
9
+ __all__ = ["Extension", "ClientExtensionFactory", "ServerExtensionFactory"]
10
+
11
+
12
+ class Extension:
13
+ """
14
+ Base class for extensions.
15
+
16
+ """
17
+
18
+ name: ExtensionName
19
+ """Extension identifier."""
20
+
21
+ def decode(self, frame: Frame, *, max_size: int | None = None) -> Frame:
22
+ """
23
+ Decode an incoming frame.
24
+
25
+ Args:
26
+ frame: Incoming frame.
27
+ max_size: Maximum payload size in bytes.
28
+
29
+ Returns:
30
+ Decoded frame.
31
+
32
+ Raises:
33
+ PayloadTooBig: If decoding the payload exceeds ``max_size``.
34
+
35
+ """
36
+ raise NotImplementedError
37
+
38
+ def encode(self, frame: Frame) -> Frame:
39
+ """
40
+ Encode an outgoing frame.
41
+
42
+ Args:
43
+ frame: Outgoing frame.
44
+
45
+ Returns:
46
+ Encoded frame.
47
+
48
+ """
49
+ raise NotImplementedError
50
+
51
+
52
+ class ClientExtensionFactory:
53
+ """
54
+ Base class for client-side extension factories.
55
+
56
+ """
57
+
58
+ name: ExtensionName
59
+ """Extension identifier."""
60
+
61
+ def get_request_params(self) -> list[ExtensionParameter]:
62
+ """
63
+ Build parameters to send to the server for this extension.
64
+
65
+ Returns:
66
+ Parameters to send to the server.
67
+
68
+ """
69
+ raise NotImplementedError
70
+
71
+ def process_response_params(
72
+ self,
73
+ params: Sequence[ExtensionParameter],
74
+ accepted_extensions: Sequence[Extension],
75
+ ) -> Extension:
76
+ """
77
+ Process parameters received from the server.
78
+
79
+ Args:
80
+ params: Parameters received from the server for this extension.
81
+ accepted_extensions: List of previously accepted extensions.
82
+
83
+ Returns:
84
+ An extension instance.
85
+
86
+ Raises:
87
+ NegotiationError: If parameters aren't acceptable.
88
+
89
+ """
90
+ raise NotImplementedError
91
+
92
+
93
+ class ServerExtensionFactory:
94
+ """
95
+ Base class for server-side extension factories.
96
+
97
+ """
98
+
99
+ name: ExtensionName
100
+ """Extension identifier."""
101
+
102
+ def process_request_params(
103
+ self,
104
+ params: Sequence[ExtensionParameter],
105
+ accepted_extensions: Sequence[Extension],
106
+ ) -> tuple[list[ExtensionParameter], Extension]:
107
+ """
108
+ Process parameters received from the client.
109
+
110
+ Args:
111
+ params: Parameters received from the client for this extension.
112
+ accepted_extensions: List of previously accepted extensions.
113
+
114
+ Returns:
115
+ To accept the offer, parameters to send to the client for this
116
+ extension and an extension instance.
117
+
118
+ Raises:
119
+ NegotiationError: To reject the offer, if parameters received from
120
+ the client aren't acceptable.
121
+
122
+ """
123
+ raise NotImplementedError
.venv/lib/python3.11/site-packages/websockets/extensions/permessage_deflate.py ADDED
@@ -0,0 +1,697 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import zlib
4
+ from collections.abc import Sequence
5
+ from typing import Any
6
+
7
+ from .. import frames
8
+ from ..exceptions import (
9
+ DuplicateParameter,
10
+ InvalidParameterName,
11
+ InvalidParameterValue,
12
+ NegotiationError,
13
+ PayloadTooBig,
14
+ ProtocolError,
15
+ )
16
+ from ..typing import ExtensionName, ExtensionParameter
17
+ from .base import ClientExtensionFactory, Extension, ServerExtensionFactory
18
+
19
+
20
+ __all__ = [
21
+ "PerMessageDeflate",
22
+ "ClientPerMessageDeflateFactory",
23
+ "enable_client_permessage_deflate",
24
+ "ServerPerMessageDeflateFactory",
25
+ "enable_server_permessage_deflate",
26
+ ]
27
+
28
+ _EMPTY_UNCOMPRESSED_BLOCK = b"\x00\x00\xff\xff"
29
+
30
+ _MAX_WINDOW_BITS_VALUES = [str(bits) for bits in range(8, 16)]
31
+
32
+
33
class PerMessageDeflate(Extension):
    """
    Per-Message Deflate extension.

    Compresses WebSocket data frames with the DEFLATE algorithm (RFC 7692).
    One instance serves one connection and holds a zlib context per
    direction; contexts are re-created per message when context takeover
    is disabled.

    """

    name = ExtensionName("permessage-deflate")

    def __init__(
        self,
        remote_no_context_takeover: bool,
        local_no_context_takeover: bool,
        remote_max_window_bits: int,
        local_max_window_bits: int,
        compress_settings: dict[Any, Any] | None = None,
    ) -> None:
        """
        Configure the Per-Message Deflate extension.

        Args:
            remote_no_context_takeover: Whether the remote endpoint resets
                its compression context after each message.
            local_no_context_takeover: Whether the local endpoint resets
                its compression context after each message.
            remote_max_window_bits: LZ77 window size for decompression,
                between 8 and 15.
            local_max_window_bits: LZ77 window size for compression,
                between 8 and 15.
            compress_settings: Additional keyword arguments for
                :func:`zlib.compressobj`, excluding ``wbits``.

        """
        if compress_settings is None:
            compress_settings = {}

        assert remote_no_context_takeover in [False, True]
        assert local_no_context_takeover in [False, True]
        assert 8 <= remote_max_window_bits <= 15
        assert 8 <= local_max_window_bits <= 15
        assert "wbits" not in compress_settings

        self.remote_no_context_takeover = remote_no_context_takeover
        self.local_no_context_takeover = local_no_context_takeover
        self.remote_max_window_bits = remote_max_window_bits
        self.local_max_window_bits = local_max_window_bits
        self.compress_settings = compress_settings

        # With context takeover, one zlib object per direction is shared
        # across messages; without it, decode()/encode() re-create it for
        # each message instead.
        if not self.remote_no_context_takeover:
            self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits)

        if not self.local_no_context_takeover:
            self.encoder = zlib.compressobj(
                wbits=-self.local_max_window_bits,
                **self.compress_settings,
            )

        # To handle continuation frames properly, we must keep track of
        # whether that initial frame was encoded.
        self.decode_cont_data = False
        # There's no need for self.encode_cont_data because we always encode
        # outgoing frames, so it would always be True.

    def __repr__(self) -> str:
        return (
            f"PerMessageDeflate("
            f"remote_no_context_takeover={self.remote_no_context_takeover}, "
            f"local_no_context_takeover={self.local_no_context_takeover}, "
            f"remote_max_window_bits={self.remote_max_window_bits}, "
            f"local_max_window_bits={self.local_max_window_bits})"
        )

    def decode(
        self,
        frame: frames.Frame,
        *,
        max_size: int | None = None,
    ) -> frames.Frame:
        """
        Decode an incoming frame.

        Args:
            frame: Frame received from the remote endpoint.
            max_size: Maximum size of the decompressed payload in bytes;
                :obj:`None` disables the limit.

        Returns:
            The frame with its payload decompressed and the rsv1 flag
            cleared, or the frame unchanged if it isn't compressed.

        Raises:
            PayloadTooBig: If the decompressed payload exceeds ``max_size``.
            ProtocolError: If zlib fails to decompress the payload.

        """
        # Skip control frames.
        if frame.opcode in frames.CTRL_OPCODES:
            return frame

        # Handle continuation data frames:
        # - skip if the message isn't encoded
        # - reset "decode continuation data" flag if it's a final frame
        if frame.opcode is frames.OP_CONT:
            if not self.decode_cont_data:
                return frame
            if frame.fin:
                self.decode_cont_data = False

        # Handle text and binary data frames:
        # - skip if the message isn't encoded
        # - unset the rsv1 flag on the first frame of a compressed message
        # - set "decode continuation data" flag if it's a non-final frame
        else:
            if not frame.rsv1:
                return frame
            if not frame.fin:
                self.decode_cont_data = True

            # Re-initialize per-message decoder.
            if self.remote_no_context_takeover:
                self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits)

        # Uncompress data. Protect against zip bombs by preventing zlib from
        # decompressing more than max_length bytes (except when the limit is
        # disabled with max_size = None).
        if frame.fin and len(frame.data) < 2044:
            # Profiling shows that appending four bytes, which makes a copy, is
            # faster than calling decompress() again when data is less than 2kB.
            data = bytes(frame.data) + _EMPTY_UNCOMPRESSED_BLOCK
        else:
            data = frame.data
        max_length = 0 if max_size is None else max_size
        try:
            data = self.decoder.decompress(data, max_length)
            if self.decoder.unconsumed_tail:
                assert max_size is not None  # help mypy
                raise PayloadTooBig(None, max_size)
            if frame.fin and len(frame.data) >= 2044:
                # This cannot generate additional data.
                self.decoder.decompress(_EMPTY_UNCOMPRESSED_BLOCK)
        except zlib.error as exc:
            raise ProtocolError("decompression failed") from exc

        # Allow garbage collection of the decoder if it won't be reused.
        if frame.fin and self.remote_no_context_takeover:
            del self.decoder

        return frames.Frame(
            frame.opcode,
            data,
            frame.fin,
            # Unset the rsv1 flag on the first frame of a compressed message.
            False,
            frame.rsv2,
            frame.rsv3,
        )

    def encode(self, frame: frames.Frame) -> frames.Frame:
        """
        Encode an outgoing frame.

        Args:
            frame: Frame to send to the remote endpoint.

        Returns:
            The frame with its payload compressed and, on the first frame
            of a message, the rsv1 flag set; control frames are returned
            unchanged.

        """
        # Skip control frames.
        if frame.opcode in frames.CTRL_OPCODES:
            return frame

        # Since we always encode messages, there's no "encode continuation
        # data" flag similar to "decode continuation data" at this time.

        if frame.opcode is not frames.OP_CONT:
            # Re-initialize per-message encoder.
            if self.local_no_context_takeover:
                self.encoder = zlib.compressobj(
                    wbits=-self.local_max_window_bits,
                    **self.compress_settings,
                )

        # Compress data.
        data = self.encoder.compress(frame.data) + self.encoder.flush(zlib.Z_SYNC_FLUSH)
        if frame.fin:
            # Sync flush generates between 5 or 6 bytes, ending with the bytes
            # 0x00 0x00 0xff 0xff, which must be removed.
            assert data[-4:] == _EMPTY_UNCOMPRESSED_BLOCK
            # Making a copy is faster than memoryview(a)[:-4] until 2kB.
            if len(data) < 2048:
                data = data[:-4]
            else:
                data = memoryview(data)[:-4]

        # Allow garbage collection of the encoder if it won't be reused.
        if frame.fin and self.local_no_context_takeover:
            del self.encoder

        return frames.Frame(
            frame.opcode,
            data,
            frame.fin,
            # Set the rsv1 flag on the first frame of a compressed message.
            frame.opcode is not frames.OP_CONT,
            frame.rsv2,
            frame.rsv3,
        )
209
+
210
+
211
+ def _build_parameters(
212
+ server_no_context_takeover: bool,
213
+ client_no_context_takeover: bool,
214
+ server_max_window_bits: int | None,
215
+ client_max_window_bits: int | bool | None,
216
+ ) -> list[ExtensionParameter]:
217
+ """
218
+ Build a list of ``(name, value)`` pairs for some compression parameters.
219
+
220
+ """
221
+ params: list[ExtensionParameter] = []
222
+ if server_no_context_takeover:
223
+ params.append(("server_no_context_takeover", None))
224
+ if client_no_context_takeover:
225
+ params.append(("client_no_context_takeover", None))
226
+ if server_max_window_bits:
227
+ params.append(("server_max_window_bits", str(server_max_window_bits)))
228
+ if client_max_window_bits is True: # only in handshake requests
229
+ params.append(("client_max_window_bits", None))
230
+ elif client_max_window_bits:
231
+ params.append(("client_max_window_bits", str(client_max_window_bits)))
232
+ return params
233
+
234
+
235
+ def _extract_parameters(
236
+ params: Sequence[ExtensionParameter], *, is_server: bool
237
+ ) -> tuple[bool, bool, int | None, int | bool | None]:
238
+ """
239
+ Extract compression parameters from a list of ``(name, value)`` pairs.
240
+
241
+ If ``is_server`` is :obj:`True`, ``client_max_window_bits`` may be
242
+ provided without a value. This is only allowed in handshake requests.
243
+
244
+ """
245
+ server_no_context_takeover: bool = False
246
+ client_no_context_takeover: bool = False
247
+ server_max_window_bits: int | None = None
248
+ client_max_window_bits: int | bool | None = None
249
+
250
+ for name, value in params:
251
+ if name == "server_no_context_takeover":
252
+ if server_no_context_takeover:
253
+ raise DuplicateParameter(name)
254
+ if value is None:
255
+ server_no_context_takeover = True
256
+ else:
257
+ raise InvalidParameterValue(name, value)
258
+
259
+ elif name == "client_no_context_takeover":
260
+ if client_no_context_takeover:
261
+ raise DuplicateParameter(name)
262
+ if value is None:
263
+ client_no_context_takeover = True
264
+ else:
265
+ raise InvalidParameterValue(name, value)
266
+
267
+ elif name == "server_max_window_bits":
268
+ if server_max_window_bits is not None:
269
+ raise DuplicateParameter(name)
270
+ if value in _MAX_WINDOW_BITS_VALUES:
271
+ server_max_window_bits = int(value)
272
+ else:
273
+ raise InvalidParameterValue(name, value)
274
+
275
+ elif name == "client_max_window_bits":
276
+ if client_max_window_bits is not None:
277
+ raise DuplicateParameter(name)
278
+ if is_server and value is None: # only in handshake requests
279
+ client_max_window_bits = True
280
+ elif value in _MAX_WINDOW_BITS_VALUES:
281
+ client_max_window_bits = int(value)
282
+ else:
283
+ raise InvalidParameterValue(name, value)
284
+
285
+ else:
286
+ raise InvalidParameterName(name)
287
+
288
+ return (
289
+ server_no_context_takeover,
290
+ client_no_context_takeover,
291
+ server_max_window_bits,
292
+ client_max_window_bits,
293
+ )
294
+
295
+
296
class ClientPerMessageDeflateFactory(ClientExtensionFactory):
    """
    Client-side extension factory for the Per-Message Deflate extension.

    Parameters behave as described in `section 7.1 of RFC 7692`_.

    .. _section 7.1 of RFC 7692: https://datatracker.ietf.org/doc/html/rfc7692#section-7.1

    Set them to :obj:`True` to include them in the negotiation offer without a
    value or to an integer value to include them with this value.

    Args:
        server_no_context_takeover: Prevent server from using context takeover.
        client_no_context_takeover: Prevent client from using context takeover.
        server_max_window_bits: Maximum size of the server's LZ77 sliding window
            in bits, between 8 and 15.
        client_max_window_bits: Maximum size of the client's LZ77 sliding window
            in bits, between 8 and 15, or :obj:`True` to indicate support without
            setting a limit.
        compress_settings: Additional keyword arguments for :func:`zlib.compressobj`,
            excluding ``wbits``.

    """

    name = ExtensionName("permessage-deflate")

    def __init__(
        self,
        server_no_context_takeover: bool = False,
        client_no_context_takeover: bool = False,
        server_max_window_bits: int | None = None,
        client_max_window_bits: int | bool | None = True,
        compress_settings: dict[str, Any] | None = None,
    ) -> None:
        """
        Configure the Per-Message Deflate extension factory.

        Raises:
            ValueError: If a window size is out of range or if
                ``compress_settings`` includes ``wbits``.

        """
        if not (server_max_window_bits is None or 8 <= server_max_window_bits <= 15):
            raise ValueError("server_max_window_bits must be between 8 and 15")
        if not (
            client_max_window_bits is None
            or client_max_window_bits is True
            or 8 <= client_max_window_bits <= 15
        ):
            raise ValueError("client_max_window_bits must be between 8 and 15")
        if compress_settings is not None and "wbits" in compress_settings:
            raise ValueError(
                "compress_settings must not include wbits, "
                "set client_max_window_bits instead"
            )

        self.server_no_context_takeover = server_no_context_takeover
        self.client_no_context_takeover = client_no_context_takeover
        self.server_max_window_bits = server_max_window_bits
        self.client_max_window_bits = client_max_window_bits
        self.compress_settings = compress_settings

    def get_request_params(self) -> list[ExtensionParameter]:
        """
        Build request parameters.

        Returns:
            Parameters to send to the server for this extension.

        """
        return _build_parameters(
            self.server_no_context_takeover,
            self.client_no_context_takeover,
            self.server_max_window_bits,
            self.client_max_window_bits,
        )

    def process_response_params(
        self,
        params: Sequence[ExtensionParameter],
        accepted_extensions: Sequence[Extension],
    ) -> PerMessageDeflate:
        """
        Process response parameters.

        Return an extension instance.

        Args:
            params: Parameters received from the server for this extension.
            accepted_extensions: List of previously accepted extensions.

        Raises:
            NegotiationError: If the server's response isn't compatible
                with the parameters sent in the request.

        """
        if any(other.name == self.name for other in accepted_extensions):
            raise NegotiationError(f"received duplicate {self.name}")

        # Request parameters are available in instance variables.

        # Load response parameters in local variables.
        (
            server_no_context_takeover,
            client_no_context_takeover,
            server_max_window_bits,
            client_max_window_bits,
        ) = _extract_parameters(params, is_server=False)

        # After comparing the request and the response, the final
        # configuration must be available in the local variables.

        # server_no_context_takeover
        #
        #   Req.    Resp.   Result
        #   ------  ------  --------------------------------------------------
        #   False   False   False
        #   False   True    True
        #   True    False   Error!
        #   True    True    True

        if self.server_no_context_takeover:
            if not server_no_context_takeover:
                raise NegotiationError("expected server_no_context_takeover")

        # client_no_context_takeover
        #
        #   Req.    Resp.   Result
        #   ------  ------  --------------------------------------------------
        #   False   False   False
        #   False   True    True
        #   True    False   True - must change value
        #   True    True    True

        if self.client_no_context_takeover:
            if not client_no_context_takeover:
                client_no_context_takeover = True

        # server_max_window_bits

        #   Req.    Resp.   Result
        #   ------  ------  --------------------------------------------------
        #   None    None    None
        #   None    8≤M≤15  M
        #   8≤N≤15  None    Error!
        #   8≤N≤15  8≤M≤N   M
        #   8≤N≤15  N<M≤15  Error!

        if self.server_max_window_bits is None:
            pass

        else:
            if server_max_window_bits is None:
                raise NegotiationError("expected server_max_window_bits")
            elif server_max_window_bits > self.server_max_window_bits:
                raise NegotiationError("unsupported server_max_window_bits")

        # client_max_window_bits

        #   Req.    Resp.   Result
        #   ------  ------  --------------------------------------------------
        #   None    None    None
        #   None    8≤M≤15  Error!
        #   True    None    None
        #   True    8≤M≤15  M
        #   8≤N≤15  None    N - must change value
        #   8≤N≤15  8≤M≤N   M
        #   8≤N≤15  N<M≤15  Error!

        if self.client_max_window_bits is None:
            if client_max_window_bits is not None:
                raise NegotiationError("unexpected client_max_window_bits")

        elif self.client_max_window_bits is True:
            pass

        else:
            if client_max_window_bits is None:
                client_max_window_bits = self.client_max_window_bits
            elif client_max_window_bits > self.client_max_window_bits:
                raise NegotiationError("unsupported client_max_window_bits")

        return PerMessageDeflate(
            server_no_context_takeover,  # remote_no_context_takeover
            client_no_context_takeover,  # local_no_context_takeover
            server_max_window_bits or 15,  # remote_max_window_bits
            client_max_window_bits or 15,  # local_max_window_bits
            self.compress_settings,
        )
470
+
471
+
472
def enable_client_permessage_deflate(
    extensions: Sequence[ClientExtensionFactory] | None,
) -> Sequence[ClientExtensionFactory]:
    """
    Enable Per-Message Deflate with default settings in client extensions.

    If the extension is already present, perhaps with non-default settings,
    the configuration isn't changed.

    """
    if extensions is None:
        extensions = []
    already_enabled = any(
        factory.name == ClientPerMessageDeflateFactory.name
        for factory in extensions
    )
    if not already_enabled:
        # memLevel=5 is a compromise between speed and memory use.
        default_factory = ClientPerMessageDeflateFactory(
            compress_settings={"memLevel": 5},
        )
        extensions = [*extensions, default_factory]
    return extensions
494
+
495
+
496
class ServerPerMessageDeflateFactory(ServerExtensionFactory):
    """
    Server-side extension factory for the Per-Message Deflate extension.

    Parameters behave as described in `section 7.1 of RFC 7692`_.

    .. _section 7.1 of RFC 7692: https://datatracker.ietf.org/doc/html/rfc7692#section-7.1

    Set them to :obj:`True` to include them in the negotiation offer without a
    value or to an integer value to include them with this value.

    Args:
        server_no_context_takeover: Prevent server from using context takeover.
        client_no_context_takeover: Prevent client from using context takeover.
        server_max_window_bits: Maximum size of the server's LZ77 sliding window
            in bits, between 8 and 15.
        client_max_window_bits: Maximum size of the client's LZ77 sliding window
            in bits, between 8 and 15.
        compress_settings: Additional keyword arguments for :func:`zlib.compressobj`,
            excluding ``wbits``.
        require_client_max_window_bits: Do not enable compression at all if
            client doesn't advertise support for ``client_max_window_bits``;
            the default behavior is to enable compression without enforcing
            ``client_max_window_bits``.

    """

    name = ExtensionName("permessage-deflate")

    def __init__(
        self,
        server_no_context_takeover: bool = False,
        client_no_context_takeover: bool = False,
        server_max_window_bits: int | None = None,
        client_max_window_bits: int | None = None,
        compress_settings: dict[str, Any] | None = None,
        require_client_max_window_bits: bool = False,
    ) -> None:
        """
        Configure the Per-Message Deflate extension factory.

        Raises:
            ValueError: If a window size is out of range, if
                ``compress_settings`` includes ``wbits``, or if
                ``require_client_max_window_bits`` is enabled without
                ``client_max_window_bits``.

        """
        if not (server_max_window_bits is None or 8 <= server_max_window_bits <= 15):
            raise ValueError("server_max_window_bits must be between 8 and 15")
        if not (client_max_window_bits is None or 8 <= client_max_window_bits <= 15):
            raise ValueError("client_max_window_bits must be between 8 and 15")
        if compress_settings is not None and "wbits" in compress_settings:
            raise ValueError(
                "compress_settings must not include wbits, "
                "set server_max_window_bits instead"
            )
        if client_max_window_bits is None and require_client_max_window_bits:
            raise ValueError(
                "require_client_max_window_bits is enabled, "
                "but client_max_window_bits isn't configured"
            )

        self.server_no_context_takeover = server_no_context_takeover
        self.client_no_context_takeover = client_no_context_takeover
        self.server_max_window_bits = server_max_window_bits
        self.client_max_window_bits = client_max_window_bits
        self.compress_settings = compress_settings
        self.require_client_max_window_bits = require_client_max_window_bits

    def process_request_params(
        self,
        params: Sequence[ExtensionParameter],
        accepted_extensions: Sequence[Extension],
    ) -> tuple[list[ExtensionParameter], PerMessageDeflate]:
        """
        Process request parameters.

        Return response params and an extension instance.

        Args:
            params: Parameters received from the client for this extension.
            accepted_extensions: List of previously accepted extensions.

        Raises:
            NegotiationError: If the client's offer can't be accepted with
                this factory's configuration.

        """
        if any(other.name == self.name for other in accepted_extensions):
            raise NegotiationError(f"skipped duplicate {self.name}")

        # Load request parameters in local variables.
        (
            server_no_context_takeover,
            client_no_context_takeover,
            server_max_window_bits,
            client_max_window_bits,
        ) = _extract_parameters(params, is_server=True)

        # Configuration parameters are available in instance variables.

        # After comparing the request and the configuration, the response must
        # be available in the local variables.

        # server_no_context_takeover
        #
        #   Config  Req.    Resp.
        #   ------  ------  --------------------------------------------------
        #   False   False   False
        #   False   True    True
        #   True    False   True - must change value to True
        #   True    True    True

        if self.server_no_context_takeover:
            if not server_no_context_takeover:
                server_no_context_takeover = True

        # client_no_context_takeover
        #
        #   Config  Req.    Resp.
        #   ------  ------  --------------------------------------------------
        #   False   False   False
        #   False   True    True (or False)
        #   True    False   True - must change value to True
        #   True    True    True (or False)

        if self.client_no_context_takeover:
            if not client_no_context_takeover:
                client_no_context_takeover = True

        # server_max_window_bits

        #   Config  Req.    Resp.
        #   ------  ------  --------------------------------------------------
        #   None    None    None
        #   None    8≤M≤15  M
        #   8≤N≤15  None    N - must change value
        #   8≤N≤15  8≤M≤N   M
        #   8≤N≤15  N<M≤15  N - must change value

        if self.server_max_window_bits is None:
            pass

        else:
            if server_max_window_bits is None:
                server_max_window_bits = self.server_max_window_bits
            elif server_max_window_bits > self.server_max_window_bits:
                server_max_window_bits = self.server_max_window_bits

        # client_max_window_bits

        #   Config  Req.    Resp.
        #   ------  ------  --------------------------------------------------
        #   None    None    None
        #   None    True    None - must change value
        #   None    8≤M≤15  M (or None)
        #   8≤N≤15  None    None or Error!
        #   8≤N≤15  True    N - must change value
        #   8≤N≤15  8≤M≤N   M (or None)
        #   8≤N≤15  N<M≤15  N

        if self.client_max_window_bits is None:
            if client_max_window_bits is True:
                client_max_window_bits = self.client_max_window_bits

        else:
            if client_max_window_bits is None:
                if self.require_client_max_window_bits:
                    raise NegotiationError("required client_max_window_bits")
            elif client_max_window_bits is True:
                client_max_window_bits = self.client_max_window_bits
            elif self.client_max_window_bits < client_max_window_bits:
                client_max_window_bits = self.client_max_window_bits

        return (
            _build_parameters(
                server_no_context_takeover,
                client_no_context_takeover,
                server_max_window_bits,
                client_max_window_bits,
            ),
            PerMessageDeflate(
                client_no_context_takeover,  # remote_no_context_takeover
                server_no_context_takeover,  # local_no_context_takeover
                client_max_window_bits or 15,  # remote_max_window_bits
                server_max_window_bits or 15,  # local_max_window_bits
                self.compress_settings,
            ),
        )
672
+
673
+
674
def enable_server_permessage_deflate(
    extensions: Sequence[ServerExtensionFactory] | None,
) -> Sequence[ServerExtensionFactory]:
    """
    Enable Per-Message Deflate with default settings in server extensions.

    If the extension is already present, perhaps with non-default settings,
    the configuration isn't changed.

    """
    if extensions is None:
        extensions = []
    already_enabled = any(
        factory.name == ServerPerMessageDeflateFactory.name
        for factory in extensions
    )
    if not already_enabled:
        # Conservative defaults: 12-bit windows and memLevel=5 keep the
        # per-connection memory footprint low.
        default_factory = ServerPerMessageDeflateFactory(
            server_max_window_bits=12,
            client_max_window_bits=12,
            compress_settings={"memLevel": 5},
        )
        extensions = [*extensions, default_factory]
    return extensions
.venv/lib/python3.11/site-packages/websockets/legacy/__init__.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
from __future__ import annotations

import warnings


# Importing this package (or any submodule) emits a DeprecationWarning
# pointing users at the upgrade guide.
warnings.warn(  # deprecated in 14.0 - 2024-11-09
    "websockets.legacy is deprecated; "
    "see https://websockets.readthedocs.io/en/stable/howto/upgrade.html "
    "for upgrade instructions",
    DeprecationWarning,
)
.venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (501 Bytes). View file
 
.venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/auth.cpython-311.pyc ADDED
Binary file (8.36 kB). View file
 
.venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/client.cpython-311.pyc ADDED
Binary file (29.5 kB). View file