ZTWHHH commited on
Commit
7df45f1
·
verified ·
1 Parent(s): 837e651

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +2 -0
  2. parrot/lib/python3.10/site-packages/_multiprocess/__init__.py +8 -0
  3. parrot/lib/python3.10/site-packages/_multiprocess/__pycache__/__init__.cpython-310.pyc +0 -0
  4. parrot/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so +3 -0
  5. parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/LICENSE +201 -0
  6. parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/RECORD +11 -0
  7. parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/REQUESTED +0 -0
  8. parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/WHEEL +5 -0
  9. parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/top_level.txt +1 -0
  10. parrot/lib/python3.10/site-packages/attr/__pycache__/_compat.cpython-310.pyc +0 -0
  11. parrot/lib/python3.10/site-packages/attr/__pycache__/_config.cpython-310.pyc +0 -0
  12. parrot/lib/python3.10/site-packages/attr/__pycache__/_funcs.cpython-310.pyc +0 -0
  13. parrot/lib/python3.10/site-packages/attr/__pycache__/_next_gen.cpython-310.pyc +0 -0
  14. parrot/lib/python3.10/site-packages/attr/__pycache__/_version_info.cpython-310.pyc +0 -0
  15. parrot/lib/python3.10/site-packages/attr/__pycache__/exceptions.cpython-310.pyc +0 -0
  16. parrot/lib/python3.10/site-packages/attr/__pycache__/setters.cpython-310.pyc +0 -0
  17. parrot/lib/python3.10/site-packages/attr/_config.py +31 -0
  18. parrot/lib/python3.10/site-packages/attr/_make.py +2960 -0
  19. parrot/lib/python3.10/site-packages/attr/filters.py +72 -0
  20. parrot/lib/python3.10/site-packages/attr/setters.py +79 -0
  21. parrot/lib/python3.10/site-packages/attr/setters.pyi +20 -0
  22. parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/INSTALLER +1 -0
  23. parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/METADATA +419 -0
  24. parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/RECORD +0 -0
  25. parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/REQUESTED +0 -0
  26. parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/WHEEL +5 -0
  27. parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/entry_points.txt +2 -0
  28. parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/top_level.txt +1 -0
  29. parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/INSTALLER +1 -0
  30. parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/LICENSE +21 -0
  31. parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/METADATA +18 -0
  32. parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/RECORD +13 -0
  33. parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/REQUESTED +0 -0
  34. parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/WHEEL +5 -0
  35. parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/top_level.txt +1 -0
  36. parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/LICENSE +21 -0
  37. parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/METADATA +0 -0
  38. parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/RECORD +643 -0
  39. parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/WHEEL +6 -0
  40. parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/top_level.txt +1 -0
  41. parrot/lib/python3.10/site-packages/h11/__init__.py +62 -0
  42. parrot/lib/python3.10/site-packages/h11/_abnf.py +132 -0
  43. parrot/lib/python3.10/site-packages/h11/_connection.py +633 -0
  44. parrot/lib/python3.10/site-packages/h11/_events.py +369 -0
  45. parrot/lib/python3.10/site-packages/h11/_headers.py +278 -0
  46. parrot/lib/python3.10/site-packages/h11/_readers.py +247 -0
  47. parrot/lib/python3.10/site-packages/h11/_receivebuffer.py +153 -0
  48. parrot/lib/python3.10/site-packages/h11/_util.py +135 -0
  49. parrot/lib/python3.10/site-packages/h11/_version.py +16 -0
  50. parrot/lib/python3.10/site-packages/h11/_writers.py +145 -0
.gitattributes CHANGED
@@ -122,3 +122,5 @@ parrot/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_compute.cpyth
122
  parrot/lib/python3.10/site-packages/pyarrow/_orc.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
123
  parrot/lib/libncurses.a filter=lfs diff=lfs merge=lfs -text
124
  parrot/lib/python3.10/site-packages/pyarrow/_s3fs.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
 
 
 
122
  parrot/lib/python3.10/site-packages/pyarrow/_orc.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
123
  parrot/lib/libncurses.a filter=lfs diff=lfs merge=lfs -text
124
  parrot/lib/python3.10/site-packages/pyarrow/_s3fs.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
125
+ parrot/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
126
+ parrot/lib/python3.10/site-packages/pyarrow/_acero.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
parrot/lib/python3.10/site-packages/_multiprocess/__init__.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/multiprocess/blob/master/LICENSE
7
+
8
+ from _multiprocessing import *
parrot/lib/python3.10/site-packages/_multiprocess/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (198 Bytes). View file
 
parrot/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:35769dee5631a7ebc7ee0db2404e68d7df9a298c7521bfc9bbf5ff2bd3aea355
3
+ size 414160
parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/LICENSE ADDED
@@ -0,0 +1,201 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "{}"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright 2013-2019 Nikolay Kim and Andrew Svetlov
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/RECORD ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ aiosignal-1.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2
+ aiosignal-1.3.1.dist-info/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332
3
+ aiosignal-1.3.1.dist-info/METADATA,sha256=c0HRnlYzfXKztZPTFDlPfygizTherhG5WdwXlvco0Ug,4008
4
+ aiosignal-1.3.1.dist-info/RECORD,,
5
+ aiosignal-1.3.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ aiosignal-1.3.1.dist-info/WHEEL,sha256=ZL1lC_LiPDNRgDnOl2taCMc83aPEUZgHHv2h-LDgdiM,92
7
+ aiosignal-1.3.1.dist-info/top_level.txt,sha256=z45aNOKGDdrI1roqZY3BGXQ22kJFPHBmVdwtLYLtXC0,10
8
+ aiosignal/__init__.py,sha256=zQNfFYRSd84bswvpFv8ZWjEr5DeYwV3LXbMSyo2222s,867
9
+ aiosignal/__init__.pyi,sha256=xeCddYSS8fZAkz8S4HuKSR2IDe3N7RW_LKcXDPPA1Xk,311
10
+ aiosignal/__pycache__/__init__.cpython-310.pyc,,
11
+ aiosignal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/REQUESTED ADDED
File without changes
parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: bdist_wheel (0.38.2)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ aiosignal
parrot/lib/python3.10/site-packages/attr/__pycache__/_compat.cpython-310.pyc ADDED
Binary file (2.59 kB). View file
 
parrot/lib/python3.10/site-packages/attr/__pycache__/_config.cpython-310.pyc ADDED
Binary file (987 Bytes). View file
 
parrot/lib/python3.10/site-packages/attr/__pycache__/_funcs.cpython-310.pyc ADDED
Binary file (12 kB). View file
 
parrot/lib/python3.10/site-packages/attr/__pycache__/_next_gen.cpython-310.pyc ADDED
Binary file (23.8 kB). View file
 
parrot/lib/python3.10/site-packages/attr/__pycache__/_version_info.cpython-310.pyc ADDED
Binary file (2.3 kB). View file
 
parrot/lib/python3.10/site-packages/attr/__pycache__/exceptions.cpython-310.pyc ADDED
Binary file (3.15 kB). View file
 
parrot/lib/python3.10/site-packages/attr/__pycache__/setters.cpython-310.pyc ADDED
Binary file (1.58 kB). View file
 
parrot/lib/python3.10/site-packages/attr/_config.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ __all__ = ["set_run_validators", "get_run_validators"]
4
+
5
+ _run_validators = True
6
+
7
+
8
+ def set_run_validators(run):
9
+ """
10
+ Set whether or not validators are run. By default, they are run.
11
+
12
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
13
+ moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
14
+ instead.
15
+ """
16
+ if not isinstance(run, bool):
17
+ msg = "'run' must be bool."
18
+ raise TypeError(msg)
19
+ global _run_validators
20
+ _run_validators = run
21
+
22
+
23
+ def get_run_validators():
24
+ """
25
+ Return whether or not validators are run.
26
+
27
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
28
+ moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
29
+ instead.
30
+ """
31
+ return _run_validators
parrot/lib/python3.10/site-packages/attr/_make.py ADDED
@@ -0,0 +1,2960 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ from __future__ import annotations
4
+
5
+ import abc
6
+ import contextlib
7
+ import copy
8
+ import enum
9
+ import functools
10
+ import inspect
11
+ import itertools
12
+ import linecache
13
+ import sys
14
+ import types
15
+ import typing
16
+
17
+ from operator import itemgetter
18
+
19
+ # We need to import _compat itself in addition to the _compat members to avoid
20
+ # having the thread-local in the globals here.
21
+ from . import _compat, _config, setters
22
+ from ._compat import (
23
+ PY_3_8_PLUS,
24
+ PY_3_10_PLUS,
25
+ PY_3_11_PLUS,
26
+ _AnnotationExtractor,
27
+ _get_annotations,
28
+ get_generic_base,
29
+ )
30
+ from .exceptions import (
31
+ DefaultAlreadySetError,
32
+ FrozenInstanceError,
33
+ NotAnAttrsClassError,
34
+ UnannotatedAttributeError,
35
+ )
36
+
37
+
38
+ # This is used at least twice, so cache it here.
39
+ _OBJ_SETATTR = object.__setattr__
40
+ _INIT_FACTORY_PAT = "__attr_factory_%s"
41
+ _CLASSVAR_PREFIXES = (
42
+ "typing.ClassVar",
43
+ "t.ClassVar",
44
+ "ClassVar",
45
+ "typing_extensions.ClassVar",
46
+ )
47
+ # we don't use a double-underscore prefix because that triggers
48
+ # name mangling when trying to create a slot for the field
49
+ # (when slots=True)
50
+ _HASH_CACHE_FIELD = "_attrs_cached_hash"
51
+
52
+ _EMPTY_METADATA_SINGLETON = types.MappingProxyType({})
53
+
54
+ # Unique object for unequivocal getattr() defaults.
55
+ _SENTINEL = object()
56
+
57
+ _DEFAULT_ON_SETATTR = setters.pipe(setters.convert, setters.validate)
58
+
59
+
60
+ class _Nothing(enum.Enum):
61
+ """
62
+ Sentinel to indicate the lack of a value when `None` is ambiguous.
63
+
64
+ If extending attrs, you can use ``typing.Literal[NOTHING]`` to show
65
+ that a value may be ``NOTHING``.
66
+
67
+ .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
68
+ .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant.
69
+ """
70
+
71
+ NOTHING = enum.auto()
72
+
73
+ def __repr__(self):
74
+ return "NOTHING"
75
+
76
+ def __bool__(self):
77
+ return False
78
+
79
+
80
+ NOTHING = _Nothing.NOTHING
81
+ """
82
+ Sentinel to indicate the lack of a value when `None` is ambiguous.
83
+ """
84
+
85
+
86
+ class _CacheHashWrapper(int):
87
+ """
88
+ An integer subclass that pickles / copies as None
89
+
90
+ This is used for non-slots classes with ``cache_hash=True``, to avoid
91
+ serializing a potentially (even likely) invalid hash value. Since `None`
92
+ is the default value for uncalculated hashes, whenever this is copied,
93
+ the copy's value for the hash should automatically reset.
94
+
95
+ See GH #613 for more details.
96
+ """
97
+
98
+ def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008
99
+ return _none_constructor, _args
100
+
101
+
102
+ def attrib(
103
+ default=NOTHING,
104
+ validator=None,
105
+ repr=True,
106
+ cmp=None,
107
+ hash=None,
108
+ init=True,
109
+ metadata=None,
110
+ type=None,
111
+ converter=None,
112
+ factory=None,
113
+ kw_only=False,
114
+ eq=None,
115
+ order=None,
116
+ on_setattr=None,
117
+ alias=None,
118
+ ):
119
+ """
120
+ Create a new field / attribute on a class.
121
+
122
+ Identical to `attrs.field`, except it's not keyword-only.
123
+
124
+ Consider using `attrs.field` in new code (``attr.ib`` will *never* go away,
125
+ though).
126
+
127
+ .. warning::
128
+
129
+ Does **nothing** unless the class is also decorated with
130
+ `attr.s` (or similar)!
131
+
132
+
133
+ .. versionadded:: 15.2.0 *convert*
134
+ .. versionadded:: 16.3.0 *metadata*
135
+ .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
136
+ .. versionchanged:: 17.1.0
137
+ *hash* is `None` and therefore mirrors *eq* by default.
138
+ .. versionadded:: 17.3.0 *type*
139
+ .. deprecated:: 17.4.0 *convert*
140
+ .. versionadded:: 17.4.0
141
+ *converter* as a replacement for the deprecated *convert* to achieve
142
+ consistency with other noun-based arguments.
143
+ .. versionadded:: 18.1.0
144
+ ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
145
+ .. versionadded:: 18.2.0 *kw_only*
146
+ .. versionchanged:: 19.2.0 *convert* keyword argument removed.
147
+ .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
148
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
149
+ .. versionadded:: 19.2.0 *eq* and *order*
150
+ .. versionadded:: 20.1.0 *on_setattr*
151
+ .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
152
+ .. versionchanged:: 21.1.0
153
+ *eq*, *order*, and *cmp* also accept a custom callable
154
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
155
+ .. versionadded:: 22.2.0 *alias*
156
+ """
157
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
158
+ cmp, eq, order, True
159
+ )
160
+
161
+ if hash is not None and hash is not True and hash is not False:
162
+ msg = "Invalid value for hash. Must be True, False, or None."
163
+ raise TypeError(msg)
164
+
165
+ if factory is not None:
166
+ if default is not NOTHING:
167
+ msg = (
168
+ "The `default` and `factory` arguments are mutually exclusive."
169
+ )
170
+ raise ValueError(msg)
171
+ if not callable(factory):
172
+ msg = "The `factory` argument must be a callable."
173
+ raise ValueError(msg)
174
+ default = Factory(factory)
175
+
176
+ if metadata is None:
177
+ metadata = {}
178
+
179
+ # Apply syntactic sugar by auto-wrapping.
180
+ if isinstance(on_setattr, (list, tuple)):
181
+ on_setattr = setters.pipe(*on_setattr)
182
+
183
+ if validator and isinstance(validator, (list, tuple)):
184
+ validator = and_(*validator)
185
+
186
+ if converter and isinstance(converter, (list, tuple)):
187
+ converter = pipe(*converter)
188
+
189
+ return _CountingAttr(
190
+ default=default,
191
+ validator=validator,
192
+ repr=repr,
193
+ cmp=None,
194
+ hash=hash,
195
+ init=init,
196
+ converter=converter,
197
+ metadata=metadata,
198
+ type=type,
199
+ kw_only=kw_only,
200
+ eq=eq,
201
+ eq_key=eq_key,
202
+ order=order,
203
+ order_key=order_key,
204
+ on_setattr=on_setattr,
205
+ alias=alias,
206
+ )
207
+
208
+
209
+ def _compile_and_eval(script, globs, locs=None, filename=""):
210
+ """
211
+ Evaluate the script with the given global (globs) and local (locs)
212
+ variables.
213
+ """
214
+ bytecode = compile(script, filename, "exec")
215
+ eval(bytecode, globs, locs)
216
+
217
+
218
def _make_method(name, script, filename, globs, locals=None):
    """
    Create the method with the script given and return the method object.

    :param name: Name of the function to extract from the executed *script*.
    :param script: Python source text that defines the method.
    :param filename: Fake filename registered in `linecache` so debuggers
        can show the generated source.
    :param globs: Globals the script is executed with.
    :param locals: Optional namespace the definition lands in (a fresh dict
        by default).
    """
    locs = {} if locals is None else locals

    # In order of debuggers like PDB being able to step through the code,
    # we add a fake linecache entry.
    count = 1
    base_filename = filename
    while True:
        linecache_tuple = (
            len(script),
            None,
            script.splitlines(True),
            filename,
        )
        # setdefault returns the existing entry if *filename* is already
        # cached; only a brand-new (or identical) entry lets us stop.
        old_val = linecache.cache.setdefault(filename, linecache_tuple)
        if old_val == linecache_tuple:
            break

        # Collision with a different script: derive a fresh filename by
        # appending a counter before the closing ">" and retry.
        filename = f"{base_filename[:-1]}-{count}>"
        count += 1

    _compile_and_eval(script, globs, locs, filename)

    # The executed script defined *name* inside locs; hand back the function.
    return locs[name]
245
+
246
+
247
def _make_attr_tuple_class(cls_name, attr_names):
    """
    Create a tuple subclass to hold `Attribute`s for an `attrs` class.

    The subclass is a bare tuple with properties for names.

        class MyClassAttributes(tuple):
            __slots__ = ()
            x = property(itemgetter(0))
    """
    attr_class_name = f"{cls_name}Attributes"
    body = [
        f"class {attr_class_name}(tuple):",
        "    __slots__ = ()",
    ]
    # Each attribute name becomes a read-only property returning the tuple
    # element at its position.
    body.extend(
        f"    {name} = _attrs_property(_attrs_itemgetter({idx}))"
        for idx, name in enumerate(attr_names)
    )
    # An empty class body still needs one statement.
    if not attr_names:
        body.append("    pass")

    globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
    eval(compile("\n".join(body), "", "exec"), globs)
    return globs[attr_class_name]
272
+
273
+
274
# Tuple class for extracted attributes from a class definition.
# `base_attrs` is a subset of `attrs`.
# Instances are plain tuples, so they unpack positionally and are also
# readable by name via the generated properties.
_Attributes = _make_attr_tuple_class(
    "_Attributes",
    [
        # all attributes to build dunder methods for
        "attrs",
        # attributes that have been inherited
        "base_attrs",
        # map inherited attributes to their originating classes
        "base_attrs_map",
    ],
)
287
+
288
+
289
def _is_class_var(annot):
    """
    Check whether *annot* is a typing.ClassVar.

    The string comparison hack is used to avoid evaluating all string
    annotations which would put attrs-based classes at a performance
    disadvantage compared to plain old classes.
    """
    text = str(annot)

    # A string annotation may itself be quoted -- strip one layer.
    quotes = ("'", '"')
    if text.startswith(quotes) and text.endswith(quotes):
        text = text[1:-1]

    return text.startswith(_CLASSVAR_PREFIXES)
304
+
305
+
306
+ def _has_own_attribute(cls, attrib_name):
307
+ """
308
+ Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
309
+ """
310
+ return attrib_name in cls.__dict__
311
+
312
+
313
def _collect_base_attrs(cls, taken_attr_names):
    """
    Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.

    :param cls: Class whose MRO (minus *cls* itself and ``object``) is
        scanned for ``__attrs_attrs__``.
    :param taken_attr_names: Names already defined on *cls* itself; those are
        never collected from bases.
    :return: Tuple of (ordered list of inherited attributes, mapping of
        attribute name to the base class it was taken from).
    """
    base_attrs = []
    base_attr_map = {}  # A dictionary of base attrs to their classes.

    # Traverse the MRO and collect attributes.
    for base_cls in reversed(cls.__mro__[1:-1]):
        for a in getattr(base_cls, "__attrs_attrs__", []):
            # Skip attributes the base itself inherited -- we'll pick them up
            # from their defining class instead.
            if a.inherited or a.name in taken_attr_names:
                continue

            a = a.evolve(inherited=True)  # noqa: PLW2901
            base_attrs.append(a)
            base_attr_map[a.name] = base_cls

    # For each name, only keep the freshest definition i.e. the furthest at the
    # back. base_attr_map is fine because it gets overwritten with every new
    # instance.
    filtered = []
    seen = set()
    for a in reversed(base_attrs):
        if a.name in seen:
            continue
        filtered.insert(0, a)
        seen.add(a.name)

    return filtered, base_attr_map
342
+
343
+
344
def _collect_base_attrs_broken(cls, taken_attr_names):
    """
    Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.

    N.B. *taken_attr_names* will be mutated.

    Adhere to the old incorrect behavior.

    Notably it collects from the front and considers inherited attributes which
    leads to the buggy behavior reported in #428.
    """
    collected = []
    origin_of = {}  # Maps attribute names to the base class supplying them.

    # Walk the MRO front-to-back (the historical, buggy order) and keep the
    # first occurrence of every name not already taken.
    for base in cls.__mro__[1:-1]:
        for attr in getattr(base, "__attrs_attrs__", []):
            if attr.name in taken_attr_names:
                continue

            inherited_attr = attr.evolve(inherited=True)
            taken_attr_names.add(inherited_attr.name)
            collected.append(inherited_attr)
            origin_of[inherited_attr.name] = base

    return collected, origin_of
370
+
371
+
372
def _transform_attrs(
    cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
):
    """
    Transform all `_CountingAttr`s on a class into `Attribute`s.

    If *these* is passed, use that and don't look for them on the class.

    If *collect_by_mro* is True, collect them in the correct MRO order,
    otherwise use the old -- incorrect -- order. See #428.

    Return an `_Attributes`.

    :raises UnannotatedAttributeError: In *auto_attribs* mode, if an
        ``attr.ib`` on the class lacks a type annotation.
    :raises ValueError: If a mandatory attribute follows one with a default.
    """
    cd = cls.__dict__
    anns = _get_annotations(cls)

    if these is not None:
        # Caller supplied the attribute definitions explicitly.
        ca_list = list(these.items())
    elif auto_attribs is True:
        # Annotation-driven collection: every annotated name (except
        # ClassVars) becomes an attribute, in annotation order.
        ca_names = {
            name
            for name, attr in cd.items()
            if isinstance(attr, _CountingAttr)
        }
        ca_list = []
        annot_names = set()
        for attr_name, type in anns.items():
            if _is_class_var(type):
                continue
            annot_names.add(attr_name)
            a = cd.get(attr_name, NOTHING)

            if not isinstance(a, _CountingAttr):
                # A plain value assigned to an annotated name acts as the
                # default; no value at all means a bare attrib().
                a = attrib() if a is NOTHING else attrib(default=a)
            ca_list.append((attr_name, a))

        # Any attr.ib() without an annotation is an error in this mode.
        unannotated = ca_names - annot_names
        if len(unannotated) > 0:
            raise UnannotatedAttributeError(
                "The following `attr.ib`s lack a type annotation: "
                + ", ".join(
                    sorted(unannotated, key=lambda n: cd.get(n).counter)
                )
                + "."
            )
    else:
        # Classic mode: collect attr.ib()s in their definition order, which
        # the per-instance counter records.
        ca_list = sorted(
            (
                (name, attr)
                for name, attr in cd.items()
                if isinstance(attr, _CountingAttr)
            ),
            key=lambda e: e[1].counter,
        )

    own_attrs = [
        Attribute.from_counting_attr(
            name=attr_name, ca=ca, type=anns.get(attr_name)
        )
        for attr_name, ca in ca_list
    ]

    if collect_by_mro:
        base_attrs, base_attr_map = _collect_base_attrs(
            cls, {a.name for a in own_attrs}
        )
    else:
        base_attrs, base_attr_map = _collect_base_attrs_broken(
            cls, {a.name for a in own_attrs}
        )

    if kw_only:
        own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
        base_attrs = [a.evolve(kw_only=True) for a in base_attrs]

    attrs = base_attrs + own_attrs

    # Mandatory vs non-mandatory attr order only matters when they are part of
    # the __init__ signature and when they aren't kw_only (which are moved to
    # the end and can be mandatory or non-mandatory in any order, as they will
    # be specified as keyword args anyway). Check the order of those attrs:
    had_default = False
    for a in (a for a in attrs if a.init is not False and a.kw_only is False):
        if had_default is True and a.default is NOTHING:
            msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}"
            raise ValueError(msg)

        if had_default is False and a.default is not NOTHING:
            had_default = True

    if field_transformer is not None:
        attrs = field_transformer(cls, attrs)

    # Resolve default field alias after executing field_transformer.
    # This allows field_transformer to differentiate between explicit vs
    # default aliases and supply their own defaults.
    attrs = [
        a.evolve(alias=_default_init_alias_for(a.name)) if not a.alias else a
        for a in attrs
    ]

    # Create AttrsClass *after* applying the field_transformer since it may
    # add or remove attributes!
    attr_names = [a.name for a in attrs]
    AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)

    return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map))
479
+
480
+
481
def _make_cached_property_getattr(cached_properties, original_getattr, cls):
    """
    Build a ``__getattr__`` for slotted classes that computes
    `functools.cached_property` values on first access and stores them via
    ``object.__setattr__`` (slotted instances have no ``__dict__`` for the
    stdlib descriptor to cache into).

    :param cached_properties: Mapping of attribute name to the underlying
        property function.
    :param original_getattr: The class's pre-existing ``__getattr__`` (or
        None); it handles non-cached-property lookups when present.
    :param cls: The original class; placed in the generated closure so the
        no-arg ``super()`` works, and later swapped for the rebuilt class.
    """
    lines = [
        # Wrapped to get `__class__` into closure cell for super()
        # (It will be replaced with the newly constructed class after construction).
        "def wrapper(_cls):",
        "    __class__ = _cls",
        "    def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):",
        "        func = cached_properties.get(item)",
        "        if func is not None:",
        "            result = func(self)",
        "            _setter = _cached_setattr_get(self)",
        "            _setter(item, result)",
        "            return result",
    ]
    if original_getattr is not None:
        lines.append(
            "        return original_getattr(self, item)",
        )
    else:
        # NOTE(review): the final two generated lines look unreachable --
        # every path through the try/except above returns or raises.
        # Confirm against upstream before relying on them.
        lines.extend(
            [
                "        try:",
                "            return super().__getattribute__(item)",
                "        except AttributeError:",
                "            if not hasattr(super(), '__getattr__'):",
                "                raise",
                "            return super().__getattr__(item)",
                "        original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"",
                "        raise AttributeError(original_error)",
            ]
        )

    lines.extend(
        [
            "    return __getattr__",
            "__getattr__ = wrapper(_cls)",
        ]
    )

    unique_filename = _generate_unique_filename(cls, "getattr")

    glob = {
        "cached_properties": cached_properties,
        "_cached_setattr_get": _OBJ_SETATTR.__get__,
        "original_getattr": original_getattr,
    }

    return _make_method(
        "__getattr__",
        "\n".join(lines),
        unique_filename,
        glob,
        locals={
            "_cls": cls,
        },
    )
537
+
538
+
539
+ def _frozen_setattrs(self, name, value):
540
+ """
541
+ Attached to frozen classes as __setattr__.
542
+ """
543
+ if isinstance(self, BaseException) and name in (
544
+ "__cause__",
545
+ "__context__",
546
+ "__traceback__",
547
+ ):
548
+ BaseException.__setattr__(self, name, value)
549
+ return
550
+
551
+ raise FrozenInstanceError()
552
+
553
+
554
def _frozen_delattrs(self, name):
    """
    Attached to frozen classes as __delattr__.

    Unconditionally refuses attribute deletion on frozen instances.
    """
    raise FrozenInstanceError()
559
+
560
+
561
class _ClassBuilder:
    """
    Iteratively build *one* class.

    `attrs`/`attr.s` creates one builder per decorated class, calls the
    ``add_*``/``make_*`` methods for each requested feature, and finishes
    with `build_class`.
    """

    __slots__ = (
        "_attr_names",
        "_attrs",
        "_base_attr_map",
        "_base_names",
        "_cache_hash",
        "_cls",
        "_cls_dict",
        "_delete_attribs",
        "_frozen",
        "_has_pre_init",
        "_pre_init_has_args",
        "_has_post_init",
        "_is_exc",
        "_on_setattr",
        "_slots",
        "_weakref_slot",
        "_wrote_own_setattr",
        "_has_custom_setattr",
    )

    def __init__(
        self,
        cls,
        these,
        slots,
        frozen,
        weakref_slot,
        getstate_setstate,
        auto_attribs,
        kw_only,
        cache_hash,
        is_exc,
        collect_by_mro,
        on_setattr,
        has_custom_setattr,
        field_transformer,
    ):
        attrs, base_attrs, base_map = _transform_attrs(
            cls,
            these,
            auto_attribs,
            kw_only,
            collect_by_mro,
            field_transformer,
        )

        self._cls = cls
        # Slotted classes are rebuilt from scratch, so start from the full
        # class dict; otherwise only accumulate the methods we add.
        self._cls_dict = dict(cls.__dict__) if slots else {}
        self._attrs = attrs
        self._base_names = {a.name for a in base_attrs}
        self._base_attr_map = base_map
        self._attr_names = tuple(a.name for a in attrs)
        self._slots = slots
        self._frozen = frozen
        self._weakref_slot = weakref_slot
        self._cache_hash = cache_hash
        self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
        self._pre_init_has_args = False
        if self._has_pre_init:
            # Check if the pre init method has more arguments than just `self`
            # We want to pass arguments if pre init expects arguments
            pre_init_func = cls.__attrs_pre_init__
            pre_init_signature = inspect.signature(pre_init_func)
            self._pre_init_has_args = len(pre_init_signature.parameters) > 1
        self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
        self._delete_attribs = not bool(these)
        self._is_exc = is_exc
        self._on_setattr = on_setattr

        self._has_custom_setattr = has_custom_setattr
        self._wrote_own_setattr = False

        self._cls_dict["__attrs_attrs__"] = self._attrs

        if frozen:
            self._cls_dict["__setattr__"] = _frozen_setattrs
            self._cls_dict["__delattr__"] = _frozen_delattrs

            self._wrote_own_setattr = True
        elif on_setattr in (
            _DEFAULT_ON_SETATTR,
            setters.validate,
            setters.convert,
        ):
            has_validator = has_converter = False
            for a in attrs:
                if a.validator is not None:
                    has_validator = True
                if a.converter is not None:
                    has_converter = True

                if has_validator and has_converter:
                    break
            if (
                (
                    on_setattr == _DEFAULT_ON_SETATTR
                    and not (has_validator or has_converter)
                )
                or (on_setattr == setters.validate and not has_validator)
                or (on_setattr == setters.convert and not has_converter)
            ):
                # If class-level on_setattr is set to convert + validate, but
                # there's no field to convert or validate, pretend like there's
                # no on_setattr.
                self._on_setattr = None

        if getstate_setstate:
            (
                self._cls_dict["__getstate__"],
                self._cls_dict["__setstate__"],
            ) = self._make_getstate_setstate()

    def __repr__(self):
        return f"<_ClassBuilder(cls={self._cls.__name__})>"

    def build_class(self):
        """
        Finalize class based on the accumulated configuration.

        Builder cannot be used after calling this method.
        """
        if self._slots is True:
            cls = self._create_slots_class()
        else:
            cls = self._patch_original_class()
            if PY_3_10_PLUS:
                cls = abc.update_abstractmethods(cls)

        # The method gets only called if it's not inherited from a base class.
        # _has_own_attribute does NOT work properly for classmethods.
        if (
            getattr(cls, "__attrs_init_subclass__", None)
            and "__attrs_init_subclass__" not in cls.__dict__
        ):
            cls.__attrs_init_subclass__()

        return cls

    def _patch_original_class(self):
        """
        Apply accumulated methods and return the class.
        """
        cls = self._cls
        base_names = self._base_names

        # Clean class of attribute definitions (`attr.ib()`s).
        if self._delete_attribs:
            for name in self._attr_names:
                if (
                    name not in base_names
                    and getattr(cls, name, _SENTINEL) is not _SENTINEL
                ):
                    # An AttributeError can happen if a base class defines a
                    # class variable and we want to set an attribute with the
                    # same name by using only a type annotation.
                    with contextlib.suppress(AttributeError):
                        delattr(cls, name)

        # Attach our dunder methods.
        for name, value in self._cls_dict.items():
            setattr(cls, name, value)

        # If we've inherited an attrs __setattr__ and don't write our own,
        # reset it to object's.
        if not self._wrote_own_setattr and getattr(
            cls, "__attrs_own_setattr__", False
        ):
            cls.__attrs_own_setattr__ = False

            if not self._has_custom_setattr:
                cls.__setattr__ = _OBJ_SETATTR

        return cls

    def _create_slots_class(self):
        """
        Build and return a new class with a `__slots__` attribute.
        """
        # Drop the attribute definitions and the dict/weakref descriptors;
        # the rebuilt class must not carry a per-instance __dict__.
        cd = {
            k: v
            for k, v in self._cls_dict.items()
            if k not in (*tuple(self._attr_names), "__dict__", "__weakref__")
        }

        # If our class doesn't have its own implementation of __setattr__
        # (either from the user or by us), check the bases, if one of them has
        # an attrs-made __setattr__, that needs to be reset. We don't walk the
        # MRO because we only care about our immediate base classes.
        # XXX: This can be confused by subclassing a slotted attrs class with
        # XXX: a non-attrs class and subclass the resulting class with an attrs
        # XXX: class. See `test_slotted_confused` for details. For now that's
        # XXX: OK with us.
        if not self._wrote_own_setattr:
            cd["__attrs_own_setattr__"] = False

            if not self._has_custom_setattr:
                for base_cls in self._cls.__bases__:
                    if base_cls.__dict__.get("__attrs_own_setattr__", False):
                        cd["__setattr__"] = _OBJ_SETATTR
                        break

        # Traverse the MRO to collect existing slots
        # and check for an existing __weakref__.
        existing_slots = {}
        weakref_inherited = False
        for base_cls in self._cls.__mro__[1:-1]:
            if base_cls.__dict__.get("__weakref__", None) is not None:
                weakref_inherited = True
            existing_slots.update(
                {
                    name: getattr(base_cls, name)
                    for name in getattr(base_cls, "__slots__", [])
                }
            )

        base_names = set(self._base_names)

        names = self._attr_names
        if (
            self._weakref_slot
            and "__weakref__" not in getattr(self._cls, "__slots__", ())
            and "__weakref__" not in names
            and not weakref_inherited
        ):
            names += ("__weakref__",)

        if PY_3_8_PLUS:
            cached_properties = {
                name: cached_property.func
                for name, cached_property in cd.items()
                if isinstance(cached_property, functools.cached_property)
            }
        else:
            # `functools.cached_property` was introduced in 3.8.
            # So can't be used before this.
            cached_properties = {}

        # Collect methods with a `__class__` reference that are shadowed in the new class.
        # To know to update them.
        additional_closure_functions_to_update = []
        if cached_properties:
            class_annotations = _get_annotations(self._cls)
            for name, func in cached_properties.items():
                # Add cached properties to names for slotting.
                names += (name,)
                # Clear out function from class to avoid clashing.
                del cd[name]
                additional_closure_functions_to_update.append(func)
                annotation = inspect.signature(func).return_annotation
                if annotation is not inspect.Parameter.empty:
                    class_annotations[name] = annotation

            original_getattr = cd.get("__getattr__")
            if original_getattr is not None:
                additional_closure_functions_to_update.append(original_getattr)

            cd["__getattr__"] = _make_cached_property_getattr(
                cached_properties, original_getattr, self._cls
            )

        # We only add the names of attributes that aren't inherited.
        # Setting __slots__ to inherited attributes wastes memory.
        slot_names = [name for name in names if name not in base_names]

        # There are slots for attributes from current class
        # that are defined in parent classes.
        # As their descriptors may be overridden by a child class,
        # we collect them here and update the class dict
        reused_slots = {
            slot: slot_descriptor
            for slot, slot_descriptor in existing_slots.items()
            if slot in slot_names
        }
        slot_names = [name for name in slot_names if name not in reused_slots]
        cd.update(reused_slots)
        if self._cache_hash:
            slot_names.append(_HASH_CACHE_FIELD)

        cd["__slots__"] = tuple(slot_names)

        cd["__qualname__"] = self._cls.__qualname__

        # Create new class based on old class and our methods.
        cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)

        # The following is a fix for
        # <https://github.com/python-attrs/attrs/issues/102>.
        # If a method mentions `__class__` or uses the no-arg super(), the
        # compiler will bake a reference to the class in the method itself
        # as `method.__closure__`. Since we replace the class with a
        # clone, we rewrite these references so it keeps working.
        for item in itertools.chain(
            cls.__dict__.values(), additional_closure_functions_to_update
        ):
            if isinstance(item, (classmethod, staticmethod)):
                # Class- and staticmethods hide their functions inside.
                # These might need to be rewritten as well.
                closure_cells = getattr(item.__func__, "__closure__", None)
            elif isinstance(item, property):
                # Workaround for property `super()` shortcut (PY3-only).
                # There is no universal way for other descriptors.
                closure_cells = getattr(item.fget, "__closure__", None)
            else:
                closure_cells = getattr(item, "__closure__", None)

            if not closure_cells:  # Catch None or the empty list.
                continue
            for cell in closure_cells:
                try:
                    match = cell.cell_contents is self._cls
                except ValueError:  # noqa: PERF203
                    # ValueError: Cell is empty
                    pass
                else:
                    if match:
                        cell.cell_contents = cls
        return cls

    def add_repr(self, ns):
        """
        Add a generated ``__repr__`` (optionally namespaced by *ns*).
        """
        self._cls_dict["__repr__"] = self._add_method_dunders(
            _make_repr(self._attrs, ns, self._cls)
        )
        return self

    def add_str(self):
        """
        Add a ``__str__`` that delegates to ``__repr__``.

        :raises ValueError: If no ``__repr__`` has been added first.
        """
        repr = self._cls_dict.get("__repr__")
        if repr is None:
            msg = "__str__ can only be generated if a __repr__ exists."
            raise ValueError(msg)

        def __str__(self):
            return self.__repr__()

        self._cls_dict["__str__"] = self._add_method_dunders(__str__)
        return self

    def _make_getstate_setstate(self):
        """
        Create custom __setstate__ and __getstate__ methods.
        """
        # __weakref__ is not writable.
        state_attr_names = tuple(
            an for an in self._attr_names if an != "__weakref__"
        )

        def slots_getstate(self):
            """
            Automatically created by attrs.
            """
            return {name: getattr(self, name) for name in state_attr_names}

        hash_caching_enabled = self._cache_hash

        def slots_setstate(self, state):
            """
            Automatically created by attrs.
            """
            __bound_setattr = _OBJ_SETATTR.__get__(self)
            if isinstance(state, tuple):
                # Backward compatibility with attrs instances pickled with
                # attrs versions before v22.2.0 which stored tuples.
                for name, value in zip(state_attr_names, state):
                    __bound_setattr(name, value)
            else:
                for name in state_attr_names:
                    if name in state:
                        __bound_setattr(name, state[name])

            # The hash code cache is not included when the object is
            # serialized, but it still needs to be initialized to None to
            # indicate that the first call to __hash__ should be a cache
            # miss.
            if hash_caching_enabled:
                __bound_setattr(_HASH_CACHE_FIELD, None)

        return slots_getstate, slots_setstate

    def make_unhashable(self):
        """
        Mark the class unhashable by setting ``__hash__`` to None.
        """
        self._cls_dict["__hash__"] = None
        return self

    def add_hash(self):
        """
        Add a generated ``__hash__``.
        """
        self._cls_dict["__hash__"] = self._add_method_dunders(
            _make_hash(
                self._cls,
                self._attrs,
                frozen=self._frozen,
                cache_hash=self._cache_hash,
            )
        )

        return self

    def add_init(self):
        """
        Add a generated ``__init__``.
        """
        self._cls_dict["__init__"] = self._add_method_dunders(
            _make_init(
                self._cls,
                self._attrs,
                self._has_pre_init,
                self._pre_init_has_args,
                self._has_post_init,
                self._frozen,
                self._slots,
                self._cache_hash,
                self._base_attr_map,
                self._is_exc,
                self._on_setattr,
                attrs_init=False,
            )
        )

        return self

    def add_match_args(self):
        """
        Set ``__match_args__`` to the positional (init, non-kw-only) fields.
        """
        self._cls_dict["__match_args__"] = tuple(
            field.name
            for field in self._attrs
            if field.init and not field.kw_only
        )

    def add_attrs_init(self):
        """
        Add a generated ``__attrs_init__`` method.
        """
        self._cls_dict["__attrs_init__"] = self._add_method_dunders(
            _make_init(
                self._cls,
                self._attrs,
                self._has_pre_init,
                self._pre_init_has_args,
                self._has_post_init,
                self._frozen,
                self._slots,
                self._cache_hash,
                self._base_attr_map,
                self._is_exc,
                self._on_setattr,
                attrs_init=True,
            )
        )

        return self

    def add_eq(self):
        """
        Add generated ``__eq__`` and ``__ne__``.
        """
        cd = self._cls_dict

        cd["__eq__"] = self._add_method_dunders(
            _make_eq(self._cls, self._attrs)
        )
        cd["__ne__"] = self._add_method_dunders(_make_ne())

        return self

    def add_order(self):
        """
        Add generated ordering dunders (lt/le/gt/ge).
        """
        cd = self._cls_dict

        cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
            self._add_method_dunders(meth)
            for meth in _make_order(self._cls, self._attrs)
        )

        return self

    def add_setattr(self):
        """
        Install a ``__setattr__`` that runs per-field ``on_setattr`` hooks,
        if any field (or the class) requests one.
        """
        if self._frozen:
            return self

        sa_attrs = {}
        for a in self._attrs:
            on_setattr = a.on_setattr or self._on_setattr
            if on_setattr and on_setattr is not setters.NO_OP:
                sa_attrs[a.name] = a, on_setattr

        if not sa_attrs:
            return self

        if self._has_custom_setattr:
            # We need to write a __setattr__ but there already is one!
            msg = "Can't combine custom __setattr__ with on_setattr hooks."
            raise ValueError(msg)

        # docstring comes from _add_method_dunders
        def __setattr__(self, name, val):
            try:
                a, hook = sa_attrs[name]
            except KeyError:
                nval = val
            else:
                nval = hook(self, a, val)

            _OBJ_SETATTR(self, name, nval)

        self._cls_dict["__attrs_own_setattr__"] = True
        self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
        self._wrote_own_setattr = True

        return self

    def _add_method_dunders(self, method):
        """
        Add __module__ and __qualname__ to a *method* if possible.
        """
        with contextlib.suppress(AttributeError):
            method.__module__ = self._cls.__module__

        with contextlib.suppress(AttributeError):
            method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}"

        with contextlib.suppress(AttributeError):
            method.__doc__ = (
                "Method generated by attrs for class "
                f"{self._cls.__qualname__}."
            )

        return method
1079
+
1080
+
1081
+ def _determine_attrs_eq_order(cmp, eq, order, default_eq):
1082
+ """
1083
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
1084
+ values of eq and order. If *eq* is None, set it to *default_eq*.
1085
+ """
1086
+ if cmp is not None and any((eq is not None, order is not None)):
1087
+ msg = "Don't mix `cmp` with `eq' and `order`."
1088
+ raise ValueError(msg)
1089
+
1090
+ # cmp takes precedence due to bw-compatibility.
1091
+ if cmp is not None:
1092
+ return cmp, cmp
1093
+
1094
+ # If left None, equality is set to the specified default and ordering
1095
+ # mirrors equality.
1096
+ if eq is None:
1097
+ eq = default_eq
1098
+
1099
+ if order is None:
1100
+ order = eq
1101
+
1102
+ if eq is False and order is True:
1103
+ msg = "`order` can only be True if `eq` is True too."
1104
+ raise ValueError(msg)
1105
+
1106
+ return eq, order
1107
+
1108
+
1109
+ def _determine_attrib_eq_order(cmp, eq, order, default_eq):
1110
+ """
1111
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
1112
+ values of eq and order. If *eq* is None, set it to *default_eq*.
1113
+ """
1114
+ if cmp is not None and any((eq is not None, order is not None)):
1115
+ msg = "Don't mix `cmp` with `eq' and `order`."
1116
+ raise ValueError(msg)
1117
+
1118
+ def decide_callable_or_boolean(value):
1119
+ """
1120
+ Decide whether a key function is used.
1121
+ """
1122
+ if callable(value):
1123
+ value, key = True, value
1124
+ else:
1125
+ key = None
1126
+ return value, key
1127
+
1128
+ # cmp takes precedence due to bw-compatibility.
1129
+ if cmp is not None:
1130
+ cmp, cmp_key = decide_callable_or_boolean(cmp)
1131
+ return cmp, cmp_key, cmp, cmp_key
1132
+
1133
+ # If left None, equality is set to the specified default and ordering
1134
+ # mirrors equality.
1135
+ if eq is None:
1136
+ eq, eq_key = default_eq, None
1137
+ else:
1138
+ eq, eq_key = decide_callable_or_boolean(eq)
1139
+
1140
+ if order is None:
1141
+ order, order_key = eq, eq_key
1142
+ else:
1143
+ order, order_key = decide_callable_or_boolean(order)
1144
+
1145
+ if eq is False and order is True:
1146
+ msg = "`order` can only be True if `eq` is True too."
1147
+ raise ValueError(msg)
1148
+
1149
+ return eq, eq_key, order, order_key
1150
+
1151
+
1152
def _determine_whether_to_implement(
    cls, flag, auto_detect, dunders, default=True
):
    """
    Check whether we should implement a set of methods for *cls*.

    *flag* is the argument passed into @attr.s like 'init', *auto_detect* the
    same as passed into @attr.s and *dunders* is a tuple of attribute names
    whose presence signal that the user has implemented it themselves.

    Return *default* if no reason for either for or against is found.
    """
    # An explicit boolean always wins (identity checks, so 1/0 don't count).
    if flag is True or flag is False:
        return flag

    if flag is None and auto_detect is False:
        return default

    # Logically, flag is None and auto_detect is True here: any of the
    # dunders defined directly on the class disables generation.
    if any(dunder in cls.__dict__ for dunder in dunders):
        return False

    return default
1176
+
1177
+
1178
def attrs(
    maybe_cls=None,
    these=None,
    repr_ns=None,
    repr=None,
    cmp=None,
    hash=None,
    init=None,
    slots=False,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=False,
    kw_only=False,
    cache_hash=False,
    auto_exc=False,
    eq=None,
    order=None,
    auto_detect=False,
    collect_by_mro=False,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
    match_args=True,
    unsafe_hash=None,
):
    r"""
    A class decorator that adds :term:`dunder methods` according to the
    specified attributes using `attr.ib` or the *these* argument.

    Consider using `attrs.define` / `attrs.frozen` in new code (``attr.s`` will
    *never* go away, though).

    Args:
        repr_ns (str):
            When using nested classes, there was no way in Python 2 to
            automatically detect that. This argument allows to set a custom
            name for a more meaningful ``repr`` output. This argument is
            pointless in Python 3 and is therefore deprecated.

    .. caution::
        Refer to `attrs.define` for the rest of the parameters, but note that they
        can have different defaults.

        Notably, leaving *on_setattr* as `None` will **not** add any hooks.

    .. versionadded:: 16.0.0 *slots*
    .. versionadded:: 16.1.0 *frozen*
    .. versionadded:: 16.3.0 *str*
    .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
    .. versionchanged:: 17.1.0
       *hash* supports `None` as value which is also the default now.
    .. versionadded:: 17.3.0 *auto_attribs*
    .. versionchanged:: 18.1.0
       If *these* is passed, no attributes are deleted from the class body.
    .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
    .. versionadded:: 18.2.0 *weakref_slot*
    .. deprecated:: 18.2.0
       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
       `DeprecationWarning` if the classes compared are subclasses of
       each other. ``__eq__`` and ``__ne__`` never tried to compare subclasses
       to each other.
    .. versionchanged:: 19.2.0
       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
       subclasses comparable anymore.
    .. versionadded:: 18.2.0 *kw_only*
    .. versionadded:: 18.2.0 *cache_hash*
    .. versionadded:: 19.1.0 *auto_exc*
    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
    .. versionadded:: 19.2.0 *eq* and *order*
    .. versionadded:: 20.1.0 *auto_detect*
    .. versionadded:: 20.1.0 *collect_by_mro*
    .. versionadded:: 20.1.0 *getstate_setstate*
    .. versionadded:: 20.1.0 *on_setattr*
    .. versionadded:: 20.3.0 *field_transformer*
    .. versionchanged:: 21.1.0
       ``init=False`` injects ``__attrs_init__``
    .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__``
    .. versionchanged:: 21.1.0 *cmp* undeprecated
    .. versionadded:: 21.3.0 *match_args*
    .. versionadded:: 22.2.0
       *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
    .. deprecated:: 24.1.0 *repr_ns*
    .. versionchanged:: 24.1.0
       Instances are not compared as tuples of attributes anymore, but using a
       big ``and`` condition. This is faster and has more correct behavior for
       uncomparable values like `math.nan`.
    .. versionadded:: 24.1.0
       If a class has an *inherited* classmethod called
       ``__attrs_init_subclass__``, it is executed after the class is created.
    .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
    """
    if repr_ns is not None:
        import warnings

        warnings.warn(
            DeprecationWarning(
                "The `repr_ns` argument is deprecated and will be removed in or after August 2025."
            ),
            stacklevel=2,
        )

    # Resolve the legacy *cmp* argument against *eq*/*order* up front.
    eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)

    # unsafe_hash takes precedence due to PEP 681.
    if unsafe_hash is not None:
        hash = unsafe_hash

    # A list/tuple of setattr hooks is sugar for a pipe of them.
    if isinstance(on_setattr, (list, tuple)):
        on_setattr = setters.pipe(*on_setattr)

    def wrap(cls):
        # Frozen-ness is inherited: a frozen base makes the subclass frozen.
        is_frozen = frozen or _has_frozen_base_class(cls)
        is_exc = auto_exc is True and issubclass(cls, BaseException)
        has_own_setattr = auto_detect and _has_own_attribute(
            cls, "__setattr__"
        )

        if has_own_setattr and is_frozen:
            msg = "Can't freeze a class with a custom __setattr__."
            raise ValueError(msg)

        builder = _ClassBuilder(
            cls,
            these,
            slots,
            is_frozen,
            weakref_slot,
            _determine_whether_to_implement(
                cls,
                getstate_setstate,
                auto_detect,
                ("__getstate__", "__setstate__"),
                default=slots,
            ),
            auto_attribs,
            kw_only,
            cache_hash,
            is_exc,
            collect_by_mro,
            on_setattr,
            has_own_setattr,
            field_transformer,
        )
        if _determine_whether_to_implement(
            cls, repr, auto_detect, ("__repr__",)
        ):
            builder.add_repr(repr_ns)
        if str is True:
            builder.add_str()

        # NOTE: rebinds the *eq* name locally inside wrap (no nonlocal), so
        # the outer argument stays untouched for repeated decorator use.
        eq = _determine_whether_to_implement(
            cls, eq_, auto_detect, ("__eq__", "__ne__")
        )
        if not is_exc and eq is True:
            builder.add_eq()
        if not is_exc and _determine_whether_to_implement(
            cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__")
        ):
            builder.add_order()

        builder.add_setattr()

        # *hash* is written back, hence nonlocal (unlike *eq* above).
        nonlocal hash
        if (
            hash is None
            and auto_detect is True
            and _has_own_attribute(cls, "__hash__")
        ):
            hash = False

        if hash is not True and hash is not False and hash is not None:
            # Can't use `hash in` because 1 == True for example.
            msg = "Invalid value for hash. Must be True, False, or None."
            raise TypeError(msg)

        if hash is False or (hash is None and eq is False) or is_exc:
            # Don't do anything. Should fall back to __object__'s __hash__
            # which is by id.
            if cache_hash:
                msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled."
                raise TypeError(msg)
        elif hash is True or (
            hash is None and eq is True and is_frozen is True
        ):
            # Build a __hash__ if told so, or if it's safe.
            builder.add_hash()
        else:
            # Raise TypeError on attempts to hash.
            if cache_hash:
                msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled."
                raise TypeError(msg)
            builder.make_unhashable()

        if _determine_whether_to_implement(
            cls, init, auto_detect, ("__init__",)
        ):
            builder.add_init()
        else:
            builder.add_attrs_init()
            if cache_hash:
                msg = "Invalid value for cache_hash. To use hash caching, init must be True."
                raise TypeError(msg)

        if (
            PY_3_10_PLUS
            and match_args
            and not _has_own_attribute(cls, "__match_args__")
        ):
            builder.add_match_args()

        return builder.build_class()

    # maybe_cls's type depends on the usage of the decorator. It's a class
    # if it's used as `@attrs` but `None` if used as `@attrs()`.
    if maybe_cls is None:
        return wrap

    return wrap(maybe_cls)
1397
+
1398
+
1399
# Keep a private reference to the decorator: several helpers below take a
# parameter named *attrs*, which would otherwise shadow the public name.
_attrs = attrs
"""
Internal alias so we can use it in functions that take an argument called
*attrs*.
"""
1404
+
1405
+
1406
def _has_frozen_base_class(cls):
    """
    Check whether *cls* has a frozen ancestor by looking at its
    __setattr__.
    """
    # Every frozen attrs class shares the very same function object as its
    # __setattr__, so an identity comparison is sufficient (and cheap).
    return cls.__setattr__ is _frozen_setattrs
1412
+
1413
+
1414
+ def _generate_unique_filename(cls, func_name):
1415
+ """
1416
+ Create a "filename" suitable for a function being generated.
1417
+ """
1418
+ return (
1419
+ f"<attrs generated {func_name} {cls.__module__}."
1420
+ f"{getattr(cls, '__qualname__', cls.__name__)}>"
1421
+ )
1422
+
1423
+
1424
def _make_hash(cls, attrs, frozen, cache_hash):
    """
    Build a ``__hash__`` method for *cls* from *attrs*.

    Only attributes with ``hash=True`` (or ``hash=None`` and ``eq=True``)
    take part.  If *cache_hash* is true, the computed value is stored on the
    instance; with *frozen* additionally true, the cache write goes through
    ``object.__setattr__`` to bypass the frozen setattr.
    """
    attrs = tuple(
        a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
    )

    tab = "    "

    unique_filename = _generate_unique_filename(cls, "hash")
    # Mix a per-class constant into the hash so different classes with equal
    # field values hash differently.
    type_hash = hash(unique_filename)
    # If eq is custom generated, we need to include the functions in globs
    globs = {}

    hash_def = "def __hash__(self"
    hash_func = "hash(("
    closing_braces = "))"
    if not cache_hash:
        hash_def += "):"
    else:
        hash_def += ", *"

        # Keyword-only private parameter smuggles the cache wrapper into the
        # generated function without polluting its public signature.
        hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):"
        hash_func = "_cache_wrapper(" + hash_func
        closing_braces += ")"

    method_lines = [hash_def]

    def append_hash_computation_lines(prefix, indent):
        """
        Generate the code for actually computing the hash code.
        Below this will either be returned directly or used to compute
        a value which is then cached, depending on the value of cache_hash
        """

        method_lines.extend(
            [
                indent + prefix + hash_func,
                indent + f"    {type_hash},",
            ]
        )

        for a in attrs:
            if a.eq_key:
                cmp_name = f"_{a.name}_key"
                globs[cmp_name] = a.eq_key
                method_lines.append(
                    indent + f"    {cmp_name}(self.{a.name}),"
                )
            else:
                method_lines.append(indent + f"    self.{a.name},")

        method_lines.append(indent + "    " + closing_braces)

    if cache_hash:
        method_lines.append(tab + f"if self.{_HASH_CACHE_FIELD} is None:")
        if frozen:
            append_hash_computation_lines(
                f"object.__setattr__(self, '{_HASH_CACHE_FIELD}', ", tab * 2
            )
            method_lines.append(tab * 2 + ")")  # close __setattr__
        else:
            append_hash_computation_lines(
                f"self.{_HASH_CACHE_FIELD} = ", tab * 2
            )
        method_lines.append(tab + f"return self.{_HASH_CACHE_FIELD}")
    else:
        append_hash_computation_lines("return ", tab)

    script = "\n".join(method_lines)
    return _make_method("__hash__", script, unique_filename, globs)
1493
+
1494
+
1495
def _add_hash(cls, attrs):
    """
    Add a hash method to *cls*.

    Convenience wrapper around `_make_hash` that installs an uncached,
    unfrozen ``__hash__`` directly on the class and returns the class.
    """
    cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False)
    return cls
1501
+
1502
+
1503
+ def _make_ne():
1504
+ """
1505
+ Create __ne__ method.
1506
+ """
1507
+
1508
+ def __ne__(self, other):
1509
+ """
1510
+ Check equality and either forward a NotImplemented or
1511
+ return the result negated.
1512
+ """
1513
+ result = self.__eq__(other)
1514
+ if result is NotImplemented:
1515
+ return NotImplemented
1516
+
1517
+ return not result
1518
+
1519
+ return __ne__
1520
+
1521
+
1522
def _make_eq(cls, attrs):
    """
    Create __eq__ method for *cls* with *attrs*.

    Only attributes with a truthy ``eq`` participate.  Attributes with an
    ``eq_key`` are compared through that key callable on both sides.
    """
    attrs = [a for a in attrs if a.eq]

    unique_filename = _generate_unique_filename(cls, "eq")
    lines = [
        "def __eq__(self, other):",
        "    if other.__class__ is not self.__class__:",
        "        return NotImplemented",
    ]

    # We can't just do a big self.x = other.x and... clause due to
    # irregularities like nan == nan is false but (nan,) == (nan,) is true.
    globs = {}
    if attrs:
        lines.append("    return (")
        for a in attrs:
            if a.eq_key:
                cmp_name = f"_{a.name}_key"
                # Add the key function to the global namespace
                # of the evaluated function.
                globs[cmp_name] = a.eq_key
                lines.append(
                    f"        {cmp_name}(self.{a.name}) == {cmp_name}(other.{a.name})"
                )
            else:
                lines.append(f"        self.{a.name} == other.{a.name}")
            if a is not attrs[-1]:
                lines[-1] = f"{lines[-1]} and"
        lines.append("    )")
    else:
        # No comparable attributes: same-class instances are always equal.
        lines.append("    return True")

    script = "\n".join(lines)

    return _make_method("__eq__", script, unique_filename, globs)
1560
+
1561
+
1562
+ def _make_order(cls, attrs):
1563
+ """
1564
+ Create ordering methods for *cls* with *attrs*.
1565
+ """
1566
+ attrs = [a for a in attrs if a.order]
1567
+
1568
+ def attrs_to_tuple(obj):
1569
+ """
1570
+ Save us some typing.
1571
+ """
1572
+ return tuple(
1573
+ key(value) if key else value
1574
+ for value, key in (
1575
+ (getattr(obj, a.name), a.order_key) for a in attrs
1576
+ )
1577
+ )
1578
+
1579
+ def __lt__(self, other):
1580
+ """
1581
+ Automatically created by attrs.
1582
+ """
1583
+ if other.__class__ is self.__class__:
1584
+ return attrs_to_tuple(self) < attrs_to_tuple(other)
1585
+
1586
+ return NotImplemented
1587
+
1588
+ def __le__(self, other):
1589
+ """
1590
+ Automatically created by attrs.
1591
+ """
1592
+ if other.__class__ is self.__class__:
1593
+ return attrs_to_tuple(self) <= attrs_to_tuple(other)
1594
+
1595
+ return NotImplemented
1596
+
1597
+ def __gt__(self, other):
1598
+ """
1599
+ Automatically created by attrs.
1600
+ """
1601
+ if other.__class__ is self.__class__:
1602
+ return attrs_to_tuple(self) > attrs_to_tuple(other)
1603
+
1604
+ return NotImplemented
1605
+
1606
+ def __ge__(self, other):
1607
+ """
1608
+ Automatically created by attrs.
1609
+ """
1610
+ if other.__class__ is self.__class__:
1611
+ return attrs_to_tuple(self) >= attrs_to_tuple(other)
1612
+
1613
+ return NotImplemented
1614
+
1615
+ return __lt__, __le__, __gt__, __ge__
1616
+
1617
+
1618
def _add_eq(cls, attrs=None):
    """
    Add equality methods to *cls* with *attrs*.

    Falls back to the class's own ``__attrs_attrs__`` when *attrs* is not
    given.  Installs both ``__eq__`` and a matching ``__ne__``.
    """
    if attrs is None:
        attrs = cls.__attrs_attrs__

    cls.__eq__ = _make_eq(cls, attrs)
    cls.__ne__ = _make_ne()

    return cls
1629
+
1630
+
1631
def _make_repr(attrs, ns, cls):
    """
    Build a ``__repr__`` method for *cls* from *attrs*.

    Attributes with ``repr=False`` are skipped; a callable ``repr`` is used
    as a custom formatter.  The generated method guards against infinite
    recursion on self-referential structures via a thread-local set of ids.
    """
    unique_filename = _generate_unique_filename(cls, "repr")
    # Figure out which attributes to include, and which function to use to
    # format them. The a.repr value can be either bool or a custom
    # callable.
    attr_names_with_reprs = tuple(
        (a.name, (repr if a.repr is True else a.repr), a.init)
        for a in attrs
        if a.repr is not False
    )
    globs = {
        name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr
    }
    globs["_compat"] = _compat
    globs["AttributeError"] = AttributeError
    globs["NOTHING"] = NOTHING
    attribute_fragments = []
    for name, r, i in attr_names_with_reprs:
        # Non-init attributes may be missing before __attrs_post_init__ runs,
        # hence the getattr fallback to NOTHING.
        accessor = (
            "self." + name if i else 'getattr(self, "' + name + '", NOTHING)'
        )
        fragment = (
            "%s={%s!r}" % (name, accessor)
            if r == repr
            else "%s={%s_repr(%s)}" % (name, name, accessor)
        )
        attribute_fragments.append(fragment)
    repr_fragment = ", ".join(attribute_fragments)

    if ns is None:
        cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}'
    else:
        cls_name_fragment = ns + ".{self.__class__.__name__}"

    lines = [
        "def __repr__(self):",
        "    try:",
        "        already_repring = _compat.repr_context.already_repring",
        "    except AttributeError:",
        "        already_repring = {id(self),}",
        "        _compat.repr_context.already_repring = already_repring",
        "    else:",
        "        if id(self) in already_repring:",
        "            return '...'",
        "        else:",
        "            already_repring.add(id(self))",
        "    try:",
        f"        return f'{cls_name_fragment}({repr_fragment})'",
        "    finally:",
        "        already_repring.remove(id(self))",
    ]

    return _make_method(
        "__repr__", "\n".join(lines), unique_filename, globs=globs
    )
1686
+
1687
+
1688
def _add_repr(cls, ns=None, attrs=None):
    """
    Add a repr method to *cls*.

    Falls back to the class's own ``__attrs_attrs__`` when *attrs* is not
    given; *ns* is the optional namespace prefix for the class name.
    """
    if attrs is None:
        attrs = cls.__attrs_attrs__

    cls.__repr__ = _make_repr(attrs, ns, cls)
    return cls
1697
+
1698
+
1699
def fields(cls):
    """
    Return the tuple of *attrs* attributes for a class.

    The tuple also allows accessing the fields by their names (see below for
    examples).

    Args:
        cls (type): Class to introspect.

    Raises:
        TypeError: If *cls* is not a class.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    Returns:
        tuple (with name accessors) of `attrs.Attribute`

    .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
       by name.
    .. versionchanged:: 23.1.0 Add support for generic classes.
    """
    generic_base = get_generic_base(cls)

    # A parametrized generic alias (e.g. ``MyClass[int]``) is not a type
    # itself, so the isinstance check only applies to plain classes.
    if generic_base is None and not isinstance(cls, type):
        msg = "Passed object must be a class."
        raise TypeError(msg)

    attrs = getattr(cls, "__attrs_attrs__", None)

    if attrs is None:
        if generic_base is not None:
            attrs = getattr(generic_base, "__attrs_attrs__", None)
            if attrs is not None:
                # Even though this is global state, stick it on here to speed
                # it up. We rely on `cls` being cached for this to be
                # efficient.
                cls.__attrs_attrs__ = attrs
                return attrs
        msg = f"{cls!r} is not an attrs-decorated class."
        raise NotAnAttrsClassError(msg)

    return attrs
1743
+
1744
+
1745
def fields_dict(cls):
    """
    Return an ordered dictionary of *attrs* attributes for a class, whose keys
    are the attribute names.

    Args:
        cls (type): Class to introspect.

    Raises:
        TypeError: If *cls* is not a class.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    Returns:
        dict[str, attrs.Attribute]: Dict of attribute name to definition

    .. versionadded:: 18.1.0
    """
    if not isinstance(cls, type):
        raise TypeError("Passed object must be a class.")

    attributes = getattr(cls, "__attrs_attrs__", None)
    if attributes is None:
        raise NotAnAttrsClassError(
            f"{cls!r} is not an attrs-decorated class."
        )

    # Insertion order of __attrs_attrs__ is preserved by the dict.
    return {attribute.name: attribute for attribute in attributes}
1772
+
1773
+
1774
def validate(inst):
    """
    Validate all attributes on *inst* that have a validator.

    Lets all exceptions through.

    Args:
        inst: Instance of a class with *attrs* attributes.
    """
    # Validators can be switched off globally; honor that here too.
    if _config._run_validators is False:
        return

    for a in fields(inst.__class__):
        v = a.validator
        if v is not None:
            v(inst, a, getattr(inst, a.name))
1790
+
1791
+
1792
+ def _is_slot_attr(a_name, base_attr_map):
1793
+ """
1794
+ Check if the attribute name comes from a slot class.
1795
+ """
1796
+ cls = base_attr_map.get(a_name)
1797
+ return cls and "__slots__" in cls.__dict__
1798
+
1799
+
1800
def _make_init(
    cls,
    attrs,
    pre_init,
    pre_init_has_args,
    post_init,
    frozen,
    slots,
    cache_hash,
    base_attr_map,
    is_exc,
    cls_on_setattr,
    attrs_init,
):
    """
    Build ``__init__`` (or ``__attrs_init__`` when *attrs_init* is true)
    for *cls* from *attrs*.

    Filters out attributes that neither take part in ``__init__`` nor have a
    default, generates the initializer script, and compiles it with the
    globals the script requires.
    """
    has_cls_on_setattr = (
        cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP
    )

    if frozen and has_cls_on_setattr:
        msg = "Frozen classes can't use on_setattr."
        raise ValueError(msg)

    # Frozen or hash-caching classes must bypass normal attribute setting.
    needs_cached_setattr = cache_hash or frozen
    filtered_attrs = []
    attr_dict = {}
    for a in attrs:
        if not a.init and a.default is NOTHING:
            continue

        filtered_attrs.append(a)
        attr_dict[a.name] = a

        if a.on_setattr is not None:
            if frozen is True:
                msg = "Frozen classes can't use on_setattr."
                raise ValueError(msg)

            needs_cached_setattr = True
        elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP:
            needs_cached_setattr = True

    unique_filename = _generate_unique_filename(cls, "init")

    script, globs, annotations = _attrs_to_init_script(
        filtered_attrs,
        frozen,
        slots,
        pre_init,
        pre_init_has_args,
        post_init,
        cache_hash,
        base_attr_map,
        is_exc,
        needs_cached_setattr,
        has_cls_on_setattr,
        "__attrs_init__" if attrs_init else "__init__",
    )
    if cls.__module__ in sys.modules:
        # This makes typing.get_type_hints(CLS.__init__) resolve string types.
        globs.update(sys.modules[cls.__module__].__dict__)

    globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})

    if needs_cached_setattr:
        # Save the lookup overhead in __init__ if we need to circumvent
        # setattr hooks.
        globs["_cached_setattr_get"] = _OBJ_SETATTR.__get__

    init = _make_method(
        "__attrs_init__" if attrs_init else "__init__",
        script,
        unique_filename,
        globs,
    )
    init.__annotations__ = annotations

    return init
1877
+
1878
+
1879
+ def _setattr(attr_name: str, value_var: str, has_on_setattr: bool) -> str:
1880
+ """
1881
+ Use the cached object.setattr to set *attr_name* to *value_var*.
1882
+ """
1883
+ return f"_setattr('{attr_name}', {value_var})"
1884
+
1885
+
1886
+ def _setattr_with_converter(
1887
+ attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
1888
+ ) -> str:
1889
+ """
1890
+ Use the cached object.setattr to set *attr_name* to *value_var*, but run
1891
+ its converter first.
1892
+ """
1893
+ return f"_setattr('{attr_name}', {converter._fmt_converter_call(attr_name, value_var)})"
1894
+
1895
+
1896
+ def _assign(attr_name: str, value: str, has_on_setattr: bool) -> str:
1897
+ """
1898
+ Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise
1899
+ relegate to _setattr.
1900
+ """
1901
+ if has_on_setattr:
1902
+ return _setattr(attr_name, value, True)
1903
+
1904
+ return f"self.{attr_name} = {value}"
1905
+
1906
+
1907
+ def _assign_with_converter(
1908
+ attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
1909
+ ) -> str:
1910
+ """
1911
+ Unless *attr_name* has an on_setattr hook, use normal assignment after
1912
+ conversion. Otherwise relegate to _setattr_with_converter.
1913
+ """
1914
+ if has_on_setattr:
1915
+ return _setattr_with_converter(attr_name, value_var, True, converter)
1916
+
1917
+ return f"self.{attr_name} = {converter._fmt_converter_call(attr_name, value_var)}"
1918
+
1919
+
1920
def _determine_setters(
    frozen: bool, slots: bool, base_attr_map: dict[str, type]
):
    """
    Determine the correct setter functions based on whether a class is frozen
    and/or slotted.

    Returns a 3-tuple of (extra setup lines for the generated init, plain
    setter formatter, converter-aware setter formatter).
    """
    if frozen is True:
        if slots is True:
            # Slotted frozen classes must always go through object.__setattr__.
            return (), _setattr, _setattr_with_converter

        # Dict frozen classes assign directly to __dict__.
        # But only if the attribute doesn't come from an ancestor slot
        # class.
        # Note _inst_dict will be used again below if cache_hash is True

        def fmt_setter(
            attr_name: str, value_var: str, has_on_setattr: bool
        ) -> str:
            if _is_slot_attr(attr_name, base_attr_map):
                return _setattr(attr_name, value_var, has_on_setattr)

            return f"_inst_dict['{attr_name}'] = {value_var}"

        def fmt_setter_with_converter(
            attr_name: str,
            value_var: str,
            has_on_setattr: bool,
            converter: Converter,
        ) -> str:
            if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
                return _setattr_with_converter(
                    attr_name, value_var, has_on_setattr, converter
                )

            return f"_inst_dict['{attr_name}'] = {converter._fmt_converter_call(attr_name, value_var)}"

        return (
            ("_inst_dict = self.__dict__",),
            fmt_setter,
            fmt_setter_with_converter,
        )

    # Not frozen -- we can just assign directly.
    return (), _assign, _assign_with_converter
1965
+
1966
+
1967
def _attrs_to_init_script(
    attrs: list[Attribute],
    is_frozen: bool,
    is_slotted: bool,
    call_pre_init: bool,
    pre_init_has_args: bool,
    call_post_init: bool,
    does_cache_hash: bool,
    base_attr_map: dict[str, type],
    is_exc: bool,
    needs_cached_setattr: bool,
    has_cls_on_setattr: bool,
    method_name: str,
) -> tuple[str, dict, dict]:
    """
    Return a script of an initializer for *attrs*, a dict of globals, and
    annotations for the initializer.

    The globals are required by the generated script.
    """
    lines = ["self.__attrs_pre_init__()"] if call_pre_init else []

    if needs_cached_setattr:
        lines.append(
            # Circumvent the __setattr__ descriptor to save one lookup per
            # assignment. Note _setattr will be used again below if
            # does_cache_hash is True.
            "_setattr = _cached_setattr_get(self)"
        )

    extra_lines, fmt_setter, fmt_setter_with_converter = _determine_setters(
        is_frozen, is_slotted, base_attr_map
    )
    lines.extend(extra_lines)

    args = []
    kw_only_args = []
    attrs_to_validate = []

    # This is a dictionary of names to validator and converter callables.
    # Injecting this into __init__ globals lets us avoid lookups.
    names_for_globals = {}
    annotations = {"return": None}

    for a in attrs:
        if a.validator:
            attrs_to_validate.append(a)

        attr_name = a.name
        has_on_setattr = a.on_setattr is not None or (
            a.on_setattr is not setters.NO_OP and has_cls_on_setattr
        )
        # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not
        # explicitly provided
        arg_name = a.alias

        has_factory = isinstance(a.default, Factory)
        maybe_self = "self" if has_factory and a.default.takes_self else ""

        # Normalize plain callables into Converter instances.
        if a.converter and not isinstance(a.converter, Converter):
            converter = Converter(a.converter)
        else:
            converter = a.converter

        if a.init is False:
            # Not part of __init__: assign the default (or factory result)
            # unconditionally.
            if has_factory:
                init_factory_name = _INIT_FACTORY_PAT % (a.name,)
                if converter is not None:
                    lines.append(
                        fmt_setter_with_converter(
                            attr_name,
                            init_factory_name + f"({maybe_self})",
                            has_on_setattr,
                            converter,
                        )
                    )
                    names_for_globals[converter._get_global_name(a.name)] = (
                        converter.converter
                    )
                else:
                    lines.append(
                        fmt_setter(
                            attr_name,
                            init_factory_name + f"({maybe_self})",
                            has_on_setattr,
                        )
                    )
                names_for_globals[init_factory_name] = a.default.factory
            elif converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name,
                        f"attr_dict['{attr_name}'].default",
                        has_on_setattr,
                        converter,
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(
                    fmt_setter(
                        attr_name,
                        f"attr_dict['{attr_name}'].default",
                        has_on_setattr,
                    )
                )
        elif a.default is not NOTHING and not has_factory:
            # Plain default: expose it as the parameter default.
            arg = f"{arg_name}=attr_dict['{attr_name}'].default"
            if a.kw_only:
                kw_only_args.append(arg)
            else:
                args.append(arg)

            if converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr, converter
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))

        elif has_factory:
            # Factory default: NOTHING sentinel distinguishes "not passed".
            arg = f"{arg_name}=NOTHING"
            if a.kw_only:
                kw_only_args.append(arg)
            else:
                args.append(arg)
            lines.append(f"if {arg_name} is not NOTHING:")

            init_factory_name = _INIT_FACTORY_PAT % (a.name,)
            if converter is not None:
                lines.append(
                    "    "
                    + fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr, converter
                    )
                )
                lines.append("else:")
                lines.append(
                    "    "
                    + fmt_setter_with_converter(
                        attr_name,
                        init_factory_name + "(" + maybe_self + ")",
                        has_on_setattr,
                        converter,
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(
                    "    " + fmt_setter(attr_name, arg_name, has_on_setattr)
                )
                lines.append("else:")
                lines.append(
                    "    "
                    + fmt_setter(
                        attr_name,
                        init_factory_name + "(" + maybe_self + ")",
                        has_on_setattr,
                    )
                )
            names_for_globals[init_factory_name] = a.default.factory
        else:
            # Mandatory argument: no default at all.
            if a.kw_only:
                kw_only_args.append(arg_name)
            else:
                args.append(arg_name)

            if converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr, converter
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))

        if a.init is True:
            if a.type is not None and converter is None:
                annotations[arg_name] = a.type
            elif converter is not None and converter._first_param_type:
                # Use the type from the converter if present.
                annotations[arg_name] = converter._first_param_type

    if attrs_to_validate:  # we can skip this if there are no validators.
        names_for_globals["_config"] = _config
        lines.append("if _config._run_validators is True:")
        for a in attrs_to_validate:
            val_name = "__attr_validator_" + a.name
            attr_name = "__attr_" + a.name
            lines.append(f"    {val_name}(self, {attr_name}, self.{a.name})")
            names_for_globals[val_name] = a.validator
            names_for_globals[attr_name] = a

    if call_post_init:
        lines.append("self.__attrs_post_init__()")

    # Because this is set only after __attrs_post_init__ is called, a crash
    # will result if post-init tries to access the hash code. This seemed
    # preferable to setting this beforehand, in which case alteration to field
    # values during post-init combined with post-init accessing the hash code
    # would result in silent bugs.
    if does_cache_hash:
        if is_frozen:
            if is_slotted:
                init_hash_cache = f"_setattr('{_HASH_CACHE_FIELD}', None)"
            else:
                init_hash_cache = f"_inst_dict['{_HASH_CACHE_FIELD}'] = None"
        else:
            init_hash_cache = f"self.{_HASH_CACHE_FIELD} = None"
        lines.append(init_hash_cache)

    # For exceptions we rely on BaseException.__init__ for proper
    # initialization.
    if is_exc:
        vals = ",".join(f"self.{a.name}" for a in attrs if a.init)

        lines.append(f"BaseException.__init__(self, {vals})")

    args = ", ".join(args)
    pre_init_args = args
    if kw_only_args:
        # leading comma & kw_only args
        args += f"{', ' if args else ''}*, {', '.join(kw_only_args)}"
        pre_init_kw_only_args = ", ".join(
            [
                f"{kw_arg_name}={kw_arg_name}"
                # We need to remove the defaults from the kw_only_args.
                for kw_arg_name in (kwa.split("=")[0] for kwa in kw_only_args)
            ]
        )
        pre_init_args += ", " if pre_init_args else ""
        pre_init_args += pre_init_kw_only_args

    if call_pre_init and pre_init_has_args:
        # If pre init method has arguments, pass same arguments as `__init__`.
        lines[0] = f"self.__attrs_pre_init__({pre_init_args})"

    # Python 3.7 doesn't allow backslashes in f strings.
    NL = "\n    "
    return (
        f"""def {method_name}(self, {args}):
    {NL.join(lines) if lines else 'pass'}
""",
        names_for_globals,
        annotations,
    )
2225
+
2226
+
2227
+ def _default_init_alias_for(name: str) -> str:
2228
+ """
2229
+ The default __init__ parameter name for a field.
2230
+
2231
+ This performs private-name adjustment via leading-unscore stripping,
2232
+ and is the default value of Attribute.alias if not provided.
2233
+ """
2234
+
2235
+ return name.lstrip("_")
2236
+
2237
+
2238
+ class Attribute:
2239
+ """
2240
+ *Read-only* representation of an attribute.
2241
+
2242
+ .. warning::
2243
+
2244
+ You should never instantiate this class yourself.
2245
+
2246
+ The class has *all* arguments of `attr.ib` (except for ``factory`` which is
2247
+ only syntactic sugar for ``default=Factory(...)`` plus the following:
2248
+
2249
+ - ``name`` (`str`): The name of the attribute.
2250
+ - ``alias`` (`str`): The __init__ parameter name of the attribute, after
2251
+ any explicit overrides and default private-attribute-name handling.
2252
+ - ``inherited`` (`bool`): Whether or not that attribute has been inherited
2253
+ from a base class.
2254
+ - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The
2255
+ callables that are used for comparing and ordering objects by this
2256
+ attribute, respectively. These are set by passing a callable to
2257
+ `attr.ib`'s ``eq``, ``order``, or ``cmp`` arguments. See also
2258
+ :ref:`comparison customization <custom-comparison>`.
2259
+
2260
+ Instances of this class are frequently used for introspection purposes
2261
+ like:
2262
+
2263
+ - `fields` returns a tuple of them.
2264
+ - Validators get them passed as the first argument.
2265
+ - The :ref:`field transformer <transform-fields>` hook receives a list of
2266
+ them.
2267
+ - The ``alias`` property exposes the __init__ parameter name of the field,
2268
+ with any overrides and default private-attribute handling applied.
2269
+
2270
+
2271
+ .. versionadded:: 20.1.0 *inherited*
2272
+ .. versionadded:: 20.1.0 *on_setattr*
2273
+ .. versionchanged:: 20.2.0 *inherited* is not taken into account for
2274
+ equality checks and hashing anymore.
2275
+ .. versionadded:: 21.1.0 *eq_key* and *order_key*
2276
+ .. versionadded:: 22.2.0 *alias*
2277
+
2278
+ For the full version history of the fields, see `attr.ib`.
2279
+ """
2280
+
2281
+ __slots__ = (
2282
+ "name",
2283
+ "default",
2284
+ "validator",
2285
+ "repr",
2286
+ "eq",
2287
+ "eq_key",
2288
+ "order",
2289
+ "order_key",
2290
+ "hash",
2291
+ "init",
2292
+ "metadata",
2293
+ "type",
2294
+ "converter",
2295
+ "kw_only",
2296
+ "inherited",
2297
+ "on_setattr",
2298
+ "alias",
2299
+ )
2300
+
2301
+ def __init__(
2302
+ self,
2303
+ name,
2304
+ default,
2305
+ validator,
2306
+ repr,
2307
+ cmp, # XXX: unused, remove along with other cmp code.
2308
+ hash,
2309
+ init,
2310
+ inherited,
2311
+ metadata=None,
2312
+ type=None,
2313
+ converter=None,
2314
+ kw_only=False,
2315
+ eq=None,
2316
+ eq_key=None,
2317
+ order=None,
2318
+ order_key=None,
2319
+ on_setattr=None,
2320
+ alias=None,
2321
+ ):
2322
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
2323
+ cmp, eq_key or eq, order_key or order, True
2324
+ )
2325
+
2326
+ # Cache this descriptor here to speed things up later.
2327
+ bound_setattr = _OBJ_SETATTR.__get__(self)
2328
+
2329
+ # Despite the big red warning, people *do* instantiate `Attribute`
2330
+ # themselves.
2331
+ bound_setattr("name", name)
2332
+ bound_setattr("default", default)
2333
+ bound_setattr("validator", validator)
2334
+ bound_setattr("repr", repr)
2335
+ bound_setattr("eq", eq)
2336
+ bound_setattr("eq_key", eq_key)
2337
+ bound_setattr("order", order)
2338
+ bound_setattr("order_key", order_key)
2339
+ bound_setattr("hash", hash)
2340
+ bound_setattr("init", init)
2341
+ bound_setattr("converter", converter)
2342
+ bound_setattr(
2343
+ "metadata",
2344
+ (
2345
+ types.MappingProxyType(dict(metadata)) # Shallow copy
2346
+ if metadata
2347
+ else _EMPTY_METADATA_SINGLETON
2348
+ ),
2349
+ )
2350
+ bound_setattr("type", type)
2351
+ bound_setattr("kw_only", kw_only)
2352
+ bound_setattr("inherited", inherited)
2353
+ bound_setattr("on_setattr", on_setattr)
2354
+ bound_setattr("alias", alias)
2355
+
2356
+ def __setattr__(self, name, value):
2357
+ raise FrozenInstanceError()
2358
+
2359
+ @classmethod
2360
+ def from_counting_attr(cls, name, ca, type=None):
2361
+ # type holds the annotated value. deal with conflicts:
2362
+ if type is None:
2363
+ type = ca.type
2364
+ elif ca.type is not None:
2365
+ msg = "Type annotation and type argument cannot both be present"
2366
+ raise ValueError(msg)
2367
+ inst_dict = {
2368
+ k: getattr(ca, k)
2369
+ for k in Attribute.__slots__
2370
+ if k
2371
+ not in (
2372
+ "name",
2373
+ "validator",
2374
+ "default",
2375
+ "type",
2376
+ "inherited",
2377
+ ) # exclude methods and deprecated alias
2378
+ }
2379
+ return cls(
2380
+ name=name,
2381
+ validator=ca._validator,
2382
+ default=ca._default,
2383
+ type=type,
2384
+ cmp=None,
2385
+ inherited=False,
2386
+ **inst_dict,
2387
+ )
2388
+
2389
+ # Don't use attrs.evolve since fields(Attribute) doesn't work
2390
+ def evolve(self, **changes):
2391
+ """
2392
+ Copy *self* and apply *changes*.
2393
+
2394
+ This works similarly to `attrs.evolve` but that function does not work
2395
+ with {class}`Attribute`.
2396
+
2397
+ It is mainly meant to be used for `transform-fields`.
2398
+
2399
+ .. versionadded:: 20.3.0
2400
+ """
2401
+ new = copy.copy(self)
2402
+
2403
+ new._setattrs(changes.items())
2404
+
2405
+ return new
2406
+
2407
+ # Don't use _add_pickle since fields(Attribute) doesn't work
2408
+ def __getstate__(self):
2409
+ """
2410
+ Play nice with pickle.
2411
+ """
2412
+ return tuple(
2413
+ getattr(self, name) if name != "metadata" else dict(self.metadata)
2414
+ for name in self.__slots__
2415
+ )
2416
+
2417
+ def __setstate__(self, state):
2418
+ """
2419
+ Play nice with pickle.
2420
+ """
2421
+ self._setattrs(zip(self.__slots__, state))
2422
+
2423
+ def _setattrs(self, name_values_pairs):
2424
+ bound_setattr = _OBJ_SETATTR.__get__(self)
2425
+ for name, value in name_values_pairs:
2426
+ if name != "metadata":
2427
+ bound_setattr(name, value)
2428
+ else:
2429
+ bound_setattr(
2430
+ name,
2431
+ (
2432
+ types.MappingProxyType(dict(value))
2433
+ if value
2434
+ else _EMPTY_METADATA_SINGLETON
2435
+ ),
2436
+ )
2437
+
2438
+
2439
+ _a = [
2440
+ Attribute(
2441
+ name=name,
2442
+ default=NOTHING,
2443
+ validator=None,
2444
+ repr=True,
2445
+ cmp=None,
2446
+ eq=True,
2447
+ order=False,
2448
+ hash=(name != "metadata"),
2449
+ init=True,
2450
+ inherited=False,
2451
+ alias=_default_init_alias_for(name),
2452
+ )
2453
+ for name in Attribute.__slots__
2454
+ ]
2455
+
2456
+ Attribute = _add_hash(
2457
+ _add_eq(
2458
+ _add_repr(Attribute, attrs=_a),
2459
+ attrs=[a for a in _a if a.name != "inherited"],
2460
+ ),
2461
+ attrs=[a for a in _a if a.hash and a.name != "inherited"],
2462
+ )
2463
+
2464
+
2465
+ class _CountingAttr:
2466
+ """
2467
+ Intermediate representation of attributes that uses a counter to preserve
2468
+ the order in which the attributes have been defined.
2469
+
2470
+ *Internal* data structure of the attrs library. Running into is most
2471
+ likely the result of a bug like a forgotten `@attr.s` decorator.
2472
+ """
2473
+
2474
+ __slots__ = (
2475
+ "counter",
2476
+ "_default",
2477
+ "repr",
2478
+ "eq",
2479
+ "eq_key",
2480
+ "order",
2481
+ "order_key",
2482
+ "hash",
2483
+ "init",
2484
+ "metadata",
2485
+ "_validator",
2486
+ "converter",
2487
+ "type",
2488
+ "kw_only",
2489
+ "on_setattr",
2490
+ "alias",
2491
+ )
2492
+ __attrs_attrs__ = (
2493
+ *tuple(
2494
+ Attribute(
2495
+ name=name,
2496
+ alias=_default_init_alias_for(name),
2497
+ default=NOTHING,
2498
+ validator=None,
2499
+ repr=True,
2500
+ cmp=None,
2501
+ hash=True,
2502
+ init=True,
2503
+ kw_only=False,
2504
+ eq=True,
2505
+ eq_key=None,
2506
+ order=False,
2507
+ order_key=None,
2508
+ inherited=False,
2509
+ on_setattr=None,
2510
+ )
2511
+ for name in (
2512
+ "counter",
2513
+ "_default",
2514
+ "repr",
2515
+ "eq",
2516
+ "order",
2517
+ "hash",
2518
+ "init",
2519
+ "on_setattr",
2520
+ "alias",
2521
+ )
2522
+ ),
2523
+ Attribute(
2524
+ name="metadata",
2525
+ alias="metadata",
2526
+ default=None,
2527
+ validator=None,
2528
+ repr=True,
2529
+ cmp=None,
2530
+ hash=False,
2531
+ init=True,
2532
+ kw_only=False,
2533
+ eq=True,
2534
+ eq_key=None,
2535
+ order=False,
2536
+ order_key=None,
2537
+ inherited=False,
2538
+ on_setattr=None,
2539
+ ),
2540
+ )
2541
+ cls_counter = 0
2542
+
2543
+ def __init__(
2544
+ self,
2545
+ default,
2546
+ validator,
2547
+ repr,
2548
+ cmp,
2549
+ hash,
2550
+ init,
2551
+ converter,
2552
+ metadata,
2553
+ type,
2554
+ kw_only,
2555
+ eq,
2556
+ eq_key,
2557
+ order,
2558
+ order_key,
2559
+ on_setattr,
2560
+ alias,
2561
+ ):
2562
+ _CountingAttr.cls_counter += 1
2563
+ self.counter = _CountingAttr.cls_counter
2564
+ self._default = default
2565
+ self._validator = validator
2566
+ self.converter = converter
2567
+ self.repr = repr
2568
+ self.eq = eq
2569
+ self.eq_key = eq_key
2570
+ self.order = order
2571
+ self.order_key = order_key
2572
+ self.hash = hash
2573
+ self.init = init
2574
+ self.metadata = metadata
2575
+ self.type = type
2576
+ self.kw_only = kw_only
2577
+ self.on_setattr = on_setattr
2578
+ self.alias = alias
2579
+
2580
+ def validator(self, meth):
2581
+ """
2582
+ Decorator that adds *meth* to the list of validators.
2583
+
2584
+ Returns *meth* unchanged.
2585
+
2586
+ .. versionadded:: 17.1.0
2587
+ """
2588
+ if self._validator is None:
2589
+ self._validator = meth
2590
+ else:
2591
+ self._validator = and_(self._validator, meth)
2592
+ return meth
2593
+
2594
+ def default(self, meth):
2595
+ """
2596
+ Decorator that allows to set the default for an attribute.
2597
+
2598
+ Returns *meth* unchanged.
2599
+
2600
+ Raises:
2601
+ DefaultAlreadySetError: If default has been set before.
2602
+
2603
+ .. versionadded:: 17.1.0
2604
+ """
2605
+ if self._default is not NOTHING:
2606
+ raise DefaultAlreadySetError()
2607
+
2608
+ self._default = Factory(meth, takes_self=True)
2609
+
2610
+ return meth
2611
+
2612
+
2613
+ _CountingAttr = _add_eq(_add_repr(_CountingAttr))
2614
+
2615
+
2616
+ class Factory:
2617
+ """
2618
+ Stores a factory callable.
2619
+
2620
+ If passed as the default value to `attrs.field`, the factory is used to
2621
+ generate a new value.
2622
+
2623
+ Args:
2624
+ factory (typing.Callable):
2625
+ A callable that takes either none or exactly one mandatory
2626
+ positional argument depending on *takes_self*.
2627
+
2628
+ takes_self (bool):
2629
+ Pass the partially initialized instance that is being initialized
2630
+ as a positional argument.
2631
+
2632
+ .. versionadded:: 17.1.0 *takes_self*
2633
+ """
2634
+
2635
+ __slots__ = ("factory", "takes_self")
2636
+
2637
+ def __init__(self, factory, takes_self=False):
2638
+ self.factory = factory
2639
+ self.takes_self = takes_self
2640
+
2641
+ def __getstate__(self):
2642
+ """
2643
+ Play nice with pickle.
2644
+ """
2645
+ return tuple(getattr(self, name) for name in self.__slots__)
2646
+
2647
+ def __setstate__(self, state):
2648
+ """
2649
+ Play nice with pickle.
2650
+ """
2651
+ for name, value in zip(self.__slots__, state):
2652
+ setattr(self, name, value)
2653
+
2654
+
2655
+ _f = [
2656
+ Attribute(
2657
+ name=name,
2658
+ default=NOTHING,
2659
+ validator=None,
2660
+ repr=True,
2661
+ cmp=None,
2662
+ eq=True,
2663
+ order=False,
2664
+ hash=True,
2665
+ init=True,
2666
+ inherited=False,
2667
+ )
2668
+ for name in Factory.__slots__
2669
+ ]
2670
+
2671
+ Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
2672
+
2673
+
2674
+ class Converter:
2675
+ """
2676
+ Stores a converter callable.
2677
+
2678
+ Allows for the wrapped converter to take additional arguments. The
2679
+ arguments are passed in the order they are documented.
2680
+
2681
+ Args:
2682
+ converter (Callable): A callable that converts the passed value.
2683
+
2684
+ takes_self (bool):
2685
+ Pass the partially initialized instance that is being initialized
2686
+ as a positional argument. (default: `False`)
2687
+
2688
+ takes_field (bool):
2689
+ Pass the field definition (an :class:`Attribute`) into the
2690
+ converter as a positional argument. (default: `False`)
2691
+
2692
+ .. versionadded:: 24.1.0
2693
+ """
2694
+
2695
+ __slots__ = (
2696
+ "converter",
2697
+ "takes_self",
2698
+ "takes_field",
2699
+ "_first_param_type",
2700
+ "_global_name",
2701
+ "__call__",
2702
+ )
2703
+
2704
+ def __init__(self, converter, *, takes_self=False, takes_field=False):
2705
+ self.converter = converter
2706
+ self.takes_self = takes_self
2707
+ self.takes_field = takes_field
2708
+
2709
+ ex = _AnnotationExtractor(converter)
2710
+ self._first_param_type = ex.get_first_param_type()
2711
+
2712
+ if not (self.takes_self or self.takes_field):
2713
+ self.__call__ = lambda value, _, __: self.converter(value)
2714
+ elif self.takes_self and not self.takes_field:
2715
+ self.__call__ = lambda value, instance, __: self.converter(
2716
+ value, instance
2717
+ )
2718
+ elif not self.takes_self and self.takes_field:
2719
+ self.__call__ = lambda value, __, field: self.converter(
2720
+ value, field
2721
+ )
2722
+ else:
2723
+ self.__call__ = lambda value, instance, field: self.converter(
2724
+ value, instance, field
2725
+ )
2726
+
2727
+ rt = ex.get_return_type()
2728
+ if rt is not None:
2729
+ self.__call__.__annotations__["return"] = rt
2730
+
2731
+ @staticmethod
2732
+ def _get_global_name(attr_name: str) -> str:
2733
+ """
2734
+ Return the name that a converter for an attribute name *attr_name*
2735
+ would have.
2736
+ """
2737
+ return f"__attr_converter_{attr_name}"
2738
+
2739
+ def _fmt_converter_call(self, attr_name: str, value_var: str) -> str:
2740
+ """
2741
+ Return a string that calls the converter for an attribute name
2742
+ *attr_name* and the value in variable named *value_var* according to
2743
+ `self.takes_self` and `self.takes_field`.
2744
+ """
2745
+ if not (self.takes_self or self.takes_field):
2746
+ return f"{self._get_global_name(attr_name)}({value_var})"
2747
+
2748
+ if self.takes_self and self.takes_field:
2749
+ return f"{self._get_global_name(attr_name)}({value_var}, self, attr_dict['{attr_name}'])"
2750
+
2751
+ if self.takes_self:
2752
+ return f"{self._get_global_name(attr_name)}({value_var}, self)"
2753
+
2754
+ return f"{self._get_global_name(attr_name)}({value_var}, attr_dict['{attr_name}'])"
2755
+
2756
+ def __getstate__(self):
2757
+ """
2758
+ Return a dict containing only converter and takes_self -- the rest gets
2759
+ computed when loading.
2760
+ """
2761
+ return {
2762
+ "converter": self.converter,
2763
+ "takes_self": self.takes_self,
2764
+ "takes_field": self.takes_field,
2765
+ }
2766
+
2767
+ def __setstate__(self, state):
2768
+ """
2769
+ Load instance from state.
2770
+ """
2771
+ self.__init__(**state)
2772
+
2773
+
2774
+ _f = [
2775
+ Attribute(
2776
+ name=name,
2777
+ default=NOTHING,
2778
+ validator=None,
2779
+ repr=True,
2780
+ cmp=None,
2781
+ eq=True,
2782
+ order=False,
2783
+ hash=True,
2784
+ init=True,
2785
+ inherited=False,
2786
+ )
2787
+ for name in ("converter", "takes_self", "takes_field")
2788
+ ]
2789
+
2790
+ Converter = _add_hash(
2791
+ _add_eq(_add_repr(Converter, attrs=_f), attrs=_f), attrs=_f
2792
+ )
2793
+
2794
+
2795
+ def make_class(
2796
+ name, attrs, bases=(object,), class_body=None, **attributes_arguments
2797
+ ):
2798
+ r"""
2799
+ A quick way to create a new class called *name* with *attrs*.
2800
+
2801
+ Args:
2802
+ name (str): The name for the new class.
2803
+
2804
+ attrs( list | dict):
2805
+ A list of names or a dictionary of mappings of names to `attr.ib`\
2806
+ s / `attrs.field`\ s.
2807
+
2808
+ The order is deduced from the order of the names or attributes
2809
+ inside *attrs*. Otherwise the order of the definition of the
2810
+ attributes is used.
2811
+
2812
+ bases (tuple[type, ...]): Classes that the new class will subclass.
2813
+
2814
+ class_body (dict):
2815
+ An optional dictionary of class attributes for the new class.
2816
+
2817
+ attributes_arguments: Passed unmodified to `attr.s`.
2818
+
2819
+ Returns:
2820
+ type: A new class with *attrs*.
2821
+
2822
+ .. versionadded:: 17.1.0 *bases*
2823
+ .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
2824
+ .. versionchanged:: 23.2.0 *class_body*
2825
+ """
2826
+ if isinstance(attrs, dict):
2827
+ cls_dict = attrs
2828
+ elif isinstance(attrs, (list, tuple)):
2829
+ cls_dict = {a: attrib() for a in attrs}
2830
+ else:
2831
+ msg = "attrs argument must be a dict or a list."
2832
+ raise TypeError(msg)
2833
+
2834
+ pre_init = cls_dict.pop("__attrs_pre_init__", None)
2835
+ post_init = cls_dict.pop("__attrs_post_init__", None)
2836
+ user_init = cls_dict.pop("__init__", None)
2837
+
2838
+ body = {}
2839
+ if class_body is not None:
2840
+ body.update(class_body)
2841
+ if pre_init is not None:
2842
+ body["__attrs_pre_init__"] = pre_init
2843
+ if post_init is not None:
2844
+ body["__attrs_post_init__"] = post_init
2845
+ if user_init is not None:
2846
+ body["__init__"] = user_init
2847
+
2848
+ type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body))
2849
+
2850
+ # For pickling to work, the __module__ variable needs to be set to the
2851
+ # frame where the class is created. Bypass this step in environments where
2852
+ # sys._getframe is not defined (Jython for example) or sys._getframe is not
2853
+ # defined for arguments greater than 0 (IronPython).
2854
+ with contextlib.suppress(AttributeError, ValueError):
2855
+ type_.__module__ = sys._getframe(1).f_globals.get(
2856
+ "__name__", "__main__"
2857
+ )
2858
+
2859
+ # We do it here for proper warnings with meaningful stacklevel.
2860
+ cmp = attributes_arguments.pop("cmp", None)
2861
+ (
2862
+ attributes_arguments["eq"],
2863
+ attributes_arguments["order"],
2864
+ ) = _determine_attrs_eq_order(
2865
+ cmp,
2866
+ attributes_arguments.get("eq"),
2867
+ attributes_arguments.get("order"),
2868
+ True,
2869
+ )
2870
+
2871
+ cls = _attrs(these=cls_dict, **attributes_arguments)(type_)
2872
+ # Only add type annotations now or "_attrs()" will complain:
2873
+ cls.__annotations__ = {
2874
+ k: v.type for k, v in cls_dict.items() if v.type is not None
2875
+ }
2876
+ return cls
2877
+
2878
+
2879
+ # These are required by within this module so we define them here and merely
2880
+ # import into .validators / .converters.
2881
+
2882
+
2883
+ @attrs(slots=True, unsafe_hash=True)
2884
+ class _AndValidator:
2885
+ """
2886
+ Compose many validators to a single one.
2887
+ """
2888
+
2889
+ _validators = attrib()
2890
+
2891
+ def __call__(self, inst, attr, value):
2892
+ for v in self._validators:
2893
+ v(inst, attr, value)
2894
+
2895
+
2896
+ def and_(*validators):
2897
+ """
2898
+ A validator that composes multiple validators into one.
2899
+
2900
+ When called on a value, it runs all wrapped validators.
2901
+
2902
+ Args:
2903
+ validators (~collections.abc.Iterable[typing.Callable]):
2904
+ Arbitrary number of validators.
2905
+
2906
+ .. versionadded:: 17.1.0
2907
+ """
2908
+ vals = []
2909
+ for validator in validators:
2910
+ vals.extend(
2911
+ validator._validators
2912
+ if isinstance(validator, _AndValidator)
2913
+ else [validator]
2914
+ )
2915
+
2916
+ return _AndValidator(tuple(vals))
2917
+
2918
+
2919
+ def pipe(*converters):
2920
+ """
2921
+ A converter that composes multiple converters into one.
2922
+
2923
+ When called on a value, it runs all wrapped converters, returning the
2924
+ *last* value.
2925
+
2926
+ Type annotations will be inferred from the wrapped converters', if they
2927
+ have any.
2928
+
2929
+ converters (~collections.abc.Iterable[typing.Callable]):
2930
+ Arbitrary number of converters.
2931
+
2932
+ .. versionadded:: 20.1.0
2933
+ """
2934
+
2935
+ def pipe_converter(val, inst, field):
2936
+ for c in converters:
2937
+ val = c(val, inst, field) if isinstance(c, Converter) else c(val)
2938
+
2939
+ return val
2940
+
2941
+ if not converters:
2942
+ # If the converter list is empty, pipe_converter is the identity.
2943
+ A = typing.TypeVar("A")
2944
+ pipe_converter.__annotations__.update({"val": A, "return": A})
2945
+ else:
2946
+ # Get parameter type from first converter.
2947
+ t = _AnnotationExtractor(converters[0]).get_first_param_type()
2948
+ if t:
2949
+ pipe_converter.__annotations__["val"] = t
2950
+
2951
+ last = converters[-1]
2952
+ if not PY_3_11_PLUS and isinstance(last, Converter):
2953
+ last = last.__call__
2954
+
2955
+ # Get return type from last converter.
2956
+ rt = _AnnotationExtractor(last).get_return_type()
2957
+ if rt:
2958
+ pipe_converter.__annotations__["return"] = rt
2959
+
2960
+ return Converter(pipe_converter, takes_self=True, takes_field=True)
parrot/lib/python3.10/site-packages/attr/filters.py ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ """
4
+ Commonly useful filters for `attrs.asdict` and `attrs.astuple`.
5
+ """
6
+
7
+ from ._make import Attribute
8
+
9
+
10
+ def _split_what(what):
11
+ """
12
+ Returns a tuple of `frozenset`s of classes and attributes.
13
+ """
14
+ return (
15
+ frozenset(cls for cls in what if isinstance(cls, type)),
16
+ frozenset(cls for cls in what if isinstance(cls, str)),
17
+ frozenset(cls for cls in what if isinstance(cls, Attribute)),
18
+ )
19
+
20
+
21
+ def include(*what):
22
+ """
23
+ Create a filter that only allows *what*.
24
+
25
+ Args:
26
+ what (list[type, str, attrs.Attribute]):
27
+ What to include. Can be a type, a name, or an attribute.
28
+
29
+ Returns:
30
+ Callable:
31
+ A callable that can be passed to `attrs.asdict`'s and
32
+ `attrs.astuple`'s *filter* argument.
33
+
34
+ .. versionchanged:: 23.1.0 Accept strings with field names.
35
+ """
36
+ cls, names, attrs = _split_what(what)
37
+
38
+ def include_(attribute, value):
39
+ return (
40
+ value.__class__ in cls
41
+ or attribute.name in names
42
+ or attribute in attrs
43
+ )
44
+
45
+ return include_
46
+
47
+
48
+ def exclude(*what):
49
+ """
50
+ Create a filter that does **not** allow *what*.
51
+
52
+ Args:
53
+ what (list[type, str, attrs.Attribute]):
54
+ What to exclude. Can be a type, a name, or an attribute.
55
+
56
+ Returns:
57
+ Callable:
58
+ A callable that can be passed to `attrs.asdict`'s and
59
+ `attrs.astuple`'s *filter* argument.
60
+
61
+ .. versionchanged:: 23.3.0 Accept field name string as input argument
62
+ """
63
+ cls, names, attrs = _split_what(what)
64
+
65
+ def exclude_(attribute, value):
66
+ return not (
67
+ value.__class__ in cls
68
+ or attribute.name in names
69
+ or attribute in attrs
70
+ )
71
+
72
+ return exclude_
parrot/lib/python3.10/site-packages/attr/setters.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ """
4
+ Commonly used hooks for on_setattr.
5
+ """
6
+
7
+ from . import _config
8
+ from .exceptions import FrozenAttributeError
9
+
10
+
11
+ def pipe(*setters):
12
+ """
13
+ Run all *setters* and return the return value of the last one.
14
+
15
+ .. versionadded:: 20.1.0
16
+ """
17
+
18
+ def wrapped_pipe(instance, attrib, new_value):
19
+ rv = new_value
20
+
21
+ for setter in setters:
22
+ rv = setter(instance, attrib, rv)
23
+
24
+ return rv
25
+
26
+ return wrapped_pipe
27
+
28
+
29
+ def frozen(_, __, ___):
30
+ """
31
+ Prevent an attribute to be modified.
32
+
33
+ .. versionadded:: 20.1.0
34
+ """
35
+ raise FrozenAttributeError()
36
+
37
+
38
+ def validate(instance, attrib, new_value):
39
+ """
40
+ Run *attrib*'s validator on *new_value* if it has one.
41
+
42
+ .. versionadded:: 20.1.0
43
+ """
44
+ if _config._run_validators is False:
45
+ return new_value
46
+
47
+ v = attrib.validator
48
+ if not v:
49
+ return new_value
50
+
51
+ v(instance, attrib, new_value)
52
+
53
+ return new_value
54
+
55
+
56
+ def convert(instance, attrib, new_value):
57
+ """
58
+ Run *attrib*'s converter -- if it has one -- on *new_value* and return the
59
+ result.
60
+
61
+ .. versionadded:: 20.1.0
62
+ """
63
+ c = attrib.converter
64
+ if c:
65
+ # This can be removed once we drop 3.8 and use attrs.Converter instead.
66
+ from ._make import Converter
67
+
68
+ if not isinstance(c, Converter):
69
+ return c(new_value)
70
+
71
+ return c(new_value, instance, attrib)
72
+
73
+ return new_value
74
+
75
+
76
+ # Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
77
+ # Sphinx's autodata stopped working, so the docstring is inlined in the API
78
+ # docs.
79
+ NO_OP = object()
parrot/lib/python3.10/site-packages/attr/setters.pyi ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Any, NewType, NoReturn, TypeVar
2
+
3
+ from . import Attribute
4
+ from attrs import _OnSetAttrType
5
+
6
+ _T = TypeVar("_T")
7
+
8
+ def frozen(
9
+ instance: Any, attribute: Attribute[Any], new_value: Any
10
+ ) -> NoReturn: ...
11
+ def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
12
+ def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
13
+
14
+ # convert is allowed to return Any, because they can be chained using pipe.
15
+ def convert(
16
+ instance: Any, attribute: Attribute[Any], new_value: Any
17
+ ) -> Any: ...
18
+
19
+ _NoOpType = NewType("_NoOpType", object)
20
+ NO_OP: _NoOpType
parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/METADATA ADDED
@@ -0,0 +1,419 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.2
2
+ Name: deepspeed
3
+ Version: 0.14.0
4
+ Summary: DeepSpeed library
5
+ Home-page: http://deepspeed.ai
6
+ Author: DeepSpeed Team
7
+ Author-email: deepspeed-info@microsoft.com
8
+ License: Apache Software License 2.0
9
+ Project-URL: Documentation, https://deepspeed.readthedocs.io
10
+ Project-URL: Source, https://github.com/microsoft/DeepSpeed
11
+ Classifier: Programming Language :: Python :: 3.6
12
+ Classifier: Programming Language :: Python :: 3.7
13
+ Classifier: Programming Language :: Python :: 3.8
14
+ Classifier: Programming Language :: Python :: 3.9
15
+ Classifier: Programming Language :: Python :: 3.10
16
+ Description-Content-Type: text/markdown
17
+ Requires-Dist: hjson
18
+ Requires-Dist: ninja
19
+ Requires-Dist: numpy
20
+ Requires-Dist: packaging>=20.0
21
+ Requires-Dist: psutil
22
+ Requires-Dist: py-cpuinfo
23
+ Requires-Dist: pydantic
24
+ Requires-Dist: pynvml
25
+ Requires-Dist: torch
26
+ Requires-Dist: tqdm
27
+ Provides-Extra: 1bit
28
+ Provides-Extra: 1bit-mpi
29
+ Requires-Dist: mpi4py; extra == "1bit-mpi"
30
+ Provides-Extra: readthedocs
31
+ Requires-Dist: autodoc_pydantic; extra == "readthedocs"
32
+ Requires-Dist: docutils<0.18; extra == "readthedocs"
33
+ Requires-Dist: hjson; extra == "readthedocs"
34
+ Requires-Dist: packaging; extra == "readthedocs"
35
+ Requires-Dist: psutil; extra == "readthedocs"
36
+ Requires-Dist: py-cpuinfo; extra == "readthedocs"
37
+ Requires-Dist: pydantic<2.0.0; extra == "readthedocs"
38
+ Requires-Dist: recommonmark; extra == "readthedocs"
39
+ Requires-Dist: sphinx_rtd_theme; extra == "readthedocs"
40
+ Requires-Dist: torch; extra == "readthedocs"
41
+ Requires-Dist: tqdm; extra == "readthedocs"
42
+ Provides-Extra: dev
43
+ Requires-Dist: accelerate; extra == "dev"
44
+ Requires-Dist: clang-format==16.0.2; extra == "dev"
45
+ Requires-Dist: deepspeed-kernels; sys_platform == "linux" and extra == "dev"
46
+ Requires-Dist: docutils<0.18; extra == "dev"
47
+ Requires-Dist: future; extra == "dev"
48
+ Requires-Dist: importlib-metadata>=4; extra == "dev"
49
+ Requires-Dist: mup; extra == "dev"
50
+ Requires-Dist: pre-commit>=2.20.0; extra == "dev"
51
+ Requires-Dist: pytest<=8.0.0; extra == "dev"
52
+ Requires-Dist: pytest-forked; extra == "dev"
53
+ Requires-Dist: pytest-randomly; extra == "dev"
54
+ Requires-Dist: pytest-xdist; extra == "dev"
55
+ Requires-Dist: recommonmark; extra == "dev"
56
+ Requires-Dist: sphinx; extra == "dev"
57
+ Requires-Dist: sphinx-rtd-theme; extra == "dev"
58
+ Requires-Dist: tensorboard; extra == "dev"
59
+ Requires-Dist: torchvision; extra == "dev"
60
+ Requires-Dist: transformers>=4.32.1; extra == "dev"
61
+ Requires-Dist: wandb; extra == "dev"
62
+ Provides-Extra: autotuning
63
+ Requires-Dist: tabulate; extra == "autotuning"
64
+ Provides-Extra: autotuning-ml
65
+ Requires-Dist: hjson; extra == "autotuning-ml"
66
+ Requires-Dist: tabulate; extra == "autotuning-ml"
67
+ Requires-Dist: xgboost; extra == "autotuning-ml"
68
+ Provides-Extra: sparse-attn
69
+ Requires-Dist: triton==1.0.0; extra == "sparse-attn"
70
+ Provides-Extra: sparse
71
+ Requires-Dist: neural-compressor==2.1.0; extra == "sparse"
72
+ Provides-Extra: inf
73
+ Requires-Dist: google; extra == "inf"
74
+ Requires-Dist: lm-eval==0.3.0; extra == "inf"
75
+ Requires-Dist: protobuf; extra == "inf"
76
+ Requires-Dist: qtorch; extra == "inf"
77
+ Requires-Dist: safetensors; extra == "inf"
78
+ Requires-Dist: sentencepiece; extra == "inf"
79
+ Requires-Dist: transformers>=4.32.1; extra == "inf"
80
+ Provides-Extra: sd
81
+ Requires-Dist: diffusers>=0.25.0; extra == "sd"
82
+ Requires-Dist: triton>=2.1.0; extra == "sd"
83
+ Provides-Extra: triton
84
+ Requires-Dist: triton==2.1.0; extra == "triton"
85
+ Provides-Extra: all
86
+ Requires-Dist: torch; extra == "all"
87
+ Requires-Dist: importlib-metadata>=4; extra == "all"
88
+ Requires-Dist: torchvision; extra == "all"
89
+ Requires-Dist: sphinx; extra == "all"
90
+ Requires-Dist: qtorch; extra == "all"
91
+ Requires-Dist: pytest-forked; extra == "all"
92
+ Requires-Dist: mup; extra == "all"
93
+ Requires-Dist: autodoc_pydantic; extra == "all"
94
+ Requires-Dist: pytest<=8.0.0; extra == "all"
95
+ Requires-Dist: transformers>=4.32.1; extra == "all"
96
+ Requires-Dist: triton==1.0.0; extra == "all"
97
+ Requires-Dist: pre-commit>=2.20.0; extra == "all"
98
+ Requires-Dist: triton>=2.1.0; extra == "all"
99
+ Requires-Dist: sphinx_rtd_theme; extra == "all"
100
+ Requires-Dist: lm-eval==0.3.0; extra == "all"
101
+ Requires-Dist: psutil; extra == "all"
102
+ Requires-Dist: wandb; extra == "all"
103
+ Requires-Dist: accelerate; extra == "all"
104
+ Requires-Dist: tqdm; extra == "all"
105
+ Requires-Dist: py-cpuinfo; extra == "all"
106
+ Requires-Dist: tensorboard; extra == "all"
107
+ Requires-Dist: packaging; extra == "all"
108
+ Requires-Dist: hjson; extra == "all"
109
+ Requires-Dist: protobuf; extra == "all"
110
+ Requires-Dist: pydantic<2.0.0; extra == "all"
111
+ Requires-Dist: tabulate; extra == "all"
112
+ Requires-Dist: sentencepiece; extra == "all"
113
+ Requires-Dist: recommonmark; extra == "all"
114
+ Requires-Dist: clang-format==16.0.2; extra == "all"
115
+ Requires-Dist: xgboost; extra == "all"
116
+ Requires-Dist: pytest-randomly; extra == "all"
117
+ Requires-Dist: google; extra == "all"
118
+ Requires-Dist: triton==2.1.0; extra == "all"
119
+ Requires-Dist: deepspeed-kernels; sys_platform == "linux" and extra == "all"
120
+ Requires-Dist: sphinx-rtd-theme; extra == "all"
121
+ Requires-Dist: safetensors; extra == "all"
122
+ Requires-Dist: mpi4py; extra == "all"
123
+ Requires-Dist: docutils<0.18; extra == "all"
124
+ Requires-Dist: future; extra == "all"
125
+ Requires-Dist: diffusers>=0.25.0; extra == "all"
126
+ Requires-Dist: pytest-xdist; extra == "all"
127
+ Requires-Dist: neural-compressor==2.1.0; extra == "all"
128
+ Dynamic: author
129
+ Dynamic: author-email
130
+ Dynamic: classifier
131
+ Dynamic: description
132
+ Dynamic: description-content-type
133
+ Dynamic: home-page
134
+ Dynamic: license
135
+ Dynamic: project-url
136
+ Dynamic: provides-extra
137
+ Dynamic: requires-dist
138
+ Dynamic: summary
139
+
140
+ [![License Apache 2.0](https://badgen.net/badge/license/apache2.0/blue)](https://github.com/Microsoft/DeepSpeed/blob/master/LICENSE)
141
+ [![PyPI version](https://badge.fury.io/py/deepspeed.svg)](https://pypi.org/project/deepspeed/)
142
+ [![Downloads](https://static.pepy.tech/badge/deepspeed)](https://pepy.tech/project/deepspeed)
143
+ [![Build](https://badgen.net/badge/build/check-status/blue)](#build-pipeline-status)
144
+ [![Twitter](https://img.shields.io/twitter/follow/MSFTDeepSpeed)](https://twitter.com/intent/follow?screen_name=MSFTDeepSpeed)
145
+ [![Japanese Twitter](https://img.shields.io/badge/%E6%97%A5%E6%9C%AC%E8%AA%9ETwitter-%40MSFTDeepSpeedJP-blue)](https://twitter.com/MSFTDeepSpeedJP)
146
+ [![Chinese Zhihu](https://img.shields.io/badge/%E7%9F%A5%E4%B9%8E-%E5%BE%AE%E8%BD%AFDeepSpeed-blue)](https://www.zhihu.com/people/deepspeed)
147
+
148
+
149
+ <div align="center">
150
+ <img src="docs/assets/images/DeepSpeed_light.svg#gh-light-mode-only" width="400px">
151
+ <img src="docs/assets/images/DeepSpeed_dark_transparent.svg#gh-dark-mode-only" width="400px">
152
+ </div>
153
+
154
+ ## Latest News
155
+ <b> <span style="color:orange" > DeepSpeed empowers ChatGPT-like model training with a single click, offering 15x speedup over SOTA RLHF systems with unprecedented cost reduction at all scales; [learn how](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-chat)</span>.</b>
156
+
157
+ * [2024/03] [DeepSpeed-FP6: The power of FP6-Centric Serving for Large Language Models](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-fp6/03-05-2024)
158
+ * [2024/01] [DeepSpeed-FastGen: Introducing Mixtral, Phi-2, and Falcon support with major performance and feature enhancements.](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-fastgen/2024-01-19)
159
+ * [2023/11] [Llama 2 Inference on 4th Gen Intel® Xeon® Scalable Processor with DeepSpeed](https://github.com/microsoft/DeepSpeed/tree/master/blogs/intel-inference) [[Intel version]](https://www.intel.com/content/www/us/en/developer/articles/technical/xllama-2-on-xeon-scalable-processor-with-deepspeed.html)
160
+ * [2023/11] [DeepSpeed ZeRO-Offload++: 6x Higher Training Throughput via Collaborative CPU/GPU Twin-Flow](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-offloadpp)
161
+ * [2023/11] [DeepSpeed-FastGen: High-throughput Text Generation for LLMs via MII and DeepSpeed-Inference](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-fastgen) [[English](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-fastgen)] [[中文](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-fastgen/chinese/README.md)] [[日本語](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-fastgen/japanese/README.md)]
162
+ * [2023/10] [DeepSpeed-VisualChat: Improve Your Chat Experience with Multi-Round Multi-Image Inputs](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-visualchat/10-03-2023/README.md) [[English](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-visualchat/10-03-2023/README.md)] [[中文](https://github.com/microsoft/DeepSpeed/blob/master/blogs/deepspeed-visualchat/10-03-2023/README-Chinese.md)] [[日本語](https://github.com/microsoft/DeepSpeed/blob/master/blogs/deepspeed-visualchat/10-03-2023/README-Japanese.md)]
163
+ * [2023/09] Announcing the DeepSpeed4Science Initiative: Enabling large-scale scientific discovery through sophisticated AI system technologies [[DeepSpeed4Science website](https://deepspeed4science.ai/)] [[Tutorials](https://www.deepspeed.ai/deepspeed4science/)] [[White paper](https://arxiv.org/abs/2310.04610)] [[Blog](https://www.microsoft.com/en-us/research/blog/announcing-the-deepspeed4science-initiative-enabling-large-scale-scientific-discovery-through-sophisticated-ai-system-technologies/)] [[中文](https://github.com/microsoft/DeepSpeed/blob/master/blogs/deepspeed4science/chinese/README.md)] [[日本語](https://github.com/microsoft/DeepSpeed/blob/master/blogs/deepspeed4science/japanese/README.md)]
164
+
165
+
166
+ <!-- NOTE: we must use html for news items otherwise links will be broken in the 'more news' section -->
167
+ <details>
168
+ <summary>More news</summary>
169
+ <ul>
170
+ <li>[2023/08] <a href="https://github.com/microsoft/DeepSpeedExamples/blob/master/inference/huggingface/zero_inference/README.md">DeepSpeed ZeRO-Inference: 20x faster inference through weight quantization and KV cache offloading</a></li>
171
+
172
+ <li>[2023/08] <a href="https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-chat/ds-chat-release-8-31/README.md">DeepSpeed-Chat: Llama/Llama-2 system support, efficiency boost, and training stability improvements</a></li>
173
+
174
+ <li>[2023/08] <a href="https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-ulysses">DeepSpeed Ulysses: System Optimizations for Enabling Training of Extreme Long Sequence Transformer Models</a> [<a href="https://github.com/microsoft/DeepSpeed/blob/master/blogs/deepspeed-ulysses/chinese/README.md">中文</a>] [<a href="https://github.com/microsoft/DeepSpeed/blob/master/blogs/deepspeed-ulysses/japanese/README.md">日本語</a>]</li>
175
+
176
+ <li>[2023/06] <a href="https://www.microsoft.com/en-us/research/blog/deepspeed-zero-a-leap-in-speed-for-llm-and-chat-model-training-with-4x-less-communication/">ZeRO++: A leap in speed for LLM and chat model training with 4X less communication</a> [<a href="https://www.microsoft.com/en-us/research/blog/deepspeed-zero-a-leap-in-speed-for-llm-and-chat-model-training-with-4x-less-communication/">English</a>] [<a href="https://github.com/microsoft/DeepSpeed/blob/master/blogs/zeropp/chinese/README.md">中文</a>] [<a href="https://github.com/microsoft/DeepSpeed/blob/master/blogs/zeropp/japanese/README.md">日本語</a>]</li>
177
+ </ul>
178
+ </details>
179
+
180
+ ---
181
+
182
+ # Extreme Speed and Scale for DL Training and Inference
183
+
184
+ ***[DeepSpeed](https://www.deepspeed.ai/) enables world's most powerful language models like [MT-530B](https://www.microsoft.com/en-us/research/blog/using-deepspeed-and-megatron-to-train-megatron-turing-nlg-530b-the-worlds-largest-and-most-powerful-generative-language-model/) and [BLOOM](https://huggingface.co/blog/bloom-megatron-deepspeed)***. It is an easy-to-use deep learning optimization software suite that powers unprecedented scale and speed for both training and inference. With DeepSpeed you can:
185
+
186
+ * Train/Inference dense or sparse models with billions or trillions of parameters
187
+ * Achieve excellent system throughput and efficiently scale to thousands of GPUs
188
+ * Train/Inference on resource constrained GPU systems
189
+ * Achieve unprecedented low latency and high throughput for inference
190
+ * Achieve extreme compression for an unparalleled inference latency and model size reduction with low costs
191
+
192
+ ---
193
+
194
+ # DeepSpeed's four innovation pillars
195
+
196
+ <img src="docs/assets/images/DeepSpeed-pillars.png" width="800px">
197
+
198
+
199
+ ## DeepSpeed-Training
200
+
201
+ DeepSpeed offers a confluence of system innovations, that has made large scale DL training effective, and efficient, greatly improved ease of use, and redefined the DL training landscape in terms of scale that is possible. These innovations such as ZeRO, 3D-Parallelism, DeepSpeed-MoE, ZeRO-Infinity, etc. fall under the training pillar. Learn more: [DeepSpeed-Training](https://www.deepspeed.ai/training/)
202
+
203
+ ## DeepSpeed-Inference
204
+
205
+ DeepSpeed brings together innovations in parallelism technology such as tensor, pipeline, expert and ZeRO-parallelism, and combines them with high performance custom inference kernels, communication optimizations and heterogeneous memory technologies to enable inference at an unprecedented scale, while achieving unparalleled latency, throughput and cost reduction. This systematic composition of system technologies for inference falls under the inference pillar. Learn more: [DeepSpeed-Inference](https://www.deepspeed.ai/inference)
206
+
207
+
208
+ ## DeepSpeed-Compression
209
+
210
+ To further increase the inference efficiency, DeepSpeed offers easy-to-use and flexible-to-compose compression techniques for researchers and practitioners to compress their models while delivering faster speed, smaller model size, and significantly reduced compression cost. Moreover, SoTA innovations on compression like ZeroQuant and XTC are included under the compression pillar. Learn more: [DeepSpeed-Compression](https://www.deepspeed.ai/compression)
211
+
212
+ ## DeepSpeed4Science
213
+
214
+ In line with Microsoft's mission to solve humanity's most pressing challenges, the DeepSpeed team at Microsoft is responding to this opportunity by launching a new initiative called *DeepSpeed4Science*, aiming to build unique capabilities through AI system technology innovations to help domain experts to unlock today's biggest science mysteries. Learn more: [DeepSpeed4Science website](https://deepspeed4science.ai/) and [tutorials](https://www.deepspeed.ai/deepspeed4science/)
215
+
216
+ ---
217
+
218
+ # DeepSpeed Software Suite
219
+
220
+ ## DeepSpeed Library
221
+
222
+ The [DeepSpeed](https://github.com/microsoft/deepspeed) library (this repository) implements and packages the innovations and technologies in DeepSpeed Training, Inference and Compression Pillars into a single easy-to-use, open-sourced repository. It allows for easy composition of multitude of features within a single training, inference or compression pipeline. The DeepSpeed Library is heavily adopted by the DL community, and has been used to enable some of the most powerful models (see [DeepSpeed Adoption](#deepspeed-adoption)).
223
+
224
+ ## Model Implementations for Inference (MII)
225
+
226
+ [Model Implementations for Inference (MII)](https://github.com/microsoft/deepspeed-mii) is an open-sourced repository for making low-latency and high-throughput inference accessible to all data scientists by alleviating the need to apply complex system optimization techniques themselves. Out-of-box, MII offers support for thousands of widely used DL models, optimized using DeepSpeed-Inference, that can be deployed with a few lines of code, while achieving significant latency reduction compared to their vanilla open-sourced versions.
227
+
228
+ ## DeepSpeed on Azure
229
+
230
+ DeepSpeed users are diverse and have access to different environments. We recommend to try DeepSpeed on Azure as it is the simplest and easiest method. The recommended method to try DeepSpeed on Azure is through AzureML [recipes](https://github.com/Azure/azureml-examples/tree/main/v1/python-sdk/workflows/train/deepspeed). The job submission and data preparation scripts have been made available [here](https://github.com/microsoft/Megatron-DeepSpeed/tree/main/examples_deepspeed/azureml). For more details on how to use DeepSpeed on Azure, please follow the [Azure tutorial](https://www.deepspeed.ai/tutorials/azure/).
231
+
232
+ ---
233
+
234
+ # DeepSpeed Adoption
235
+
236
+ DeepSpeed is an important part of Microsoft’s new
237
+ [AI at Scale](https://www.microsoft.com/en-us/research/project/ai-at-scale/)
238
+ initiative to enable next-generation AI capabilities at scale, where you can find more
239
+ information [here](https://innovation.microsoft.com/en-us/exploring-ai-at-scale).
240
+
241
+ DeepSpeed has been used to train many different large-scale models, below is a list of several examples that we are aware of (if you'd like to include your model please submit a PR):
242
+
243
+ * [Megatron-Turing NLG (530B)](https://www.microsoft.com/en-us/research/blog/using-deepspeed-and-megatron-to-train-megatron-turing-nlg-530b-the-worlds-largest-and-most-powerful-generative-language-model/)
244
+ * [Jurassic-1 (178B)](https://uploads-ssl.webflow.com/60fd4503684b466578c0d307/61138924626a6981ee09caf6_jurassic_tech_paper.pdf)
245
+ * [BLOOM (176B)](https://huggingface.co/blog/bloom-megatron-deepspeed)
246
+ * [GLM (130B)](https://github.com/THUDM/GLM-130B)
247
+ * [xTrimoPGLM (100B)](https://www.biorxiv.org/content/10.1101/2023.07.05.547496v2)
248
+ * [YaLM (100B)](https://github.com/yandex/YaLM-100B)
249
+ * [GPT-NeoX (20B)](https://github.com/EleutherAI/gpt-neox)
250
+ * [AlexaTM (20B)](https://www.amazon.science/blog/20b-parameter-alexa-model-sets-new-marks-in-few-shot-learning)
251
+ * [Turing NLG (17B)](https://www.microsoft.com/en-us/research/blog/turing-nlg-a-17-billion-parameter-language-model-by-microsoft/)
252
+ * [METRO-LM (5.4B)](https://arxiv.org/pdf/2204.06644.pdf)
253
+
254
+ DeepSpeed has been integrated with several different popular open-source DL frameworks such as:
255
+
256
+ | | Documentation |
257
+ | ---------------------------------------------------------------------------------------------- | -------------------------------------------- |
258
+ | <img src="docs/assets/images/transformers-light.png#gh-light-mode-only" width="250px"><img src="docs/assets/images/transformers-dark.png#gh-dark-mode-only" width="250px"> | [Transformers with DeepSpeed](https://huggingface.co/docs/transformers/main/main_classes/deepspeed) |
259
+ | <img src="docs/assets/images/accelerate-light.png#gh-light-mode-only" width="250px"><img src="docs/assets/images/accelerate-dark.png#gh-dark-mode-only" width="250px"> | [Accelerate with DeepSpeed](https://huggingface.co/docs/accelerate/usage_guides/deepspeed) |
260
+ | <img src="docs/assets/images/lightning-light.svg#gh-light-mode-only" width="200px"><img src="docs/assets/images/lightning-dark.svg#gh-dark-mode-only" width="200px"> | [Lightning with DeepSpeed](https://lightning.ai/docs/pytorch/stable/advanced/model_parallel.html#deepspeed) |
261
+ | <img src="docs/assets/images/mosaicml.svg" width="200px"> | [MosaicML with DeepSpeed](https://docs.mosaicml.com/projects/composer/en/latest/trainer/using_the_trainer.html?highlight=deepspeed#deepspeed-integration) |
262
+ | <img src="docs/assets/images/determined.svg" width="225px"> | [Determined with DeepSpeed](https://docs.determined.ai/latest/training/apis-howto/deepspeed/overview.html) |
263
+ | <img src="https://user-images.githubusercontent.com/58739961/187154444-fce76639-ac8d-429b-9354-c6fac64b7ef8.jpg" width=150> | [MMEngine with DeepSpeed](https://mmengine.readthedocs.io/en/latest/common_usage/large_model_training.html#deepspeed) |
264
+
265
+ ---
266
+
267
+ # Build Pipeline Status
268
+
269
+ | Description | Status |
270
+ | ----------- | ------ |
271
+ | NVIDIA | [![nv-torch110-p40](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch110-p40.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch110-p40.yml) [![nv-torch110-v100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch110-v100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch110-v100.yml) [![nv-torch-latest-v100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch-latest-v100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch-latest-v100.yml) [![nv-h100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-h100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-h100.yml) [![nv-inference](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-inference.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-inference.yml) [![nv-nightly](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-nightly.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-nightly.yml) |
272
+ | AMD | [![amd-mi200](https://github.com/microsoft/DeepSpeed/actions/workflows/amd-mi200.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/amd-mi200.yml) |
273
+ | CPU | [![nv-torch-latest-cpu](https://github.com/microsoft/DeepSpeed/actions/workflows/cpu-torch-latest.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/cpu-torch-latest.yml) |
274
+ | PyTorch Nightly | [![nv-torch-nightly-v100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch-nightly-v100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch-nightly-v100.yml) |
275
+ | Integrations | [![nv-transformers-v100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-transformers-v100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-transformers-v100.yml) [![nv-lightning-v100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-lightning-v100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-lightning-v100.yml) [![nv-accelerate-v100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-accelerate-v100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-accelerate-v100.yml) [![nv-mii](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-mii.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-mii.yml) [![nv-ds-chat](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-ds-chat.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-ds-chat.yml) [![nv-sd](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-sd.yml/badge.svg)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-sd.yml) |
276
+ | Misc | [![Formatting](https://github.com/microsoft/DeepSpeed/actions/workflows/formatting.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/formatting.yml) [![pages-build-deployment](https://github.com/microsoft/DeepSpeed/actions/workflows/pages/pages-build-deployment/badge.svg)](https://github.com/microsoft/DeepSpeed/actions/workflows/pages/pages-build-deployment) [![Documentation Status](https://readthedocs.org/projects/deepspeed/badge/?version=latest)](https://deepspeed.readthedocs.io/en/latest/?badge=latest)[![python](https://github.com/microsoft/DeepSpeed/actions/workflows/python.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/python.yml) |
277
+
278
+ # Installation
279
+
280
+ The quickest way to get started with DeepSpeed is via pip, this will install
281
+ the latest release of DeepSpeed which is not tied to specific PyTorch or CUDA
282
+ versions. DeepSpeed includes several C++/CUDA extensions that we commonly refer
283
+ to as our 'ops'. By default, all of these extensions/ops will be built
284
+ just-in-time (JIT) using [torch's JIT C++ extension loader that relies on
285
+ ninja](https://pytorch.org/docs/stable/cpp_extension.html) to build and
286
+ dynamically link them at runtime.
287
+
288
+ ## Requirements
289
+ * [PyTorch](https://pytorch.org/) must be installed _before_ installing DeepSpeed.
290
+ * For full feature support we recommend a version of PyTorch that is >= 1.9 and ideally the latest PyTorch stable release.
291
+ * A CUDA or ROCm compiler such as [nvcc](https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/#introduction) or [hipcc](https://github.com/ROCm-Developer-Tools/HIPCC) used to compile C++/CUDA/HIP extensions.
292
+ * Specific GPUs we develop and test against are listed below, this doesn't mean your GPU will not work if it doesn't fall into this category it's just DeepSpeed is most well tested on the following:
293
+ * NVIDIA: Pascal, Volta, Ampere, and Hopper architectures
294
+ * AMD: MI100 and MI200
295
+
296
+ ## PyPI
297
+ We regularly push releases to [PyPI](https://pypi.org/project/deepspeed/) and encourage users to install from there in most cases.
298
+
299
+ ```bash
300
+ pip install deepspeed
301
+ ```
302
+
303
+ After installation, you can validate your install and see which extensions/ops
304
+ your machine is compatible with via the DeepSpeed environment report.
305
+
306
+ ```bash
307
+ ds_report
308
+ ```
309
+
310
+ If you would like to pre-install any of the DeepSpeed extensions/ops (instead
311
+ of JIT compiling) or install pre-compiled ops via PyPI please see our [advanced
312
+ installation instructions](https://www.deepspeed.ai/tutorials/advanced-install/).
313
+
314
+ ## Windows
315
+ DeepSpeed is partially supported on Windows. On Windows you can build a wheel with the following steps; currently only inference mode is supported.
316
+ 1. Install pytorch, such as pytorch 1.8 + cuda 11.1
317
+ 2. Install visual cpp build tools, such as VS2019 C++ x64/x86 build tools
318
+ 3. Launch cmd console with Administrator privilege for creating required symlink folders
319
+ 4. Run `python setup.py bdist_wheel` to build wheel in `dist` folder
320
+
321
+ # Features
322
+
323
+ Please checkout [DeepSpeed-Training](https://www.deepspeed.ai/training), [DeepSpeed-Inference](https://www.deepspeed.ai/inference) and [DeepSpeed-Compression](https://www.deepspeed.ai/compression) pages for full set of features offered along each of these three pillars.
324
+
325
+ # Further Reading
326
+
327
+ All DeepSpeed documentation, tutorials, and blogs can be found on our website: [deepspeed.ai](https://www.deepspeed.ai/)
328
+
329
+
330
+ | | Description |
331
+ | ---------------------------------------------------------------------------------------------- | -------------------------------------------- |
332
+ | [Getting Started](https://www.deepspeed.ai/getting-started/) | First steps with DeepSpeed |
333
+ | [DeepSpeed JSON Configuration](https://www.deepspeed.ai/docs/config-json/) | Configuring DeepSpeed |
334
+ | [API Documentation](https://deepspeed.readthedocs.io/en/latest/) | Generated DeepSpeed API documentation |
335
+ | [Tutorials](https://www.deepspeed.ai/tutorials/) | Tutorials |
336
+ | [Blogs](https://www.deepspeed.ai/posts/) | Blogs |
337
+
338
+
339
+ # Contributing
340
+ DeepSpeed welcomes your contributions! Please see our
341
+ [contributing](CONTRIBUTING.md) guide for more details on formatting, testing,
342
+ etc.<br/>
343
+ Thanks so much to all of our amazing contributors!
344
+
345
+ <a href="https://github.com/microsoft/DeepSpeed/graphs/contributors">
346
+ <img src="https://contrib.rocks/image?repo=microsoft/DeepSpeed&r=" width="800px"/>
347
+ </a>
348
+
349
+ ## Contributor License Agreement
350
+ This project welcomes contributions and suggestions. Most contributions require you to
351
+ agree to a Contributor License Agreement (CLA) declaring that you have the right to, and
352
+ actually do, grant us the rights to use your contribution. For details, visit
353
+ https://cla.opensource.microsoft.com.
354
+
355
+ When you submit a pull request, a CLA bot will automatically determine whether you need
356
+ to provide a CLA and decorate the PR appropriately (e.g., status check, comment). Simply
357
+ follow the instructions provided by the bot. You will only need to do this once across
358
+ all repos using our CLA.
359
+
360
+ ## Code of Conduct
361
+ This project has adopted the [Microsoft Open Source Code of
362
+ Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the
363
+ [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact
364
+ [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
365
+
366
+ # Publications
367
+ 1. Samyam Rajbhandari, Jeff Rasley, Olatunji Ruwase, Yuxiong He. (2019) ZeRO: memory optimizations toward training trillion parameter models. [arXiv:1910.02054](https://arxiv.org/abs/1910.02054) and [In Proceedings of the International Conference for High Performance Computing, Networking, Storage and Analysis (SC '20)](https://dl.acm.org/doi/10.5555/3433701.3433727).
368
+ 2. Jeff Rasley, Samyam Rajbhandari, Olatunji Ruwase, and Yuxiong He. (2020) DeepSpeed: System Optimizations Enable Training Deep Learning Models with Over 100 Billion Parameters. [In Proceedings of the 26th ACM SIGKDD International Conference on Knowledge Discovery & Data Mining (KDD '20, Tutorial)](https://dl.acm.org/doi/10.1145/3394486.3406703).
369
+ 3. Minjia Zhang, Yuxiong He. (2020) Accelerating Training of Transformer-Based Language Models with Progressive Layer Dropping. [arXiv:2010.13369](https://arxiv.org/abs/2010.13369) and [NeurIPS 2020](https://proceedings.neurips.cc/paper/2020/hash/a1140a3d0df1c81e24ae954d935e8926-Abstract.html).
370
+ 4. Jie Ren, Samyam Rajbhandari, Reza Yazdani Aminabadi, Olatunji Ruwase, Shuangyan Yang, Minjia Zhang, Dong Li, Yuxiong He. (2021) ZeRO-Offload: Democratizing Billion-Scale Model Training. [arXiv:2101.06840](https://arxiv.org/abs/2101.06840) and [USENIX ATC 2021](https://www.usenix.org/conference/atc21/presentation/ren-jie). [[paper]](https://arxiv.org/abs/2101.06840) [[slides]](https://www.usenix.org/system/files/atc21_slides_ren-jie.pdf) [[blog]](https://www.microsoft.com/en-us/research/blog/deepspeed-extreme-scale-model-training-for-everyone/)
371
+ 5. Hanlin Tang, Shaoduo Gan, Ammar Ahmad Awan, Samyam Rajbhandari, Conglong Li, Xiangru Lian, Ji Liu, Ce Zhang, Yuxiong He. (2021) 1-bit Adam: Communication Efficient Large-Scale Training with Adam's Convergence Speed. [arXiv:2102.02888](https://arxiv.org/abs/2102.02888) and [ICML 2021](http://proceedings.mlr.press/v139/tang21a.html).
372
+ 6. Samyam Rajbhandari, Olatunji Ruwase, Jeff Rasley, Shaden Smith, Yuxiong He. (2021) ZeRO-Infinity: Breaking the GPU Memory Wall for Extreme Scale Deep Learning. [arXiv:2104.07857](https://arxiv.org/abs/2104.07857) and [SC 2021](https://dl.acm.org/doi/abs/10.1145/3458817.3476205). [[paper]](https://arxiv.org/abs/2104.07857) [[slides]](docs/assets/files/SC21-ZeRO-Infinity.pdf) [[blog]](https://www.microsoft.com/en-us/research/blog/zero-infinity-and-deepspeed-unlocking-unprecedented-model-scale-for-deep-learning-training/)
373
+ 7. Conglong Li, Ammar Ahmad Awan, Hanlin Tang, Samyam Rajbhandari, Yuxiong He. (2021) 1-bit LAMB: Communication Efficient Large-Scale Large-Batch Training with LAMB's Convergence Speed. [arXiv:2104.06069](https://arxiv.org/abs/2104.06069) and [HiPC 2022](https://hipc.org/advance-program/).
374
+ 8. Conglong Li, Minjia Zhang, Yuxiong He. (2021) The Stability-Efficiency Dilemma: Investigating Sequence Length Warmup for Training GPT Models. [arXiv:2108.06084](https://arxiv.org/abs/2108.06084) and [NeurIPS 2022](https://openreview.net/forum?id=JpZ5du_Kdh).
375
+ 9. Yucheng Lu, Conglong Li, Minjia Zhang, Christopher De Sa, Yuxiong He. (2022) Maximizing Communication Efficiency for Large-scale Training via 0/1 Adam. [arXiv:2202.06009](https://arxiv.org/abs/2202.06009).
376
+ 10. Samyam Rajbhandari, Conglong Li, Zhewei Yao, Minjia Zhang, Reza Yazdani Aminabadi, Ammar Ahmad Awan, Jeff Rasley, Yuxiong He. (2022) DeepSpeed-MoE: Advancing Mixture-of-Experts Inference and Training to Power Next-Generation AI Scale [arXiv:2201.05596](https://arxiv.org/abs/2201.05596) and [ICML 2022](https://proceedings.mlr.press/v162/rajbhandari22a.html). [[pdf]](https://arxiv.org/abs/2201.05596) [[slides]](docs/assets/files/ICML-5mins.pdf) [[blog]](https://www.microsoft.com/en-us/research/blog/deepspeed-advancing-moe-inference-and-training-to-power-next-generation-ai-scale/)
377
+ 11. Shaden Smith, Mostofa Patwary, Brandon Norick, Patrick LeGresley, Samyam Rajbhandari, Jared Casper, Zhun Liu, Shrimai Prabhumoye, George Zerveas, Vijay Korthikanti, Elton Zhang, Rewon Child, Reza Yazdani Aminabadi, Julie Bernauer, Xia Song, Mohammad Shoeybi, Yuxiong He, Michael Houston, Saurabh Tiwary, Bryan Catanzaro. (2022) Using DeepSpeed and Megatron to Train Megatron-Turing NLG 530B, A Large-Scale Generative Language Model [arXiv:2201.11990](https://arxiv.org/abs/2201.11990).
378
+ 12. Xiaoxia Wu, Zhewei Yao, Minjia Zhang, Conglong Li, Yuxiong He. (2022) Extreme Compression for Pre-trained Transformers Made Simple and Efficient. [arXiv:2206.01859](https://arxiv.org/abs/2206.01859) and [NeurIPS 2022](https://openreview.net/forum?id=xNeAhc2CNAl).
379
+ 13. Zhewei Yao, Reza Yazdani Aminabadi, Minjia Zhang, Xiaoxia Wu, Conglong Li, Yuxiong He. (2022) ZeroQuant: Efficient and Affordable Post-Training Quantization for Large-Scale Transformers. [arXiv:2206.01861](https://arxiv.org/abs/2206.01861) and [NeurIPS 2022](https://openreview.net/forum?id=f-fVCElZ-G1) [[slides]](docs/assets/files/zeroquant_series.pdf) [[blog]](https://www.microsoft.com/en-us/research/blog/deepspeed-compression-a-composable-library-for-extreme-compression-and-zero-cost-quantization/)
380
+ 14. Reza Yazdani Aminabadi, Samyam Rajbhandari, Minjia Zhang, Ammar Ahmad Awan, Cheng Li, Du Li, Elton Zheng, Jeff Rasley, Shaden Smith, Olatunji Ruwase, Yuxiong He. (2022) DeepSpeed Inference: Enabling Efficient Inference of Transformer Models at Unprecedented Scale. [arXiv:2207.00032](https://arxiv.org/abs/2207.00032) and [SC 2022](https://dl.acm.org/doi/abs/10.5555/3571885.3571946). [[paper]](https://arxiv.org/abs/2207.00032) [[slides]](docs/assets/files/sc22-ds-inference.pdf) [[blog]](https://www.microsoft.com/en-us/research/blog/deepspeed-accelerating-large-scale-model-inference-and-training-via-system-optimizations-and-compression/)
381
+ 15. Zhewei Yao, Xiaoxia Wu, Conglong Li, Connor Holmes, Minjia Zhang, Cheng Li, Yuxiong He. (2022) Random-LTD: Random and Layerwise Token Dropping Brings Efficient Training for Large-scale Transformers. [arXiv:2211.11586](https://arxiv.org/abs/2211.11586).
382
+ 16. Conglong Li, Zhewei Yao, Xiaoxia Wu, Minjia Zhang, Yuxiong He. (2022) DeepSpeed Data Efficiency: Improving Deep Learning Model Quality and Training Efficiency via Efficient Data Sampling and Routing. [arXiv:2212.03597](https://arxiv.org/abs/2212.03597) [ENLSP2023 Workshop at NeurIPS2023](https://neurips2023-enlsp.github.io/)
383
+ 17. Xiaoxia Wu, Cheng Li, Reza Yazdani Aminabadi, Zhewei Yao, Yuxiong He. (2023) Understanding INT4 Quantization for Transformer Models: Latency Speedup, Composability, and Failure Cases. [arXiv:2301.12017](https://arxiv.org/abs/2301.12017) and [ICML2023](https://icml.cc/Conferences/2023).
384
+ 18. Syed Zawad, Cheng Li, Zhewei Yao, Elton Zheng, Yuxiong He, Feng Yan. (2023) DySR: Adaptive Super-Resolution via Algorithm and System Co-design. [ICLR:2023](https://openreview.net/forum?id=Pgtn4l6eKjv).
385
+ 19. Sheng Shen, Zhewei Yao, Chunyuan Li, Trevor Darrell, Kurt Keutzer, Yuxiong He. (2023) Scaling Vision-Language Models with Sparse Mixture of Experts. [arXiv:2303.07226](https://arxiv.org/abs/2303.07226) and [Finding at EMNLP2023](https://2023.emnlp.org/).
386
+ 20. Quentin Anthony, Ammar Ahmad Awan, Jeff Rasley, Yuxiong He, Aamir Shafi, Mustafa Abduljabbar, Hari Subramoni, Dhabaleswar Panda. (2023) MCR-DL: Mix-and-Match Communication Runtime for Deep Learning [arXiv:2303.08374](https://arxiv.org/abs/2303.08374) and will appear at IPDPS 2023.
387
+ 21. Siddharth Singh, Olatunji Ruwase, Ammar Ahmad Awan, Samyam Rajbhandari, Yuxiong He, Abhinav Bhatele. (2023) A Hybrid Tensor-Expert-Data Parallelism Approach to Optimize Mixture-of-Experts Training [arXiv:2303.06318](https://arxiv.org/abs/2303.06318) and will appear at ICS 2023.
388
+ 22. Guanhua Wang, Heyang Qin, Sam Ade Jacobs, Xiaoxia Wu, Connor Holmes, Zhewei Yao, Samyam Rajbhandari, Olatunji Ruwase, Feng Yan, Lei Yang, Yuxiong He. (2023) ZeRO++: Extremely Efficient Collective Communication for Giant Model Training [arXiv:2306.10209](https://arxiv.org/abs/2306.10209) and [ML for Sys Workshop at NeurIPS2023](http://mlforsystems.org/) [[blog]](https://www.microsoft.com/en-us/research/blog/deepspeed-zero-a-leap-in-speed-for-llm-and-chat-model-training-with-4x-less-communication/)
389
+ 23. Zhewei Yao, Xiaoxia Wu, Cheng Li, Stephen Youn, Yuxiong He. (2023) ZeroQuant-V2: Exploring Post-training Quantization in LLMs from Comprehensive Study to Low Rank Compensation [arXiv:2303.08302](https://arxiv.org/abs/2303.08302) and [ENLSP2023 Workshop at NeurIPS2023](https://neurips2023-enlsp.github.io/) [[slides]](docs/assets/files/zeroquant_series.pdf)
390
+ 24. Pareesa Ameneh Golnari, Zhewei Yao, Yuxiong He. (2023) Selective Guidance: Are All the Denoising Steps of Guided Diffusion Important? [arXiv:2305.09847](https://arxiv.org/abs/2305.09847)
391
+ 25. Zhewei Yao, Reza Yazdani Aminabadi, Olatunji Ruwase, Samyam Rajbhandari, Xiaoxia Wu, Ammar Ahmad Awan, Jeff Rasley, Minjia Zhang, Conglong Li, Connor Holmes, Zhongzhu Zhou, Michael Wyatt, Molly Smith, Lev Kurilenko, Heyang Qin, Masahiro Tanaka, Shuai Che, Shuaiwen Leon Song, Yuxiong He. (2023) DeepSpeed-Chat: Easy, Fast and Affordable RLHF Training of ChatGPT-like Models at All Scales [arXiv:2308.01320](https://arxiv.org/abs/2308.01320).
392
+ 26. Xiaoxia Wu, Zhewei Yao, Yuxiong He. (2023) ZeroQuant-FP: A Leap Forward in LLMs Post-Training W4A8 Quantization Using Floating-Point Formats [arXiv:2307.09782](https://arxiv.org/abs/2307.09782) and [ENLSP2023 Workshop at NeurIPS2023](https://neurips2023-enlsp.github.io/) [[slides]](docs/assets/files/zeroquant_series.pdf)
393
+ 27. Zhewei Yao, Xiaoxia Wu, Conglong Li, Minjia Zhang, Heyang Qin, Olatunji Ruwase, Ammar Ahmad Awan, Samyam Rajbhandari, Yuxiong He. (2023) DeepSpeed-VisualChat: Multi-Round Multi-Image Interleave Chat via Multi-Modal Causal Attention [arXiv:2309.14327](https://arxiv.org/pdf/2309.14327.pdf)
394
+ 28. Shuaiwen Leon Song, Bonnie Kruft, Minjia Zhang, Conglong Li, Shiyang Chen, Chengming Zhang, Masahiro Tanaka, Xiaoxia Wu, Jeff Rasley, Ammar Ahmad Awan, Connor Holmes, Martin Cai, Adam Ghanem, Zhongzhu Zhou, Yuxiong He, et al. (2023) DeepSpeed4Science Initiative: Enabling Large-Scale Scientific Discovery through Sophisticated AI System Technologies [arXiv:2310.04610](https://arxiv.org/abs/2310.04610) [[blog]](https://www.microsoft.com/en-us/research/blog/announcing-the-deepspeed4science-initiative-enabling-large-scale-scientific-discovery-through-sophisticated-ai-system-technologies/)
395
+ 29. Zhewei Yao, Reza Yazdani Aminabadi, Stephen Youn, Xiaoxia Wu, Elton Zheng, Yuxiong He. (2023) ZeroQuant-HERO: Hardware-Enhanced Robust Optimized Post-Training Quantization Framework for W8A8 Transformers [arXiv:2310.17723](https://arxiv.org/abs/2310.17723)
396
+
397
+ 30. Xiaoxia Wu, Haojun Xia, Stephen Youn, Zhen Zheng, Shiyang Chen, Arash Bakhtiari, Michael Wyatt, Reza Yazdani Aminabadi, Yuxiong He, Olatunji Ruwase, Leon Song, Zhewei Yao (2023) ZeroQuant(4+2): Redefining LLMs Quantization with a New FP6-Centric Strategy for Diverse Generative Tasks [arXiv:2312.08583](https://arxiv.org/abs/2312.08583)
398
+
399
+ 31. Haojun Xia, Zhen Zheng, Xiaoxia Wu, Shiyang Chen, Zhewei Yao, Stephen Youn, Arash Bakhtiari, Michael Wyatt, Donglin Zhuang, Zhongzhu Zhou, Olatunji Ruwase, Yuxiong He, Shuaiwen Leon Song. (2024) FP6-LLM: Efficiently Serving Large Language Models Through FP6-Centric Algorithm-System Co-Design [arXiv:2401.14112](https://arxiv.org/abs/2401.14112)
400
+
401
+
402
+
403
+ # Videos
404
+ 1. DeepSpeed KDD 2020 Tutorial
405
+ 1. [Overview](https://www.youtube.com/watch?v=CaseqC45DNc&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=29)
406
+ 2. [ZeRO + large model training](https://www.youtube.com/watch?v=y4_bCiAsIAk&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=28)
407
+ 3. [17B T-NLG demo](https://www.youtube.com/watch?v=9V-ZbP92drg&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=27)
408
+ 4. [Fastest BERT training + RScan tuning](https://www.youtube.com/watch?v=o1K-ZG9F6u0&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=26)
409
+ 5. DeepSpeed hands on deep dive: [part 1](https://www.youtube.com/watch?v=_NOk-mBwDYg&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=92), [part 2](https://www.youtube.com/watch?v=sG6_c4VXLww&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=94), [part 3](https://www.youtube.com/watch?v=k9yPkBTayos&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=93)
410
+ 6. [FAQ](https://www.youtube.com/watch?v=nsHu6vEgPew&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=24)
411
+ 2. Microsoft Research Webinar
412
+ * Registration is free and all videos are available on-demand.
413
+ * [ZeRO & Fastest BERT: Increasing the scale and speed of deep learning training in DeepSpeed](https://note.microsoft.com/MSR-Webinar-DeepSpeed-Registration-On-Demand.html).
414
+ 3. [DeepSpeed on AzureML](https://youtu.be/yBVXR8G8Bg8)
415
+ 4. [Large Model Training and Inference with DeepSpeed // Samyam Rajbhandari // LLMs in Prod Conference](https://www.youtube.com/watch?v=cntxC3g22oU) [[slides]](docs/assets/files/presentation-mlops.pdf)
416
+ 5. Community Tutorials
417
+ * [DeepSpeed: All the tricks to scale to gigantic models (Mark Saroufim)](https://www.youtube.com/watch?v=pDGI668pNg0)
418
+ * [Turing-NLG, DeepSpeed and the ZeRO optimizer (Yannic Kilcher)](https://www.youtube.com/watch?v=tC01FRB0M7w)
419
+ * [Ultimate Guide To Scaling ML Models (The AI Epiphany)](https://www.youtube.com/watch?v=hc0u4avAkuM)
parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/RECORD ADDED
The diff for this file is too large to render. See raw diff
 
parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/REQUESTED ADDED
File without changes
parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (75.8.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ [pytest_randomly.random_seeder]
2
+ deepspeed = deepspeed.runtime.utils:set_random_seed
parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ deepspeed
parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2022 Phil Wang
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/METADATA ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: einops-exts
3
+ Version: 0.0.4
4
+ Summary: Einops Extensions
5
+ Home-page: https://github.com/lucidrains/einops-exts
6
+ Author: Phil Wang
7
+ Author-email: lucidrains@gmail.com
8
+ License: MIT
9
+ Keywords: artificial intelligence,deep learning,tensor manipulation
10
+ Classifier: Development Status :: 4 - Beta
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3.6
15
+ Description-Content-Type: text/markdown
16
+ License-File: LICENSE
17
+ Requires-Dist: einops (>=0.4)
18
+
parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/RECORD ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ einops_exts-0.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2
+ einops_exts-0.0.4.dist-info/LICENSE,sha256=xZDkKtpHE2TPCAeqKe1fjdpKernl1YW-d01j_1ltkAU,1066
3
+ einops_exts-0.0.4.dist-info/METADATA,sha256=yIU5EfeQdzzh8Dc-Feg8_a6p4LVHj8J1OrDFWHbvOdw,621
4
+ einops_exts-0.0.4.dist-info/RECORD,,
5
+ einops_exts-0.0.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ einops_exts-0.0.4.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
7
+ einops_exts-0.0.4.dist-info/top_level.txt,sha256=SckcduaUqHpfn7q_H49iPlKEPbKXTwKcCczc806qzes,12
8
+ einops_exts/__init__.py,sha256=FT0AocRvAC7bgRVinoglTY4uNjWZwfSfu9xZYHEwV4k,232
9
+ einops_exts/__pycache__/__init__.cpython-310.pyc,,
10
+ einops_exts/__pycache__/einops_exts.cpython-310.pyc,,
11
+ einops_exts/__pycache__/torch.cpython-310.pyc,,
12
+ einops_exts/einops_exts.py,sha256=HHeQbJgZcuA_04R9NpRRlBFASCb9xBNtgwlmsABcU7U,2131
13
+ einops_exts/torch.py,sha256=A0orev4xcv41qp3EmXiDjWFcXclNCs9kHkadqYiOpv8,1045
parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/REQUESTED ADDED
File without changes
parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: bdist_wheel (0.38.4)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ einops_exts
parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2017 Just van Rossum
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/METADATA ADDED
The diff for this file is too large to render. See raw diff
 
parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/RECORD ADDED
@@ -0,0 +1,643 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ../../../bin/fonttools,sha256=P48YST5UvCM333Sbg7SXc6Vxr_X-mXJgpmOCvDs4rkM,229
2
+ ../../../bin/pyftmerge,sha256=m3eET_RQ16kplqhro_EECz9-0ypaeZ9Wc09e_rQfJGg,226
3
+ ../../../bin/pyftsubset,sha256=n1h66COY4XMNZyatgzuitOX_yE9qdK2ktDAakWXxm5U,227
4
+ ../../../bin/ttx,sha256=wIiiSulL8BPTMwfKODhjOMlvJGJ98-40jUBvCZoxxdA,224
5
+ ../../../share/man/man1/ttx.1,sha256=cLbm_pOOj1C76T2QXvDxzwDj9gk-GTd5RztvTMsouFw,5377
6
+ fontTools/__init__.py,sha256=QpXvyS41alnu5EXBOrRREAHue4HET1On9zYFOvSTOs8,183
7
+ fontTools/__main__.py,sha256=VjkGh1UD-i1zTDA1dXo1uecSs6PxHdGQ5vlCk_mCCYs,925
8
+ fontTools/__pycache__/__init__.cpython-310.pyc,,
9
+ fontTools/__pycache__/__main__.cpython-310.pyc,,
10
+ fontTools/__pycache__/afmLib.cpython-310.pyc,,
11
+ fontTools/__pycache__/agl.cpython-310.pyc,,
12
+ fontTools/__pycache__/fontBuilder.cpython-310.pyc,,
13
+ fontTools/__pycache__/help.cpython-310.pyc,,
14
+ fontTools/__pycache__/tfmLib.cpython-310.pyc,,
15
+ fontTools/__pycache__/ttx.cpython-310.pyc,,
16
+ fontTools/__pycache__/unicode.cpython-310.pyc,,
17
+ fontTools/afmLib.py,sha256=1MagIItOzRV4vV5kKPxeDZbPJsfxLB3wdHLFkQvl0uk,13164
18
+ fontTools/agl.py,sha256=05bm8Uq45uVWW8nPbP6xbNgmFyxQr8sWhYAiP0VSjnI,112975
19
+ fontTools/cffLib/CFF2ToCFF.py,sha256=K0bgyxhiVq7E7W3T68SJvMWBjWqDBSqx2whmNaPVPgc,5572
20
+ fontTools/cffLib/CFFToCFF2.py,sha256=t3J641CQ2lI-LvynUaVpCiaiu-nVoLgWJwNe1kFGMjQ,9976
21
+ fontTools/cffLib/__init__.py,sha256=itWKwFH6ij4lEbwgqP7NQRdb-Q_84UQPZD7M1SPOTY0,106866
22
+ fontTools/cffLib/__pycache__/CFF2ToCFF.cpython-310.pyc,,
23
+ fontTools/cffLib/__pycache__/CFFToCFF2.cpython-310.pyc,,
24
+ fontTools/cffLib/__pycache__/__init__.cpython-310.pyc,,
25
+ fontTools/cffLib/__pycache__/specializer.cpython-310.pyc,,
26
+ fontTools/cffLib/__pycache__/transforms.cpython-310.pyc,,
27
+ fontTools/cffLib/__pycache__/width.cpython-310.pyc,,
28
+ fontTools/cffLib/specializer.py,sha256=q3AeBbxWnfP_teaqTh1YfM-9e34Pxs_yGicxc93QMrc,30533
29
+ fontTools/cffLib/transforms.py,sha256=gbcUnFz-MTBxkSYm-eZRox6J2UMlqBTgMgpPmx6SHwo,17015
30
+ fontTools/cffLib/width.py,sha256=IqGL0CLyCZqi_hvsHySG08qpYxS3kaqW-tsAT-bjHV4,6074
31
+ fontTools/colorLib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
32
+ fontTools/colorLib/__pycache__/__init__.cpython-310.pyc,,
33
+ fontTools/colorLib/__pycache__/builder.cpython-310.pyc,,
34
+ fontTools/colorLib/__pycache__/errors.cpython-310.pyc,,
35
+ fontTools/colorLib/__pycache__/geometry.cpython-310.pyc,,
36
+ fontTools/colorLib/__pycache__/table_builder.cpython-310.pyc,,
37
+ fontTools/colorLib/__pycache__/unbuilder.cpython-310.pyc,,
38
+ fontTools/colorLib/builder.py,sha256=kmO7OuudQQb3fEOS7aLzgTDVjqS9i2xIQmk9p1uBe8A,23008
39
+ fontTools/colorLib/errors.py,sha256=CsaviiRxxrpgVX4blm7KCyK8553ljwL44xkJOeC5U7U,41
40
+ fontTools/colorLib/geometry.py,sha256=3ScySrR2YDJa7d5K5_xM5Yt1-3NCV-ry8ikYA5VwVbI,5518
41
+ fontTools/colorLib/table_builder.py,sha256=ZeltWY6n-YPiJv_hQ1iBXoEFAG70EKxZyScgsMKUFGU,7469
42
+ fontTools/colorLib/unbuilder.py,sha256=iW-E5I39WsV82K3NgCO4Cjzwm1WqzGrtypHt8epwbHM,2142
43
+ fontTools/config/__init__.py,sha256=Ti5jpozjMqp5qhnrmwNcWI6b9uvHzhZlbWXHTqVZlGI,2643
44
+ fontTools/config/__pycache__/__init__.cpython-310.pyc,,
45
+ fontTools/cu2qu/__init__.py,sha256=Cuc7Uglb0nSgaraTxXY5J8bReznH5wApW0uakN7MycY,618
46
+ fontTools/cu2qu/__main__.py,sha256=kTUI-jczsHeelULLlory74QEeFjZWp9zigCc7PrdVQY,92
47
+ fontTools/cu2qu/__pycache__/__init__.cpython-310.pyc,,
48
+ fontTools/cu2qu/__pycache__/__main__.cpython-310.pyc,,
49
+ fontTools/cu2qu/__pycache__/benchmark.cpython-310.pyc,,
50
+ fontTools/cu2qu/__pycache__/cli.cpython-310.pyc,,
51
+ fontTools/cu2qu/__pycache__/cu2qu.cpython-310.pyc,,
52
+ fontTools/cu2qu/__pycache__/errors.cpython-310.pyc,,
53
+ fontTools/cu2qu/__pycache__/ufo.cpython-310.pyc,,
54
+ fontTools/cu2qu/benchmark.py,sha256=wasPJmf8q9k9UHjpHChC3WQAGbBAyHN9PvJzXvWC0Fw,1296
55
+ fontTools/cu2qu/cli.py,sha256=MbAQnOpZwrUFe_tjAP3Tgf6uLdOgHlONUcPNeTXwH0Y,6076
56
+ fontTools/cu2qu/cu2qu.c,sha256=xCmVdXDI_c_mtbU0KNZPs9cAP4h4nJo14Y_Mo7WWbao,593463
57
+ fontTools/cu2qu/cu2qu.cpython-310-x86_64-linux-gnu.so,sha256=GWul8HeQZExM3O8sjWZ3lHBoFuwbVZP4Pk57eQB7-XE,976056
58
+ fontTools/cu2qu/cu2qu.py,sha256=UIFGlFq9X6Pj_NuaXg7KWIzLyR1jnx7nMCX-hFVG0SQ,16466
59
+ fontTools/cu2qu/errors.py,sha256=PyJNMy8lHDtKpfFkc0nkM8F4jNLZAC4lPQCN1Km4bpg,2441
60
+ fontTools/cu2qu/ufo.py,sha256=qZR70uWdCia19Ff8GLn5NeItscvvn69DegjDZVF4eNI,11794
61
+ fontTools/designspaceLib/__init__.py,sha256=gQKalFNhackVAU9t3Z0nz6zChKkm2-79qpArmud37tA,129251
62
+ fontTools/designspaceLib/__main__.py,sha256=xhtYXo1T1tsykhQDD0tcconSNYgWL5hoTBORpVDUYrc,103
63
+ fontTools/designspaceLib/__pycache__/__init__.cpython-310.pyc,,
64
+ fontTools/designspaceLib/__pycache__/__main__.cpython-310.pyc,,
65
+ fontTools/designspaceLib/__pycache__/split.cpython-310.pyc,,
66
+ fontTools/designspaceLib/__pycache__/statNames.cpython-310.pyc,,
67
+ fontTools/designspaceLib/__pycache__/types.cpython-310.pyc,,
68
+ fontTools/designspaceLib/split.py,sha256=FB1NuvhUO453UXveQZi9oyrW_caoCPM3RADp1rYWkDs,19239
69
+ fontTools/designspaceLib/statNames.py,sha256=lDqFxZAKSbpMuLsgbK6XtyHA5lqLyAK0t561wsSWmaM,9069
70
+ fontTools/designspaceLib/types.py,sha256=ofK65qXNADqcpl7zI72Pa5s07-cm7G41iEmLVV44-Es,5320
71
+ fontTools/encodings/MacRoman.py,sha256=4vEooUDm2gLCG8KIIDhRxm5-A64w7XrhP9cjDRr2Eo0,3576
72
+ fontTools/encodings/StandardEncoding.py,sha256=Eo3AGE8FE_p-IVYYuV097KouSsF3UrXoRRN0XyvYbrs,3581
73
+ fontTools/encodings/__init__.py,sha256=DJBWmoX_Haau7qlgmvWyfbhSzrX2qL636Rns7CG01pk,75
74
+ fontTools/encodings/__pycache__/MacRoman.cpython-310.pyc,,
75
+ fontTools/encodings/__pycache__/StandardEncoding.cpython-310.pyc,,
76
+ fontTools/encodings/__pycache__/__init__.cpython-310.pyc,,
77
+ fontTools/encodings/__pycache__/codecs.cpython-310.pyc,,
78
+ fontTools/encodings/codecs.py,sha256=u50ruwz9fcRsrUrRGpR17Cr55Ovn1fvCHCKrElVumDE,4721
79
+ fontTools/feaLib/__init__.py,sha256=jlIru2ghxvb1HhC5Je2BCXjFJmFQlYKpruorPoz3BvQ,213
80
+ fontTools/feaLib/__main__.py,sha256=Df2PA6LXwna98lSXiL7R4as_ZEdWCIk3egSM5w7GpvM,2240
81
+ fontTools/feaLib/__pycache__/__init__.cpython-310.pyc,,
82
+ fontTools/feaLib/__pycache__/__main__.cpython-310.pyc,,
83
+ fontTools/feaLib/__pycache__/ast.cpython-310.pyc,,
84
+ fontTools/feaLib/__pycache__/builder.cpython-310.pyc,,
85
+ fontTools/feaLib/__pycache__/error.cpython-310.pyc,,
86
+ fontTools/feaLib/__pycache__/lexer.cpython-310.pyc,,
87
+ fontTools/feaLib/__pycache__/location.cpython-310.pyc,,
88
+ fontTools/feaLib/__pycache__/lookupDebugInfo.cpython-310.pyc,,
89
+ fontTools/feaLib/__pycache__/parser.cpython-310.pyc,,
90
+ fontTools/feaLib/__pycache__/variableScalar.cpython-310.pyc,,
91
+ fontTools/feaLib/ast.py,sha256=_27skibzPidJtI5lUFeVjEv5NVaNPbuz4u8oZfMuxMk,73801
92
+ fontTools/feaLib/builder.py,sha256=H-WC0TzlkQB1GZuLbijD66f8_aJexoXxQJ6L-cXQ0Bo,69994
93
+ fontTools/feaLib/error.py,sha256=Tq2dZUlCOyLfjTr3qibsT2g9t-S_JEf6bKgyNX55oCE,643
94
+ fontTools/feaLib/lexer.c,sha256=vQ4myMvJqvp8rdY6YeEQJHM2Crw_eFajkHWefik884Q,750756
95
+ fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so,sha256=glRZueojI8WPTmhu_zopdIzYICdHsoy4urTEZOaiW6o,1346664
96
+ fontTools/feaLib/lexer.py,sha256=emyMPmRoqNZkzxnJyI6JRCCtXrbCOFofwa9O6ABGLiw,11121
97
+ fontTools/feaLib/location.py,sha256=JXzHqGV56EHdcq823AwA5oaK05hf_1ySWpScbo3zGC0,234
98
+ fontTools/feaLib/lookupDebugInfo.py,sha256=gVRr5-APWfT_a5-25hRuawSVX8fEvXVsOSLWkH91T2w,304
99
+ fontTools/feaLib/parser.py,sha256=wbfG_-rqrn2RWMRQMlR3-uaiM9k4_mzCVF-wPLr00rQ,98466
100
+ fontTools/feaLib/variableScalar.py,sha256=Xu8tpDlQbfIfjnKnYDEf43EqVdyIJUy8_1ROVPg9_mg,4069
101
+ fontTools/fontBuilder.py,sha256=phkTJlv-VKaZVG1MLyFCoxLwn5J8fpsfPVGz6Cjm7BM,33299
102
+ fontTools/help.py,sha256=bAjatvIhV7TJyXI7WhsxdYO4YVlhScZXu_kRtHANEPo,1125
103
+ fontTools/merge/__init__.py,sha256=-l65-mbTwSh0gjarnojIfsAX-ZkMtdz3vGTjtYHQ2ws,8250
104
+ fontTools/merge/__main__.py,sha256=hDx3gfbUBO83AJKumSEhiV-xqNTJNNgK2uFjazOGTmw,94
105
+ fontTools/merge/__pycache__/__init__.cpython-310.pyc,,
106
+ fontTools/merge/__pycache__/__main__.cpython-310.pyc,,
107
+ fontTools/merge/__pycache__/base.cpython-310.pyc,,
108
+ fontTools/merge/__pycache__/cmap.cpython-310.pyc,,
109
+ fontTools/merge/__pycache__/layout.cpython-310.pyc,,
110
+ fontTools/merge/__pycache__/options.cpython-310.pyc,,
111
+ fontTools/merge/__pycache__/tables.cpython-310.pyc,,
112
+ fontTools/merge/__pycache__/unicode.cpython-310.pyc,,
113
+ fontTools/merge/__pycache__/util.cpython-310.pyc,,
114
+ fontTools/merge/base.py,sha256=l0G1Px98E9ZdVuFLMUBKWdtr7Jb8JX8vxcjeaDUUnzY,2389
115
+ fontTools/merge/cmap.py,sha256=_oCBnZfm5M7ebYRJnOYw5wUEICFmdR6kMUe1w6jsVuM,5545
116
+ fontTools/merge/layout.py,sha256=fkMPGPLxEdxohS3scVM4W7LmNthSz-UPyocsffe2KqE,16075
117
+ fontTools/merge/options.py,sha256=xko_1-WErcNQkirECzIOOYxSJR_bRtdQYQYOtmgccYI,2501
118
+ fontTools/merge/tables.py,sha256=uBD1-XqOCDzFxp0D7ZDvrMRdd8R7eAm58WtYKhz-m5w,10640
119
+ fontTools/merge/unicode.py,sha256=kb1Jrfuoq1KUcVhhSKnflAED_wMZxXDjVwB-CI9k05Y,4273
120
+ fontTools/merge/util.py,sha256=BH3bZWNFy-Tsj1cth7aSpGVJ18YXKXqDakPn6Wzku6U,3378
121
+ fontTools/misc/__init__.py,sha256=DJBWmoX_Haau7qlgmvWyfbhSzrX2qL636Rns7CG01pk,75
122
+ fontTools/misc/__pycache__/__init__.cpython-310.pyc,,
123
+ fontTools/misc/__pycache__/arrayTools.cpython-310.pyc,,
124
+ fontTools/misc/__pycache__/bezierTools.cpython-310.pyc,,
125
+ fontTools/misc/__pycache__/classifyTools.cpython-310.pyc,,
126
+ fontTools/misc/__pycache__/cliTools.cpython-310.pyc,,
127
+ fontTools/misc/__pycache__/configTools.cpython-310.pyc,,
128
+ fontTools/misc/__pycache__/cython.cpython-310.pyc,,
129
+ fontTools/misc/__pycache__/dictTools.cpython-310.pyc,,
130
+ fontTools/misc/__pycache__/eexec.cpython-310.pyc,,
131
+ fontTools/misc/__pycache__/encodingTools.cpython-310.pyc,,
132
+ fontTools/misc/__pycache__/etree.cpython-310.pyc,,
133
+ fontTools/misc/__pycache__/filenames.cpython-310.pyc,,
134
+ fontTools/misc/__pycache__/fixedTools.cpython-310.pyc,,
135
+ fontTools/misc/__pycache__/intTools.cpython-310.pyc,,
136
+ fontTools/misc/__pycache__/iterTools.cpython-310.pyc,,
137
+ fontTools/misc/__pycache__/lazyTools.cpython-310.pyc,,
138
+ fontTools/misc/__pycache__/loggingTools.cpython-310.pyc,,
139
+ fontTools/misc/__pycache__/macCreatorType.cpython-310.pyc,,
140
+ fontTools/misc/__pycache__/macRes.cpython-310.pyc,,
141
+ fontTools/misc/__pycache__/psCharStrings.cpython-310.pyc,,
142
+ fontTools/misc/__pycache__/psLib.cpython-310.pyc,,
143
+ fontTools/misc/__pycache__/psOperators.cpython-310.pyc,,
144
+ fontTools/misc/__pycache__/py23.cpython-310.pyc,,
145
+ fontTools/misc/__pycache__/roundTools.cpython-310.pyc,,
146
+ fontTools/misc/__pycache__/sstruct.cpython-310.pyc,,
147
+ fontTools/misc/__pycache__/symfont.cpython-310.pyc,,
148
+ fontTools/misc/__pycache__/testTools.cpython-310.pyc,,
149
+ fontTools/misc/__pycache__/textTools.cpython-310.pyc,,
150
+ fontTools/misc/__pycache__/timeTools.cpython-310.pyc,,
151
+ fontTools/misc/__pycache__/transform.cpython-310.pyc,,
152
+ fontTools/misc/__pycache__/treeTools.cpython-310.pyc,,
153
+ fontTools/misc/__pycache__/vector.cpython-310.pyc,,
154
+ fontTools/misc/__pycache__/visitor.cpython-310.pyc,,
155
+ fontTools/misc/__pycache__/xmlReader.cpython-310.pyc,,
156
+ fontTools/misc/__pycache__/xmlWriter.cpython-310.pyc,,
157
+ fontTools/misc/arrayTools.py,sha256=jZk__GE-K9VViZE_H-LPPj0smWbKng-yfPE8BfGp8HI,11483
158
+ fontTools/misc/bezierTools.c,sha256=HuOs781YMbzNHWkGotqCqfhHwft4QkfyYYGG9mN5MwU,1806287
159
+ fontTools/misc/bezierTools.cpython-310-x86_64-linux-gnu.so,sha256=hCl0R5smfg2ZFsamvEg8TAJY4XINOQDraOAO3SRq4-0,4340864
160
+ fontTools/misc/bezierTools.py,sha256=JKPfNC8xY3tj_RrILA1N2dh4oy3hEXQ_BfKiA2_dqM0,44758
161
+ fontTools/misc/classifyTools.py,sha256=zcg3EM4GOerBW9c063ljaLllgeeZ772EpFZjp9CdgLI,5613
162
+ fontTools/misc/cliTools.py,sha256=qCznJMLCQu3ZHQD_4ctUnr3TkfAUdkGl-UuxZUrppy0,1862
163
+ fontTools/misc/configTools.py,sha256=YXBE_vL2dMWCnK4oY3vtU15B79q82DtKp7h7XRqJc1Q,11188
164
+ fontTools/misc/cython.py,sha256=eyLcL2Bw-SSToYro8f44dkkYRlQfiFbhcza0afS-qHE,682
165
+ fontTools/misc/dictTools.py,sha256=VxjarsGJuk_wa3z29FSCtKZNCFfXtMBiNEu0RPAlpDk,2417
166
+ fontTools/misc/eexec.py,sha256=GNn2OCRvO1HbbIeDPxk9i0glO7cux_AQaoVMXhBR8y8,3331
167
+ fontTools/misc/encodingTools.py,sha256=hCv5PFfnXQJVCZA8Wyn1vr3vzLBbUuEPtGk5CzWM9RY,2073
168
+ fontTools/misc/etree.py,sha256=EPldipUNNMvbPimNX7qOUwKkbpJMY4uyElhe-wqKWkM,17079
169
+ fontTools/misc/filenames.py,sha256=MMCO3xjk1pcDc-baobcKd8IdoFPt-bcGqu8t8HUGAkI,8223
170
+ fontTools/misc/fixedTools.py,sha256=gsotTCOJLyMis13M4_jQJ8-QPob2Gl2TtNJhW6FER1I,7647
171
+ fontTools/misc/intTools.py,sha256=l6pjk4UYlXcyLtfC0DdOC5RL6UJ8ihRR0zRiYow5xA8,586
172
+ fontTools/misc/iterTools.py,sha256=17H6LPZszp32bTKoNorp6uZF1PKj47BAbe5QG8irUjo,390
173
+ fontTools/misc/lazyTools.py,sha256=BC6MmF-OzJ3GrBD8TYDZ-VCSN4UOx0pN0r3oF4GSoiw,1020
174
+ fontTools/misc/loggingTools.py,sha256=2uXks8fEnBjdgJEcxMLvD77-lbOPto3neJ86bMqV_qM,19898
175
+ fontTools/misc/macCreatorType.py,sha256=Je9jtqUr7EPbpH3QxlVl3pizoQ-1AOPMBIctHIMTM3k,1593
176
+ fontTools/misc/macRes.py,sha256=GT_pnfPw2NCvvOF86nHLAnOtZ6SMHqEuLntaplXzvHM,8579
177
+ fontTools/misc/plistlib/__init__.py,sha256=1HfhHPt3As6u2eRSlFfl6XdnXv_ypQImeQdWIw6wK7Y,21113
178
+ fontTools/misc/plistlib/__pycache__/__init__.cpython-310.pyc,,
179
+ fontTools/misc/plistlib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
180
+ fontTools/misc/psCharStrings.py,sha256=Tb5-k_5krP0eu7qD054iGxE4Zybk9oB4jdiKzcsV0rw,43036
181
+ fontTools/misc/psLib.py,sha256=ioIPm5x3MHkBXF2vzNkC4iVZYobrkWcyvFhmYsjOrPY,12099
182
+ fontTools/misc/psOperators.py,sha256=9SLl5PPBulLo0Xxg_dqlJMitNIBdiGKdkXhOWsNSYZE,15700
183
+ fontTools/misc/py23.py,sha256=aPVCEUz_deggwLBCeTSsccX6QgJavZqvdVtuhpzrPvA,2238
184
+ fontTools/misc/roundTools.py,sha256=1RSXZ0gyi1qW42tz6WSBMJD1FlPdtgqKfWixVN9bd78,3173
185
+ fontTools/misc/sstruct.py,sha256=HuXwoRr9-mAbBxI3gJ3n34ML7NAGSHsAAazaaloWQB4,7158
186
+ fontTools/misc/symfont.py,sha256=dPh-kIzlSS33Ff61PeZM_qd3lMxe_gdCRlmG3wl-u1Q,7012
187
+ fontTools/misc/testTools.py,sha256=P0lianKHKQ1re3IrLW5JGfoLgUXdtVJJceaNO5stA3o,6933
188
+ fontTools/misc/textTools.py,sha256=pbhr6LVhm3J-0Z4saYnJfxBDzyoiw4BR9pAgwypiOw8,3377
189
+ fontTools/misc/timeTools.py,sha256=e9h5pgzL04tBDXmCv_8eRGB4boFV8GKXlS6dq3ggEpw,2234
190
+ fontTools/misc/transform.py,sha256=BfESxMaKIo_PtCZI-HyNLOADatCUAltf2c-REGVOJf8,14822
191
+ fontTools/misc/treeTools.py,sha256=tLWkwyDHeZUPVOGNnJeD4Pn7x2bQeZetwJKaEAW2J2M,1269
192
+ fontTools/misc/vector.py,sha256=6lqZcDjAgHJFQgjzD-ULQ_PrigAMfeZKaBZmAfcC0ig,4062
193
+ fontTools/misc/visitor.py,sha256=S3I_OCavPhkwGQpwIKV9XjNCaWUcafo7HQCyxDI0nQg,5314
194
+ fontTools/misc/xmlReader.py,sha256=igut4_d13RT4WarliqVvuuPybO1uSXVeoBOeW4j0_e4,6580
195
+ fontTools/misc/xmlWriter.py,sha256=CA1c-Ov5vFTF9tT4bGk-f3yBvaX7lVmSdLPYygUqlAE,6046
196
+ fontTools/mtiLib/__init__.py,sha256=vPgS5Ko7dE0GJX1aDmXSwLOaBENDUgdAAFvYVdQ4boo,46617
197
+ fontTools/mtiLib/__main__.py,sha256=gd8X89jnZOe-752k7uaR1lWoiju-2zIT5Yx35Kl0Xek,94
198
+ fontTools/mtiLib/__pycache__/__init__.cpython-310.pyc,,
199
+ fontTools/mtiLib/__pycache__/__main__.cpython-310.pyc,,
200
+ fontTools/otlLib/__init__.py,sha256=D2leUW-3gsUTOFcJYGC18edBYjIJ804ut4qitJYWsaQ,45
201
+ fontTools/otlLib/__pycache__/__init__.cpython-310.pyc,,
202
+ fontTools/otlLib/__pycache__/builder.cpython-310.pyc,,
203
+ fontTools/otlLib/__pycache__/error.cpython-310.pyc,,
204
+ fontTools/otlLib/__pycache__/maxContextCalc.cpython-310.pyc,,
205
+ fontTools/otlLib/builder.py,sha256=n5WTJwuHsE8lgBdZJQyeUhW7qxbTSFrWTUODN2_qP2Y,119693
206
+ fontTools/otlLib/error.py,sha256=cthuhBuOwZYpkTLi5gFPupUxkXkCHe-L_YgkE7N1wCI,335
207
+ fontTools/otlLib/maxContextCalc.py,sha256=3es4Kt84TaZ49sA2ev1zrlwPJikJCAECx5KavwhyB-I,3175
208
+ fontTools/otlLib/optimize/__init__.py,sha256=UUQRpNkHU2RczCRt-Gz7sEiYE9AQq9BHLXZEOyvsnX4,1530
209
+ fontTools/otlLib/optimize/__main__.py,sha256=BvP472kA9KxBb9RMyyehPNevAfpmgW9MfdazkUiAO3M,104
210
+ fontTools/otlLib/optimize/__pycache__/__init__.cpython-310.pyc,,
211
+ fontTools/otlLib/optimize/__pycache__/__main__.cpython-310.pyc,,
212
+ fontTools/otlLib/optimize/__pycache__/gpos.cpython-310.pyc,,
213
+ fontTools/otlLib/optimize/gpos.py,sha256=NTDLwjo90L4GiqdIdWkBEycQ7VcT7cOxxype73mFz8c,18474
214
+ fontTools/pens/__init__.py,sha256=DJBWmoX_Haau7qlgmvWyfbhSzrX2qL636Rns7CG01pk,75
215
+ fontTools/pens/__pycache__/__init__.cpython-310.pyc,,
216
+ fontTools/pens/__pycache__/areaPen.cpython-310.pyc,,
217
+ fontTools/pens/__pycache__/basePen.cpython-310.pyc,,
218
+ fontTools/pens/__pycache__/boundsPen.cpython-310.pyc,,
219
+ fontTools/pens/__pycache__/cairoPen.cpython-310.pyc,,
220
+ fontTools/pens/__pycache__/cocoaPen.cpython-310.pyc,,
221
+ fontTools/pens/__pycache__/cu2quPen.cpython-310.pyc,,
222
+ fontTools/pens/__pycache__/explicitClosingLinePen.cpython-310.pyc,,
223
+ fontTools/pens/__pycache__/filterPen.cpython-310.pyc,,
224
+ fontTools/pens/__pycache__/freetypePen.cpython-310.pyc,,
225
+ fontTools/pens/__pycache__/hashPointPen.cpython-310.pyc,,
226
+ fontTools/pens/__pycache__/momentsPen.cpython-310.pyc,,
227
+ fontTools/pens/__pycache__/perimeterPen.cpython-310.pyc,,
228
+ fontTools/pens/__pycache__/pointInsidePen.cpython-310.pyc,,
229
+ fontTools/pens/__pycache__/pointPen.cpython-310.pyc,,
230
+ fontTools/pens/__pycache__/qtPen.cpython-310.pyc,,
231
+ fontTools/pens/__pycache__/qu2cuPen.cpython-310.pyc,,
232
+ fontTools/pens/__pycache__/quartzPen.cpython-310.pyc,,
233
+ fontTools/pens/__pycache__/recordingPen.cpython-310.pyc,,
234
+ fontTools/pens/__pycache__/reportLabPen.cpython-310.pyc,,
235
+ fontTools/pens/__pycache__/reverseContourPen.cpython-310.pyc,,
236
+ fontTools/pens/__pycache__/roundingPen.cpython-310.pyc,,
237
+ fontTools/pens/__pycache__/statisticsPen.cpython-310.pyc,,
238
+ fontTools/pens/__pycache__/svgPathPen.cpython-310.pyc,,
239
+ fontTools/pens/__pycache__/t2CharStringPen.cpython-310.pyc,,
240
+ fontTools/pens/__pycache__/teePen.cpython-310.pyc,,
241
+ fontTools/pens/__pycache__/transformPen.cpython-310.pyc,,
242
+ fontTools/pens/__pycache__/ttGlyphPen.cpython-310.pyc,,
243
+ fontTools/pens/__pycache__/wxPen.cpython-310.pyc,,
244
+ fontTools/pens/areaPen.py,sha256=Y1WkmqzcC4z_bpGAR0IZUKrtHFtxKUQBmr5-64_zCOk,1472
245
+ fontTools/pens/basePen.py,sha256=eIGSKrKm6w4LLHuG6XJoQZ3eObtoKV5P6aF4gT4sk7U,17073
246
+ fontTools/pens/boundsPen.py,sha256=wE3owOQA8DfhH-zBGC3lJvnVwp-oyIt0KZrEqXbmS9I,3129
247
+ fontTools/pens/cairoPen.py,sha256=wuuOJ1qQDSt_K3zscM2nukRyHZTZMwMzzCXCirfq_qQ,592
248
+ fontTools/pens/cocoaPen.py,sha256=IJRQcAxRuVOTQ90bB_Bgjnmz7px_ST5uLF9CW-Y0KPY,612
249
+ fontTools/pens/cu2quPen.py,sha256=gMUwFUsm_-WzBlDjTMQiNnEuI2heomGeOJBX81zYXPo,13007
250
+ fontTools/pens/explicitClosingLinePen.py,sha256=kKKtdZiwaf8Cj4_ytrIDdGB2GMpPPDXm5Nwbw5WDgwU,3219
251
+ fontTools/pens/filterPen.py,sha256=kKSvLmWCW4MkCF0ciJhjTj-LdUGOQL593PFkpm5PhP8,7790
252
+ fontTools/pens/freetypePen.py,sha256=HD-gXJSbgImJdBc8sIBk0HWBdjv3WKFofs6PgCCsGOY,19908
253
+ fontTools/pens/hashPointPen.py,sha256=gElrFyQoOQp3ZbpKHRWPwC61A9OgT2Js8crVUD8BQAY,3573
254
+ fontTools/pens/momentsPen.c,sha256=HrqBRPCkiD0WXslFguGGEBgCq1h9Jx7mGkV9FX5vgkg,541056
255
+ fontTools/pens/momentsPen.cpython-310-x86_64-linux-gnu.so,sha256=50DL3LHfXmKzXjGwqT3DuNliatfL8ZXY0uXHElxQjGE,1077352
256
+ fontTools/pens/momentsPen.py,sha256=JsIL7KtRz0ZWG1_TPDzkwK0eNEr9excg3ggttRNGqIs,25685
257
+ fontTools/pens/perimeterPen.py,sha256=lr6NzrIWxi4TXBJPbcJsKzqABWfQeil2Bgm9BgUD3N4,2153
258
+ fontTools/pens/pointInsidePen.py,sha256=noEUvBQIeAheDMJwzvvfnEiKhmwbS1i0RQE9jik6Gl4,6355
259
+ fontTools/pens/pointPen.py,sha256=IA0JVDaf8_aAvjRQv3asXItxxfzhv4gEEFvrlDlCx_k,22296
260
+ fontTools/pens/qtPen.py,sha256=QRNLIry2rQl4E_7ct2tu10-qLHneQp0XV7FfaZ-tcL8,634
261
+ fontTools/pens/qu2cuPen.py,sha256=pRST43-rUpzlOP83Z_Rr0IvIQBCx6RWI6nnNaitQcLk,3985
262
+ fontTools/pens/quartzPen.py,sha256=EH482Kz_xsqYhVRovv6N_T1CXaSvOzUKPLxTaN956tU,1287
263
+ fontTools/pens/recordingPen.py,sha256=VgFZ4NMhnZt1qSTzFEU0cma-gw3kBe47bfSxPYH73rs,12489
264
+ fontTools/pens/reportLabPen.py,sha256=kpfMfOLXt2vOQ5smPsU82ft80FpCPWJzQLl7ENOH8Ew,2066
265
+ fontTools/pens/reverseContourPen.py,sha256=oz64ZRhLAvT7DYMAwGKoLzZXQK8l81jRiYnTZkW6a-Y,4022
266
+ fontTools/pens/roundingPen.py,sha256=Q4vvG0Esq_sLNODU0TITU4F3wcXcKWo4BA7DWdDaVcM,4649
267
+ fontTools/pens/statisticsPen.py,sha256=Hjy8SmXxRzOtkTjpvKVmfY_2WcIZ5veZYuX-t6YQ1yA,9640
268
+ fontTools/pens/svgPathPen.py,sha256=T3b6SZS9B9sVWMK9mSFDtjHeviQs_yOJOZKq5Sg5Zdg,8572
269
+ fontTools/pens/t2CharStringPen.py,sha256=uq9KCOxrk5TEZGYpcOG-pgkWHYCe4dMwb2hx5uYOmWA,2391
270
+ fontTools/pens/teePen.py,sha256=P1ARJOCMJ6MxK-PB1yZ-ips3CUfnadWYnQ_do6VIasQ,1290
271
+ fontTools/pens/transformPen.py,sha256=s0kUyQdnemUwHvYr2SFboFmh4WY1S9OHBL8L4PJKRwE,4056
272
+ fontTools/pens/ttGlyphPen.py,sha256=yLtB-E5pTQR59OKVYySttWBu1xC2vR8ezSaRhIMtVwg,11870
273
+ fontTools/pens/wxPen.py,sha256=W9RRHlBWHp-CVC4Exvk3ytBmRaB4-LgJPP5Bv7o9BA0,680
274
+ fontTools/qu2cu/__init__.py,sha256=Jfm1JljXbt91w4gyvZn6jzEmVnhRx50sh2fDongrOsE,618
275
+ fontTools/qu2cu/__main__.py,sha256=9FWf6SIZaRaC8SiL0LhjAWC2yIdY9N_9wlRko8m1l2Q,93
276
+ fontTools/qu2cu/__pycache__/__init__.cpython-310.pyc,,
277
+ fontTools/qu2cu/__pycache__/__main__.cpython-310.pyc,,
278
+ fontTools/qu2cu/__pycache__/benchmark.cpython-310.pyc,,
279
+ fontTools/qu2cu/__pycache__/cli.cpython-310.pyc,,
280
+ fontTools/qu2cu/__pycache__/qu2cu.cpython-310.pyc,,
281
+ fontTools/qu2cu/benchmark.py,sha256=GMcr_4r7L6K9SmJ13itt-_XKhnKqSVUDPlXUG6IZmmM,1400
282
+ fontTools/qu2cu/cli.py,sha256=U2rooYnVVEalGRAWGFHk-Kp6Okys8wtzdaWLjw1bngY,3714
283
+ fontTools/qu2cu/qu2cu.c,sha256=C3rzZrEWjwbuKDwHOi0zl1woncn_aAgo5CZz9j1XWdI,658853
284
+ fontTools/qu2cu/qu2cu.cpython-310-x86_64-linux-gnu.so,sha256=FDmq-FeVXwv4kml6aXIZpgfW7LBAaEecDxVn9AjBSMU,1109672
285
+ fontTools/qu2cu/qu2cu.py,sha256=1RKhaMBBiDvo5PtkNqR5p0X2HQ4yel4TbWT8MFU6Hps,12315
286
+ fontTools/subset/__init__.py,sha256=nXS7IZ7HsPUn90nsgYSsttw6VABp9uy5lnyEx8BH9Dc,133662
287
+ fontTools/subset/__main__.py,sha256=bhtfP2SqP4k799pxtksFgnC-XGNQDr3LcO4lc8T5e5g,95
288
+ fontTools/subset/__pycache__/__init__.cpython-310.pyc,,
289
+ fontTools/subset/__pycache__/__main__.cpython-310.pyc,,
290
+ fontTools/subset/__pycache__/cff.cpython-310.pyc,,
291
+ fontTools/subset/__pycache__/svg.cpython-310.pyc,,
292
+ fontTools/subset/__pycache__/util.cpython-310.pyc,,
293
+ fontTools/subset/cff.py,sha256=rqMRJOlX5FacV1LW8aDlVOglgEM87TkMA9bdsYenask,6145
294
+ fontTools/subset/svg.py,sha256=8dLBzQlnIt4_fOKEFDAVlKTucdHvcbCcyG9-a6UBZZ0,9384
295
+ fontTools/subset/util.py,sha256=9SXFYb5Ef9Z58uXmYPCQil8B2i3Q7aFB_1fFDFSppdU,754
296
+ fontTools/svgLib/__init__.py,sha256=IGCLwSbU8jLhq6HI2vSdPQgNs6zDUi5774TgX5MCXPY,75
297
+ fontTools/svgLib/__pycache__/__init__.cpython-310.pyc,,
298
+ fontTools/svgLib/path/__init__.py,sha256=C82fh7xH6ZHsSFVnV848-xeDezpokx1EwTmayJCouFU,1996
299
+ fontTools/svgLib/path/__pycache__/__init__.cpython-310.pyc,,
300
+ fontTools/svgLib/path/__pycache__/arc.cpython-310.pyc,,
301
+ fontTools/svgLib/path/__pycache__/parser.cpython-310.pyc,,
302
+ fontTools/svgLib/path/__pycache__/shapes.cpython-310.pyc,,
303
+ fontTools/svgLib/path/arc.py,sha256=-f5Ym6q4tDWQ76sMNSTUTWgL_7AfgXojvBhtBS7bWwQ,5812
304
+ fontTools/svgLib/path/parser.py,sha256=8T6okMstvgM9ufb2zBcwSzsuuoYbqfnUjNYgb6kjznU,10788
305
+ fontTools/svgLib/path/shapes.py,sha256=xvBUIckKyT9JLy7q_ZP50r6TjvZANyHdZP7wFDzErcI,5322
306
+ fontTools/t1Lib/__init__.py,sha256=p42y70wEIbuX0IIxZG7-b_I-gHto1VLy0gLsDvxCfkw,20865
307
+ fontTools/t1Lib/__pycache__/__init__.cpython-310.pyc,,
308
+ fontTools/tfmLib.py,sha256=UMbkM73JXRJVS9t2B-BJc13rSjImaWBuzCoehLwHFhs,14270
309
+ fontTools/ttLib/__init__.py,sha256=fjOFcwbRed9b_giTgJ7FLsqeJC8ndnx327WfJztW-Tc,553
310
+ fontTools/ttLib/__main__.py,sha256=3yxwadpQ5YTM27RXqG3sFE3EaOSFLQVHaUUH9P0qrSw,3443
311
+ fontTools/ttLib/__pycache__/__init__.cpython-310.pyc,,
312
+ fontTools/ttLib/__pycache__/__main__.cpython-310.pyc,,
313
+ fontTools/ttLib/__pycache__/macUtils.cpython-310.pyc,,
314
+ fontTools/ttLib/__pycache__/removeOverlaps.cpython-310.pyc,,
315
+ fontTools/ttLib/__pycache__/reorderGlyphs.cpython-310.pyc,,
316
+ fontTools/ttLib/__pycache__/scaleUpem.cpython-310.pyc,,
317
+ fontTools/ttLib/__pycache__/sfnt.cpython-310.pyc,,
318
+ fontTools/ttLib/__pycache__/standardGlyphOrder.cpython-310.pyc,,
319
+ fontTools/ttLib/__pycache__/ttCollection.cpython-310.pyc,,
320
+ fontTools/ttLib/__pycache__/ttFont.cpython-310.pyc,,
321
+ fontTools/ttLib/__pycache__/ttGlyphSet.cpython-310.pyc,,
322
+ fontTools/ttLib/__pycache__/ttVisitor.cpython-310.pyc,,
323
+ fontTools/ttLib/__pycache__/woff2.cpython-310.pyc,,
324
+ fontTools/ttLib/macUtils.py,sha256=lj3oeFpyjV7ko_JqnluneITmAtlc119J-vwTTg2s73A,1737
325
+ fontTools/ttLib/removeOverlaps.py,sha256=-jUIyAAukmUaPaxgCCtvnjwAqL95-zRPJ9MBnmGDz30,12463
326
+ fontTools/ttLib/reorderGlyphs.py,sha256=y4UAVABTMykRWIF9_BJP1B8X4JRLde5GzIOkAafofE8,10011
327
+ fontTools/ttLib/scaleUpem.py,sha256=U_-NGkwfS9GRIackdEXjGYZ-wSomcUPXQahDneLeArI,14618
328
+ fontTools/ttLib/sfnt.py,sha256=rkznKfteU_Rn9P65WSjFaiwQgpEAoh-TrQpvkQhdIlo,22832
329
+ fontTools/ttLib/standardGlyphOrder.py,sha256=7AY_fVWdtwZ4iv5uWdyKAUcbEQiSDt1lN4sqx9xXwE0,5785
330
+ fontTools/ttLib/tables/B_A_S_E_.py,sha256=fotjQyGgXXMrLeWH-eu_R-OJ_ZepQ3GHOzQ3auhZ82Y,88
331
+ fontTools/ttLib/tables/BitmapGlyphMetrics.py,sha256=9gcGPVzsxEYnVBO7YLWfeOuht9PaCl09GmbAqDYqKi0,1769
332
+ fontTools/ttLib/tables/C_B_D_T_.py,sha256=cmxOO93VXhtS_nS6-iG9K2UUKHqTTEiFThV2wPMi0vA,3331
333
+ fontTools/ttLib/tables/C_B_L_C_.py,sha256=2Qr_xPnZn6yKMgWU5LzKfPyOu-dUK7q6XtyKAOOJl-0,188
334
+ fontTools/ttLib/tables/C_F_F_.py,sha256=jFX4ClhxD57IxfYDkDDCq2oJqSdbgAp1ghNQw5AYU7M,1443
335
+ fontTools/ttLib/tables/C_F_F__2.py,sha256=TTX4_bKYGmFGt2lihlFfKw8LLc-wIr6uE2P45Rv4qW0,425
336
+ fontTools/ttLib/tables/C_O_L_R_.py,sha256=qmexaOF-RtKSzHmekBPQIOa4Q2bmFMV3X_ytaCZhwhc,5725
337
+ fontTools/ttLib/tables/C_P_A_L_.py,sha256=4bXVL-qFKQaQhW_llYQzXZQClL24aJkEy0ms0-Bh2gk,11631
338
+ fontTools/ttLib/tables/D_S_I_G_.py,sha256=U5OCCI0sjhK5HvhNKaEonD0wucXzHXdfz5l3sb4CB8U,5327
339
+ fontTools/ttLib/tables/D__e_b_g.py,sha256=vROIV3UTxbK9eN3rmHOu1ARwBiOXL6K5ihmq0QMToJQ,443
340
+ fontTools/ttLib/tables/DefaultTable.py,sha256=cOtgkLWPY9qmOH2BSPt4c4IUSdANWTKx2rK1CTxQ4h0,1487
341
+ fontTools/ttLib/tables/E_B_D_T_.py,sha256=8iakmy4PP8BNiem9ZT_P7ysu8BkV1gWFJD94K5ThVSo,32276
342
+ fontTools/ttLib/tables/E_B_L_C_.py,sha256=yzlTk7EViBdSqw_8BzElEPZt7NsREH1nVobEBTlm6bg,29779
343
+ fontTools/ttLib/tables/F_F_T_M_.py,sha256=aq9FsyfMegjxRsAWF8U2a3OpxFCPHJjNiLlC63dmqnI,1354
344
+ fontTools/ttLib/tables/F__e_a_t.py,sha256=x3ryfFJPsGVWqy10a4ulXADBnsB2JEVpyx_DuWYqy8k,5380
345
+ fontTools/ttLib/tables/G_D_E_F_.py,sha256=xN2hcW8GPMOos7dTpXJSWNJxUbGzUrnQ_2i-vxlNT_E,88
346
+ fontTools/ttLib/tables/G_M_A_P_.py,sha256=S0KyulRo88aZ4YM8OJ_l8Mf0husmlI03IlXP6aa1C1w,4515
347
+ fontTools/ttLib/tables/G_P_K_G_.py,sha256=XbfsF-qCk9ortdZycw7r6DEo94lfg6TTb3fN7HPYCuM,4441
348
+ fontTools/ttLib/tables/G_P_O_S_.py,sha256=nVSjCI8k7-8aIkzIMc7bCmd2aHeVvjwPIh2jhwn9KY4,88
349
+ fontTools/ttLib/tables/G_S_U_B_.py,sha256=-e_9Jxihz6AUSzSBCdW3tycdu4QZUsL8hZI6A7lMt9Q,88
350
+ fontTools/ttLib/tables/G__l_a_t.py,sha256=rWcOEnv9GmNIvJu7y-cpnrAUkc82527LroBIYA7NQTI,8568
351
+ fontTools/ttLib/tables/G__l_o_c.py,sha256=_MFYx8IUuJseNrS65QN-P8oq4CcGZnSxdGXKyv92Kco,2598
352
+ fontTools/ttLib/tables/H_V_A_R_.py,sha256=bdU_ktJJ2-MQ_zFn1wWTtGpZar7OTFeOEnXyrzDhts8,88
353
+ fontTools/ttLib/tables/J_S_T_F_.py,sha256=d36nOt42I5EY-7JDOulBHKtv1StpxxuvLU7gSOC6OGw,88
354
+ fontTools/ttLib/tables/L_T_S_H_.py,sha256=DG559txp9zRwe5xlhhq8_HqkOvKrgbWUBw-11nKtw-o,1826
355
+ fontTools/ttLib/tables/M_A_T_H_.py,sha256=zXSUNz98761iTREcge-YQ4LcEGCFhp1VVWAZt8B4TTQ,88
356
+ fontTools/ttLib/tables/M_E_T_A_.py,sha256=0IZysRvZur6rhe4DP7P2JnKW0O9SgbxLBHBmAJMx5vA,11784
357
+ fontTools/ttLib/tables/M_V_A_R_.py,sha256=uMresSbbzC43VL8Lou2bHjNmN3aY8wxxrV3qa6SSmR4,88
358
+ fontTools/ttLib/tables/O_S_2f_2.py,sha256=4TN66vch-0lJnr-f-ErbfWbxuDF_JRTOt-qy84oDG2k,27752
359
+ fontTools/ttLib/tables/S_I_N_G_.py,sha256=73zv425wym8w3MndveArHsp1TzM6VOQAz1gvwB9GgoQ,3112
360
+ fontTools/ttLib/tables/S_T_A_T_.py,sha256=tPbD_6x4aJACOux8bKe_sFlk0PEat7aiZn8pnXoUGws,88
361
+ fontTools/ttLib/tables/S_V_G_.py,sha256=8h8arIl9gedLB3GRRNF8V0x2pq1GikF7If9e_srB69I,7463
362
+ fontTools/ttLib/tables/S__i_l_f.py,sha256=5hZ1ze12-tRyYIu-hEewRlgMWiuGHNf40om7Rs369_Q,34901
363
+ fontTools/ttLib/tables/S__i_l_l.py,sha256=KvjK_vrh_YyPHtYwLyrHLx33gcTYg5lBnvUYie6b06M,3104
364
+ fontTools/ttLib/tables/T_S_I_B_.py,sha256=CMcquVV86ug63Zk_yTB37DKqO91FZW14WtzwBI2aPjY,86
365
+ fontTools/ttLib/tables/T_S_I_C_.py,sha256=TjDKgGdFEaL4Affo9MTInuVKbYUHMa0pJX18pzgYxT0,88
366
+ fontTools/ttLib/tables/T_S_I_D_.py,sha256=OP_tHge02Fs7Y5lnVrgUGfr4FdIu-iv3GVtMEyH3Nrw,86
367
+ fontTools/ttLib/tables/T_S_I_J_.py,sha256=soJ3cf52aXLQTqvhQV2bHzyRSh6bsxxvZcpAV4Z9tlc,86
368
+ fontTools/ttLib/tables/T_S_I_P_.py,sha256=SvDvtRhxiC96WvZxNb2RoyTf0IXjeVMF_UP42ZD_vwU,86
369
+ fontTools/ttLib/tables/T_S_I_S_.py,sha256=IHJsyWONSgbg9hm5VnkCeq70SQcwnNJZZO_dBtJGZFc,86
370
+ fontTools/ttLib/tables/T_S_I_V_.py,sha256=Pqr8g0zrgCZl2sSJlxE5AYXazlZE29o1BO8oMVblBUs,655
371
+ fontTools/ttLib/tables/T_S_I__0.py,sha256=c0F4nKBKTeURqxCFv3nwxCu9Dl0mh7wr0PhOrLKMjho,2043
372
+ fontTools/ttLib/tables/T_S_I__1.py,sha256=N-BoLR5WWZv8tglokn5WZv8w_52jzKDG8jiZn5bS__k,6982
373
+ fontTools/ttLib/tables/T_S_I__2.py,sha256=ZV39h3SKtVSxKF9dKkI4sC0X5oXLkQDSPCcOeBTxUTM,420
374
+ fontTools/ttLib/tables/T_S_I__3.py,sha256=wQnwccPX3IaxGjzCdJHwtLh2ZqSsoAS-vWjhdI2h5dQ,467
375
+ fontTools/ttLib/tables/T_S_I__5.py,sha256=jB-P8RMFC3KOGdtTQH5uzvqEJDIWhRlDFsuvAix0cl0,1510
376
+ fontTools/ttLib/tables/T_T_F_A_.py,sha256=7wiKnyzrHiLgdtz6klG02flh8S7hm7GKarif7lw3IMc,81
377
+ fontTools/ttLib/tables/TupleVariation.py,sha256=RBHDqKkdR-MQtN_pWRsVpBax4jzYcDIhA8zXSfJh4ZQ,30912
378
+ fontTools/ttLib/tables/V_A_R_C_.py,sha256=KOtRqzdvsBXvl9vkUieGVROvIu0mTXuAXLXizNeSDWY,88
379
+ fontTools/ttLib/tables/V_D_M_X_.py,sha256=dqE3G2Hg4ByQNteceOMctgFu2Er_DHh4_vOlAAaP5nM,10189
380
+ fontTools/ttLib/tables/V_O_R_G_.py,sha256=XasThyPjPNah6Yn0TCFVv9H5kmYDx5FIMaH8B9sA2oU,5762
381
+ fontTools/ttLib/tables/V_V_A_R_.py,sha256=X9C_r2HiSnI2mYqUQ93yK4zLpweRzobJ0Kh1J2lTsAw,88
382
+ fontTools/ttLib/tables/__init__.py,sha256=iZ5iQZNhlH9M12ovQDu8EcnFwCe1ShoTvxEh22hGOoc,2624
383
+ fontTools/ttLib/tables/__pycache__/B_A_S_E_.cpython-310.pyc,,
384
+ fontTools/ttLib/tables/__pycache__/BitmapGlyphMetrics.cpython-310.pyc,,
385
+ fontTools/ttLib/tables/__pycache__/C_B_D_T_.cpython-310.pyc,,
386
+ fontTools/ttLib/tables/__pycache__/C_B_L_C_.cpython-310.pyc,,
387
+ fontTools/ttLib/tables/__pycache__/C_F_F_.cpython-310.pyc,,
388
+ fontTools/ttLib/tables/__pycache__/C_F_F__2.cpython-310.pyc,,
389
+ fontTools/ttLib/tables/__pycache__/C_O_L_R_.cpython-310.pyc,,
390
+ fontTools/ttLib/tables/__pycache__/C_P_A_L_.cpython-310.pyc,,
391
+ fontTools/ttLib/tables/__pycache__/D_S_I_G_.cpython-310.pyc,,
392
+ fontTools/ttLib/tables/__pycache__/D__e_b_g.cpython-310.pyc,,
393
+ fontTools/ttLib/tables/__pycache__/DefaultTable.cpython-310.pyc,,
394
+ fontTools/ttLib/tables/__pycache__/E_B_D_T_.cpython-310.pyc,,
395
+ fontTools/ttLib/tables/__pycache__/E_B_L_C_.cpython-310.pyc,,
396
+ fontTools/ttLib/tables/__pycache__/F_F_T_M_.cpython-310.pyc,,
397
+ fontTools/ttLib/tables/__pycache__/F__e_a_t.cpython-310.pyc,,
398
+ fontTools/ttLib/tables/__pycache__/G_D_E_F_.cpython-310.pyc,,
399
+ fontTools/ttLib/tables/__pycache__/G_M_A_P_.cpython-310.pyc,,
400
+ fontTools/ttLib/tables/__pycache__/G_P_K_G_.cpython-310.pyc,,
401
+ fontTools/ttLib/tables/__pycache__/G_P_O_S_.cpython-310.pyc,,
402
+ fontTools/ttLib/tables/__pycache__/G_S_U_B_.cpython-310.pyc,,
403
+ fontTools/ttLib/tables/__pycache__/G__l_a_t.cpython-310.pyc,,
404
+ fontTools/ttLib/tables/__pycache__/G__l_o_c.cpython-310.pyc,,
405
+ fontTools/ttLib/tables/__pycache__/H_V_A_R_.cpython-310.pyc,,
406
+ fontTools/ttLib/tables/__pycache__/J_S_T_F_.cpython-310.pyc,,
407
+ fontTools/ttLib/tables/__pycache__/L_T_S_H_.cpython-310.pyc,,
408
+ fontTools/ttLib/tables/__pycache__/M_A_T_H_.cpython-310.pyc,,
409
+ fontTools/ttLib/tables/__pycache__/M_E_T_A_.cpython-310.pyc,,
410
+ fontTools/ttLib/tables/__pycache__/M_V_A_R_.cpython-310.pyc,,
411
+ fontTools/ttLib/tables/__pycache__/O_S_2f_2.cpython-310.pyc,,
412
+ fontTools/ttLib/tables/__pycache__/S_I_N_G_.cpython-310.pyc,,
413
+ fontTools/ttLib/tables/__pycache__/S_T_A_T_.cpython-310.pyc,,
414
+ fontTools/ttLib/tables/__pycache__/S_V_G_.cpython-310.pyc,,
415
+ fontTools/ttLib/tables/__pycache__/S__i_l_f.cpython-310.pyc,,
416
+ fontTools/ttLib/tables/__pycache__/S__i_l_l.cpython-310.pyc,,
417
+ fontTools/ttLib/tables/__pycache__/T_S_I_B_.cpython-310.pyc,,
418
+ fontTools/ttLib/tables/__pycache__/T_S_I_C_.cpython-310.pyc,,
419
+ fontTools/ttLib/tables/__pycache__/T_S_I_D_.cpython-310.pyc,,
420
+ fontTools/ttLib/tables/__pycache__/T_S_I_J_.cpython-310.pyc,,
421
+ fontTools/ttLib/tables/__pycache__/T_S_I_P_.cpython-310.pyc,,
422
+ fontTools/ttLib/tables/__pycache__/T_S_I_S_.cpython-310.pyc,,
423
+ fontTools/ttLib/tables/__pycache__/T_S_I_V_.cpython-310.pyc,,
424
+ fontTools/ttLib/tables/__pycache__/T_S_I__0.cpython-310.pyc,,
425
+ fontTools/ttLib/tables/__pycache__/T_S_I__1.cpython-310.pyc,,
426
+ fontTools/ttLib/tables/__pycache__/T_S_I__2.cpython-310.pyc,,
427
+ fontTools/ttLib/tables/__pycache__/T_S_I__3.cpython-310.pyc,,
428
+ fontTools/ttLib/tables/__pycache__/T_S_I__5.cpython-310.pyc,,
429
+ fontTools/ttLib/tables/__pycache__/T_T_F_A_.cpython-310.pyc,,
430
+ fontTools/ttLib/tables/__pycache__/TupleVariation.cpython-310.pyc,,
431
+ fontTools/ttLib/tables/__pycache__/V_A_R_C_.cpython-310.pyc,,
432
+ fontTools/ttLib/tables/__pycache__/V_D_M_X_.cpython-310.pyc,,
433
+ fontTools/ttLib/tables/__pycache__/V_O_R_G_.cpython-310.pyc,,
434
+ fontTools/ttLib/tables/__pycache__/V_V_A_R_.cpython-310.pyc,,
435
+ fontTools/ttLib/tables/__pycache__/__init__.cpython-310.pyc,,
436
+ fontTools/ttLib/tables/__pycache__/_a_n_k_r.cpython-310.pyc,,
437
+ fontTools/ttLib/tables/__pycache__/_a_v_a_r.cpython-310.pyc,,
438
+ fontTools/ttLib/tables/__pycache__/_b_s_l_n.cpython-310.pyc,,
439
+ fontTools/ttLib/tables/__pycache__/_c_i_d_g.cpython-310.pyc,,
440
+ fontTools/ttLib/tables/__pycache__/_c_m_a_p.cpython-310.pyc,,
441
+ fontTools/ttLib/tables/__pycache__/_c_v_a_r.cpython-310.pyc,,
442
+ fontTools/ttLib/tables/__pycache__/_c_v_t.cpython-310.pyc,,
443
+ fontTools/ttLib/tables/__pycache__/_f_e_a_t.cpython-310.pyc,,
444
+ fontTools/ttLib/tables/__pycache__/_f_p_g_m.cpython-310.pyc,,
445
+ fontTools/ttLib/tables/__pycache__/_f_v_a_r.cpython-310.pyc,,
446
+ fontTools/ttLib/tables/__pycache__/_g_a_s_p.cpython-310.pyc,,
447
+ fontTools/ttLib/tables/__pycache__/_g_c_i_d.cpython-310.pyc,,
448
+ fontTools/ttLib/tables/__pycache__/_g_l_y_f.cpython-310.pyc,,
449
+ fontTools/ttLib/tables/__pycache__/_g_v_a_r.cpython-310.pyc,,
450
+ fontTools/ttLib/tables/__pycache__/_h_d_m_x.cpython-310.pyc,,
451
+ fontTools/ttLib/tables/__pycache__/_h_e_a_d.cpython-310.pyc,,
452
+ fontTools/ttLib/tables/__pycache__/_h_h_e_a.cpython-310.pyc,,
453
+ fontTools/ttLib/tables/__pycache__/_h_m_t_x.cpython-310.pyc,,
454
+ fontTools/ttLib/tables/__pycache__/_k_e_r_n.cpython-310.pyc,,
455
+ fontTools/ttLib/tables/__pycache__/_l_c_a_r.cpython-310.pyc,,
456
+ fontTools/ttLib/tables/__pycache__/_l_o_c_a.cpython-310.pyc,,
457
+ fontTools/ttLib/tables/__pycache__/_l_t_a_g.cpython-310.pyc,,
458
+ fontTools/ttLib/tables/__pycache__/_m_a_x_p.cpython-310.pyc,,
459
+ fontTools/ttLib/tables/__pycache__/_m_e_t_a.cpython-310.pyc,,
460
+ fontTools/ttLib/tables/__pycache__/_m_o_r_t.cpython-310.pyc,,
461
+ fontTools/ttLib/tables/__pycache__/_m_o_r_x.cpython-310.pyc,,
462
+ fontTools/ttLib/tables/__pycache__/_n_a_m_e.cpython-310.pyc,,
463
+ fontTools/ttLib/tables/__pycache__/_o_p_b_d.cpython-310.pyc,,
464
+ fontTools/ttLib/tables/__pycache__/_p_o_s_t.cpython-310.pyc,,
465
+ fontTools/ttLib/tables/__pycache__/_p_r_e_p.cpython-310.pyc,,
466
+ fontTools/ttLib/tables/__pycache__/_p_r_o_p.cpython-310.pyc,,
467
+ fontTools/ttLib/tables/__pycache__/_s_b_i_x.cpython-310.pyc,,
468
+ fontTools/ttLib/tables/__pycache__/_t_r_a_k.cpython-310.pyc,,
469
+ fontTools/ttLib/tables/__pycache__/_v_h_e_a.cpython-310.pyc,,
470
+ fontTools/ttLib/tables/__pycache__/_v_m_t_x.cpython-310.pyc,,
471
+ fontTools/ttLib/tables/__pycache__/asciiTable.cpython-310.pyc,,
472
+ fontTools/ttLib/tables/__pycache__/grUtils.cpython-310.pyc,,
473
+ fontTools/ttLib/tables/__pycache__/otBase.cpython-310.pyc,,
474
+ fontTools/ttLib/tables/__pycache__/otConverters.cpython-310.pyc,,
475
+ fontTools/ttLib/tables/__pycache__/otData.cpython-310.pyc,,
476
+ fontTools/ttLib/tables/__pycache__/otTables.cpython-310.pyc,,
477
+ fontTools/ttLib/tables/__pycache__/otTraverse.cpython-310.pyc,,
478
+ fontTools/ttLib/tables/__pycache__/sbixGlyph.cpython-310.pyc,,
479
+ fontTools/ttLib/tables/__pycache__/sbixStrike.cpython-310.pyc,,
480
+ fontTools/ttLib/tables/__pycache__/ttProgram.cpython-310.pyc,,
481
+ fontTools/ttLib/tables/_a_n_k_r.py,sha256=DhIUAWnvXZZdC1jlh9ubcsobFahdtlJMsk7v_2s-WaM,462
482
+ fontTools/ttLib/tables/_a_v_a_r.py,sha256=QqLK59G2UUYGZMjpTjhw9l01dcspANRydvnBwB8GnE0,7038
483
+ fontTools/ttLib/tables/_b_s_l_n.py,sha256=D1tRo8TDAUxeCqVWsTma9u2VxRzxUkCpF84Lv_hy4rU,170
484
+ fontTools/ttLib/tables/_c_i_d_g.py,sha256=A6llfYvsJQl0Mj6fnrRxUGXUlBkyEowo1J2euUulHM4,787
485
+ fontTools/ttLib/tables/_c_m_a_p.py,sha256=OP0WuHxErqVIDEuGnJ20lel04jd9JeAYIYTENqKK--Y,61643
486
+ fontTools/ttLib/tables/_c_v_a_r.py,sha256=Nlf8etrchBixD7qxFgxuDZ51VHA0XtsHfABDSgPG2RU,3307
487
+ fontTools/ttLib/tables/_c_v_t.py,sha256=E_mDVniDspGjbBQk9CDEm8y3LJ5FbnHxZHRGbq-okHA,1361
488
+ fontTools/ttLib/tables/_f_e_a_t.py,sha256=cshl7jgxj2RgzE8kECCkQVAW2ibJqgKLpZdT1PwyvuM,560
489
+ fontTools/ttLib/tables/_f_p_g_m.py,sha256=-a5WYucI482KQ65rmbl8YwsD4q9BRyDIunJ_9MYAeyc,1170
490
+ fontTools/ttLib/tables/_f_v_a_r.py,sha256=QJ90oDYxZv3o0u9piylqGGKyk-1ZGqt0vcsHtTfGwYY,8591
491
+ fontTools/ttLib/tables/_g_a_s_p.py,sha256=Sp31uXdZyQO2Bbp4Qh5QBu75TvnDmxNQYhfMXf6PkCg,1916
492
+ fontTools/ttLib/tables/_g_c_i_d.py,sha256=4VWq2u6c21ZOQ5_EJ5EwtZXC-zDz6SOPYwDDRZWRczA,170
493
+ fontTools/ttLib/tables/_g_l_y_f.py,sha256=Jl3i3QPZw6AnSMQXCFpU9Gh9IbIEmDOOcQK_u71qO98,82196
494
+ fontTools/ttLib/tables/_g_v_a_r.py,sha256=bVeiEmFZHdGvcaaZx3_shV_giz0FD1cnPNcLC8QZRU4,10191
495
+ fontTools/ttLib/tables/_h_d_m_x.py,sha256=BOadCwbQhtiwQZoduvkvt6rtevP7BQiyd5KYnfjE0Cc,4024
496
+ fontTools/ttLib/tables/_h_e_a_d.py,sha256=cWH7gPQdb7SoWH88eyHHv0HeJ-k7xyXWjorPVTMIMGs,4745
497
+ fontTools/ttLib/tables/_h_h_e_a.py,sha256=YSMaTvNp3CD4G6WgGLmYdJGv_TKghKkT-IHW5Gw0iio,4434
498
+ fontTools/ttLib/tables/_h_m_t_x.py,sha256=DEcruWWtBYNW6sHtuv17snMCUYkvdaVtx_lrZLLhBfc,5767
499
+ fontTools/ttLib/tables/_k_e_r_n.py,sha256=SXkBnwz39gd6YHrQizGqz1orFEETp02vLgxzJSCNdYQ,10437
500
+ fontTools/ttLib/tables/_l_c_a_r.py,sha256=SKmQ65spClbLnsYMDoqecsUOWWNyBDsFWut-Y6ahVhk,88
501
+ fontTools/ttLib/tables/_l_o_c_a.py,sha256=JkfJoEMtrWPRPuTsxbARrvVJzJWMQv42NZ816KMOra8,1917
502
+ fontTools/ttLib/tables/_l_t_a_g.py,sha256=L1ekoPzh4pMdWGRr-cdjL3M2asf4CqeUHq7zh4wvwrw,2274
503
+ fontTools/ttLib/tables/_m_a_x_p.py,sha256=_aoIWOx9c6Sj-5OtBb7qu0dfARQEcOkV7VGxlnGmiDc,5061
504
+ fontTools/ttLib/tables/_m_e_t_a.py,sha256=MslEJ7E0oO-JNHyAhtkRsBCBp0kK4OXfAgRqtRF9GDA,3651
505
+ fontTools/ttLib/tables/_m_o_r_t.py,sha256=2p7PzPGzdOtFhg-Fxvdh0PO4yRs6_z_WjQegexeZCsw,170
506
+ fontTools/ttLib/tables/_m_o_r_x.py,sha256=UJhBbA3mgVQO1oGmu_2bNXUwQreVSztG85F9k7DpmiQ,170
507
+ fontTools/ttLib/tables/_n_a_m_e.py,sha256=geoF-ka_1h43vuuAF7QThLei_mlEESkrIuAI4tCfKGY,41030
508
+ fontTools/ttLib/tables/_o_p_b_d.py,sha256=t3eqUkZPyaQbahEmKaqp7brDNbt4MQje2Vq1jBu-fEc,170
509
+ fontTools/ttLib/tables/_p_o_s_t.py,sha256=DusC5HkI4eJw9jw9idb0GA1Xr9YuhQMnmsz4GM36kVI,11284
510
+ fontTools/ttLib/tables/_p_r_e_p.py,sha256=97rDk0OiGoOD-foAIzqzYM1IKhB4gQuWyBrkH1PVvP0,115
511
+ fontTools/ttLib/tables/_p_r_o_p.py,sha256=3JHFloIJwg9n4dzoe4KLobHc75oJh6DLNe51sakfz8E,170
512
+ fontTools/ttLib/tables/_s_b_i_x.py,sha256=eHzNG4I8732aeW7iUNEEdYsxgsHT9sTtbaD2vvAxxR8,4443
513
+ fontTools/ttLib/tables/_t_r_a_k.py,sha256=fZV1pQrAilSNc0Yd3x0XoIGbqlNoDv67LB2gb_CejMo,11069
514
+ fontTools/ttLib/tables/_v_h_e_a.py,sha256=zHokAcH7CQ4tZPQAGmdTuv0_X-FHwyLWea1f9aFb1Gg,4130
515
+ fontTools/ttLib/tables/_v_m_t_x.py,sha256=oUrskRNAf3FLIZaYLuk03np_IsIWBGUWbMFcdjU3Sys,229
516
+ fontTools/ttLib/tables/asciiTable.py,sha256=4c69jsAirUnDEpylf9CYBoCKTzwbmfbtUAOrtPnpHjY,637
517
+ fontTools/ttLib/tables/grUtils.py,sha256=hcOJ5oJPOd2uJWnWA7qwR7AfL37YZ5zUT7g8o5BBV80,2270
518
+ fontTools/ttLib/tables/otBase.py,sha256=Vq8fv4lm-e3oPCuSPcWTNMQI2KmX0iOIz_lO9kE1mKE,53355
519
+ fontTools/ttLib/tables/otConverters.py,sha256=YburADbtHu1kVg5v5eHl2CRrUOQCNCycUoWgvbteAsk,74054
520
+ fontTools/ttLib/tables/otData.py,sha256=esZs8p10aaJjioueGZ5plMou2LnzhJeuD-q1AOA-Kek,197260
521
+ fontTools/ttLib/tables/otTables.py,sha256=aCWou5-h4uhH2nPM2jwyD6OfoPhtnsOXm_ZefAawp4I,96937
522
+ fontTools/ttLib/tables/otTraverse.py,sha256=oTr7nA7u7kEltLAhl4Kfl1RPD8O2_bKaoXa5l0hkRVA,5497
523
+ fontTools/ttLib/tables/sbixGlyph.py,sha256=tjEUPVRfx6gr5yme8UytGTtVrimKN5qmbzT1GZPjXiM,5796
524
+ fontTools/ttLib/tables/sbixStrike.py,sha256=gFyOlhRIGnd59y0SrhtsT2Ce4L3yaBrLoFJ_dK9u9mQ,6663
525
+ fontTools/ttLib/tables/table_API_readme.txt,sha256=eZlRTLUkLzc_9Ot3pdfhyMb3ahU0_Iipx0vSbzOVGy8,2748
526
+ fontTools/ttLib/tables/ttProgram.py,sha256=tgtxgd-EnOq-2PUlYEihp-6NHu_7HnE5rxeSAtmXOtU,35888
527
+ fontTools/ttLib/ttCollection.py,sha256=aRph2MkBK3kd9-JCLqhJ1EN9pffN_lVX6WWmOTTewc8,3963
528
+ fontTools/ttLib/ttFont.py,sha256=UXPMV4c5pctOWNygu2F6_kR6FFE9zWLLOGFjh9282WU,40976
529
+ fontTools/ttLib/ttGlyphSet.py,sha256=1SAEMFLuzm5KSjjtXG23c--ihPnIvy0Lq37cHMV73Oc,17376
530
+ fontTools/ttLib/ttVisitor.py,sha256=_tah4C42Tv6Pm9QeLNQwwVCxqI4VNEAqYCbmThp6cvY,1025
531
+ fontTools/ttLib/woff2.py,sha256=Ryw4WVwUFMtdEo9FcIejP1OTV92Z4B9y5Wq7nWDW3lE,61058
532
+ fontTools/ttx.py,sha256=XCerBn2ySMc5Bn54io4j5U5cW228GFREYvEeuvp0ZfM,16652
533
+ fontTools/ufoLib/__init__.py,sha256=eGn4PHQc1PlY1VRuKj3WLHoT_XGkKjrnf99XYTLWjSI,93679
534
+ fontTools/ufoLib/__pycache__/__init__.cpython-310.pyc,,
535
+ fontTools/ufoLib/__pycache__/converters.cpython-310.pyc,,
536
+ fontTools/ufoLib/__pycache__/errors.cpython-310.pyc,,
537
+ fontTools/ufoLib/__pycache__/etree.cpython-310.pyc,,
538
+ fontTools/ufoLib/__pycache__/filenames.cpython-310.pyc,,
539
+ fontTools/ufoLib/__pycache__/glifLib.cpython-310.pyc,,
540
+ fontTools/ufoLib/__pycache__/kerning.cpython-310.pyc,,
541
+ fontTools/ufoLib/__pycache__/plistlib.cpython-310.pyc,,
542
+ fontTools/ufoLib/__pycache__/pointPen.cpython-310.pyc,,
543
+ fontTools/ufoLib/__pycache__/utils.cpython-310.pyc,,
544
+ fontTools/ufoLib/__pycache__/validators.cpython-310.pyc,,
545
+ fontTools/ufoLib/converters.py,sha256=EjuBkQxFltzeb-qnt2jzwieJH92f9ybcdZwAvQJi_Kw,10558
546
+ fontTools/ufoLib/errors.py,sha256=UULZ4h1i_Lb9lywjScgC6N-wC4yyPceTSin0BebbhJk,584
547
+ fontTools/ufoLib/etree.py,sha256=YQpCsRlLv0zfZUK8_i9cNFKBvyq1Gyy6HQbKyPLCoEY,224
548
+ fontTools/ufoLib/filenames.py,sha256=Trm8k9AzXYYaYo0VwAgLJKCtWgsA1QjBlirmgXdZhjg,7562
549
+ fontTools/ufoLib/glifLib.py,sha256=wpoSz624xqocPJbdzzElyCAgmEOjZVQeGr2KiZtHvAA,72053
550
+ fontTools/ufoLib/kerning.py,sha256=0jPFd7mti884yvPjvYcU8lAWDwvVsNOObeQvVmPRJ3k,2973
551
+ fontTools/ufoLib/plistlib.py,sha256=IpMh2FH9-6dxcvjSK4YR7L01HTIP1_RnQ8mWliyds1E,1499
552
+ fontTools/ufoLib/pointPen.py,sha256=QGg6b_UeosZodcqqfAIPyAPUbfT7KgCxDwYfSR0GlCI,233
553
+ fontTools/ufoLib/utils.py,sha256=8aqNHdFUd_imnawCQFY3UaXpF_s_4sHeinH0lqELTos,1893
554
+ fontTools/ufoLib/validators.py,sha256=zIcp2weAYLOJBCvxbqBqAy34TaJrqpAlXKshJIkdhWI,30805
555
+ fontTools/unicode.py,sha256=ZZ7OMmWvIyV1IL1k6ioTzaRAh3tUvm6gvK7QgFbOIHY,1237
556
+ fontTools/unicodedata/Blocks.py,sha256=K72YZjkqty9zQH_UUIOa-hwblt3GrUrjXUbcOWhR0rg,32416
557
+ fontTools/unicodedata/OTTags.py,sha256=wOPpbMsNcp_gdvPFeITtgVMnTN8TJSNAsVEdu_nuPXE,1196
558
+ fontTools/unicodedata/ScriptExtensions.py,sha256=cm34XQGJEeSmkqSpNGLgWoScWvVXObQ-NnygIp64fMk,27713
559
+ fontTools/unicodedata/Scripts.py,sha256=-aLU0oxjjgdVHsT9h5Ej3gMJwzxE7I7RLKOwKREIkkw,130272
560
+ fontTools/unicodedata/__init__.py,sha256=DOCX0X9-Eo3mEju7Zjgcod7d8aswTo3vjDRzV7-8Z4g,8824
561
+ fontTools/unicodedata/__pycache__/Blocks.cpython-310.pyc,,
562
+ fontTools/unicodedata/__pycache__/OTTags.cpython-310.pyc,,
563
+ fontTools/unicodedata/__pycache__/ScriptExtensions.cpython-310.pyc,,
564
+ fontTools/unicodedata/__pycache__/Scripts.cpython-310.pyc,,
565
+ fontTools/unicodedata/__pycache__/__init__.cpython-310.pyc,,
566
+ fontTools/varLib/__init__.py,sha256=mVDyxGfpPMKALtfnry-VgRt7fK8XdCqNWyV-b2aXgGI,53537
567
+ fontTools/varLib/__main__.py,sha256=wbdYC5bPjWCxA0I4SKcLO88gl-UMtsYS8MxdW9ySTkY,95
568
+ fontTools/varLib/__pycache__/__init__.cpython-310.pyc,,
569
+ fontTools/varLib/__pycache__/__main__.cpython-310.pyc,,
570
+ fontTools/varLib/__pycache__/avar.cpython-310.pyc,,
571
+ fontTools/varLib/__pycache__/avarPlanner.cpython-310.pyc,,
572
+ fontTools/varLib/__pycache__/builder.cpython-310.pyc,,
573
+ fontTools/varLib/__pycache__/cff.cpython-310.pyc,,
574
+ fontTools/varLib/__pycache__/errors.cpython-310.pyc,,
575
+ fontTools/varLib/__pycache__/featureVars.cpython-310.pyc,,
576
+ fontTools/varLib/__pycache__/interpolatable.cpython-310.pyc,,
577
+ fontTools/varLib/__pycache__/interpolatableHelpers.cpython-310.pyc,,
578
+ fontTools/varLib/__pycache__/interpolatablePlot.cpython-310.pyc,,
579
+ fontTools/varLib/__pycache__/interpolatableTestContourOrder.cpython-310.pyc,,
580
+ fontTools/varLib/__pycache__/interpolatableTestStartingPoint.cpython-310.pyc,,
581
+ fontTools/varLib/__pycache__/interpolate_layout.cpython-310.pyc,,
582
+ fontTools/varLib/__pycache__/iup.cpython-310.pyc,,
583
+ fontTools/varLib/__pycache__/merger.cpython-310.pyc,,
584
+ fontTools/varLib/__pycache__/models.cpython-310.pyc,,
585
+ fontTools/varLib/__pycache__/multiVarStore.cpython-310.pyc,,
586
+ fontTools/varLib/__pycache__/mutator.cpython-310.pyc,,
587
+ fontTools/varLib/__pycache__/mvar.cpython-310.pyc,,
588
+ fontTools/varLib/__pycache__/plot.cpython-310.pyc,,
589
+ fontTools/varLib/__pycache__/stat.cpython-310.pyc,,
590
+ fontTools/varLib/__pycache__/varStore.cpython-310.pyc,,
591
+ fontTools/varLib/avar.py,sha256=Ye_u0HHznaPQaTzufNFKDj_v9o_LxOKJoa_eTK1D1F0,9647
592
+ fontTools/varLib/avarPlanner.py,sha256=uLMGsL6cBbEMq5YItwABG_vXlXV3bxquM93WGDJ1brA,27358
593
+ fontTools/varLib/builder.py,sha256=mSKOCcnnw-WzmZs15FayoqCDh77Ts7o9Tre9psh8CUc,6609
594
+ fontTools/varLib/cff.py,sha256=EVgaQcoROIrYQsRuftnxFuGGldEPYbrIh5yBckylJC4,22901
595
+ fontTools/varLib/errors.py,sha256=dMo8eGj76I7H4hrBEiNbYrGs2J1K1SwdsUyTHpkVOrQ,6934
596
+ fontTools/varLib/featureVars.py,sha256=BCOBGjGUv2Rw_z0rlVi1ZYkTDcCMh0LyAUzDVJ2PYm4,25448
597
+ fontTools/varLib/instancer/__init__.py,sha256=wFqRVbww2CjuJk3MPDQ2HGmpNBGNQd9JF58KQoBl8_c,71346
598
+ fontTools/varLib/instancer/__main__.py,sha256=zfULwcP01FhplS1IlcMgNQnLxk5RVfmOuinWjqeid-g,104
599
+ fontTools/varLib/instancer/__pycache__/__init__.cpython-310.pyc,,
600
+ fontTools/varLib/instancer/__pycache__/__main__.cpython-310.pyc,,
601
+ fontTools/varLib/instancer/__pycache__/featureVars.cpython-310.pyc,,
602
+ fontTools/varLib/instancer/__pycache__/names.cpython-310.pyc,,
603
+ fontTools/varLib/instancer/__pycache__/solver.cpython-310.pyc,,
604
+ fontTools/varLib/instancer/featureVars.py,sha256=oPqSlnHLMDTtOsmQMi6gkzLox7ymCrqlRAkvC_EJ4bc,7110
605
+ fontTools/varLib/instancer/names.py,sha256=IPRqel_M8zVU0jl30WsfgufxUm9PBBQDQCY3VHapeHc,14950
606
+ fontTools/varLib/instancer/solver.py,sha256=uMePwX0BVT5F94kUvDglsI4_F0nEH67F7RFuJ6tQwQ0,11002
607
+ fontTools/varLib/interpolatable.py,sha256=4PL6mVkZ7lZUbkcaVZTNQx_lyWF92-Hh3NfgcbAvJ94,42756
608
+ fontTools/varLib/interpolatableHelpers.py,sha256=lXd7kwfIVl-4opd-vxCDhf48RnJ7IQKv_uuFQM_6vaU,11496
609
+ fontTools/varLib/interpolatablePlot.py,sha256=w393P6mGLRhYkIjSxMww3qyoYxAUZzCXlmPBbI_84C0,44375
610
+ fontTools/varLib/interpolatableTestContourOrder.py,sha256=EmJ2jp4sHuSM5P-seYvOLk0HLdWyPOHeVWRKIGIKXx4,3033
611
+ fontTools/varLib/interpolatableTestStartingPoint.py,sha256=K6OYKBspim6BXc91pfLTbGLyi5XZukfMuBc6hRpENG8,4296
612
+ fontTools/varLib/interpolate_layout.py,sha256=22VjGZuV2YiAe2MpdTf0xPVz1x2G84bcOL0vOeBpGQM,3689
613
+ fontTools/varLib/iup.c,sha256=w2M7V2o38Z7WaeDk2jFjNO_pD9aWdxZ6kZz3wD8OJ10,779751
614
+ fontTools/varLib/iup.cpython-310-x86_64-linux-gnu.so,sha256=a2dhPc8Yu8GOzeOW87UNGBLCfkxXvGFqTCdbF9w6ETs,1514048
615
+ fontTools/varLib/iup.py,sha256=bUk3O1QoFM8k_QEleHruT0biPoauX8AUJorbRuO21Vo,14675
616
+ fontTools/varLib/merger.py,sha256=E59oli4AwqWZ-FgnuStMSBvsB-FHe-55esXTYUqGeJ8,60802
617
+ fontTools/varLib/models.py,sha256=sj_ENljh_qcMbfYzRIOlRgHq6tFOmL02Wv6WO8uofis,22398
618
+ fontTools/varLib/multiVarStore.py,sha256=GY3cLa6Z0CopuMuTrU25Hv4hlBJMkvjbDvzjbqbTNDM,8320
619
+ fontTools/varLib/mutator.py,sha256=S624yKhtFSm-uBFji2W65QdT4db4pfUNZyMw4pigzv0,19236
620
+ fontTools/varLib/mvar.py,sha256=LTV77vH_3Ecg_qKBO5xQzjLOlJir_ppEr7mPVZRgad8,2449
621
+ fontTools/varLib/plot.py,sha256=NoSZkJ5ndxNcDvJIvd5pQ9_jX6X1oM1K2G_tR4sdPVs,7494
622
+ fontTools/varLib/stat.py,sha256=pNtU3Jebm8Gr5umrbF5xGj5yJQciFwSFpfePOcg37xY,4535
623
+ fontTools/varLib/varStore.py,sha256=RrBoEmNWCcsaL7CFZnzrcl26URVekUqTN4qoOy81eVQ,25160
624
+ fontTools/voltLib/__init__.py,sha256=ZZ1AsTx1VlDn40Kupce-fM3meOWugy3RZraBW9LG-9M,151
625
+ fontTools/voltLib/__pycache__/__init__.cpython-310.pyc,,
626
+ fontTools/voltLib/__pycache__/ast.cpython-310.pyc,,
627
+ fontTools/voltLib/__pycache__/error.cpython-310.pyc,,
628
+ fontTools/voltLib/__pycache__/lexer.cpython-310.pyc,,
629
+ fontTools/voltLib/__pycache__/parser.cpython-310.pyc,,
630
+ fontTools/voltLib/__pycache__/voltToFea.cpython-310.pyc,,
631
+ fontTools/voltLib/ast.py,sha256=sioOeSazmC8PxRMRql33I64JaCflu55UUZcikm9mwIY,13226
632
+ fontTools/voltLib/error.py,sha256=phcQOQj-xOspCXu9hBJQRhSOBDzxHRgZd3fWQOFNJzw,395
633
+ fontTools/voltLib/lexer.py,sha256=OvuETOSvlS6v7iCVeJ3IdH2Cg71n3OJoEyiB3-h6vhE,3368
634
+ fontTools/voltLib/parser.py,sha256=wBSUrjLT3fSPv9Mjx6_ULIf8IcGlwjtb4Auxjh5wqnc,24916
635
+ fontTools/voltLib/voltToFea.py,sha256=igP7_E-7AzSl8f_LiN_GHMoNmFiXPBaXp_zZLndjU4c,28505
636
+ fonttools-4.54.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
637
+ fonttools-4.54.1.dist-info/LICENSE,sha256=Z4cgj4P2Wcy8IiOy_elS_6b36KymLxqKK_W8UbsbI4M,1072
638
+ fonttools-4.54.1.dist-info/METADATA,sha256=FPeXGmgCDbfkFwwdPYM2c6PEM4Cm0PNA19Blj4-lw6c,163697
639
+ fonttools-4.54.1.dist-info/RECORD,,
640
+ fonttools-4.54.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
641
+ fonttools-4.54.1.dist-info/WHEEL,sha256=VXRyidHovicsPXAYYBPK-lnsPgFrrhXkyzySBEhHzcg,151
642
+ fonttools-4.54.1.dist-info/entry_points.txt,sha256=8kVHddxfFWA44FSD4mBpmC-4uCynQnkoz_9aNJb227Y,147
643
+ fonttools-4.54.1.dist-info/top_level.txt,sha256=rRgRylrXzekqWOsrhygzib12pQ7WILf7UGjqEwkIFDM,10
parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/WHEEL ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (75.1.0)
3
+ Root-Is-Purelib: false
4
+ Tag: cp310-cp310-manylinux_2_17_x86_64
5
+ Tag: cp310-cp310-manylinux2014_x86_64
6
+
parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ fontTools
parrot/lib/python3.10/site-packages/h11/__init__.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230),
2
+ # containing no networking code at all, loosely modelled on hyper-h2's generic
3
+ # implementation of HTTP/2 (and in particular the h2.connection.H2Connection
4
+ # class). There's still a bunch of subtle details you need to get right if you
5
+ # want to make this actually useful, because it doesn't implement all the
6
+ # semantics to check that what you're asking to write to the wire is sensible,
7
+ # but at least it gets you out of dealing with the wire itself.
8
+
9
+ from h11._connection import Connection, NEED_DATA, PAUSED
10
+ from h11._events import (
11
+ ConnectionClosed,
12
+ Data,
13
+ EndOfMessage,
14
+ Event,
15
+ InformationalResponse,
16
+ Request,
17
+ Response,
18
+ )
19
+ from h11._state import (
20
+ CLIENT,
21
+ CLOSED,
22
+ DONE,
23
+ ERROR,
24
+ IDLE,
25
+ MIGHT_SWITCH_PROTOCOL,
26
+ MUST_CLOSE,
27
+ SEND_BODY,
28
+ SEND_RESPONSE,
29
+ SERVER,
30
+ SWITCHED_PROTOCOL,
31
+ )
32
+ from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError
33
+ from h11._version import __version__
34
+
35
+ PRODUCT_ID = "python-h11/" + __version__
36
+
37
+
38
+ __all__ = (
39
+ "Connection",
40
+ "NEED_DATA",
41
+ "PAUSED",
42
+ "ConnectionClosed",
43
+ "Data",
44
+ "EndOfMessage",
45
+ "Event",
46
+ "InformationalResponse",
47
+ "Request",
48
+ "Response",
49
+ "CLIENT",
50
+ "CLOSED",
51
+ "DONE",
52
+ "ERROR",
53
+ "IDLE",
54
+ "MUST_CLOSE",
55
+ "SEND_BODY",
56
+ "SEND_RESPONSE",
57
+ "SERVER",
58
+ "SWITCHED_PROTOCOL",
59
+ "ProtocolError",
60
+ "LocalProtocolError",
61
+ "RemoteProtocolError",
62
+ )
parrot/lib/python3.10/site-packages/h11/_abnf.py ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # We use native strings for all the re patterns, to take advantage of string
2
+ # formatting, and then convert to bytestrings when compiling the final re
3
+ # objects.
4
+
5
+ # https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace
6
+ # OWS = *( SP / HTAB )
7
+ # ; optional whitespace
8
+ OWS = r"[ \t]*"
9
+
10
+ # https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators
11
+ # token = 1*tchar
12
+ #
13
+ # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
14
+ # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
15
+ # / DIGIT / ALPHA
16
+ # ; any VCHAR, except delimiters
17
+ token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+"
18
+
19
+ # https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields
20
+ # field-name = token
21
+ field_name = token
22
+
23
+ # The standard says:
24
+ #
25
+ # field-value = *( field-content / obs-fold )
26
+ # field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
27
+ # field-vchar = VCHAR / obs-text
28
+ # obs-fold = CRLF 1*( SP / HTAB )
29
+ # ; obsolete line folding
30
+ # ; see Section 3.2.4
31
+ #
32
+ # https://tools.ietf.org/html/rfc5234#appendix-B.1
33
+ #
34
+ # VCHAR = %x21-7E
35
+ # ; visible (printing) characters
36
+ #
37
+ # https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string
38
+ # obs-text = %x80-FF
39
+ #
40
+ # However, the standard definition of field-content is WRONG! It disallows
41
+ # fields containing a single visible character surrounded by whitespace,
42
+ # e.g. "foo a bar".
43
+ #
44
+ # See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189
45
+ #
46
+ # So our definition of field_content attempts to fix it up...
47
+ #
48
+ # Also, we allow lots of control characters, because apparently people assume
49
+ # that they're legal in practice (e.g., google analytics makes cookies with
50
+ # \x01 in them!):
51
+ # https://github.com/python-hyper/h11/issues/57
52
+ # We still don't allow NUL or whitespace, because those are often treated as
53
+ # meta-characters and letting them through can lead to nasty issues like SSRF.
54
+ vchar = r"[\x21-\x7e]"
55
+ vchar_or_obs_text = r"[^\x00\s]"
56
+ field_vchar = vchar_or_obs_text
57
+ field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals())
58
+
59
+ # We handle obs-fold at a different level, and our fixed-up field_content
60
+ # already grows to swallow the whole value, so ? instead of *
61
+ field_value = r"({field_content})?".format(**globals())
62
+
63
+ # header-field = field-name ":" OWS field-value OWS
64
+ header_field = (
65
+ r"(?P<field_name>{field_name})"
66
+ r":"
67
+ r"{OWS}"
68
+ r"(?P<field_value>{field_value})"
69
+ r"{OWS}".format(**globals())
70
+ )
71
+
72
+ # https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line
73
+ #
74
+ # request-line = method SP request-target SP HTTP-version CRLF
75
+ # method = token
76
+ # HTTP-version = HTTP-name "/" DIGIT "." DIGIT
77
+ # HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive
78
+ #
79
+ # request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full
80
+ # URL, host+port (for connect), or even "*", but in any case we are guaranteed
81
+ # that it contists of the visible printing characters.
82
+ method = token
83
+ request_target = r"{vchar}+".format(**globals())
84
+ http_version = r"HTTP/(?P<http_version>[0-9]\.[0-9])"
85
+ request_line = (
86
+ r"(?P<method>{method})"
87
+ r" "
88
+ r"(?P<target>{request_target})"
89
+ r" "
90
+ r"{http_version}".format(**globals())
91
+ )
92
+
93
+ # https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line
94
+ #
95
+ # status-line = HTTP-version SP status-code SP reason-phrase CRLF
96
+ # status-code = 3DIGIT
97
+ # reason-phrase = *( HTAB / SP / VCHAR / obs-text )
98
+ status_code = r"[0-9]{3}"
99
+ reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals())
100
+ status_line = (
101
+ r"{http_version}"
102
+ r" "
103
+ r"(?P<status_code>{status_code})"
104
+ # However, there are apparently a few too many servers out there that just
105
+ # leave out the reason phrase:
106
+ # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036
107
+ # https://github.com/seanmonstar/httparse/issues/29
108
+ # so make it optional. ?: is a non-capturing group.
109
+ r"(?: (?P<reason>{reason_phrase}))?".format(**globals())
110
+ )
111
+
112
+ HEXDIG = r"[0-9A-Fa-f]"
113
+ # Actually
114
+ #
115
+ # chunk-size = 1*HEXDIG
116
+ #
117
+ # but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20
118
+ chunk_size = r"({HEXDIG}){{1,20}}".format(**globals())
119
+ # Actually
120
+ #
121
+ # chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
122
+ #
123
+ # but we aren't parsing the things so we don't really care.
124
+ chunk_ext = r";.*"
125
+ chunk_header = (
126
+ r"(?P<chunk_size>{chunk_size})"
127
+ r"(?P<chunk_ext>{chunk_ext})?"
128
+ r"{OWS}\r\n".format(
129
+ **globals()
130
+ ) # Even though the specification does not allow for extra whitespaces,
131
+ # we are lenient with trailing whitespaces because some servers on the wild use it.
132
+ )
parrot/lib/python3.10/site-packages/h11/_connection.py ADDED
@@ -0,0 +1,633 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This contains the main Connection class. Everything in h11 revolves around
2
+ # this.
3
+ from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Type, Union
4
+
5
+ from ._events import (
6
+ ConnectionClosed,
7
+ Data,
8
+ EndOfMessage,
9
+ Event,
10
+ InformationalResponse,
11
+ Request,
12
+ Response,
13
+ )
14
+ from ._headers import get_comma_header, has_expect_100_continue, set_comma_header
15
+ from ._readers import READERS, ReadersType
16
+ from ._receivebuffer import ReceiveBuffer
17
+ from ._state import (
18
+ _SWITCH_CONNECT,
19
+ _SWITCH_UPGRADE,
20
+ CLIENT,
21
+ ConnectionState,
22
+ DONE,
23
+ ERROR,
24
+ MIGHT_SWITCH_PROTOCOL,
25
+ SEND_BODY,
26
+ SERVER,
27
+ SWITCHED_PROTOCOL,
28
+ )
29
+ from ._util import ( # Import the internal things we need
30
+ LocalProtocolError,
31
+ RemoteProtocolError,
32
+ Sentinel,
33
+ )
34
+ from ._writers import WRITERS, WritersType
35
+
36
+ # Everything in __all__ gets re-exported as part of the h11 public API.
37
+ __all__ = ["Connection", "NEED_DATA", "PAUSED"]
38
+
39
+
40
+ class NEED_DATA(Sentinel, metaclass=Sentinel):
41
+ pass
42
+
43
+
44
+ class PAUSED(Sentinel, metaclass=Sentinel):
45
+ pass
46
+
47
+
48
+ # If we ever have this much buffered without it making a complete parseable
49
+ # event, we error out. The only time we really buffer is when reading the
50
+ # request/response line + headers together, so this is effectively the limit on
51
+ # the size of that.
52
+ #
53
+ # Some precedents for defaults:
54
+ # - node.js: 80 * 1024
55
+ # - tomcat: 8 * 1024
56
+ # - IIS: 16 * 1024
57
+ # - Apache: <8 KiB per line>
58
+ DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024
59
+
60
+ # RFC 7230's rules for connection lifecycles:
61
+ # - If either side says they want to close the connection, then the connection
62
+ # must close.
63
+ # - HTTP/1.1 defaults to keep-alive unless someone says Connection: close
64
+ # - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive
65
+ # (and even this is a mess -- e.g. if you're implementing a proxy then
66
+ # sending Connection: keep-alive is forbidden).
67
+ #
68
+ # We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So
69
+ # our rule is:
70
+ # - If someone says Connection: close, we will close
71
+ # - If someone uses HTTP/1.0, we will close.
72
+ def _keep_alive(event: Union[Request, Response]) -> bool:
73
+ connection = get_comma_header(event.headers, b"connection")
74
+ if b"close" in connection:
75
+ return False
76
+ if getattr(event, "http_version", b"1.1") < b"1.1":
77
+ return False
78
+ return True
79
+
80
+
81
+ def _body_framing(
82
+ request_method: bytes, event: Union[Request, Response]
83
+ ) -> Tuple[str, Union[Tuple[()], Tuple[int]]]:
84
+ # Called when we enter SEND_BODY to figure out framing information for
85
+ # this body.
86
+ #
87
+ # These are the only two events that can trigger a SEND_BODY state:
88
+ assert type(event) in (Request, Response)
89
+ # Returns one of:
90
+ #
91
+ # ("content-length", count)
92
+ # ("chunked", ())
93
+ # ("http/1.0", ())
94
+ #
95
+ # which are (lookup key, *args) for constructing body reader/writer
96
+ # objects.
97
+ #
98
+ # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3
99
+ #
100
+ # Step 1: some responses always have an empty body, regardless of what the
101
+ # headers say.
102
+ if type(event) is Response:
103
+ if (
104
+ event.status_code in (204, 304)
105
+ or request_method == b"HEAD"
106
+ or (request_method == b"CONNECT" and 200 <= event.status_code < 300)
107
+ ):
108
+ return ("content-length", (0,))
109
+ # Section 3.3.3 also lists another case -- responses with status_code
110
+ # < 200. For us these are InformationalResponses, not Responses, so
111
+ # they can't get into this function in the first place.
112
+ assert event.status_code >= 200
113
+
114
+ # Step 2: check for Transfer-Encoding (T-E beats C-L):
115
+ transfer_encodings = get_comma_header(event.headers, b"transfer-encoding")
116
+ if transfer_encodings:
117
+ assert transfer_encodings == [b"chunked"]
118
+ return ("chunked", ())
119
+
120
+ # Step 3: check for Content-Length
121
+ content_lengths = get_comma_header(event.headers, b"content-length")
122
+ if content_lengths:
123
+ return ("content-length", (int(content_lengths[0]),))
124
+
125
+ # Step 4: no applicable headers; fallback/default depends on type
126
+ if type(event) is Request:
127
+ return ("content-length", (0,))
128
+ else:
129
+ return ("http/1.0", ())
130
+
131
+
132
+ ################################################################
133
+ #
134
+ # The main Connection class
135
+ #
136
+ ################################################################
137
+
138
+
139
+ class Connection:
140
+ """An object encapsulating the state of an HTTP connection.
141
+
142
+ Args:
143
+ our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If
144
+ you're implementing a server, pass :data:`h11.SERVER`.
145
+
146
+ max_incomplete_event_size (int):
147
+ The maximum number of bytes we're willing to buffer of an
148
+ incomplete event. In practice this mostly sets a limit on the
149
+ maximum size of the request/response line + headers. If this is
150
+ exceeded, then :meth:`next_event` will raise
151
+ :exc:`RemoteProtocolError`.
152
+
153
+ """
154
+
155
+ def __init__(
156
+ self,
157
+ our_role: Type[Sentinel],
158
+ max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE,
159
+ ) -> None:
160
+ self._max_incomplete_event_size = max_incomplete_event_size
161
+ # State and role tracking
162
+ if our_role not in (CLIENT, SERVER):
163
+ raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role))
164
+ self.our_role = our_role
165
+ self.their_role: Type[Sentinel]
166
+ if our_role is CLIENT:
167
+ self.their_role = SERVER
168
+ else:
169
+ self.their_role = CLIENT
170
+ self._cstate = ConnectionState()
171
+
172
+ # Callables for converting data->events or vice-versa given the
173
+ # current state
174
+ self._writer = self._get_io_object(self.our_role, None, WRITERS)
175
+ self._reader = self._get_io_object(self.their_role, None, READERS)
176
+
177
+ # Holds any unprocessed received data
178
+ self._receive_buffer = ReceiveBuffer()
179
+ # If this is true, then it indicates that the incoming connection was
180
+ # closed *after* the end of whatever's in self._receive_buffer:
181
+ self._receive_buffer_closed = False
182
+
183
+ # Extra bits of state that don't fit into the state machine.
184
+ #
185
+ # These two are only used to interpret framing headers for figuring
186
+ # out how to read/write response bodies. their_http_version is also
187
+ # made available as a convenient public API.
188
+ self.their_http_version: Optional[bytes] = None
189
+ self._request_method: Optional[bytes] = None
190
+ # This is pure flow-control and doesn't at all affect the set of legal
191
+ # transitions, so no need to bother ConnectionState with it:
192
+ self.client_is_waiting_for_100_continue = False
193
+
194
+ @property
195
+ def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]:
196
+ """A dictionary like::
197
+
198
+ {CLIENT: <client state>, SERVER: <server state>}
199
+
200
+ See :ref:`state-machine` for details.
201
+
202
+ """
203
+ return dict(self._cstate.states)
204
+
205
+ @property
206
+ def our_state(self) -> Type[Sentinel]:
207
+ """The current state of whichever role we are playing. See
208
+ :ref:`state-machine` for details.
209
+ """
210
+ return self._cstate.states[self.our_role]
211
+
212
+ @property
213
+ def their_state(self) -> Type[Sentinel]:
214
+ """The current state of whichever role we are NOT playing. See
215
+ :ref:`state-machine` for details.
216
+ """
217
+ return self._cstate.states[self.their_role]
218
+
219
+ @property
220
+ def they_are_waiting_for_100_continue(self) -> bool:
221
+ return self.their_role is CLIENT and self.client_is_waiting_for_100_continue
222
+
223
+ def start_next_cycle(self) -> None:
224
+ """Attempt to reset our connection state for a new request/response
225
+ cycle.
226
+
227
+ If both client and server are in :data:`DONE` state, then resets them
228
+ both to :data:`IDLE` state in preparation for a new request/response
229
+ cycle on this same connection. Otherwise, raises a
230
+ :exc:`LocalProtocolError`.
231
+
232
+ See :ref:`keepalive-and-pipelining`.
233
+
234
+ """
235
+ old_states = dict(self._cstate.states)
236
+ self._cstate.start_next_cycle()
237
+ self._request_method = None
238
+ # self.their_http_version gets left alone, since it presumably lasts
239
+ # beyond a single request/response cycle
240
+ assert not self.client_is_waiting_for_100_continue
241
+ self._respond_to_state_changes(old_states)
242
+
243
+ def _process_error(self, role: Type[Sentinel]) -> None:
244
+ old_states = dict(self._cstate.states)
245
+ self._cstate.process_error(role)
246
+ self._respond_to_state_changes(old_states)
247
+
248
+ def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]:
249
+ if type(event) is InformationalResponse and event.status_code == 101:
250
+ return _SWITCH_UPGRADE
251
+ if type(event) is Response:
252
+ if (
253
+ _SWITCH_CONNECT in self._cstate.pending_switch_proposals
254
+ and 200 <= event.status_code < 300
255
+ ):
256
+ return _SWITCH_CONNECT
257
+ return None
258
+
259
+ # All events go through here
260
+ def _process_event(self, role: Type[Sentinel], event: Event) -> None:
261
+ # First, pass the event through the state machine to make sure it
262
+ # succeeds.
263
+ old_states = dict(self._cstate.states)
264
+ if role is CLIENT and type(event) is Request:
265
+ if event.method == b"CONNECT":
266
+ self._cstate.process_client_switch_proposal(_SWITCH_CONNECT)
267
+ if get_comma_header(event.headers, b"upgrade"):
268
+ self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE)
269
+ server_switch_event = None
270
+ if role is SERVER:
271
+ server_switch_event = self._server_switch_event(event)
272
+ self._cstate.process_event(role, type(event), server_switch_event)
273
+
274
+ # Then perform the updates triggered by it.
275
+
276
+ if type(event) is Request:
277
+ self._request_method = event.method
278
+
279
+ if role is self.their_role and type(event) in (
280
+ Request,
281
+ Response,
282
+ InformationalResponse,
283
+ ):
284
+ event = cast(Union[Request, Response, InformationalResponse], event)
285
+ self.their_http_version = event.http_version
286
+
287
+ # Keep alive handling
288
+ #
289
+ # RFC 7230 doesn't really say what one should do if Connection: close
290
+ # shows up on a 1xx InformationalResponse. I think the idea is that
291
+ # this is not supposed to happen. In any case, if it does happen, we
292
+ # ignore it.
293
+ if type(event) in (Request, Response) and not _keep_alive(
294
+ cast(Union[Request, Response], event)
295
+ ):
296
+ self._cstate.process_keep_alive_disabled()
297
+
298
+ # 100-continue
299
+ if type(event) is Request and has_expect_100_continue(event):
300
+ self.client_is_waiting_for_100_continue = True
301
+ if type(event) in (InformationalResponse, Response):
302
+ self.client_is_waiting_for_100_continue = False
303
+ if role is CLIENT and type(event) in (Data, EndOfMessage):
304
+ self.client_is_waiting_for_100_continue = False
305
+
306
+ self._respond_to_state_changes(old_states, event)
307
+
308
+ def _get_io_object(
309
+ self,
310
+ role: Type[Sentinel],
311
+ event: Optional[Event],
312
+ io_dict: Union[ReadersType, WritersType],
313
+ ) -> Optional[Callable[..., Any]]:
314
+ # event may be None; it's only used when entering SEND_BODY
315
+ state = self._cstate.states[role]
316
+ if state is SEND_BODY:
317
+ # Special case: the io_dict has a dict of reader/writer factories
318
+ # that depend on the request/response framing.
319
+ framing_type, args = _body_framing(
320
+ cast(bytes, self._request_method), cast(Union[Request, Response], event)
321
+ )
322
+ return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index]
323
+ else:
324
+ # General case: the io_dict just has the appropriate reader/writer
325
+ # for this state
326
+ return io_dict.get((role, state)) # type: ignore[return-value]
327
+
328
+ # This must be called after any action that might have caused
329
+ # self._cstate.states to change.
330
+ def _respond_to_state_changes(
331
+ self,
332
+ old_states: Dict[Type[Sentinel], Type[Sentinel]],
333
+ event: Optional[Event] = None,
334
+ ) -> None:
335
+ # Update reader/writer
336
+ if self.our_state != old_states[self.our_role]:
337
+ self._writer = self._get_io_object(self.our_role, event, WRITERS)
338
+ if self.their_state != old_states[self.their_role]:
339
+ self._reader = self._get_io_object(self.their_role, event, READERS)
340
+
341
+ @property
342
+ def trailing_data(self) -> Tuple[bytes, bool]:
343
+ """Data that has been received, but not yet processed, represented as
344
+ a tuple with two elements, where the first is a byte-string containing
345
+ the unprocessed data itself, and the second is a bool that is True if
346
+ the receive connection was closed.
347
+
348
+ See :ref:`switching-protocols` for discussion of why you'd want this.
349
+ """
350
+ return (bytes(self._receive_buffer), self._receive_buffer_closed)
351
+
352
+ def receive_data(self, data: bytes) -> None:
353
+ """Add data to our internal receive buffer.
354
+
355
+ This does not actually do any processing on the data, just stores
356
+ it. To trigger processing, you have to call :meth:`next_event`.
357
+
358
+ Args:
359
+ data (:term:`bytes-like object`):
360
+ The new data that was just received.
361
+
362
+ Special case: If *data* is an empty byte-string like ``b""``,
363
+ then this indicates that the remote side has closed the
364
+ connection (end of file). Normally this is convenient, because
365
+ standard Python APIs like :meth:`file.read` or
366
+ :meth:`socket.recv` use ``b""`` to indicate end-of-file, while
367
+ other failures to read are indicated using other mechanisms
368
+ like raising :exc:`TimeoutError`. When using such an API you
369
+ can just blindly pass through whatever you get from ``read``
370
+ to :meth:`receive_data`, and everything will work.
371
+
372
+ But, if you have an API where reading an empty string is a
373
+ valid non-EOF condition, then you need to be aware of this and
374
+ make sure to check for such strings and avoid passing them to
375
+ :meth:`receive_data`.
376
+
377
+ Returns:
378
+ Nothing, but after calling this you should call :meth:`next_event`
379
+ to parse the newly received data.
380
+
381
+ Raises:
382
+ RuntimeError:
383
+ Raised if you pass an empty *data*, indicating EOF, and then
384
+ pass a non-empty *data*, indicating more data that somehow
385
+ arrived after the EOF.
386
+
387
+ (Calling ``receive_data(b"")`` multiple times is fine,
388
+ and equivalent to calling it once.)
389
+
390
+ """
391
+ if data:
392
+ if self._receive_buffer_closed:
393
+ raise RuntimeError("received close, then received more data?")
394
+ self._receive_buffer += data
395
+ else:
396
+ self._receive_buffer_closed = True
397
+
398
+ def _extract_next_receive_event(
399
+ self,
400
+ ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]:
401
+ state = self.their_state
402
+ # We don't pause immediately when they enter DONE, because even in
403
+ # DONE state we can still process a ConnectionClosed() event. But
404
+ # if we have data in our buffer, then we definitely aren't getting
405
+ # a ConnectionClosed() immediately and we need to pause.
406
+ if state is DONE and self._receive_buffer:
407
+ return PAUSED
408
+ if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL:
409
+ return PAUSED
410
+ assert self._reader is not None
411
+ event = self._reader(self._receive_buffer)
412
+ if event is None:
413
+ if not self._receive_buffer and self._receive_buffer_closed:
414
+ # In some unusual cases (basically just HTTP/1.0 bodies), EOF
415
+ # triggers an actual protocol event; in that case, we want to
416
+ # return that event, and then the state will change and we'll
417
+ # get called again to generate the actual ConnectionClosed().
418
+ if hasattr(self._reader, "read_eof"):
419
+ event = self._reader.read_eof() # type: ignore[attr-defined]
420
+ else:
421
+ event = ConnectionClosed()
422
+ if event is None:
423
+ event = NEED_DATA
424
+ return event # type: ignore[no-any-return]
425
+
426
+ def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]:
427
+ """Parse the next event out of our receive buffer, update our internal
428
+ state, and return it.
429
+
430
+ This is a mutating operation -- think of it like calling :func:`next`
431
+ on an iterator.
432
+
433
+ Returns:
434
+ : One of three things:
435
+
436
+ 1) An event object -- see :ref:`events`.
437
+
438
+ 2) The special constant :data:`NEED_DATA`, which indicates that
439
+ you need to read more data from your socket and pass it to
440
+ :meth:`receive_data` before this method will be able to return
441
+ any more events.
442
+
443
+ 3) The special constant :data:`PAUSED`, which indicates that we
444
+ are not in a state where we can process incoming data (usually
445
+ because the peer has finished their part of the current
446
+ request/response cycle, and you have not yet called
447
+ :meth:`start_next_cycle`). See :ref:`flow-control` for details.
448
+
449
+ Raises:
450
+ RemoteProtocolError:
451
+ The peer has misbehaved. You should close the connection
452
+ (possibly after sending some kind of 4xx response).
453
+
454
+ Once this method returns :class:`ConnectionClosed` once, then all
455
+ subsequent calls will also return :class:`ConnectionClosed`.
456
+
457
+ If this method raises any exception besides :exc:`RemoteProtocolError`
458
+ then that's a bug -- if it happens please file a bug report!
459
+
460
+ If this method raises any exception then it also sets
461
+ :attr:`Connection.their_state` to :data:`ERROR` -- see
462
+ :ref:`error-handling` for discussion.
463
+
464
+ """
465
+
466
+ if self.their_state is ERROR:
467
+ raise RemoteProtocolError("Can't receive data when peer state is ERROR")
468
+ try:
469
+ event = self._extract_next_receive_event()
470
+ if event not in [NEED_DATA, PAUSED]:
471
+ self._process_event(self.their_role, cast(Event, event))
472
+ if event is NEED_DATA:
473
+ if len(self._receive_buffer) > self._max_incomplete_event_size:
474
+ # 431 is "Request header fields too large" which is pretty
475
+ # much the only situation where we can get here
476
+ raise RemoteProtocolError(
477
+ "Receive buffer too long", error_status_hint=431
478
+ )
479
+ if self._receive_buffer_closed:
480
+ # We're still trying to complete some event, but that's
481
+ # never going to happen because no more data is coming
482
+ raise RemoteProtocolError("peer unexpectedly closed connection")
483
+ return event
484
+ except BaseException as exc:
485
+ self._process_error(self.their_role)
486
+ if isinstance(exc, LocalProtocolError):
487
+ exc._reraise_as_remote_protocol_error()
488
+ else:
489
+ raise
490
+
491
+ def send(self, event: Event) -> Optional[bytes]:
492
+ """Convert a high-level event into bytes that can be sent to the peer,
493
+ while updating our internal state machine.
494
+
495
+ Args:
496
+ event: The :ref:`event <events>` to send.
497
+
498
+ Returns:
499
+ If ``type(event) is ConnectionClosed``, then returns
500
+ ``None``. Otherwise, returns a :term:`bytes-like object`.
501
+
502
+ Raises:
503
+ LocalProtocolError:
504
+ Sending this event at this time would violate our
505
+ understanding of the HTTP/1.1 protocol.
506
+
507
+ If this method raises any exception then it also sets
508
+ :attr:`Connection.our_state` to :data:`ERROR` -- see
509
+ :ref:`error-handling` for discussion.
510
+
511
+ """
512
+ data_list = self.send_with_data_passthrough(event)
513
+ if data_list is None:
514
+ return None
515
+ else:
516
+ return b"".join(data_list)
517
+
518
+ def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]:
519
+ """Identical to :meth:`send`, except that in situations where
520
+ :meth:`send` returns a single :term:`bytes-like object`, this instead
521
+ returns a list of them -- and when sending a :class:`Data` event, this
522
+ list is guaranteed to contain the exact object you passed in as
523
+ :attr:`Data.data`. See :ref:`sendfile` for discussion.
524
+
525
+ """
526
+ if self.our_state is ERROR:
527
+ raise LocalProtocolError("Can't send data when our state is ERROR")
528
+ try:
529
+ if type(event) is Response:
530
+ event = self._clean_up_response_headers_for_sending(event)
531
+ # We want to call _process_event before calling the writer,
532
+ # because if someone tries to do something invalid then this will
533
+ # give a sensible error message, while our writers all just assume
534
+ # they will only receive valid events. But, _process_event might
535
+ # change self._writer. So we have to do a little dance:
536
+ writer = self._writer
537
+ self._process_event(self.our_role, event)
538
+ if type(event) is ConnectionClosed:
539
+ return None
540
+ else:
541
+ # In any situation where writer is None, process_event should
542
+ # have raised ProtocolError
543
+ assert writer is not None
544
+ data_list: List[bytes] = []
545
+ writer(event, data_list.append)
546
+ return data_list
547
+ except:
548
+ self._process_error(self.our_role)
549
+ raise
550
+
551
+ def send_failed(self) -> None:
552
+ """Notify the state machine that we failed to send the data it gave
553
+ us.
554
+
555
+ This causes :attr:`Connection.our_state` to immediately become
556
+ :data:`ERROR` -- see :ref:`error-handling` for discussion.
557
+
558
+ """
559
+ self._process_error(self.our_role)
560
+
561
+ # When sending a Response, we take responsibility for a few things:
562
+ #
563
+ # - Sometimes you MUST set Connection: close. We take care of those
564
+ # times. (You can also set it yourself if you want, and if you do then
565
+ # we'll respect that and close the connection at the right time. But you
566
+ # don't have to worry about that unless you want to.)
567
+ #
568
+ # - The user has to set Content-Length if they want it. Otherwise, for
569
+ # responses that have bodies (e.g. not HEAD), then we will automatically
570
+ # select the right mechanism for streaming a body of unknown length,
571
+ # which depends on depending on the peer's HTTP version.
572
+ #
573
+ # This function's *only* responsibility is making sure headers are set up
574
+ # right -- everything downstream just looks at the headers. There are no
575
+ # side channels.
576
+ def _clean_up_response_headers_for_sending(self, response: Response) -> Response:
577
+ assert type(response) is Response
578
+
579
+ headers = response.headers
580
+ need_close = False
581
+
582
+ # HEAD requests need some special handling: they always act like they
583
+ # have Content-Length: 0, and that's how _body_framing treats
584
+ # them. But their headers are supposed to match what we would send if
585
+ # the request was a GET. (Technically there is one deviation allowed:
586
+ # we're allowed to leave out the framing headers -- see
587
+ # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as
588
+ # easy to get them right.)
589
+ method_for_choosing_headers = cast(bytes, self._request_method)
590
+ if method_for_choosing_headers == b"HEAD":
591
+ method_for_choosing_headers = b"GET"
592
+ framing_type, _ = _body_framing(method_for_choosing_headers, response)
593
+ if framing_type in ("chunked", "http/1.0"):
594
+ # This response has a body of unknown length.
595
+ # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked
596
+ # If our peer is HTTP/1.0, we use no framing headers, and close the
597
+ # connection afterwards.
598
+ #
599
+ # Make sure to clear Content-Length (in principle user could have
600
+ # set both and then we ignored Content-Length b/c
601
+ # Transfer-Encoding overwrote it -- this would be naughty of them,
602
+ # but the HTTP spec says that if our peer does this then we have
603
+ # to fix it instead of erroring out, so we'll accord the user the
604
+ # same respect).
605
+ headers = set_comma_header(headers, b"content-length", [])
606
+ if self.their_http_version is None or self.their_http_version < b"1.1":
607
+ # Either we never got a valid request and are sending back an
608
+ # error (their_http_version is None), so we assume the worst;
609
+ # or else we did get a valid HTTP/1.0 request, so we know that
610
+ # they don't understand chunked encoding.
611
+ headers = set_comma_header(headers, b"transfer-encoding", [])
612
+ # This is actually redundant ATM, since currently we
613
+ # unconditionally disable keep-alive when talking to HTTP/1.0
614
+ # peers. But let's be defensive just in case we add
615
+ # Connection: keep-alive support later:
616
+ if self._request_method != b"HEAD":
617
+ need_close = True
618
+ else:
619
+ headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"])
620
+
621
+ if not self._cstate.keep_alive or need_close:
622
+ # Make sure Connection: close is set
623
+ connection = set(get_comma_header(headers, b"connection"))
624
+ connection.discard(b"keep-alive")
625
+ connection.add(b"close")
626
+ headers = set_comma_header(headers, b"connection", sorted(connection))
627
+
628
+ return Response(
629
+ headers=headers,
630
+ status_code=response.status_code,
631
+ http_version=response.http_version,
632
+ reason=response.reason,
633
+ )
parrot/lib/python3.10/site-packages/h11/_events.py ADDED
@@ -0,0 +1,369 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # High level events that make up HTTP/1.1 conversations. Loosely inspired by
2
+ # the corresponding events in hyper-h2:
3
+ #
4
+ # http://python-hyper.org/h2/en/stable/api.html#events
5
+ #
6
+ # Don't subclass these. Stuff will break.
7
+
8
+ import re
9
+ from abc import ABC
10
+ from dataclasses import dataclass, field
11
+ from typing import Any, cast, Dict, List, Tuple, Union
12
+
13
+ from ._abnf import method, request_target
14
+ from ._headers import Headers, normalize_and_validate
15
+ from ._util import bytesify, LocalProtocolError, validate
16
+
17
+ # Everything in __all__ gets re-exported as part of the h11 public API.
18
+ __all__ = [
19
+ "Event",
20
+ "Request",
21
+ "InformationalResponse",
22
+ "Response",
23
+ "Data",
24
+ "EndOfMessage",
25
+ "ConnectionClosed",
26
+ ]
27
+
28
+ method_re = re.compile(method.encode("ascii"))
29
+ request_target_re = re.compile(request_target.encode("ascii"))
30
+
31
+
32
class Event(ABC):
    """
    Abstract base class for all h11 events.

    Concrete events (Request, Response, Data, ...) subclass this; per the
    module-level note, user code should never subclass these itself.
    """

    # No instance state of its own; each subclass declares its own slots.
    __slots__ = ()
38
+
39
+
40
@dataclass(init=False, frozen=True)
class Request(Event):
    """The beginning of an HTTP request.

    Fields:

    .. attribute:: method

       An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte
       string. :term:`Bytes-like objects <bytes-like object>` and native
       strings containing only ascii characters will be automatically
       converted to byte strings.

    .. attribute:: target

       The target of an HTTP request, e.g. ``b"/index.html"``, or one of the
       more exotic formats described in `RFC 7230, section 5.3
       <https://tools.ietf.org/html/rfc7230#section-5.3>`_. Always a byte
       string. :term:`Bytes-like objects <bytes-like object>` and native
       strings containing only ascii characters will be automatically
       converted to byte strings.

    .. attribute:: headers

       Request headers, represented as a list of (name, value) pairs. See
       :ref:`the header normalization rules <headers-format>` for details.

    .. attribute:: http_version

       The HTTP protocol version, represented as a byte string like
       ``b"1.1"``. See :ref:`the HTTP version normalization rules
       <http_version-format>` for details.

    """

    __slots__ = ("method", "headers", "target", "http_version")

    method: bytes
    headers: Headers
    target: bytes
    http_version: bytes

    def __init__(
        self,
        *,
        method: Union[bytes, str],
        headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
        target: Union[bytes, str],
        http_version: Union[bytes, str] = b"1.1",
        _parsed: bool = False,
    ) -> None:
        super().__init__()
        # frozen=True forbids normal attribute assignment, so all field
        # initialization goes through object.__setattr__.
        if isinstance(headers, Headers):
            object.__setattr__(self, "headers", headers)
        else:
            object.__setattr__(
                self, "headers", normalize_and_validate(headers, _parsed=_parsed)
            )
        if not _parsed:
            # User-supplied values: coerce str -> bytes and validate below.
            object.__setattr__(self, "method", bytesify(method))
            object.__setattr__(self, "target", bytesify(target))
            object.__setattr__(self, "http_version", bytesify(http_version))
        else:
            # Values from h11's own parser are already validated bytes.
            object.__setattr__(self, "method", method)
            object.__setattr__(self, "target", target)
            object.__setattr__(self, "http_version", http_version)

        # "A server MUST respond with a 400 (Bad Request) status code to any
        # HTTP/1.1 request message that lacks a Host header field and to any
        # request message that contains more than one Host header field or a
        # Host header field with an invalid field-value."
        # -- https://tools.ietf.org/html/rfc7230#section-5.4
        host_count = 0
        for name, value in self.headers:
            if name == b"host":
                host_count += 1
        if self.http_version == b"1.1" and host_count == 0:
            raise LocalProtocolError("Missing mandatory Host: header")
        if host_count > 1:
            raise LocalProtocolError("Found multiple Host: headers")

        validate(method_re, self.method, "Illegal method characters")
        validate(request_target_re, self.target, "Illegal target characters")

    # This is an unhashable type.
    __hash__ = None  # type: ignore
126
+
127
+
128
@dataclass(init=False, frozen=True)
class _ResponseBase(Event):
    # Shared implementation for InformationalResponse and Response; the
    # subclasses differ only in the status-code range that __post_init__
    # enforces.

    __slots__ = ("headers", "http_version", "reason", "status_code")

    headers: Headers
    http_version: bytes
    reason: bytes
    status_code: int

    def __init__(
        self,
        *,
        headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
        status_code: int,
        http_version: Union[bytes, str] = b"1.1",
        reason: Union[bytes, str] = b"",
        _parsed: bool = False,
    ) -> None:
        super().__init__()
        # frozen=True forbids normal attribute assignment, so all field
        # initialization goes through object.__setattr__.
        if isinstance(headers, Headers):
            object.__setattr__(self, "headers", headers)
        else:
            object.__setattr__(
                self, "headers", normalize_and_validate(headers, _parsed=_parsed)
            )
        if not _parsed:
            # User-supplied values: coerce and type-check.
            object.__setattr__(self, "reason", bytesify(reason))
            object.__setattr__(self, "http_version", bytesify(http_version))
            if not isinstance(status_code, int):
                raise LocalProtocolError("status code must be integer")
            # Because IntEnum objects are instances of int, but aren't
            # duck-compatible (sigh), see gh-72.
            object.__setattr__(self, "status_code", int(status_code))
        else:
            # Values from h11's own parser are already validated.
            object.__setattr__(self, "reason", reason)
            object.__setattr__(self, "http_version", http_version)
            object.__setattr__(self, "status_code", status_code)

        # Since init=False, dataclasses will not call this for us.
        self.__post_init__()

    def __post_init__(self) -> None:
        # Hook for subclasses to range-check status_code.
        pass

    # This is an unhashable type.
    __hash__ = None  # type: ignore
173
+
174
+
175
@dataclass(init=False, frozen=True)
class InformationalResponse(_ResponseBase):
    """An HTTP informational response.

    Fields:

    .. attribute:: status_code

       The status code of this response, as an integer. For an
       :class:`InformationalResponse`, this is always in the range [100,
       200).

    .. attribute:: headers

       Request headers, represented as a list of (name, value) pairs. See
       :ref:`the header normalization rules <headers-format>` for
       details.

    .. attribute:: http_version

       The HTTP protocol version, represented as a byte string like
       ``b"1.1"``. See :ref:`the HTTP version normalization rules
       <http_version-format>` for details.

    .. attribute:: reason

       The reason phrase of this response, as a byte string. For example:
       ``b"OK"``, or ``b"Not Found"``.

    """

    def __post_init__(self) -> None:
        # Informational responses are exactly the 1xx range.
        if self.status_code < 100 or self.status_code >= 200:
            raise LocalProtocolError(
                "InformationalResponse status_code should be in range "
                "[100, 200), not {}".format(self.status_code)
            )

    # This is an unhashable type.
    __hash__ = None  # type: ignore
215
+
216
+
217
@dataclass(init=False, frozen=True)
class Response(_ResponseBase):
    """The beginning of an HTTP response.

    Fields:

    .. attribute:: status_code

       The status code of this response, as an integer. For an
       :class:`Response`, this is always in the range [200,
       1000).

    .. attribute:: headers

       Request headers, represented as a list of (name, value) pairs. See
       :ref:`the header normalization rules <headers-format>` for details.

    .. attribute:: http_version

       The HTTP protocol version, represented as a byte string like
       ``b"1.1"``. See :ref:`the HTTP version normalization rules
       <http_version-format>` for details.

    .. attribute:: reason

       The reason phrase of this response, as a byte string. For example:
       ``b"OK"``, or ``b"Not Found"``.

    """

    def __post_init__(self) -> None:
        # Any final (non-1xx) response; codes 1000+ are rejected outright.
        if self.status_code < 200 or self.status_code >= 1000:
            raise LocalProtocolError(
                "Response status_code should be in range [200, 1000), not {}".format(
                    self.status_code
                )
            )

    # This is an unhashable type.
    __hash__ = None  # type: ignore
257
+
258
+
259
@dataclass(init=False, frozen=True)
class Data(Event):
    """Part of an HTTP message body.

    Fields:

    .. attribute:: data

       A :term:`bytes-like object` containing part of a message body. Or, if
       using the ``combine=False`` argument to :meth:`Connection.send`, then
       any object that your socket writing code knows what to do with, and for
       which calling :func:`len` returns the number of bytes that will be
       written -- see :ref:`sendfile` for details.

    .. attribute:: chunk_start

       A marker that indicates whether this data object is from the start of a
       chunked transfer encoding chunk. This field is ignored when a Data
       event is provided to :meth:`Connection.send`: it is only valid on
       events emitted from :meth:`Connection.next_event`. You probably
       shouldn't use this attribute at all; see
       :ref:`chunk-delimiters-are-bad` for details.

    .. attribute:: chunk_end

       A marker that indicates whether this data object is the last for a
       given chunked transfer encoding chunk. This field is ignored when
       a Data event is provided to :meth:`Connection.send`: it is only valid
       on events emitted from :meth:`Connection.next_event`. You probably
       shouldn't use this attribute at all; see
       :ref:`chunk-delimiters-are-bad` for details.

    """

    __slots__ = ("data", "chunk_start", "chunk_end")

    data: bytes
    chunk_start: bool
    chunk_end: bool

    def __init__(
        self, data: bytes, chunk_start: bool = False, chunk_end: bool = False
    ) -> None:
        # frozen=True forbids normal attribute assignment, so initialize via
        # object.__setattr__.
        object.__setattr__(self, "data", data)
        object.__setattr__(self, "chunk_start", chunk_start)
        object.__setattr__(self, "chunk_end", chunk_end)

    # This is an unhashable type.
    __hash__ = None  # type: ignore
308
+
309
+
310
+ # XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that
311
+ # are forbidden to be sent in a trailer, since processing them as if they were
312
+ # present in the header section might bypass external security filters."
313
+ # https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part
314
+ # Unfortunately, the list of forbidden fields is long and vague :-/
315
@dataclass(init=False, frozen=True)
class EndOfMessage(Event):
    """The end of an HTTP message.

    Fields:

    .. attribute:: headers

       Default value: ``[]``

       Any trailing headers attached to this message, represented as a list of
       (name, value) pairs. See :ref:`the header normalization rules
       <headers-format>` for details.

       Must be empty unless ``Transfer-Encoding: chunked`` is in use.

    """

    __slots__ = ("headers",)

    headers: Headers

    def __init__(
        self,
        *,
        headers: Union[
            Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None
        ] = None,
        _parsed: bool = False,
    ) -> None:
        super().__init__()
        if headers is None:
            normalized = Headers([])
        elif isinstance(headers, Headers):
            normalized = headers
        else:
            normalized = normalize_and_validate(headers, _parsed=_parsed)

        # frozen=True forbids normal attribute assignment.
        object.__setattr__(self, "headers", normalized)

    # This is an unhashable type.
    __hash__ = None  # type: ignore
355
+
356
+
357
@dataclass(frozen=True)
class ConnectionClosed(Event):
    """This event indicates that the sender has closed their outgoing
    connection.

    Note that this does not necessarily mean that they can't *receive* further
    data, because TCP connections are composed of two one-way channels which
    can be closed independently. See :ref:`closing` for details.

    No fields.
    """

    pass
parrot/lib/python3.10/site-packages/h11/_headers.py ADDED
@@ -0,0 +1,278 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+ from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union
3
+
4
+ from ._abnf import field_name, field_value
5
+ from ._util import bytesify, LocalProtocolError, validate
6
+
7
+ if TYPE_CHECKING:
8
+ from ._events import Request
9
+
10
+ try:
11
+ from typing import Literal
12
+ except ImportError:
13
+ from typing_extensions import Literal # type: ignore
14
+
15
+
16
+ # Facts
17
+ # -----
18
+ #
19
+ # Headers are:
20
+ # keys: case-insensitive ascii
21
+ # values: mixture of ascii and raw bytes
22
+ #
23
+ # "Historically, HTTP has allowed field content with text in the ISO-8859-1
24
+ # charset [ISO-8859-1], supporting other charsets only through use of
25
+ # [RFC2047] encoding. In practice, most HTTP header field values use only a
26
+ # subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD
27
+ # limit their field values to US-ASCII octets. A recipient SHOULD treat other
28
+ # octets in field content (obs-text) as opaque data."
29
+ # And it deprecates all non-ascii values
30
+ #
31
+ # Leading/trailing whitespace in header names is forbidden
32
+ #
33
+ # Values get leading/trailing whitespace stripped
34
+ #
35
+ # Content-Disposition actually needs to contain unicode semantically; to
36
+ # accomplish this it has a terrifically weird way of encoding the filename
37
+ # itself as ascii (and even this still has lots of cross-browser
38
+ # incompatibilities)
39
+ #
40
+ # Order is important:
41
+ # "a proxy MUST NOT change the order of these field values when forwarding a
42
+ # message"
43
+ # (and there are several headers where the order indicates a preference)
44
+ #
45
+ # Multiple occurences of the same header:
46
+ # "A sender MUST NOT generate multiple header fields with the same field name
47
+ # in a message unless either the entire field value for that header field is
48
+ # defined as a comma-separated list [or the header is Set-Cookie which gets a
49
+ # special exception]" - RFC 7230. (cookies are in RFC 6265)
50
+ #
51
+ # So every header aside from Set-Cookie can be merged by b", ".join if it
52
+ # occurs repeatedly. But, of course, they can't necessarily be split by
53
+ # .split(b","), because quoting.
54
+ #
55
+ # Given all this mess (case insensitive, duplicates allowed, order is
56
+ # important, ...), there doesn't appear to be any standard way to handle
57
+ # headers in Python -- they're almost like dicts, but... actually just
58
+ # aren't. For now we punt and just use a super simple representation: headers
59
+ # are a list of pairs
60
+ #
61
+ # [(name1, value1), (name2, value2), ...]
62
+ #
63
+ # where all entries are bytestrings, names are lowercase and have no
64
+ # leading/trailing whitespace, and values are bytestrings with no
65
+ # leading/trailing whitespace. Searching and updating are done via naive O(n)
66
+ # methods.
67
+ #
68
+ # Maybe a dict-of-lists would be better?
69
+
70
+ _content_length_re = re.compile(rb"[0-9]+")
71
+ _field_name_re = re.compile(field_name.encode("ascii"))
72
+ _field_value_re = re.compile(field_value.encode("ascii"))
73
+
74
+
75
class Headers(Sequence[Tuple[bytes, bytes]]):
    """
    Sequence of HTTP headers, iterated as (lowercased-name, value) byte
    pairs.

    Internal storage is a list of three-tuples
    ``(raw_name, lowercased_name, value)``: the raw name preserves the
    original casing for round-tripping onto the wire, while the lowercased
    name is what iteration, indexing, and comparison expose.

        r = Request(
            method="GET",
            target="/",
            headers=[("Host", "example.org"), ("Connection", "keep-alive")],
            http_version="1.1",
        )
        assert r.headers == [
            (b"host", b"example.org"),
            (b"connection", b"keep-alive")
        ]
        assert r.headers.raw_items() == [
            (b"Host", b"example.org"),
            (b"Connection", b"keep-alive")
        ]
    """

    __slots__ = "_full_items"

    def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None:
        self._full_items = full_items

    def __len__(self) -> int:
        return len(self._full_items)

    def __bool__(self) -> bool:
        return len(self._full_items) > 0

    def __getitem__(self, idx: int) -> Tuple[bytes, bytes]:  # type: ignore[override]
        _raw, lowered, value = self._full_items[idx]
        return (lowered, value)

    def __eq__(self, other: object) -> bool:
        return list(self) == list(other)  # type: ignore

    def __repr__(self) -> str:
        return "<Headers(%s)>" % repr(list(self))

    def raw_items(self) -> List[Tuple[bytes, bytes]]:
        # Expose the original header-name casing, for code that writes
        # headers back out.
        out: List[Tuple[bytes, bytes]] = []
        for raw_name, _lowered, value in self._full_items:
            out.append((raw_name, value))
        return out
123
+
124
+
125
+ HeaderTypes = Union[
126
+ List[Tuple[bytes, bytes]],
127
+ List[Tuple[bytes, str]],
128
+ List[Tuple[str, bytes]],
129
+ List[Tuple[str, str]],
130
+ ]
131
+
132
+
133
@overload
def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers:
    ...


@overload
def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers:
    ...


@overload
def normalize_and_validate(
    headers: Union[Headers, HeaderTypes], _parsed: bool = False
) -> Headers:
    ...


def normalize_and_validate(
    headers: Union[Headers, HeaderTypes], _parsed: bool = False
) -> Headers:
    """Convert user- or parser-supplied header pairs into a Headers object.

    Names are lowercased (the raw casing is kept alongside for the wire),
    values are validated, duplicate Content-Length headers are collapsed
    when they agree, and only ``Transfer-Encoding: chunked`` is accepted.
    Header order is otherwise preserved.

    :param headers: iterable of (name, value) pairs (bytes or ascii str).
    :param _parsed: True when the pairs come from h11's own parser, which
        already guarantees bytes and regex-validated contents.
    :raises LocalProtocolError: on malformed names/values, conflicting
        Content-Length values, or an unsupported Transfer-Encoding.
    """
    new_headers = []
    seen_content_length = None
    saw_transfer_encoding = False
    for name, value in headers:
        # For headers coming out of the parser, we can safely skip some steps,
        # because it always returns bytes and has already run these regexes
        # over the data:
        if not _parsed:
            name = bytesify(name)
            value = bytesify(value)
            validate(_field_name_re, name, "Illegal header name {!r}", name)
            validate(_field_value_re, value, "Illegal header value {!r}", value)
        assert isinstance(name, bytes)
        assert isinstance(value, bytes)

        raw_name = name
        name = name.lower()
        if name == b"content-length":
            # Multiple Content-Length headers (or a comma-separated value)
            # are tolerated only if every length agrees; the first
            # occurrence wins, duplicates are dropped.
            lengths = {length.strip() for length in value.split(b",")}
            if len(lengths) != 1:
                raise LocalProtocolError("conflicting Content-Length headers")
            value = lengths.pop()
            validate(_content_length_re, value, "bad Content-Length")
            if seen_content_length is None:
                seen_content_length = value
                new_headers.append((raw_name, name, value))
            elif seen_content_length != value:
                raise LocalProtocolError("conflicting Content-Length headers")
        elif name == b"transfer-encoding":
            # "A server that receives a request message with a transfer coding
            # it does not understand SHOULD respond with 501 (Not
            # Implemented)."
            # https://tools.ietf.org/html/rfc7230#section-3.3.1
            if saw_transfer_encoding:
                raise LocalProtocolError(
                    "multiple Transfer-Encoding headers", error_status_hint=501
                )
            # "All transfer-coding names are case-insensitive"
            # -- https://tools.ietf.org/html/rfc7230#section-4
            value = value.lower()
            if value != b"chunked":
                raise LocalProtocolError(
                    "Only Transfer-Encoding: chunked is supported",
                    error_status_hint=501,
                )
            saw_transfer_encoding = True
            new_headers.append((raw_name, name, value))
        else:
            new_headers.append((raw_name, name, value))
    return Headers(new_headers)
203
+
204
+
205
def get_comma_header(headers: Headers, name: bytes) -> List[bytes]:
    # Collect every comma-separated value for the given header name
    # (lower-case bytes), lower-cased and whitespace-stripped.
    #
    # Only safe for headers whose grammar really is a case-insensitive
    # comma-separated list:
    #
    # - Connection: fits these criteria exactly.
    #
    # - Content-Length: formally a single value (1*DIGIT), but the standard
    #   acknowledges implementations that send several; treating it as a
    #   list is harmless either way.
    #
    # - Transfer-Encoding: technically allows quoted strings containing
    #   commas, e.g.
    #
    #       Transfer-Encoding: foo; options="1,2", chunked
    #
    #   which naive comma-splitting mangles into
    #
    #       foo; options="1
    #       2"
    #       chunked
    #
    #   instead of the correct ['foo; options="1,2"', 'chunked']. That's
    #   acceptable here, because the only thing we ever do with a
    #   Transfer-Encoding is reject anything that isn't exactly "chunked" --
    #   the mangled and correct parses get rejected alike.
    #
    # - Expect: the only legal value is the literal string "100-continue";
    #   splitting on commas is harmless, and it is case-insensitive.
    values: List[bytes] = []
    for _, lowered_name, raw_value in headers._full_items:
        if lowered_name != name:
            continue
        for piece in raw_value.lower().split(b","):
            piece = piece.strip()
            if piece:
                values.append(piece)
    return values
250
+
251
+
252
def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers:
    # Replace every occurrence of the (lower-case bytes) header `name` with
    # one entry per value in `new_values`, appended after the surviving
    # headers. Passing an empty list deletes the header entirely.
    #
    # Stored names use `.title()` casing to match conventional HTTP header
    # style -- blunt, but correct for the headers this is actually used on:
    # Connection, Content-Length, Transfer-Encoding.
    kept: List[Tuple[bytes, bytes]] = [
        (raw_name, raw_value)
        for raw_name, lowered_name, raw_value in headers._full_items
        if lowered_name != name
    ]
    kept.extend((name.title(), value) for value in new_values)
    return normalize_and_validate(kept)
269
+
270
+
271
def has_expect_100_continue(request: "Request") -> bool:
    """Return True if *request* carries a valid ``Expect: 100-continue``."""
    # https://tools.ietf.org/html/rfc7231#section-5.1.1
    # "A server that receives a 100-continue expectation in an HTTP/1.0 request
    # MUST ignore that expectation."
    if request.http_version < b"1.1":
        return False
    return b"100-continue" in get_comma_header(request.headers, b"expect")
parrot/lib/python3.10/site-packages/h11/_readers.py ADDED
@@ -0,0 +1,247 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Code to read HTTP data
2
+ #
3
+ # Strategy: each reader is a callable which takes a ReceiveBuffer object, and
4
+ # either:
5
+ # 1) consumes some of it and returns an Event
6
+ # 2) raises a LocalProtocolError (for consistency -- e.g. we call validate()
7
+ # and it might raise a LocalProtocolError, so simpler just to always use
8
+ # this)
9
+ # 3) returns None, meaning "I need more data"
10
+ #
11
+ # If they have a .read_eof attribute, then this will be called if an EOF is
12
+ # received -- but this is optional. Either way, the actual ConnectionClosed
13
+ # event will be generated afterwards.
14
+ #
15
+ # READERS is a dict describing how to pick a reader. It maps states to either:
16
+ # - a reader
17
+ # - or, for body readers, a dict of per-framing reader factories
18
+
19
+ import re
20
+ from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union
21
+
22
+ from ._abnf import chunk_header, header_field, request_line, status_line
23
+ from ._events import Data, EndOfMessage, InformationalResponse, Request, Response
24
+ from ._receivebuffer import ReceiveBuffer
25
+ from ._state import (
26
+ CLIENT,
27
+ CLOSED,
28
+ DONE,
29
+ IDLE,
30
+ MUST_CLOSE,
31
+ SEND_BODY,
32
+ SEND_RESPONSE,
33
+ SERVER,
34
+ )
35
+ from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate
36
+
37
+ __all__ = ["READERS"]
38
+
39
+ header_field_re = re.compile(header_field.encode("ascii"))
40
+ obs_fold_re = re.compile(rb"[ \t]+")
41
+
42
+
43
def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]:
    # Undo deprecated "obs-fold" header line folding: a line beginning with
    # space/tab continues the previous header line. The folded whitespace is
    # replaced by a single b" ". Yields unfolded lines (bytes, or bytearray
    # when a line was actually folded).
    it = iter(lines)
    last: Optional[bytes] = None
    for line in it:
        match = obs_fold_re.match(line)
        if match:
            # Continuation line: must have something to continue.
            if last is None:
                raise LocalProtocolError("continuation line at start of headers")
            if not isinstance(last, bytearray):
                # Cast to a mutable type, avoiding copy on append to ensure O(n) time
                last = bytearray(last)
            last += b" "
            last += line[match.end() :]
        else:
            # Flush the previous (possibly extended) line before starting a
            # new one.
            if last is not None:
                yield last
            last = line
    # Flush the final pending line.
    if last is not None:
        yield last
62
+
63
+
64
def _decode_header_lines(
    lines: Iterable[bytes],
) -> Iterable[Tuple[bytes, bytes]]:
    """Yield (field_name, field_value) pairs from raw header lines, after
    undoing any obsolete line folding."""
    for unfolded in _obsolete_line_fold(lines):
        fields = validate(
            header_field_re, unfolded, "illegal header line: {!r}", unfolded
        )
        yield (fields["field_name"], fields["field_value"])
70
+
71
+
72
+ request_line_re = re.compile(request_line.encode("ascii"))
73
+
74
+
75
def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]:
    """Parse a complete request line + header block out of *buf*.

    Returns a Request event, or None when more data is needed.
    """
    lines = buf.maybe_extract_lines()
    if lines is None:
        # Not enough data for a full header block yet -- but fail fast if
        # what's buffered can't possibly become a valid request line.
        if buf.is_next_line_obviously_invalid_request_line():
            raise LocalProtocolError("illegal request line")
        return None
    if not lines:
        raise LocalProtocolError("no request line received")
    fields = validate(
        request_line_re, lines[0], "illegal request line: {!r}", lines[0]
    )
    return Request(
        headers=list(_decode_header_lines(lines[1:])), _parsed=True, **fields
    )
89
+
90
+
91
+ status_line_re = re.compile(status_line.encode("ascii"))
92
+
93
+
94
def maybe_read_from_SEND_RESPONSE_server(
    buf: ReceiveBuffer,
) -> Union[InformationalResponse, Response, None]:
    """Parse a complete status line + header block out of *buf*.

    Returns an InformationalResponse (1xx) or Response event, or None when
    more data is needed.
    """
    lines = buf.maybe_extract_lines()
    if lines is None:
        if buf.is_next_line_obviously_invalid_request_line():
            raise LocalProtocolError("illegal request line")
        return None
    if not lines:
        raise LocalProtocolError("no response line received")
    fields = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0])
    # Both http_version and reason are optional in the status-line grammar.
    http_version = fields["http_version"]
    if http_version is None:
        http_version = b"1.1"
    reason = fields["reason"]
    if reason is None:
        reason = b""
    status_code = int(fields["status_code"])
    event_class: Union[Type[InformationalResponse], Type[Response]]
    if status_code < 200:
        event_class = InformationalResponse
    else:
        event_class = Response
    return event_class(
        headers=list(_decode_header_lines(lines[1:])),
        _parsed=True,
        status_code=status_code,
        reason=reason,
        http_version=http_version,
    )
120
+
121
+
122
class ContentLengthReader:
    """Body reader for a Content-Length framed message body."""

    def __init__(self, length: int) -> None:
        self._length = length
        self._remaining = length

    def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]:
        # Once the declared length has been consumed, the body is done.
        if not self._remaining:
            return EndOfMessage()
        chunk = buf.maybe_extract_at_most(self._remaining)
        if chunk is None:
            return None
        self._remaining -= len(chunk)
        return Data(data=chunk)

    def read_eof(self) -> NoReturn:
        # EOF before the full declared length is a framing violation.
        received = self._length - self._remaining
        raise RemoteProtocolError(
            "peer closed connection without sending complete message body "
            "(received {} bytes, expected {})".format(received, self._length)
        )
143
+
144
+
145
+ chunk_header_re = re.compile(chunk_header.encode("ascii"))
146
+
147
+
148
class ChunkedReader:
    # Body reader for Transfer-Encoding: chunked message bodies.
    #
    # Cycles through three phases: read a chunk-size header line, stream out
    # that many bytes of chunk data (possibly across multiple calls), then
    # discard the CRLF that terminates the chunk. A zero-size chunk switches
    # into trailer-reading mode, which ends the message.
    def __init__(self) -> None:
        # Bytes of chunk data still owed by the current chunk (0 = between
        # chunks, need a new chunk header).
        self._bytes_in_chunk = 0
        # After reading a chunk, we have to throw away the trailing \r\n; if
        # this is >0 then we discard that many bytes before resuming regular
        # de-chunkification.
        self._bytes_to_discard = 0
        # True once the zero-size terminating chunk has been seen; we are
        # then collecting the (possibly empty) trailer header block.
        self._reading_trailer = False

    def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]:
        if self._reading_trailer:
            # Trailer ends with a blank line, just like a header block.
            lines = buf.maybe_extract_lines()
            if lines is None:
                return None
            return EndOfMessage(headers=list(_decode_header_lines(lines)))
        if self._bytes_to_discard > 0:
            # Consume the CRLF left over from the previous chunk.
            data = buf.maybe_extract_at_most(self._bytes_to_discard)
            if data is None:
                return None
            self._bytes_to_discard -= len(data)
            if self._bytes_to_discard > 0:
                return None
            # else, fall through and read some more
        assert self._bytes_to_discard == 0
        if self._bytes_in_chunk == 0:
            # We need to refill our chunk count
            chunk_header = buf.maybe_extract_next_line()
            if chunk_header is None:
                return None
            matches = validate(
                chunk_header_re,
                chunk_header,
                "illegal chunk header: {!r}",
                chunk_header,
            )
            # XX FIXME: we discard chunk extensions. Does anyone care?
            # Chunk sizes are hexadecimal.
            self._bytes_in_chunk = int(matches["chunk_size"], base=16)
            if self._bytes_in_chunk == 0:
                # The terminating zero-size chunk: switch to trailer mode and
                # immediately retry, in case the trailer is already buffered.
                self._reading_trailer = True
                return self(buf)
            chunk_start = True
        else:
            chunk_start = False
        assert self._bytes_in_chunk > 0
        data = buf.maybe_extract_at_most(self._bytes_in_chunk)
        if data is None:
            return None
        self._bytes_in_chunk -= len(data)
        if self._bytes_in_chunk == 0:
            # Chunk complete: its trailing \r\n (2 bytes) must be skipped
            # before the next chunk header.
            self._bytes_to_discard = 2
            chunk_end = True
        else:
            chunk_end = False
        return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end)

    def read_eof(self) -> NoReturn:
        # EOF before the terminating chunk/trailer is a framing violation.
        raise RemoteProtocolError(
            "peer closed connection without sending complete message body "
            "(incomplete chunked read)"
        )
208
+
209
+
210
class Http10Reader:
    """Body reader for HTTP/1.0-style bodies, delimited only by EOF."""

    def __call__(self, buf: ReceiveBuffer) -> Optional[Data]:
        # Hand over whatever is buffered; the cap is effectively "everything".
        chunk = buf.maybe_extract_at_most(999999999)
        if chunk is None:
            return None
        return Data(data=chunk)

    def read_eof(self) -> EndOfMessage:
        # For this framing, EOF is the normal end-of-body signal.
        return EndOfMessage()
219
+
220
+
221
def expect_nothing(buf: ReceiveBuffer) -> None:
    # Reader for states where the peer is not allowed to send anything:
    # receiving any data at all is a protocol error.
    if buf:
        raise LocalProtocolError("Got data when expecting EOF")
    return None
225
+
226
+
227
# Type of the READERS table: keys are either a (role, state) pair or the
# SEND_BODY sentinel; values are reader callables, or (for SEND_BODY) a
# per-framing dict of reader factories.
ReadersType = Dict[
    Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]],
    Union[Callable[..., Any], Dict[str, Callable[..., Any]]],
]

READERS: ReadersType = {
    (CLIENT, IDLE): maybe_read_from_IDLE_client,
    (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server,
    (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server,
    # States in which the peer may not send anything at all:
    (CLIENT, DONE): expect_nothing,
    (CLIENT, MUST_CLOSE): expect_nothing,
    (CLIENT, CLOSED): expect_nothing,
    (SERVER, DONE): expect_nothing,
    (SERVER, MUST_CLOSE): expect_nothing,
    (SERVER, CLOSED): expect_nothing,
    # Body readers are factories, instantiated per-message with
    # framing-specific arguments:
    SEND_BODY: {
        "chunked": ChunkedReader,
        "content-length": ContentLengthReader,
        "http/1.0": Http10Reader,
    },
}
parrot/lib/python3.10/site-packages/h11/_receivebuffer.py ADDED
@@ -0,0 +1,153 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+ import sys
3
+ from typing import List, Optional, Union
4
+
5
+ __all__ = ["ReceiveBuffer"]
6
+
7
+
8
+ # Operations we want to support:
9
+ # - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable),
10
+ # or wait until there is one
11
+ # - read at-most-N bytes
12
+ # Goals:
13
+ # - on average, do this fast
14
+ # - worst case, do this in O(n) where n is the number of bytes processed
15
+ # Plan:
16
+ # - store bytearray, offset, how far we've searched for a separator token
17
+ # - use the how-far-we've-searched data to avoid rescanning
18
+ # - while doing a stream of uninterrupted processing, advance offset instead
19
+ # of constantly copying
20
+ # WARNING:
21
+ # - I haven't benchmarked or profiled any of this yet.
22
+ #
23
+ # Note that starting in Python 3.4, deleting the initial n bytes from a
24
+ # bytearray is amortized O(n), thanks to some excellent work by Antoine
25
+ # Martin:
26
+ #
27
+ # https://bugs.python.org/issue19087
28
+ #
29
+ # This means that if we only supported 3.4+, we could get rid of the code here
30
+ # involving self._start and self.compress, because it's doing exactly the same
31
+ # thing that bytearray now does internally.
32
+ #
33
+ # BUT unfortunately, we still support 2.7, and reading short segments out of a
34
+ # long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually
35
+ # delete this code. Yet:
36
+ #
37
+ # https://pythonclock.org/
38
+ #
39
+ # (Two things to double-check first though: make sure PyPy also has the
40
+ # optimization, and benchmark to make sure it's a win, since we do have a
41
+ # slightly clever thing where we delay calling compress() until we've
42
+ # processed a whole event, which could in theory be slightly more efficient
43
+ # than the internal bytearray support.)
44
blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE)


class ReceiveBuffer:
    """Incrementally-fed byte buffer with line-oriented extraction helpers.

    Bytes are appended with ``+=``; each ``maybe_extract_*`` method either
    consumes and returns a complete piece of the buffer, or returns ``None``
    when not enough data has arrived yet.
    """

    def __init__(self) -> None:
        self._data = bytearray()
        # How far previous, unsuccessful searches got, so repeated calls
        # never rescan bytes that have already been examined.
        self._next_line_search = 0
        self._multiple_lines_search = 0

    def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer":
        self._data.extend(byteslike)
        return self

    def __bool__(self) -> bool:
        return len(self._data) > 0

    def __len__(self) -> int:
        return len(self._data)

    # for @property unprocessed_data
    def __bytes__(self) -> bytes:
        return bytes(self._data)

    def _extract(self, count: int) -> bytearray:
        # Consume and return the first *count* bytes.  The cached search
        # positions refer to data that is no longer present, so reset them.
        taken = self._data[:count]
        del self._data[:count]
        self._next_line_search = 0
        self._multiple_lines_search = 0
        return taken

    def maybe_extract_at_most(self, count: int) -> Optional[bytearray]:
        """Extract up to *count* bytes, or return None if none are available."""
        if not self._data[:count]:
            return None
        return self._extract(count)

    def maybe_extract_next_line(self) -> Optional[bytearray]:
        """Extract one complete CRLF-terminated line (terminator included)."""
        # Start one byte before where the last failed search stopped, in case
        # the buffer previously ended with a lone b"\r".
        start = max(0, self._next_line_search - 1)
        terminator_at = self._data.find(b"\r\n", start)
        if terminator_at == -1:
            self._next_line_search = len(self._data)
            return None
        # +2 keeps the b"\r\n" terminator inside the extracted line.
        return self._extract(terminator_at + 2)

    def maybe_extract_lines(self) -> Optional[List[bytearray]]:
        """Extract everything up to the first blank line, split into lines.

        Returns None while no blank line has arrived, and [] when the buffer
        begins with an immediate blank line (i.e. zero header lines).
        """
        # An immediate blank line -- bare LF or CRLF -- means "no lines".
        if self._data.startswith(b"\n"):
            self._extract(1)
            return []
        if self._data.startswith(b"\r\n"):
            self._extract(2)
            return []

        # Resume scanning where the previous failed search left off.
        match = blank_line_regex.search(self._data, self._multiple_lines_search)
        if match is None:
            # Keep the last two bytes unscanned: the blank-line terminator
            # might straddle this receive and the next one.
            self._multiple_lines_search = max(0, len(self._data) - 2)
            return None

        lines = self._extract(match.end()).split(b"\n")

        # Accept both LF and CRLF endings: strip any trailing b"\r".
        for line in lines:
            if line.endswith(b"\r"):
                del line[-1]

        # The blank-line terminator guarantees two empty trailing entries.
        assert lines[-2] == lines[-1] == b""
        del lines[-2:]

        return lines

    # In theory we should wait until `\r\n` before starting to validate
    # incoming data. However it's interesting to detect (very) invalid data
    # early given they might not even contain `\r\n` at all (hence only
    # timeout will get rid of them).
    # This is not a 100% effective detection but more of a cheap sanity check
    # allowing for early abort in some useful cases.
    # This is especially interesting when peer is messing up with HTTPS and
    # sent us a TLS stream where we were expecting plain HTTP given all
    # versions of TLS so far start handshake with a 0x16 message type code.
    def is_next_line_obviously_invalid_request_line(self) -> bool:
        # HTTP header lines must not start with a control character or space.
        if not self._data:
            return False
        return self._data[0] < 0x21
parrot/lib/python3.10/site-packages/h11/_util.py ADDED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union
2
+
3
+ __all__ = [
4
+ "ProtocolError",
5
+ "LocalProtocolError",
6
+ "RemoteProtocolError",
7
+ "validate",
8
+ "bytesify",
9
+ ]
10
+
11
+
12
class ProtocolError(Exception):
    """Exception indicating a violation of the HTTP/1.1 protocol.

    This is an abstract base class with two concrete subclasses:
    :exc:`LocalProtocolError`, raised when *you* tried to do something that
    HTTP/1.1 forbids, and :exc:`RemoteProtocolError`, raised when the remote
    peer did.  See :ref:`error-handling` for details.

    In addition to the normal :exc:`Exception` features, it carries one
    extra attribute:

    .. attribute:: error_status_hint

       A suggested HTTP status code for when this error occurred as part of
       handling a request.  For a :exc:`RemoteProtocolError` it suggests how
       a server might respond to the misbehaving peer; for a
       :exc:`LocalProtocolError` it suggests how your peer might have
       responded had h11 let you continue.  Defaults to 400 Bad Request,
       the generic catch-all for protocol violations.
    """

    def __init__(self, msg: str, error_status_hint: int = 400) -> None:
        # Enforce abstractness by hand: only subclasses may be instantiated.
        if type(self) is ProtocolError:
            raise TypeError("tried to directly instantiate ProtocolError")
        super().__init__(msg)
        self.error_status_hint = error_status_hint
46
+
47
+
48
+ # Strategy: there are a number of public APIs where a LocalProtocolError can
49
+ # be raised (send(), all the different event constructors, ...), and only one
50
+ # public API where RemoteProtocolError can be raised
51
+ # (receive_data()). Therefore we always raise LocalProtocolError internally,
52
+ # and then receive_data will translate this into a RemoteProtocolError.
53
+ #
54
+ # Internally:
55
+ # LocalProtocolError is the generic "ProtocolError".
56
+ # Externally:
57
+ # LocalProtocolError is for local errors and RemoteProtocolError is for
58
+ # remote errors.
59
class LocalProtocolError(ProtocolError):
    # Raised when *our* side attempted something HTTP/1.1 forbids.
    # Internally h11 always raises LocalProtocolError; receive_data()
    # converts it into a RemoteProtocolError (via the method below) when the
    # bad data actually came from the peer.
    def _reraise_as_remote_protocol_error(self) -> NoReturn:
        # After catching a LocalProtocolError, use this method to re-raise it
        # as a RemoteProtocolError. This method must be called from inside an
        # except: block.
        #
        # An easy way to get an equivalent RemoteProtocolError is just to
        # modify 'self' in place.
        self.__class__ = RemoteProtocolError  # type: ignore
        # But the re-raising is somewhat non-trivial -- you might think that
        # now that we've modified the in-flight exception object, that just
        # doing 'raise' to re-raise it would be enough. But it turns out that
        # this doesn't work, because Python tracks the exception type
        # (exc_info[0]) separately from the exception object (exc_info[1]),
        # and we only modified the latter. So we really do need to re-raise
        # the new type explicitly.
        # On py3, the traceback is part of the exception object, so our
        # in-place modification preserved it and we can just re-raise:
        raise self
78
+
79
+
80
class RemoteProtocolError(ProtocolError):
    # Raised (only out of receive_data) when the *peer* violated HTTP/1.1.
    # Instances are typically produced by converting a LocalProtocolError in
    # place; see _reraise_as_remote_protocol_error above.
    pass
82
+
83
+
84
def validate(
    regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any
) -> Dict[str, bytes]:
    """Require *data* to fully match *regex*; return the named capture groups.

    On mismatch, raises LocalProtocolError with *msg* — formatted via
    ``msg.format(*format_args)`` when any format arguments were supplied.
    """
    m = regex.fullmatch(data)
    if m is None:
        raise LocalProtocolError(msg.format(*format_args) if format_args else msg)
    return m.groupdict()
93
+
94
+
95
+ # Sentinel values
96
+ #
97
+ # - Inherit identity-based comparison and hashing from object
98
+ # - Have a nice repr
99
+ # - Have a *bonus property*: type(sentinel) is sentinel
100
+ #
101
+ # The bonus property is useful if you want to take the return value from
102
+ # next_event() and do some sort of dispatch based on type(event).
103
+
104
_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel")


class Sentinel(type):
    """Metaclass for sentinel values with the bonus ``type(S) is S`` property.

    Sentinels inherit identity-based comparison and hashing from object and
    get a clean repr.  Because each sentinel is its own type, return values
    such as those from next_event() can be dispatched on with type(event).
    """

    def __new__(
        cls: Type[_T_Sentinel],
        name: str,
        bases: Tuple[type, ...],
        namespace: Dict[str, Any],
        **kwds: Any
    ) -> _T_Sentinel:
        assert bases == (Sentinel,)
        sentinel_class = super().__new__(cls, name, bases, namespace, **kwds)
        # Make the class be its own (meta)class: type(S) is S.
        sentinel_class.__class__ = sentinel_class  # type: ignore
        return sentinel_class

    def __repr__(self) -> str:
        return self.__name__
122
+
123
+
124
+ # Used for methods, request targets, HTTP versions, header names, and header
125
+ # values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always
126
+ # returns bytes.
127
def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes:
    """Coerce an ascii str or a bytes-like object to plain bytes.

    Used for methods, request targets, HTTP versions, header names, and
    header values.  Bare ints are rejected explicitly, since ``bytes(n)``
    would silently build an n-byte zero buffer instead.
    """
    if type(s) is bytes:
        # Fast path: already exactly bytes; hand it back untouched.
        return s
    if isinstance(s, int):
        raise TypeError("expected bytes-like object, not int")
    if isinstance(s, str):
        s = s.encode("ascii")
    return bytes(s)
parrot/lib/python3.10/site-packages/h11/_version.py ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file must be kept very simple, because it is consumed from several
2
+ # places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.
3
+
4
+ # We use a simple scheme:
5
+ # 1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
6
+ # where the +dev versions are never released into the wild, they're just what
7
+ # we stick into the VCS in between releases.
8
+ #
9
+ # This is compatible with PEP 440:
10
+ # http://legacy.python.org/dev/peps/pep-0440/
11
+ # via the use of the "local suffix" "+dev", which is disallowed on index
12
+ # servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
13
+ # want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
14
+ # 1.0.0.)
15
+
16
# Current release version, PEP 440 compliant (see scheme described above).
__version__ = "0.14.0"
parrot/lib/python3.10/site-packages/h11/_writers.py ADDED
@@ -0,0 +1,145 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Code to write HTTP data
#
# Strategy: each writer takes an event + a write-some-bytes function, which it
# then calls.
#
# WRITERS is a dict describing how to pick a writer. It maps states to either:
# - a writer
# - or, for body writers, a dict of framing-dependent writer factories
9
+
10
+ from typing import Any, Callable, Dict, List, Tuple, Type, Union
11
+
12
+ from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response
13
+ from ._headers import Headers
14
+ from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER
15
+ from ._util import LocalProtocolError, Sentinel
16
+
17
+ __all__ = ["WRITERS"]
18
+
19
# A "writer" is any callable that consumes one chunk of bytes (e.g. a send
# buffer's write method or a list's append).
Writer = Callable[[bytes], Any]
20
+
21
+
22
def write_headers(headers: Headers, write: Writer) -> None:
    """Emit every header line, then the blank line ending the header block.

    Host is emitted first: "Since the Host field-value is critical
    information for handling a request, a user agent SHOULD generate Host as
    the first header field following the request-line." - RFC 7230
    """
    host_lines = []
    other_lines = []
    for raw_name, name, value in headers._full_items:
        line = b"%s: %s\r\n" % (raw_name, value)
        if name == b"host":
            host_lines.append(line)
        else:
            other_lines.append(line)
    for line in host_lines:
        write(line)
    for line in other_lines:
        write(line)
    write(b"\r\n")
34
+
35
+
36
def write_request(request: Request, write: Writer) -> None:
    """Serialize a Request event: request line, then its header block."""
    if request.http_version != b"1.1":
        raise LocalProtocolError("I only send HTTP/1.1")
    request_line = b"%s %s HTTP/1.1\r\n" % (request.method, request.target)
    write(request_line)
    write_headers(request.headers, write)
41
+
42
+
43
# Shared between InformationalResponse and Response
def write_any_response(
    response: Union[InformationalResponse, Response], write: Writer
) -> None:
    """Serialize an (informational or final) response: status line + headers."""
    if response.http_version != b"1.1":
        raise LocalProtocolError("I only send HTTP/1.1")
    # We don't bother sending ascii status messages like "OK"; they're
    # optional and ignored by the protocol. (But the space after the numeric
    # status code is mandatory.)
    #
    # XX FIXME: could at least make an effort to pull out the status message
    # from stdlib's http.HTTPStatus table. Or maybe just steal their enums
    # (either by import or copy/paste). We already accept them as status codes
    # since they're of type IntEnum < int.
    status_bytes = str(response.status_code).encode("ascii")
    status_line = b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason)
    write(status_line)
    write_headers(response.headers, write)
60
+
61
+
62
class BodyWriter:
    """Base class for body writers: dispatches events to send_data/send_eom."""

    def __call__(self, event: Event, write: Writer) -> None:
        event_type = type(event)
        if event_type is Data:
            self.send_data(event.data, write)
        elif event_type is EndOfMessage:
            self.send_eom(event.headers, write)
        else:  # pragma: no cover
            assert False

    def send_data(self, data: bytes, write: Writer) -> None:
        # Default no-op; subclasses apply their framing here.
        pass

    def send_eom(self, headers: Headers, write: Writer) -> None:
        # Default no-op; subclasses finish the message / validate trailers.
        pass
76
+
77
+
78
+ #
79
+ # These are all careful not to do anything to 'data' except call len(data) and
80
+ # write(data). This allows us to transparently pass-through funny objects,
81
+ # like placeholder objects referring to files on disk that will be sent via
82
+ # sendfile(2).
83
+ #
84
class ContentLengthWriter(BodyWriter):
    # Writes a body whose exact size was declared up front via Content-Length.

    def __init__(self, length: int) -> None:
        # Number of body bytes the caller still owes us.
        self._length = length

    def send_data(self, data: bytes, write: Writer) -> None:
        self._length -= len(data)
        if self._length < 0:
            raise LocalProtocolError("Too much data for declared Content-Length")
        write(data)

    def send_eom(self, headers: Headers, write: Writer) -> None:
        if self._length != 0:
            raise LocalProtocolError("Too little data for declared Content-Length")
        if headers:
            # Trailing headers can only be framed by chunked encoding.
            raise LocalProtocolError("Content-Length and trailers don't mix")
99
+
100
+
101
class ChunkedWriter(BodyWriter):
    # Frames the body using HTTP/1.1 chunked transfer-encoding.

    def send_data(self, data: bytes, write: Writer) -> None:
        # if we encoded 0-length data in the naive way, it would look like an
        # end-of-message.
        if not data:
            return
        # Each chunk: hex length, CRLF, payload, CRLF.
        write(b"%x\r\n" % len(data))
        write(data)
        write(b"\r\n")

    def send_eom(self, headers: Headers, write: Writer) -> None:
        # Terminating zero-length chunk, then any trailers plus the final
        # blank line (emitted by write_headers).
        write(b"0\r\n")
        write_headers(headers, write)
114
+
115
+
116
class Http10Writer(BodyWriter):
    # HTTP/1.0-style framing: the body simply runs until the connection closes.

    def send_data(self, data: bytes, write: Writer) -> None:
        write(data)

    def send_eom(self, headers: Headers, write: Writer) -> None:
        if headers:
            raise LocalProtocolError("can't send trailers to HTTP/1.0 client")
        # no need to close the socket ourselves, that will be taken care of by
        # Connection: close machinery
125
+
126
+
127
# Writer lookup table type: keys are either (role, state) pairs or the bare
# SEND_BODY sentinel; values are either a header-section writer or (for
# SEND_BODY) a dict mapping body-framing name -> BodyWriter subclass.
WritersType = Dict[
    Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]],
    Union[
        Dict[str, Type[BodyWriter]],
        Callable[[Union[InformationalResponse, Response], Writer], None],
        Callable[[Request, Writer], None],
    ],
]

WRITERS: WritersType = {
    (CLIENT, IDLE): write_request,
    (SERVER, IDLE): write_any_response,
    (SERVER, SEND_RESPONSE): write_any_response,
    # Body writers are chosen by framing name once the headers are known.
    SEND_BODY: {
        "chunked": ChunkedWriter,
        "content-length": ContentLengthWriter,
        "http/1.0": Http10Writer,
    },
}