Diffusers
Safetensors
leewheel commited on
Commit
9ff7459
·
verified ·
1 Parent(s): 9d4c5ad

Upload 57 files

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +3 -0
  2. python_env/lib/site-packages/regex-2025.11.3.dist-info/INSTALLER +1 -0
  3. python_env/lib/site-packages/regex-2025.11.3.dist-info/METADATA +1059 -0
  4. python_env/lib/site-packages/regex-2025.11.3.dist-info/RECORD +15 -0
  5. python_env/lib/site-packages/regex-2025.11.3.dist-info/WHEEL +5 -0
  6. python_env/lib/site-packages/regex-2025.11.3.dist-info/licenses/LICENSE.txt +208 -0
  7. python_env/lib/site-packages/regex-2025.11.3.dist-info/top_level.txt +1 -0
  8. python_env/lib/site-packages/regex/__init__.py +3 -0
  9. python_env/lib/site-packages/regex/__pycache__/__init__.cpython-310.pyc +0 -0
  10. python_env/lib/site-packages/regex/__pycache__/_main.cpython-310.pyc +0 -0
  11. python_env/lib/site-packages/regex/__pycache__/_regex_core.cpython-310.pyc +3 -0
  12. python_env/lib/site-packages/regex/_main.py +746 -0
  13. python_env/lib/site-packages/regex/_regex.cp310-win_amd64.pyd +3 -0
  14. python_env/lib/site-packages/regex/_regex_core.py +0 -0
  15. python_env/lib/site-packages/regex/tests/__pycache__/test_regex.cpython-310.pyc +3 -0
  16. python_env/lib/site-packages/regex/tests/test_regex.py +0 -0
  17. python_env/lib/site-packages/requests-2.32.5.dist-info/INSTALLER +1 -0
  18. python_env/lib/site-packages/requests-2.32.5.dist-info/METADATA +133 -0
  19. python_env/lib/site-packages/requests-2.32.5.dist-info/RECORD +42 -0
  20. python_env/lib/site-packages/requests-2.32.5.dist-info/WHEEL +5 -0
  21. python_env/lib/site-packages/requests-2.32.5.dist-info/licenses/LICENSE +175 -0
  22. python_env/lib/site-packages/requests-2.32.5.dist-info/top_level.txt +1 -0
  23. python_env/lib/site-packages/requests/__init__.py +184 -0
  24. python_env/lib/site-packages/requests/__pycache__/__init__.cpython-310.pyc +0 -0
  25. python_env/lib/site-packages/requests/__pycache__/__version__.cpython-310.pyc +0 -0
  26. python_env/lib/site-packages/requests/__pycache__/_internal_utils.cpython-310.pyc +0 -0
  27. python_env/lib/site-packages/requests/__pycache__/adapters.cpython-310.pyc +0 -0
  28. python_env/lib/site-packages/requests/__pycache__/api.cpython-310.pyc +0 -0
  29. python_env/lib/site-packages/requests/__pycache__/auth.cpython-310.pyc +0 -0
  30. python_env/lib/site-packages/requests/__pycache__/certs.cpython-310.pyc +0 -0
  31. python_env/lib/site-packages/requests/__pycache__/compat.cpython-310.pyc +0 -0
  32. python_env/lib/site-packages/requests/__pycache__/cookies.cpython-310.pyc +0 -0
  33. python_env/lib/site-packages/requests/__pycache__/exceptions.cpython-310.pyc +0 -0
  34. python_env/lib/site-packages/requests/__pycache__/help.cpython-310.pyc +0 -0
  35. python_env/lib/site-packages/requests/__pycache__/hooks.cpython-310.pyc +0 -0
  36. python_env/lib/site-packages/requests/__pycache__/models.cpython-310.pyc +0 -0
  37. python_env/lib/site-packages/requests/__pycache__/packages.cpython-310.pyc +0 -0
  38. python_env/lib/site-packages/requests/__pycache__/sessions.cpython-310.pyc +0 -0
  39. python_env/lib/site-packages/requests/__pycache__/status_codes.cpython-310.pyc +0 -0
  40. python_env/lib/site-packages/requests/__pycache__/structures.cpython-310.pyc +0 -0
  41. python_env/lib/site-packages/requests/__pycache__/utils.cpython-310.pyc +0 -0
  42. python_env/lib/site-packages/requests/__version__.py +14 -0
  43. python_env/lib/site-packages/requests/_internal_utils.py +50 -0
  44. python_env/lib/site-packages/requests/adapters.py +696 -0
  45. python_env/lib/site-packages/requests/api.py +157 -0
  46. python_env/lib/site-packages/requests/auth.py +314 -0
  47. python_env/lib/site-packages/requests/certs.py +17 -0
  48. python_env/lib/site-packages/requests/compat.py +106 -0
  49. python_env/lib/site-packages/requests/cookies.py +561 -0
  50. python_env/lib/site-packages/requests/exceptions.py +151 -0
.gitattributes CHANGED
@@ -513,3 +513,6 @@ python_env/lib/site-packages/scipy/stats/tests/__pycache__/test_multivariate.cpy
513
  python_env/lib/site-packages/scipy/stats/tests/__pycache__/test_stats.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
514
  python_env/lib/site-packages/safetensors/_safetensors_rust.pyd filter=lfs diff=lfs merge=lfs -text
515
  python_env/lib/site-packages/rich/__pycache__/_emoji_codes.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
 
 
 
 
513
  python_env/lib/site-packages/scipy/stats/tests/__pycache__/test_stats.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
514
  python_env/lib/site-packages/safetensors/_safetensors_rust.pyd filter=lfs diff=lfs merge=lfs -text
515
  python_env/lib/site-packages/rich/__pycache__/_emoji_codes.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
516
+ python_env/lib/site-packages/regex/__pycache__/_regex_core.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
517
+ python_env/lib/site-packages/regex/_regex.cp310-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text
518
+ python_env/lib/site-packages/regex/tests/__pycache__/test_regex.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
python_env/lib/site-packages/regex-2025.11.3.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
python_env/lib/site-packages/regex-2025.11.3.dist-info/METADATA ADDED
@@ -0,0 +1,1059 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.4
2
+ Name: regex
3
+ Version: 2025.11.3
4
+ Summary: Alternative regular expression module, to replace re.
5
+ Author-email: Matthew Barnett <regex@mrabarnett.plus.com>
6
+ License-Expression: Apache-2.0 AND CNRI-Python
7
+ Project-URL: Homepage, https://github.com/mrabarnett/mrab-regex
8
+ Classifier: Development Status :: 5 - Production/Stable
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: Operating System :: OS Independent
11
+ Classifier: Programming Language :: Python :: 3.9
12
+ Classifier: Programming Language :: Python :: 3.10
13
+ Classifier: Programming Language :: Python :: 3.11
14
+ Classifier: Programming Language :: Python :: 3.12
15
+ Classifier: Programming Language :: Python :: 3.13
16
+ Classifier: Programming Language :: Python :: 3.14
17
+ Classifier: Topic :: Scientific/Engineering :: Information Analysis
18
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
19
+ Classifier: Topic :: Text Processing
20
+ Classifier: Topic :: Text Processing :: General
21
+ Requires-Python: >=3.9
22
+ Description-Content-Type: text/x-rst
23
+ License-File: LICENSE.txt
24
+ Dynamic: license-file
25
+
26
+ Introduction
27
+ ------------
28
+
29
+ This regex implementation is backwards-compatible with the standard 're' module, but offers additional functionality.
30
+
31
+ Python 2
32
+ --------
33
+
34
+ Python 2 is no longer supported. The last release that supported Python 2 was 2021.11.10.
35
+
36
+ PyPy
37
+ ----
38
+
39
+ This module is targeted at CPython. It expects that all codepoints are the same width, so it won't behave properly with PyPy outside U+0000..U+007F because PyPy stores strings as UTF-8.
40
+
41
+ Multithreading
42
+ --------------
43
+
44
+ The regex module releases the GIL during matching on instances of the built-in (immutable) string classes, enabling other Python threads to run concurrently. It is also possible to force the regex module to release the GIL during matching by calling the matching methods with the keyword argument ``concurrent=True``. The behaviour is undefined if the string changes during matching, so use it *only* when it is guaranteed that that won't happen.
45
+
46
+ Unicode
47
+ -------
48
+
49
+ This module supports Unicode 17.0.0. Full Unicode case-folding is supported.
50
+
51
+ Flags
52
+ -----
53
+
54
+ There are 2 kinds of flag: scoped and global. Scoped flags can apply to only part of a pattern and can be turned on or off; global flags apply to the entire pattern and can only be turned on.
55
+
56
+ The scoped flags are: ``ASCII (?a)``, ``FULLCASE (?f)``, ``IGNORECASE (?i)``, ``LOCALE (?L)``, ``MULTILINE (?m)``, ``DOTALL (?s)``, ``UNICODE (?u)``, ``VERBOSE (?x)``, ``WORD (?w)``.
57
+
58
+ The global flags are: ``BESTMATCH (?b)``, ``ENHANCEMATCH (?e)``, ``POSIX (?p)``, ``REVERSE (?r)``, ``VERSION0 (?V0)``, ``VERSION1 (?V1)``.
59
+
60
+ If neither the ``ASCII``, ``LOCALE`` nor ``UNICODE`` flag is specified, it will default to ``UNICODE`` if the regex pattern is a Unicode string and ``ASCII`` if it's a bytestring.
61
+
62
+ The ``ENHANCEMATCH`` flag makes fuzzy matching attempt to improve the fit of the next match that it finds.
63
+
64
+ The ``BESTMATCH`` flag makes fuzzy matching search for the best match instead of the next match.
65
+
66
+ Old vs new behaviour
67
+ --------------------
68
+
69
+ In order to be compatible with the re module, this module has 2 behaviours:
70
+
71
+ * **Version 0** behaviour (old behaviour, compatible with the re module):
72
+
73
+ Please note that the re module's behaviour may change over time, and I'll endeavour to match that behaviour in version 0.
74
+
75
+ * Indicated by the ``VERSION0`` flag.
76
+
77
+ * Zero-width matches are not handled correctly in the re module before Python 3.7. The behaviour in those earlier versions is:
78
+
79
+ * ``.split`` won't split a string at a zero-width match.
80
+
81
+ * ``.sub`` will advance by one character after a zero-width match.
82
+
83
+ * Inline flags apply to the entire pattern, and they can't be turned off.
84
+
85
+ * Only simple sets are supported.
86
+
87
+ * Case-insensitive matches in Unicode use simple case-folding by default.
88
+
89
+ * **Version 1** behaviour (new behaviour, possibly different from the re module):
90
+
91
+ * Indicated by the ``VERSION1`` flag.
92
+
93
+ * Zero-width matches are handled correctly.
94
+
95
+ * Inline flags apply to the end of the group or pattern, and they can be turned off.
96
+
97
+ * Nested sets and set operations are supported.
98
+
99
+ * Case-insensitive matches in Unicode use full case-folding by default.
100
+
101
+ If no version is specified, the regex module will default to ``regex.DEFAULT_VERSION``.
102
+
103
+ Case-insensitive matches in Unicode
104
+ -----------------------------------
105
+
106
+ The regex module supports both simple and full case-folding for case-insensitive matches in Unicode. Use of full case-folding can be turned on using the ``FULLCASE`` flag. Please note that this flag affects how the ``IGNORECASE`` flag works; the ``FULLCASE`` flag itself does not turn on case-insensitive matching.
107
+
108
+ Version 0 behaviour: the flag is off by default.
109
+
110
+ Version 1 behaviour: the flag is on by default.
111
+
112
+ Nested sets and set operations
113
+ ------------------------------
114
+
115
+ It's not possible to support both simple sets, as used in the re module, and nested sets at the same time because of a difference in the meaning of an unescaped ``"["`` in a set.
116
+
117
+ For example, the pattern ``[[a-z]--[aeiou]]`` is treated in the version 0 behaviour (simple sets, compatible with the re module) as:
118
+
119
+ * Set containing "[" and the letters "a" to "z"
120
+
121
+ * Literal "--"
122
+
123
+ * Set containing letters "a", "e", "i", "o", "u"
124
+
125
+ * Literal "]"
126
+
127
+ but in the version 1 behaviour (nested sets, enhanced behaviour) as:
128
+
129
+ * Set which is:
130
+
131
+ * Set containing the letters "a" to "z"
132
+
133
+ * but excluding:
134
+
135
+ * Set containing the letters "a", "e", "i", "o", "u"
136
+
137
+ Version 0 behaviour: only simple sets are supported.
138
+
139
+ Version 1 behaviour: nested sets and set operations are supported.
140
+
141
+ Notes on named groups
142
+ ---------------------
143
+
144
+ All groups have a group number, starting from 1.
145
+
146
+ Groups with the same group name will have the same group number, and groups with a different group name will have a different group number.
147
+
148
+ The same name can be used by more than one group, with later captures 'overwriting' earlier captures. All the captures of the group will be available from the ``captures`` method of the match object.
149
+
150
+ Group numbers will be reused across different branches of a branch reset, eg. ``(?|(first)|(second))`` has only group 1. If groups have different group names then they will, of course, have different group numbers, eg. ``(?|(?P<foo>first)|(?P<bar>second))`` has group 1 ("foo") and group 2 ("bar").
151
+
152
+ In the regex ``(\s+)(?|(?P<foo>[A-Z]+)|(\w+) (?P<foo>[0-9]+))`` there are 2 groups:
153
+
154
+ * ``(\s+)`` is group 1.
155
+
156
+ * ``(?P<foo>[A-Z]+)`` is group 2, also called "foo".
157
+
158
+ * ``(\w+)`` is group 2 because of the branch reset.
159
+
160
+ * ``(?P<foo>[0-9]+)`` is group 2 because it's called "foo".
161
+
162
+ If you want to prevent ``(\w+)`` from being group 2, you need to name it (different name, different group number).
163
+
164
+ Additional features
165
+ -------------------
166
+
167
+ The issue numbers relate to the Python bug tracker, except where listed otherwise.
168
+
169
+ Added ``\p{Horiz_Space}`` and ``\p{Vert_Space}`` (`GitHub issue 477 <https://github.com/mrabarnett/mrab-regex/issues/477#issuecomment-1216779547>`_)
170
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
171
+
172
+ ``\p{Horiz_Space}`` or ``\p{H}`` matches horizontal whitespace and ``\p{Vert_Space}`` or ``\p{V}`` matches vertical whitespace.
173
+
174
+ Added support for lookaround in conditional pattern (`Hg issue 163 <https://github.com/mrabarnett/mrab-regex/issues/163>`_)
175
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
176
+
177
+ The test of a conditional pattern can be a lookaround.
178
+
179
+ .. sourcecode:: python
180
+
181
+ >>> regex.match(r'(?(?=\d)\d+|\w+)', '123abc')
182
+ <regex.Match object; span=(0, 3), match='123'>
183
+ >>> regex.match(r'(?(?=\d)\d+|\w+)', 'abc123')
184
+ <regex.Match object; span=(0, 6), match='abc123'>
185
+
186
+ This is not quite the same as putting a lookaround in the first branch of a pair of alternatives.
187
+
188
+ .. sourcecode:: python
189
+
190
+ >>> print(regex.match(r'(?:(?=\d)\d+\b|\w+)', '123abc'))
191
+ <regex.Match object; span=(0, 6), match='123abc'>
192
+ >>> print(regex.match(r'(?(?=\d)\d+\b|\w+)', '123abc'))
193
+ None
194
+
195
+ In the first example, the lookaround matched, but the remainder of the first branch failed to match, and so the second branch was attempted, whereas in the second example, the lookaround matched, and the first branch failed to match, but the second branch was **not** attempted.
196
+
197
+ Added POSIX matching (leftmost longest) (`Hg issue 150 <https://github.com/mrabarnett/mrab-regex/issues/150>`_)
198
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
199
+
200
+ The POSIX standard for regex is to return the leftmost longest match. This can be turned on using the ``POSIX`` flag.
201
+
202
+ .. sourcecode:: python
203
+
204
+ >>> # Normal matching.
205
+ >>> regex.search(r'Mr|Mrs', 'Mrs')
206
+ <regex.Match object; span=(0, 2), match='Mr'>
207
+ >>> regex.search(r'one(self)?(selfsufficient)?', 'oneselfsufficient')
208
+ <regex.Match object; span=(0, 7), match='oneself'>
209
+ >>> # POSIX matching.
210
+ >>> regex.search(r'(?p)Mr|Mrs', 'Mrs')
211
+ <regex.Match object; span=(0, 3), match='Mrs'>
212
+ >>> regex.search(r'(?p)one(self)?(selfsufficient)?', 'oneselfsufficient')
213
+ <regex.Match object; span=(0, 17), match='oneselfsufficient'>
214
+
215
+ Note that it will take longer to find matches because when it finds a match at a certain position, it won't return that immediately, but will keep looking to see if there's another longer match there.
216
+
217
+ Added ``(?(DEFINE)...)`` (`Hg issue 152 <https://github.com/mrabarnett/mrab-regex/issues/152>`_)
218
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
219
+
220
+ If there's no group called "DEFINE", then ... will be ignored except that any groups defined within it can be called and that the normal rules for numbering groups still apply.
221
+
222
+ .. sourcecode:: python
223
+
224
+ >>> regex.search(r'(?(DEFINE)(?P<quant>\d+)(?P<item>\w+))(?&quant) (?&item)', '5 elephants')
225
+ <regex.Match object; span=(0, 11), match='5 elephants'>
226
+
227
+ Added ``(*PRUNE)``, ``(*SKIP)`` and ``(*FAIL)`` (`Hg issue 153 <https://github.com/mrabarnett/mrab-regex/issues/153>`_)
228
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
229
+
230
+ ``(*PRUNE)`` discards the backtracking info up to that point. When used in an atomic group or a lookaround, it won't affect the enclosing pattern.
231
+
232
+ ``(*SKIP)`` is similar to ``(*PRUNE)``, except that it also sets where in the text the next attempt to match will start. When used in an atomic group or a lookaround, it won't affect the enclosing pattern.
233
+
234
+ ``(*FAIL)`` causes immediate backtracking. ``(*F)`` is a permitted abbreviation.
235
+
236
+ Added ``\K`` (`Hg issue 151 <https://github.com/mrabarnett/mrab-regex/issues/151>`_)
237
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
238
+
239
+ Keeps the part of the entire match after the position where ``\K`` occurred; the part before it is discarded.
240
+
241
+ It does not affect what groups return.
242
+
243
+ .. sourcecode:: python
244
+
245
+ >>> m = regex.search(r'(\w\w\K\w\w\w)', 'abcdef')
246
+ >>> m[0]
247
+ 'cde'
248
+ >>> m[1]
249
+ 'abcde'
250
+ >>>
251
+ >>> m = regex.search(r'(?r)(\w\w\K\w\w\w)', 'abcdef')
252
+ >>> m[0]
253
+ 'bc'
254
+ >>> m[1]
255
+ 'bcdef'
256
+
257
+ Added capture subscripting for ``expandf`` and ``subf``/``subfn`` (`Hg issue 133 <https://github.com/mrabarnett/mrab-regex/issues/133>`_)
258
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
259
+
260
+ You can use subscripting to get the captures of a repeated group.
261
+
262
+ .. sourcecode:: python
263
+
264
+ >>> m = regex.match(r"(\w)+", "abc")
265
+ >>> m.expandf("{1}")
266
+ 'c'
267
+ >>> m.expandf("{1[0]} {1[1]} {1[2]}")
268
+ 'a b c'
269
+ >>> m.expandf("{1[-1]} {1[-2]} {1[-3]}")
270
+ 'c b a'
271
+ >>>
272
+ >>> m = regex.match(r"(?P<letter>\w)+", "abc")
273
+ >>> m.expandf("{letter}")
274
+ 'c'
275
+ >>> m.expandf("{letter[0]} {letter[1]} {letter[2]}")
276
+ 'a b c'
277
+ >>> m.expandf("{letter[-1]} {letter[-2]} {letter[-3]}")
278
+ 'c b a'
279
+
280
+ Added support for referring to a group by number using ``(?P=...)``
281
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
282
+
283
+ This is in addition to the existing ``\g<...>``.
284
+
285
+ Fixed the handling of locale-sensitive regexes
286
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
287
+
288
+ The ``LOCALE`` flag is intended for legacy code and has limited support. You're still recommended to use Unicode instead.
289
+
290
+ Added partial matches (`Hg issue 102 <https://github.com/mrabarnett/mrab-regex/issues/102>`_)
291
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
292
+
293
+ A partial match is one that matches up to the end of string, but that string has been truncated and you want to know whether a complete match could be possible if the string had not been truncated.
294
+
295
+ Partial matches are supported by ``match``, ``search``, ``fullmatch`` and ``finditer`` with the ``partial`` keyword argument.
296
+
297
+ Match objects have a ``partial`` attribute, which is ``True`` if it's a partial match.
298
+
299
+ For example, if you wanted a user to enter a 4-digit number and check it character by character as it was being entered:
300
+
301
+ .. sourcecode:: python
302
+
303
+ >>> pattern = regex.compile(r'\d{4}')
304
+
305
+ >>> # Initially, nothing has been entered:
306
+ >>> print(pattern.fullmatch('', partial=True))
307
+ <regex.Match object; span=(0, 0), match='', partial=True>
308
+
309
+ >>> # An empty string is OK, but it's only a partial match.
310
+ >>> # The user enters a letter:
311
+ >>> print(pattern.fullmatch('a', partial=True))
312
+ None
313
+ >>> # It'll never match.
314
+
315
+ >>> # The user deletes that and enters a digit:
316
+ >>> print(pattern.fullmatch('1', partial=True))
317
+ <regex.Match object; span=(0, 1), match='1', partial=True>
318
+ >>> # It matches this far, but it's only a partial match.
319
+
320
+ >>> # The user enters 2 more digits:
321
+ >>> print(pattern.fullmatch('123', partial=True))
322
+ <regex.Match object; span=(0, 3), match='123', partial=True>
323
+ >>> # It matches this far, but it's only a partial match.
324
+
325
+ >>> # The user enters another digit:
326
+ >>> print(pattern.fullmatch('1234', partial=True))
327
+ <regex.Match object; span=(0, 4), match='1234'>
328
+ >>> # It's a complete match.
329
+
330
+ >>> # If the user enters another digit:
331
+ >>> print(pattern.fullmatch('12345', partial=True))
332
+ None
333
+ >>> # It's no longer a match.
334
+
335
+ >>> # This is a partial match:
336
+ >>> pattern.match('123', partial=True).partial
337
+ True
338
+
339
+ >>> # This is a complete match:
340
+ >>> pattern.match('1233', partial=True).partial
341
+ False
342
+
343
+ ``*`` operator not working correctly with sub() (`Hg issue 106 <https://github.com/mrabarnett/mrab-regex/issues/106>`_)
344
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
345
+
346
+ Sometimes it's not clear how zero-width matches should be handled. For example, should ``.*`` match 0 characters directly after matching >0 characters?
347
+
348
+ .. sourcecode:: python
349
+
350
+ >>> regex.sub('.*', 'x', 'test')
351
+ 'xx'
352
+ >>> regex.sub('.*?', '|', 'test')
353
+ '|||||||||'
354
+
355
+ Added ``capturesdict`` (`Hg issue 86 <https://github.com/mrabarnett/mrab-regex/issues/86>`_)
356
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
357
+
358
+ ``capturesdict`` is a combination of ``groupdict`` and ``captures``:
359
+
360
+ ``groupdict`` returns a dict of the named groups and the last capture of those groups.
361
+
362
+ ``captures`` returns a list of all the captures of a group.
363
+
364
+ ``capturesdict`` returns a dict of the named groups and lists of all the captures of those groups.
365
+
366
+ .. sourcecode:: python
367
+
368
+ >>> m = regex.match(r"(?:(?P<word>\w+) (?P<digits>\d+)\n)+", "one 1\ntwo 2\nthree 3\n")
369
+ >>> m.groupdict()
370
+ {'word': 'three', 'digits': '3'}
371
+ >>> m.captures("word")
372
+ ['one', 'two', 'three']
373
+ >>> m.captures("digits")
374
+ ['1', '2', '3']
375
+ >>> m.capturesdict()
376
+ {'word': ['one', 'two', 'three'], 'digits': ['1', '2', '3']}
377
+
378
+ Added ``allcaptures`` and ``allspans`` (`Git issue 474 <https://github.com/mrabarnett/mrab-regex/issues/474>`_)
379
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
380
+
381
+ ``allcaptures`` returns a list of all the captures of all the groups.
382
+
383
+ ``allspans`` returns a list of all the spans of all the captures of all the groups.
384
+
385
+ .. sourcecode:: python
386
+
387
+ >>> m = regex.match(r"(?:(?P<word>\w+) (?P<digits>\d+)\n)+", "one 1\ntwo 2\nthree 3\n")
388
+ >>> m.allcaptures()
389
+ (['one 1\ntwo 2\nthree 3\n'], ['one', 'two', 'three'], ['1', '2', '3'])
390
+ >>> m.allspans()
391
+ ([(0, 20)], [(0, 3), (6, 9), (12, 17)], [(4, 5), (10, 11), (18, 19)])
392
+
393
+ Allow duplicate names of groups (`Hg issue 87 <https://github.com/mrabarnett/mrab-regex/issues/87>`_)
394
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
395
+
396
+ Group names can be duplicated.
397
+
398
+ .. sourcecode:: python
399
+
400
+ >>> # With optional groups:
401
+ >>>
402
+ >>> # Both groups capture, the second capture 'overwriting' the first.
403
+ >>> m = regex.match(r"(?P<item>\w+)? or (?P<item>\w+)?", "first or second")
404
+ >>> m.group("item")
405
+ 'second'
406
+ >>> m.captures("item")
407
+ ['first', 'second']
408
+ >>> # Only the second group captures.
409
+ >>> m = regex.match(r"(?P<item>\w+)? or (?P<item>\w+)?", " or second")
410
+ >>> m.group("item")
411
+ 'second'
412
+ >>> m.captures("item")
413
+ ['second']
414
+ >>> # Only the first group captures.
415
+ >>> m = regex.match(r"(?P<item>\w+)? or (?P<item>\w+)?", "first or ")
416
+ >>> m.group("item")
417
+ 'first'
418
+ >>> m.captures("item")
419
+ ['first']
420
+ >>>
421
+ >>> # With mandatory groups:
422
+ >>>
423
+ >>> # Both groups capture, the second capture 'overwriting' the first.
424
+ >>> m = regex.match(r"(?P<item>\w*) or (?P<item>\w*)?", "first or second")
425
+ >>> m.group("item")
426
+ 'second'
427
+ >>> m.captures("item")
428
+ ['first', 'second']
429
+ >>> # Again, both groups capture, the second capture 'overwriting' the first.
430
+ >>> m = regex.match(r"(?P<item>\w*) or (?P<item>\w*)", " or second")
431
+ >>> m.group("item")
432
+ 'second'
433
+ >>> m.captures("item")
434
+ ['', 'second']
435
+ >>> # And yet again, both groups capture, the second capture 'overwriting' the first.
436
+ >>> m = regex.match(r"(?P<item>\w*) or (?P<item>\w*)", "first or ")
437
+ >>> m.group("item")
438
+ ''
439
+ >>> m.captures("item")
440
+ ['first', '']
441
+
442
+ Added ``fullmatch`` (`issue #16203 <https://bugs.python.org/issue16203>`_)
443
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
444
+
445
+ ``fullmatch`` behaves like ``match``, except that it must match all of the string.
446
+
447
+ .. sourcecode:: python
448
+
449
+ >>> print(regex.fullmatch(r"abc", "abc").span())
450
+ (0, 3)
451
+ >>> print(regex.fullmatch(r"abc", "abcx"))
452
+ None
453
+ >>> print(regex.fullmatch(r"abc", "abcx", endpos=3).span())
454
+ (0, 3)
455
+ >>> print(regex.fullmatch(r"abc", "xabcy", pos=1, endpos=4).span())
456
+ (1, 4)
457
+ >>>
458
+ >>> regex.match(r"a.*?", "abcd").group(0)
459
+ 'a'
460
+ >>> regex.fullmatch(r"a.*?", "abcd").group(0)
461
+ 'abcd'
462
+
463
+ Added ``subf`` and ``subfn``
464
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
465
+
466
+ ``subf`` and ``subfn`` are alternatives to ``sub`` and ``subn`` respectively. When passed a replacement string, they treat it as a format string.
467
+
468
+ .. sourcecode:: python
469
+
470
+ >>> regex.subf(r"(\w+) (\w+)", "{0} => {2} {1}", "foo bar")
471
+ 'foo bar => bar foo'
472
+ >>> regex.subf(r"(?P<word1>\w+) (?P<word2>\w+)", "{word2} {word1}", "foo bar")
473
+ 'bar foo'
474
+
475
+ Added ``expandf`` to match object
476
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
477
+
478
+ ``expandf`` is an alternative to ``expand``. When passed a replacement string, it treats it as a format string.
479
+
480
+ .. sourcecode:: python
481
+
482
+ >>> m = regex.match(r"(\w+) (\w+)", "foo bar")
483
+ >>> m.expandf("{0} => {2} {1}")
484
+ 'foo bar => bar foo'
485
+ >>>
486
+ >>> m = regex.match(r"(?P<word1>\w+) (?P<word2>\w+)", "foo bar")
487
+ >>> m.expandf("{word2} {word1}")
488
+ 'bar foo'
489
+
490
+ Detach searched string
491
+ ^^^^^^^^^^^^^^^^^^^^^^
492
+
493
+ A match object contains a reference to the string that was searched, via its ``string`` attribute. The ``detach_string`` method will 'detach' that string, making it available for garbage collection, which might save valuable memory if that string is very large.
494
+
495
+ .. sourcecode:: python
496
+
497
+ >>> m = regex.search(r"\w+", "Hello world")
498
+ >>> print(m.group())
499
+ Hello
500
+ >>> print(m.string)
501
+ Hello world
502
+ >>> m.detach_string()
503
+ >>> print(m.group())
504
+ Hello
505
+ >>> print(m.string)
506
+ None
507
+
508
+ Recursive patterns (`Hg issue 27 <https://github.com/mrabarnett/mrab-regex/issues/27>`_)
509
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
510
+
511
+ Recursive and repeated patterns are supported.
512
+
513
+ ``(?R)`` or ``(?0)`` tries to match the entire regex recursively. ``(?1)``, ``(?2)``, etc, try to match the relevant group.
514
+
515
+ ``(?&name)`` tries to match the named group.
516
+
517
+ .. sourcecode:: python
518
+
519
+ >>> regex.match(r"(Tarzan|Jane) loves (?1)", "Tarzan loves Jane").groups()
520
+ ('Tarzan',)
521
+ >>> regex.match(r"(Tarzan|Jane) loves (?1)", "Jane loves Tarzan").groups()
522
+ ('Jane',)
523
+
524
+ >>> m = regex.search(r"(\w)(?:(?R)|(\w?))\1", "kayak")
525
+ >>> m.group(0, 1, 2)
526
+ ('kayak', 'k', None)
527
+
528
+ The first two examples show how the subpattern within the group is reused, but is _not_ itself a group. In other words, ``"(Tarzan|Jane) loves (?1)"`` is equivalent to ``"(Tarzan|Jane) loves (?:Tarzan|Jane)"``.
529
+
530
+ It's possible to backtrack into a recursed or repeated group.
531
+
532
+ You can't call a group if there is more than one group with that group name or group number (``"ambiguous group reference"``).
533
+
534
+ The alternative forms ``(?P>name)`` and ``(?P&name)`` are also supported.
535
+
536
+ Full Unicode case-folding is supported
537
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
538
+
539
+ In version 1 behaviour, the regex module uses full case-folding when performing case-insensitive matches in Unicode.
540
+
541
+ .. sourcecode:: python
542
+
543
+ >>> regex.match(r"(?iV1)strasse", "stra\N{LATIN SMALL LETTER SHARP S}e").span()
544
+ (0, 6)
545
+ >>> regex.match(r"(?iV1)stra\N{LATIN SMALL LETTER SHARP S}e", "STRASSE").span()
546
+ (0, 7)
547
+
548
+ In version 0 behaviour, it uses simple case-folding for backward compatibility with the re module.
549
+
550
+ Approximate "fuzzy" matching (`Hg issue 12 <https://github.com/mrabarnett/mrab-regex/issues/12>`_, `Hg issue 41 <https://github.com/mrabarnett/mrab-regex/issues/41>`_, `Hg issue 109 <https://github.com/mrabarnett/mrab-regex/issues/109>`_)
551
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
552
+
553
+ Regex usually attempts an exact match, but sometimes an approximate, or "fuzzy", match is needed, for those cases where the text being searched may contain errors in the form of inserted, deleted or substituted characters.
554
+
555
+ A fuzzy regex specifies which types of errors are permitted, and, optionally, either the minimum and maximum or only the maximum permitted number of each type. (You cannot specify only a minimum.)
556
+
557
+ The 3 types of error are:
558
+
559
+ * Insertion, indicated by "i"
560
+
561
+ * Deletion, indicated by "d"
562
+
563
+ * Substitution, indicated by "s"
564
+
565
+ In addition, "e" indicates any type of error.
566
+
567
+ The fuzziness of a regex item is specified between "{" and "}" after the item.
568
+
569
+ Examples:
570
+
571
+ * ``foo`` match "foo" exactly
572
+
573
+ * ``(?:foo){i}`` match "foo", permitting insertions
574
+
575
+ * ``(?:foo){d}`` match "foo", permitting deletions
576
+
577
+ * ``(?:foo){s}`` match "foo", permitting substitutions
578
+
579
+ * ``(?:foo){i,s}`` match "foo", permitting insertions and substitutions
580
+
581
+ * ``(?:foo){e}`` match "foo", permitting errors
582
+
583
+ If a certain type of error is specified, then any type not specified will **not** be permitted.
584
+
585
+ In the following examples I'll omit the item and write only the fuzziness:
586
+
587
+ * ``{d<=3}`` permit at most 3 deletions, but no other types
588
+
589
+ * ``{i<=1,s<=2}`` permit at most 1 insertion and at most 2 substitutions, but no deletions
590
+
591
+ * ``{1<=e<=3}`` permit at least 1 and at most 3 errors
592
+
593
+ * ``{i<=2,d<=2,e<=3}`` permit at most 2 insertions, at most 2 deletions, at most 3 errors in total, but no substitutions
594
+
595
+ It's also possible to state the costs of each type of error and the maximum permitted total cost.
596
+
597
+ Examples:
598
+
599
+ * ``{2i+2d+1s<=4}`` each insertion costs 2, each deletion costs 2, each substitution costs 1, the total cost must not exceed 4
600
+
601
+ * ``{i<=1,d<=1,s<=1,2i+2d+1s<=4}`` at most 1 insertion, at most 1 deletion, at most 1 substitution; each insertion costs 2, each deletion costs 2, each substitution costs 1, the total cost must not exceed 4
602
+
603
+ You can also use "<" instead of "<=" if you want an exclusive minimum or maximum.
604
+
605
+ You can add a test to perform on a character that's substituted or inserted.
606
+
607
+ Examples:
608
+
609
+ * ``{s<=2:[a-z]}`` at most 2 substitutions, which must be in the character set ``[a-z]``.
610
+
611
+ * ``{s<=2,i<=3:\d}`` at most 2 substitutions, at most 3 insertions, which must be digits.
612
+
613
+ By default, fuzzy matching searches for the first match that meets the given constraints. The ``ENHANCEMATCH`` flag will cause it to attempt to improve the fit (i.e. reduce the number of errors) of the match that it has found.
614
+
615
+ The ``BESTMATCH`` flag will make it search for the best match instead.
616
+
617
+ Further examples to note:
618
+
619
+ * ``regex.search("(dog){e}", "cat and dog")[1]`` returns ``"cat"`` because that matches ``"dog"`` with 3 errors (an unlimited number of errors is permitted).
620
+
621
+ * ``regex.search("(dog){e<=1}", "cat and dog")[1]`` returns ``" dog"`` (with a leading space) because that matches ``"dog"`` with 1 error, which is within the limit.
622
+
623
+ * ``regex.search("(?e)(dog){e<=1}", "cat and dog")[1]`` returns ``"dog"`` (without a leading space) because the fuzzy search matches ``" dog"`` with 1 error, which is within the limit, and the ``(?e)`` then it attempts a better fit.
624
+
625
+ In the first two examples there are perfect matches later in the string, but in neither case is it the first possible match.
626
+
627
+ The match object has an attribute ``fuzzy_counts`` which gives the total number of substitutions, insertions and deletions.
628
+
629
+ .. sourcecode:: python
630
+
631
+ >>> # A 'raw' fuzzy match:
632
+ >>> regex.fullmatch(r"(?:cats|cat){e<=1}", "cat").fuzzy_counts
633
+ (0, 0, 1)
634
+ >>> # 0 substitutions, 0 insertions, 1 deletion.
635
+
636
+ >>> # A better match might be possible if the ENHANCEMATCH flag used:
637
+ >>> regex.fullmatch(r"(?e)(?:cats|cat){e<=1}", "cat").fuzzy_counts
638
+ (0, 0, 0)
639
+ >>> # 0 substitutions, 0 insertions, 0 deletions.
640
+
641
+ The match object also has an attribute ``fuzzy_changes`` which gives a tuple of the positions of the substitutions, insertions and deletions.
642
+
643
+ .. sourcecode:: python
644
+
645
+ >>> m = regex.search('(fuu){i<=2,d<=2,e<=5}', 'anaconda foo bar')
646
+ >>> m
647
+ <regex.Match object; span=(7, 10), match='a f', fuzzy_counts=(0, 2, 2)>
648
+ >>> m.fuzzy_changes
649
+ ([], [7, 8], [10, 11])
650
+
651
+ What this means is that if the matched part of the string had been:
652
+
653
+ .. sourcecode:: python
654
+
655
+ 'anacondfuuoo bar'
656
+
657
+ it would've been an exact match.
658
+
659
+ However, there were insertions at positions 7 and 8:
660
+
661
+ .. sourcecode:: python
662
+
663
+ 'anaconda fuuoo bar'
664
+ ^^
665
+
666
+ and deletions at positions 10 and 11:
667
+
668
+ .. sourcecode:: python
669
+
670
+ 'anaconda f~~oo bar'
671
+ ^^
672
+
673
+ So the actual string was:
674
+
675
+ .. sourcecode:: python
676
+
677
+ 'anaconda foo bar'
678
+
679
+ Named lists ``\L<name>`` (`Hg issue 11 <https://github.com/mrabarnett/mrab-regex/issues/11>`_)
680
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
681
+
682
+ There are occasions where you may want to include a list (actually, a set) of options in a regex.
683
+
684
+ One way is to build the pattern like this:
685
+
686
+ .. sourcecode:: python
687
+
688
+ >>> p = regex.compile(r"first|second|third|fourth|fifth")
689
+
690
+ but if the list is large, parsing the resulting regex can take considerable time, and care must also be taken that the strings are properly escaped and properly ordered, for example, "cats" before "cat".
691
+
692
+ The new alternative is to use a named list:
693
+
694
+ .. sourcecode:: python
695
+
696
+ >>> option_set = ["first", "second", "third", "fourth", "fifth"]
697
+ >>> p = regex.compile(r"\L<options>", options=option_set)
698
+
699
+ The order of the items is irrelevant, they are treated as a set. The named lists are available as the ``.named_lists`` attribute of the pattern object :
700
+
701
+ .. sourcecode:: python
702
+
703
+ >>> print(p.named_lists)
704
+ {'options': frozenset({'third', 'first', 'fifth', 'fourth', 'second'})}
705
+
706
+ If there are any unused keyword arguments, ``ValueError`` will be raised unless you tell it otherwise:
707
+
708
+ .. sourcecode:: python
709
+
710
+ >>> option_set = ["first", "second", "third", "fourth", "fifth"]
711
+ >>> p = regex.compile(r"\L<options>", options=option_set, other_options=[])
712
+ Traceback (most recent call last):
713
+ File "<stdin>", line 1, in <module>
714
+ File "C:\Python310\lib\site-packages\regex\regex.py", line 353, in compile
715
+ return _compile(pattern, flags, ignore_unused, kwargs, cache_pattern)
716
+ File "C:\Python310\lib\site-packages\regex\regex.py", line 500, in _compile
717
+ complain_unused_args()
718
+ File "C:\Python310\lib\site-packages\regex\regex.py", line 483, in complain_unused_args
719
+ raise ValueError('unused keyword argument {!a}'.format(any_one))
720
+ ValueError: unused keyword argument 'other_options'
721
+ >>> p = regex.compile(r"\L<options>", options=option_set, other_options=[], ignore_unused=True)
722
+ >>> p = regex.compile(r"\L<options>", options=option_set, other_options=[], ignore_unused=False)
723
+ Traceback (most recent call last):
724
+ File "<stdin>", line 1, in <module>
725
+ File "C:\Python310\lib\site-packages\regex\regex.py", line 353, in compile
726
+ return _compile(pattern, flags, ignore_unused, kwargs, cache_pattern)
727
+ File "C:\Python310\lib\site-packages\regex\regex.py", line 500, in _compile
728
+ complain_unused_args()
729
+ File "C:\Python310\lib\site-packages\regex\regex.py", line 483, in complain_unused_args
730
+ raise ValueError('unused keyword argument {!a}'.format(any_one))
731
+ ValueError: unused keyword argument 'other_options'
732
+ >>>
733
+
734
+ Start and end of word
735
+ ^^^^^^^^^^^^^^^^^^^^^
736
+
737
+ ``\m`` matches at the start of a word.
738
+
739
+ ``\M`` matches at the end of a word.
740
+
741
+ Compare with ``\b``, which matches at the start or end of a word.
742
+
743
+ Unicode line separators
744
+ ^^^^^^^^^^^^^^^^^^^^^^^
745
+
746
+ Normally the only line separator is ``\n`` (``\x0A``), but if the ``WORD`` flag is turned on then the line separators are ``\x0D\x0A``, ``\x0A``, ``\x0B``, ``\x0C`` and ``\x0D``, plus ``\x85``, ``\u2028`` and ``\u2029`` when working with Unicode.
747
+
748
+ This affects the regex dot ``"."``, which, with the ``DOTALL`` flag turned off, matches any character except a line separator. It also affects the line anchors ``^`` and ``$`` (in multiline mode).
749
+
750
+ Set operators
751
+ ^^^^^^^^^^^^^
752
+
753
+ **Version 1 behaviour only**
754
+
755
+ Set operators have been added, and a set ``[...]`` can include nested sets.
756
+
757
+ The operators, in order of increasing precedence, are:
758
+
759
+ * ``||`` for union ("x||y" means "x or y")
760
+
761
+ * ``~~`` (double tilde) for symmetric difference ("x~~y" means "x or y, but not both")
762
+
763
+ * ``&&`` for intersection ("x&&y" means "x and y")
764
+
765
+ * ``--`` (double dash) for difference ("x--y" means "x but not y")
766
+
767
+ Implicit union, ie, simple juxtaposition like in ``[ab]``, has the highest precedence. Thus, ``[ab&&cd]`` is the same as ``[[a||b]&&[c||d]]``.
768
+
769
+ Examples:
770
+
771
+ * ``[ab]`` # Set containing 'a' and 'b'
772
+
773
+ * ``[a-z]`` # Set containing 'a' .. 'z'
774
+
775
+ * ``[[a-z]--[qw]]`` # Set containing 'a' .. 'z', but not 'q' or 'w'
776
+
777
+ * ``[a-z--qw]`` # Same as above
778
+
779
+ * ``[\p{L}--QW]`` # Set containing all letters except 'Q' and 'W'
780
+
781
+ * ``[\p{N}--[0-9]]`` # Set containing all numbers except '0' .. '9'
782
+
783
+ * ``[\p{ASCII}&&\p{Letter}]`` # Set containing all characters which are ASCII and letter
784
+
785
+ regex.escape (`issue #2650 <https://bugs.python.org/issue2650>`_)
786
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
787
+
788
+ regex.escape has an additional keyword parameter ``special_only``. When True, only 'special' regex characters, such as '?', are escaped.
789
+
790
+ .. sourcecode:: python
791
+
792
+ >>> regex.escape("foo!?", special_only=False)
793
+ 'foo\\!\\?'
794
+ >>> regex.escape("foo!?", special_only=True)
795
+ 'foo!\\?'
796
+
797
+ regex.escape (`Hg issue 249 <https://github.com/mrabarnett/mrab-regex/issues/249>`_)
798
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
799
+
800
+ regex.escape has an additional keyword parameter ``literal_spaces``. When True, spaces are not escaped.
801
+
802
+ .. sourcecode:: python
803
+
804
+ >>> regex.escape("foo bar!?", literal_spaces=False)
805
+ 'foo\\ bar!\\?'
806
+ >>> regex.escape("foo bar!?", literal_spaces=True)
807
+ 'foo bar!\\?'
808
+
809
+ Repeated captures (`issue #7132 <https://bugs.python.org/issue7132>`_)
810
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
811
+
812
+ A match object has additional methods which return information on all the successful matches of a repeated group. These methods are:
813
+
814
+ * ``matchobject.captures([group1, ...])``
815
+
816
+ * Returns a list of the strings matched in a group or groups. Compare with ``matchobject.group([group1, ...])``.
817
+
818
+ * ``matchobject.starts([group])``
819
+
820
+ * Returns a list of the start positions. Compare with ``matchobject.start([group])``.
821
+
822
+ * ``matchobject.ends([group])``
823
+
824
+ * Returns a list of the end positions. Compare with ``matchobject.end([group])``.
825
+
826
+ * ``matchobject.spans([group])``
827
+
828
+ * Returns a list of the spans. Compare with ``matchobject.span([group])``.
829
+
830
+ .. sourcecode:: python
831
+
832
+ >>> m = regex.search(r"(\w{3})+", "123456789")
833
+ >>> m.group(1)
834
+ '789'
835
+ >>> m.captures(1)
836
+ ['123', '456', '789']
837
+ >>> m.start(1)
838
+ 6
839
+ >>> m.starts(1)
840
+ [0, 3, 6]
841
+ >>> m.end(1)
842
+ 9
843
+ >>> m.ends(1)
844
+ [3, 6, 9]
845
+ >>> m.span(1)
846
+ (6, 9)
847
+ >>> m.spans(1)
848
+ [(0, 3), (3, 6), (6, 9)]
849
+
850
+ Atomic grouping ``(?>...)`` (`issue #433030 <https://bugs.python.org/issue433030>`_)
851
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
852
+
853
+ If the following pattern subsequently fails, then the subpattern as a whole will fail.
854
+
855
+ Possessive quantifiers
856
+ ^^^^^^^^^^^^^^^^^^^^^^
857
+
858
+ ``(?:...)?+`` ; ``(?:...)*+`` ; ``(?:...)++`` ; ``(?:...){min,max}+``
859
+
860
+ The subpattern is matched up to 'max' times. If the following pattern subsequently fails, then all the repeated subpatterns will fail as a whole. For example, ``(?:...)++`` is equivalent to ``(?>(?:...)+)``.
861
+
862
+ Scoped flags (`issue #433028 <https://bugs.python.org/issue433028>`_)
863
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
864
+
865
+ ``(?flags-flags:...)``
866
+
867
+ The flags will apply only to the subpattern. Flags can be turned on or off.
868
+
869
+ Definition of 'word' character (`issue #1693050 <https://bugs.python.org/issue1693050>`_)
870
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
871
+
872
+ The definition of a 'word' character has been expanded for Unicode. It conforms to the Unicode specification at ``http://www.unicode.org/reports/tr29/``.
873
+
874
+ Variable-length lookbehind
875
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^
876
+
877
+ A lookbehind can match a variable-length string.
878
+
879
+ Flags argument for regex.split, regex.sub and regex.subn (`issue #3482 <https://bugs.python.org/issue3482>`_)
880
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
881
+
882
+ ``regex.split``, ``regex.sub`` and ``regex.subn`` support a 'flags' argument.
883
+
884
+ Pos and endpos arguments for regex.sub and regex.subn
885
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
886
+
887
+ ``regex.sub`` and ``regex.subn`` support 'pos' and 'endpos' arguments.
888
+
889
+ 'Overlapped' argument for regex.findall and regex.finditer
890
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
891
+
892
+ ``regex.findall`` and ``regex.finditer`` support an 'overlapped' flag which permits overlapped matches.
893
+
894
+ Splititer
895
+ ^^^^^^^^^
896
+
897
+ ``regex.splititer`` has been added. It's a generator equivalent of ``regex.split``.
898
+
899
+ Subscripting match objects for groups
900
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
901
+
902
+ A match object accepts access to the groups via subscripting and slicing:
903
+
904
+ .. sourcecode:: python
905
+
906
+ >>> m = regex.search(r"(?P<before>.*?)(?P<num>\d+)(?P<after>.*)", "pqr123stu")
907
+ >>> print(m["before"])
908
+ pqr
909
+ >>> print(len(m))
910
+ 4
911
+ >>> print(m[:])
912
+ ('pqr123stu', 'pqr', '123', 'stu')
913
+
914
+ Named groups
915
+ ^^^^^^^^^^^^
916
+
917
+ Groups can be named with ``(?<name>...)`` as well as the existing ``(?P<name>...)``.
918
+
919
+ Group references
920
+ ^^^^^^^^^^^^^^^^
921
+
922
+ Groups can be referenced within a pattern with ``\g<name>``. This also allows there to be more than 99 groups.
923
+
924
+ Named characters ``\N{name}``
925
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
926
+
927
+ Named characters are supported. Note that only those known by Python's Unicode database will be recognised.
928
+
929
+ Unicode codepoint properties, including scripts and blocks
930
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
931
+
932
+ ``\p{property=value}``; ``\P{property=value}``; ``\p{value}`` ; ``\P{value}``
933
+
934
+ Many Unicode properties are supported, including blocks and scripts. ``\p{property=value}`` or ``\p{property:value}`` matches a character whose property ``property`` has value ``value``. The inverse of ``\p{property=value}`` is ``\P{property=value}`` or ``\p{^property=value}``.
935
+
936
+ If the short form ``\p{value}`` is used, the properties are checked in the order: ``General_Category``, ``Script``, ``Block``, binary property:
937
+
938
+ * ``Latin``, the 'Latin' script (``Script=Latin``).
939
+
940
+ * ``BasicLatin``, the 'BasicLatin' block (``Block=BasicLatin``).
941
+
942
+ * ``Alphabetic``, the 'Alphabetic' binary property (``Alphabetic=Yes``).
943
+
944
+ A short form starting with ``Is`` indicates a script or binary property:
945
+
946
+ * ``IsLatin``, the 'Latin' script (``Script=Latin``).
947
+
948
+ * ``IsAlphabetic``, the 'Alphabetic' binary property (``Alphabetic=Yes``).
949
+
950
+ A short form starting with ``In`` indicates a block property:
951
+
952
+ * ``InBasicLatin``, the 'BasicLatin' block (``Block=BasicLatin``).
953
+
954
+ POSIX character classes
955
+ ^^^^^^^^^^^^^^^^^^^^^^^
956
+
957
+ ``[[:alpha:]]``; ``[[:^alpha:]]``
958
+
959
+ POSIX character classes are supported. These are normally treated as an alternative form of ``\p{...}``.
960
+
961
+ The exceptions are ``alnum``, ``digit``, ``punct`` and ``xdigit``, whose definitions are different from those of Unicode.
962
+
963
+ ``[[:alnum:]]`` is equivalent to ``\p{posix_alnum}``.
964
+
965
+ ``[[:digit:]]`` is equivalent to ``\p{posix_digit}``.
966
+
967
+ ``[[:punct:]]`` is equivalent to ``\p{posix_punct}``.
968
+
969
+ ``[[:xdigit:]]`` is equivalent to ``\p{posix_xdigit}``.
970
+
971
+ Search anchor ``\G``
972
+ ^^^^^^^^^^^^^^^^^^^^
973
+
974
+ A search anchor has been added. It matches at the position where each search started/continued and can be used for contiguous matches or in negative variable-length lookbehinds to limit how far back the lookbehind goes:
975
+
976
+ .. sourcecode:: python
977
+
978
+ >>> regex.findall(r"\w{2}", "abcd ef")
979
+ ['ab', 'cd', 'ef']
980
+ >>> regex.findall(r"\G\w{2}", "abcd ef")
981
+ ['ab', 'cd']
982
+
983
+ * The search starts at position 0 and matches 'ab'.
984
+
985
+ * The search continues at position 2 and matches 'cd'.
986
+
987
+ * The search continues at position 4 and fails to match any letters.
988
+
989
+ * The anchor stops the search start position from being advanced, so there are no more results.
990
+
991
+ Reverse searching
992
+ ^^^^^^^^^^^^^^^^^
993
+
994
+ Searches can also work backwards:
995
+
996
+ .. sourcecode:: python
997
+
998
+ >>> regex.findall(r".", "abc")
999
+ ['a', 'b', 'c']
1000
+ >>> regex.findall(r"(?r).", "abc")
1001
+ ['c', 'b', 'a']
1002
+
1003
+ Note that the result of a reverse search is not necessarily the reverse of a forward search:
1004
+
1005
+ .. sourcecode:: python
1006
+
1007
+ >>> regex.findall(r"..", "abcde")
1008
+ ['ab', 'cd']
1009
+ >>> regex.findall(r"(?r)..", "abcde")
1010
+ ['de', 'bc']
1011
+
1012
+ Matching a single grapheme ``\X``
1013
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
1014
+
1015
+ The grapheme matcher is supported. It conforms to the Unicode specification at ``http://www.unicode.org/reports/tr29/``.
1016
+
1017
+ Branch reset ``(?|...|...)``
1018
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
1019
+
1020
+ Group numbers will be reused across the alternatives, but groups with different names will have different group numbers.
1021
+
1022
+ .. sourcecode:: python
1023
+
1024
+ >>> regex.match(r"(?|(first)|(second))", "first").groups()
1025
+ ('first',)
1026
+ >>> regex.match(r"(?|(first)|(second))", "second").groups()
1027
+ ('second',)
1028
+
1029
+ Note that there is only one group.
1030
+
1031
+ Default Unicode word boundary
1032
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
1033
+
1034
+ The ``WORD`` flag changes the definition of a 'word boundary' to that of a default Unicode word boundary. This applies to ``\b`` and ``\B``.
1035
+
1036
+ Timeout
1037
+ ^^^^^^^
1038
+
1039
+ The matching methods and functions support timeouts. The timeout (in seconds) applies to the entire operation:
1040
+
1041
+ .. sourcecode:: python
1042
+
1043
+ >>> from time import sleep
1044
+ >>>
1045
+ >>> def fast_replace(m):
1046
+ ... return 'X'
1047
+ ...
1048
+ >>> def slow_replace(m):
1049
+ ... sleep(0.5)
1050
+ ... return 'X'
1051
+ ...
1052
+ >>> regex.sub(r'[a-z]', fast_replace, 'abcde', timeout=2)
1053
+ 'XXXXX'
1054
+ >>> regex.sub(r'[a-z]', slow_replace, 'abcde', timeout=2)
1055
+ Traceback (most recent call last):
1056
+ File "<stdin>", line 1, in <module>
1057
+ File "C:\Python310\lib\site-packages\regex\regex.py", line 278, in sub
1058
+ return pat.sub(repl, string, count, pos, endpos, concurrent, timeout)
1059
+ TimeoutError: regex timed out
python_env/lib/site-packages/regex-2025.11.3.dist-info/RECORD ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ regex-2025.11.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2
+ regex-2025.11.3.dist-info/METADATA,sha256=yPbLZQabvZJ75SDgxSns09JjRTeIR7ogXm-Zs8w0Gds,41526
3
+ regex-2025.11.3.dist-info/RECORD,,
4
+ regex-2025.11.3.dist-info/WHEEL,sha256=KUuBC6lxAbHCKilKua8R9W_TM71_-9Sg5uEP3uDWcoU,101
5
+ regex-2025.11.3.dist-info/licenses/LICENSE.txt,sha256=PSIMBllLgmu6vxEDEvYPOF-Z5X5Sn6S55Tb3kJH4tMc,11792
6
+ regex-2025.11.3.dist-info/top_level.txt,sha256=aQmiDMhNTF26cCK4_7D-qaVvhbxClG0wyCTnEhkzYBs,6
7
+ regex/__init__.py,sha256=K0DzoWlqFh5039Pujrkm1NZ_rc2xhRenbfBthB06W48,78
8
+ regex/__pycache__/__init__.cpython-310.pyc,,
9
+ regex/__pycache__/_main.cpython-310.pyc,,
10
+ regex/__pycache__/_regex_core.cpython-310.pyc,,
11
+ regex/_main.py,sha256=7vtK_RiZJ0uUiOJGs130iazyqU0oMZF3yTMnTZarAuE,33426
12
+ regex/_regex.cp310-win_amd64.pyd,sha256=teSRWW5Mq61-FYsWcQWr7FbdZWgi_0yBuDi2KkMJohU,726016
13
+ regex/_regex_core.py,sha256=9mFqhcom7qM6BJnlTUEFr4Mq7_EbfVnd3SeDrTIJbJk,151643
14
+ regex/tests/__pycache__/test_regex.cpython-310.pyc,,
15
+ regex/tests/test_regex.py,sha256=6eTTXBYB12CNVIklDLs4PpYhgzlV_7HysBgCBwqsato,230349
python_env/lib/site-packages/regex-2025.11.3.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: false
4
+ Tag: cp310-cp310-win_amd64
5
+
python_env/lib/site-packages/regex-2025.11.3.dist-info/licenses/LICENSE.txt ADDED
@@ -0,0 +1,208 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ This work was derived from the 're' module of CPython 2.6 and CPython 3.1,
2
+ copyright (c) 1998-2001 by Secret Labs AB and licensed under CNRI's Python 1.6
3
+ license.
4
+
5
+ All additions and alterations are licensed under the Apache 2.0 License.
6
+
7
+
8
+ Apache License
9
+ Version 2.0, January 2004
10
+ http://www.apache.org/licenses/
11
+
12
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
13
+
14
+ 1. Definitions.
15
+
16
+ "License" shall mean the terms and conditions for use, reproduction,
17
+ and distribution as defined by Sections 1 through 9 of this document.
18
+
19
+ "Licensor" shall mean the copyright owner or entity authorized by
20
+ the copyright owner that is granting the License.
21
+
22
+ "Legal Entity" shall mean the union of the acting entity and all
23
+ other entities that control, are controlled by, or are under common
24
+ control with that entity. For the purposes of this definition,
25
+ "control" means (i) the power, direct or indirect, to cause the
26
+ direction or management of such entity, whether by contract or
27
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
28
+ outstanding shares, or (iii) beneficial ownership of such entity.
29
+
30
+ "You" (or "Your") shall mean an individual or Legal Entity
31
+ exercising permissions granted by this License.
32
+
33
+ "Source" form shall mean the preferred form for making modifications,
34
+ including but not limited to software source code, documentation
35
+ source, and configuration files.
36
+
37
+ "Object" form shall mean any form resulting from mechanical
38
+ transformation or translation of a Source form, including but
39
+ not limited to compiled object code, generated documentation,
40
+ and conversions to other media types.
41
+
42
+ "Work" shall mean the work of authorship, whether in Source or
43
+ Object form, made available under the License, as indicated by a
44
+ copyright notice that is included in or attached to the work
45
+ (an example is provided in the Appendix below).
46
+
47
+ "Derivative Works" shall mean any work, whether in Source or Object
48
+ form, that is based on (or derived from) the Work and for which the
49
+ editorial revisions, annotations, elaborations, or other modifications
50
+ represent, as a whole, an original work of authorship. For the purposes
51
+ of this License, Derivative Works shall not include works that remain
52
+ separable from, or merely link (or bind by name) to the interfaces of,
53
+ the Work and Derivative Works thereof.
54
+
55
+ "Contribution" shall mean any work of authorship, including
56
+ the original version of the Work and any modifications or additions
57
+ to that Work or Derivative Works thereof, that is intentionally
58
+ submitted to Licensor for inclusion in the Work by the copyright owner
59
+ or by an individual or Legal Entity authorized to submit on behalf of
60
+ the copyright owner. For the purposes of this definition, "submitted"
61
+ means any form of electronic, verbal, or written communication sent
62
+ to the Licensor or its representatives, including but not limited to
63
+ communication on electronic mailing lists, source code control systems,
64
+ and issue tracking systems that are managed by, or on behalf of, the
65
+ Licensor for the purpose of discussing and improving the Work, but
66
+ excluding communication that is conspicuously marked or otherwise
67
+ designated in writing by the copyright owner as "Not a Contribution."
68
+
69
+ "Contributor" shall mean Licensor and any individual or Legal Entity
70
+ on behalf of whom a Contribution has been received by Licensor and
71
+ subsequently incorporated within the Work.
72
+
73
+ 2. Grant of Copyright License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ copyright license to reproduce, prepare Derivative Works of,
77
+ publicly display, publicly perform, sublicense, and distribute the
78
+ Work and such Derivative Works in Source or Object form.
79
+
80
+ 3. Grant of Patent License. Subject to the terms and conditions of
81
+ this License, each Contributor hereby grants to You a perpetual,
82
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
83
+ (except as stated in this section) patent license to make, have made,
84
+ use, offer to sell, sell, import, and otherwise transfer the Work,
85
+ where such license applies only to those patent claims licensable
86
+ by such Contributor that are necessarily infringed by their
87
+ Contribution(s) alone or by combination of their Contribution(s)
88
+ with the Work to which such Contribution(s) was submitted. If You
89
+ institute patent litigation against any entity (including a
90
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
91
+ or a Contribution incorporated within the Work constitutes direct
92
+ or contributory patent infringement, then any patent licenses
93
+ granted to You under this License for that Work shall terminate
94
+ as of the date such litigation is filed.
95
+
96
+ 4. Redistribution. You may reproduce and distribute copies of the
97
+ Work or Derivative Works thereof in any medium, with or without
98
+ modifications, and in Source or Object form, provided that You
99
+ meet the following conditions:
100
+
101
+ (a) You must give any other recipients of the Work or
102
+ Derivative Works a copy of this License; and
103
+
104
+ (b) You must cause any modified files to carry prominent notices
105
+ stating that You changed the files; and
106
+
107
+ (c) You must retain, in the Source form of any Derivative Works
108
+ that You distribute, all copyright, patent, trademark, and
109
+ attribution notices from the Source form of the Work,
110
+ excluding those notices that do not pertain to any part of
111
+ the Derivative Works; and
112
+
113
+ (d) If the Work includes a "NOTICE" text file as part of its
114
+ distribution, then any Derivative Works that You distribute must
115
+ include a readable copy of the attribution notices contained
116
+ within such NOTICE file, excluding those notices that do not
117
+ pertain to any part of the Derivative Works, in at least one
118
+ of the following places: within a NOTICE text file distributed
119
+ as part of the Derivative Works; within the Source form or
120
+ documentation, if provided along with the Derivative Works; or,
121
+ within a display generated by the Derivative Works, if and
122
+ wherever such third-party notices normally appear. The contents
123
+ of the NOTICE file are for informational purposes only and
124
+ do not modify the License. You may add Your own attribution
125
+ notices within Derivative Works that You distribute, alongside
126
+ or as an addendum to the NOTICE text from the Work, provided
127
+ that such additional attribution notices cannot be construed
128
+ as modifying the License.
129
+
130
+ You may add Your own copyright statement to Your modifications and
131
+ may provide additional or different license terms and conditions
132
+ for use, reproduction, or distribution of Your modifications, or
133
+ for any such Derivative Works as a whole, provided Your use,
134
+ reproduction, and distribution of the Work otherwise complies with
135
+ the conditions stated in this License.
136
+
137
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
138
+ any Contribution intentionally submitted for inclusion in the Work
139
+ by You to the Licensor shall be under the terms and conditions of
140
+ this License, without any additional terms or conditions.
141
+ Notwithstanding the above, nothing herein shall supersede or modify
142
+ the terms of any separate license agreement you may have executed
143
+ with Licensor regarding such Contributions.
144
+
145
+ 6. Trademarks. This License does not grant permission to use the trade
146
+ names, trademarks, service marks, or product names of the Licensor,
147
+ except as required for reasonable and customary use in describing the
148
+ origin of the Work and reproducing the content of the NOTICE file.
149
+
150
+ 7. Disclaimer of Warranty. Unless required by applicable law or
151
+ agreed to in writing, Licensor provides the Work (and each
152
+ Contributor provides its Contributions) on an "AS IS" BASIS,
153
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
154
+ implied, including, without limitation, any warranties or conditions
155
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
156
+ PARTICULAR PURPOSE. You are solely responsible for determining the
157
+ appropriateness of using or redistributing the Work and assume any
158
+ risks associated with Your exercise of permissions under this License.
159
+
160
+ 8. Limitation of Liability. In no event and under no legal theory,
161
+ whether in tort (including negligence), contract, or otherwise,
162
+ unless required by applicable law (such as deliberate and grossly
163
+ negligent acts) or agreed to in writing, shall any Contributor be
164
+ liable to You for damages, including any direct, indirect, special,
165
+ incidental, or consequential damages of any character arising as a
166
+ result of this License or out of the use or inability to use the
167
+ Work (including but not limited to damages for loss of goodwill,
168
+ work stoppage, computer failure or malfunction, or any and all
169
+ other commercial damages or losses), even if such Contributor
170
+ has been advised of the possibility of such damages.
171
+
172
+ 9. Accepting Warranty or Additional Liability. While redistributing
173
+ the Work or Derivative Works thereof, You may choose to offer,
174
+ and charge a fee for, acceptance of support, warranty, indemnity,
175
+ or other liability obligations and/or rights consistent with this
176
+ License. However, in accepting such obligations, You may act only
177
+ on Your own behalf and on Your sole responsibility, not on behalf
178
+ of any other Contributor, and only if You agree to indemnify,
179
+ defend, and hold each Contributor harmless for any liability
180
+ incurred by, or claims asserted against, such Contributor by reason
181
+ of your accepting any such warranty or additional liability.
182
+
183
+ END OF TERMS AND CONDITIONS
184
+
185
+ APPENDIX: How to apply the Apache License to your work.
186
+
187
+ To apply the Apache License to your work, attach the following
188
+ boilerplate notice, with the fields enclosed by brackets "[]"
189
+ replaced with your own identifying information. (Don't include
190
+ the brackets!) The text should be enclosed in the appropriate
191
+ comment syntax for the file format. We also recommend that a
192
+ file or class name and description of purpose be included on the
193
+ same "printed page" as the copyright notice for easier
194
+ identification within third-party archives.
195
+
196
+ Copyright 2020 Matthew Barnett
197
+
198
+ Licensed under the Apache License, Version 2.0 (the "License");
199
+ you may not use this file except in compliance with the License.
200
+ You may obtain a copy of the License at
201
+
202
+ http://www.apache.org/licenses/LICENSE-2.0
203
+
204
+ Unless required by applicable law or agreed to in writing, software
205
+ distributed under the License is distributed on an "AS IS" BASIS,
206
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
207
+ See the License for the specific language governing permissions and
208
+ limitations under the License.
python_env/lib/site-packages/regex-2025.11.3.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ regex
python_env/lib/site-packages/regex/__init__.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ import regex._main
2
+ from regex._main import *
3
+ __all__ = regex._main.__all__
python_env/lib/site-packages/regex/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (235 Bytes). View file
 
python_env/lib/site-packages/regex/__pycache__/_main.cpython-310.pyc ADDED
Binary file (25.5 kB). View file
 
python_env/lib/site-packages/regex/__pycache__/_regex_core.cpython-310.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ce743b9cd4e6f6af2bbd3664972e44ee6cd50f3d944711c5ef8ad60976eb9d7e
3
+ size 112294
python_env/lib/site-packages/regex/_main.py ADDED
@@ -0,0 +1,746 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #
2
+ # Secret Labs' Regular Expression Engine
3
+ #
4
+ # Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
5
+ #
6
+ # This version of the SRE library can be redistributed under CNRI's
7
+ # Python 1.6 license. For any other use, please contact Secret Labs
8
+ # AB (info@pythonware.com).
9
+ #
10
+ # Portions of this engine have been developed in cooperation with
11
+ # CNRI. Hewlett-Packard provided funding for 1.6 integration and
12
+ # other compatibility work.
13
+ #
14
+ # 2010-01-16 mrab Python front-end re-written and extended
15
+
16
+ r"""Support for regular expressions (RE).
17
+
18
+ This module provides regular expression matching operations similar to those
19
+ found in Perl. It supports both 8-bit and Unicode strings; both the pattern and
20
+ the strings being processed can contain null bytes and characters outside the
21
+ US ASCII range.
22
+
23
+ Regular expressions can contain both special and ordinary characters. Most
24
+ ordinary characters, like "A", "a", or "0", are the simplest regular
25
+ expressions; they simply match themselves. You can concatenate ordinary
26
+ characters, so last matches the string 'last'.
27
+
28
+ There are a few differences between the old (legacy) behaviour and the new
29
+ (enhanced) behaviour, which are indicated by VERSION0 or VERSION1.
30
+
31
+ The special characters are:
32
+ "." Matches any character except a newline.
33
+ "^" Matches the start of the string.
34
+ "$" Matches the end of the string or just before the
35
+ newline at the end of the string.
36
+ "*" Matches 0 or more (greedy) repetitions of the preceding
37
+ RE. Greedy means that it will match as many repetitions
38
+ as possible.
39
+ "+" Matches 1 or more (greedy) repetitions of the preceding
40
+ RE.
41
+ "?" Matches 0 or 1 (greedy) of the preceding RE.
42
+ *?,+?,?? Non-greedy versions of the previous three special
43
+ characters.
44
+ *+,++,?+ Possessive versions of the previous three special
45
+ characters.
46
+ {m,n} Matches from m to n repetitions of the preceding RE.
47
+ {m,n}? Non-greedy version of the above.
48
+ {m,n}+ Possessive version of the above.
49
+ {...} Fuzzy matching constraints.
50
+ "\\" Either escapes special characters or signals a special
51
+ sequence.
52
+ [...] Indicates a set of characters. A "^" as the first
53
+ character indicates a complementing set.
54
+ "|" A|B, creates an RE that will match either A or B.
55
+ (...) Matches the RE inside the parentheses. The contents are
56
+ captured and can be retrieved or matched later in the
57
+ string.
58
+ (?flags-flags) VERSION1: Sets/clears the flags for the remainder of
59
+ the group or pattern; VERSION0: Sets the flags for the
60
+ entire pattern.
61
+ (?:...) Non-capturing version of regular parentheses.
62
+ (?>...) Atomic non-capturing version of regular parentheses.
63
+ (?flags-flags:...) Non-capturing version of regular parentheses with local
64
+ flags.
65
+ (?P<name>...) The substring matched by the group is accessible by
66
+ name.
67
+ (?<name>...) The substring matched by the group is accessible by
68
+ name.
69
+ (?P=name) Matches the text matched earlier by the group named
70
+ name.
71
+ (?#...) A comment; ignored.
72
+ (?=...) Matches if ... matches next, but doesn't consume the
73
+ string.
74
+ (?!...) Matches if ... doesn't match next.
75
+ (?<=...) Matches if preceded by ....
76
+ (?<!...) Matches if not preceded by ....
77
+ (?(id)yes|no) Matches yes pattern if group id matched, the (optional)
78
+ no pattern otherwise.
79
+ (?(DEFINE)...) If there's no group called "DEFINE", then ... will be
80
+ ignored, but any group definitions will be available.
81
+ (?|...|...) (?|A|B), creates an RE that will match either A or B,
82
+ but reuses capture group numbers across the
83
+ alternatives.
84
+ (*FAIL) Forces matching to fail, which means immediate
85
+ backtracking.
86
+ (*F) Abbreviation for (*FAIL).
87
+ (*PRUNE) Discards the current backtracking information. Its
88
+ effect doesn't extend outside an atomic group or a
89
+ lookaround.
90
+ (*SKIP) Similar to (*PRUNE), except that it also sets where in
91
+ the text the next attempt at matching the entire
92
+ pattern will start. Its effect doesn't extend outside
93
+ an atomic group or a lookaround.
94
+
95
+ The fuzzy matching constraints are: "i" to permit insertions, "d" to permit
96
+ deletions, "s" to permit substitutions, "e" to permit any of these. Limits are
97
+ optional with "<=" and "<". If any type of error is provided then any type not
98
+ provided is not permitted.
99
+
100
+ A cost equation may be provided.
101
+
102
+ Examples:
103
+ (?:fuzzy){i<=2}
104
+ (?:fuzzy){i<=1,s<=2,d<=1,1i+1s+1d<3}
105
+
106
+ VERSION1: Set operators are supported, and a set can include nested sets. The
107
+ set operators, in order of increasing precedence, are:
108
+ || Set union ("x||y" means "x or y").
109
+ ~~ (double tilde) Symmetric set difference ("x~~y" means "x or y, but not
110
+ both").
111
+ && Set intersection ("x&&y" means "x and y").
112
+ -- (double dash) Set difference ("x--y" means "x but not y").
113
+
114
+ Implicit union, ie, simple juxtaposition like in [ab], has the highest
115
+ precedence.
116
+
117
+ VERSION0 and VERSION1:
118
+ The special sequences consist of "\\" and a character from the list below. If
119
+ the ordinary character is not on the list, then the resulting RE will match the
120
+ second character.
121
+ \number Matches the contents of the group of the same number if
122
+ number is no more than 2 digits, otherwise the character
123
+ with the 3-digit octal code.
124
+ \a Matches the bell character.
125
+ \A Matches only at the start of the string.
126
+ \b Matches the empty string, but only at the start or end of a
127
+ word.
128
+ \B Matches the empty string, but not at the start or end of a
129
+ word.
130
+ \d Matches any decimal digit; equivalent to the set [0-9] when
131
+ matching a bytestring or a Unicode string with the ASCII
132
+ flag, or the whole range of Unicode digits when matching a
133
+ Unicode string.
134
+ \D Matches any non-digit character; equivalent to [^\d].
135
+ \f Matches the formfeed character.
136
+ \g<name> Matches the text matched by the group named name.
137
+ \G Matches the empty string, but only at the position where
138
+ the search started.
139
+ \h Matches horizontal whitespace.
140
+ \K Keeps only what follows for the entire match.
141
+ \L<name> Named list. The list is provided as a keyword argument.
142
+ \m Matches the empty string, but only at the start of a word.
143
+ \M Matches the empty string, but only at the end of a word.
144
+ \n Matches the newline character.
145
+ \N{name} Matches the named character.
146
+ \p{name=value} Matches the character if its property has the specified
147
+ value.
148
+ \P{name=value} Matches the character if its property hasn't the specified
149
+ value.
150
+ \r Matches the carriage-return character.
151
+ \s Matches any whitespace character; equivalent to
152
+ [ \t\n\r\f\v].
153
+ \S Matches any non-whitespace character; equivalent to [^\s].
154
+ \t Matches the tab character.
155
+ \uXXXX Matches the Unicode codepoint with 4-digit hex code XXXX.
156
+ \UXXXXXXXX Matches the Unicode codepoint with 8-digit hex code
157
+ XXXXXXXX.
158
+ \v Matches the vertical tab character.
159
+ \w Matches any alphanumeric character; equivalent to
160
+ [a-zA-Z0-9_] when matching a bytestring or a Unicode string
161
+ with the ASCII flag, or the whole range of Unicode
162
+ alphanumeric characters (letters plus digits plus
163
+ underscore) when matching a Unicode string. With LOCALE, it
164
+ will match the set [0-9_] plus characters defined as
165
+ letters for the current locale.
166
+ \W Matches the complement of \w; equivalent to [^\w].
167
+ \xXX Matches the character with 2-digit hex code XX.
168
+ \X Matches a grapheme.
169
+ \Z Matches only at the end of the string.
170
+ \\ Matches a literal backslash.
171
+
172
+ This module exports the following functions:
173
+ match Match a regular expression pattern at the beginning of a string.
174
+ fullmatch Match a regular expression pattern against all of a string.
175
+ search Search a string for the presence of a pattern.
176
+ sub Substitute occurrences of a pattern found in a string using a
177
+ template string.
178
+ subf Substitute occurrences of a pattern found in a string using a
179
+ format string.
180
+ subn Same as sub, but also return the number of substitutions made.
181
+ subfn Same as subf, but also return the number of substitutions made.
182
+ split Split a string by the occurrences of a pattern. VERSION1: will
183
+ split at zero-width match; VERSION0: won't split at zero-width
184
+ match.
185
+ splititer Return an iterator yielding the parts of a split string.
186
+ findall Find all occurrences of a pattern in a string.
187
+ finditer Return an iterator yielding a match object for each match.
188
+ compile Compile a pattern into a Pattern object.
189
+ purge Clear the regular expression cache.
190
+ escape Backslash all non-alphanumerics or special characters in a
191
+ string.
192
+
193
+ Most of the functions support a concurrent parameter: if True, the GIL will be
194
+ released during matching, allowing other Python threads to run concurrently. If
195
+ the string changes during matching, the behaviour is undefined. This parameter
196
+ is not needed when working on the builtin (immutable) string classes.
197
+
198
+ Some of the functions in this module take flags as optional parameters. Most of
199
+ these flags can also be set within an RE:
200
+ A a ASCII Make \w, \W, \b, \B, \d, and \D match the
201
+ corresponding ASCII character categories. Default
202
+ when matching a bytestring.
203
+ B b BESTMATCH Find the best fuzzy match (default is first).
204
+ D DEBUG Print the parsed pattern.
205
+ E e ENHANCEMATCH Attempt to improve the fit after finding the first
206
+ fuzzy match.
207
+ F f FULLCASE Use full case-folding when performing
208
+ case-insensitive matching in Unicode.
209
+ I i IGNORECASE Perform case-insensitive matching.
210
+ L L LOCALE Make \w, \W, \b, \B, \d, and \D dependent on the
211
+ current locale. (One byte per character only.)
212
+ M m MULTILINE "^" matches the beginning of lines (after a newline)
213
+ as well as the string. "$" matches the end of lines
214
+ (before a newline) as well as the end of the string.
215
+ P p POSIX Perform POSIX-standard matching (leftmost longest).
216
+ R r REVERSE Searches backwards.
217
+ S s DOTALL "." matches any character at all, including the
218
+ newline.
219
+ U u UNICODE Make \w, \W, \b, \B, \d, and \D dependent on the
220
+ Unicode locale. Default when matching a Unicode
221
+ string.
222
+ V0 V0 VERSION0 Turn on the old legacy behaviour.
223
+ V1 V1 VERSION1 Turn on the new enhanced behaviour. This flag
224
+ includes the FULLCASE flag.
225
+ W w WORD Make \b and \B work with default Unicode word breaks
226
+ and make ".", "^" and "$" work with Unicode line
227
+ breaks.
228
+ X x VERBOSE Ignore whitespace and comments for nicer looking REs.
229
+
230
+ This module also defines an exception 'error'.
231
+
232
+ """
233
+
234
+ # Public symbols.
235
+ __all__ = ["cache_all", "compile", "DEFAULT_VERSION", "escape", "findall",
236
+ "finditer", "fullmatch", "match", "purge", "search", "split", "splititer",
237
+ "sub", "subf", "subfn", "subn", "template", "Scanner", "A", "ASCII", "B",
238
+ "BESTMATCH", "D", "DEBUG", "E", "ENHANCEMATCH", "S", "DOTALL", "F",
239
+ "FULLCASE", "I", "IGNORECASE", "L", "LOCALE", "M", "MULTILINE", "P", "POSIX",
240
+ "R", "REVERSE", "T", "TEMPLATE", "U", "UNICODE", "V0", "VERSION0", "V1",
241
+ "VERSION1", "X", "VERBOSE", "W", "WORD", "error", "Regex", "__version__",
242
+ "__doc__", "RegexFlag"]
243
+
244
+ __version__ = "2025.11.3"
245
+
246
+ # --------------------------------------------------------------------
247
+ # Public interface.
248
+
249
def match(pattern, string, flags=0, pos=None, endpos=None, partial=False,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Apply the pattern at the start of the string.

    Returns a match object, or None if the start of the string doesn't
    match the pattern.
    """
    compiled = _compile(pattern, flags, ignore_unused, kwargs, True)
    return compiled.match(string, pos, endpos, concurrent, partial, timeout)
255
+
256
def fullmatch(pattern, string, flags=0, pos=None, endpos=None, partial=False,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Apply the pattern against all of the string.

    Returns a match object, or None if the whole string doesn't match the
    pattern.
    """
    compiled = _compile(pattern, flags, ignore_unused, kwargs, True)
    return compiled.fullmatch(string, pos, endpos, concurrent, partial,
      timeout)
262
+
263
def search(pattern, string, flags=0, pos=None, endpos=None, partial=False,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Scan through the string looking for a match to the pattern.

    Returns a match object, or None if no match was found.
    """
    compiled = _compile(pattern, flags, ignore_unused, kwargs, True)
    return compiled.search(string, pos, endpos, concurrent, partial, timeout)
269
+
270
def sub(pattern, repl, string, count=0, flags=0, pos=None, endpos=None,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Replace non-overlapping occurrences of the pattern in the string.

    Returns the string obtained by replacing the leftmost (or rightmost
    with a reverse pattern) non-overlapping occurrences of the pattern in
    the string with the replacement repl. repl can be a string, in which
    case backslash escapes in it are processed, or a callable, which is
    passed the match object and must return the replacement string to use.
    """
    compiled = _compile(pattern, flags, ignore_unused, kwargs, True)
    return compiled.sub(repl, string, count, pos, endpos, concurrent, timeout)
279
+
280
def subf(pattern, format, string, count=0, flags=0, pos=None, endpos=None,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Replace non-overlapping occurrences of the pattern using a format.

    Returns the string obtained by replacing the leftmost (or rightmost
    with a reverse pattern) non-overlapping occurrences of the pattern in
    the string with the replacement format. format can be a string, in
    which case it's treated as a format string, or a callable, which is
    passed the match object and must return the replacement string to use.
    """
    compiled = _compile(pattern, flags, ignore_unused, kwargs, True)
    return compiled.subf(format, string, count, pos, endpos, concurrent,
      timeout)
289
+
290
def subn(pattern, repl, string, count=0, flags=0, pos=None, endpos=None,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Like sub, but also report how many substitutions were made.

    Returns a 2-tuple (new_string, number). new_string is obtained by
    replacing the leftmost (or rightmost with a reverse pattern)
    non-overlapping occurrences of the pattern in the source string with
    the replacement repl, and number is the count of substitutions made.
    repl can be a string (backslash escapes processed) or a callable
    (passed the match object, must return the replacement string).
    """
    compiled = _compile(pattern, flags, ignore_unused, kwargs, True)
    return compiled.subn(repl, string, count, pos, endpos, concurrent,
      timeout)
301
+
302
def subfn(pattern, format, string, count=0, flags=0, pos=None, endpos=None,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Like subf, but also report how many substitutions were made.

    Returns a 2-tuple (new_string, number). new_string is obtained by
    replacing the leftmost (or rightmost with a reverse pattern)
    non-overlapping occurrences of the pattern in the source string with
    the replacement format, and number is the count of substitutions made.
    format can be a string (treated as a format string) or a callable
    (passed the match object, must return the replacement string).
    """
    compiled = _compile(pattern, flags, ignore_unused, kwargs, True)
    return compiled.subfn(format, string, count, pos, endpos, concurrent,
      timeout)
313
+
314
def split(pattern, string, maxsplit=0, flags=0, concurrent=None, timeout=None,
  ignore_unused=False, **kwargs):
    """Split the source string by the occurrences of the pattern.

    Returns a list of the resulting substrings. If capturing parentheses
    are used in the pattern, the text of all groups is also included in
    the list. If maxsplit is nonzero, at most maxsplit splits occur and
    the remainder of the string is returned as the final list element.
    """
    compiled = _compile(pattern, flags, ignore_unused, kwargs, True)
    return compiled.split(string, maxsplit, concurrent, timeout)
324
+
325
def splititer(pattern, string, maxsplit=0, flags=0, concurrent=None,
  timeout=None, ignore_unused=False, **kwargs):
    """Return an iterator yielding the parts of a split string."""
    compiled = _compile(pattern, flags, ignore_unused, kwargs, True)
    return compiled.splititer(string, maxsplit, concurrent, timeout)
330
+
331
def findall(pattern, string, flags=0, pos=None, endpos=None, overlapped=False,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Return a list of all matches in the string.

    The matches may be overlapped if overlapped is True. If one or more
    groups are present in the pattern, a list of groups is returned; this
    will be a list of tuples if the pattern has more than one group. Empty
    matches are included in the result.
    """
    compiled = _compile(pattern, flags, ignore_unused, kwargs, True)
    return compiled.findall(string, pos, endpos, overlapped, concurrent,
      timeout)
339
+
340
def finditer(pattern, string, flags=0, pos=None, endpos=None, overlapped=False,
  partial=False, concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Return an iterator over all matches in the string.

    The matches may be overlapped if overlapped is True. For each match,
    the iterator yields a match object. Empty matches are included in the
    result.
    """
    compiled = _compile(pattern, flags, ignore_unused, kwargs, True)
    return compiled.finditer(string, pos, endpos, overlapped, concurrent,
      partial, timeout)
348
+
349
def compile(pattern, flags=0, ignore_unused=False, cache_pattern=None, **kwargs):
    """Compile a regular expression pattern, returning a pattern object.

    cache_pattern overrides the module-wide caching setting for this call;
    None means "use the current cache_all() setting".
    """
    should_cache = _cache_all if cache_pattern is None else cache_pattern
    return _compile(pattern, flags, ignore_unused, kwargs, should_cache)
354
+
355
def purge():
    "Clear the regular expression cache"
    # Drop every cached compiled pattern, and forget which patterns were
    # recorded as locale-sensitive so they're re-evaluated on next compile.
    _cache.clear()
    _locale_sensitive.clear()
359
+
360
+ # Whether to cache all patterns.
361
+ _cache_all = True
362
+
363
def cache_all(value=True):
    """Set whether to cache all patterns, even those that are compiled
    explicitly. Passing None has no effect, but returns the current
    setting."""
    global _cache_all
    if value is None:
        return _cache_all
    _cache_all = value
372
+
373
def template(pattern, flags=0):
    "Compile a template pattern, returning a pattern object."
    # Template patterns are never cached and take no named lists.
    return _compile(pattern, flags | TEMPLATE, False, {}, False)
376
+
377
def escape(pattern, special_only=True, literal_spaces=False):
    """Escape a string for use as a literal in a pattern.

    If special_only is True, escape only special characters (and
    whitespace), else escape all non-alphanumeric characters. If
    literal_spaces is True, don't escape spaces.
    """
    # Work in Unicode; bytes patterns round-trip through latin-1.
    if isinstance(pattern, bytes):
        text = pattern.decode("latin-1")
    else:
        text = pattern

    pieces = []
    for ch in text:
        if literal_spaces and ch == " ":
            pieces.append(ch)
        elif special_only:
            if ch in _METACHARS or ch.isspace():
                pieces.append("\\" + ch)
            else:
                pieces.append(ch)
        elif ch in _ALNUM:
            pieces.append(ch)
        else:
            pieces.append("\\" + ch)

    escaped = "".join(pieces)
    # Convert back to bytes if that's what was passed in.
    if isinstance(pattern, bytes):
        escaped = escaped.encode("latin-1")

    return escaped
413
+
414
+ # --------------------------------------------------------------------
415
+ # Internals.
416
+
417
+ from regex import _regex_core
418
+ from regex import _regex
419
+ from threading import RLock as _RLock
420
+ from locale import getpreferredencoding as _getpreferredencoding
421
+ from regex._regex_core import *
422
+ from regex._regex_core import (_ALL_VERSIONS, _ALL_ENCODINGS, _FirstSetError,
423
+ _UnscopedFlagSet, _check_group_features, _compile_firstset,
424
+ _compile_replacement, _flatten_code, _fold_case, _get_required_string,
425
+ _parse_pattern, _shrink_cache)
426
+ from regex._regex_core import (ALNUM as _ALNUM, Info as _Info, OP as _OP, Source
427
+ as _Source, Fuzzy as _Fuzzy)
428
+
429
+ # Version 0 is the old behaviour, compatible with the original 're' module.
430
+ # Version 1 is the new behaviour, which differs slightly.
431
+
432
+ DEFAULT_VERSION = RegexFlag.VERSION0
433
+
434
+ _METACHARS = frozenset("()[]{}?*+|^$\\.-#&~")
435
+
436
+ _regex_core.DEFAULT_VERSION = DEFAULT_VERSION
437
+
438
+ # Caches for the patterns and replacements.
439
+ _cache = {}
440
+ _cache_lock = _RLock()
441
+ _named_args = {}
442
+ _replacement_cache = {}
443
+ _locale_sensitive = {}
444
+
445
+ # Maximum size of the cache.
446
+ _MAXCACHE = 500
447
+ _MAXREPCACHE = 500
448
+
449
def _compile(pattern, flags, ignore_unused, kwargs, cache_it):
    """Compiles a regular expression to a PatternObject.

    kwargs supplies the named lists referenced by \\L<name> in the pattern.
    If cache_it is true, the compiled pattern is stored in (and may be
    returned from) the module-level cache.
    """

    global DEFAULT_VERSION
    # Pick up a DEFAULT_VERSION the user may have set on the package.
    try:
        from regex import DEFAULT_VERSION
    except ImportError:
        pass

    # We won't bother to cache the pattern if we're debugging.
    if (flags & DEBUG) != 0:
        cache_it = False

    # What locale is this pattern using?
    locale_key = (type(pattern), pattern)
    if _locale_sensitive.get(locale_key, True) or (flags & LOCALE) != 0:
        # This pattern is, or might be, locale-sensitive; unknown patterns
        # default to "might be" until they've been parsed once.
        pattern_locale = _getpreferredencoding()
    else:
        # This pattern is definitely not locale-sensitive.
        pattern_locale = None

    def complain_unused_args():
        # Uses 'args_needed' from the enclosing scope, which is set before
        # either call site below.
        if ignore_unused:
            return

        # Complain about any unused keyword arguments, possibly resulting from a typo.
        unused_kwargs = set(kwargs) - {k for k, v in args_needed}
        if unused_kwargs:
            any_one = next(iter(unused_kwargs))
            raise ValueError('unused keyword argument {!a}'.format(any_one))

    if cache_it:
        try:
            # Do we know what keyword arguments are needed?
            args_key = pattern, type(pattern), flags
            args_needed = _named_args[args_key]

            # Are we being provided with its required keyword arguments?
            args_supplied = set()
            if args_needed:
                for k, v in args_needed:
                    try:
                        args_supplied.add((k, frozenset(kwargs[k])))
                    except KeyError:
                        raise error("missing named list: {!r}".format(k))

            complain_unused_args()

            args_supplied = frozenset(args_supplied)

            # Have we already seen this regular expression and named list?
            pattern_key = (pattern, type(pattern), flags, args_supplied,
              DEFAULT_VERSION, pattern_locale)
            return _cache[pattern_key]
        except KeyError:
            # It's a new pattern, or new named list for a known pattern.
            pass

    # Guess the encoding from the class of the pattern string.
    if isinstance(pattern, str):
        guess_encoding = UNICODE
    elif isinstance(pattern, bytes):
        guess_encoding = ASCII
    elif isinstance(pattern, Pattern):
        # Already compiled; flags can't be re-applied, so reject them.
        if flags:
            raise ValueError("cannot process flags argument with a compiled pattern")

        return pattern
    else:
        raise TypeError("first argument must be a string or compiled pattern")

    # Set the default version in the core code in case it has been changed.
    _regex_core.DEFAULT_VERSION = DEFAULT_VERSION

    global_flags = flags

    # Parse the pattern, retrying if an inline flag applies to the whole
    # pattern (e.g. a trailing "(?x)") and so requires a re-parse.
    while True:
        caught_exception = None
        try:
            source = _Source(pattern)
            info = _Info(global_flags, source.char_type, kwargs)
            info.guess_encoding = guess_encoding
            source.ignore_space = bool(info.flags & VERBOSE)
            parsed = _parse_pattern(source, info)
            break
        except _UnscopedFlagSet:
            # Remember the global flags for the next attempt.
            global_flags = info.global_flags
        except error as e:
            caught_exception = e

        if caught_exception:
            # Re-raise outside the except block to avoid exception chaining.
            raise error(caught_exception.msg, caught_exception.pattern,
              caught_exception.pos)

    if not source.at_end():
        raise error("unbalanced parenthesis", pattern, source.pos)

    # Check the global flags for conflicts.
    version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION
    if version not in (0, VERSION0, VERSION1):
        raise ValueError("VERSION0 and VERSION1 flags are mutually incompatible")

    if (info.flags & _ALL_ENCODINGS) not in (0, ASCII, LOCALE, UNICODE):
        raise ValueError("ASCII, LOCALE and UNICODE flags are mutually incompatible")

    if isinstance(pattern, bytes) and (info.flags & UNICODE):
        raise ValueError("cannot use UNICODE flag with a bytes pattern")

    # Default encoding: UNICODE for str patterns, ASCII for bytes.
    if not (info.flags & _ALL_ENCODINGS):
        if isinstance(pattern, str):
            info.flags |= UNICODE
        else:
            info.flags |= ASCII

    reverse = bool(info.flags & REVERSE)
    fuzzy = isinstance(parsed, _Fuzzy)

    # Remember whether this pattern has an inline locale flag.
    _locale_sensitive[locale_key] = info.inline_locale

    # Fix the group references.
    caught_exception = None
    try:
        parsed.fix_groups(pattern, reverse, False)
    except error as e:
        caught_exception = e

    if caught_exception:
        raise error(caught_exception.msg, caught_exception.pattern,
          caught_exception.pos)

    # Should we print the parsed pattern?
    if flags & DEBUG:
        parsed.dump(indent=0, reverse=reverse)

    # Optimise the parsed pattern.
    parsed = parsed.optimise(info, reverse)
    parsed = parsed.pack_characters(info)

    # Get the required string.
    req_offset, req_chars, req_flags = _get_required_string(parsed, info.flags)

    # Build the named lists.
    named_lists = {}
    named_list_indexes = [None] * len(info.named_lists_used)
    args_needed = set()
    for key, index in info.named_lists_used.items():
        name, case_flags = key
        values = frozenset(kwargs[name])
        if case_flags:
            items = frozenset(_fold_case(info, v) for v in values)
        else:
            items = values
        named_lists[name] = values
        named_list_indexes[index] = items
        args_needed.add((name, values))

    complain_unused_args()

    # Check the features of the groups.
    _check_group_features(info, parsed)

    # Compile the parsed pattern. The result is a list of tuples.
    code = parsed.compile(reverse)

    # Is there a group call to the pattern as a whole?
    key = (0, reverse, fuzzy)
    ref = info.call_refs.get(key)
    if ref is not None:
        code = [(_OP.CALL_REF, ref)] + code + [(_OP.END, )]

    # Add the final 'success' opcode.
    code += [(_OP.SUCCESS, )]

    # Compile the additional copies of the groups that we need.
    for group, rev, fuz in info.additional_groups:
        code += group.compile(rev, fuz)

    # Flatten the code into a list of ints.
    code = _flatten_code(code)

    if not parsed.has_simple_start():
        # Get the first set, if possible.
        try:
            fs_code = _compile_firstset(info, parsed.get_firstset(reverse))
            fs_code = _flatten_code(fs_code)
            code = fs_code + code
        except _FirstSetError:
            pass

    # The named capture groups.
    index_group = dict((v, n) for n, v in info.group_index.items())

    # Create the PatternObject.
    #
    # Local flags like IGNORECASE affect the code generation, but aren't needed
    # by the PatternObject itself. Conversely, global flags like LOCALE _don't_
    # affect the code generation but _are_ needed by the PatternObject.
    compiled_pattern = _regex.compile(pattern, info.flags | version, code,
      info.group_index, index_group, named_lists, named_list_indexes,
      req_offset, req_chars, req_flags, info.group_count)

    # Do we need to reduce the size of the cache?
    if len(_cache) >= _MAXCACHE:
        with _cache_lock:
            _shrink_cache(_cache, _named_args, _locale_sensitive, _MAXCACHE)

    if cache_it:
        if (info.flags & LOCALE) == 0:
            pattern_locale = None

        args_needed = frozenset(args_needed)

        # Store this regular expression and named list.
        pattern_key = (pattern, type(pattern), flags, args_needed,
          DEFAULT_VERSION, pattern_locale)
        _cache[pattern_key] = compiled_pattern

        # Store what keyword arguments are needed.
        _named_args[args_key] = args_needed

    return compiled_pattern
673
+
674
def _compile_replacement_helper(pattern, template):
    """Compiles a replacement template.

    Returns a list whose items are either literal strings or int group
    references, cached per (pattern, flags, template).
    """
    # This function is called by the _regex module.

    # Have we seen this before?
    key = pattern.pattern, pattern.flags, template
    compiled = _replacement_cache.get(key)
    if compiled is not None:
        return compiled

    # Crude cache bound: wipe everything rather than evicting selectively.
    if len(_replacement_cache) >= _MAXREPCACHE:
        _replacement_cache.clear()

    is_unicode = isinstance(template, str)
    source = _Source(template)
    if is_unicode:
        def make_string(char_codes):
            return "".join(chr(c) for c in char_codes)
    else:
        def make_string(char_codes):
            return bytes(char_codes)

    compiled = []
    literal = []
    while True:
        ch = source.get()
        if not ch:
            break
        if ch == "\\":
            # '_compile_replacement' will return either an int group reference
            # or a string literal. It returns items (plural) in order to handle
            # a 2-character literal (an invalid escape sequence).
            is_group, items = _compile_replacement(source, pattern, is_unicode)
            if is_group:
                # It's a group, so first flush the literal.
                if literal:
                    compiled.append(make_string(literal))
                    literal = []
                compiled.extend(items)
            else:
                literal.extend(items)
        else:
            # Accumulate ordinary characters as code points; they're joined
            # into a single string when flushed.
            literal.append(ord(ch))

    # Flush the literal.
    if literal:
        compiled.append(make_string(literal))

    _replacement_cache[key] = compiled

    return compiled
725
+
726
+ # We define Pattern here after all the support objects have been defined.
727
+ _pat = _compile('', 0, False, {}, False)
728
+ Pattern = type(_pat)
729
+ Match = type(_pat.match(''))
730
+ del _pat
731
+
732
+ # Make Pattern public for typing annotations.
733
+ __all__.append("Pattern")
734
+ __all__.append("Match")
735
+
736
+ # We'll define an alias for the 'compile' function so that the repr of a
737
+ # pattern object is eval-able.
738
+ Regex = compile
739
+
740
+ # Register myself for pickling.
741
+ import copyreg as _copy_reg
742
+
743
def _pickle(pattern):
    # Pickling hook registered with copyreg: a Pattern is rebuilt by
    # calling _regex.compile on the data it stashed at compile time.
    return _regex.compile, pattern._pickled_data
745
+
746
+ _copy_reg.pickle(Pattern, _pickle)
python_env/lib/site-packages/regex/_regex.cp310-win_amd64.pyd ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b5e491596e4cabad7e158b167105abec56dd656822ff4c81b838b62a4309a215
3
+ size 726016
python_env/lib/site-packages/regex/_regex_core.py ADDED
The diff for this file is too large to render. See raw diff
 
python_env/lib/site-packages/regex/tests/__pycache__/test_regex.cpython-310.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:85f9292286a2ab85f4215ca2a0c7265b0d5272e7b7b32c3aa052e845148e9991
3
+ size 144991
python_env/lib/site-packages/regex/tests/test_regex.py ADDED
The diff for this file is too large to render. See raw diff
 
python_env/lib/site-packages/requests-2.32.5.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
python_env/lib/site-packages/requests-2.32.5.dist-info/METADATA ADDED
@@ -0,0 +1,133 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.4
2
+ Name: requests
3
+ Version: 2.32.5
4
+ Summary: Python HTTP for Humans.
5
+ Home-page: https://requests.readthedocs.io
6
+ Author: Kenneth Reitz
7
+ Author-email: me@kennethreitz.org
8
+ License: Apache-2.0
9
+ Project-URL: Documentation, https://requests.readthedocs.io
10
+ Project-URL: Source, https://github.com/psf/requests
11
+ Classifier: Development Status :: 5 - Production/Stable
12
+ Classifier: Environment :: Web Environment
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: Apache Software License
15
+ Classifier: Natural Language :: English
16
+ Classifier: Operating System :: OS Independent
17
+ Classifier: Programming Language :: Python
18
+ Classifier: Programming Language :: Python :: 3
19
+ Classifier: Programming Language :: Python :: 3.9
20
+ Classifier: Programming Language :: Python :: 3.10
21
+ Classifier: Programming Language :: Python :: 3.11
22
+ Classifier: Programming Language :: Python :: 3.12
23
+ Classifier: Programming Language :: Python :: 3.13
24
+ Classifier: Programming Language :: Python :: 3.14
25
+ Classifier: Programming Language :: Python :: 3 :: Only
26
+ Classifier: Programming Language :: Python :: Implementation :: CPython
27
+ Classifier: Programming Language :: Python :: Implementation :: PyPy
28
+ Classifier: Topic :: Internet :: WWW/HTTP
29
+ Classifier: Topic :: Software Development :: Libraries
30
+ Requires-Python: >=3.9
31
+ Description-Content-Type: text/markdown
32
+ License-File: LICENSE
33
+ Requires-Dist: charset_normalizer<4,>=2
34
+ Requires-Dist: idna<4,>=2.5
35
+ Requires-Dist: urllib3<3,>=1.21.1
36
+ Requires-Dist: certifi>=2017.4.17
37
+ Provides-Extra: security
38
+ Provides-Extra: socks
39
+ Requires-Dist: PySocks!=1.5.7,>=1.5.6; extra == "socks"
40
+ Provides-Extra: use-chardet-on-py3
41
+ Requires-Dist: chardet<6,>=3.0.2; extra == "use-chardet-on-py3"
42
+ Dynamic: author
43
+ Dynamic: author-email
44
+ Dynamic: classifier
45
+ Dynamic: description
46
+ Dynamic: description-content-type
47
+ Dynamic: home-page
48
+ Dynamic: license
49
+ Dynamic: license-file
50
+ Dynamic: project-url
51
+ Dynamic: provides-extra
52
+ Dynamic: requires-dist
53
+ Dynamic: requires-python
54
+ Dynamic: summary
55
+
56
+ # Requests
57
+
58
+ **Requests** is a simple, yet elegant, HTTP library.
59
+
60
+ ```python
61
+ >>> import requests
62
+ >>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))
63
+ >>> r.status_code
64
+ 200
65
+ >>> r.headers['content-type']
66
+ 'application/json; charset=utf8'
67
+ >>> r.encoding
68
+ 'utf-8'
69
+ >>> r.text
70
+ '{"authenticated": true, ...'
71
+ >>> r.json()
72
+ {'authenticated': True, ...}
73
+ ```
74
+
75
+ Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method!
76
+
77
+ Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code.
78
+
79
+ [![Downloads](https://static.pepy.tech/badge/requests/month)](https://pepy.tech/project/requests)
80
+ [![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)
81
+ [![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)
82
+
83
+ ## Installing Requests and Supported Versions
84
+
85
+ Requests is available on PyPI:
86
+
87
+ ```console
88
+ $ python -m pip install requests
89
+ ```
90
+
91
+ Requests officially supports Python 3.9+.
92
+
93
+ ## Supported Features & Best–Practices
94
+
95
+ Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today.
96
+
97
+ - Keep-Alive & Connection Pooling
98
+ - International Domains and URLs
99
+ - Sessions with Cookie Persistence
100
+ - Browser-style TLS/SSL Verification
101
+ - Basic & Digest Authentication
102
+ - Familiar `dict`–like Cookies
103
+ - Automatic Content Decompression and Decoding
104
+ - Multi-part File Uploads
105
+ - SOCKS Proxy Support
106
+ - Connection Timeouts
107
+ - Streaming Downloads
108
+ - Automatic honoring of `.netrc`
109
+ - Chunked HTTP Requests
110
+
111
+ ## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)
112
+
113
+ [![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io)
114
+
115
+ ## Cloning the repository
116
+
117
+ When cloning the Requests repository, you may need to add the `-c
118
+ fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit timestamp (see
119
+ [this issue](https://github.com/psf/requests/issues/2690) for more background):
120
+
121
+ ```shell
122
+ git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git
123
+ ```
124
+
125
+ You can also apply this setting to your global Git config:
126
+
127
+ ```shell
128
+ git config --global fetch.fsck.badTimezone ignore
129
+ ```
130
+
131
+ ---
132
+
133
+ [![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf)
python_env/lib/site-packages/requests-2.32.5.dist-info/RECORD ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ requests-2.32.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2
+ requests-2.32.5.dist-info/METADATA,sha256=ZbWgjagfSRVRPnYJZf8Ut1GPZbe7Pv4NqzZLvMTUDLA,4945
3
+ requests-2.32.5.dist-info/RECORD,,
4
+ requests-2.32.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
5
+ requests-2.32.5.dist-info/licenses/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
6
+ requests-2.32.5.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9
7
+ requests/__init__.py,sha256=4xaAERmPDIBPsa2PsjpU9r06yooK-2mZKHTZAhWRWts,5072
8
+ requests/__pycache__/__init__.cpython-310.pyc,,
9
+ requests/__pycache__/__version__.cpython-310.pyc,,
10
+ requests/__pycache__/_internal_utils.cpython-310.pyc,,
11
+ requests/__pycache__/adapters.cpython-310.pyc,,
12
+ requests/__pycache__/api.cpython-310.pyc,,
13
+ requests/__pycache__/auth.cpython-310.pyc,,
14
+ requests/__pycache__/certs.cpython-310.pyc,,
15
+ requests/__pycache__/compat.cpython-310.pyc,,
16
+ requests/__pycache__/cookies.cpython-310.pyc,,
17
+ requests/__pycache__/exceptions.cpython-310.pyc,,
18
+ requests/__pycache__/help.cpython-310.pyc,,
19
+ requests/__pycache__/hooks.cpython-310.pyc,,
20
+ requests/__pycache__/models.cpython-310.pyc,,
21
+ requests/__pycache__/packages.cpython-310.pyc,,
22
+ requests/__pycache__/sessions.cpython-310.pyc,,
23
+ requests/__pycache__/status_codes.cpython-310.pyc,,
24
+ requests/__pycache__/structures.cpython-310.pyc,,
25
+ requests/__pycache__/utils.cpython-310.pyc,,
26
+ requests/__version__.py,sha256=QKDceK8K_ujqwDDc3oYrR0odOBYgKVOQQ5vFap_G_cg,435
27
+ requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495
28
+ requests/adapters.py,sha256=8nX113gbb123aUtx2ETkAN_6IsYX-M2fRoLGluTEcRk,26285
29
+ requests/api.py,sha256=_Zb9Oa7tzVIizTKwFrPjDEY9ejtm_OnSRERnADxGsQs,6449
30
+ requests/auth.py,sha256=kF75tqnLctZ9Mf_hm9TZIj4cQWnN5uxRz8oWsx5wmR0,10186
31
+ requests/certs.py,sha256=Z9Sb410Anv6jUFTyss0jFFhU6xst8ctELqfy8Ev23gw,429
32
+ requests/compat.py,sha256=J7sIjR6XoDGp5JTVzOxkK5fSoUVUa_Pjc7iRZhAWGmI,2142
33
+ requests/cookies.py,sha256=bNi-iqEj4NPZ00-ob-rHvzkvObzN3lEpgw3g6paS3Xw,18590
34
+ requests/exceptions.py,sha256=jJPS1UWATs86ShVUaLorTiJb1SaGuoNEWgICJep-VkY,4260
35
+ requests/help.py,sha256=gPX5d_H7Xd88aDABejhqGgl9B1VFRTt5BmiYvL3PzIQ,3875
36
+ requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733
37
+ requests/models.py,sha256=MjZdZ4k7tnw-1nz5PKShjmPmqyk0L6DciwnFngb_Vk4,35510
38
+ requests/packages.py,sha256=_g0gZ681UyAlKHRjH6kanbaoxx2eAb6qzcXiODyTIoc,904
39
+ requests/sessions.py,sha256=Cl1dpEnOfwrzzPbku-emepNeN4Rt_0_58Iy2x-JGTm8,30503
40
+ requests/status_codes.py,sha256=iJUAeA25baTdw-6PfD0eF4qhpINDJRJI-yaMqxs4LEI,4322
41
+ requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912
42
+ requests/utils.py,sha256=WqU86rZ3wvhC-tQjWcjtH_HEKZwWB3iWCZV6SW5DEdQ,33213
python_env/lib/site-packages/requests-2.32.5.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
python_env/lib/site-packages/requests-2.32.5.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,175 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ Apache License
3
+ Version 2.0, January 2004
4
+ http://www.apache.org/licenses/
5
+
6
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7
+
8
+ 1. Definitions.
9
+
10
+ "License" shall mean the terms and conditions for use, reproduction,
11
+ and distribution as defined by Sections 1 through 9 of this document.
12
+
13
+ "Licensor" shall mean the copyright owner or entity authorized by
14
+ the copyright owner that is granting the License.
15
+
16
+ "Legal Entity" shall mean the union of the acting entity and all
17
+ other entities that control, are controlled by, or are under common
18
+ control with that entity. For the purposes of this definition,
19
+ "control" means (i) the power, direct or indirect, to cause the
20
+ direction or management of such entity, whether by contract or
21
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
22
+ outstanding shares, or (iii) beneficial ownership of such entity.
23
+
24
+ "You" (or "Your") shall mean an individual or Legal Entity
25
+ exercising permissions granted by this License.
26
+
27
+ "Source" form shall mean the preferred form for making modifications,
28
+ including but not limited to software source code, documentation
29
+ source, and configuration files.
30
+
31
+ "Object" form shall mean any form resulting from mechanical
32
+ transformation or translation of a Source form, including but
33
+ not limited to compiled object code, generated documentation,
34
+ and conversions to other media types.
35
+
36
+ "Work" shall mean the work of authorship, whether in Source or
37
+ Object form, made available under the License, as indicated by a
38
+ copyright notice that is included in or attached to the work
39
+ (an example is provided in the Appendix below).
40
+
41
+ "Derivative Works" shall mean any work, whether in Source or Object
42
+ form, that is based on (or derived from) the Work and for which the
43
+ editorial revisions, annotations, elaborations, or other modifications
44
+ represent, as a whole, an original work of authorship. For the purposes
45
+ of this License, Derivative Works shall not include works that remain
46
+ separable from, or merely link (or bind by name) to the interfaces of,
47
+ the Work and Derivative Works thereof.
48
+
49
+ "Contribution" shall mean any work of authorship, including
50
+ the original version of the Work and any modifications or additions
51
+ to that Work or Derivative Works thereof, that is intentionally
52
+ submitted to Licensor for inclusion in the Work by the copyright owner
53
+ or by an individual or Legal Entity authorized to submit on behalf of
54
+ the copyright owner. For the purposes of this definition, "submitted"
55
+ means any form of electronic, verbal, or written communication sent
56
+ to the Licensor or its representatives, including but not limited to
57
+ communication on electronic mailing lists, source code control systems,
58
+ and issue tracking systems that are managed by, or on behalf of, the
59
+ Licensor for the purpose of discussing and improving the Work, but
60
+ excluding communication that is conspicuously marked or otherwise
61
+ designated in writing by the copyright owner as "Not a Contribution."
62
+
63
+ "Contributor" shall mean Licensor and any individual or Legal Entity
64
+ on behalf of whom a Contribution has been received by Licensor and
65
+ subsequently incorporated within the Work.
66
+
67
+ 2. Grant of Copyright License. Subject to the terms and conditions of
68
+ this License, each Contributor hereby grants to You a perpetual,
69
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70
+ copyright license to reproduce, prepare Derivative Works of,
71
+ publicly display, publicly perform, sublicense, and distribute the
72
+ Work and such Derivative Works in Source or Object form.
73
+
74
+ 3. Grant of Patent License. Subject to the terms and conditions of
75
+ this License, each Contributor hereby grants to You a perpetual,
76
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77
+ (except as stated in this section) patent license to make, have made,
78
+ use, offer to sell, sell, import, and otherwise transfer the Work,
79
+ where such license applies only to those patent claims licensable
80
+ by such Contributor that are necessarily infringed by their
81
+ Contribution(s) alone or by combination of their Contribution(s)
82
+ with the Work to which such Contribution(s) was submitted. If You
83
+ institute patent litigation against any entity (including a
84
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
85
+ or a Contribution incorporated within the Work constitutes direct
86
+ or contributory patent infringement, then any patent licenses
87
+ granted to You under this License for that Work shall terminate
88
+ as of the date such litigation is filed.
89
+
90
+ 4. Redistribution. You may reproduce and distribute copies of the
91
+ Work or Derivative Works thereof in any medium, with or without
92
+ modifications, and in Source or Object form, provided that You
93
+ meet the following conditions:
94
+
95
+ (a) You must give any other recipients of the Work or
96
+ Derivative Works a copy of this License; and
97
+
98
+ (b) You must cause any modified files to carry prominent notices
99
+ stating that You changed the files; and
100
+
101
+ (c) You must retain, in the Source form of any Derivative Works
102
+ that You distribute, all copyright, patent, trademark, and
103
+ attribution notices from the Source form of the Work,
104
+ excluding those notices that do not pertain to any part of
105
+ the Derivative Works; and
106
+
107
+ (d) If the Work includes a "NOTICE" text file as part of its
108
+ distribution, then any Derivative Works that You distribute must
109
+ include a readable copy of the attribution notices contained
110
+ within such NOTICE file, excluding those notices that do not
111
+ pertain to any part of the Derivative Works, in at least one
112
+ of the following places: within a NOTICE text file distributed
113
+ as part of the Derivative Works; within the Source form or
114
+ documentation, if provided along with the Derivative Works; or,
115
+ within a display generated by the Derivative Works, if and
116
+ wherever such third-party notices normally appear. The contents
117
+ of the NOTICE file are for informational purposes only and
118
+ do not modify the License. You may add Your own attribution
119
+ notices within Derivative Works that You distribute, alongside
120
+ or as an addendum to the NOTICE text from the Work, provided
121
+ that such additional attribution notices cannot be construed
122
+ as modifying the License.
123
+
124
+ You may add Your own copyright statement to Your modifications and
125
+ may provide additional or different license terms and conditions
126
+ for use, reproduction, or distribution of Your modifications, or
127
+ for any such Derivative Works as a whole, provided Your use,
128
+ reproduction, and distribution of the Work otherwise complies with
129
+ the conditions stated in this License.
130
+
131
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
132
+ any Contribution intentionally submitted for inclusion in the Work
133
+ by You to the Licensor shall be under the terms and conditions of
134
+ this License, without any additional terms or conditions.
135
+ Notwithstanding the above, nothing herein shall supersede or modify
136
+ the terms of any separate license agreement you may have executed
137
+ with Licensor regarding such Contributions.
138
+
139
+ 6. Trademarks. This License does not grant permission to use the trade
140
+ names, trademarks, service marks, or product names of the Licensor,
141
+ except as required for reasonable and customary use in describing the
142
+ origin of the Work and reproducing the content of the NOTICE file.
143
+
144
+ 7. Disclaimer of Warranty. Unless required by applicable law or
145
+ agreed to in writing, Licensor provides the Work (and each
146
+ Contributor provides its Contributions) on an "AS IS" BASIS,
147
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148
+ implied, including, without limitation, any warranties or conditions
149
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150
+ PARTICULAR PURPOSE. You are solely responsible for determining the
151
+ appropriateness of using or redistributing the Work and assume any
152
+ risks associated with Your exercise of permissions under this License.
153
+
154
+ 8. Limitation of Liability. In no event and under no legal theory,
155
+ whether in tort (including negligence), contract, or otherwise,
156
+ unless required by applicable law (such as deliberate and grossly
157
+ negligent acts) or agreed to in writing, shall any Contributor be
158
+ liable to You for damages, including any direct, indirect, special,
159
+ incidental, or consequential damages of any character arising as a
160
+ result of this License or out of the use or inability to use the
161
+ Work (including but not limited to damages for loss of goodwill,
162
+ work stoppage, computer failure or malfunction, or any and all
163
+ other commercial damages or losses), even if such Contributor
164
+ has been advised of the possibility of such damages.
165
+
166
+ 9. Accepting Warranty or Additional Liability. While redistributing
167
+ the Work or Derivative Works thereof, You may choose to offer,
168
+ and charge a fee for, acceptance of support, warranty, indemnity,
169
+ or other liability obligations and/or rights consistent with this
170
+ License. However, in accepting such obligations, You may act only
171
+ on Your own behalf and on Your sole responsibility, not on behalf
172
+ of any other Contributor, and only if You agree to indemnify,
173
+ defend, and hold each Contributor harmless for any liability
174
+ incurred by, or claims asserted against, such Contributor by reason
175
+ of your accepting any such warranty or additional liability.
python_env/lib/site-packages/requests-2.32.5.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ requests
python_env/lib/site-packages/requests/__init__.py ADDED
@@ -0,0 +1,184 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # __
2
+ # /__) _ _ _ _ _/ _
3
+ # / ( (- (/ (/ (- _) / _)
4
+ # /
5
+
6
+ """
7
+ Requests HTTP Library
8
+ ~~~~~~~~~~~~~~~~~~~~~
9
+
10
+ Requests is an HTTP library, written in Python, for human beings.
11
+ Basic GET usage:
12
+
13
+ >>> import requests
14
+ >>> r = requests.get('https://www.python.org')
15
+ >>> r.status_code
16
+ 200
17
+ >>> b'Python is a programming language' in r.content
18
+ True
19
+
20
+ ... or POST:
21
+
22
+ >>> payload = dict(key1='value1', key2='value2')
23
+ >>> r = requests.post('https://httpbin.org/post', data=payload)
24
+ >>> print(r.text)
25
+ {
26
+ ...
27
+ "form": {
28
+ "key1": "value1",
29
+ "key2": "value2"
30
+ },
31
+ ...
32
+ }
33
+
34
+ The other HTTP methods are supported - see `requests.api`. Full documentation
35
+ is at <https://requests.readthedocs.io>.
36
+
37
+ :copyright: (c) 2017 by Kenneth Reitz.
38
+ :license: Apache 2.0, see LICENSE for more details.
39
+ """
40
+
41
+ import warnings
42
+
43
+ import urllib3
44
+
45
+ from .exceptions import RequestsDependencyWarning
46
+
47
+ try:
48
+ from charset_normalizer import __version__ as charset_normalizer_version
49
+ except ImportError:
50
+ charset_normalizer_version = None
51
+
52
+ try:
53
+ from chardet import __version__ as chardet_version
54
+ except ImportError:
55
+ chardet_version = None
56
+
57
+
58
+ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
59
+ urllib3_version = urllib3_version.split(".")
60
+ assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git.
61
+
62
+ # Sometimes, urllib3 only reports its version as 16.1.
63
+ if len(urllib3_version) == 2:
64
+ urllib3_version.append("0")
65
+
66
+ # Check urllib3 for compatibility.
67
+ major, minor, patch = urllib3_version # noqa: F811
68
+ major, minor, patch = int(major), int(minor), int(patch)
69
+ # urllib3 >= 1.21.1
70
+ assert major >= 1
71
+ if major == 1:
72
+ assert minor >= 21
73
+
74
+ # Check charset_normalizer for compatibility.
75
+ if chardet_version:
76
+ major, minor, patch = chardet_version.split(".")[:3]
77
+ major, minor, patch = int(major), int(minor), int(patch)
78
+ # chardet_version >= 3.0.2, < 6.0.0
79
+ assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0)
80
+ elif charset_normalizer_version:
81
+ major, minor, patch = charset_normalizer_version.split(".")[:3]
82
+ major, minor, patch = int(major), int(minor), int(patch)
83
+ # charset_normalizer >= 2.0.0 < 4.0.0
84
+ assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0)
85
+ else:
86
+ warnings.warn(
87
+ "Unable to find acceptable character detection dependency "
88
+ "(chardet or charset_normalizer).",
89
+ RequestsDependencyWarning,
90
+ )
91
+
92
+
93
+ def _check_cryptography(cryptography_version):
94
+ # cryptography < 1.3.4
95
+ try:
96
+ cryptography_version = list(map(int, cryptography_version.split(".")))
97
+ except ValueError:
98
+ return
99
+
100
+ if cryptography_version < [1, 3, 4]:
101
+ warning = "Old version of cryptography ({}) may cause slowdown.".format(
102
+ cryptography_version
103
+ )
104
+ warnings.warn(warning, RequestsDependencyWarning)
105
+
106
+
107
+ # Check imported dependencies for compatibility.
108
+ try:
109
+ check_compatibility(
110
+ urllib3.__version__, chardet_version, charset_normalizer_version
111
+ )
112
+ except (AssertionError, ValueError):
113
+ warnings.warn(
114
+ "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
115
+ "version!".format(
116
+ urllib3.__version__, chardet_version, charset_normalizer_version
117
+ ),
118
+ RequestsDependencyWarning,
119
+ )
120
+
121
+ # Attempt to enable urllib3's fallback for SNI support
122
+ # if the standard library doesn't support SNI or the
123
+ # 'ssl' library isn't available.
124
+ try:
125
+ try:
126
+ import ssl
127
+ except ImportError:
128
+ ssl = None
129
+
130
+ if not getattr(ssl, "HAS_SNI", False):
131
+ from urllib3.contrib import pyopenssl
132
+
133
+ pyopenssl.inject_into_urllib3()
134
+
135
+ # Check cryptography version
136
+ from cryptography import __version__ as cryptography_version
137
+
138
+ _check_cryptography(cryptography_version)
139
+ except ImportError:
140
+ pass
141
+
142
+ # urllib3's DependencyWarnings should be silenced.
143
+ from urllib3.exceptions import DependencyWarning
144
+
145
+ warnings.simplefilter("ignore", DependencyWarning)
146
+
147
+ # Set default logging handler to avoid "No handler found" warnings.
148
+ import logging
149
+ from logging import NullHandler
150
+
151
+ from . import packages, utils
152
+ from .__version__ import (
153
+ __author__,
154
+ __author_email__,
155
+ __build__,
156
+ __cake__,
157
+ __copyright__,
158
+ __description__,
159
+ __license__,
160
+ __title__,
161
+ __url__,
162
+ __version__,
163
+ )
164
+ from .api import delete, get, head, options, patch, post, put, request
165
+ from .exceptions import (
166
+ ConnectionError,
167
+ ConnectTimeout,
168
+ FileModeWarning,
169
+ HTTPError,
170
+ JSONDecodeError,
171
+ ReadTimeout,
172
+ RequestException,
173
+ Timeout,
174
+ TooManyRedirects,
175
+ URLRequired,
176
+ )
177
+ from .models import PreparedRequest, Request, Response
178
+ from .sessions import Session, session
179
+ from .status_codes import codes
180
+
181
+ logging.getLogger(__name__).addHandler(NullHandler())
182
+
183
+ # FileModeWarnings go off per the default.
184
+ warnings.simplefilter("default", FileModeWarning, append=True)
python_env/lib/site-packages/requests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (3.91 kB). View file
 
python_env/lib/site-packages/requests/__pycache__/__version__.cpython-310.pyc ADDED
Binary file (525 Bytes). View file
 
python_env/lib/site-packages/requests/__pycache__/_internal_utils.cpython-310.pyc ADDED
Binary file (1.6 kB). View file
 
python_env/lib/site-packages/requests/__pycache__/adapters.cpython-310.pyc ADDED
Binary file (21.7 kB). View file
 
python_env/lib/site-packages/requests/__pycache__/api.cpython-310.pyc ADDED
Binary file (6.7 kB). View file
 
python_env/lib/site-packages/requests/__pycache__/auth.cpython-310.pyc ADDED
Binary file (8.1 kB). View file
 
python_env/lib/site-packages/requests/__pycache__/certs.cpython-310.pyc ADDED
Binary file (602 Bytes). View file
 
python_env/lib/site-packages/requests/__pycache__/compat.cpython-310.pyc ADDED
Binary file (1.93 kB). View file
 
python_env/lib/site-packages/requests/__pycache__/cookies.cpython-310.pyc ADDED
Binary file (18.7 kB). View file
 
python_env/lib/site-packages/requests/__pycache__/exceptions.cpython-310.pyc ADDED
Binary file (6.2 kB). View file
 
python_env/lib/site-packages/requests/__pycache__/help.cpython-310.pyc ADDED
Binary file (2.82 kB). View file
 
python_env/lib/site-packages/requests/__pycache__/hooks.cpython-310.pyc ADDED
Binary file (969 Bytes). View file
 
python_env/lib/site-packages/requests/__pycache__/models.cpython-310.pyc ADDED
Binary file (24.3 kB). View file
 
python_env/lib/site-packages/requests/__pycache__/packages.cpython-310.pyc ADDED
Binary file (606 Bytes). View file
 
python_env/lib/site-packages/requests/__pycache__/sessions.cpython-310.pyc ADDED
Binary file (19.7 kB). View file
 
python_env/lib/site-packages/requests/__pycache__/status_codes.cpython-310.pyc ADDED
Binary file (4.72 kB). View file
 
python_env/lib/site-packages/requests/__pycache__/structures.cpython-310.pyc ADDED
Binary file (4.41 kB). View file
 
python_env/lib/site-packages/requests/__pycache__/utils.cpython-310.pyc ADDED
Binary file (24.5 kB). View file
 
python_env/lib/site-packages/requests/__version__.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # .-. .-. .-. . . .-. .-. .-. .-.
2
+ # |( |- |.| | | |- `-. | `-.
3
+ # ' ' `-' `-`.`-' `-' `-' ' `-'
4
+
5
+ __title__ = "requests"
6
+ __description__ = "Python HTTP for Humans."
7
+ __url__ = "https://requests.readthedocs.io"
8
+ __version__ = "2.32.5"
9
+ __build__ = 0x023205
10
+ __author__ = "Kenneth Reitz"
11
+ __author_email__ = "me@kennethreitz.org"
12
+ __license__ = "Apache-2.0"
13
+ __copyright__ = "Copyright Kenneth Reitz"
14
+ __cake__ = "\u2728 \U0001f370 \u2728"
python_env/lib/site-packages/requests/_internal_utils.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ requests._internal_utils
3
+ ~~~~~~~~~~~~~~
4
+
5
+ Provides utility functions that are consumed internally by Requests
6
+ which depend on extremely few external helpers (such as compat)
7
+ """
8
+ import re
9
+
10
+ from .compat import builtin_str
11
+
12
+ _VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$")
13
+ _VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$")
14
+ _VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$")
15
+ _VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$")
16
+
17
+ _HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR)
18
+ _HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE)
19
+ HEADER_VALIDATORS = {
20
+ bytes: _HEADER_VALIDATORS_BYTE,
21
+ str: _HEADER_VALIDATORS_STR,
22
+ }
23
+
24
+
25
+ def to_native_string(string, encoding="ascii"):
26
+ """Given a string object, regardless of type, returns a representation of
27
+ that string in the native string type, encoding and decoding where
28
+ necessary. This assumes ASCII unless told otherwise.
29
+ """
30
+ if isinstance(string, builtin_str):
31
+ out = string
32
+ else:
33
+ out = string.decode(encoding)
34
+
35
+ return out
36
+
37
+
38
+ def unicode_is_ascii(u_string):
39
+ """Determine if unicode string only contains ASCII characters.
40
+
41
+ :param str u_string: unicode string to check. Must be unicode
42
+ and not Python 2 `str`.
43
+ :rtype: bool
44
+ """
45
+ assert isinstance(u_string, str)
46
+ try:
47
+ u_string.encode("ascii")
48
+ return True
49
+ except UnicodeEncodeError:
50
+ return False
python_env/lib/site-packages/requests/adapters.py ADDED
@@ -0,0 +1,696 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ requests.adapters
3
+ ~~~~~~~~~~~~~~~~~
4
+
5
+ This module contains the transport adapters that Requests uses to define
6
+ and maintain connections.
7
+ """
8
+
9
+ import os.path
10
+ import socket # noqa: F401
11
+ import typing
12
+ import warnings
13
+
14
+ from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
15
+ from urllib3.exceptions import HTTPError as _HTTPError
16
+ from urllib3.exceptions import InvalidHeader as _InvalidHeader
17
+ from urllib3.exceptions import (
18
+ LocationValueError,
19
+ MaxRetryError,
20
+ NewConnectionError,
21
+ ProtocolError,
22
+ )
23
+ from urllib3.exceptions import ProxyError as _ProxyError
24
+ from urllib3.exceptions import ReadTimeoutError, ResponseError
25
+ from urllib3.exceptions import SSLError as _SSLError
26
+ from urllib3.poolmanager import PoolManager, proxy_from_url
27
+ from urllib3.util import Timeout as TimeoutSauce
28
+ from urllib3.util import parse_url
29
+ from urllib3.util.retry import Retry
30
+
31
+ from .auth import _basic_auth_str
32
+ from .compat import basestring, urlparse
33
+ from .cookies import extract_cookies_to_jar
34
+ from .exceptions import (
35
+ ConnectionError,
36
+ ConnectTimeout,
37
+ InvalidHeader,
38
+ InvalidProxyURL,
39
+ InvalidSchema,
40
+ InvalidURL,
41
+ ProxyError,
42
+ ReadTimeout,
43
+ RetryError,
44
+ SSLError,
45
+ )
46
+ from .models import Response
47
+ from .structures import CaseInsensitiveDict
48
+ from .utils import (
49
+ DEFAULT_CA_BUNDLE_PATH,
50
+ extract_zipped_paths,
51
+ get_auth_from_url,
52
+ get_encoding_from_headers,
53
+ prepend_scheme_if_needed,
54
+ select_proxy,
55
+ urldefragauth,
56
+ )
57
+
58
+ try:
59
+ from urllib3.contrib.socks import SOCKSProxyManager
60
+ except ImportError:
61
+
62
+ def SOCKSProxyManager(*args, **kwargs):
63
+ raise InvalidSchema("Missing dependencies for SOCKS support.")
64
+
65
+
66
+ if typing.TYPE_CHECKING:
67
+ from .models import PreparedRequest
68
+
69
+
70
+ DEFAULT_POOLBLOCK = False
71
+ DEFAULT_POOLSIZE = 10
72
+ DEFAULT_RETRIES = 0
73
+ DEFAULT_POOL_TIMEOUT = None
74
+
75
+
76
+ def _urllib3_request_context(
77
+ request: "PreparedRequest",
78
+ verify: "bool | str | None",
79
+ client_cert: "typing.Tuple[str, str] | str | None",
80
+ poolmanager: "PoolManager",
81
+ ) -> "(typing.Dict[str, typing.Any], typing.Dict[str, typing.Any])":
82
+ host_params = {}
83
+ pool_kwargs = {}
84
+ parsed_request_url = urlparse(request.url)
85
+ scheme = parsed_request_url.scheme.lower()
86
+ port = parsed_request_url.port
87
+
88
+ cert_reqs = "CERT_REQUIRED"
89
+ if verify is False:
90
+ cert_reqs = "CERT_NONE"
91
+ elif isinstance(verify, str):
92
+ if not os.path.isdir(verify):
93
+ pool_kwargs["ca_certs"] = verify
94
+ else:
95
+ pool_kwargs["ca_cert_dir"] = verify
96
+ pool_kwargs["cert_reqs"] = cert_reqs
97
+ if client_cert is not None:
98
+ if isinstance(client_cert, tuple) and len(client_cert) == 2:
99
+ pool_kwargs["cert_file"] = client_cert[0]
100
+ pool_kwargs["key_file"] = client_cert[1]
101
+ else:
102
+ # According to our docs, we allow users to specify just the client
103
+ # cert path
104
+ pool_kwargs["cert_file"] = client_cert
105
+ host_params = {
106
+ "scheme": scheme,
107
+ "host": parsed_request_url.hostname,
108
+ "port": port,
109
+ }
110
+ return host_params, pool_kwargs
111
+
112
+
113
+ class BaseAdapter:
114
+ """The Base Transport Adapter"""
115
+
116
+ def __init__(self):
117
+ super().__init__()
118
+
119
+ def send(
120
+ self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
121
+ ):
122
+ """Sends PreparedRequest object. Returns Response object.
123
+
124
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
125
+ :param stream: (optional) Whether to stream the request content.
126
+ :param timeout: (optional) How long to wait for the server to send
127
+ data before giving up, as a float, or a :ref:`(connect timeout,
128
+ read timeout) <timeouts>` tuple.
129
+ :type timeout: float or tuple
130
+ :param verify: (optional) Either a boolean, in which case it controls whether we verify
131
+ the server's TLS certificate, or a string, in which case it must be a path
132
+ to a CA bundle to use
133
+ :param cert: (optional) Any user-provided SSL certificate to be trusted.
134
+ :param proxies: (optional) The proxies dictionary to apply to the request.
135
+ """
136
+ raise NotImplementedError
137
+
138
+ def close(self):
139
+ """Cleans up adapter specific items."""
140
+ raise NotImplementedError
141
+
142
+
143
+ class HTTPAdapter(BaseAdapter):
144
+ """The built-in HTTP Adapter for urllib3.
145
+
146
+ Provides a general-case interface for Requests sessions to contact HTTP and
147
+ HTTPS urls by implementing the Transport Adapter interface. This class will
148
+ usually be created by the :class:`Session <Session>` class under the
149
+ covers.
150
+
151
+ :param pool_connections: The number of urllib3 connection pools to cache.
152
+ :param pool_maxsize: The maximum number of connections to save in the pool.
153
+ :param max_retries: The maximum number of retries each connection
154
+ should attempt. Note, this applies only to failed DNS lookups, socket
155
+ connections and connection timeouts, never to requests where data has
156
+ made it to the server. By default, Requests does not retry failed
157
+ connections. If you need granular control over the conditions under
158
+ which we retry a request, import urllib3's ``Retry`` class and pass
159
+ that instead.
160
+ :param pool_block: Whether the connection pool should block for connections.
161
+
162
+ Usage::
163
+
164
+ >>> import requests
165
+ >>> s = requests.Session()
166
+ >>> a = requests.adapters.HTTPAdapter(max_retries=3)
167
+ >>> s.mount('http://', a)
168
+ """
169
+
170
+ __attrs__ = [
171
+ "max_retries",
172
+ "config",
173
+ "_pool_connections",
174
+ "_pool_maxsize",
175
+ "_pool_block",
176
+ ]
177
+
178
+ def __init__(
179
+ self,
180
+ pool_connections=DEFAULT_POOLSIZE,
181
+ pool_maxsize=DEFAULT_POOLSIZE,
182
+ max_retries=DEFAULT_RETRIES,
183
+ pool_block=DEFAULT_POOLBLOCK,
184
+ ):
185
+ if max_retries == DEFAULT_RETRIES:
186
+ self.max_retries = Retry(0, read=False)
187
+ else:
188
+ self.max_retries = Retry.from_int(max_retries)
189
+ self.config = {}
190
+ self.proxy_manager = {}
191
+
192
+ super().__init__()
193
+
194
+ self._pool_connections = pool_connections
195
+ self._pool_maxsize = pool_maxsize
196
+ self._pool_block = pool_block
197
+
198
+ self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
199
+
200
+ def __getstate__(self):
201
+ return {attr: getattr(self, attr, None) for attr in self.__attrs__}
202
+
203
+ def __setstate__(self, state):
204
+ # Can't handle by adding 'proxy_manager' to self.__attrs__ because
205
+ # self.poolmanager uses a lambda function, which isn't pickleable.
206
+ self.proxy_manager = {}
207
+ self.config = {}
208
+
209
+ for attr, value in state.items():
210
+ setattr(self, attr, value)
211
+
212
+ self.init_poolmanager(
213
+ self._pool_connections, self._pool_maxsize, block=self._pool_block
214
+ )
215
+
216
+ def init_poolmanager(
217
+ self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
218
+ ):
219
+ """Initializes a urllib3 PoolManager.
220
+
221
+ This method should not be called from user code, and is only
222
+ exposed for use when subclassing the
223
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
224
+
225
+ :param connections: The number of urllib3 connection pools to cache.
226
+ :param maxsize: The maximum number of connections to save in the pool.
227
+ :param block: Block when no free connections are available.
228
+ :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
229
+ """
230
+ # save these values for pickling
231
+ self._pool_connections = connections
232
+ self._pool_maxsize = maxsize
233
+ self._pool_block = block
234
+
235
+ self.poolmanager = PoolManager(
236
+ num_pools=connections,
237
+ maxsize=maxsize,
238
+ block=block,
239
+ **pool_kwargs,
240
+ )
241
+
242
+ def proxy_manager_for(self, proxy, **proxy_kwargs):
243
+ """Return urllib3 ProxyManager for the given proxy.
244
+
245
+ This method should not be called from user code, and is only
246
+ exposed for use when subclassing the
247
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
248
+
249
+ :param proxy: The proxy to return a urllib3 ProxyManager for.
250
+ :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
251
+ :returns: ProxyManager
252
+ :rtype: urllib3.ProxyManager
253
+ """
254
+ if proxy in self.proxy_manager:
255
+ manager = self.proxy_manager[proxy]
256
+ elif proxy.lower().startswith("socks"):
257
+ username, password = get_auth_from_url(proxy)
258
+ manager = self.proxy_manager[proxy] = SOCKSProxyManager(
259
+ proxy,
260
+ username=username,
261
+ password=password,
262
+ num_pools=self._pool_connections,
263
+ maxsize=self._pool_maxsize,
264
+ block=self._pool_block,
265
+ **proxy_kwargs,
266
+ )
267
+ else:
268
+ proxy_headers = self.proxy_headers(proxy)
269
+ manager = self.proxy_manager[proxy] = proxy_from_url(
270
+ proxy,
271
+ proxy_headers=proxy_headers,
272
+ num_pools=self._pool_connections,
273
+ maxsize=self._pool_maxsize,
274
+ block=self._pool_block,
275
+ **proxy_kwargs,
276
+ )
277
+
278
+ return manager
279
+
280
+ def cert_verify(self, conn, url, verify, cert):
281
+ """Verify a SSL certificate. This method should not be called from user
282
+ code, and is only exposed for use when subclassing the
283
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
284
+
285
+ :param conn: The urllib3 connection object associated with the cert.
286
+ :param url: The requested URL.
287
+ :param verify: Either a boolean, in which case it controls whether we verify
288
+ the server's TLS certificate, or a string, in which case it must be a path
289
+ to a CA bundle to use
290
+ :param cert: The SSL certificate to verify.
291
+ """
292
+ if url.lower().startswith("https") and verify:
293
+ cert_loc = None
294
+
295
+ # Allow self-specified cert location.
296
+ if verify is not True:
297
+ cert_loc = verify
298
+
299
+ if not cert_loc:
300
+ cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
301
+
302
+ if not cert_loc or not os.path.exists(cert_loc):
303
+ raise OSError(
304
+ f"Could not find a suitable TLS CA certificate bundle, "
305
+ f"invalid path: {cert_loc}"
306
+ )
307
+
308
+ conn.cert_reqs = "CERT_REQUIRED"
309
+
310
+ if not os.path.isdir(cert_loc):
311
+ conn.ca_certs = cert_loc
312
+ else:
313
+ conn.ca_cert_dir = cert_loc
314
+ else:
315
+ conn.cert_reqs = "CERT_NONE"
316
+ conn.ca_certs = None
317
+ conn.ca_cert_dir = None
318
+
319
+ if cert:
320
+ if not isinstance(cert, basestring):
321
+ conn.cert_file = cert[0]
322
+ conn.key_file = cert[1]
323
+ else:
324
+ conn.cert_file = cert
325
+ conn.key_file = None
326
+ if conn.cert_file and not os.path.exists(conn.cert_file):
327
+ raise OSError(
328
+ f"Could not find the TLS certificate file, "
329
+ f"invalid path: {conn.cert_file}"
330
+ )
331
+ if conn.key_file and not os.path.exists(conn.key_file):
332
+ raise OSError(
333
+ f"Could not find the TLS key file, invalid path: {conn.key_file}"
334
+ )
335
+
336
+ def build_response(self, req, resp):
337
+ """Builds a :class:`Response <requests.Response>` object from a urllib3
338
+ response. This should not be called from user code, and is only exposed
339
+ for use when subclassing the
340
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
341
+
342
+ :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
343
+ :param resp: The urllib3 response object.
344
+ :rtype: requests.Response
345
+ """
346
+ response = Response()
347
+
348
+ # Fallback to None if there's no status_code, for whatever reason.
349
+ response.status_code = getattr(resp, "status", None)
350
+
351
+ # Make headers case-insensitive.
352
+ response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))
353
+
354
+ # Set encoding.
355
+ response.encoding = get_encoding_from_headers(response.headers)
356
+ response.raw = resp
357
+ response.reason = response.raw.reason
358
+
359
+ if isinstance(req.url, bytes):
360
+ response.url = req.url.decode("utf-8")
361
+ else:
362
+ response.url = req.url
363
+
364
+ # Add new cookies from the server.
365
+ extract_cookies_to_jar(response.cookies, req, resp)
366
+
367
+ # Give the Response some context.
368
+ response.request = req
369
+ response.connection = self
370
+
371
+ return response
372
+
373
+ def build_connection_pool_key_attributes(self, request, verify, cert=None):
374
+ """Build the PoolKey attributes used by urllib3 to return a connection.
375
+
376
+ This looks at the PreparedRequest, the user-specified verify value,
377
+ and the value of the cert parameter to determine what PoolKey values
378
+ to use to select a connection from a given urllib3 Connection Pool.
379
+
380
+ The SSL related pool key arguments are not consistently set. As of
381
+ this writing, use the following to determine what keys may be in that
382
+ dictionary:
383
+
384
+ * If ``verify`` is ``True``, ``"ssl_context"`` will be set and will be the
385
+ default Requests SSL Context
386
+ * If ``verify`` is ``False``, ``"ssl_context"`` will not be set but
387
+ ``"cert_reqs"`` will be set
388
+ * If ``verify`` is a string, (i.e., it is a user-specified trust bundle)
389
+ ``"ca_certs"`` will be set if the string is not a directory recognized
390
+ by :py:func:`os.path.isdir`, otherwise ``"ca_cert_dir"`` will be
391
+ set.
392
+ * If ``"cert"`` is specified, ``"cert_file"`` will always be set. If
393
+ ``"cert"`` is a tuple with a second item, ``"key_file"`` will also
394
+ be present
395
+
396
+ To override these settings, one may subclass this class, call this
397
+ method and use the above logic to change parameters as desired. For
398
+ example, if one wishes to use a custom :py:class:`ssl.SSLContext` one
399
+ must both set ``"ssl_context"`` and based on what else they require,
400
+ alter the other keys to ensure the desired behaviour.
401
+
402
+ :param request:
403
+ The PreparedReqest being sent over the connection.
404
+ :type request:
405
+ :class:`~requests.models.PreparedRequest`
406
+ :param verify:
407
+ Either a boolean, in which case it controls whether
408
+ we verify the server's TLS certificate, or a string, in which case it
409
+ must be a path to a CA bundle to use.
410
+ :param cert:
411
+ (optional) Any user-provided SSL certificate for client
412
+ authentication (a.k.a., mTLS). This may be a string (i.e., just
413
+ the path to a file which holds both certificate and key) or a
414
+ tuple of length 2 with the certificate file path and key file
415
+ path.
416
+ :returns:
417
+ A tuple of two dictionaries. The first is the "host parameters"
418
+ portion of the Pool Key including scheme, hostname, and port. The
419
+ second is a dictionary of SSLContext related parameters.
420
+ """
421
+ return _urllib3_request_context(request, verify, cert, self.poolmanager)
422
+
423
+ def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None):
424
+ """Returns a urllib3 connection for the given request and TLS settings.
425
+ This should not be called from user code, and is only exposed for use
426
+ when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
427
+
428
+ :param request:
429
+ The :class:`PreparedRequest <PreparedRequest>` object to be sent
430
+ over the connection.
431
+ :param verify:
432
+ Either a boolean, in which case it controls whether we verify the
433
+ server's TLS certificate, or a string, in which case it must be a
434
+ path to a CA bundle to use.
435
+ :param proxies:
436
+ (optional) The proxies dictionary to apply to the request.
437
+ :param cert:
438
+ (optional) Any user-provided SSL certificate to be used for client
439
+ authentication (a.k.a., mTLS).
440
+ :rtype:
441
+ urllib3.ConnectionPool
442
+ """
443
+ proxy = select_proxy(request.url, proxies)
444
+ try:
445
+ host_params, pool_kwargs = self.build_connection_pool_key_attributes(
446
+ request,
447
+ verify,
448
+ cert,
449
+ )
450
+ except ValueError as e:
451
+ raise InvalidURL(e, request=request)
452
+ if proxy:
453
+ proxy = prepend_scheme_if_needed(proxy, "http")
454
+ proxy_url = parse_url(proxy)
455
+ if not proxy_url.host:
456
+ raise InvalidProxyURL(
457
+ "Please check proxy URL. It is malformed "
458
+ "and could be missing the host."
459
+ )
460
+ proxy_manager = self.proxy_manager_for(proxy)
461
+ conn = proxy_manager.connection_from_host(
462
+ **host_params, pool_kwargs=pool_kwargs
463
+ )
464
+ else:
465
+ # Only scheme should be lower case
466
+ conn = self.poolmanager.connection_from_host(
467
+ **host_params, pool_kwargs=pool_kwargs
468
+ )
469
+
470
+ return conn
471
+
472
+ def get_connection(self, url, proxies=None):
473
+ """DEPRECATED: Users should move to `get_connection_with_tls_context`
474
+ for all subclasses of HTTPAdapter using Requests>=2.32.2.
475
+
476
+ Returns a urllib3 connection for the given URL. This should not be
477
+ called from user code, and is only exposed for use when subclassing the
478
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
479
+
480
+ :param url: The URL to connect to.
481
+ :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
482
+ :rtype: urllib3.ConnectionPool
483
+ """
484
+ warnings.warn(
485
+ (
486
+ "`get_connection` has been deprecated in favor of "
487
+ "`get_connection_with_tls_context`. Custom HTTPAdapter subclasses "
488
+ "will need to migrate for Requests>=2.32.2. Please see "
489
+ "https://github.com/psf/requests/pull/6710 for more details."
490
+ ),
491
+ DeprecationWarning,
492
+ )
493
+ proxy = select_proxy(url, proxies)
494
+
495
+ if proxy:
496
+ proxy = prepend_scheme_if_needed(proxy, "http")
497
+ proxy_url = parse_url(proxy)
498
+ if not proxy_url.host:
499
+ raise InvalidProxyURL(
500
+ "Please check proxy URL. It is malformed "
501
+ "and could be missing the host."
502
+ )
503
+ proxy_manager = self.proxy_manager_for(proxy)
504
+ conn = proxy_manager.connection_from_url(url)
505
+ else:
506
+ # Only scheme should be lower case
507
+ parsed = urlparse(url)
508
+ url = parsed.geturl()
509
+ conn = self.poolmanager.connection_from_url(url)
510
+
511
+ return conn
512
+
513
+ def close(self):
514
+ """Disposes of any internal state.
515
+
516
+ Currently, this closes the PoolManager and any active ProxyManager,
517
+ which closes any pooled connections.
518
+ """
519
+ self.poolmanager.clear()
520
+ for proxy in self.proxy_manager.values():
521
+ proxy.clear()
522
+
523
+ def request_url(self, request, proxies):
524
+ """Obtain the url to use when making the final request.
525
+
526
+ If the message is being sent through a HTTP proxy, the full URL has to
527
+ be used. Otherwise, we should only use the path portion of the URL.
528
+
529
+ This should not be called from user code, and is only exposed for use
530
+ when subclassing the
531
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
532
+
533
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
534
+ :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
535
+ :rtype: str
536
+ """
537
+ proxy = select_proxy(request.url, proxies)
538
+ scheme = urlparse(request.url).scheme
539
+
540
+ is_proxied_http_request = proxy and scheme != "https"
541
+ using_socks_proxy = False
542
+ if proxy:
543
+ proxy_scheme = urlparse(proxy).scheme.lower()
544
+ using_socks_proxy = proxy_scheme.startswith("socks")
545
+
546
+ url = request.path_url
547
+ if url.startswith("//"): # Don't confuse urllib3
548
+ url = f"/{url.lstrip('/')}"
549
+
550
+ if is_proxied_http_request and not using_socks_proxy:
551
+ url = urldefragauth(request.url)
552
+
553
+ return url
554
+
555
+ def add_headers(self, request, **kwargs):
556
+ """Add any headers needed by the connection. As of v2.0 this does
557
+ nothing by default, but is left for overriding by users that subclass
558
+ the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
559
+
560
+ This should not be called from user code, and is only exposed for use
561
+ when subclassing the
562
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
563
+
564
+ :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
565
+ :param kwargs: The keyword arguments from the call to send().
566
+ """
567
+ pass
568
+
569
+ def proxy_headers(self, proxy):
570
+ """Returns a dictionary of the headers to add to any request sent
571
+ through a proxy. This works with urllib3 magic to ensure that they are
572
+ correctly sent to the proxy, rather than in a tunnelled request if
573
+ CONNECT is being used.
574
+
575
+ This should not be called from user code, and is only exposed for use
576
+ when subclassing the
577
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
578
+
579
+ :param proxy: The url of the proxy being used for this request.
580
+ :rtype: dict
581
+ """
582
+ headers = {}
583
+ username, password = get_auth_from_url(proxy)
584
+
585
+ if username:
586
+ headers["Proxy-Authorization"] = _basic_auth_str(username, password)
587
+
588
+ return headers
589
+
590
+ def send(
591
+ self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
592
+ ):
593
+ """Sends PreparedRequest object. Returns Response object.
594
+
595
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
596
+ :param stream: (optional) Whether to stream the request content.
597
+ :param timeout: (optional) How long to wait for the server to send
598
+ data before giving up, as a float, or a :ref:`(connect timeout,
599
+ read timeout) <timeouts>` tuple.
600
+ :type timeout: float or tuple or urllib3 Timeout object
601
+ :param verify: (optional) Either a boolean, in which case it controls whether
602
+ we verify the server's TLS certificate, or a string, in which case it
603
+ must be a path to a CA bundle to use
604
+ :param cert: (optional) Any user-provided SSL certificate to be trusted.
605
+ :param proxies: (optional) The proxies dictionary to apply to the request.
606
+ :rtype: requests.Response
607
+ """
608
+
609
+ try:
610
+ conn = self.get_connection_with_tls_context(
611
+ request, verify, proxies=proxies, cert=cert
612
+ )
613
+ except LocationValueError as e:
614
+ raise InvalidURL(e, request=request)
615
+
616
+ self.cert_verify(conn, request.url, verify, cert)
617
+ url = self.request_url(request, proxies)
618
+ self.add_headers(
619
+ request,
620
+ stream=stream,
621
+ timeout=timeout,
622
+ verify=verify,
623
+ cert=cert,
624
+ proxies=proxies,
625
+ )
626
+
627
+ chunked = not (request.body is None or "Content-Length" in request.headers)
628
+
629
+ if isinstance(timeout, tuple):
630
+ try:
631
+ connect, read = timeout
632
+ timeout = TimeoutSauce(connect=connect, read=read)
633
+ except ValueError:
634
+ raise ValueError(
635
+ f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
636
+ f"or a single float to set both timeouts to the same value."
637
+ )
638
+ elif isinstance(timeout, TimeoutSauce):
639
+ pass
640
+ else:
641
+ timeout = TimeoutSauce(connect=timeout, read=timeout)
642
+
643
+ try:
644
+ resp = conn.urlopen(
645
+ method=request.method,
646
+ url=url,
647
+ body=request.body,
648
+ headers=request.headers,
649
+ redirect=False,
650
+ assert_same_host=False,
651
+ preload_content=False,
652
+ decode_content=False,
653
+ retries=self.max_retries,
654
+ timeout=timeout,
655
+ chunked=chunked,
656
+ )
657
+
658
+ except (ProtocolError, OSError) as err:
659
+ raise ConnectionError(err, request=request)
660
+
661
+ except MaxRetryError as e:
662
+ if isinstance(e.reason, ConnectTimeoutError):
663
+ # TODO: Remove this in 3.0.0: see #2811
664
+ if not isinstance(e.reason, NewConnectionError):
665
+ raise ConnectTimeout(e, request=request)
666
+
667
+ if isinstance(e.reason, ResponseError):
668
+ raise RetryError(e, request=request)
669
+
670
+ if isinstance(e.reason, _ProxyError):
671
+ raise ProxyError(e, request=request)
672
+
673
+ if isinstance(e.reason, _SSLError):
674
+ # This branch is for urllib3 v1.22 and later.
675
+ raise SSLError(e, request=request)
676
+
677
+ raise ConnectionError(e, request=request)
678
+
679
+ except ClosedPoolError as e:
680
+ raise ConnectionError(e, request=request)
681
+
682
+ except _ProxyError as e:
683
+ raise ProxyError(e)
684
+
685
+ except (_SSLError, _HTTPError) as e:
686
+ if isinstance(e, _SSLError):
687
+ # This branch is for urllib3 versions earlier than v1.22
688
+ raise SSLError(e, request=request)
689
+ elif isinstance(e, ReadTimeoutError):
690
+ raise ReadTimeout(e, request=request)
691
+ elif isinstance(e, _InvalidHeader):
692
+ raise InvalidHeader(e, request=request)
693
+ else:
694
+ raise
695
+
696
+ return self.build_response(request, resp)
python_env/lib/site-packages/requests/api.py ADDED
@@ -0,0 +1,157 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ requests.api
3
+ ~~~~~~~~~~~~
4
+
5
+ This module implements the Requests API.
6
+
7
+ :copyright: (c) 2012 by Kenneth Reitz.
8
+ :license: Apache2, see LICENSE for more details.
9
+ """
10
+
11
+ from . import sessions
12
+
13
+
14
+ def request(method, url, **kwargs):
15
+ """Constructs and sends a :class:`Request <Request>`.
16
+
17
+ :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
18
+ :param url: URL for the new :class:`Request` object.
19
+ :param params: (optional) Dictionary, list of tuples or bytes to send
20
+ in the query string for the :class:`Request`.
21
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
22
+ object to send in the body of the :class:`Request`.
23
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
24
+ :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
25
+ :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
26
+ :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
27
+ ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
28
+ or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content_type'`` is a string
29
+ defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
30
+ to add for the file.
31
+ :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
32
+ :param timeout: (optional) How many seconds to wait for the server to send data
33
+ before giving up, as a float, or a :ref:`(connect timeout, read
34
+ timeout) <timeouts>` tuple.
35
+ :type timeout: float or tuple
36
+ :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
37
+ :type allow_redirects: bool
38
+ :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
39
+ :param verify: (optional) Either a boolean, in which case it controls whether we verify
40
+ the server's TLS certificate, or a string, in which case it must be a path
41
+ to a CA bundle to use. Defaults to ``True``.
42
+ :param stream: (optional) if ``False``, the response content will be immediately downloaded.
43
+ :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
44
+ :return: :class:`Response <Response>` object
45
+ :rtype: requests.Response
46
+
47
+ Usage::
48
+
49
+ >>> import requests
50
+ >>> req = requests.request('GET', 'https://httpbin.org/get')
51
+ >>> req
52
+ <Response [200]>
53
+ """
54
+
55
+ # By using the 'with' statement we are sure the session is closed, thus we
56
+ # avoid leaving sockets open which can trigger a ResourceWarning in some
57
+ # cases, and look like a memory leak in others.
58
+ with sessions.Session() as session:
59
+ return session.request(method=method, url=url, **kwargs)
60
+
61
+
62
+ def get(url, params=None, **kwargs):
63
+ r"""Sends a GET request.
64
+
65
+ :param url: URL for the new :class:`Request` object.
66
+ :param params: (optional) Dictionary, list of tuples or bytes to send
67
+ in the query string for the :class:`Request`.
68
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
69
+ :return: :class:`Response <Response>` object
70
+ :rtype: requests.Response
71
+ """
72
+
73
+ return request("get", url, params=params, **kwargs)
74
+
75
+
76
+ def options(url, **kwargs):
77
+ r"""Sends an OPTIONS request.
78
+
79
+ :param url: URL for the new :class:`Request` object.
80
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
81
+ :return: :class:`Response <Response>` object
82
+ :rtype: requests.Response
83
+ """
84
+
85
+ return request("options", url, **kwargs)
86
+
87
+
88
+ def head(url, **kwargs):
89
+ r"""Sends a HEAD request.
90
+
91
+ :param url: URL for the new :class:`Request` object.
92
+ :param \*\*kwargs: Optional arguments that ``request`` takes. If
93
+ `allow_redirects` is not provided, it will be set to `False` (as
94
+ opposed to the default :meth:`request` behavior).
95
+ :return: :class:`Response <Response>` object
96
+ :rtype: requests.Response
97
+ """
98
+
99
+ kwargs.setdefault("allow_redirects", False)
100
+ return request("head", url, **kwargs)
101
+
102
+
103
+ def post(url, data=None, json=None, **kwargs):
104
+ r"""Sends a POST request.
105
+
106
+ :param url: URL for the new :class:`Request` object.
107
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
108
+ object to send in the body of the :class:`Request`.
109
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
110
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
111
+ :return: :class:`Response <Response>` object
112
+ :rtype: requests.Response
113
+ """
114
+
115
+ return request("post", url, data=data, json=json, **kwargs)
116
+
117
+
118
+ def put(url, data=None, **kwargs):
119
+ r"""Sends a PUT request.
120
+
121
+ :param url: URL for the new :class:`Request` object.
122
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
123
+ object to send in the body of the :class:`Request`.
124
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
125
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
126
+ :return: :class:`Response <Response>` object
127
+ :rtype: requests.Response
128
+ """
129
+
130
+ return request("put", url, data=data, **kwargs)
131
+
132
+
133
+ def patch(url, data=None, **kwargs):
134
+ r"""Sends a PATCH request.
135
+
136
+ :param url: URL for the new :class:`Request` object.
137
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
138
+ object to send in the body of the :class:`Request`.
139
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
140
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
141
+ :return: :class:`Response <Response>` object
142
+ :rtype: requests.Response
143
+ """
144
+
145
+ return request("patch", url, data=data, **kwargs)
146
+
147
+
148
+ def delete(url, **kwargs):
149
+ r"""Sends a DELETE request.
150
+
151
+ :param url: URL for the new :class:`Request` object.
152
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
153
+ :return: :class:`Response <Response>` object
154
+ :rtype: requests.Response
155
+ """
156
+
157
+ return request("delete", url, **kwargs)
python_env/lib/site-packages/requests/auth.py ADDED
@@ -0,0 +1,314 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ requests.auth
3
+ ~~~~~~~~~~~~~
4
+
5
+ This module contains the authentication handlers for Requests.
6
+ """
7
+
8
+ import hashlib
9
+ import os
10
+ import re
11
+ import threading
12
+ import time
13
+ import warnings
14
+ from base64 import b64encode
15
+
16
+ from ._internal_utils import to_native_string
17
+ from .compat import basestring, str, urlparse
18
+ from .cookies import extract_cookies_to_jar
19
+ from .utils import parse_dict_header
20
+
21
+ CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded"
22
+ CONTENT_TYPE_MULTI_PART = "multipart/form-data"
23
+
24
+
25
def _basic_auth_str(username, password):
    """Build the value of a ``Basic`` ``Authorization`` header.

    :param username: user name; non-string values are coerced (deprecated).
    :param password: password; non-string values are coerced (deprecated).
    :rtype: str
    """
    # Historical quirk, preserved on purpose: non-string credentials
    # (e.g. ints) are coerced to str with a DeprecationWarning instead
    # of being rejected. Scheduled for removal in 3.0.0.
    if not isinstance(username, basestring):
        warnings.warn(
            "Non-string usernames will no longer be supported in Requests "
            "3.0.0. Please convert the object you've passed in ({!r}) to "
            "a string or bytes object in the near future to avoid "
            "problems.".format(username),
            category=DeprecationWarning,
        )
        username = str(username)

    if not isinstance(password, basestring):
        warnings.warn(
            "Non-string passwords will no longer be supported in Requests "
            "3.0.0. Please convert the object you've passed in ({!r}) to "
            "a string or bytes object in the near future to avoid "
            "problems.".format(type(password)),
            category=DeprecationWarning,
        )
        password = str(password)
    # -- End Removal --

    # HTTP basic auth is latin1-encoded on the wire; bytes pass through.
    credentials = [
        part.encode("latin1") if isinstance(part, str) else part
        for part in (username, password)
    ]

    token = b64encode(b":".join(credentials)).strip()
    return "Basic " + to_native_string(token)
67
+
68
+
69
class AuthBase:
    """Base class that all auth implementations derive from.

    Subclasses must override ``__call__`` to attach authentication
    information to the prepared request ``r`` and return it.
    """

    def __call__(self, r):
        raise NotImplementedError("Auth hooks must be callable.")
74
+
75
+
76
class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object."""

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __eq__(self, other):
        # Compare by credentials; getattr defaults keep this safe against
        # arbitrary objects that lack the attributes.
        return all(
            getattr(self, attr) == getattr(other, attr, None)
            for attr in ("username", "password")
        )

    def __ne__(self, other):
        return not self == other

    def __call__(self, r):
        # Inject the Basic credentials into the outgoing request.
        r.headers["Authorization"] = _basic_auth_str(self.username, self.password)
        return r
97
+
98
+
99
class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object.

    Identical to :class:`HTTPBasicAuth` except the credentials go into
    the ``Proxy-Authorization`` header instead of ``Authorization``.
    """

    def __call__(self, r):
        r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password)
        return r
105
+
106
+
107
class HTTPDigestAuth(AuthBase):
    """Attaches HTTP Digest Authentication to the given Request object.

    Implements the challenge/response flow of RFC 2617/7616: the first
    request is sent unauthenticated, the server's 401 challenge is parsed
    by :meth:`handle_401`, and the request is retried once with a
    ``Digest`` Authorization header built by :meth:`build_digest_header`.
    All mutable state lives in thread-local storage so one instance can
    be shared across threads.
    """

    def __init__(self, username, password):
        self.username = username
        self.password = password
        # Keep state in per-thread local storage
        self._thread_local = threading.local()

    def init_per_thread_state(self):
        # Ensure state is initialized just once per-thread
        if not hasattr(self._thread_local, "init"):
            self._thread_local.init = True
            self._thread_local.last_nonce = ""
            self._thread_local.nonce_count = 0
            self._thread_local.chal = {}
            self._thread_local.pos = None
            self._thread_local.num_401_calls = None

    def build_digest_header(self, method, url):
        """Build the value of the ``Authorization: Digest ...`` header
        from the previously stored challenge (``self._thread_local.chal``).

        Returns ``None`` when the challenge uses an unsupported algorithm
        or an unsupported qop (only ``auth`` is implemented).

        :rtype: str
        """

        realm = self._thread_local.chal["realm"]
        nonce = self._thread_local.chal["nonce"]
        qop = self._thread_local.chal.get("qop")
        algorithm = self._thread_local.chal.get("algorithm")
        opaque = self._thread_local.chal.get("opaque")
        hash_utf8 = None

        if algorithm is None:
            _algorithm = "MD5"
        else:
            _algorithm = algorithm.upper()
        # lambdas assume digest modules are imported at the top level
        if _algorithm == "MD5" or _algorithm == "MD5-SESS":

            def md5_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.md5(x).hexdigest()

            hash_utf8 = md5_utf8
        elif _algorithm == "SHA":

            def sha_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.sha1(x).hexdigest()

            hash_utf8 = sha_utf8
        elif _algorithm == "SHA-256":

            def sha256_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.sha256(x).hexdigest()

            hash_utf8 = sha256_utf8
        elif _algorithm == "SHA-512":

            def sha512_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.sha512(x).hexdigest()

            hash_utf8 = sha512_utf8

        # KD(secret, data) per RFC 2617: H(secret ":" data)
        KD = lambda s, d: hash_utf8(f"{s}:{d}")  # noqa:E731

        if hash_utf8 is None:
            # Unrecognised algorithm token: we cannot answer the challenge.
            return None

        # XXX not implemented yet
        entdig = None
        p_parsed = urlparse(url)
        #: path is request-uri defined in RFC 2616 which should not be empty
        path = p_parsed.path or "/"
        if p_parsed.query:
            path += f"?{p_parsed.query}"

        A1 = f"{self.username}:{realm}:{self.password}"
        A2 = f"{method}:{path}"

        HA1 = hash_utf8(A1)
        HA2 = hash_utf8(A2)

        # nc (nonce count) must increase for each request that reuses a nonce.
        if nonce == self._thread_local.last_nonce:
            self._thread_local.nonce_count += 1
        else:
            self._thread_local.nonce_count = 1
        ncvalue = f"{self._thread_local.nonce_count:08x}"
        # Client nonce: hash of counter + server nonce + time + random bytes.
        s = str(self._thread_local.nonce_count).encode("utf-8")
        s += nonce.encode("utf-8")
        s += time.ctime().encode("utf-8")
        s += os.urandom(8)

        cnonce = hashlib.sha1(s).hexdigest()[:16]
        if _algorithm == "MD5-SESS":
            HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}")

        if not qop:
            # RFC 2069 compatibility mode (no qop directive).
            respdig = KD(HA1, f"{nonce}:{HA2}")
        elif qop == "auth" or "auth" in qop.split(","):
            noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}"
            respdig = KD(HA1, noncebit)
        else:
            # XXX handle auth-int.
            return None

        self._thread_local.last_nonce = nonce

        # XXX should the partial digests be encoded too?
        base = (
            f'username="{self.username}", realm="{realm}", nonce="{nonce}", '
            f'uri="{path}", response="{respdig}"'
        )
        if opaque:
            base += f', opaque="{opaque}"'
        if algorithm:
            base += f', algorithm="{algorithm}"'
        if entdig:
            base += f', digest="{entdig}"'
        if qop:
            base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"'

        return f"Digest {base}"

    def handle_redirect(self, r, **kwargs):
        """Reset num_401_calls counter on redirects."""
        if r.is_redirect:
            self._thread_local.num_401_calls = 1

    def handle_401(self, r, **kwargs):
        """
        Takes the given response and tries digest-auth, if needed.

        Registered as a response hook; re-sends the request once with a
        Digest Authorization header when the server issued a digest
        challenge, otherwise returns the response unchanged.

        :rtype: requests.Response
        """

        # If response is not 4xx, do not auth
        # See https://github.com/psf/requests/issues/3772
        if not 400 <= r.status_code < 500:
            self._thread_local.num_401_calls = 1
            return r

        if self._thread_local.pos is not None:
            # Rewind the file position indicator of the body to where
            # it was to resend the request.
            r.request.body.seek(self._thread_local.pos)
        s_auth = r.headers.get("www-authenticate", "")

        # num_401_calls < 2 limits us to a single authenticated retry.
        if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2:
            self._thread_local.num_401_calls += 1
            pat = re.compile(r"digest ", flags=re.IGNORECASE)
            self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1))

            # Consume content and release the original connection
            # to allow our new request to reuse the same one.
            r.content
            r.close()
            prep = r.request.copy()
            extract_cookies_to_jar(prep._cookies, r.request, r.raw)
            prep.prepare_cookies(prep._cookies)

            prep.headers["Authorization"] = self.build_digest_header(
                prep.method, prep.url
            )
            _r = r.connection.send(prep, **kwargs)
            _r.history.append(r)
            _r.request = prep

            return _r

        self._thread_local.num_401_calls = 1
        return r

    def __call__(self, r):
        # Initialize per-thread state, if needed
        self.init_per_thread_state()
        # If we have a saved nonce, skip the 401
        if self._thread_local.last_nonce:
            r.headers["Authorization"] = self.build_digest_header(r.method, r.url)
        try:
            # Remember the body position so handle_401 can rewind and resend.
            self._thread_local.pos = r.body.tell()
        except AttributeError:
            # In the case of HTTPDigestAuth being reused and the body of
            # the previous request was a file-like object, pos has the
            # file position of the previous body. Ensure it's set to
            # None.
            self._thread_local.pos = None
        r.register_hook("response", self.handle_401)
        r.register_hook("response", self.handle_redirect)
        self._thread_local.num_401_calls = 1

        return r

    def __eq__(self, other):
        # Two digest-auth handlers are equal when their credentials match.
        return all(
            [
                self.username == getattr(other, "username", None),
                self.password == getattr(other, "password", None),
            ]
        )

    def __ne__(self, other):
        return not self == other
python_env/lib/site-packages/requests/certs.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+
3
+ """
4
+ requests.certs
5
+ ~~~~~~~~~~~~~~
6
+
7
+ This module returns the preferred default CA certificate bundle. There is
8
+ only one — the one from the certifi package.
9
+
10
+ If you are packaging Requests, e.g., for a Linux distribution or a managed
11
+ environment, you can change the definition of where() to return a separately
12
+ packaged CA bundle.
13
+ """
14
+ from certifi import where
15
+
16
+ if __name__ == "__main__":
17
+ print(where())
python_env/lib/site-packages/requests/compat.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ requests.compat
3
+ ~~~~~~~~~~~~~~~
4
+
5
+ This module previously handled import compatibility issues
6
+ between Python 2 and Python 3. It remains for backwards
7
+ compatibility until the next major version.
8
+ """
9
+
10
+ import importlib
11
+ import sys
12
+
13
+ # -------
14
+ # urllib3
15
+ # -------
16
+ from urllib3 import __version__ as urllib3_version
17
+
18
+ # Detect which major version of urllib3 is being used.
19
+ try:
20
+ is_urllib3_1 = int(urllib3_version.split(".")[0]) == 1
21
+ except (TypeError, AttributeError):
22
+ # If we can't discern a version, prefer old functionality.
23
+ is_urllib3_1 = True
24
+
25
+ # -------------------
26
+ # Character Detection
27
+ # -------------------
28
+
29
+
30
+ def _resolve_char_detection():
31
+ """Find supported character detection libraries."""
32
+ chardet = None
33
+ for lib in ("chardet", "charset_normalizer"):
34
+ if chardet is None:
35
+ try:
36
+ chardet = importlib.import_module(lib)
37
+ except ImportError:
38
+ pass
39
+ return chardet
40
+
41
+
42
+ chardet = _resolve_char_detection()
43
+
44
+ # -------
45
+ # Pythons
46
+ # -------
47
+
48
+ # Syntax sugar.
49
+ _ver = sys.version_info
50
+
51
+ #: Python 2.x?
52
+ is_py2 = _ver[0] == 2
53
+
54
+ #: Python 3.x?
55
+ is_py3 = _ver[0] == 3
56
+
57
+ # json/simplejson module import resolution
58
+ has_simplejson = False
59
+ try:
60
+ import simplejson as json
61
+
62
+ has_simplejson = True
63
+ except ImportError:
64
+ import json
65
+
66
+ if has_simplejson:
67
+ from simplejson import JSONDecodeError
68
+ else:
69
+ from json import JSONDecodeError
70
+
71
+ # Keep OrderedDict for backwards compatibility.
72
+ from collections import OrderedDict
73
+ from collections.abc import Callable, Mapping, MutableMapping
74
+ from http import cookiejar as cookielib
75
+ from http.cookies import Morsel
76
+ from io import StringIO
77
+
78
+ # --------------
79
+ # Legacy Imports
80
+ # --------------
81
+ from urllib.parse import (
82
+ quote,
83
+ quote_plus,
84
+ unquote,
85
+ unquote_plus,
86
+ urldefrag,
87
+ urlencode,
88
+ urljoin,
89
+ urlparse,
90
+ urlsplit,
91
+ urlunparse,
92
+ )
93
+ from urllib.request import (
94
+ getproxies,
95
+ getproxies_environment,
96
+ parse_http_list,
97
+ proxy_bypass,
98
+ proxy_bypass_environment,
99
+ )
100
+
101
+ builtin_str = str
102
+ str = str
103
+ bytes = bytes
104
+ basestring = (str, bytes)
105
+ numeric_types = (int, float)
106
+ integer_types = (int,)
python_env/lib/site-packages/requests/cookies.py ADDED
@@ -0,0 +1,561 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ requests.cookies
3
+ ~~~~~~~~~~~~~~~~
4
+
5
+ Compatibility code to be able to use `http.cookiejar.CookieJar` with requests.
6
+
7
+ requests.utils imports from here, so be careful with imports.
8
+ """
9
+
10
+ import calendar
11
+ import copy
12
+ import time
13
+
14
+ from ._internal_utils import to_native_string
15
+ from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse
16
+
17
+ try:
18
+ import threading
19
+ except ImportError:
20
+ import dummy_threading as threading
21
+
22
+
23
class MockRequest:
    """Adapter exposing a `requests.Request` through the `urllib2.Request`
    interface that `http.cookiejar.CookieJar` expects.

    The cookiejar code uses this interface to apply cookie policies, i.e.
    to decide whether a cookie may be set or sent for the request's domain.

    The wrapped request is treated as read-only: headers the cookiejar
    wants to add are collected separately and retrieved via
    `get_new_headers()` (see `get_cookie_header` below).
    """

    def __init__(self, request):
        self._request = request
        self._new_headers = {}
        self.type = urlparse(self._request.url).scheme

    def get_type(self):
        return self.type

    def get_host(self):
        return urlparse(self._request.url).netloc

    def get_origin_req_host(self):
        return self.get_host()

    def get_full_url(self):
        # Without an explicit Host header the request URL is authoritative.
        host_header = self._request.headers.get("Host")
        if not host_header:
            return self._request.url
        # Otherwise rebuild the URL around the user-supplied Host.
        host = to_native_string(host_header, encoding="utf-8")
        parsed = urlparse(self._request.url)
        return urlunparse(
            (
                parsed.scheme,
                host,
                parsed.path,
                parsed.params,
                parsed.query,
                parsed.fragment,
            )
        )

    def is_unverifiable(self):
        return True

    def has_header(self, name):
        return name in self._request.headers or name in self._new_headers

    def get_header(self, name, default=None):
        return self._request.headers.get(name, self._new_headers.get(name, default))

    def add_header(self, key, val):
        """cookiejar has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError(
            "Cookie headers should be added with add_unredirected_header()"
        )

    def add_unredirected_header(self, name, value):
        self._new_headers[name] = value

    def get_new_headers(self):
        return self._new_headers

    @property
    def unverifiable(self):
        return self.is_unverifiable()

    @property
    def origin_req_host(self):
        return self.get_origin_req_host()

    @property
    def host(self):
        return self.get_host()
101
+
102
+
103
class MockResponse:
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server
    response the way `http.cookiejar` expects to see them.
    """

    def __init__(self, headers):
        """Make a MockResponse for `cookiejar` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers

    def info(self):
        return self._headers

    def getheaders(self, name):
        # Bug fix: the original called getheaders() but dropped the result,
        # always returning None. Propagate the underlying value instead.
        return self._headers.getheaders(name)
122
+
123
+
124
def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.

    :param jar: http.cookiejar.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    # _original_response is the wrapped httplib.HTTPResponse; without it
    # there are no parsed headers to harvest cookies from.
    original = getattr(response, "_original_response", None)
    if not original:
        return
    req = MockRequest(request)
    # Hand the HTTPMessage with the headers to the mock response:
    res = MockResponse(original.msg)
    jar.extract_cookies(res, req)
138
+
139
+
140
def get_cookie_header(jar, request):
    """
    Produce an appropriate Cookie header string to be sent with `request`,
    or None when the jar has nothing applicable.

    :rtype: str
    """
    mock = MockRequest(request)
    jar.add_cookie_header(mock)
    return mock.get_new_headers().get("Cookie")
149
+
150
+
151
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    """
    # Collect matches first: clearing while iterating would mutate the jar
    # under the iterator.
    clearables = [
        (cookie.domain, cookie.path, cookie.name)
        for cookie in cookiejar
        if cookie.name == name
        and (domain is None or domain == cookie.domain)
        and (path is None or path == cookie.path)
    ]

    for cookie_domain, cookie_path, cookie_name in clearables:
        cookiejar.clear(cookie_domain, cookie_path, cookie_name)
168
+
169
+
170
class CookieConflictError(RuntimeError):
    """Raised when two cookies in the jar match the lookup criteria.

    Use .get and .set and include domain and path args in order to be
    more specific.
    """
174
+
175
+
176
class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
    """Compatibility class; is a http.cookiejar.CookieJar, but exposes a dict
    interface.

    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.

    Requests does not use the dict interface internally; it's just for
    compatibility with external client code. All requests code should work
    out of the box with externally provided instances of ``CookieJar``, e.g.
    ``LWPCookieJar`` and ``FileCookieJar``.

    Unlike a regular CookieJar, this class is pickleable.

    .. warning:: dictionary operations that are normally O(1) may be O(n).
    """

    def get(self, name, default=None, domain=None, path=None):
        """Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.

        .. warning:: operation is O(n), not O(1).
        """
        try:
            return self._find_no_duplicates(name, domain, path)
        except KeyError:
            return default

    def set(self, name, value, **kwargs):
        """Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.
        """
        # support client code that unsets cookies by assignment of a None value:
        if value is None:
            remove_cookie_by_name(
                self, name, domain=kwargs.get("domain"), path=kwargs.get("path")
            )
            return

        if isinstance(value, Morsel):
            c = morsel_to_cookie(value)
        else:
            c = create_cookie(name, value, **kwargs)
        self.set_cookie(c)
        return c

    def iterkeys(self):
        """Dict-like iterkeys() that returns an iterator of names of cookies
        from the jar.

        .. seealso:: itervalues() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.name

    def keys(self):
        """Dict-like keys() that returns a list of names of cookies from the
        jar.

        .. seealso:: values() and items().
        """
        return list(self.iterkeys())

    def itervalues(self):
        """Dict-like itervalues() that returns an iterator of values of cookies
        from the jar.

        .. seealso:: iterkeys() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.value

    def values(self):
        """Dict-like values() that returns a list of values of cookies from the
        jar.

        .. seealso:: keys() and items().
        """
        return list(self.itervalues())

    def iteritems(self):
        """Dict-like iteritems() that returns an iterator of name-value tuples
        from the jar.

        .. seealso:: iterkeys() and itervalues().
        """
        for cookie in iter(self):
            yield cookie.name, cookie.value

    def items(self):
        """Dict-like items() that returns a list of name-value tuples from the
        jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
        vanilla python dict of key value pairs.

        .. seealso:: keys() and values().
        """
        return list(self.iteritems())

    def list_domains(self):
        """Utility method to list all the domains in the jar."""
        domains = []
        for cookie in iter(self):
            if cookie.domain not in domains:
                domains.append(cookie.domain)
        return domains

    def list_paths(self):
        """Utility method to list all the paths in the jar."""
        paths = []
        for cookie in iter(self):
            if cookie.path not in paths:
                paths.append(cookie.path)
        return paths

    def multiple_domains(self):
        """Returns True if there are multiple domains in the jar.
        Returns False otherwise.

        :rtype: bool
        """
        domains = []
        for cookie in iter(self):
            if cookie.domain is not None and cookie.domain in domains:
                return True
            domains.append(cookie.domain)
        return False  # there is only one domain in jar

    def get_dict(self, domain=None, path=None):
        """Takes as an argument an optional domain and path and returns a plain
        old Python dict of name-value pairs of cookies that meet the
        requirements.

        :rtype: dict
        """
        dictionary = {}
        for cookie in iter(self):
            if (domain is None or cookie.domain == domain) and (
                path is None or cookie.path == path
            ):
                dictionary[cookie.name] = cookie.value
        return dictionary

    def __contains__(self, name):
        # A conflict still means the name exists (more than once).
        try:
            return super().__contains__(name)
        except CookieConflictError:
            return True

    def __getitem__(self, name):
        """Dict-like __getitem__() for compatibility with client code. Throws
        exception if there are more than one cookie with name. In that case,
        use the more explicit get() method instead.

        .. warning:: operation is O(n), not O(1).
        """
        return self._find_no_duplicates(name)

    def __setitem__(self, name, value):
        """Dict-like __setitem__ for compatibility with client code. Throws
        exception if there is already a cookie of that name in the jar. In that
        case, use the more explicit set() method instead.
        """
        self.set(name, value)

    def __delitem__(self, name):
        """Deletes a cookie given a name. Wraps ``http.cookiejar.CookieJar``'s
        ``remove_cookie_by_name()``.
        """
        remove_cookie_by_name(self, name)

    def set_cookie(self, cookie, *args, **kwargs):
        # Strip escaped quotes from quoted cookie values before storing;
        # servers sometimes send values like "\"abc\"".
        if (
            hasattr(cookie.value, "startswith")
            and cookie.value.startswith('"')
            and cookie.value.endswith('"')
        ):
            cookie.value = cookie.value.replace('\\"', "")
        return super().set_cookie(cookie, *args, **kwargs)

    def update(self, other):
        """Updates this jar with cookies from another CookieJar or dict-like"""
        if isinstance(other, cookielib.CookieJar):
            for cookie in other:
                self.set_cookie(copy.copy(cookie))
        else:
            super().update(other)

    def _find(self, name, domain=None, path=None):
        """Requests uses this method internally to get cookie values.

        If there are conflicting cookies, _find arbitrarily chooses one.
        See _find_no_duplicates if you want an exception thrown if there are
        conflicting cookies.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :return: cookie.value
        """
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        return cookie.value

        raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")

    def _find_no_duplicates(self, name, domain=None, path=None):
        """Both ``__get_item__`` and ``get`` call this function: it's never
        used elsewhere in Requests.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :raises KeyError: if cookie is not found
        :raises CookieConflictError: if there are multiple cookies
            that match name and optionally domain and path
        :return: cookie.value
        """
        toReturn = None
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if toReturn is not None:
                            # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError(
                                f"There are multiple cookies with name, {name!r}"
                            )
                        # we will eventually return this as long as no cookie conflict
                        toReturn = cookie.value

        # NOTE(review): truthiness check — a cookie whose value is "" falls
        # through to KeyError; long-standing behavior, preserved on purpose.
        if toReturn:
            return toReturn
        raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")

    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop("_cookies_lock")
        return state

    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if "_cookies_lock" not in self.__dict__:
            self._cookies_lock = threading.RLock()

    def copy(self):
        """Return a copy of this RequestsCookieJar."""
        new_cj = RequestsCookieJar()
        new_cj.set_policy(self.get_policy())
        new_cj.update(self)
        return new_cj

    def get_policy(self):
        """Return the CookiePolicy instance used."""
        return self._policy
438
+
439
+
440
+ def _copy_cookie_jar(jar):
441
+ if jar is None:
442
+ return None
443
+
444
+ if hasattr(jar, "copy"):
445
+ # We're dealing with an instance of RequestsCookieJar
446
+ return jar.copy()
447
+ # We're dealing with a generic CookieJar instance
448
+ new_jar = copy.copy(jar)
449
+ new_jar.clear()
450
+ for cookie in jar:
451
+ new_jar.set_cookie(copy.copy(cookie))
452
+ return new_jar
453
+
454
+
455
def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").

    :raises TypeError: on keyword arguments ``http.cookiejar.Cookie``
        does not recognise.
    """
    # Defaults for every constructor field the caller may override.
    defaults = {
        "version": 0,
        "name": name,
        "value": value,
        "port": None,
        "domain": "",
        "path": "/",
        "secure": False,
        "expires": None,
        "discard": True,
        "comment": None,
        "comment_url": None,
        "rest": {"HttpOnly": None},
        "rfc2109": False,
    }

    unknown = set(kwargs) - set(defaults)
    if unknown:
        raise TypeError(
            f"create_cookie() got unexpected keyword arguments: {list(unknown)}"
        )

    defaults.update(kwargs)
    # The *_specified flags are derived, never caller-supplied.
    defaults["port_specified"] = bool(defaults["port"])
    defaults["domain_specified"] = bool(defaults["domain"])
    defaults["domain_initial_dot"] = defaults["domain"].startswith(".")
    defaults["path_specified"] = bool(defaults["path"])

    return cookielib.Cookie(**defaults)
490
+
491
+
492
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair.

    ``max-age`` wins over ``expires`` when both are present; a non-integer
    ``max-age`` raises TypeError.
    """
    expires = None
    if morsel["max-age"]:
        try:
            expires = int(time.time() + int(morsel["max-age"]))
        except ValueError:
            raise TypeError(f"max-age: {morsel['max-age']} must be integer")
    elif morsel["expires"]:
        fmt = "%a, %d-%b-%Y %H:%M:%S GMT"
        expires = calendar.timegm(time.strptime(morsel["expires"], fmt))

    return create_cookie(
        comment=morsel["comment"],
        comment_url=bool(morsel["comment"]),
        discard=False,
        domain=morsel["domain"],
        expires=expires,
        name=morsel.key,
        path=morsel["path"],
        port=None,
        rest={"HttpOnly": morsel["httponly"]},
        rfc2109=False,
        secure=bool(morsel["secure"]),
        value=morsel.value,
        version=morsel["version"] or 0,
    )
519
+
520
+
521
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    :rtype: CookieJar
    """
    if cookiejar is None:
        cookiejar = RequestsCookieJar()

    if cookie_dict is not None:
        # Snapshot existing names once so membership tests are O(1).
        existing = {cookie.name for cookie in cookiejar}
        for name in cookie_dict:
            if overwrite or name not in existing:
                cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))

    return cookiejar
540
+
541
+
542
def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    :raises ValueError: when *cookiejar* is not a CookieJar.
    :rtype: CookieJar
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError("You can only merge into CookieJar")

    if isinstance(cookies, dict):
        return cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False)

    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain CookieJar has no update(); copy cookie by cookie.
            for cookie_in_jar in cookies:
                cookiejar.set_cookie(cookie_in_jar)

    return cookiejar
python_env/lib/site-packages/requests/exceptions.py ADDED
@@ -0,0 +1,151 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ requests.exceptions
3
+ ~~~~~~~~~~~~~~~~~~~
4
+
5
+ This module contains the set of Requests' exceptions.
6
+ """
7
+ from urllib3.exceptions import HTTPError as BaseHTTPError
8
+
9
+ from .compat import JSONDecodeError as CompatJSONDecodeError
10
+
11
+
12
class RequestException(IOError):
    """There was an ambiguous exception that occurred while handling your
    request.
    """

    def __init__(self, *args, **kwargs):
        """Initialize RequestException, attaching `request`/`response` objects."""
        self.response = kwargs.pop("response", None)
        self.request = kwargs.pop("request", None)
        # If no (truthy) request was supplied, borrow the one attached to
        # the response, when the response carries one.
        if self.response is not None and not self.request:
            if hasattr(self.response, "request"):
                self.request = self.response.request
        super().__init__(*args, **kwargs)
25
+
26
+
27
class InvalidJSONError(RequestException):
    """An error occurred while handling JSON."""
29
+
30
+
31
class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
    """Couldn't decode the text into json"""

    def __init__(self, *args, **kwargs):
        """
        Construct the JSONDecodeError instance first with all
        args. Then use its args to construct the IOError so that
        the json specific args aren't used as IOError specific args
        and the error message from JSONDecodeError is preserved.
        """
        # Order matters: the json decoder's __init__ builds the full
        # (msg, doc, pos) args tuple, which is then fed to the IOError side.
        CompatJSONDecodeError.__init__(self, *args)
        InvalidJSONError.__init__(self, *self.args, **kwargs)

    def __reduce__(self):
        """
        The __reduce__ method called when pickling the object must
        be the one from the JSONDecodeError (be it json/simplejson)
        as it expects all the arguments for instantiation, not just
        one like the IOError, and the MRO would by default call the
        __reduce__ method from the IOError due to the inheritance order.
        """
        return CompatJSONDecodeError.__reduce__(self)
53
+
54
+
55
class HTTPError(RequestException):
    """An error occurred at the HTTP level."""
57
+
58
+
59
class ConnectionError(RequestException):
    """A network connection error occurred."""
61
+
62
+
63
class ProxyError(ConnectionError):
    """An error occurred while connecting through a proxy."""
65
+
66
+
67
class SSLError(ConnectionError):
    """An SSL error occurred during the connection."""
69
+
70
+
71
class Timeout(RequestException):
    """The request timed out.

    This is the common parent of
    :exc:`~requests.exceptions.ConnectTimeout` and
    :exc:`~requests.exceptions.ReadTimeout`, so catching it covers both.
    """
78
+
79
+
80
class ConnectTimeout(ConnectionError, Timeout):
    """The request timed out while attempting to connect to the remote server.

    Requests that raised this error never reached the server, so they
    are safe to retry.
    """
85
+
86
+
87
class ReadTimeout(Timeout):
    """No data was received from the server within the allotted amount of time."""
89
+
90
+
91
class URLRequired(RequestException):
    """A request was attempted without a valid URL."""
93
+
94
+
95
class TooManyRedirects(RequestException):
    """The request followed too many redirects."""
97
+
98
+
99
class MissingSchema(RequestException, ValueError):
    """The URL lacks a scheme (e.g. http or https)."""
101
+
102
+
103
class InvalidSchema(RequestException, ValueError):
    """The URL scheme is either invalid or unsupported."""
105
+
106
+
107
class InvalidURL(RequestException, ValueError):
    """The supplied URL was somehow invalid."""
109
+
110
+
111
class InvalidHeader(RequestException, ValueError):
    """A supplied header value was somehow invalid."""
113
+
114
+
115
class InvalidProxyURL(InvalidURL):
    """The supplied proxy URL is invalid."""
117
+
118
+
119
class ChunkedEncodingError(RequestException):
    """The server declared chunked encoding but delivered an invalid chunk."""
121
+
122
+
123
class ContentDecodingError(RequestException, BaseHTTPError):
    """The response content could not be decoded."""
125
+
126
+
127
class StreamConsumedError(RequestException, TypeError):
    """The content of this response has already been consumed."""
129
+
130
+
131
class RetryError(RequestException):
    """The custom retry logic failed."""
133
+
134
+
135
class UnrewindableBodyError(RequestException):
    """An error occurred while trying to rewind a request body."""
137
+
138
+
139
+ # Warnings
140
+
141
+
142
class RequestsWarning(Warning):
    """Base class for warnings issued by Requests."""
144
+
145
+
146
class FileModeWarning(RequestsWarning, DeprecationWarning):
    """A file was opened in text mode, but its binary length was determined."""
148
+
149
+
150
class RequestsDependencyWarning(RequestsWarning):
    """An imported dependency's version falls outside the expected range."""