koichi12 commited on
Commit
f2e88b6
·
verified ·
1 Parent(s): b17ecb6

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .venv/lib/python3.11/site-packages/astor/VERSION +1 -0
  2. .venv/lib/python3.11/site-packages/astor/__init__.py +78 -0
  3. .venv/lib/python3.11/site-packages/astor/__pycache__/__init__.cpython-311.pyc +0 -0
  4. .venv/lib/python3.11/site-packages/astor/__pycache__/code_gen.cpython-311.pyc +0 -0
  5. .venv/lib/python3.11/site-packages/astor/__pycache__/codegen.cpython-311.pyc +0 -0
  6. .venv/lib/python3.11/site-packages/astor/__pycache__/file_util.cpython-311.pyc +0 -0
  7. .venv/lib/python3.11/site-packages/astor/__pycache__/node_util.cpython-311.pyc +0 -0
  8. .venv/lib/python3.11/site-packages/astor/__pycache__/op_util.cpython-311.pyc +0 -0
  9. .venv/lib/python3.11/site-packages/astor/__pycache__/rtrip.cpython-311.pyc +0 -0
  10. .venv/lib/python3.11/site-packages/astor/__pycache__/source_repr.cpython-311.pyc +0 -0
  11. .venv/lib/python3.11/site-packages/astor/__pycache__/string_repr.cpython-311.pyc +0 -0
  12. .venv/lib/python3.11/site-packages/astor/__pycache__/tree_walk.cpython-311.pyc +0 -0
  13. .venv/lib/python3.11/site-packages/astor/code_gen.py +901 -0
  14. .venv/lib/python3.11/site-packages/astor/codegen.py +11 -0
  15. .venv/lib/python3.11/site-packages/astor/file_util.py +111 -0
  16. .venv/lib/python3.11/site-packages/astor/node_util.py +208 -0
  17. .venv/lib/python3.11/site-packages/astor/op_util.py +110 -0
  18. .venv/lib/python3.11/site-packages/astor/rtrip.py +209 -0
  19. .venv/lib/python3.11/site-packages/astor/source_repr.py +273 -0
  20. .venv/lib/python3.11/site-packages/astor/string_repr.py +112 -0
  21. .venv/lib/python3.11/site-packages/astor/tree_walk.py +179 -0
  22. .venv/lib/python3.11/site-packages/fastapi/__init__.py +25 -0
  23. .venv/lib/python3.11/site-packages/fastapi/__main__.py +3 -0
  24. .venv/lib/python3.11/site-packages/fastapi/__pycache__/__main__.cpython-311.pyc +0 -0
  25. .venv/lib/python3.11/site-packages/fastapi/__pycache__/applications.cpython-311.pyc +0 -0
  26. .venv/lib/python3.11/site-packages/fastapi/__pycache__/background.cpython-311.pyc +0 -0
  27. .venv/lib/python3.11/site-packages/fastapi/__pycache__/datastructures.cpython-311.pyc +0 -0
  28. .venv/lib/python3.11/site-packages/fastapi/__pycache__/logger.cpython-311.pyc +0 -0
  29. .venv/lib/python3.11/site-packages/fastapi/__pycache__/param_functions.cpython-311.pyc +0 -0
  30. .venv/lib/python3.11/site-packages/fastapi/__pycache__/requests.cpython-311.pyc +0 -0
  31. .venv/lib/python3.11/site-packages/fastapi/__pycache__/templating.cpython-311.pyc +0 -0
  32. .venv/lib/python3.11/site-packages/fastapi/__pycache__/testclient.cpython-311.pyc +0 -0
  33. .venv/lib/python3.11/site-packages/fastapi/__pycache__/websockets.cpython-311.pyc +0 -0
  34. .venv/lib/python3.11/site-packages/fastapi/_compat.py +659 -0
  35. .venv/lib/python3.11/site-packages/fastapi/applications.py +0 -0
  36. .venv/lib/python3.11/site-packages/fastapi/background.py +59 -0
  37. .venv/lib/python3.11/site-packages/fastapi/cli.py +13 -0
  38. .venv/lib/python3.11/site-packages/fastapi/concurrency.py +39 -0
  39. .venv/lib/python3.11/site-packages/fastapi/datastructures.py +204 -0
  40. .venv/lib/python3.11/site-packages/fastapi/encoders.py +343 -0
  41. .venv/lib/python3.11/site-packages/fastapi/exception_handlers.py +34 -0
  42. .venv/lib/python3.11/site-packages/fastapi/exceptions.py +176 -0
  43. .venv/lib/python3.11/site-packages/fastapi/logger.py +3 -0
  44. .venv/lib/python3.11/site-packages/fastapi/middleware/__init__.py +1 -0
  45. .venv/lib/python3.11/site-packages/fastapi/middleware/__pycache__/__init__.cpython-311.pyc +0 -0
  46. .venv/lib/python3.11/site-packages/fastapi/middleware/__pycache__/cors.cpython-311.pyc +0 -0
  47. .venv/lib/python3.11/site-packages/fastapi/middleware/__pycache__/gzip.cpython-311.pyc +0 -0
  48. .venv/lib/python3.11/site-packages/fastapi/middleware/__pycache__/httpsredirect.cpython-311.pyc +0 -0
  49. .venv/lib/python3.11/site-packages/fastapi/middleware/__pycache__/trustedhost.cpython-311.pyc +0 -0
  50. .venv/lib/python3.11/site-packages/fastapi/middleware/__pycache__/wsgi.cpython-311.pyc +0 -0
.venv/lib/python3.11/site-packages/astor/VERSION ADDED
@@ -0,0 +1 @@
 
 
1
+ 0.8.1
.venv/lib/python3.11/site-packages/astor/__init__.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Part of the astor library for Python AST manipulation.
4
+
5
+ License: 3-clause BSD
6
+
7
+ Copyright 2012 (c) Patrick Maupin
8
+ Copyright 2013 (c) Berker Peksag
9
+
10
+ """
11
+
12
+ import os
13
+ import warnings
14
+
15
+ from .code_gen import SourceGenerator, to_source # NOQA
16
+ from .node_util import iter_node, strip_tree, dump_tree # NOQA
17
+ from .node_util import ExplicitNodeVisitor # NOQA
18
+ from .file_util import CodeToAst, code_to_ast # NOQA
19
+ from .op_util import get_op_symbol, get_op_precedence # NOQA
20
+ from .op_util import symbol_data # NOQA
21
+ from .tree_walk import TreeWalk # NOQA
22
+
23
+ ROOT = os.path.dirname(__file__)
24
+ with open(os.path.join(ROOT, 'VERSION')) as version_file:
25
+ __version__ = version_file.read().strip()
26
+
27
+ parse_file = code_to_ast.parse_file
28
+
29
+ # DEPRECATED!!!
30
+ # These aliases support old programs. Please do not use in future.
31
+
32
+ deprecated = """
33
+ get_boolop = get_binop = get_cmpop = get_unaryop = get_op_symbol
34
+ get_anyop = get_op_symbol
35
+ parsefile = code_to_ast.parse_file
36
+ codetoast = code_to_ast
37
+ dump = dump_tree
38
+ all_symbols = symbol_data
39
+ treewalk = tree_walk
40
+ codegen = code_gen
41
+ """
42
+
43
+ exec(deprecated)
44
+
45
+
46
+ def deprecate():
47
+ def wrap(deprecated_name, target_name):
48
+ if '.' in target_name:
49
+ target_mod, target_fname = target_name.split('.')
50
+ target_func = getattr(globals()[target_mod], target_fname)
51
+ else:
52
+ target_func = globals()[target_name]
53
+ msg = "astor.%s is deprecated. Please use astor.%s." % (
54
+ deprecated_name, target_name)
55
+ if callable(target_func):
56
+ def newfunc(*args, **kwarg):
57
+ warnings.warn(msg, DeprecationWarning, stacklevel=2)
58
+ return target_func(*args, **kwarg)
59
+ else:
60
+ class ModProxy:
61
+ def __getattr__(self, name):
62
+ warnings.warn(msg, DeprecationWarning, stacklevel=2)
63
+ return getattr(target_func, name)
64
+ newfunc = ModProxy()
65
+
66
+ globals()[deprecated_name] = newfunc
67
+
68
+ for line in deprecated.splitlines(): # NOQA
69
+ line = line.split('#')[0].replace('=', '').split()
70
+ if line:
71
+ target_name = line.pop()
72
+ for deprecated_name in line:
73
+ wrap(deprecated_name, target_name)
74
+
75
+
76
+ deprecate()
77
+
78
+ del deprecate, deprecated
.venv/lib/python3.11/site-packages/astor/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (4.16 kB). View file
 
.venv/lib/python3.11/site-packages/astor/__pycache__/code_gen.cpython-311.pyc ADDED
Binary file (57 kB). View file
 
.venv/lib/python3.11/site-packages/astor/__pycache__/codegen.cpython-311.pyc ADDED
Binary file (451 Bytes). View file
 
.venv/lib/python3.11/site-packages/astor/__pycache__/file_util.cpython-311.pyc ADDED
Binary file (5.35 kB). View file
 
.venv/lib/python3.11/site-packages/astor/__pycache__/node_util.cpython-311.pyc ADDED
Binary file (11 kB). View file
 
.venv/lib/python3.11/site-packages/astor/__pycache__/op_util.cpython-311.pyc ADDED
Binary file (5.47 kB). View file
 
.venv/lib/python3.11/site-packages/astor/__pycache__/rtrip.cpython-311.pyc ADDED
Binary file (9.88 kB). View file
 
.venv/lib/python3.11/site-packages/astor/__pycache__/source_repr.cpython-311.pyc ADDED
Binary file (10 kB). View file
 
.venv/lib/python3.11/site-packages/astor/__pycache__/string_repr.cpython-311.pyc ADDED
Binary file (4.85 kB). View file
 
.venv/lib/python3.11/site-packages/astor/__pycache__/tree_walk.cpython-311.pyc ADDED
Binary file (8.25 kB). View file
 
.venv/lib/python3.11/site-packages/astor/code_gen.py ADDED
@@ -0,0 +1,901 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Part of the astor library for Python AST manipulation.
4
+
5
+ License: 3-clause BSD
6
+
7
+ Copyright (c) 2008 Armin Ronacher
8
+ Copyright (c) 2012-2017 Patrick Maupin
9
+ Copyright (c) 2013-2017 Berker Peksag
10
+
11
+ This module converts an AST into Python source code.
12
+
13
+ Before being version-controlled as part of astor,
14
+ this code came from here (in 2012):
15
+
16
+ https://gist.github.com/1250562
17
+
18
+ """
19
+
20
+ import ast
21
+ import inspect
22
+ import math
23
+ import sys
24
+
25
+ from .op_util import get_op_symbol, get_op_precedence, Precedence
26
+ from .node_util import ExplicitNodeVisitor
27
+ from .string_repr import pretty_string
28
+ from .source_repr import pretty_source
29
+
30
+
31
+ def to_source(node, indent_with=' ' * 4, add_line_information=False,
32
+ pretty_string=pretty_string, pretty_source=pretty_source,
33
+ source_generator_class=None):
34
+ """This function can convert a node tree back into python sourcecode.
35
+ This is useful for debugging purposes, especially if you're dealing with
36
+ custom asts not generated by python itself.
37
+
38
+ It could be that the sourcecode is evaluable when the AST itself is not
39
+ compilable / evaluable. The reason for this is that the AST contains some
40
+ more data than regular sourcecode does, which is dropped during
41
+ conversion.
42
+
43
+ Each level of indentation is replaced with `indent_with`. Per default this
44
+ parameter is equal to four spaces as suggested by PEP 8, but it might be
45
+ adjusted to match the application's styleguide.
46
+
47
+ If `add_line_information` is set to `True` comments for the line numbers
48
+ of the nodes are added to the output. This can be used to spot wrong line
49
+ number information of statement nodes.
50
+
51
+ `source_generator_class` defaults to `SourceGenerator`, and specifies the
52
+ class that will be instantiated and used to generate the source code.
53
+
54
+ """
55
+ if source_generator_class is None:
56
+ source_generator_class = SourceGenerator
57
+ elif not inspect.isclass(source_generator_class):
58
+ raise TypeError('source_generator_class should be a class')
59
+ elif not issubclass(source_generator_class, SourceGenerator):
60
+ raise TypeError('source_generator_class should be a subclass of SourceGenerator')
61
+ generator = source_generator_class(
62
+ indent_with, add_line_information, pretty_string)
63
+ generator.visit(node)
64
+ generator.result.append('\n')
65
+ if set(generator.result[0]) == set('\n'):
66
+ generator.result[0] = ''
67
+ return pretty_source(generator.result)
68
+
69
+
70
+ def precedence_setter(AST=ast.AST, get_op_precedence=get_op_precedence,
71
+ isinstance=isinstance, list=list):
72
+ """ This only uses a closure for performance reasons,
73
+ to reduce the number of attribute lookups. (set_precedence
74
+ is called a lot of times.)
75
+ """
76
+
77
+ def set_precedence(value, *nodes):
78
+ """Set the precedence (of the parent) into the children.
79
+ """
80
+ if isinstance(value, AST):
81
+ value = get_op_precedence(value)
82
+ for node in nodes:
83
+ if isinstance(node, AST):
84
+ node._pp = value
85
+ elif isinstance(node, list):
86
+ set_precedence(value, *node)
87
+ else:
88
+ assert node is None, node
89
+
90
+ return set_precedence
91
+
92
+
93
+ set_precedence = precedence_setter()
94
+
95
+
96
+ class Delimit(object):
97
+ """A context manager that can add enclosing
98
+ delimiters around the output of a
99
+ SourceGenerator method. By default, the
100
+ parentheses are added, but the enclosed code
101
+ may set discard=True to get rid of them.
102
+ """
103
+
104
+ discard = False
105
+
106
+ def __init__(self, tree, *args):
107
+ """ use write instead of using result directly
108
+ for initial data, because it may flush
109
+ preceding data into result.
110
+ """
111
+ delimiters = '()'
112
+ node = None
113
+ op = None
114
+ for arg in args:
115
+ if isinstance(arg, ast.AST):
116
+ if node is None:
117
+ node = arg
118
+ else:
119
+ op = arg
120
+ else:
121
+ delimiters = arg
122
+ tree.write(delimiters[0])
123
+ result = self.result = tree.result
124
+ self.index = len(result)
125
+ self.closing = delimiters[1]
126
+ if node is not None:
127
+ self.p = p = get_op_precedence(op or node)
128
+ self.pp = pp = tree.get__pp(node)
129
+ self.discard = p >= pp
130
+
131
+ def __enter__(self):
132
+ return self
133
+
134
+ def __exit__(self, *exc_info):
135
+ result = self.result
136
+ start = self.index - 1
137
+ if self.discard:
138
+ result[start] = ''
139
+ else:
140
+ result.append(self.closing)
141
+
142
+
143
+ class SourceGenerator(ExplicitNodeVisitor):
144
+ """This visitor is able to transform a well formed syntax tree into Python
145
+ sourcecode.
146
+
147
+ For more details have a look at the docstring of the `node_to_source`
148
+ function.
149
+
150
+ """
151
+
152
+ using_unicode_literals = False
153
+
154
+ def __init__(self, indent_with, add_line_information=False,
155
+ pretty_string=pretty_string,
156
+ # constants
157
+ len=len, isinstance=isinstance, callable=callable):
158
+ self.result = []
159
+ self.indent_with = indent_with
160
+ self.add_line_information = add_line_information
161
+ self.indentation = 0 # Current indentation level
162
+ self.new_lines = 0 # Number of lines to insert before next code
163
+ self.colinfo = 0, 0 # index in result of string containing linefeed, and
164
+ # position of last linefeed in that string
165
+ self.pretty_string = pretty_string
166
+ AST = ast.AST
167
+
168
+ visit = self.visit
169
+ result = self.result
170
+ append = result.append
171
+
172
+ def write(*params):
173
+ """ self.write is a closure for performance (to reduce the number
174
+ of attribute lookups).
175
+ """
176
+ for item in params:
177
+ if isinstance(item, AST):
178
+ visit(item)
179
+ elif callable(item):
180
+ item()
181
+ else:
182
+ if self.new_lines:
183
+ append('\n' * self.new_lines)
184
+ self.colinfo = len(result), 0
185
+ append(self.indent_with * self.indentation)
186
+ self.new_lines = 0
187
+ if item:
188
+ append(item)
189
+
190
+ self.write = write
191
+
192
+ def __getattr__(self, name, defaults=dict(keywords=(),
193
+ _pp=Precedence.highest).get):
194
+ """ Get an attribute of the node.
195
+ like dict.get (returns None if doesn't exist)
196
+ """
197
+ if not name.startswith('get_'):
198
+ raise AttributeError
199
+ geta = getattr
200
+ shortname = name[4:]
201
+ default = defaults(shortname)
202
+
203
+ def getter(node):
204
+ return geta(node, shortname, default)
205
+
206
+ setattr(self, name, getter)
207
+ return getter
208
+
209
+ def delimit(self, *args):
210
+ return Delimit(self, *args)
211
+
212
+ def conditional_write(self, *stuff):
213
+ if stuff[-1] is not None:
214
+ self.write(*stuff)
215
+ # Inform the caller that we wrote
216
+ return True
217
+
218
+ def newline(self, node=None, extra=0):
219
+ self.new_lines = max(self.new_lines, 1 + extra)
220
+ if node is not None and self.add_line_information:
221
+ self.write('# line: %s' % node.lineno)
222
+ self.new_lines = 1
223
+
224
+ def body(self, statements):
225
+ self.indentation += 1
226
+ self.write(*statements)
227
+ self.indentation -= 1
228
+
229
+ def else_body(self, elsewhat):
230
+ if elsewhat:
231
+ self.write(self.newline, 'else:')
232
+ self.body(elsewhat)
233
+
234
+ def body_or_else(self, node):
235
+ self.body(node.body)
236
+ self.else_body(node.orelse)
237
+
238
+ def visit_arguments(self, node):
239
+ want_comma = []
240
+
241
+ def write_comma():
242
+ if want_comma:
243
+ self.write(', ')
244
+ else:
245
+ want_comma.append(True)
246
+
247
+ def loop_args(args, defaults):
248
+ set_precedence(Precedence.Comma, defaults)
249
+ padding = [None] * (len(args) - len(defaults))
250
+ for arg, default in zip(args, padding + defaults):
251
+ self.write(write_comma, arg)
252
+ self.conditional_write('=', default)
253
+
254
+ posonlyargs = getattr(node, 'posonlyargs', [])
255
+ offset = 0
256
+ if posonlyargs:
257
+ offset += len(node.defaults) - len(node.args)
258
+ loop_args(posonlyargs, node.defaults[:offset])
259
+ self.write(write_comma, '/')
260
+
261
+ loop_args(node.args, node.defaults[offset:])
262
+ self.conditional_write(write_comma, '*', node.vararg)
263
+
264
+ kwonlyargs = self.get_kwonlyargs(node)
265
+ if kwonlyargs:
266
+ if node.vararg is None:
267
+ self.write(write_comma, '*')
268
+ loop_args(kwonlyargs, node.kw_defaults)
269
+ self.conditional_write(write_comma, '**', node.kwarg)
270
+
271
+ def statement(self, node, *params, **kw):
272
+ self.newline(node)
273
+ self.write(*params)
274
+
275
+ def decorators(self, node, extra):
276
+ self.newline(extra=extra)
277
+ for decorator in node.decorator_list:
278
+ self.statement(decorator, '@', decorator)
279
+
280
+ def comma_list(self, items, trailing=False):
281
+ set_precedence(Precedence.Comma, *items)
282
+ for idx, item in enumerate(items):
283
+ self.write(', ' if idx else '', item)
284
+ self.write(',' if trailing else '')
285
+
286
+ # Statements
287
+
288
+ def visit_Assign(self, node):
289
+ set_precedence(node, node.value, *node.targets)
290
+ self.newline(node)
291
+ for target in node.targets:
292
+ self.write(target, ' = ')
293
+ self.visit(node.value)
294
+
295
+ def visit_AugAssign(self, node):
296
+ set_precedence(node, node.value, node.target)
297
+ self.statement(node, node.target, get_op_symbol(node.op, ' %s= '),
298
+ node.value)
299
+
300
+ def visit_AnnAssign(self, node):
301
+ set_precedence(node, node.target, node.annotation)
302
+ set_precedence(Precedence.Comma, node.value)
303
+ need_parens = isinstance(node.target, ast.Name) and not node.simple
304
+ begin = '(' if need_parens else ''
305
+ end = ')' if need_parens else ''
306
+ self.statement(node, begin, node.target, end, ': ', node.annotation)
307
+ self.conditional_write(' = ', node.value)
308
+
309
+ def visit_ImportFrom(self, node):
310
+ self.statement(node, 'from ', node.level * '.',
311
+ node.module or '', ' import ')
312
+ self.comma_list(node.names)
313
+ # Goofy stuff for Python 2.7 _pyio module
314
+ if node.module == '__future__' and 'unicode_literals' in (
315
+ x.name for x in node.names):
316
+ self.using_unicode_literals = True
317
+
318
+ def visit_Import(self, node):
319
+ self.statement(node, 'import ')
320
+ self.comma_list(node.names)
321
+
322
+ def visit_Expr(self, node):
323
+ set_precedence(node, node.value)
324
+ self.statement(node)
325
+ self.generic_visit(node)
326
+
327
+ def visit_FunctionDef(self, node, is_async=False):
328
+ prefix = 'async ' if is_async else ''
329
+ self.decorators(node, 1 if self.indentation else 2)
330
+ self.statement(node, '%sdef %s' % (prefix, node.name), '(')
331
+ self.visit_arguments(node.args)
332
+ self.write(')')
333
+ self.conditional_write(' ->', self.get_returns(node))
334
+ self.write(':')
335
+ self.body(node.body)
336
+ if not self.indentation:
337
+ self.newline(extra=2)
338
+
339
+ # introduced in Python 3.5
340
+ def visit_AsyncFunctionDef(self, node):
341
+ self.visit_FunctionDef(node, is_async=True)
342
+
343
+ def visit_ClassDef(self, node):
344
+ have_args = []
345
+
346
+ def paren_or_comma():
347
+ if have_args:
348
+ self.write(', ')
349
+ else:
350
+ have_args.append(True)
351
+ self.write('(')
352
+
353
+ self.decorators(node, 2)
354
+ self.statement(node, 'class %s' % node.name)
355
+ for base in node.bases:
356
+ self.write(paren_or_comma, base)
357
+ # keywords not available in early version
358
+ for keyword in self.get_keywords(node):
359
+ self.write(paren_or_comma, keyword.arg or '',
360
+ '=' if keyword.arg else '**', keyword.value)
361
+ self.conditional_write(paren_or_comma, '*', self.get_starargs(node))
362
+ self.conditional_write(paren_or_comma, '**', self.get_kwargs(node))
363
+ self.write(have_args and '):' or ':')
364
+ self.body(node.body)
365
+ if not self.indentation:
366
+ self.newline(extra=2)
367
+
368
+ def visit_If(self, node):
369
+ set_precedence(node, node.test)
370
+ self.statement(node, 'if ', node.test, ':')
371
+ self.body(node.body)
372
+ while True:
373
+ else_ = node.orelse
374
+ if len(else_) == 1 and isinstance(else_[0], ast.If):
375
+ node = else_[0]
376
+ set_precedence(node, node.test)
377
+ self.write(self.newline, 'elif ', node.test, ':')
378
+ self.body(node.body)
379
+ else:
380
+ self.else_body(else_)
381
+ break
382
+
383
+ def visit_For(self, node, is_async=False):
384
+ set_precedence(node, node.target)
385
+ prefix = 'async ' if is_async else ''
386
+ self.statement(node, '%sfor ' % prefix,
387
+ node.target, ' in ', node.iter, ':')
388
+ self.body_or_else(node)
389
+
390
+ # introduced in Python 3.5
391
+ def visit_AsyncFor(self, node):
392
+ self.visit_For(node, is_async=True)
393
+
394
+ def visit_While(self, node):
395
+ set_precedence(node, node.test)
396
+ self.statement(node, 'while ', node.test, ':')
397
+ self.body_or_else(node)
398
+
399
+ def visit_With(self, node, is_async=False):
400
+ prefix = 'async ' if is_async else ''
401
+ self.statement(node, '%swith ' % prefix)
402
+ if hasattr(node, "context_expr"): # Python < 3.3
403
+ self.visit_withitem(node)
404
+ else: # Python >= 3.3
405
+ self.comma_list(node.items)
406
+ self.write(':')
407
+ self.body(node.body)
408
+
409
+ # new for Python 3.5
410
+ def visit_AsyncWith(self, node):
411
+ self.visit_With(node, is_async=True)
412
+
413
+ # new for Python 3.3
414
+ def visit_withitem(self, node):
415
+ self.write(node.context_expr)
416
+ self.conditional_write(' as ', node.optional_vars)
417
+
418
+ # deprecated in Python 3.8
419
+ def visit_NameConstant(self, node):
420
+ self.write(repr(node.value))
421
+
422
+ def visit_Pass(self, node):
423
+ self.statement(node, 'pass')
424
+
425
+ def visit_Print(self, node):
426
+ # XXX: python 2.6 only
427
+ self.statement(node, 'print ')
428
+ values = node.values
429
+ if node.dest is not None:
430
+ self.write(' >> ')
431
+ values = [node.dest] + node.values
432
+ self.comma_list(values, not node.nl)
433
+
434
+ def visit_Delete(self, node):
435
+ self.statement(node, 'del ')
436
+ self.comma_list(node.targets)
437
+
438
+ def visit_TryExcept(self, node):
439
+ self.statement(node, 'try:')
440
+ self.body(node.body)
441
+ self.write(*node.handlers)
442
+ self.else_body(node.orelse)
443
+
444
+ # new for Python 3.3
445
+ def visit_Try(self, node):
446
+ self.statement(node, 'try:')
447
+ self.body(node.body)
448
+ self.write(*node.handlers)
449
+ self.else_body(node.orelse)
450
+ if node.finalbody:
451
+ self.statement(node, 'finally:')
452
+ self.body(node.finalbody)
453
+
454
+ def visit_ExceptHandler(self, node):
455
+ self.statement(node, 'except')
456
+ if self.conditional_write(' ', node.type):
457
+ self.conditional_write(' as ', node.name)
458
+ self.write(':')
459
+ self.body(node.body)
460
+
461
+ def visit_TryFinally(self, node):
462
+ self.statement(node, 'try:')
463
+ self.body(node.body)
464
+ self.statement(node, 'finally:')
465
+ self.body(node.finalbody)
466
+
467
+ def visit_Exec(self, node):
468
+ dicts = node.globals, node.locals
469
+ dicts = dicts[::-1] if dicts[0] is None else dicts
470
+ self.statement(node, 'exec ', node.body)
471
+ self.conditional_write(' in ', dicts[0])
472
+ self.conditional_write(', ', dicts[1])
473
+
474
+ def visit_Assert(self, node):
475
+ set_precedence(node, node.test, node.msg)
476
+ self.statement(node, 'assert ', node.test)
477
+ self.conditional_write(', ', node.msg)
478
+
479
+ def visit_Global(self, node):
480
+ self.statement(node, 'global ', ', '.join(node.names))
481
+
482
+ def visit_Nonlocal(self, node):
483
+ self.statement(node, 'nonlocal ', ', '.join(node.names))
484
+
485
+ def visit_Return(self, node):
486
+ set_precedence(node, node.value)
487
+ self.statement(node, 'return')
488
+ self.conditional_write(' ', node.value)
489
+
490
+ def visit_Break(self, node):
491
+ self.statement(node, 'break')
492
+
493
+ def visit_Continue(self, node):
494
+ self.statement(node, 'continue')
495
+
496
+ def visit_Raise(self, node):
497
+ # XXX: Python 2.6 / 3.0 compatibility
498
+ self.statement(node, 'raise')
499
+ if self.conditional_write(' ', self.get_exc(node)):
500
+ self.conditional_write(' from ', node.cause)
501
+ elif self.conditional_write(' ', self.get_type(node)):
502
+ set_precedence(node, node.inst)
503
+ self.conditional_write(', ', node.inst)
504
+ self.conditional_write(', ', node.tback)
505
+
506
+ # Expressions
507
+
508
+ def visit_Attribute(self, node):
509
+ self.write(node.value, '.', node.attr)
510
+
511
+ def visit_Call(self, node, len=len):
512
+ write = self.write
513
+ want_comma = []
514
+
515
+ def write_comma():
516
+ if want_comma:
517
+ write(', ')
518
+ else:
519
+ want_comma.append(True)
520
+
521
+ args = node.args
522
+ keywords = node.keywords
523
+ starargs = self.get_starargs(node)
524
+ kwargs = self.get_kwargs(node)
525
+ numargs = len(args) + len(keywords)
526
+ numargs += starargs is not None
527
+ numargs += kwargs is not None
528
+ p = Precedence.Comma if numargs > 1 else Precedence.call_one_arg
529
+ set_precedence(p, *args)
530
+ self.visit(node.func)
531
+ write('(')
532
+ for arg in args:
533
+ write(write_comma, arg)
534
+
535
+ set_precedence(Precedence.Comma, *(x.value for x in keywords))
536
+ for keyword in keywords:
537
+ # a keyword.arg of None indicates dictionary unpacking
538
+ # (Python >= 3.5)
539
+ arg = keyword.arg or ''
540
+ write(write_comma, arg, '=' if arg else '**', keyword.value)
541
+ # 3.5 no longer has these
542
+ self.conditional_write(write_comma, '*', starargs)
543
+ self.conditional_write(write_comma, '**', kwargs)
544
+ write(')')
545
+
546
+ def visit_Name(self, node):
547
+ self.write(node.id)
548
+
549
+ # ast.Constant is new in Python 3.6 and it replaces ast.Bytes,
550
+ # ast.Ellipsis, ast.NameConstant, ast.Num, ast.Str in Python 3.8
551
+ def visit_Constant(self, node):
552
+ value = node.value
553
+
554
+ if isinstance(value, (int, float, complex)):
555
+ with self.delimit(node):
556
+ self._handle_numeric_constant(value)
557
+ elif isinstance(value, str):
558
+ self._handle_string_constant(node, node.value)
559
+ elif value is Ellipsis:
560
+ self.write('...')
561
+ else:
562
+ self.write(repr(value))
563
+
564
+ def visit_JoinedStr(self, node):
565
+ self._handle_string_constant(node, None, is_joined=True)
566
+
567
+ def _handle_string_constant(self, node, value, is_joined=False):
568
+ # embedded is used to control when we might want
569
+ # to use a triple-quoted string. We determine
570
+ # if we are in an assignment and/or in an expression
571
+ precedence = self.get__pp(node)
572
+ embedded = ((precedence > Precedence.Expr) +
573
+ (precedence >= Precedence.Assign))
574
+
575
+ # Flush any pending newlines, because we're about
576
+ # to severely abuse the result list.
577
+ self.write('')
578
+ result = self.result
579
+
580
+ # Calculate the string representing the line
581
+ # we are working on, up to but not including
582
+ # the string we are adding.
583
+
584
+ res_index, str_index = self.colinfo
585
+ current_line = self.result[res_index:]
586
+ if str_index:
587
+ current_line[0] = current_line[0][str_index:]
588
+ current_line = ''.join(current_line)
589
+
590
+ has_ast_constant = sys.version_info >= (3, 6)
591
+
592
+ if is_joined:
593
+ # Handle new f-strings. This is a bit complicated, because
594
+ # the tree can contain subnodes that recurse back to JoinedStr
595
+ # subnodes...
596
+
597
+ def recurse(node):
598
+ for value in node.values:
599
+ if isinstance(value, ast.Str):
600
+ # Double up braces to escape them.
601
+ self.write(value.s.replace('{', '{{').replace('}', '}}'))
602
+ elif isinstance(value, ast.FormattedValue):
603
+ with self.delimit('{}'):
604
+ # expr_text used for f-string debugging syntax.
605
+ if getattr(value, 'expr_text', None):
606
+ self.write(value.expr_text)
607
+ else:
608
+ set_precedence(value, value.value)
609
+ self.visit(value.value)
610
+ if value.conversion != -1:
611
+ self.write('!%s' % chr(value.conversion))
612
+ if value.format_spec is not None:
613
+ self.write(':')
614
+ recurse(value.format_spec)
615
+ elif has_ast_constant and isinstance(value, ast.Constant):
616
+ self.write(value.value)
617
+ else:
618
+ kind = type(value).__name__
619
+ assert False, 'Invalid node %s inside JoinedStr' % kind
620
+
621
+ index = len(result)
622
+ recurse(node)
623
+
624
+ # Flush trailing newlines (so that they are part of mystr)
625
+ self.write('')
626
+ mystr = ''.join(result[index:])
627
+ del result[index:]
628
+ self.colinfo = res_index, str_index # Put it back like we found it
629
+ uni_lit = False # No formatted byte strings
630
+
631
+ else:
632
+ assert value is not None, "Node value cannot be None"
633
+ mystr = value
634
+ uni_lit = self.using_unicode_literals
635
+
636
+ mystr = self.pretty_string(mystr, embedded, current_line, uni_lit)
637
+
638
+ if is_joined:
639
+ mystr = 'f' + mystr
640
+ elif getattr(node, 'kind', False):
641
+ # Constant.kind is a Python 3.8 addition.
642
+ mystr = node.kind + mystr
643
+
644
+ self.write(mystr)
645
+
646
+ lf = mystr.rfind('\n') + 1
647
+ if lf:
648
+ self.colinfo = len(result) - 1, lf
649
+
650
+ # deprecated in Python 3.8
651
+ def visit_Str(self, node):
652
+ self._handle_string_constant(node, node.s)
653
+
654
+ # deprecated in Python 3.8
655
+ def visit_Bytes(self, node):
656
+ self.write(repr(node.s))
657
+
658
+ def _handle_numeric_constant(self, value):
659
+ x = value
660
+
661
+ def part(p, imaginary):
662
+ # Represent infinity as 1e1000 and NaN as 1e1000-1e1000.
663
+ s = 'j' if imaginary else ''
664
+ try:
665
+ if math.isinf(p):
666
+ if p < 0:
667
+ return '-1e1000' + s
668
+ return '1e1000' + s
669
+ if math.isnan(p):
670
+ return '(1e1000%s-1e1000%s)' % (s, s)
671
+ except OverflowError:
672
+ # math.isinf will raise this when given an integer
673
+ # that's too large to convert to a float.
674
+ pass
675
+ return repr(p) + s
676
+
677
+ real = part(x.real if isinstance(x, complex) else x, imaginary=False)
678
+ if isinstance(x, complex):
679
+ imag = part(x.imag, imaginary=True)
680
+ if x.real == 0:
681
+ s = imag
682
+ elif x.imag == 0:
683
+ s = '(%s+0j)' % real
684
+ else:
685
+ # x has nonzero real and imaginary parts.
686
+ s = '(%s%s%s)' % (real, ['+', ''][imag.startswith('-')], imag)
687
+ else:
688
+ s = real
689
+ self.write(s)
690
+
691
    def visit_Num(self, node,
                  # constants
                  new=sys.version_info >= (3, 0)):
        """Write a numeric literal (deprecated Num node, pre-3.8)."""
        with self.delimit(node) as delimiters:
            self._handle_numeric_constant(node.n)

            # We can leave the delimiters handling in visit_Num
            # since this is meant to handle a Python 2.x specific
            # issue and ast.Constant exists only in 3.6+

            # The Python 2.x compiler merges a unary minus
            # with a number. This is a premature optimization
            # that we deal with here...
            if not new and delimiters.discard:
                if not isinstance(node.n, complex) and node.n < 0:
                    # Negative literal on the left of ** still needs
                    # parentheses, e.g. (-1) ** 2.
                    pow_lhs = Precedence.Pow + 1
                    delimiters.discard = delimiters.pp != pow_lhs
                else:
                    # Keep the parentheses only when the merged parent
                    # operator was a unary minus.
                    op = self.get__p_op(node)
                    delimiters.discard = not isinstance(op, ast.USub)
711
+
712
+ def visit_Tuple(self, node):
713
+ with self.delimit(node) as delimiters:
714
+ # Two things are special about tuples:
715
+ # 1) We cannot discard the enclosing parentheses if empty
716
+ # 2) We need the trailing comma if only one item
717
+ elts = node.elts
718
+ delimiters.discard = delimiters.discard and elts
719
+ self.comma_list(elts, len(elts) == 1)
720
+
721
    def visit_List(self, node):
        """Write a list display."""
        with self.delimit('[]'):
            self.comma_list(node.elts)
724
+
725
+ def visit_Set(self, node):
726
+ if node.elts:
727
+ with self.delimit('{}'):
728
+ self.comma_list(node.elts)
729
+ else:
730
+ # If we tried to use "{}" to represent an empty set, it would be
731
+ # interpreted as an empty dictionary. We can't use "set()" either
732
+ # because the name "set" might be rebound.
733
+ self.write('{1}.__class__()')
734
+
735
+ def visit_Dict(self, node):
736
+ set_precedence(Precedence.Comma, *node.values)
737
+ with self.delimit('{}'):
738
+ for idx, (key, value) in enumerate(zip(node.keys, node.values)):
739
+ self.write(', ' if idx else '',
740
+ key if key else '',
741
+ ': ' if key else '**', value)
742
+
743
    def visit_BinOp(self, node):
        """Write a binary operation, setting operand precedences."""
        op, left, right = node.op, node.left, node.right
        with self.delimit(node, op) as delimiters:
            ispow = isinstance(op, ast.Pow)
            p = delimiters.p
            # ** is right-associative: the left operand binds tighter,
            # while the right gets the special PowRHS precedence.
            set_precedence((Precedence.Pow + 1) if ispow else p, left)
            set_precedence(Precedence.PowRHS if ispow else (p + 1), right)
            self.write(left, get_op_symbol(op, ' %s '), right)
751
+
752
+ def visit_BoolOp(self, node):
753
+ with self.delimit(node, node.op) as delimiters:
754
+ op = get_op_symbol(node.op, ' %s ')
755
+ set_precedence(delimiters.p + 1, *node.values)
756
+ for idx, value in enumerate(node.values):
757
+ self.write(idx and op or '', value)
758
+
759
+ def visit_Compare(self, node):
760
+ with self.delimit(node, node.ops[0]) as delimiters:
761
+ set_precedence(delimiters.p + 1, node.left, *node.comparators)
762
+ self.visit(node.left)
763
+ for op, right in zip(node.ops, node.comparators):
764
+ self.write(get_op_symbol(op, ' %s '), right)
765
+
766
    # assignment expressions; new for Python 3.8
    def visit_NamedExpr(self, node):
        """Write a walrus expression: (target := value)."""
        with self.delimit(node) as delimiters:
            p = delimiters.p
            set_precedence(p, node.target)
            set_precedence(p + 1, node.value)
            # Python is picky about delimiters for assignment
            # expressions: it requires at least one pair in any
            # statement that uses an assignment expression, even
            # when not necessary according to the precedence
            # rules. We address this with the kludge of forcing a
            # pair of parentheses around every assignment
            # expression.
            delimiters.discard = False
            self.write(node.target, ' := ', node.value)
781
+
782
    def visit_UnaryOp(self, node):
        """Write a unary operation (not, -, +, ~)."""
        with self.delimit(node, node.op) as delimiters:
            set_precedence(delimiters.p, node.operand)
            # In Python 2.x, a unary negative of a literal
            # number is merged into the number itself. This
            # bit of ugliness means it is useful to know
            # what the parent operation was...
            node.operand._p_op = node.op
            sym = get_op_symbol(node.op)
            # Alphabetic operators ("not") need a separating space.
            self.write(sym, ' ' if sym.isalpha() else '', node.operand)
792
+
793
    def visit_Subscript(self, node):
        """Write a subscript expression: value[slice]."""
        set_precedence(node, node.slice)
        self.write(node.value, '[', node.slice, ']')
796
+
797
+ def visit_Slice(self, node):
798
+ set_precedence(node, node.lower, node.upper, node.step)
799
+ self.conditional_write(node.lower)
800
+ self.write(':')
801
+ self.conditional_write(node.upper)
802
+ if node.step is not None:
803
+ self.write(':')
804
+ if not (isinstance(node.step, ast.Name) and
805
+ node.step.id == 'None'):
806
+ self.visit(node.step)
807
+
808
    def visit_Index(self, node):
        """Write a plain subscript index (node removed in Python 3.9)."""
        with self.delimit(node) as delimiters:
            set_precedence(delimiters.p, node.value)
            self.visit(node.value)
812
+
813
+ def visit_ExtSlice(self, node):
814
+ dims = node.dims
815
+ set_precedence(node, *dims)
816
+ self.comma_list(dims, len(dims) == 1)
817
+
818
    def visit_Yield(self, node):
        """Write a yield expression with optional value."""
        with self.delimit(node):
            set_precedence(get_op_precedence(node) + 1, node.value)
            self.write('yield')
            # Space plus value only when a value is present.
            self.conditional_write(' ', node.value)
823
+
824
    # new for Python 3.3
    def visit_YieldFrom(self, node):
        """Write a 'yield from' expression."""
        with self.delimit(node):
            self.write('yield from ', node.value)
828
+
829
    # new for Python 3.5
    def visit_Await(self, node):
        """Write an await expression."""
        with self.delimit(node):
            self.write('await ', node.value)
833
+
834
+ def visit_Lambda(self, node):
835
+ with self.delimit(node) as delimiters:
836
+ set_precedence(delimiters.p, node.body)
837
+ self.write('lambda ')
838
+ self.visit_arguments(node.args)
839
+ self.write(': ', node.body)
840
+
841
    def visit_Ellipsis(self, node):
        """Write the literal '...'."""
        self.write('...')
843
+
844
    def visit_ListComp(self, node):
        """Write a list comprehension."""
        with self.delimit('[]'):
            self.write(node.elt, *node.generators)
847
+
848
    def visit_GeneratorExp(self, node):
        """Write a generator expression."""
        with self.delimit(node) as delimiters:
            if delimiters.pp == Precedence.call_one_arg:
                # Sole argument of a call -- the call's own parentheses
                # suffice, e.g. sum(x for x in y).
                delimiters.discard = True
            set_precedence(Precedence.Comma, node.elt)
            self.write(node.elt, *node.generators)
854
+
855
    def visit_SetComp(self, node):
        """Write a set comprehension."""
        with self.delimit('{}'):
            self.write(node.elt, *node.generators)
858
+
859
    def visit_DictComp(self, node):
        """Write a dict comprehension."""
        with self.delimit('{}'):
            self.write(node.key, ': ', node.value, *node.generators)
862
+
863
+ def visit_IfExp(self, node):
864
+ with self.delimit(node) as delimiters:
865
+ set_precedence(delimiters.p + 1, node.body, node.test)
866
+ set_precedence(delimiters.p, node.orelse)
867
+ self.write(node.body, ' if ', node.test, ' else ', node.orelse)
868
+
869
    def visit_Starred(self, node):
        """Write a starred (unpacking) expression: *value."""
        self.write('*', node.value)
871
+
872
    def visit_Repr(self, node):
        """Write a Python 2 backtick repr expression: `value`."""
        # XXX: python 2.6 only
        with self.delimit('``'):
            self.visit(node.value)
876
+
877
    def visit_Module(self, node):
        """Write all top-level statements of a module."""
        self.write(*node.body)

    # Interactive roots hold a statement list too.
    visit_Interactive = visit_Module
881
+
882
    def visit_Expression(self, node):
        """Write an eval-mode Expression root node."""
        self.visit(node.body)
884
+
885
+ # Helper Nodes
886
+
887
    def visit_arg(self, node):
        """Write one formal parameter, with optional annotation."""
        self.write(node.arg)
        self.conditional_write(': ', node.annotation)
890
+
891
    def visit_alias(self, node):
        """Write an import alias: name [as asname]."""
        self.write(node.name)
        self.conditional_write(' as ', node.asname)
894
+
895
+ def visit_comprehension(self, node):
896
+ set_precedence(node, node.iter, *node.ifs)
897
+ set_precedence(Precedence.comprehension_target, node.target)
898
+ stmt = ' async for ' if self.get_is_async(node) else ' for '
899
+ self.write(stmt, node.target, ' in ', node.iter)
900
+ for if_ in node.ifs:
901
+ self.write(' if ', if_)
.venv/lib/python3.11/site-packages/astor/codegen.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
import warnings

from .code_gen import *  # NOQA


# Backwards-compatibility shim: this module was renamed to code_gen;
# importing it still works, but warns so callers can migrate.
warnings.warn(
    'astor.codegen module is deprecated. Please import '
    'astor.code_gen module instead.',
    DeprecationWarning,
    stacklevel=2
)
.venv/lib/python3.11/site-packages/astor/file_util.py ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Part of the astor library for Python AST manipulation.
4
+
5
+ License: 3-clause BSD
6
+
7
+ Copyright (c) 2012-2015 Patrick Maupin
8
+ Copyright (c) 2013-2015 Berker Peksag
9
+
10
+ Functions that interact with the filesystem go here.
11
+
12
+ """
13
+
14
+ import ast
15
+ import sys
16
+ import os
17
+
18
# tokenize.open honors PEP 263 source-encoding declarations (Python 3);
# fall back to the builtin open on Python 2.
try:
    from tokenize import open as fopen
except ImportError:
    fopen = open
22
+
23
+
24
class CodeToAst(object):
    """Given a module, or a function that was compiled as part
    of a module, re-compile the module into an AST and extract
    the sub-AST for the function.  Allow caching to reduce
    number of compiles.

    Also contains static helper utility functions to
    look for python files, to parse python files, and to extract
    the file/line information from a code object.
    """

    @staticmethod
    def find_py_files(srctree, ignore=None):
        """Return all the python files in a source tree

        Ignores any path that contains the ignore string

        This is not used by other class methods, but is
        designed to be used in code that uses this class.
        """

        # A single file argument is yielded as a (dirname, basename) pair.
        if not os.path.isdir(srctree):
            yield os.path.split(srctree)
        for srcpath, _, fnames in os.walk(srctree):
            # Avoid infinite recursion for silly users
            if ignore is not None and ignore in srcpath:
                continue
            for fname in (x for x in fnames if x.endswith('.py')):
                yield srcpath, fname

    @staticmethod
    def parse_file(fname):
        """Parse a python file into an AST.

        This is a very thin wrapper around ast.parse

        TODO: Handle encodings other than the default for Python 2
              (issue #26)
        """
        try:
            with fopen(fname) as f:
                fstr = f.read()
        except IOError:
            # The pseudo-name 'stdin' reads source interactively instead.
            if fname != 'stdin':
                raise
            sys.stdout.write('\nReading from stdin:\n\n')
            fstr = sys.stdin.read()
        # Normalize line endings; ast.parse wants a trailing newline.
        fstr = fstr.replace('\r\n', '\n').replace('\r', '\n')
        if not fstr.endswith('\n'):
            fstr += '\n'
        return ast.parse(fstr, filename=fname)

    @staticmethod
    def get_file_info(codeobj):
        """Returns the file and line number of a code object.

        If the code object has a __file__ attribute (e.g. if
        it is a module), then the returned line number will
        be 0
        """
        fname = getattr(codeobj, '__file__', None)
        linenum = 0
        if fname is None:
            func_code = codeobj.__code__
            fname = func_code.co_filename
            linenum = func_code.co_firstlineno
        # Map compiled filenames back to their source file.
        fname = fname.replace('.pyc', '.py')
        return fname, linenum

    def __init__(self, cache=None):
        # cache maps (filename, lineno) -> AST node.
        self.cache = cache or {}

    def __call__(self, codeobj):
        cache = self.cache
        key = self.get_file_info(codeobj)
        result = cache.get(key)
        if result is not None:
            return result
        fname = key[0]
        # Parse the whole module once, indexing it (lineno 0) and each
        # of its top-level functions by (filename, lineno).
        # NOTE(review): only ast.FunctionDef is indexed -- nested and
        # async functions and classes are not; confirm against callers.
        cache[(fname, 0)] = mod_ast = self.parse_file(fname)
        for obj in mod_ast.body:
            if not isinstance(obj, ast.FunctionDef):
                continue
            cache[(fname, obj.lineno)] = obj
        return cache[key]


code_to_ast = CodeToAst()
.venv/lib/python3.11/site-packages/astor/node_util.py ADDED
@@ -0,0 +1,208 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Part of the astor library for Python AST manipulation.
4
+
5
+ License: 3-clause BSD
6
+
7
+ Copyright 2012-2015 (c) Patrick Maupin
8
+ Copyright 2013-2015 (c) Berker Peksag
9
+
10
+ Utilities for node (and, by extension, tree) manipulation.
11
+ For a whole-tree approach, see the treewalk submodule.
12
+
13
+ """
14
+
15
+ import ast
16
+ import itertools
17
+
18
# Python 3 spells it zip_longest; Python 2 used izip_longest.
try:
    zip_longest = itertools.zip_longest
except AttributeError:
    zip_longest = itertools.izip_longest
+
23
+
24
class NonExistent(object):
    """Sentinel class marking attributes that are absent on a node.
    """
    pass


def iter_node(node, name='', unknown=None,
              # Runtime optimization
              list=list, getattr=getattr, isinstance=isinstance,
              enumerate=enumerate, missing=NonExistent):
    """Iterate over the children of an object, yielding (value, name):

    - If the object has a _fields attribute, attributes are produced
      in _fields order as (value, field_name) pairs.

    - Otherwise, if the object is a list instance, each item is
      produced paired with the *name* passed in (default blank).

    - When *unknown* is a set, it is updated with the names of any
      attributes that are not listed in _fields.
    """
    fields = getattr(node, '_fields', None)
    if fields is None:
        if isinstance(node, list):
            for item in node:
                yield item, name
        return
    for field_name in fields:
        value = getattr(node, field_name, missing)
        if value is not missing:
            yield value, field_name
    if unknown is not None:
        unknown.update(set(vars(node)) - set(fields))
+ yield value, name
59
+
60
+
61
def dump_tree(node, name=None, initial_indent='', indentation='    ',
              maxline=120, maxmerged=80,
              # Runtime optimization
              iter_node=iter_node, special=ast.AST,
              list=list, isinstance=isinstance, type=type, len=len):
    """Dumps an AST or similar structure:

    - Pretty-prints with indentation
    - Doesn't print line/column/ctx info

    """
    def dump(node, name=None, indent=''):
        level = indent + indentation
        # Optional "name=" prefix for named attribute children.
        name = name and name + '=' or ''
        values = list(iter_node(node))
        if isinstance(node, list):
            prefix, suffix = '%s[' % name, ']'
        elif values:
            prefix, suffix = '%s%s(' % (name, type(node).__name__), ')'
        elif isinstance(node, special):
            # Fieldless AST leaf, e.g. Pass or Load.
            prefix, suffix = name + type(node).__name__, ''
        else:
            # Plain-value leaf: just use repr().
            return '%s%s' % (name, repr(node))
        node = [dump(a, b, level) for a, b in values if b != 'ctx']
        oneline = '%s%s%s' % (prefix, ', '.join(node), suffix)
        # Prefer a single line when it fits within maxline.
        if len(oneline) + len(indent) < maxline:
            return '%s' % oneline
        # Otherwise wrap children, merging the first child onto the
        # prefix line when it is short enough.
        if node and len(prefix) + len(node[0]) < maxmerged:
            prefix = '%s%s,' % (prefix, node.pop(0))
        node = (',\n%s' % level).join(node).lstrip()
        return '%s\n%s%s%s' % (prefix, level, node, suffix)
    return dump(node, name, initial_indent)
93
+
94
+
95
def strip_tree(node,
               # Runtime optimization
               iter_node=iter_node, special=ast.AST,
               list=list, isinstance=isinstance, type=type, len=len):
    """Strips an AST by removing all attributes not in _fields.

    Returns a set of the names of all attributes stripped.

    This canonicalizes two trees for comparison purposes.
    """
    stripped = set()

    def strip(node, indent):
        unknown = set()
        leaf = True
        for subnode, _ in iter_node(node, unknown=unknown):
            leaf = False
            strip(subnode, indent + '    ')
        if leaf:
            # Fieldless AST leaves keep nothing at all.
            if isinstance(node, special):
                unknown = set(vars(node))
        stripped.update(unknown)
        for name in unknown:
            delattr(node, name)
        if hasattr(node, 'ctx'):
            delattr(node, 'ctx')
            if 'ctx' in node._fields:
                # Rebind _fields on the instance (shadowing the class
                # attribute) so 'ctx' also vanishes from iteration.
                mylist = list(node._fields)
                mylist.remove('ctx')
                node._fields = mylist
    strip(node, '')
    return stripped
127
+
128
+
129
class ExplicitNodeVisitor(ast.NodeVisitor):
    """This expands on the ast module's NodeVisitor class
    to remove any implicit visits.

    """

    def abort_visit(node):  # XXX: self?
        # Deliberately unbound: captured below as a default argument,
        # so it receives the *node*, not the visitor instance.
        msg = 'No defined handler for node of type %s'
        raise AttributeError(msg % node.__class__.__name__)

    def visit(self, node, abort=abort_visit):
        """Visit a node."""
        # No generic_visit fallback: an unhandled node type raises.
        method = 'visit_' + node.__class__.__name__
        visitor = getattr(self, method, abort)
        return visitor(node)
144
+
145
+
146
def allow_ast_comparison():
    """Monkey-patch a comparison mixin into every AST node class.

    After this runs, == / != on nodes compare by exact type and
    instance dict, so entire trees can be compared directly by
    Python's normal comparison machinery.  Used by the anti8
    functions to compare old and new ASTs; could also be used by
    the test library.
    """

    class CompareHelper(object):
        def __eq__(self, other):
            return type(self) == type(other) and vars(self) == vars(other)

        def __ne__(self, other):
            return type(self) != type(other) or vars(self) != vars(other)

    for candidate in vars(ast).values():
        if type(candidate) != type:
            continue
        if issubclass(candidate, ast.AST):
            try:
                candidate.__bases__ += (CompareHelper,)
            except TypeError:
                # Some classes refuse new bases; skip them.
                pass
+
173
+
174
def fast_compare(tree1, tree2):
    """Optimized structural equality check for two AST trees.

    Makes several assumptions that currently hold for AST trees used
    by rtrip: only _fields are compared (never _attributes), and 'ctx'
    fields are ignored.
    """

    geta = ast.AST.__getattribute__
    pending = [(tree1, tree2)]
    # TypeError in cPython, AttributeError in PyPy
    no_fields = (TypeError, AttributeError)

    while pending:
        left, right = pending.pop()
        try:
            left_fields = geta(left, '_fields')
            right_fields = geta(right, '_fields')
        except no_fields:
            # Not AST nodes: lists are walked pairwise, anything else
            # is compared directly.
            if type(left) is list:
                pending.extend(zip_longest(left, right))
            elif left != right:
                return False
        else:
            left_fields = [f for f in left_fields if f != 'ctx']
            if left_fields != [f for f in right_fields if f != 'ctx']:
                return False
            pending.extend((geta(left, f), geta(right, f))
                           for f in left_fields)

    return True
.venv/lib/python3.11/site-packages/astor/op_util.py ADDED
@@ -0,0 +1,110 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Part of the astor library for Python AST manipulation.
4
+
5
+ License: 3-clause BSD
6
+
7
+ Copyright (c) 2015 Patrick Maupin
8
+
9
+ This module provides data and functions for mapping
10
+ AST nodes to symbols and precedences.
11
+
12
+ """
13
+
14
+ import ast
15
+
16
# Table of AST node names, their source symbol (if any), and a 0/1
# precedence delta: 1 means "binds tighter than the previous line",
# 0 means "same precedence level".  Blank lines are ignored.
op_data = """
GeneratorExp 1

Assign 1
AnnAssign 1
AugAssign 0
Expr 0
Yield 1
YieldFrom 0
If 1
For 0
AsyncFor 0
While 0
Return 1

Slice 1
Subscript 0
Index 1
ExtSlice 1
comprehension_target 1
Tuple 0
FormattedValue 0

Comma 1
NamedExpr 1
Assert 0
Raise 0
call_one_arg 1

Lambda 1
IfExp 0

comprehension 1
Or or 1
And and 1
Not not 1

Eq == 1
Gt > 0
GtE >= 0
In in 0
Is is 0
NotEq != 0
Lt < 0
LtE <= 0
NotIn not in 0
IsNot is not 0

BitOr | 1
BitXor ^ 1
BitAnd & 1
LShift << 1
RShift >> 0
Add + 1
Sub - 0
Mult * 1
Div / 0
Mod % 0
FloorDiv // 0
MatMult @ 0
PowRHS 1
Invert ~ 1
UAdd + 0
USub - 0
Pow ** 1
Await 1
Num 1
Constant 1
"""

# Parse into [name, symbol, precedence] triples.  Each 0/1 delta is
# doubled and accumulated, so adjacent precedence levels differ by 2,
# leaving room for "+ 1" adjustments at use sites.
op_data = [x.split() for x in op_data.splitlines()]
op_data = [[x[0], ' '.join(x[1:-1]), int(x[-1])] for x in op_data if x]
for index in range(1, len(op_data)):
    op_data[index][2] *= 2
    op_data[index][2] += op_data[index - 1][2]

# AST node class -> precedence / symbol.  getattr with a None default
# tolerates node types absent from older Python versions.
precedence_data = dict((getattr(ast, x, None), z) for x, y, z in op_data)
symbol_data = dict((getattr(ast, x, None), y) for x, y, z in op_data)
94
+
95
+
96
def get_op_symbol(obj, fmt='%s', symbol_data=symbol_data, type=type):
    """Given an AST node object, returns a string containing the symbol.

    The lookup table and type() are bound as defaults for speed.
    """
    return fmt % symbol_data[type(obj)]
100
+
101
+
102
def get_op_precedence(obj, precedence_data=precedence_data, type=type):
    """Given an AST node object, returns the precedence.

    The lookup table and type() are bound as defaults for speed.
    """
    return precedence_data[type(obj)]
106
+
107
+
108
class Precedence(object):
    """Namespace of precedence values, one attribute per op_data entry
    (e.g. Precedence.Comma, Precedence.Pow), plus ``highest``."""
    vars().update((x, z) for x, y, z in op_data)
    highest = max(z for x, y, z in op_data) + 2
.venv/lib/python3.11/site-packages/astor/rtrip.py ADDED
@@ -0,0 +1,209 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #! /usr/bin/env python
2
+ # -*- coding: utf-8 -*-
3
+ """
4
+ Part of the astor library for Python AST manipulation.
5
+
6
+ License: 3-clause BSD
7
+
8
+ Copyright (c) 2015 Patrick Maupin
9
+ """
10
+
11
+ import sys
12
+ import os
13
+ import ast
14
+ import shutil
15
+ import logging
16
+
17
+ from astor.code_gen import to_source
18
+ from astor.file_util import code_to_ast
19
+ from astor.node_util import (allow_ast_comparison, dump_tree,
20
+ strip_tree, fast_compare)
21
+
22
+
23
# Default destination directory for round-trip output.
dsttree = 'tmp_rtrip'

# TODO: Remove this workaround once we remove version 2 support
26
+
27
+
28
def out_prep(s, pre_encoded=(sys.version_info[0] == 2)):
    """Return *s* ready to write to a binary file.

    Python 2 strings are already byte strings; on Python 3 the text
    is UTF-8 encoded first.
    """
    if pre_encoded:
        return s
    return s.encode('utf-8')
30
+
31
+
32
def convert(srctree, dsttree=dsttree, readonly=False, dumpall=False,
            ignore_exceptions=False, fullcomp=False):
    """Walk the srctree, and convert/copy all python files
    into the dsttree

    Returns the list of source files whose ASTs failed to round-trip.
    When *readonly* is set nothing is written to disk; *dumpall*
    forces AST dumps even for files that round-trip OK.
    """

    if fullcomp:
        allow_ast_comparison()

    parse_file = code_to_ast.parse_file
    find_py_files = code_to_ast.find_py_files
    srctree = os.path.normpath(srctree)

    if not readonly:
        dsttree = os.path.normpath(dsttree)
        logging.info('')
        logging.info('Trashing ' + dsttree)
        # ignore_errors=True: it is fine if the tree does not exist yet.
        shutil.rmtree(dsttree, True)

    unknown_src_nodes = set()
    unknown_dst_nodes = set()
    badfiles = set()
    broken = []

    oldpath = None

    allfiles = find_py_files(srctree, None if readonly else dsttree)
    for srcpath, fname in allfiles:
        # Create destination directory
        if not readonly and srcpath != oldpath:
            oldpath = srcpath
            if srcpath >= srctree:
                dstpath = srcpath.replace(srctree, dsttree, 1)
                if not dstpath.startswith(dsttree):
                    raise ValueError("%s not a subdirectory of %s" %
                                     (dstpath, dsttree))
            else:
                # Single-file conversion: srctree was the file itself.
                assert srctree.startswith(srcpath)
                dstpath = dsttree
            os.makedirs(dstpath)

        srcfname = os.path.join(srcpath, fname)
        logging.info('Converting %s' % srcfname)
        try:
            srcast = parse_file(srcfname)
        except SyntaxError:
            badfiles.add(srcfname)
            continue

        try:
            dsttxt = to_source(srcast)
        except Exception:
            if not ignore_exceptions:
                raise
            dsttxt = ''

        if not readonly:
            dstfname = os.path.join(dstpath, fname)
            try:
                with open(dstfname, 'wb') as f:
                    f.write(out_prep(dsttxt))
            except UnicodeEncodeError:
                badfiles.add(dstfname)

        # As a sanity check, make sure that ASTs themselves
        # round-trip OK
        try:
            dstast = ast.parse(dsttxt) if readonly else parse_file(dstfname)
        except SyntaxError:
            dstast = []
        if fullcomp:
            unknown_src_nodes.update(strip_tree(srcast))
            unknown_dst_nodes.update(strip_tree(dstast))
            bad = srcast != dstast
        else:
            bad = not fast_compare(srcast, dstast)
        if dumpall or bad:
            srcdump = dump_tree(srcast)
            dstdump = dump_tree(dstast)
            logging.warning('    calculating dump -- %s' %
                            ('bad' if bad else 'OK'))
        if bad:
            broken.append(srcfname)
        if dumpall or bad:
            if not readonly:
                # Write both dumps next to the converted file for diffing.
                try:
                    with open(dstfname[:-3] + '.srcdmp', 'wb') as f:
                        f.write(out_prep(srcdump))
                except UnicodeEncodeError:
                    badfiles.add(dstfname[:-3] + '.srcdmp')
                try:
                    with open(dstfname[:-3] + '.dstdmp', 'wb') as f:
                        f.write(out_prep(dstdump))
                except UnicodeEncodeError:
                    badfiles.add(dstfname[:-3] + '.dstdmp')
            elif dumpall:
                sys.stdout.write('\n\nAST:\n\n    ')
                sys.stdout.write(srcdump.replace('\n', '\n    '))
                sys.stdout.write('\n\nDecompile:\n\n    ')
                sys.stdout.write(dsttxt.replace('\n', '\n    '))
                sys.stdout.write('\n\nNew AST:\n\n    ')
                sys.stdout.write('(same as old)' if dstdump == srcdump
                                 else dstdump.replace('\n', '\n    '))
                sys.stdout.write('\n')

    if badfiles:
        logging.warning('\nFiles not processed due to syntax errors:')
        for fname in sorted(badfiles):
            logging.warning('    %s' % fname)
    if broken:
        logging.warning('\nFiles failed to round-trip to AST:')
        for srcfname in broken:
            logging.warning('    %s' % srcfname)

    # Attributes that strip_tree is expected to remove; anything else
    # indicates an unknown node attribute and is reported as an error.
    ok_to_strip = 'col_offset _precedence _use_parens lineno _p_op _pp'
    ok_to_strip = set(ok_to_strip.split())
    bad_nodes = (unknown_dst_nodes | unknown_src_nodes) - ok_to_strip
    if bad_nodes:
        logging.error('\nERROR -- UNKNOWN NODES STRIPPED: %s' % bad_nodes)
    logging.info('\n')
    return broken
154
+
155
+
156
def usage(msg):
    """Print an error plus the usage text and exit via SystemExit.

    NOTE(review): relies on ``textwrap`` being bound at module scope
    by the ``__main__`` block below -- fine for command-line use.
    """
    raise SystemExit(textwrap.dedent("""

        Error: %s

        Usage:

            python -m astor.rtrip [readonly] [<source>]


        This utility tests round-tripping of Python source to AST
        and back to source.

        If readonly is specified, then the source will be tested,
        but no files will be written.

        if the source is specified to be "stdin" (without quotes)
        then any source entered at the command line will be compiled
        into an AST, converted back to text, and then compiled to
        an AST again, and the results will be displayed to stdout.

        If neither readonly nor stdin is specified, then rtrip
        will create a mirror directory named tmp_rtrip and will
        recursively round-trip all the Python source from the source
        into the tmp_rtrip dir, after compiling it and then reconstituting
        it through code_gen.to_source.

        If the source is not specified, the entire Python library will be used.

        """) % msg)
186
+
187
+
188
if __name__ == '__main__':
    import textwrap

    args = sys.argv[1:]

    readonly = 'readonly' in args
    if readonly:
        args.remove('readonly')

    if not args:
        # Default target: the stdlib directory containing textwrap.
        args = [os.path.dirname(textwrap.__file__)]

    if len(args) > 1:
        usage("Too many arguments")

    fname, = args
    dumpall = False
    if not os.path.exists(fname):
        # 'stdin' is a pseudo-name; any other missing path is an error.
        dumpall = fname == 'stdin' or usage("Cannot find directory %s" % fname)

    logging.basicConfig(format='%(msg)s', level=logging.INFO)
    convert(fname, readonly=readonly or dumpall, dumpall=dumpall)
.venv/lib/python3.11/site-packages/astor/source_repr.py ADDED
@@ -0,0 +1,273 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Part of the astor library for Python AST manipulation.
4
+
5
+ License: 3-clause BSD
6
+
7
+ Copyright (c) 2015 Patrick Maupin
8
+
9
+ Pretty-print source -- post-process for the decompiler
10
+
11
+ The goals of the initial cut of this engine are:
12
+
13
+ 1) Do a passable, if not PEP8, job of line-wrapping.
14
+
15
+ 2) Serve as an example of an interface to the decompiler
16
+ for anybody who wants to do a better job. :)
17
+ """
18
+
19
+
20
def pretty_source(source):
    """ Prettify the source.
    """
    # source is a flat list of string tokens; regroup/wrap by line
    # and join into the final text.
    return ''.join(split_lines(source))
25
+
26
+
27
def split_lines(source, maxline=79):
    """Regroup *source* tokens into output lines.

    Lines short enough (or already multiline, e.g. triple-quoted
    strings) pass through untouched; longer ones go to wrap_line.
    """
    result = []
    current = []
    multiline = False
    width = 0
    for token in source:
        newline = type(token)('\n')
        pos = token.find(newline)
        if pos == 0:
            # A bare newline token ends the current line.
            if current:
                if width <= maxline or multiline:
                    result.extend(current)
                else:
                    wrap_line(current, maxline, result)
                width = 0
                multiline = False
                current = []
            result.append(token)
        else:
            current.append(token)
            # Any embedded newline marks the line as multiline.
            multiline = pos > 0
            width += len(token)
    return result
56
+
57
+
58
def count(group, slen=str.__len__):
    """Total character length of the string tokens in *group*."""
    total = 0
    for piece in group:
        total += slen(piece)
    return total
60
+
61
+
62
def wrap_line(line, maxline=79, result=None, count=count):
    """Wrap an over-long line, appending the output tokens to *result*.

    *line* is a list of string tokens (first token may carry the
    indentation); *result* receives the wrapped tokens.  Returns
    *result* when the line has no splittable groups, otherwise None
    (matching the original control flow).

    FIX: the previous default ``result=[]`` was a shared mutable list
    that accumulated output across calls made without an explicit
    result; a fresh list is now created per call.
    """

    if result is None:
        result = []

    # Extract the indentation

    append = result.append
    extend = result.extend

    indentation = line[0]
    lenfirst = len(indentation)
    indent = lenfirst - len(indentation.lstrip())
    assert indent in (0, lenfirst)
    indentation = line.pop(0) if indent else ''

    # Get splittable/non-splittable groups

    dgroups = list(delimiter_groups(line))
    unsplittable = dgroups[::2]
    splittable = dgroups[1::2]

    # If the largest non-splittable group won't fit
    # on a line, try to add parentheses to the line.

    if max(count(x) for x in unsplittable) > maxline - indent:
        line = add_parens(line, maxline, indent)
        dgroups = list(delimiter_groups(line))
        unsplittable = dgroups[::2]
        splittable = dgroups[1::2]

    # Deal with the first (always unsplittable) group, and
    # then set up to deal with the remainder in pairs.

    first = unsplittable[0]
    append(indentation)
    extend(first)
    if not splittable:
        return result
    pos = indent + count(first)
    # Continuation lines get one extra indent level.
    indentation += ' ' * 4
    indent += 4
    if indent >= maxline / 2:
        maxline = maxline / 2 + indent

    for sg, nsg in zip(splittable, unsplittable[1:]):

        if sg:
            # If we already have stuff on the line and even
            # the very first item won't fit, start a new line
            if pos > indent and pos + len(sg[0]) > maxline:
                append('\n')
                append(indentation)
                pos = indent

            # Dump lines out of the splittable group
            # until the entire thing fits
            csg = count(sg)
            while pos + csg > maxline:
                ready, sg = split_group(sg, pos, maxline)
                if ready[-1].endswith(' '):
                    ready[-1] = ready[-1][:-1]
                extend(ready)
                append('\n')
                append(indentation)
                pos = indent
                csg = count(sg)

            # Dump the remainder of the splittable group
            if sg:
                extend(sg)
                pos += csg

        # Dump the unsplittable group, optionally
        # preceded by a linefeed.
        cnsg = count(nsg)
        if pos > indent and pos + cnsg > maxline:
            append('\n')
            append(indentation)
            pos = indent
        extend(nsg)
        pos += cnsg
144
+
145
+
146
def split_group(source, pos, maxline):
    """Split a token group in two at a line boundary.

    The first returned list is appended to the current line and always
    contains at least one token; the second starts the next line.
    The original *source* list may be destroyed.
    """
    head = []
    source.reverse()
    while source:
        token = source.pop()
        head.append(token)
        pos += len(token)
        if not source:
            break
        upcoming = source[-1]
        # Tokens ending in a space may run right up to (and past) the
        # margin; others leave room for a continuation.
        limit = (maxline + 1) if upcoming.endswith(' ') else (maxline - 4)
        if pos + len(upcoming) > limit:
            break

    source.reverse()
    return head, source
170
+
171
+
172
begin_delim = set('([{')
end_delim = set(')]}')
end_delim.add('):')


def delimiter_groups(line, begin_delim=begin_delim,
                     end_delim=end_delim):
    """Yield the tokens of *line* as alternating groups.

    Even-positioned groups must stay on one line (they end at an
    opening delimiter); odd-positioned ones may be split.
    """
    chunk = []
    tokens = iter(line)
    while True:
        # Unsplittable group: everything up to and including an opener.
        for token in tokens:
            chunk.append(token)
            if token in begin_delim:
                break
        if not chunk:
            break
        yield chunk

        # Splittable group: everything up to the matching closer.
        depth = 0
        chunk = []
        for token in tokens:
            if token in begin_delim:
                depth += 1
            elif token in end_delim:
                depth -= 1
                if depth < 0:
                    # Matching closer found: flush the splittable
                    # group and start the next unsplittable one with
                    # the closer itself.
                    yield chunk
                    chunk = [token]
                    break
            chunk.append(token)
        else:
            assert not chunk, chunk
            break
211
+
212
+
213
# Statement keywords whose expression may be wrapped in parentheses.
statements = set(['del ', 'return', 'yield ', 'if ', 'while '])


def add_parens(line, maxline, indent, statements=statements, count=count):
    """Attempt to add parentheses around the line
    in order to make it splittable.

    Handles statement keywords first, then each side of an
    (augmented) assignment.  Returns the rewritten token list.
    """

    if line[0] in statements:
        index = 1
        if not line[0].endswith(' '):
            # Keyword token has no trailing space; skip the separate
            # space token that follows it.
            index = 2
            assert line[1] == ' '
        line.insert(index, '(')
        if line[-1] == ':':
            line.insert(-1, ')')
        else:
            line.append(')')

    # That was the easy stuff.  Now for assignments.
    groups = list(get_assign_groups(line))
    if len(groups) == 1:
        # So sad, too bad
        return line

    counts = list(count(x) for x in groups)
    didwrap = False

    # If the LHS is large, wrap it first
    if sum(counts[:-1]) >= maxline - indent - 4:
        for group in groups[:-1]:
            didwrap = False  # Only want to know about last group
            if len(group) > 1:
                group.insert(0, '(')
                group.insert(-1, ')')
                didwrap = True

    # Might not need to wrap the RHS if wrapped the LHS
    if not didwrap or counts[-1] > maxline - indent - 10:
        groups[-1].insert(0, '(')
        groups[-1].append(')')

    return [item for group in groups for item in group]
256
+
257
+
258
# Assignment operators
ops = list('|^&+-*/%@~') + '<< >> // **'.split() + ['']
ops = set(' %s= ' % x for x in ops)


def get_assign_groups(line, ops=ops):
    """ Split a line into groups by assignment (including
        augmented assignment)

    Yields one group per assignment target (each ending with its
    operator token) plus a final group for the right-hand side.
    """
    current = []
    for token in line:
        current.append(token)
        if token in ops:
            yield current
            current = []
    yield current
.venv/lib/python3.11/site-packages/astor/string_repr.py ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Part of the astor library for Python AST manipulation.
4
+
5
+ License: 3-clause BSD
6
+
7
+ Copyright (c) 2015 Patrick Maupin
8
+
9
+ Pretty-print strings for the decompiler
10
+
11
+ We either return the repr() of the string,
12
+ or try to format it as a triple-quoted string.
13
+
14
+ This is a lot harder than you would think.
15
+
16
+ This has lots of Python 2 / Python 3 ugliness.
17
+
18
+ """
19
+
20
+ import re
21
+
22
# Python 2/3 compatibility shim: on Python 2 ``unicode`` exists and
# unicode literals need distinct handling; on Python 3 the name is gone,
# so fall back to a dummy class that nothing is an instance of.
try:
    special_unicode = unicode
except NameError:
    class special_unicode(object):
        pass

# Likewise ``basestring`` only exists on Python 2; use ``str`` on Python 3.
try:
    basestring = basestring
except NameError:
    basestring = str
32
+
33
+
34
+ def _properly_indented(s, line_indent):
35
+ mylist = s.split('\n')[1:]
36
+ mylist = [x.rstrip() for x in mylist]
37
+ mylist = [x for x in mylist if x]
38
+ if not s:
39
+ return False
40
+ counts = [(len(x) - len(x.lstrip())) for x in mylist]
41
+ return counts and min(counts) >= line_indent
42
+
43
+
44
+ mysplit = re.compile(r'(\\|\"\"\"|\"$)').split
45
+ replacements = {'\\': '\\\\', '"""': '""\\"', '"': '\\"'}
46
+
47
+
48
+ def _prep_triple_quotes(s, mysplit=mysplit, replacements=replacements):
49
+ """ Split the string up and force-feed some replacements
50
+ to make sure it will round-trip OK
51
+ """
52
+
53
+ s = mysplit(s)
54
+ s[1::2] = (replacements[x] for x in s[1::2])
55
+ return ''.join(s)
56
+
57
+
58
def string_triplequote_repr(s):
    """Return string's python representation in triple quotes.
    """
    escaped = _prep_triple_quotes(s)
    return '"""' + escaped + '"""'
62
+
63
+
64
def pretty_string(s, embedded, current_line, uni_lit=False,
                  min_trip_str=20, max_line=100):
    """There are a lot of reasons why we might not want to or
    be able to return a triple-quoted string. We can always
    punt back to the default normal string.

    ``embedded`` is the expression nesting depth of the string literal;
    ``current_line`` is the source emitted so far on this line.
    ``uni_lit`` requests explicit b'' prefixes for bytes.  Returns either
    repr(s) or a triple-quoted rendering that round-trips via eval().
    """

    default = repr(s)

    # Punt on abnormal strings
    if (isinstance(s, special_unicode) or not isinstance(s, basestring)):
        return default
    if uni_lit and isinstance(s, bytes):
        return 'b' + default

    len_s = len(default)

    if current_line.strip():
        len_current = len(current_line)
        # 0 when the string has no newline (find() returned -1).
        second_line_start = s.find('\n') + 1
        if embedded > 1 and not second_line_start:
            # Deeply embedded single-line string: not worth triple-quoting.
            return default

        if len_s < min_trip_str:
            return default

        line_indent = len_current - len(current_line.lstrip())

        # Could be on a line by itself...
        if embedded and not second_line_start:
            return default

        total_len = len_current + len_s
        if total_len < max_line and not _properly_indented(s, line_indent):
            return default

    fancy = string_triplequote_repr(s)

    # Sometimes this doesn't work. One reason is that
    # the AST has no understanding of whether \r\n was
    # entered that way in the string or was a cr/lf in the
    # file. So we punt just so we can round-trip properly.

    try:
        if eval(fancy) == s and '\r' not in fancy:
            return fancy
    except Exception:
        pass
    return default
.venv/lib/python3.11/site-packages/astor/tree_walk.py ADDED
@@ -0,0 +1,179 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Part of the astor library for Python AST manipulation.
4
+
5
+ License: 3-clause BSD
6
+
7
+ Copyright 2012 (c) Patrick Maupin
8
+ Copyright 2013 (c) Berker Peksag
9
+
10
+ This file contains a TreeWalk class that views a node tree
11
+ as a unified whole and allows several modes of traversal.
12
+
13
+ """
14
+
15
+ from .node_util import iter_node
16
+
17
+
18
class MetaFlatten(type):
    """This metaclass is used to flatten classes to remove
    class hierarchy.

    This makes it easier to manipulate classes (find
    attributes in a single dict, etc.)

    """
    def __new__(clstype, name, bases, clsdict):
        # Collapse the whole base-class chain (except object) into one
        # flat namespace; later bases and the class's own dict win.
        newbases = (object,)
        newdict = {}
        for base in reversed(bases):
            if base not in newbases:
                newdict.update(vars(base))
        newdict.update(clsdict)
        # These are class-bound, we should let Python recreate them.
        newdict.pop('__dict__', None)
        newdict.pop('__weakref__', None)
        # Delegate the real work to type
        return type.__new__(clstype, name, newbases, newdict)


# Replace the metaclass with an ordinary base class created through it,
# so subclasses can simply inherit from MetaFlatten on both Python 2 and
# Python 3 (no metaclass= / __metaclass__ syntax differences).
MetaFlatten = MetaFlatten('MetaFlatten', (object,), {})
41
+
42
+
43
class TreeWalk(MetaFlatten):
    """The TreeWalk class can be used as a superclass in order
    to walk an AST or similar tree.

    Unlike other treewalkers, this class can walk a tree either
    recursively or non-recursively. Subclasses can define
    methods with the following signatures::

        def pre_xxx(self):
            pass

        def post_xxx(self):
            pass

        def init_xxx(self):
            pass

    Where 'xxx' is one of:

      - A class name
      - An attribute member name concatenated with '_name'
        For example, 'pre_targets_name' will process nodes
        that are referenced by the name 'targets' in their
        parent's node.
      - An attribute member name concatenated with '_item'
        For example, 'pre_targets_item' will process nodes
        that are in a list that is the targets attribute
        of some node.

    pre_xxx will process a node before processing any of its subnodes.
    if the return value from pre_xxx evalates to true, then walk
    will not process any of the subnodes. Those can be manually
    processed, if desired, by calling self.walk(node) on the subnodes
    before returning True.

    post_xxx will process a node after processing all its subnodes.

    init_xxx methods can decorate the class instance with subclass-specific
    information. A single init_whatever method could be written, but to
    make it easy to keep initialization with use, any number of init_xxx
    methods can be written. They will be called in alphabetical order.

    """

    def __init__(self, node=None):
        # Explicit stack of walk frames (see walk()); empty between walks.
        self.nodestack = []
        self.setup()
        if node is not None:
            self.walk(node)

    def setup(self):
        """All the node-specific handlers are setup at
        object initialization time.

        """
        # Map handler suffix (class name or '<attr>_name') -> bound method.
        self.pre_handlers = pre_handlers = {}
        self.post_handlers = post_handlers = {}
        # sorted() guarantees init_* methods run in alphabetical order.
        for name in sorted(vars(type(self))):
            if name.startswith('init_'):
                getattr(self, name)()
            elif name.startswith('pre_'):
                pre_handlers[name[4:]] = getattr(self, name)
            elif name.startswith('post_'):
                post_handlers[name[5:]] = getattr(self, name)

    def walk(self, node, name='', list=list, len=len, type=type):
        """Walk the tree starting at a given node.

        Maintain a stack of nodes.

        Each stack frame is a mutable list
        ``[node, attribute_name, child_pairs, next_child_index]`` where
        an index of -1 means the node has not yet been pre-processed.

        """
        pre_handlers = self.pre_handlers.get
        post_handlers = self.post_handlers.get
        nodestack = self.nodestack
        # Remember the entry depth so nested walk() calls unwind correctly.
        emptystack = len(nodestack)
        append, pop = nodestack.append, nodestack.pop
        append([node, name, list(iter_node(node, name + '_item')), -1])
        while len(nodestack) > emptystack:
            node, name, subnodes, index = nodestack[-1]
            if index >= len(subnodes):
                # All children visited: run the post handler (class-name
                # handler takes precedence over the attribute-name one).
                handler = (post_handlers(type(node).__name__) or
                           post_handlers(name + '_name'))
                if handler is None:
                    pop()
                    continue
                self.cur_node = node
                self.cur_name = name
                handler()
                # The handler may have mutated the stack (e.g. replace());
                # only pop if the top frame is still this node, exhausted.
                current = nodestack and nodestack[-1]
                popstack = current and current[0] is node
                if popstack and current[-1] >= len(current[-2]):
                    pop()
                continue
            nodestack[-1][-1] = index + 1
            if index < 0:
                # First visit: run the pre handler; a truthy return value
                # prunes this subtree from the walk.
                handler = (pre_handlers(type(node).__name__) or
                           pre_handlers(name + '_name'))
                if handler is not None:
                    self.cur_node = node
                    self.cur_name = name
                    if handler():
                        pop()
            else:
                # Descend into the next child.
                node, name = subnodes[index]
                append([node, name, list(iter_node(node, name + '_item')), -1])

    @property
    def parent(self):
        """Return the parent node of the current node."""
        nodestack = self.nodestack
        if len(nodestack) < 2:
            return None
        return nodestack[-2][0]

    @property
    def parent_name(self):
        """Return the parent node and name."""
        nodestack = self.nodestack
        if len(nodestack) < 2:
            return None
        return nodestack[-2][:2]

    def replace(self, new_node):
        """Replace a node after first checking integrity of node stack."""
        cur_node = self.cur_node
        nodestack = self.nodestack
        cur = nodestack.pop()
        prev = nodestack[-1]
        # prev[-1] was already advanced past the current child.
        index = prev[-1] - 1
        oldnode, name = prev[-2][index]
        assert cur[0] is cur_node is oldnode, (cur[0], cur_node, prev[-2],
                                               index)
        parent = prev[0]
        if isinstance(parent, list):
            parent[index] = new_node
        else:
            setattr(parent, name, new_node)
.venv/lib/python3.11/site-packages/fastapi/__init__.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """FastAPI framework, high performance, easy to learn, fast to code, ready for production"""
2
+
3
+ __version__ = "0.115.8"
4
+
5
+ from starlette import status as status
6
+
7
+ from .applications import FastAPI as FastAPI
8
+ from .background import BackgroundTasks as BackgroundTasks
9
+ from .datastructures import UploadFile as UploadFile
10
+ from .exceptions import HTTPException as HTTPException
11
+ from .exceptions import WebSocketException as WebSocketException
12
+ from .param_functions import Body as Body
13
+ from .param_functions import Cookie as Cookie
14
+ from .param_functions import Depends as Depends
15
+ from .param_functions import File as File
16
+ from .param_functions import Form as Form
17
+ from .param_functions import Header as Header
18
+ from .param_functions import Path as Path
19
+ from .param_functions import Query as Query
20
+ from .param_functions import Security as Security
21
+ from .requests import Request as Request
22
+ from .responses import Response as Response
23
+ from .routing import APIRouter as APIRouter
24
+ from .websockets import WebSocket as WebSocket
25
+ from .websockets import WebSocketDisconnect as WebSocketDisconnect
.venv/lib/python3.11/site-packages/fastapi/__main__.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
# Entry point for ``python -m fastapi``: delegate to the FastAPI CLI.
from fastapi.cli import main

main()
.venv/lib/python3.11/site-packages/fastapi/__pycache__/__main__.cpython-311.pyc ADDED
Binary file (263 Bytes). View file
 
.venv/lib/python3.11/site-packages/fastapi/__pycache__/applications.cpython-311.pyc ADDED
Binary file (92.5 kB). View file
 
.venv/lib/python3.11/site-packages/fastapi/__pycache__/background.cpython-311.pyc ADDED
Binary file (2.56 kB). View file
 
.venv/lib/python3.11/site-packages/fastapi/__pycache__/datastructures.cpython-311.pyc ADDED
Binary file (9.39 kB). View file
 
.venv/lib/python3.11/site-packages/fastapi/__pycache__/logger.cpython-311.pyc ADDED
Binary file (278 Bytes). View file
 
.venv/lib/python3.11/site-packages/fastapi/__pycache__/param_functions.cpython-311.pyc ADDED
Binary file (41.4 kB). View file
 
.venv/lib/python3.11/site-packages/fastapi/__pycache__/requests.cpython-311.pyc ADDED
Binary file (297 Bytes). View file
 
.venv/lib/python3.11/site-packages/fastapi/__pycache__/templating.cpython-311.pyc ADDED
Binary file (256 Bytes). View file
 
.venv/lib/python3.11/site-packages/fastapi/__pycache__/testclient.cpython-311.pyc ADDED
Binary file (251 Bytes). View file
 
.venv/lib/python3.11/site-packages/fastapi/__pycache__/websockets.cpython-311.pyc ADDED
Binary file (361 Bytes). View file
 
.venv/lib/python3.11/site-packages/fastapi/_compat.py ADDED
@@ -0,0 +1,659 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from collections import deque
2
+ from copy import copy
3
+ from dataclasses import dataclass, is_dataclass
4
+ from enum import Enum
5
+ from functools import lru_cache
6
+ from typing import (
7
+ Any,
8
+ Callable,
9
+ Deque,
10
+ Dict,
11
+ FrozenSet,
12
+ List,
13
+ Mapping,
14
+ Sequence,
15
+ Set,
16
+ Tuple,
17
+ Type,
18
+ Union,
19
+ )
20
+
21
+ from fastapi.exceptions import RequestErrorModel
22
+ from fastapi.types import IncEx, ModelNameMap, UnionType
23
+ from pydantic import BaseModel, create_model
24
+ from pydantic.version import VERSION as PYDANTIC_VERSION
25
+ from starlette.datastructures import UploadFile
26
+ from typing_extensions import Annotated, Literal, get_args, get_origin
27
+
28
# First two components of the installed pydantic version, e.g. (2, 5).
PYDANTIC_VERSION_MINOR_TUPLE = tuple(int(x) for x in PYDANTIC_VERSION.split(".")[:2])
# True when running against pydantic v2; selects the compat branch below.
PYDANTIC_V2 = PYDANTIC_VERSION_MINOR_TUPLE[0] == 2


# Maps typing/builtin sequence annotations to the concrete container type
# used to materialize validated values.
sequence_annotation_to_type = {
    Sequence: list,
    List: list,
    list: list,
    Tuple: tuple,
    tuple: tuple,
    Set: set,
    set: set,
    FrozenSet: frozenset,
    frozenset: frozenset,
    Deque: deque,
    deque: deque,
}

# Tuple form for isinstance()/issubclass() checks.
sequence_types = tuple(sequence_annotation_to_type.keys())

# Re-exported URL type; bound below to pydantic_core.Url (v2) or AnyUrl (v1).
Url: Type[Any]
49
+
50
+ if PYDANTIC_V2:
51
+ from pydantic import PydanticSchemaGenerationError as PydanticSchemaGenerationError
52
+ from pydantic import TypeAdapter
53
+ from pydantic import ValidationError as ValidationError
54
+ from pydantic._internal._schema_generation_shared import ( # type: ignore[attr-defined]
55
+ GetJsonSchemaHandler as GetJsonSchemaHandler,
56
+ )
57
+ from pydantic._internal._typing_extra import eval_type_lenient
58
+ from pydantic._internal._utils import lenient_issubclass as lenient_issubclass
59
+ from pydantic.fields import FieldInfo
60
+ from pydantic.json_schema import GenerateJsonSchema as GenerateJsonSchema
61
+ from pydantic.json_schema import JsonSchemaValue as JsonSchemaValue
62
+ from pydantic_core import CoreSchema as CoreSchema
63
+ from pydantic_core import PydanticUndefined, PydanticUndefinedType
64
+ from pydantic_core import Url as Url
65
+
66
+ try:
67
+ from pydantic_core.core_schema import (
68
+ with_info_plain_validator_function as with_info_plain_validator_function,
69
+ )
70
+ except ImportError: # pragma: no cover
71
+ from pydantic_core.core_schema import (
72
+ general_plain_validator_function as with_info_plain_validator_function, # noqa: F401
73
+ )
74
+
75
+ RequiredParam = PydanticUndefined
76
+ Undefined = PydanticUndefined
77
+ UndefinedType = PydanticUndefinedType
78
+ evaluate_forwardref = eval_type_lenient
79
+ Validator = Any
80
+
81
+ class BaseConfig:
82
+ pass
83
+
84
+ class ErrorWrapper(Exception):
85
+ pass
86
+
87
+ @dataclass
88
+ class ModelField:
89
+ field_info: FieldInfo
90
+ name: str
91
+ mode: Literal["validation", "serialization"] = "validation"
92
+
93
+ @property
94
+ def alias(self) -> str:
95
+ a = self.field_info.alias
96
+ return a if a is not None else self.name
97
+
98
+ @property
99
+ def required(self) -> bool:
100
+ return self.field_info.is_required()
101
+
102
+ @property
103
+ def default(self) -> Any:
104
+ return self.get_default()
105
+
106
+ @property
107
+ def type_(self) -> Any:
108
+ return self.field_info.annotation
109
+
110
+ def __post_init__(self) -> None:
111
+ self._type_adapter: TypeAdapter[Any] = TypeAdapter(
112
+ Annotated[self.field_info.annotation, self.field_info]
113
+ )
114
+
115
+ def get_default(self) -> Any:
116
+ if self.field_info.is_required():
117
+ return Undefined
118
+ return self.field_info.get_default(call_default_factory=True)
119
+
120
+ def validate(
121
+ self,
122
+ value: Any,
123
+ values: Dict[str, Any] = {}, # noqa: B006
124
+ *,
125
+ loc: Tuple[Union[int, str], ...] = (),
126
+ ) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
127
+ try:
128
+ return (
129
+ self._type_adapter.validate_python(value, from_attributes=True),
130
+ None,
131
+ )
132
+ except ValidationError as exc:
133
+ return None, _regenerate_error_with_loc(
134
+ errors=exc.errors(include_url=False), loc_prefix=loc
135
+ )
136
+
137
+ def serialize(
138
+ self,
139
+ value: Any,
140
+ *,
141
+ mode: Literal["json", "python"] = "json",
142
+ include: Union[IncEx, None] = None,
143
+ exclude: Union[IncEx, None] = None,
144
+ by_alias: bool = True,
145
+ exclude_unset: bool = False,
146
+ exclude_defaults: bool = False,
147
+ exclude_none: bool = False,
148
+ ) -> Any:
149
+ # What calls this code passes a value that already called
150
+ # self._type_adapter.validate_python(value)
151
+ return self._type_adapter.dump_python(
152
+ value,
153
+ mode=mode,
154
+ include=include,
155
+ exclude=exclude,
156
+ by_alias=by_alias,
157
+ exclude_unset=exclude_unset,
158
+ exclude_defaults=exclude_defaults,
159
+ exclude_none=exclude_none,
160
+ )
161
+
162
+ def __hash__(self) -> int:
163
+ # Each ModelField is unique for our purposes, to allow making a dict from
164
+ # ModelField to its JSON Schema.
165
+ return id(self)
166
+
167
+ def get_annotation_from_field_info(
168
+ annotation: Any, field_info: FieldInfo, field_name: str
169
+ ) -> Any:
170
+ return annotation
171
+
172
+ def _normalize_errors(errors: Sequence[Any]) -> List[Dict[str, Any]]:
173
+ return errors # type: ignore[return-value]
174
+
175
+ def _model_rebuild(model: Type[BaseModel]) -> None:
176
+ model.model_rebuild()
177
+
178
+ def _model_dump(
179
+ model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any
180
+ ) -> Any:
181
+ return model.model_dump(mode=mode, **kwargs)
182
+
183
+ def _get_model_config(model: BaseModel) -> Any:
184
+ return model.model_config
185
+
186
+ def get_schema_from_model_field(
187
+ *,
188
+ field: ModelField,
189
+ schema_generator: GenerateJsonSchema,
190
+ model_name_map: ModelNameMap,
191
+ field_mapping: Dict[
192
+ Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
193
+ ],
194
+ separate_input_output_schemas: bool = True,
195
+ ) -> Dict[str, Any]:
196
+ override_mode: Union[Literal["validation"], None] = (
197
+ None if separate_input_output_schemas else "validation"
198
+ )
199
+ # This expects that GenerateJsonSchema was already used to generate the definitions
200
+ json_schema = field_mapping[(field, override_mode or field.mode)]
201
+ if "$ref" not in json_schema:
202
+ # TODO remove when deprecating Pydantic v1
203
+ # Ref: https://github.com/pydantic/pydantic/blob/d61792cc42c80b13b23e3ffa74bc37ec7c77f7d1/pydantic/schema.py#L207
204
+ json_schema["title"] = (
205
+ field.field_info.title or field.alias.title().replace("_", " ")
206
+ )
207
+ return json_schema
208
+
209
+ def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap:
210
+ return {}
211
+
212
+ def get_definitions(
213
+ *,
214
+ fields: List[ModelField],
215
+ schema_generator: GenerateJsonSchema,
216
+ model_name_map: ModelNameMap,
217
+ separate_input_output_schemas: bool = True,
218
+ ) -> Tuple[
219
+ Dict[
220
+ Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
221
+ ],
222
+ Dict[str, Dict[str, Any]],
223
+ ]:
224
+ override_mode: Union[Literal["validation"], None] = (
225
+ None if separate_input_output_schemas else "validation"
226
+ )
227
+ inputs = [
228
+ (field, override_mode or field.mode, field._type_adapter.core_schema)
229
+ for field in fields
230
+ ]
231
+ field_mapping, definitions = schema_generator.generate_definitions(
232
+ inputs=inputs
233
+ )
234
+ return field_mapping, definitions # type: ignore[return-value]
235
+
236
+ def is_scalar_field(field: ModelField) -> bool:
237
+ from fastapi import params
238
+
239
+ return field_annotation_is_scalar(
240
+ field.field_info.annotation
241
+ ) and not isinstance(field.field_info, params.Body)
242
+
243
+ def is_sequence_field(field: ModelField) -> bool:
244
+ return field_annotation_is_sequence(field.field_info.annotation)
245
+
246
+ def is_scalar_sequence_field(field: ModelField) -> bool:
247
+ return field_annotation_is_scalar_sequence(field.field_info.annotation)
248
+
249
+ def is_bytes_field(field: ModelField) -> bool:
250
+ return is_bytes_or_nonable_bytes_annotation(field.type_)
251
+
252
+ def is_bytes_sequence_field(field: ModelField) -> bool:
253
+ return is_bytes_sequence_annotation(field.type_)
254
+
255
+ def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo:
256
+ cls = type(field_info)
257
+ merged_field_info = cls.from_annotation(annotation)
258
+ new_field_info = copy(field_info)
259
+ new_field_info.metadata = merged_field_info.metadata
260
+ new_field_info.annotation = merged_field_info.annotation
261
+ return new_field_info
262
+
263
+ def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
264
+ origin_type = (
265
+ get_origin(field.field_info.annotation) or field.field_info.annotation
266
+ )
267
+ assert issubclass(origin_type, sequence_types) # type: ignore[arg-type]
268
+ return sequence_annotation_to_type[origin_type](value) # type: ignore[no-any-return]
269
+
270
+ def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]:
271
+ error = ValidationError.from_exception_data(
272
+ "Field required", [{"type": "missing", "loc": loc, "input": {}}]
273
+ ).errors(include_url=False)[0]
274
+ error["input"] = None
275
+ return error # type: ignore[return-value]
276
+
277
+ def create_body_model(
278
+ *, fields: Sequence[ModelField], model_name: str
279
+ ) -> Type[BaseModel]:
280
+ field_params = {f.name: (f.field_info.annotation, f.field_info) for f in fields}
281
+ BodyModel: Type[BaseModel] = create_model(model_name, **field_params) # type: ignore[call-overload]
282
+ return BodyModel
283
+
284
+ def get_model_fields(model: Type[BaseModel]) -> List[ModelField]:
285
+ return [
286
+ ModelField(field_info=field_info, name=name)
287
+ for name, field_info in model.model_fields.items()
288
+ ]
289
+
290
+ else:
291
+ from fastapi.openapi.constants import REF_PREFIX as REF_PREFIX
292
+ from pydantic import AnyUrl as Url # noqa: F401
293
+ from pydantic import ( # type: ignore[assignment]
294
+ BaseConfig as BaseConfig, # noqa: F401
295
+ )
296
+ from pydantic import ValidationError as ValidationError # noqa: F401
297
+ from pydantic.class_validators import ( # type: ignore[no-redef]
298
+ Validator as Validator, # noqa: F401
299
+ )
300
+ from pydantic.error_wrappers import ( # type: ignore[no-redef]
301
+ ErrorWrapper as ErrorWrapper, # noqa: F401
302
+ )
303
+ from pydantic.errors import MissingError
304
+ from pydantic.fields import ( # type: ignore[attr-defined]
305
+ SHAPE_FROZENSET,
306
+ SHAPE_LIST,
307
+ SHAPE_SEQUENCE,
308
+ SHAPE_SET,
309
+ SHAPE_SINGLETON,
310
+ SHAPE_TUPLE,
311
+ SHAPE_TUPLE_ELLIPSIS,
312
+ )
313
+ from pydantic.fields import FieldInfo as FieldInfo
314
+ from pydantic.fields import ( # type: ignore[no-redef,attr-defined]
315
+ ModelField as ModelField, # noqa: F401
316
+ )
317
+
318
+ # Keeping old "Required" functionality from Pydantic V1, without
319
+ # shadowing typing.Required.
320
+ RequiredParam: Any = Ellipsis # type: ignore[no-redef]
321
+ from pydantic.fields import ( # type: ignore[no-redef,attr-defined]
322
+ Undefined as Undefined,
323
+ )
324
+ from pydantic.fields import ( # type: ignore[no-redef, attr-defined]
325
+ UndefinedType as UndefinedType, # noqa: F401
326
+ )
327
+ from pydantic.schema import (
328
+ field_schema,
329
+ get_flat_models_from_fields,
330
+ get_model_name_map,
331
+ model_process_schema,
332
+ )
333
+ from pydantic.schema import ( # type: ignore[no-redef] # noqa: F401
334
+ get_annotation_from_field_info as get_annotation_from_field_info,
335
+ )
336
+ from pydantic.typing import ( # type: ignore[no-redef]
337
+ evaluate_forwardref as evaluate_forwardref, # noqa: F401
338
+ )
339
+ from pydantic.utils import ( # type: ignore[no-redef]
340
+ lenient_issubclass as lenient_issubclass, # noqa: F401
341
+ )
342
+
343
+ GetJsonSchemaHandler = Any # type: ignore[assignment,misc]
344
+ JsonSchemaValue = Dict[str, Any] # type: ignore[misc]
345
+ CoreSchema = Any # type: ignore[assignment,misc]
346
+
347
+ sequence_shapes = {
348
+ SHAPE_LIST,
349
+ SHAPE_SET,
350
+ SHAPE_FROZENSET,
351
+ SHAPE_TUPLE,
352
+ SHAPE_SEQUENCE,
353
+ SHAPE_TUPLE_ELLIPSIS,
354
+ }
355
+ sequence_shape_to_type = {
356
+ SHAPE_LIST: list,
357
+ SHAPE_SET: set,
358
+ SHAPE_TUPLE: tuple,
359
+ SHAPE_SEQUENCE: list,
360
+ SHAPE_TUPLE_ELLIPSIS: list,
361
+ }
362
+
363
+ @dataclass
364
+ class GenerateJsonSchema: # type: ignore[no-redef]
365
+ ref_template: str
366
+
367
+ class PydanticSchemaGenerationError(Exception): # type: ignore[no-redef]
368
+ pass
369
+
370
+ def with_info_plain_validator_function( # type: ignore[misc]
371
+ function: Callable[..., Any],
372
+ *,
373
+ ref: Union[str, None] = None,
374
+ metadata: Any = None,
375
+ serialization: Any = None,
376
+ ) -> Any:
377
+ return {}
378
+
379
+ def get_model_definitions(
380
+ *,
381
+ flat_models: Set[Union[Type[BaseModel], Type[Enum]]],
382
+ model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str],
383
+ ) -> Dict[str, Any]:
384
+ definitions: Dict[str, Dict[str, Any]] = {}
385
+ for model in flat_models:
386
+ m_schema, m_definitions, m_nested_models = model_process_schema(
387
+ model, model_name_map=model_name_map, ref_prefix=REF_PREFIX
388
+ )
389
+ definitions.update(m_definitions)
390
+ model_name = model_name_map[model]
391
+ if "description" in m_schema:
392
+ m_schema["description"] = m_schema["description"].split("\f")[0]
393
+ definitions[model_name] = m_schema
394
+ return definitions
395
+
396
+ def is_pv1_scalar_field(field: ModelField) -> bool:
397
+ from fastapi import params
398
+
399
+ field_info = field.field_info
400
+ if not (
401
+ field.shape == SHAPE_SINGLETON # type: ignore[attr-defined]
402
+ and not lenient_issubclass(field.type_, BaseModel)
403
+ and not lenient_issubclass(field.type_, dict)
404
+ and not field_annotation_is_sequence(field.type_)
405
+ and not is_dataclass(field.type_)
406
+ and not isinstance(field_info, params.Body)
407
+ ):
408
+ return False
409
+ if field.sub_fields: # type: ignore[attr-defined]
410
+ if not all(
411
+ is_pv1_scalar_field(f)
412
+ for f in field.sub_fields # type: ignore[attr-defined]
413
+ ):
414
+ return False
415
+ return True
416
+
417
+ def is_pv1_scalar_sequence_field(field: ModelField) -> bool:
418
+ if (field.shape in sequence_shapes) and not lenient_issubclass( # type: ignore[attr-defined]
419
+ field.type_, BaseModel
420
+ ):
421
+ if field.sub_fields is not None: # type: ignore[attr-defined]
422
+ for sub_field in field.sub_fields: # type: ignore[attr-defined]
423
+ if not is_pv1_scalar_field(sub_field):
424
+ return False
425
+ return True
426
+ if _annotation_is_sequence(field.type_):
427
+ return True
428
+ return False
429
+
430
+ def _normalize_errors(errors: Sequence[Any]) -> List[Dict[str, Any]]:
431
+ use_errors: List[Any] = []
432
+ for error in errors:
433
+ if isinstance(error, ErrorWrapper):
434
+ new_errors = ValidationError( # type: ignore[call-arg]
435
+ errors=[error], model=RequestErrorModel
436
+ ).errors()
437
+ use_errors.extend(new_errors)
438
+ elif isinstance(error, list):
439
+ use_errors.extend(_normalize_errors(error))
440
+ else:
441
+ use_errors.append(error)
442
+ return use_errors
443
+
444
+ def _model_rebuild(model: Type[BaseModel]) -> None:
445
+ model.update_forward_refs()
446
+
447
+ def _model_dump(
448
+ model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any
449
+ ) -> Any:
450
+ return model.dict(**kwargs)
451
+
452
+ def _get_model_config(model: BaseModel) -> Any:
453
+ return model.__config__ # type: ignore[attr-defined]
454
+
455
+ def get_schema_from_model_field(
456
+ *,
457
+ field: ModelField,
458
+ schema_generator: GenerateJsonSchema,
459
+ model_name_map: ModelNameMap,
460
+ field_mapping: Dict[
461
+ Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
462
+ ],
463
+ separate_input_output_schemas: bool = True,
464
+ ) -> Dict[str, Any]:
465
+ # This expects that GenerateJsonSchema was already used to generate the definitions
466
+ return field_schema( # type: ignore[no-any-return]
467
+ field, model_name_map=model_name_map, ref_prefix=REF_PREFIX
468
+ )[0]
469
+
470
+ def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap:
471
+ models = get_flat_models_from_fields(fields, known_models=set())
472
+ return get_model_name_map(models) # type: ignore[no-any-return]
473
+
474
+ def get_definitions(
475
+ *,
476
+ fields: List[ModelField],
477
+ schema_generator: GenerateJsonSchema,
478
+ model_name_map: ModelNameMap,
479
+ separate_input_output_schemas: bool = True,
480
+ ) -> Tuple[
481
+ Dict[
482
+ Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
483
+ ],
484
+ Dict[str, Dict[str, Any]],
485
+ ]:
486
+ models = get_flat_models_from_fields(fields, known_models=set())
487
+ return {}, get_model_definitions(
488
+ flat_models=models, model_name_map=model_name_map
489
+ )
490
+
491
def is_scalar_field(field: ModelField) -> bool:
    """Pydantic v1: True if the field holds a scalar (non-complex) value."""
    return is_pv1_scalar_field(field)


def is_sequence_field(field: ModelField) -> bool:
    """Pydantic v1: True if the field is sequence-shaped or sequence-annotated."""
    return field.shape in sequence_shapes or _annotation_is_sequence(field.type_)  # type: ignore[attr-defined]


def is_scalar_sequence_field(field: ModelField) -> bool:
    """Pydantic v1: True if the field is a sequence of scalar values."""
    return is_pv1_scalar_sequence_field(field)


def is_bytes_field(field: ModelField) -> bool:
    """Pydantic v1: True if the field's type is (a subclass of) bytes."""
    return lenient_issubclass(field.type_, bytes)


def is_bytes_sequence_field(field: ModelField) -> bool:
    """Pydantic v1: True if the field is a sequence whose items are bytes."""
    return field.shape in sequence_shapes and lenient_issubclass(field.type_, bytes)  # type: ignore[attr-defined]


def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo:
    """Pydantic v1: shallow-copy the FieldInfo; `annotation` is unused in v1."""
    return copy(field_info)


def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
    """Pydantic v1: coerce `value` to the concrete sequence type of the field's shape."""
    return sequence_shape_to_type[field.shape](value)  # type: ignore[no-any-return,attr-defined]


def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]:
    """Pydantic v1: build a single "field required" error dict located at `loc`."""
    missing_field_error = ErrorWrapper(MissingError(), loc=loc)  # type: ignore[call-arg]
    new_error = ValidationError([missing_field_error], RequestErrorModel)
    return new_error.errors()[0]  # type: ignore[return-value]


def create_body_model(
    *, fields: Sequence[ModelField], model_name: str
) -> Type[BaseModel]:
    """Pydantic v1: create an ad-hoc body model and attach the given fields.

    NOTE(review): fields are injected directly into `__fields__`, bypassing the
    normal pydantic field-construction path — presumably intentional upstream.
    """
    BodyModel = create_model(model_name)
    for f in fields:
        BodyModel.__fields__[f.name] = f  # type: ignore[index]
    return BodyModel


def get_model_fields(model: Type[BaseModel]) -> List[ModelField]:
    """Pydantic v1: list the model's declared fields."""
    return list(model.__fields__.values())  # type: ignore[attr-defined]
527
+
528
+
529
def _regenerate_error_with_loc(
    *, errors: Sequence[Any], loc_prefix: Tuple[Union[str, int], ...]
) -> List[Dict[str, Any]]:
    """Normalize `errors` and prepend `loc_prefix` to each error's `loc` tuple."""
    prefixed: List[Any] = []
    for err in _normalize_errors(errors):
        prefixed.append({**err, "loc": loc_prefix + err.get("loc", ())})
    return prefixed
538
+
539
+
540
def _annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool:
    """True for sequence-like annotations, treating str/bytes as scalars."""
    return not lenient_issubclass(annotation, (str, bytes)) and lenient_issubclass(
        annotation, sequence_types
    )
544
+
545
+
546
def field_annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool:
    """True if the annotation (or any Union member) is sequence-like."""
    origin = get_origin(annotation)
    if origin is Union or origin is UnionType:
        # A Union is a sequence if any of its members is.
        return any(field_annotation_is_sequence(arg) for arg in get_args(annotation))
    # Check both the annotation itself and its generic origin (e.g. list for list[int]).
    return _annotation_is_sequence(annotation) or _annotation_is_sequence(origin)
556
+
557
+
558
def value_is_sequence(value: Any) -> bool:
    """Runtime sequence check that excludes str/bytes (treated as scalars)."""
    if isinstance(value, (str, bytes)):
        return False
    return isinstance(value, sequence_types)  # type: ignore[arg-type]
560
+
561
+
562
def _annotation_is_complex(annotation: Union[Type[Any], None]) -> bool:
    """True for annotations that need body parsing: models, mappings, files,
    sequences, and dataclasses."""
    if lenient_issubclass(annotation, (BaseModel, Mapping, UploadFile)):
        return True
    if _annotation_is_sequence(annotation):
        return True
    return is_dataclass(annotation)
568
+
569
+
570
def field_annotation_is_complex(annotation: Union[Type[Any], None]) -> bool:
    """True if the annotation (or any Union member) is a complex type."""
    origin = get_origin(annotation)
    if origin is Union or origin is UnionType:
        return any(field_annotation_is_complex(arg) for arg in get_args(annotation))

    if _annotation_is_complex(annotation) or _annotation_is_complex(origin):
        return True
    # Custom pydantic-aware types count as complex too.
    return hasattr(origin, "__pydantic_core_schema__") or hasattr(
        origin, "__get_pydantic_core_schema__"
    )
581
+
582
+
583
def field_annotation_is_scalar(annotation: Any) -> bool:
    """True if the annotation is scalar (not complex).

    Ellipsis counts as scalar so that `tuple[int, ...]` works nicely.
    """
    if annotation is Ellipsis:
        return True
    return not field_annotation_is_complex(annotation)
586
+
587
+
588
def field_annotation_is_scalar_sequence(annotation: Union[Type[Any], None]) -> bool:
    """True if the annotation is a sequence whose items are all scalar."""
    origin = get_origin(annotation)
    if origin is Union or origin is UnionType:
        # A Union qualifies if at least one member is a scalar sequence and
        # every other member is at least scalar.
        found_scalar_sequence = False
        for member in get_args(annotation):
            if field_annotation_is_scalar_sequence(member):
                found_scalar_sequence = True
            elif not field_annotation_is_scalar(member):
                return False
        return found_scalar_sequence
    if not field_annotation_is_sequence(annotation):
        return False
    return all(
        field_annotation_is_scalar(sub_annotation)
        for sub_annotation in get_args(annotation)
    )
603
+
604
+
605
def is_bytes_or_nonable_bytes_annotation(annotation: Any) -> bool:
    """True for `bytes` itself or a Union with a bytes member (e.g. Optional[bytes])."""
    if lenient_issubclass(annotation, bytes):
        return True
    origin = get_origin(annotation)
    if origin is Union or origin is UnionType:
        if any(lenient_issubclass(arg, bytes) for arg in get_args(annotation)):
            return True
    return False
614
+
615
+
616
def is_uploadfile_or_nonable_uploadfile_annotation(annotation: Any) -> bool:
    """True for `UploadFile` or a Union with an UploadFile member."""
    if lenient_issubclass(annotation, UploadFile):
        return True
    origin = get_origin(annotation)
    if origin is Union or origin is UnionType:
        if any(lenient_issubclass(arg, UploadFile) for arg in get_args(annotation)):
            return True
    return False
625
+
626
+
627
def is_bytes_sequence_annotation(annotation: Any) -> bool:
    """True for a sequence annotation whose items are (nonable) bytes."""
    origin = get_origin(annotation)
    if origin is Union or origin is UnionType:
        # NOTE: mirrors the original — union members that don't match are
        # ignored rather than rejected.
        return any(is_bytes_sequence_annotation(arg) for arg in get_args(annotation))
    if not field_annotation_is_sequence(annotation):
        return False
    return all(
        is_bytes_or_nonable_bytes_annotation(sub_annotation)
        for sub_annotation in get_args(annotation)
    )
640
+
641
+
642
def is_uploadfile_sequence_annotation(annotation: Any) -> bool:
    """True for a sequence annotation whose items are (nonable) UploadFile."""
    origin = get_origin(annotation)
    if origin is Union or origin is UnionType:
        # NOTE: mirrors the original — union members that don't match are
        # ignored rather than rejected.
        return any(
            is_uploadfile_sequence_annotation(arg) for arg in get_args(annotation)
        )
    if not field_annotation_is_sequence(annotation):
        return False
    return all(
        is_uploadfile_or_nonable_uploadfile_annotation(sub_annotation)
        for sub_annotation in get_args(annotation)
    )
655
+
656
+
657
@lru_cache
def get_cached_model_fields(model: Type[BaseModel]) -> List[ModelField]:
    """Memoized `get_model_fields`; safe because a model class's fields are static."""
    return get_model_fields(model)
.venv/lib/python3.11/site-packages/fastapi/applications.py ADDED
The diff for this file is too large to render. See raw diff
 
.venv/lib/python3.11/site-packages/fastapi/background.py ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Any, Callable
2
+
3
+ from starlette.background import BackgroundTasks as StarletteBackgroundTasks
4
+ from typing_extensions import Annotated, Doc, ParamSpec
5
+
6
+ P = ParamSpec("P")
7
+
8
+
9
class BackgroundTasks(StarletteBackgroundTasks):
    """
    A collection of background tasks that will be called after a response has been
    sent to the client.

    Read more about it in the
    [FastAPI docs for Background Tasks](https://fastapi.tiangolo.com/tutorial/background-tasks/).

    ## Example

    ```python
    from fastapi import BackgroundTasks, FastAPI

    app = FastAPI()


    def write_notification(email: str, message=""):
        with open("log.txt", mode="w") as email_file:
            content = f"notification for {email}: {message}"
            email_file.write(content)


    @app.post("/send-notification/{email}")
    async def send_notification(email: str, background_tasks: BackgroundTasks):
        background_tasks.add_task(write_notification, email, message="some notification")
        return {"message": "Notification sent in the background"}
    ```
    """

    def add_task(
        self,
        func: Annotated[
            Callable[P, Any],
            Doc(
                """
                The function to call after the response is sent.

                It can be a regular `def` function or an `async def` function.
                """
            ),
        ],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> None:
        """
        Add a function to be called in the background after the response is sent.

        Read more about it in the
        [FastAPI docs for Background Tasks](https://fastapi.tiangolo.com/tutorial/background-tasks/).
        """
        # Pure delegation: this override exists only to attach documentation
        # and ParamSpec-based typing on top of Starlette's implementation.
        return super().add_task(func, *args, **kwargs)
.venv/lib/python3.11/site-packages/fastapi/cli.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
try:
    from fastapi_cli.cli import main as cli_main

except ImportError:  # pragma: no cover
    # fastapi-cli is an optional dependency; `main` reports how to install it.
    cli_main = None  # type: ignore


def main() -> None:
    """Entry point for the `fastapi` command; requires the optional fastapi-cli."""
    if not cli_main:  # type: ignore[truthy-function]
        message = 'To use the fastapi command, please install "fastapi[standard]":\n\n\tpip install "fastapi[standard]"\n'
        print(message)
        raise RuntimeError(message)  # noqa: B904
    cli_main()
.venv/lib/python3.11/site-packages/fastapi/concurrency.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from contextlib import asynccontextmanager as asynccontextmanager
2
+ from typing import AsyncGenerator, ContextManager, TypeVar
3
+
4
+ import anyio.to_thread
5
+ from anyio import CapacityLimiter
6
+ from starlette.concurrency import iterate_in_threadpool as iterate_in_threadpool # noqa
7
+ from starlette.concurrency import run_in_threadpool as run_in_threadpool # noqa
8
+ from starlette.concurrency import ( # noqa
9
+ run_until_first_complete as run_until_first_complete,
10
+ )
11
+
12
+ _T = TypeVar("_T")
13
+
14
+
15
@asynccontextmanager
async def contextmanager_in_threadpool(
    cm: ContextManager[_T],
) -> AsyncGenerator[_T, None]:
    """Wrap a synchronous context manager so its (blocking) __enter__/__exit__
    run in the threadpool, making it usable from async code."""
    # blocking __exit__ from running waiting on a free thread
    # can create race conditions/deadlocks if the context manager itself
    # has its own internal pool (e.g. a database connection pool)
    # to avoid this we let __exit__ run without a capacity limit
    # since we're creating a new limiter for each call, any non-zero limit
    # works (1 is arbitrary)
    exit_limiter = CapacityLimiter(1)
    try:
        yield await run_in_threadpool(cm.__enter__)
    except Exception as e:
        ok = bool(
            await anyio.to_thread.run_sync(
                cm.__exit__, type(e), e, e.__traceback__, limiter=exit_limiter
            )
        )
        if not ok:
            # __exit__ did not suppress the exception, so propagate it.
            raise e
    else:
        await anyio.to_thread.run_sync(
            cm.__exit__, None, None, None, limiter=exit_limiter
        )
.venv/lib/python3.11/site-packages/fastapi/datastructures.py ADDED
@@ -0,0 +1,204 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import (
2
+ Any,
3
+ BinaryIO,
4
+ Callable,
5
+ Dict,
6
+ Iterable,
7
+ Optional,
8
+ Type,
9
+ TypeVar,
10
+ cast,
11
+ )
12
+
13
+ from fastapi._compat import (
14
+ PYDANTIC_V2,
15
+ CoreSchema,
16
+ GetJsonSchemaHandler,
17
+ JsonSchemaValue,
18
+ with_info_plain_validator_function,
19
+ )
20
+ from starlette.datastructures import URL as URL # noqa: F401
21
+ from starlette.datastructures import Address as Address # noqa: F401
22
+ from starlette.datastructures import FormData as FormData # noqa: F401
23
+ from starlette.datastructures import Headers as Headers # noqa: F401
24
+ from starlette.datastructures import QueryParams as QueryParams # noqa: F401
25
+ from starlette.datastructures import State as State # noqa: F401
26
+ from starlette.datastructures import UploadFile as StarletteUploadFile
27
+ from typing_extensions import Annotated, Doc
28
+
29
+
30
class UploadFile(StarletteUploadFile):
    """
    A file uploaded in a request.

    Define it as a *path operation function* (or dependency) parameter.

    If you are using a regular `def` function, you can use the `upload_file.file`
    attribute to access the raw standard Python file (blocking, not async), useful and
    needed for non-async code.

    Read more about it in the
    [FastAPI docs for Request Files](https://fastapi.tiangolo.com/tutorial/request-files/).

    ## Example

    ```python
    from typing import Annotated

    from fastapi import FastAPI, File, UploadFile

    app = FastAPI()


    @app.post("/files/")
    async def create_file(file: Annotated[bytes, File()]):
        return {"file_size": len(file)}


    @app.post("/uploadfile/")
    async def create_upload_file(file: UploadFile):
        return {"filename": file.filename}
    ```
    """

    file: Annotated[
        BinaryIO,
        Doc("The standard Python file object (non-async)."),
    ]
    filename: Annotated[Optional[str], Doc("The original file name.")]
    size: Annotated[Optional[int], Doc("The size of the file in bytes.")]
    headers: Annotated[Headers, Doc("The headers of the request.")]
    content_type: Annotated[
        Optional[str], Doc("The content type of the request, from the headers.")
    ]

    async def write(
        self,
        data: Annotated[
            bytes,
            Doc(
                """
                The bytes to write to the file.
                """
            ),
        ],
    ) -> None:
        """
        Write some bytes to the file.

        You normally wouldn't use this from a file you read in a request.

        To be awaitable, compatible with async, this is run in threadpool.
        """
        return await super().write(data)

    async def read(
        self,
        size: Annotated[
            int,
            Doc(
                """
                The number of bytes to read from the file.
                """
            ),
        ] = -1,
    ) -> bytes:
        """
        Read some bytes from the file.

        To be awaitable, compatible with async, this is run in threadpool.
        """
        return await super().read(size)

    async def seek(
        self,
        offset: Annotated[
            int,
            Doc(
                """
                The position in bytes to seek to in the file.
                """
            ),
        ],
    ) -> None:
        """
        Move to a position in the file.

        Any next read or write will be done from that position.

        To be awaitable, compatible with async, this is run in threadpool.
        """
        return await super().seek(offset)

    async def close(self) -> None:
        """
        Close the file.

        To be awaitable, compatible with async, this is run in threadpool.
        """
        return await super().close()

    @classmethod
    def __get_validators__(cls: Type["UploadFile"]) -> Iterable[Callable[..., Any]]:
        # Pydantic v1 validation hook.
        yield cls.validate

    @classmethod
    def validate(cls: Type["UploadFile"], v: Any) -> Any:
        # Pydantic v1 validator: only accept real Starlette UploadFile objects.
        if not isinstance(v, StarletteUploadFile):
            raise ValueError(f"Expected UploadFile, received: {type(v)}")
        return v

    @classmethod
    def _validate(cls, __input_value: Any, _: Any) -> "UploadFile":
        # Pydantic v2 validator (see __get_pydantic_core_schema__ below).
        if not isinstance(__input_value, StarletteUploadFile):
            raise ValueError(f"Expected UploadFile, received: {type(__input_value)}")
        return cast(UploadFile, __input_value)

    if not PYDANTIC_V2:

        @classmethod
        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
            # Pydantic v1 JSON-schema hook.
            field_schema.update({"type": "string", "format": "binary"})

    @classmethod
    def __get_pydantic_json_schema__(
        cls, core_schema: CoreSchema, handler: GetJsonSchemaHandler
    ) -> JsonSchemaValue:
        # Pydantic v2 JSON-schema hook.
        return {"type": "string", "format": "binary"}

    @classmethod
    def __get_pydantic_core_schema__(
        cls, source: Type[Any], handler: Callable[[Any], CoreSchema]
    ) -> CoreSchema:
        # Pydantic v2 core-schema hook: validate via _validate, no serialization.
        return with_info_plain_validator_function(cls._validate)
+
175
+
176
class DefaultPlaceholder:
    """
    You shouldn't use this class directly.

    It's used internally to recognize when a default value has been overwritten, even
    if the overridden default value was truthy.
    """

    def __init__(self, value: Any):
        # The wrapped default value; truthiness of the placeholder follows it.
        self.value = value

    def __bool__(self) -> bool:
        return bool(self.value)

    def __eq__(self, o: object) -> bool:
        # Only equal to another placeholder wrapping an equal value; never
        # equal to the bare value itself.
        return isinstance(o, DefaultPlaceholder) and o.value == self.value

    def __repr__(self) -> str:
        # Added for debuggability; no callers rely on the default object repr.
        return f"DefaultPlaceholder({self.value!r})"
192
+
193
+
194
DefaultType = TypeVar("DefaultType")


def Default(value: DefaultType) -> DefaultType:
    """
    You shouldn't use this function directly.

    It's used internally to recognize when a default value has been overwritten, even
    if the overridden default value was truthy.
    """
    # The placeholder is intentionally mis-typed as DefaultType so callers'
    # annotations stay clean; FastAPI unwraps it internally.
    placeholder = DefaultPlaceholder(value)
    return placeholder  # type: ignore
.venv/lib/python3.11/site-packages/fastapi/encoders.py ADDED
@@ -0,0 +1,343 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import dataclasses
2
+ import datetime
3
+ from collections import defaultdict, deque
4
+ from decimal import Decimal
5
+ from enum import Enum
6
+ from ipaddress import (
7
+ IPv4Address,
8
+ IPv4Interface,
9
+ IPv4Network,
10
+ IPv6Address,
11
+ IPv6Interface,
12
+ IPv6Network,
13
+ )
14
+ from pathlib import Path, PurePath
15
+ from re import Pattern
16
+ from types import GeneratorType
17
+ from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
18
+ from uuid import UUID
19
+
20
+ from fastapi.types import IncEx
21
+ from pydantic import BaseModel
22
+ from pydantic.color import Color
23
+ from pydantic.networks import AnyUrl, NameEmail
24
+ from pydantic.types import SecretBytes, SecretStr
25
+ from typing_extensions import Annotated, Doc
26
+
27
+ from ._compat import PYDANTIC_V2, UndefinedType, Url, _model_dump
28
+
29
+
30
# Taken from Pydantic v1 as is
def isoformat(o: Union[datetime.date, datetime.time]) -> str:
    """Serialize a date/time/datetime value to its ISO-8601 string form."""
    result = o.isoformat()
    return result
33
+
34
+
35
# Taken from Pydantic v1 as is
# TODO: pv2 should this return strings instead?
def decimal_encoder(dec_value: Decimal) -> Union[int, float]:
    """
    Encodes a Decimal as int of there's no exponent, otherwise float

    This is useful when we use ConstrainedDecimal to represent Numeric(x,0)
    where a integer (but not int typed) is used. Encoding this as a float
    results in failed round-tripping between encode and parse.
    Our Id type is a prime example of this.

    >>> decimal_encoder(Decimal("1.0"))
    1.0

    >>> decimal_encoder(Decimal("1"))
    1
    """
    exponent = dec_value.as_tuple().exponent
    # A non-negative exponent means the value is integral.
    return int(dec_value) if exponent >= 0 else float(dec_value)  # type: ignore[operator]
56
+
57
+
58
# Maps concrete types to JSON-safe converter callables. jsonable_encoder
# consults this table by exact type first, then by isinstance via the
# generated encoders_by_class_tuples groups.
ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = {
    bytes: lambda o: o.decode(),
    Color: str,
    datetime.date: isoformat,
    datetime.datetime: isoformat,
    datetime.time: isoformat,
    datetime.timedelta: lambda td: td.total_seconds(),
    Decimal: decimal_encoder,
    Enum: lambda o: o.value,
    frozenset: list,
    deque: list,
    GeneratorType: list,
    IPv4Address: str,
    IPv4Interface: str,
    IPv4Network: str,
    IPv6Address: str,
    IPv6Interface: str,
    IPv6Network: str,
    NameEmail: str,
    Path: str,
    Pattern: lambda o: o.pattern,
    SecretBytes: str,
    SecretStr: str,
    set: list,
    UUID: str,
    Url: str,
    AnyUrl: str,
}
86
+
87
+
88
def generate_encoders_by_class_tuples(
    type_encoder_map: Dict[Any, Callable[[Any], Any]],
) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]:
    """Invert a type->encoder map into encoder->(types, ...) groups.

    The resulting tuples are suitable for `isinstance(obj, classes_tuple)`
    dispatch; it is a defaultdict, so unknown encoders map to ().
    """
    grouped: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple)
    for mapped_type, mapped_encoder in type_encoder_map.items():
        grouped[mapped_encoder] = grouped[mapped_encoder] + (mapped_type,)
    return grouped
97
+
98
+
99
+ encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE)
100
+
101
+
102
def jsonable_encoder(
    obj: Annotated[
        Any,
        Doc(
            """
            The input object to convert to JSON.
            """
        ),
    ],
    include: Annotated[
        Optional[IncEx],
        Doc(
            """
            Pydantic's `include` parameter, passed to Pydantic models to set the
            fields to include.
            """
        ),
    ] = None,
    exclude: Annotated[
        Optional[IncEx],
        Doc(
            """
            Pydantic's `exclude` parameter, passed to Pydantic models to set the
            fields to exclude.
            """
        ),
    ] = None,
    by_alias: Annotated[
        bool,
        Doc(
            """
            Pydantic's `by_alias` parameter, passed to Pydantic models to define if
            the output should use the alias names (when provided) or the Python
            attribute names. In an API, if you set an alias, it's probably because you
            want to use it in the result, so you probably want to leave this set to
            `True`.
            """
        ),
    ] = True,
    exclude_unset: Annotated[
        bool,
        Doc(
            """
            Pydantic's `exclude_unset` parameter, passed to Pydantic models to define
            if it should exclude from the output the fields that were not explicitly
            set (and that only had their default values).
            """
        ),
    ] = False,
    exclude_defaults: Annotated[
        bool,
        Doc(
            """
            Pydantic's `exclude_defaults` parameter, passed to Pydantic models to define
            if it should exclude from the output the fields that had the same default
            value, even when they were explicitly set.
            """
        ),
    ] = False,
    exclude_none: Annotated[
        bool,
        Doc(
            """
            Pydantic's `exclude_none` parameter, passed to Pydantic models to define
            if it should exclude from the output any fields that have a `None` value.
            """
        ),
    ] = False,
    custom_encoder: Annotated[
        Optional[Dict[Any, Callable[[Any], Any]]],
        Doc(
            """
            Pydantic's `custom_encoder` parameter, passed to Pydantic models to define
            a custom encoder.
            """
        ),
    ] = None,
    sqlalchemy_safe: Annotated[
        bool,
        Doc(
            """
            Exclude from the output any fields that start with the name `_sa`.

            This is mainly a hack for compatibility with SQLAlchemy objects, they
            store internal SQLAlchemy-specific state in attributes named with `_sa`,
            and those objects can't (and shouldn't be) serialized to JSON.
            """
        ),
    ] = True,
) -> Any:
    """
    Convert any object to something that can be encoded in JSON.

    This is used internally by FastAPI to make sure anything you return can be
    encoded as JSON before it is sent to the client.

    You can also use it yourself, for example to convert objects before saving them
    in a database that supports only JSON.

    Read more about it in the
    [FastAPI docs for JSON Compatible Encoder](https://fastapi.tiangolo.com/tutorial/encoder/).
    """
    # Custom encoders win over every built-in rule: exact type match first,
    # then isinstance match.
    custom_encoder = custom_encoder or {}
    if custom_encoder:
        if type(obj) in custom_encoder:
            return custom_encoder[type(obj)](obj)
        else:
            for encoder_type, encoder_instance in custom_encoder.items():
                if isinstance(obj, encoder_type):
                    return encoder_instance(obj)
    if include is not None and not isinstance(include, (set, dict)):
        include = set(include)
    if exclude is not None and not isinstance(exclude, (set, dict)):
        exclude = set(exclude)
    if isinstance(obj, BaseModel):
        # TODO: remove when deprecating Pydantic v1
        encoders: Dict[Any, Any] = {}
        if not PYDANTIC_V2:
            encoders = getattr(obj.__config__, "json_encoders", {})  # type: ignore[attr-defined]
            if custom_encoder:
                encoders.update(custom_encoder)
        obj_dict = _model_dump(
            obj,
            mode="json",
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_none=exclude_none,
            exclude_defaults=exclude_defaults,
        )
        # Pydantic v1 custom root types dump under "__root__"; unwrap them.
        if "__root__" in obj_dict:
            obj_dict = obj_dict["__root__"]
        return jsonable_encoder(
            obj_dict,
            exclude_none=exclude_none,
            exclude_defaults=exclude_defaults,
            # TODO: remove when deprecating Pydantic v1
            custom_encoder=encoders,
            sqlalchemy_safe=sqlalchemy_safe,
        )
    if dataclasses.is_dataclass(obj):
        obj_dict = dataclasses.asdict(obj)
        return jsonable_encoder(
            obj_dict,
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            custom_encoder=custom_encoder,
            sqlalchemy_safe=sqlalchemy_safe,
        )
    if isinstance(obj, Enum):
        return obj.value
    if isinstance(obj, PurePath):
        return str(obj)
    if isinstance(obj, (str, int, float, type(None))):
        # Already JSON-native; return as-is.
        return obj
    if isinstance(obj, UndefinedType):
        return None
    if isinstance(obj, dict):
        encoded_dict = {}
        allowed_keys = set(obj.keys())
        # include/exclude only filter the top level of a plain dict.
        if include is not None:
            allowed_keys &= set(include)
        if exclude is not None:
            allowed_keys -= set(exclude)
        for key, value in obj.items():
            if (
                (
                    not sqlalchemy_safe
                    or (not isinstance(key, str))
                    or (not key.startswith("_sa"))
                )
                and (value is not None or not exclude_none)
                and key in allowed_keys
            ):
                encoded_key = jsonable_encoder(
                    key,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
                encoded_value = jsonable_encoder(
                    value,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
                encoded_dict[encoded_key] = encoded_value
        return encoded_dict
    if isinstance(obj, (list, set, frozenset, GeneratorType, tuple, deque)):
        encoded_list = []
        for item in obj:
            encoded_list.append(
                jsonable_encoder(
                    item,
                    include=include,
                    exclude=exclude,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_defaults=exclude_defaults,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
            )
        return encoded_list

    # Fall back to the registered type-specific encoders.
    if type(obj) in ENCODERS_BY_TYPE:
        return ENCODERS_BY_TYPE[type(obj)](obj)
    for encoder, classes_tuple in encoders_by_class_tuples.items():
        if isinstance(obj, classes_tuple):
            return encoder(obj)

    # Last resort: try to turn the object into a dict (mapping protocol, then
    # vars()); if both fail, surface every attempt's error.
    try:
        data = dict(obj)
    except Exception as e:
        errors: List[Exception] = []
        errors.append(e)
        try:
            data = vars(obj)
        except Exception as e:
            errors.append(e)
            raise ValueError(errors) from e
    return jsonable_encoder(
        data,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        custom_encoder=custom_encoder,
        sqlalchemy_safe=sqlalchemy_safe,
    )
.venv/lib/python3.11/site-packages/fastapi/exception_handlers.py ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi.encoders import jsonable_encoder
2
+ from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError
3
+ from fastapi.utils import is_body_allowed_for_status_code
4
+ from fastapi.websockets import WebSocket
5
+ from starlette.exceptions import HTTPException
6
+ from starlette.requests import Request
7
+ from starlette.responses import JSONResponse, Response
8
+ from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY, WS_1008_POLICY_VIOLATION
9
+
10
+
11
async def http_exception_handler(request: Request, exc: HTTPException) -> Response:
    """Render an HTTPException as JSON, or an empty response when the status
    code does not allow a body."""
    headers = getattr(exc, "headers", None)
    if is_body_allowed_for_status_code(exc.status_code):
        return JSONResponse(
            {"detail": exc.detail}, status_code=exc.status_code, headers=headers
        )
    return Response(status_code=exc.status_code, headers=headers)
18
+
19
+
20
async def request_validation_exception_handler(
    request: Request, exc: RequestValidationError
) -> JSONResponse:
    """Return a 422 response listing each request-validation error."""
    encoded_errors = jsonable_encoder(exc.errors())
    return JSONResponse(
        status_code=HTTP_422_UNPROCESSABLE_ENTITY,
        content={"detail": encoded_errors},
    )
27
+
28
+
29
async def websocket_request_validation_exception_handler(
    websocket: WebSocket, exc: WebSocketRequestValidationError
) -> None:
    """Close the websocket with a policy-violation code and the encoded errors."""
    reason = jsonable_encoder(exc.errors())
    await websocket.close(code=WS_1008_POLICY_VIOLATION, reason=reason)
.venv/lib/python3.11/site-packages/fastapi/exceptions.py ADDED
@@ -0,0 +1,176 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Any, Dict, Optional, Sequence, Type, Union
2
+
3
+ from pydantic import BaseModel, create_model
4
+ from starlette.exceptions import HTTPException as StarletteHTTPException
5
+ from starlette.exceptions import WebSocketException as StarletteWebSocketException
6
+ from typing_extensions import Annotated, Doc
7
+
8
+
9
class HTTPException(StarletteHTTPException):
    """
    An HTTP exception you can raise in your own code to show errors to the client.

    This is for client errors, invalid authentication, invalid data, etc. Not for server
    errors in your code.

    Read more about it in the
    [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/).

    ## Example

    ```python
    from fastapi import FastAPI, HTTPException

    app = FastAPI()

    items = {"foo": "The Foo Wrestlers"}


    @app.get("/items/{item_id}")
    async def read_item(item_id: str):
        if item_id not in items:
            raise HTTPException(status_code=404, detail="Item not found")
        return {"item": items[item_id]}
    ```
    """

    def __init__(
        self,
        status_code: Annotated[
            int,
            Doc(
                """
                HTTP status code to send to the client.
                """
            ),
        ],
        detail: Annotated[
            Any,
            Doc(
                """
                Any data to be sent to the client in the `detail` key of the JSON
                response.
                """
            ),
        ] = None,
        headers: Annotated[
            Optional[Dict[str, str]],
            Doc(
                """
                Any headers to send to the client in the response.
                """
            ),
        ] = None,
    ) -> None:
        # Pure delegation: this subclass exists to attach documentation/typing
        # on top of Starlette's HTTPException.
        super().__init__(status_code=status_code, detail=detail, headers=headers)
66
+
67
+
68
class WebSocketException(StarletteWebSocketException):
    """
    A WebSocket exception you can raise in your own code to show errors to the client.

    This is for client errors, invalid authentication, invalid data, etc. Not for server
    errors in your code.

    Read more about it in the
    [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/).

    ## Example

    ```python
    from typing import Annotated

    from fastapi import (
        Cookie,
        FastAPI,
        WebSocket,
        WebSocketException,
        status,
    )

    app = FastAPI()

    @app.websocket("/items/{item_id}/ws")
    async def websocket_endpoint(
        *,
        websocket: WebSocket,
        session: Annotated[str | None, Cookie()] = None,
        item_id: str,
    ):
        if session is None:
            raise WebSocketException(code=status.WS_1008_POLICY_VIOLATION)
        await websocket.accept()
        while True:
            data = await websocket.receive_text()
            await websocket.send_text(f"Session cookie is: {session}")
            await websocket.send_text(f"Message text was: {data}, for item ID: {item_id}")
    ```
    """

    def __init__(
        self,
        code: Annotated[
            int,
            Doc(
                """
                A closing code from the
                [valid codes defined in the specification](https://datatracker.ietf.org/doc/html/rfc6455#section-7.4.1).
                """
            ),
        ],
        reason: Annotated[
            Union[str, None],
            Doc(
                """
                The reason to close the WebSocket connection.

                It is UTF-8-encoded data. The interpretation of the reason is up to the
                application, it is not specified by the WebSocket specification.

                It could contain text that could be human-readable or interpretable
                by the client code, etc.
                """
            ),
        ] = None,
    ) -> None:
        # Pure delegation: this subclass exists to attach documentation/typing
        # on top of Starlette's WebSocketException.
        super().__init__(code=code, reason=reason)
137
+
138
+
139
+ RequestErrorModel: Type[BaseModel] = create_model("Request")
140
+ WebSocketErrorModel: Type[BaseModel] = create_model("WebSocket")
141
+
142
+
143
class FastAPIError(RuntimeError):
    """
    A generic, FastAPI-specific error.
    """


class ValidationException(Exception):
    # Base class for request/response validation failures; wraps the raw
    # sequence of error dicts produced by pydantic.
    def __init__(self, errors: Sequence[Any]) -> None:
        self._errors = errors

    def errors(self) -> Sequence[Any]:
        """Return the wrapped sequence of validation errors."""
        return self._errors


class RequestValidationError(ValidationException):
    # Raised when validating the incoming request fails; `body` carries the
    # raw request body for error reporting.
    def __init__(self, errors: Sequence[Any], *, body: Any = None) -> None:
        super().__init__(errors)
        self.body = body


class WebSocketRequestValidationError(ValidationException):
    # Same as RequestValidationError, but raised for WebSocket endpoints.
    pass
165
+
166
+
167
class ResponseValidationError(ValidationException):
    """Raised when the value returned by a path operation fails validation
    against the declared response model; `body` carries the offending value."""

    def __init__(self, errors: Sequence[Any], *, body: Any = None) -> None:
        super().__init__(errors)
        self.body = body

    def __str__(self) -> str:
        # One header line followed by one indented line per error.
        lines = [f"{len(self._errors)} validation errors:"]
        lines.extend(f" {err}" for err in self._errors)
        return "\n".join(lines) + "\n"
.venv/lib/python3.11/site-packages/fastapi/logger.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
import logging

# Shared package-level logger; all FastAPI internals log under the
# "fastapi" namespace so applications can configure it in one place.
logger = logging.getLogger("fastapi")
.venv/lib/python3.11/site-packages/fastapi/middleware/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ from starlette.middleware import Middleware as Middleware
.venv/lib/python3.11/site-packages/fastapi/middleware/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (260 Bytes). View file
 
.venv/lib/python3.11/site-packages/fastapi/middleware/__pycache__/cors.cpython-311.pyc ADDED
Binary file (265 Bytes). View file
 
.venv/lib/python3.11/site-packages/fastapi/middleware/__pycache__/gzip.cpython-311.pyc ADDED
Binary file (265 Bytes). View file
 
.venv/lib/python3.11/site-packages/fastapi/middleware/__pycache__/httpsredirect.cpython-311.pyc ADDED
Binary file (308 Bytes). View file
 
.venv/lib/python3.11/site-packages/fastapi/middleware/__pycache__/trustedhost.cpython-311.pyc ADDED
Binary file (302 Bytes). View file
 
.venv/lib/python3.11/site-packages/fastapi/middleware/__pycache__/wsgi.cpython-311.pyc ADDED
Binary file (265 Bytes). View file