koichi12 commited on
Commit
e57c5ab
·
verified ·
1 Parent(s): 73e7686

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/__pycache__/Parsing.cpython-311.pyc +3 -0
  3. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tests/TestCodeWriter.py +128 -0
  4. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tests/TestJediTyper.py +225 -0
  5. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tests/TestShadow.py +79 -0
  6. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tests/__pycache__/TestCythonUtils.cpython-311.pyc +0 -0
  7. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tests/__pycache__/TestTestUtils.cpython-311.pyc +0 -0
  8. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/CConvert.pyx +134 -0
  9. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/CMath.c +95 -0
  10. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/Complex.c +366 -0
  11. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/CythonFunction.c +1810 -0
  12. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/Dataclasses.c +188 -0
  13. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/Embed.c +255 -0
  14. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/MemoryView_C.c +987 -0
  15. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/ModuleSetupCode.c +2366 -0
  16. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/ObjectHandling.c +0 -0
  17. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/TestCyUtilityLoader.pyx +8 -0
  18. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/TestCythonScope.pyx +70 -0
  19. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/TestUtilityLoader.c +12 -0
  20. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/__pycache__/Dataclasses.cpython-311.pyc +0 -0
  21. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/__pycache__/__init__.cpython-311.pyc +0 -0
  22. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc +0 -0
  23. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc +0 -0
  24. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc +0 -0
  25. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/debug.cpython-311.pyc +0 -0
  26. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc +0 -0
  27. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc +0 -0
  28. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/ext.cpython-311.pyc +0 -0
  29. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/filters.cpython-311.pyc +0 -0
  30. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc +0 -0
  31. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc +0 -0
  32. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/nativetypes.cpython-311.pyc +0 -0
  33. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc +0 -0
  34. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc +0 -0
  35. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/parser.cpython-311.pyc +0 -0
  36. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/sandbox.cpython-311.pyc +0 -0
  37. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/tests.cpython-311.pyc +0 -0
  38. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/utils.cpython-311.pyc +0 -0
  39. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/bccache.py +406 -0
  40. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/compiler.py +1956 -0
  41. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/debug.py +191 -0
  42. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/defaults.py +48 -0
  43. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/exceptions.py +166 -0
  44. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/filters.py +1854 -0
  45. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/idtracking.py +318 -0
  46. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/lexer.py +866 -0
  47. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/meta.py +111 -0
  48. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/nativetypes.py +130 -0
  49. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/nodes.py +1204 -0
  50. tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/optimizer.py +47 -0
.gitattributes CHANGED
@@ -41,3 +41,4 @@ tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/DFA.c
41
  tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/FusedNode.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
42
  tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/Code.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
43
  tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/Parsing.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
 
 
41
  tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/FusedNode.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
42
  tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/Code.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
43
  tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/Parsing.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
44
+ tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/__pycache__/Parsing.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/__pycache__/Parsing.cpython-311.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:54e902406cc2baef92acba1b2573314b55348561d14dabe6ce3cbf870abfba28
3
+ size 182578
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tests/TestCodeWriter.py ADDED
@@ -0,0 +1,128 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from Cython.TestUtils import CythonTest
2
+
3
+ class TestCodeWriter(CythonTest):
4
+ # CythonTest uses the CodeWriter heavily, so do some checking by
5
+ # roundtripping Cython code through the test framework.
6
+
7
+ # Note that this test is dependent upon the normal Cython parser
8
+ # to generate the input trees to the CodeWriter. This save *a lot*
9
+ # of time; better to spend that time writing other tests than perfecting
10
+ # this one...
11
+
12
+ # Whitespace is very significant in this process:
13
+ # - always newline on new block (!)
14
+ # - indent 4 spaces
15
+ # - 1 space around every operator
16
+
17
+ def t(self, codestr):
18
+ self.assertCode(codestr, self.fragment(codestr).root)
19
+
20
+ def test_print(self):
21
+ self.t(u"""
22
+ print(x + y ** 2)
23
+ print(x, y, z)
24
+ print(x + y, x + y * z, x * (y + z))
25
+ """)
26
+
27
+ def test_if(self):
28
+ self.t(u"if x:\n pass")
29
+
30
+ def test_ifelifelse(self):
31
+ self.t(u"""
32
+ if x:
33
+ pass
34
+ elif y:
35
+ pass
36
+ elif z + 34 ** 34 - 2:
37
+ pass
38
+ else:
39
+ pass
40
+ """)
41
+
42
+ def test_def(self):
43
+ self.t(u"""
44
+ def f(x, y, z):
45
+ pass
46
+ def f(x = 34, y = 54, z):
47
+ pass
48
+ """)
49
+
50
+ def test_cdef(self):
51
+ self.t(u"""
52
+ cdef f(x, y, z):
53
+ pass
54
+ cdef public void (x = 34, y = 54, z):
55
+ pass
56
+ cdef f(int *x, void *y, Value *z):
57
+ pass
58
+ cdef f(int **x, void **y, Value **z):
59
+ pass
60
+ cdef inline f(int &x, Value &z):
61
+ pass
62
+ """)
63
+
64
+ def test_longness_and_signedness(self):
65
+ self.t(u"def f(unsigned long long long long long int y):\n pass")
66
+
67
+ def test_signed_short(self):
68
+ self.t(u"def f(signed short int y):\n pass")
69
+
70
+ def test_typed_args(self):
71
+ self.t(u"def f(int x, unsigned long int y):\n pass")
72
+
73
+ def test_cdef_var(self):
74
+ self.t(u"""
75
+ cdef int hello
76
+ cdef int hello = 4, x = 3, y, z
77
+ """)
78
+
79
+ def test_for_loop(self):
80
+ self.t(u"""
81
+ for x, y, z in f(g(h(34) * 2) + 23):
82
+ print(x, y, z)
83
+ else:
84
+ print(43)
85
+ """)
86
+ self.t(u"""
87
+ for abc in (1, 2, 3):
88
+ print(x, y, z)
89
+ else:
90
+ print(43)
91
+ """)
92
+
93
+ def test_while_loop(self):
94
+ self.t(u"""
95
+ while True:
96
+ while True:
97
+ while True:
98
+ continue
99
+ """)
100
+
101
+ def test_inplace_assignment(self):
102
+ self.t(u"x += 43")
103
+
104
+ def test_cascaded_assignment(self):
105
+ self.t(u"x = y = z = abc = 43")
106
+
107
+ def test_attribute(self):
108
+ self.t(u"a.x")
109
+
110
+ def test_return_none(self):
111
+ self.t(u"""
112
+ def f(x, y, z):
113
+ return
114
+ cdef f(x, y, z):
115
+ return
116
+ def f(x, y, z):
117
+ return None
118
+ cdef f(x, y, z):
119
+ return None
120
+ def f(x, y, z):
121
+ return 1234
122
+ cdef f(x, y, z):
123
+ return 1234
124
+ """)
125
+
126
+ if __name__ == "__main__":
127
+ import unittest
128
+ unittest.main()
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tests/TestJediTyper.py ADDED
@@ -0,0 +1,225 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ # tag: jedi
3
+
4
+ from __future__ import absolute_import
5
+
6
+ import sys
7
+ import os.path
8
+
9
+ from textwrap import dedent
10
+ from contextlib import contextmanager
11
+ from tempfile import NamedTemporaryFile
12
+
13
+ from Cython.Compiler.ParseTreeTransforms import NormalizeTree, InterpretCompilerDirectives
14
+ from Cython.Compiler import Main, Symtab, Visitor, Options
15
+ from Cython.TestUtils import TransformTest
16
+
17
+ TOOLS_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'Tools'))
18
+
19
+
20
+ @contextmanager
21
+ def _tempfile(code):
22
+ code = dedent(code)
23
+ if not isinstance(code, bytes):
24
+ code = code.encode('utf8')
25
+
26
+ with NamedTemporaryFile(suffix='.py') as f:
27
+ f.write(code)
28
+ f.seek(0)
29
+ yield f
30
+
31
+
32
+ def _test_typing(code, inject=False):
33
+ sys.path.insert(0, TOOLS_DIR)
34
+ try:
35
+ import jedityper
36
+ finally:
37
+ sys.path.remove(TOOLS_DIR)
38
+ lines = []
39
+ with _tempfile(code) as f:
40
+ types = jedityper.analyse(f.name)
41
+ if inject:
42
+ lines = jedityper.inject_types(f.name, types)
43
+ return types, lines
44
+
45
+
46
+ class DeclarationsFinder(Visitor.VisitorTransform):
47
+ directives = None
48
+
49
+ visit_Node = Visitor.VisitorTransform.recurse_to_children
50
+
51
+ def visit_CompilerDirectivesNode(self, node):
52
+ if not self.directives:
53
+ self.directives = []
54
+ self.directives.append(node)
55
+ self.visitchildren(node)
56
+ return node
57
+
58
+
59
+ class TestJediTyper(TransformTest):
60
+ def _test(self, code):
61
+ return _test_typing(code)[0]
62
+
63
+ def test_typing_global_int_loop(self):
64
+ code = '''\
65
+ for i in range(10):
66
+ a = i + 1
67
+ '''
68
+ types = self._test(code)
69
+ self.assertIn((None, (1, 0)), types)
70
+ variables = types.pop((None, (1, 0)))
71
+ self.assertFalse(types)
72
+ self.assertEqual({'a': set(['int']), 'i': set(['int'])}, variables)
73
+
74
+ def test_typing_function_int_loop(self):
75
+ code = '''\
76
+ def func(x):
77
+ for i in range(x):
78
+ a = i + 1
79
+ return a
80
+ '''
81
+ types = self._test(code)
82
+ self.assertIn(('func', (1, 0)), types)
83
+ variables = types.pop(('func', (1, 0)))
84
+ self.assertFalse(types)
85
+ self.assertEqual({'a': set(['int']), 'i': set(['int'])}, variables)
86
+
87
+ def test_conflicting_types_in_function(self):
88
+ code = '''\
89
+ def func(a, b):
90
+ print(a)
91
+ a = 1
92
+ b += a
93
+ a = 'abc'
94
+ return a, str(b)
95
+
96
+ print(func(1.5, 2))
97
+ '''
98
+ types = self._test(code)
99
+ self.assertIn(('func', (1, 0)), types)
100
+ variables = types.pop(('func', (1, 0)))
101
+ self.assertFalse(types)
102
+ self.assertEqual({'a': set(['float', 'int', 'str']), 'b': set(['int'])}, variables)
103
+
104
+ def _test_typing_function_char_loop(self):
105
+ code = '''\
106
+ def func(x):
107
+ l = []
108
+ for c in x:
109
+ l.append(c)
110
+ return l
111
+
112
+ print(func('abcdefg'))
113
+ '''
114
+ types = self._test(code)
115
+ self.assertIn(('func', (1, 0)), types)
116
+ variables = types.pop(('func', (1, 0)))
117
+ self.assertFalse(types)
118
+ self.assertEqual({'a': set(['int']), 'i': set(['int'])}, variables)
119
+
120
+ def test_typing_global_list(self):
121
+ code = '''\
122
+ a = [x for x in range(10)]
123
+ b = list(range(10))
124
+ c = a + b
125
+ d = [0]*10
126
+ '''
127
+ types = self._test(code)
128
+ self.assertIn((None, (1, 0)), types)
129
+ variables = types.pop((None, (1, 0)))
130
+ self.assertFalse(types)
131
+ self.assertEqual({'a': set(['list']), 'b': set(['list']), 'c': set(['list']), 'd': set(['list'])}, variables)
132
+
133
+ def test_typing_function_list(self):
134
+ code = '''\
135
+ def func(x):
136
+ a = [[], []]
137
+ b = [0]* 10 + a
138
+ c = a[0]
139
+
140
+ print(func([0]*100))
141
+ '''
142
+ types = self._test(code)
143
+ self.assertIn(('func', (1, 0)), types)
144
+ variables = types.pop(('func', (1, 0)))
145
+ self.assertFalse(types)
146
+ self.assertEqual({'a': set(['list']), 'b': set(['list']), 'c': set(['list']), 'x': set(['list'])}, variables)
147
+
148
+ def test_typing_global_dict(self):
149
+ code = '''\
150
+ a = dict()
151
+ b = {i: i**2 for i in range(10)}
152
+ c = a
153
+ '''
154
+ types = self._test(code)
155
+ self.assertIn((None, (1, 0)), types)
156
+ variables = types.pop((None, (1, 0)))
157
+ self.assertFalse(types)
158
+ self.assertEqual({'a': set(['dict']), 'b': set(['dict']), 'c': set(['dict'])}, variables)
159
+
160
+ def test_typing_function_dict(self):
161
+ code = '''\
162
+ def func(x):
163
+ a = dict()
164
+ b = {i: i**2 for i in range(10)}
165
+ c = x
166
+
167
+ print(func({1:2, 'x':7}))
168
+ '''
169
+ types = self._test(code)
170
+ self.assertIn(('func', (1, 0)), types)
171
+ variables = types.pop(('func', (1, 0)))
172
+ self.assertFalse(types)
173
+ self.assertEqual({'a': set(['dict']), 'b': set(['dict']), 'c': set(['dict']), 'x': set(['dict'])}, variables)
174
+
175
+
176
+ def test_typing_global_set(self):
177
+ code = '''\
178
+ a = set()
179
+ # b = {i for i in range(10)} # jedi does not support set comprehension yet
180
+ c = a
181
+ d = {1,2,3}
182
+ e = a | b
183
+ '''
184
+ types = self._test(code)
185
+ self.assertIn((None, (1, 0)), types)
186
+ variables = types.pop((None, (1, 0)))
187
+ self.assertFalse(types)
188
+ self.assertEqual({'a': set(['set']), 'c': set(['set']), 'd': set(['set']), 'e': set(['set'])}, variables)
189
+
190
+ def test_typing_function_set(self):
191
+ code = '''\
192
+ def func(x):
193
+ a = set()
194
+ # b = {i for i in range(10)} # jedi does not support set comprehension yet
195
+ c = a
196
+ d = a | b
197
+
198
+ print(func({1,2,3}))
199
+ '''
200
+ types = self._test(code)
201
+ self.assertIn(('func', (1, 0)), types)
202
+ variables = types.pop(('func', (1, 0)))
203
+ self.assertFalse(types)
204
+ self.assertEqual({'a': set(['set']), 'c': set(['set']), 'd': set(['set']), 'x': set(['set'])}, variables)
205
+
206
+
207
+ class TestTypeInjection(TestJediTyper):
208
+ """
209
+ Subtype of TestJediTyper that additionally tests type injection and compilation.
210
+ """
211
+ def setUp(self):
212
+ super(TestTypeInjection, self).setUp()
213
+ compilation_options = Options.CompilationOptions(Options.default_options)
214
+ ctx = Main.Context.from_options(compilation_options)
215
+ transform = InterpretCompilerDirectives(ctx, ctx.compiler_directives)
216
+ transform.module_scope = Symtab.ModuleScope('__main__', None, ctx)
217
+ self.declarations_finder = DeclarationsFinder()
218
+ self.pipeline = [NormalizeTree(None), transform, self.declarations_finder]
219
+
220
+ def _test(self, code):
221
+ types, lines = _test_typing(code, inject=True)
222
+ tree = self.run_pipeline(self.pipeline, ''.join(lines))
223
+ directives = self.declarations_finder.directives
224
+ # TODO: validate directives
225
+ return types
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tests/TestShadow.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import unittest
2
+
3
+ from Cython import Shadow
4
+ from Cython.Compiler import Options, CythonScope, PyrexTypes, Errors
5
+
6
+ class TestShadow(unittest.TestCase):
7
+ def test_all_types_in_shadow(self):
8
+ cython_scope = CythonScope.create_cython_scope(None)
9
+ # Not doing load_cythonscope at this stage because it requires a proper context and
10
+ # Errors.py to be set up
11
+
12
+ missing_types = []
13
+ for key in cython_scope.entries.keys():
14
+ if key.startswith('__') and key.endswith('__'):
15
+ continue
16
+ if key in ('PyTypeObject', 'PyObject_TypeCheck'):
17
+ # These are declared in Shadow.py for reasons that look to
18
+ # be an implementation detail, but it isn't our intention for
19
+ # users to access them from Pure Python mode.
20
+ continue
21
+ if not hasattr(Shadow, key):
22
+ missing_types.append(key)
23
+ self.assertEqual(missing_types, [])
24
+
25
+ def test_int_types_in_shadow(self):
26
+ missing_types = []
27
+ for int_name in Shadow.int_types:
28
+ for sign in ['', 'u', 's']:
29
+ name = sign + int_name
30
+
31
+ if sign and (
32
+ int_name in ['Py_UNICODE', 'Py_UCS4', 'Py_ssize_t',
33
+ 'ssize_t', 'ptrdiff_t', 'Py_hash_t'] or
34
+ name == "usize_t"):
35
+ # size_t is special-cased here a little since ssize_t legitimate
36
+ # but usize_t isn't
37
+ self.assertNotIn(name, dir(Shadow))
38
+ self.assertNotIn('p_' + name, dir(Shadow))
39
+ continue
40
+
41
+ if not hasattr(Shadow, name):
42
+ missing_types.append(name)
43
+
44
+ for ptr in range(1, 4):
45
+ ptr_name = 'p' * ptr + '_' + name
46
+ if not hasattr(Shadow, ptr_name):
47
+ missing_types.append(ptr_name)
48
+ self.assertEqual(missing_types, [])
49
+
50
+ def test_most_types(self):
51
+ # TODO it's unfortunately hard to get a definite list of types to confirm that they're
52
+ # present (because they're obtained by on-the-fly string parsing in `cython_scope.lookup_type`)
53
+
54
+ cython_scope = CythonScope.create_cython_scope(None)
55
+ # Set up just enough of "Context" and "Errors" that CythonScope.lookup_type can fail
56
+ class Context:
57
+ cpp = False
58
+ language_level = 3
59
+ future_directives = []
60
+ cython_scope.context = Context
61
+ Errors.init_thread()
62
+
63
+ missing_types = []
64
+ missing_lookups = []
65
+ for (signed, longness, name), type_ in PyrexTypes.modifiers_and_name_to_type.items():
66
+ if name == 'object':
67
+ continue # This probably shouldn't be in Shadow
68
+ if not hasattr(Shadow, name):
69
+ missing_types.append(name)
70
+ if not cython_scope.lookup_type(name):
71
+ missing_lookups.append(name)
72
+ for ptr in range(1, 4):
73
+ ptr_name = 'p' * ptr + '_' + name
74
+ if not hasattr(Shadow, ptr_name):
75
+ missing_types.append(ptr_name)
76
+ if not cython_scope.lookup_type(ptr_name):
77
+ missing_lookups.append(ptr_name)
78
+ self.assertEqual(missing_types, [])
79
+ self.assertEqual(missing_lookups, [])
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tests/__pycache__/TestCythonUtils.cpython-311.pyc ADDED
Binary file (12.9 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tests/__pycache__/TestTestUtils.cpython-311.pyc ADDED
Binary file (6.69 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/CConvert.pyx ADDED
@@ -0,0 +1,134 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #################### FromPyStructUtility ####################
2
+
3
+ cdef extern from *:
4
+ ctypedef struct PyTypeObject:
5
+ char* tp_name
6
+ PyTypeObject *Py_TYPE(obj)
7
+ bint PyMapping_Check(obj)
8
+ object PyErr_Format(exc, const char *format, ...)
9
+ int __Pyx_RaiseUnexpectedTypeError(const char *expected, object obj) except 0
10
+
11
+ @cname("{{funcname}}")
12
+ cdef {{struct_type}} {{funcname}}(obj) except *:
13
+ cdef {{struct_type}} result
14
+ if not PyMapping_Check(obj):
15
+ __Pyx_RaiseUnexpectedTypeError(b"a mapping", obj)
16
+
17
+ {{for member in var_entries:}}
18
+ try:
19
+ value = obj['{{member.name}}']
20
+ except KeyError:
21
+ raise ValueError("No value specified for struct attribute '{{member.name}}'")
22
+ result.{{member.name}} = value
23
+ {{endfor}}
24
+ return result
25
+
26
+
27
+ #################### FromPyUnionUtility ####################
28
+
29
+ cdef extern from *:
30
+ ctypedef struct PyTypeObject:
31
+ char* tp_name
32
+ PyTypeObject *Py_TYPE(obj)
33
+ bint PyMapping_Check(obj)
34
+ object PyErr_Format(exc, const char *format, ...)
35
+ int __Pyx_RaiseUnexpectedTypeError(const char *expected, object obj) except 0
36
+
37
+ @cname("{{funcname}}")
38
+ cdef {{struct_type}} {{funcname}}(obj) except *:
39
+ cdef {{struct_type}} result
40
+ cdef Py_ssize_t length
41
+ if not PyMapping_Check(obj):
42
+ __Pyx_RaiseUnexpectedTypeError(b"a mapping", obj)
43
+
44
+ last_found = None
45
+ length = len(obj)
46
+ if length:
47
+ {{for member in var_entries:}}
48
+ if '{{member.name}}' in obj:
49
+ if last_found is not None:
50
+ raise ValueError("More than one union attribute passed: '%s' and '%s'" % (last_found, '{{member.name}}'))
51
+ last_found = '{{member.name}}'
52
+ result.{{member.cname}} = obj['{{member.name}}']
53
+ length -= 1
54
+ if not length:
55
+ return result
56
+ {{endfor}}
57
+ if last_found is None:
58
+ raise ValueError("No value specified for any of the union attributes (%s)" %
59
+ '{{", ".join(member.name for member in var_entries)}}')
60
+ return result
61
+
62
+
63
+ #################### cfunc.to_py ####################
64
+
65
+ @cname("{{cname}}")
66
+ cdef object {{cname}}({{return_type.ctype}} (*f)({{ ', '.join(arg.type_cname for arg in args) }}) {{except_clause}}):
67
+ def wrap({{ ', '.join('{arg.ctype} {arg.name}'.format(arg=arg) for arg in args) }}):
68
+ """wrap({{', '.join(('{arg.name}: {arg.type_displayname}'.format(arg=arg) if arg.type_displayname else arg.name) for arg in args)}}){{if return_type.type_displayname}} -> {{return_type.type_displayname}}{{endif}}"""
69
+ {{'' if return_type.type.is_void else 'return '}}f({{ ', '.join(arg.name for arg in args) }})
70
+ return wrap
71
+
72
+
73
+ #################### carray.from_py ####################
74
+
75
+ cdef extern from *:
76
+ object PyErr_Format(exc, const char *format, ...)
77
+
78
+ @cname("{{cname}}")
79
+ cdef int {{cname}}(object o, {{base_type}} *v, Py_ssize_t length) except -1:
80
+ cdef Py_ssize_t i = length
81
+ try:
82
+ i = len(o)
83
+ except (TypeError, OverflowError):
84
+ pass
85
+ if i == length:
86
+ for i, item in enumerate(o):
87
+ if i >= length:
88
+ break
89
+ v[i] = item
90
+ else:
91
+ i += 1 # convert index to length
92
+ if i == length:
93
+ return 0
94
+
95
+ PyErr_Format(
96
+ IndexError,
97
+ ("too many values found during array assignment, expected %zd"
98
+ if i >= length else
99
+ "not enough values found during array assignment, expected %zd, got %zd"),
100
+ length, i)
101
+
102
+
103
+ #################### carray.to_py ####################
104
+
105
+ cdef extern from *:
106
+ void Py_INCREF(object o)
107
+ tuple PyTuple_New(Py_ssize_t size)
108
+ list PyList_New(Py_ssize_t size)
109
+ void PyTuple_SET_ITEM(object p, Py_ssize_t pos, object o)
110
+ void PyList_SET_ITEM(object p, Py_ssize_t pos, object o)
111
+
112
+
113
+ @cname("{{cname}}")
114
+ cdef inline list {{cname}}({{base_type}} *v, Py_ssize_t length):
115
+ cdef size_t i
116
+ cdef object value
117
+ l = PyList_New(length)
118
+ for i in range(<size_t>length):
119
+ value = v[i]
120
+ Py_INCREF(value)
121
+ PyList_SET_ITEM(l, i, value)
122
+ return l
123
+
124
+
125
+ @cname("{{to_tuple_cname}}")
126
+ cdef inline tuple {{to_tuple_cname}}({{base_type}} *v, Py_ssize_t length):
127
+ cdef size_t i
128
+ cdef object value
129
+ t = PyTuple_New(length)
130
+ for i in range(<size_t>length):
131
+ value = v[i]
132
+ Py_INCREF(value)
133
+ PyTuple_SET_ITEM(t, i, value)
134
+ return t
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/CMath.c ADDED
@@ -0,0 +1,95 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ /////////////// CDivisionWarning.proto ///////////////
3
+
4
+ static int __Pyx_cdivision_warning(const char *, int); /* proto */
5
+
6
+ /////////////// CDivisionWarning ///////////////
7
+
8
+ static int __Pyx_cdivision_warning(const char *filename, int lineno) {
9
+ #if CYTHON_COMPILING_IN_PYPY
10
+ // avoid compiler warnings
11
+ filename++; lineno++;
12
+ return PyErr_Warn(PyExc_RuntimeWarning,
13
+ "division with oppositely signed operands, C and Python semantics differ");
14
+ #else
15
+ return PyErr_WarnExplicit(PyExc_RuntimeWarning,
16
+ "division with oppositely signed operands, C and Python semantics differ",
17
+ filename,
18
+ lineno,
19
+ __Pyx_MODULE_NAME,
20
+ NULL);
21
+ #endif
22
+ }
23
+
24
+
25
+ /////////////// DivInt.proto ///////////////
26
+
27
+ static CYTHON_INLINE %(type)s __Pyx_div_%(type_name)s(%(type)s, %(type)s); /* proto */
28
+
29
+ /////////////// DivInt ///////////////
30
+
31
+ static CYTHON_INLINE %(type)s __Pyx_div_%(type_name)s(%(type)s a, %(type)s b) {
32
+ %(type)s q = a / b;
33
+ %(type)s r = a - q*b;
34
+ q -= ((r != 0) & ((r ^ b) < 0));
35
+ return q;
36
+ }
37
+
38
+
39
+ /////////////// ModInt.proto ///////////////
40
+
41
+ static CYTHON_INLINE %(type)s __Pyx_mod_%(type_name)s(%(type)s, %(type)s); /* proto */
42
+
43
+ /////////////// ModInt ///////////////
44
+
45
+ static CYTHON_INLINE %(type)s __Pyx_mod_%(type_name)s(%(type)s a, %(type)s b) {
46
+ %(type)s r = a %% b;
47
+ r += ((r != 0) & ((r ^ b) < 0)) * b;
48
+ return r;
49
+ }
50
+
51
+
52
+ /////////////// ModFloat.proto ///////////////
53
+
54
+ static CYTHON_INLINE %(type)s __Pyx_mod_%(type_name)s(%(type)s, %(type)s); /* proto */
55
+
56
+ /////////////// ModFloat ///////////////
57
+
58
+ static CYTHON_INLINE %(type)s __Pyx_mod_%(type_name)s(%(type)s a, %(type)s b) {
59
+ %(type)s r = fmod%(math_h_modifier)s(a, b);
60
+ r += ((r != 0) & ((r < 0) ^ (b < 0))) * b;
61
+ return r;
62
+ }
63
+
64
+
65
+ /////////////// IntPow.proto ///////////////
66
+
67
+ static CYTHON_INLINE %(type)s %(func_name)s(%(type)s, %(type)s); /* proto */
68
+
69
+ /////////////// IntPow ///////////////
70
+
71
+ static CYTHON_INLINE %(type)s %(func_name)s(%(type)s b, %(type)s e) {
72
+ %(type)s t = b;
73
+ switch (e) {
74
+ case 3:
75
+ t *= b;
76
+ CYTHON_FALLTHROUGH;
77
+ case 2:
78
+ t *= b;
79
+ CYTHON_FALLTHROUGH;
80
+ case 1:
81
+ return t;
82
+ case 0:
83
+ return 1;
84
+ }
85
+ #if %(signed)s
86
+ if (unlikely(e<0)) return 0;
87
+ #endif
88
+ t = 1;
89
+ while (likely(e)) {
90
+ t *= (b * (e&1)) | ((~e)&1); /* 1 or b */
91
+ b *= b;
92
+ e >>= 1;
93
+ }
94
+ return t;
95
+ }
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/Complex.c ADDED
@@ -0,0 +1,366 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /////////////// Header.proto ///////////////
2
+ //@proto_block: h_code
3
+
4
+ #if !defined(CYTHON_CCOMPLEX)
5
+ #if defined(__cplusplus)
6
+ #define CYTHON_CCOMPLEX 1
7
+ #elif (defined(_Complex_I) && !defined(_MSC_VER)) || ((defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L) && !defined(__STDC_NO_COMPLEX__) && !defined(_MSC_VER))
8
+ // <complex.h> should exist since C99, but only C11 defines a test to detect it.
9
+ // MSVC defines "_Complex_I" but not "_Complex". See https://github.com/cython/cython/issues/5512
10
+ #define CYTHON_CCOMPLEX 1
11
+ #else
12
+ #define CYTHON_CCOMPLEX 0
13
+ #endif
14
+ #endif
15
+
16
+ #if CYTHON_CCOMPLEX
17
+ #ifdef __cplusplus
18
+ #include <complex>
19
+ #else
20
+ #include <complex.h>
21
+ #endif
22
+ #endif
23
+
24
+ #if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__)
25
+ #undef _Complex_I
26
+ #define _Complex_I 1.0fj
27
+ #endif
28
+
29
+ /////////////// RealImag.proto ///////////////
30
+
31
+ #if CYTHON_CCOMPLEX
32
+ #ifdef __cplusplus
33
+ #define __Pyx_CREAL(z) ((z).real())
34
+ #define __Pyx_CIMAG(z) ((z).imag())
35
+ #else
36
+ #define __Pyx_CREAL(z) (__real__(z))
37
+ #define __Pyx_CIMAG(z) (__imag__(z))
38
+ #endif
39
+ #else
40
+ #define __Pyx_CREAL(z) ((z).real)
41
+ #define __Pyx_CIMAG(z) ((z).imag)
42
+ #endif
43
+
44
+ #if defined(__cplusplus) && CYTHON_CCOMPLEX \
45
+ && (defined(_WIN32) || defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5 || __GNUC__ == 4 && __GNUC_MINOR__ >= 4 )) || __cplusplus >= 201103)
46
+ #define __Pyx_SET_CREAL(z,x) ((z).real(x))
47
+ #define __Pyx_SET_CIMAG(z,y) ((z).imag(y))
48
+ #else
49
+ #define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x)
50
+ #define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y)
51
+ #endif
52
+
53
+ /////////////// RealImag_Cy.proto ///////////////
54
+
55
+ // alternative version of RealImag.proto for the case where
56
+ // we definitely want to force it to use the Cython utility
57
+ // code version of complex.
58
+ // Because integer complex types simply aren't covered by
59
+ // the C or C++ standards
60
+ // (although practically will probably work in C++).
61
+
62
+ #define __Pyx_CREAL_Cy(z) ((z).real)
63
+ #define __Pyx_CIMAG_Cy(z) ((z).imag)
64
+ #define __Pyx_SET_CREAL_Cy(z,x) __Pyx_CREAL_Cy(z) = (x)
65
+ #define __Pyx_SET_CIMAG_Cy(z,y) __Pyx_CIMAG_cy(z) = (y)
66
+
67
+ /////////////// RealImag_CyTypedef.proto //////////
68
+ //@requires: RealImag
69
+ //@requires: RealImag_Cy
70
+
71
+ #if __cplusplus
72
+ // C++ is fine with complexes based on typedefs because the template sees through them
73
+ #define __Pyx_CREAL_CyTypedef(z) __Pyx_CREAL(z)
74
+ #define __Pyx_CIMAG_CyTypedef(z) __Pyx_CIMAG(z)
75
+ #define __Pyx_SET_CREAL_CyTypedef(z,x) __Pyx_SET_CREAL(z)
76
+ #define __Pyx_SET_CIMAG_CyTypedef(z,x) __Pyx_SET_CIMAG(z)
77
+ #else
78
+ // C isn't
79
+ #define __Pyx_CREAL_CyTypedef(z) __Pyx_CREAL_Cy(z)
80
+ #define __Pyx_CIMAG_CyTypedef(z) __Pyx_CIMAG_Cy(z)
81
+ #define __Pyx_SET_CREAL_CyTypedef(z,x) __Pyx_SET_CREAL_Cy(z)
82
+ #define __Pyx_SET_CIMAG_CyTypedef(z,x) __Pyx_SET_CIMAG_Cy(z)
83
+ #endif
84
+
85
+ /////////////// Declarations.proto ///////////////
86
+ //@proto_block: complex_type_declarations
87
+
88
+ #if CYTHON_CCOMPLEX && ({{is_float}}) && (!{{is_extern_float_typedef}} || __cplusplus)
89
+ #ifdef __cplusplus
90
+ typedef ::std::complex< {{real_type}} > {{type_name}};
91
+ #else
92
+ typedef {{real_type}} _Complex {{type_name}};
93
+ #endif
94
+ #else
95
+ typedef struct { {{real_type}} real, imag; } {{type_name}};
96
+ #endif
97
+
98
+ static CYTHON_INLINE {{type}} {{type_name}}_from_parts({{real_type}}, {{real_type}});
99
+
100
+ /////////////// Declarations ///////////////
101
+
102
+ #if CYTHON_CCOMPLEX && ({{is_float}}) && (!{{is_extern_float_typedef}} || __cplusplus)
103
+ #ifdef __cplusplus
104
+ static CYTHON_INLINE {{type}} {{type_name}}_from_parts({{real_type}} x, {{real_type}} y) {
105
+ return ::std::complex< {{real_type}} >(x, y);
106
+ }
107
+ #else
108
+ static CYTHON_INLINE {{type}} {{type_name}}_from_parts({{real_type}} x, {{real_type}} y) {
109
+ return x + y*({{type}})_Complex_I;
110
+ }
111
+ #endif
112
+ #else
113
+ static CYTHON_INLINE {{type}} {{type_name}}_from_parts({{real_type}} x, {{real_type}} y) {
114
+ {{type}} z;
115
+ z.real = x;
116
+ z.imag = y;
117
+ return z;
118
+ }
119
+ #endif
120
+
121
+
122
+ /////////////// ToPy.proto ///////////////
123
+
124
+ {{py: func_suffix = "_CyTypedef" if is_extern_float_typedef else ("" if is_float else "_Cy")}}
125
+ #define __pyx_PyComplex_FromComplex{{func_suffix}}(z) \
126
+ PyComplex_FromDoubles((double)__Pyx_CREAL{{func_suffix}}(z), \
127
+ (double)__Pyx_CIMAG{{func_suffix}}(z))
128
+
129
+ /////////////// FromPy.proto ///////////////
130
+
131
+ static {{type}} __Pyx_PyComplex_As_{{type_name}}(PyObject*);
132
+
133
+ /////////////// FromPy ///////////////
134
+
135
+ static {{type}} __Pyx_PyComplex_As_{{type_name}}(PyObject* o) {
136
+ Py_complex cval;
137
+ #if !CYTHON_COMPILING_IN_PYPY
138
+ if (PyComplex_CheckExact(o))
139
+ cval = ((PyComplexObject *)o)->cval;
140
+ else
141
+ #endif
142
+ cval = PyComplex_AsCComplex(o);
143
+ return {{type_name}}_from_parts(
144
+ ({{real_type}})cval.real,
145
+ ({{real_type}})cval.imag);
146
+ }
147
+
148
+
149
+ /////////////// Arithmetic.proto ///////////////
150
+
151
+ #if CYTHON_CCOMPLEX && ({{is_float}}) && (!{{is_extern_float_typedef}} || __cplusplus)
152
+ #define __Pyx_c_eq{{func_suffix}}(a, b) ((a)==(b))
153
+ #define __Pyx_c_sum{{func_suffix}}(a, b) ((a)+(b))
154
+ #define __Pyx_c_diff{{func_suffix}}(a, b) ((a)-(b))
155
+ #define __Pyx_c_prod{{func_suffix}}(a, b) ((a)*(b))
156
+ #define __Pyx_c_quot{{func_suffix}}(a, b) ((a)/(b))
157
+ #define __Pyx_c_neg{{func_suffix}}(a) (-(a))
158
+ #ifdef __cplusplus
159
+ #define __Pyx_c_is_zero{{func_suffix}}(z) ((z)==({{real_type}})0)
160
+ #define __Pyx_c_conj{{func_suffix}}(z) (::std::conj(z))
161
+ #if {{is_float}}
162
+ #define __Pyx_c_abs{{func_suffix}}(z) (::std::abs(z))
163
+ #define __Pyx_c_pow{{func_suffix}}(a, b) (::std::pow(a, b))
164
+ #endif
165
+ #else
166
+ #define __Pyx_c_is_zero{{func_suffix}}(z) ((z)==0)
167
+ #define __Pyx_c_conj{{func_suffix}}(z) (conj{{m}}(z))
168
+ #if {{is_float}}
169
+ #define __Pyx_c_abs{{func_suffix}}(z) (cabs{{m}}(z))
170
+ #define __Pyx_c_pow{{func_suffix}}(a, b) (cpow{{m}}(a, b))
171
+ #endif
172
+ #endif
173
+ #else
174
+ static CYTHON_INLINE int __Pyx_c_eq{{func_suffix}}({{type}}, {{type}});
175
+ static CYTHON_INLINE {{type}} __Pyx_c_sum{{func_suffix}}({{type}}, {{type}});
176
+ static CYTHON_INLINE {{type}} __Pyx_c_diff{{func_suffix}}({{type}}, {{type}});
177
+ static CYTHON_INLINE {{type}} __Pyx_c_prod{{func_suffix}}({{type}}, {{type}});
178
+ static CYTHON_INLINE {{type}} __Pyx_c_quot{{func_suffix}}({{type}}, {{type}});
179
+ static CYTHON_INLINE {{type}} __Pyx_c_neg{{func_suffix}}({{type}});
180
+ static CYTHON_INLINE int __Pyx_c_is_zero{{func_suffix}}({{type}});
181
+ static CYTHON_INLINE {{type}} __Pyx_c_conj{{func_suffix}}({{type}});
182
+ #if {{is_float}}
183
+ static CYTHON_INLINE {{real_type}} __Pyx_c_abs{{func_suffix}}({{type}});
184
+ static CYTHON_INLINE {{type}} __Pyx_c_pow{{func_suffix}}({{type}}, {{type}});
185
+ #endif
186
+ #endif
187
+
188
+ /////////////// Arithmetic ///////////////
189
+
190
+ #if CYTHON_CCOMPLEX && ({{is_float}}) && (!{{is_extern_float_typedef}} || __cplusplus)
191
+ #else
192
+ static CYTHON_INLINE int __Pyx_c_eq{{func_suffix}}({{type}} a, {{type}} b) {
193
+ return (a.real == b.real) && (a.imag == b.imag);
194
+ }
195
+ static CYTHON_INLINE {{type}} __Pyx_c_sum{{func_suffix}}({{type}} a, {{type}} b) {
196
+ {{type}} z;
197
+ z.real = a.real + b.real;
198
+ z.imag = a.imag + b.imag;
199
+ return z;
200
+ }
201
+ static CYTHON_INLINE {{type}} __Pyx_c_diff{{func_suffix}}({{type}} a, {{type}} b) {
202
+ {{type}} z;
203
+ z.real = a.real - b.real;
204
+ z.imag = a.imag - b.imag;
205
+ return z;
206
+ }
207
+ static CYTHON_INLINE {{type}} __Pyx_c_prod{{func_suffix}}({{type}} a, {{type}} b) {
208
+ {{type}} z;
209
+ z.real = a.real * b.real - a.imag * b.imag;
210
+ z.imag = a.real * b.imag + a.imag * b.real;
211
+ return z;
212
+ }
213
+
214
+ #if {{is_float}}
215
+ static CYTHON_INLINE {{type}} __Pyx_c_quot{{func_suffix}}({{type}} a, {{type}} b) {
216
+ if (b.imag == 0) {
217
+ return {{type_name}}_from_parts(a.real / b.real, a.imag / b.real);
218
+ } else if (fabs{{m}}(b.real) >= fabs{{m}}(b.imag)) {
219
+ if (b.real == 0 && b.imag == 0) {
220
+ return {{type_name}}_from_parts(a.real / b.real, a.imag / b.imag);
221
+ } else {
222
+ {{real_type}} r = b.imag / b.real;
223
+ {{real_type}} s = ({{real_type}})(1.0) / (b.real + b.imag * r);
224
+ return {{type_name}}_from_parts(
225
+ (a.real + a.imag * r) * s, (a.imag - a.real * r) * s);
226
+ }
227
+ } else {
228
+ {{real_type}} r = b.real / b.imag;
229
+ {{real_type}} s = ({{real_type}})(1.0) / (b.imag + b.real * r);
230
+ return {{type_name}}_from_parts(
231
+ (a.real * r + a.imag) * s, (a.imag * r - a.real) * s);
232
+ }
233
+ }
234
+ #else
235
+ static CYTHON_INLINE {{type}} __Pyx_c_quot{{func_suffix}}({{type}} a, {{type}} b) {
236
+ if (b.imag == 0) {
237
+ return {{type_name}}_from_parts(a.real / b.real, a.imag / b.real);
238
+ } else {
239
+ {{real_type}} denom = b.real * b.real + b.imag * b.imag;
240
+ return {{type_name}}_from_parts(
241
+ (a.real * b.real + a.imag * b.imag) / denom,
242
+ (a.imag * b.real - a.real * b.imag) / denom);
243
+ }
244
+ }
245
+ #endif
246
+
247
+ static CYTHON_INLINE {{type}} __Pyx_c_neg{{func_suffix}}({{type}} a) {
248
+ {{type}} z;
249
+ z.real = -a.real;
250
+ z.imag = -a.imag;
251
+ return z;
252
+ }
253
+ static CYTHON_INLINE int __Pyx_c_is_zero{{func_suffix}}({{type}} a) {
254
+ return (a.real == 0) && (a.imag == 0);
255
+ }
256
+ static CYTHON_INLINE {{type}} __Pyx_c_conj{{func_suffix}}({{type}} a) {
257
+ {{type}} z;
258
+ z.real = a.real;
259
+ z.imag = -a.imag;
260
+ return z;
261
+ }
262
+ #if {{is_float}}
263
+ static CYTHON_INLINE {{real_type}} __Pyx_c_abs{{func_suffix}}({{type}} z) {
264
+ #if !defined(HAVE_HYPOT) || defined(_MSC_VER)
265
+ return sqrt{{m}}(z.real*z.real + z.imag*z.imag);
266
+ #else
267
+ return hypot{{m}}(z.real, z.imag);
268
+ #endif
269
+ }
270
+ static CYTHON_INLINE {{type}} __Pyx_c_pow{{func_suffix}}({{type}} a, {{type}} b) {
271
+ {{type}} z;
272
+ {{real_type}} r, lnr, theta, z_r, z_theta;
273
+ if (b.imag == 0 && b.real == (int)b.real) {
274
+ if (b.real < 0) {
275
+ {{real_type}} denom = a.real * a.real + a.imag * a.imag;
276
+ a.real = a.real / denom;
277
+ a.imag = -a.imag / denom;
278
+ b.real = -b.real;
279
+ }
280
+ switch ((int)b.real) {
281
+ case 0:
282
+ z.real = 1;
283
+ z.imag = 0;
284
+ return z;
285
+ case 1:
286
+ return a;
287
+ case 2:
288
+ return __Pyx_c_prod{{func_suffix}}(a, a);
289
+ case 3:
290
+ z = __Pyx_c_prod{{func_suffix}}(a, a);
291
+ return __Pyx_c_prod{{func_suffix}}(z, a);
292
+ case 4:
293
+ z = __Pyx_c_prod{{func_suffix}}(a, a);
294
+ return __Pyx_c_prod{{func_suffix}}(z, z);
295
+ }
296
+ }
297
+ if (a.imag == 0) {
298
+ if (a.real == 0) {
299
+ return a;
300
+ } else if ((b.imag == 0) && (a.real >= 0)) {
301
+ z.real = pow{{m}}(a.real, b.real);
302
+ z.imag = 0;
303
+ return z;
304
+ } else if (a.real > 0) {
305
+ r = a.real;
306
+ theta = 0;
307
+ } else {
308
+ r = -a.real;
309
+ theta = atan2{{m}}(0.0, -1.0);
310
+ }
311
+ } else {
312
+ r = __Pyx_c_abs{{func_suffix}}(a);
313
+ theta = atan2{{m}}(a.imag, a.real);
314
+ }
315
+ lnr = log{{m}}(r);
316
+ z_r = exp{{m}}(lnr * b.real - theta * b.imag);
317
+ z_theta = theta * b.real + lnr * b.imag;
318
+ z.real = z_r * cos{{m}}(z_theta);
319
+ z.imag = z_r * sin{{m}}(z_theta);
320
+ return z;
321
+ }
322
+ #endif
323
+ #endif
324
+
325
+ /////////////// SoftComplexToDouble.proto //////////////////
326
+
327
+ static double __Pyx_SoftComplexToDouble(__pyx_t_double_complex value, int have_gil); /* proto */
328
+
329
+ /////////////// SoftComplexToDouble //////////////////
330
+ //@requires: RealImag
331
+
332
+ static double __Pyx_SoftComplexToDouble(__pyx_t_double_complex value, int have_gil) {
333
+ // This isn't an absolutely perfect match for the Python behaviour:
334
+ // In Python the type would be determined right after the number is
335
+ // created (usually '**'), while here it's determined when coerced
336
+ // to a PyObject, which may be a few operations later.
337
+ if (unlikely(__Pyx_CIMAG(value))) {
338
+ PyGILState_STATE gilstate;
339
+ if (!have_gil)
340
+ gilstate = PyGILState_Ensure();
341
+ PyErr_SetString(PyExc_TypeError,
342
+ "Cannot convert 'complex' with non-zero imaginary component to 'double' "
343
+ "(this most likely comes from the '**' operator; "
344
+ "use 'cython.cpow(True)' to return 'nan' instead of a "
345
+ "complex number).");
346
+ if (!have_gil)
347
+ PyGILState_Release(gilstate);
348
+ return -1.;
349
+ }
350
+ return __Pyx_CREAL(value);
351
+ }
352
+
353
+ ///////// SoftComplexToPy.proto ///////////////////////
354
+
355
+ static PyObject *__pyx_Py_FromSoftComplex(__pyx_t_double_complex value); /* proto */
356
+
357
+ //////// SoftComplexToPy ////////////////
358
+ //@requires: RealImag
359
+
360
+ static PyObject *__pyx_Py_FromSoftComplex(__pyx_t_double_complex value) {
361
+ if (__Pyx_CIMAG(value)) {
362
+ return PyComplex_FromDoubles(__Pyx_CREAL(value), __Pyx_CIMAG(value));
363
+ } else {
364
+ return PyFloat_FromDouble(__Pyx_CREAL(value));
365
+ }
366
+ }
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/CythonFunction.c ADDED
@@ -0,0 +1,1810 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ //////////////////// CythonFunctionShared.proto ////////////////////
3
+
4
+ #define __Pyx_CyFunction_USED
5
+
6
+ #define __Pyx_CYFUNCTION_STATICMETHOD 0x01
7
+ #define __Pyx_CYFUNCTION_CLASSMETHOD 0x02
8
+ #define __Pyx_CYFUNCTION_CCLASS 0x04
9
+ #define __Pyx_CYFUNCTION_COROUTINE 0x08
10
+
11
+ #define __Pyx_CyFunction_GetClosure(f) \
12
+ (((__pyx_CyFunctionObject *) (f))->func_closure)
13
+
14
+ #if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API
15
+ #define __Pyx_CyFunction_GetClassObj(f) \
16
+ (((__pyx_CyFunctionObject *) (f))->func_classobj)
17
+ #else
18
+ #define __Pyx_CyFunction_GetClassObj(f) \
19
+ ((PyObject*) ((PyCMethodObject *) (f))->mm_class)
20
+ #endif
21
+ #define __Pyx_CyFunction_SetClassObj(f, classobj) \
22
+ __Pyx__CyFunction_SetClassObj((__pyx_CyFunctionObject *) (f), (classobj))
23
+
24
+ #define __Pyx_CyFunction_Defaults(type, f) \
25
+ ((type *)(((__pyx_CyFunctionObject *) (f))->defaults))
26
+ #define __Pyx_CyFunction_SetDefaultsGetter(f, g) \
27
+ ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g)
28
+
29
+
30
+ typedef struct {
31
+ #if CYTHON_COMPILING_IN_LIMITED_API
32
+ PyObject_HEAD
33
+ // We can't "inherit" from func, but we can use it as a data store
34
+ PyObject *func;
35
+ #elif PY_VERSION_HEX < 0x030900B1
36
+ PyCFunctionObject func;
37
+ #else
38
+ // PEP-573: PyCFunctionObject + mm_class
39
+ PyCMethodObject func;
40
+ #endif
41
+ #if CYTHON_BACKPORT_VECTORCALL
42
+ __pyx_vectorcallfunc func_vectorcall;
43
+ #endif
44
+ #if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API
45
+ PyObject *func_weakreflist;
46
+ #endif
47
+ PyObject *func_dict;
48
+ PyObject *func_name;
49
+ PyObject *func_qualname;
50
+ PyObject *func_doc;
51
+ PyObject *func_globals;
52
+ PyObject *func_code;
53
+ PyObject *func_closure;
54
+ #if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API
55
+ // No-args super() class cell
56
+ PyObject *func_classobj;
57
+ #endif
58
+ // Dynamic default args and annotations
59
+ void *defaults;
60
+ int defaults_pyobjects;
61
+ size_t defaults_size; /* used by FusedFunction for copying defaults */
62
+ int flags;
63
+
64
+ // Defaults info
65
+ PyObject *defaults_tuple; /* Const defaults tuple */
66
+ PyObject *defaults_kwdict; /* Const kwonly defaults dict */
67
+ PyObject *(*defaults_getter)(PyObject *);
68
+ PyObject *func_annotations; /* function annotations dict */
69
+
70
+ // Coroutine marker
71
+ PyObject *func_is_coroutine;
72
+ } __pyx_CyFunctionObject;
73
+
74
+ #undef __Pyx_CyOrPyCFunction_Check
75
+ #define __Pyx_CyFunction_Check(obj) __Pyx_TypeCheck(obj, __pyx_CyFunctionType)
76
+ #define __Pyx_CyOrPyCFunction_Check(obj) __Pyx_TypeCheck2(obj, __pyx_CyFunctionType, &PyCFunction_Type)
77
+ #define __Pyx_CyFunction_CheckExact(obj) __Pyx_IS_TYPE(obj, __pyx_CyFunctionType)
78
+ static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc);/*proto*/
79
+ #undef __Pyx_IsSameCFunction
80
+ #define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCyOrCFunction(func, cfunc)
81
+
82
+ static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml,
83
+ int flags, PyObject* qualname,
84
+ PyObject *closure,
85
+ PyObject *module, PyObject *globals,
86
+ PyObject* code);
87
+
88
+ static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj);
89
+ static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m,
90
+ size_t size,
91
+ int pyobjects);
92
+ static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m,
93
+ PyObject *tuple);
94
+ static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m,
95
+ PyObject *dict);
96
+ static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m,
97
+ PyObject *dict);
98
+
99
+
100
+ static int __pyx_CyFunction_init(PyObject *module);
101
+
102
+ #if CYTHON_METH_FASTCALL
103
+ static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames);
104
+ static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames);
105
+ static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames);
106
+ static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames);
107
+ #if CYTHON_BACKPORT_VECTORCALL
108
+ #define __Pyx_CyFunction_func_vectorcall(f) (((__pyx_CyFunctionObject*)f)->func_vectorcall)
109
+ #else
110
+ #define __Pyx_CyFunction_func_vectorcall(f) (((PyCFunctionObject*)f)->vectorcall)
111
+ #endif
112
+ #endif
113
+
114
+ //////////////////// CythonFunctionShared ////////////////////
115
+ //@substitute: naming
116
+ //@requires: CommonStructures.c::FetchCommonType
117
+ //@requires: ObjectHandling.c::PyMethodNew
118
+ //@requires: ObjectHandling.c::PyVectorcallFastCallDict
119
+ //@requires: ModuleSetupCode.c::IncludeStructmemberH
120
+ //@requires: ObjectHandling.c::PyObjectGetAttrStr
121
+
122
+ #if CYTHON_COMPILING_IN_LIMITED_API
123
+ static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) {
124
+ if (__Pyx_CyFunction_Check(func)) {
125
+ return PyCFunction_GetFunction(((__pyx_CyFunctionObject*)func)->func) == (PyCFunction) cfunc;
126
+ } else if (PyCFunction_Check(func)) {
127
+ return PyCFunction_GetFunction(func) == (PyCFunction) cfunc;
128
+ }
129
+ return 0;
130
+ }
131
+ #else
132
+ static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) {
133
+ return __Pyx_CyOrPyCFunction_Check(func) && __Pyx_CyOrPyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc;
134
+ }
135
+ #endif
136
+
137
+ static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj) {
138
+ #if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API
139
+ __Pyx_Py_XDECREF_SET(
140
+ __Pyx_CyFunction_GetClassObj(f),
141
+ ((classobj) ? __Pyx_NewRef(classobj) : NULL));
142
+ #else
143
+ __Pyx_Py_XDECREF_SET(
144
+ // assigning to "mm_class", which is a "PyTypeObject*"
145
+ ((PyCMethodObject *) (f))->mm_class,
146
+ (PyTypeObject*)((classobj) ? __Pyx_NewRef(classobj) : NULL));
147
+ #endif
148
+ }
149
+
150
+ static PyObject *
151
+ __Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, void *closure)
152
+ {
153
+ CYTHON_UNUSED_VAR(closure);
154
+ if (unlikely(op->func_doc == NULL)) {
155
+ #if CYTHON_COMPILING_IN_LIMITED_API
156
+ op->func_doc = PyObject_GetAttrString(op->func, "__doc__");
157
+ if (unlikely(!op->func_doc)) return NULL;
158
+ #else
159
+ if (((PyCFunctionObject*)op)->m_ml->ml_doc) {
160
+ #if PY_MAJOR_VERSION >= 3
161
+ op->func_doc = PyUnicode_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc);
162
+ #else
163
+ op->func_doc = PyString_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc);
164
+ #endif
165
+ if (unlikely(op->func_doc == NULL))
166
+ return NULL;
167
+ } else {
168
+ Py_INCREF(Py_None);
169
+ return Py_None;
170
+ }
171
+ #endif /* CYTHON_COMPILING_IN_LIMITED_API */
172
+ }
173
+ Py_INCREF(op->func_doc);
174
+ return op->func_doc;
175
+ }
176
+
177
+ static int
178
+ __Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, void *context)
179
+ {
180
+ CYTHON_UNUSED_VAR(context);
181
+ if (value == NULL) {
182
+ // Mark as deleted
183
+ value = Py_None;
184
+ }
185
+ Py_INCREF(value);
186
+ __Pyx_Py_XDECREF_SET(op->func_doc, value);
187
+ return 0;
188
+ }
189
+
190
+ static PyObject *
191
+ __Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, void *context)
192
+ {
193
+ CYTHON_UNUSED_VAR(context);
194
+ if (unlikely(op->func_name == NULL)) {
195
+ #if CYTHON_COMPILING_IN_LIMITED_API
196
+ op->func_name = PyObject_GetAttrString(op->func, "__name__");
197
+ #elif PY_MAJOR_VERSION >= 3
198
+ op->func_name = PyUnicode_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name);
199
+ #else
200
+ op->func_name = PyString_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name);
201
+ #endif /* CYTHON_COMPILING_IN_LIMITED_API */
202
+ if (unlikely(op->func_name == NULL))
203
+ return NULL;
204
+ }
205
+ Py_INCREF(op->func_name);
206
+ return op->func_name;
207
+ }
208
+
209
+ static int
210
+ __Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, void *context)
211
+ {
212
+ CYTHON_UNUSED_VAR(context);
213
+ #if PY_MAJOR_VERSION >= 3
214
+ if (unlikely(value == NULL || !PyUnicode_Check(value)))
215
+ #else
216
+ if (unlikely(value == NULL || !PyString_Check(value)))
217
+ #endif
218
+ {
219
+ PyErr_SetString(PyExc_TypeError,
220
+ "__name__ must be set to a string object");
221
+ return -1;
222
+ }
223
+ Py_INCREF(value);
224
+ __Pyx_Py_XDECREF_SET(op->func_name, value);
225
+ return 0;
226
+ }
227
+
228
+ static PyObject *
229
+ __Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, void *context)
230
+ {
231
+ CYTHON_UNUSED_VAR(context);
232
+ Py_INCREF(op->func_qualname);
233
+ return op->func_qualname;
234
+ }
235
+
236
+ static int
237
+ __Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, void *context)
238
+ {
239
+ CYTHON_UNUSED_VAR(context);
240
+ #if PY_MAJOR_VERSION >= 3
241
+ if (unlikely(value == NULL || !PyUnicode_Check(value)))
242
+ #else
243
+ if (unlikely(value == NULL || !PyString_Check(value)))
244
+ #endif
245
+ {
246
+ PyErr_SetString(PyExc_TypeError,
247
+ "__qualname__ must be set to a string object");
248
+ return -1;
249
+ }
250
+ Py_INCREF(value);
251
+ __Pyx_Py_XDECREF_SET(op->func_qualname, value);
252
+ return 0;
253
+ }
254
+
255
+ static PyObject *
256
+ __Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, void *context)
257
+ {
258
+ CYTHON_UNUSED_VAR(context);
259
+ if (unlikely(op->func_dict == NULL)) {
260
+ op->func_dict = PyDict_New();
261
+ if (unlikely(op->func_dict == NULL))
262
+ return NULL;
263
+ }
264
+ Py_INCREF(op->func_dict);
265
+ return op->func_dict;
266
+ }
267
+
268
+ static int
269
+ __Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, void *context)
270
+ {
271
+ CYTHON_UNUSED_VAR(context);
272
+ if (unlikely(value == NULL)) {
273
+ PyErr_SetString(PyExc_TypeError,
274
+ "function's dictionary may not be deleted");
275
+ return -1;
276
+ }
277
+ if (unlikely(!PyDict_Check(value))) {
278
+ PyErr_SetString(PyExc_TypeError,
279
+ "setting function's dictionary to a non-dict");
280
+ return -1;
281
+ }
282
+ Py_INCREF(value);
283
+ __Pyx_Py_XDECREF_SET(op->func_dict, value);
284
+ return 0;
285
+ }
286
+
287
+ static PyObject *
288
+ __Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, void *context)
289
+ {
290
+ CYTHON_UNUSED_VAR(context);
291
+ Py_INCREF(op->func_globals);
292
+ return op->func_globals;
293
+ }
294
+
295
+ static PyObject *
296
+ __Pyx_CyFunction_get_closure(__pyx_CyFunctionObject *op, void *context)
297
+ {
298
+ CYTHON_UNUSED_VAR(op);
299
+ CYTHON_UNUSED_VAR(context);
300
+ Py_INCREF(Py_None);
301
+ return Py_None;
302
+ }
303
+
304
+ static PyObject *
305
+ __Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, void *context)
306
+ {
307
+ PyObject* result = (op->func_code) ? op->func_code : Py_None;
308
+ CYTHON_UNUSED_VAR(context);
309
+ Py_INCREF(result);
310
+ return result;
311
+ }
312
+
313
+ static int
314
+ __Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) {
315
+ int result = 0;
316
+ PyObject *res = op->defaults_getter((PyObject *) op);
317
+ if (unlikely(!res))
318
+ return -1;
319
+
320
+ // Cache result
321
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
322
+ op->defaults_tuple = PyTuple_GET_ITEM(res, 0);
323
+ Py_INCREF(op->defaults_tuple);
324
+ op->defaults_kwdict = PyTuple_GET_ITEM(res, 1);
325
+ Py_INCREF(op->defaults_kwdict);
326
+ #else
327
+ op->defaults_tuple = __Pyx_PySequence_ITEM(res, 0);
328
+ if (unlikely(!op->defaults_tuple)) result = -1;
329
+ else {
330
+ op->defaults_kwdict = __Pyx_PySequence_ITEM(res, 1);
331
+ if (unlikely(!op->defaults_kwdict)) result = -1;
332
+ }
333
+ #endif
334
+ Py_DECREF(res);
335
+ return result;
336
+ }
337
+
338
+ static int
339
+ __Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) {
340
+ CYTHON_UNUSED_VAR(context);
341
+ if (!value) {
342
+ // del => explicit None to prevent rebuilding
343
+ value = Py_None;
344
+ } else if (unlikely(value != Py_None && !PyTuple_Check(value))) {
345
+ PyErr_SetString(PyExc_TypeError,
346
+ "__defaults__ must be set to a tuple object");
347
+ return -1;
348
+ }
349
+ PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__defaults__ will not "
350
+ "currently affect the values used in function calls", 1);
351
+ Py_INCREF(value);
352
+ __Pyx_Py_XDECREF_SET(op->defaults_tuple, value);
353
+ return 0;
354
+ }
355
+
356
+ static PyObject *
357
+ __Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, void *context) {
358
+ PyObject* result = op->defaults_tuple;
359
+ CYTHON_UNUSED_VAR(context);
360
+ if (unlikely(!result)) {
361
+ if (op->defaults_getter) {
362
+ if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL;
363
+ result = op->defaults_tuple;
364
+ } else {
365
+ result = Py_None;
366
+ }
367
+ }
368
+ Py_INCREF(result);
369
+ return result;
370
+ }
371
+
372
+ static int
373
+ __Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) {
374
+ CYTHON_UNUSED_VAR(context);
375
+ if (!value) {
376
+ // del => explicit None to prevent rebuilding
377
+ value = Py_None;
378
+ } else if (unlikely(value != Py_None && !PyDict_Check(value))) {
379
+ PyErr_SetString(PyExc_TypeError,
380
+ "__kwdefaults__ must be set to a dict object");
381
+ return -1;
382
+ }
383
+ PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__kwdefaults__ will not "
384
+ "currently affect the values used in function calls", 1);
385
+ Py_INCREF(value);
386
+ __Pyx_Py_XDECREF_SET(op->defaults_kwdict, value);
387
+ return 0;
388
+ }
389
+
390
+ static PyObject *
391
+ __Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, void *context) {
392
+ PyObject* result = op->defaults_kwdict;
393
+ CYTHON_UNUSED_VAR(context);
394
+ if (unlikely(!result)) {
395
+ if (op->defaults_getter) {
396
+ if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL;
397
+ result = op->defaults_kwdict;
398
+ } else {
399
+ result = Py_None;
400
+ }
401
+ }
402
+ Py_INCREF(result);
403
+ return result;
404
+ }
405
+
406
+ static int
407
+ __Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, void *context) {
408
+ CYTHON_UNUSED_VAR(context);
409
+ if (!value || value == Py_None) {
410
+ value = NULL;
411
+ } else if (unlikely(!PyDict_Check(value))) {
412
+ PyErr_SetString(PyExc_TypeError,
413
+ "__annotations__ must be set to a dict object");
414
+ return -1;
415
+ }
416
+ Py_XINCREF(value);
417
+ __Pyx_Py_XDECREF_SET(op->func_annotations, value);
418
+ return 0;
419
+ }
420
+
421
+ static PyObject *
422
+ __Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, void *context) {
423
+ PyObject* result = op->func_annotations;
424
+ CYTHON_UNUSED_VAR(context);
425
+ if (unlikely(!result)) {
426
+ result = PyDict_New();
427
+ if (unlikely(!result)) return NULL;
428
+ op->func_annotations = result;
429
+ }
430
+ Py_INCREF(result);
431
+ return result;
432
+ }
433
+
434
+ static PyObject *
435
+ __Pyx_CyFunction_get_is_coroutine(__pyx_CyFunctionObject *op, void *context) {
436
+ int is_coroutine;
437
+ CYTHON_UNUSED_VAR(context);
438
+ if (op->func_is_coroutine) {
439
+ return __Pyx_NewRef(op->func_is_coroutine);
440
+ }
441
+
442
+ is_coroutine = op->flags & __Pyx_CYFUNCTION_COROUTINE;
443
+ #if PY_VERSION_HEX >= 0x03050000
444
+ if (is_coroutine) {
445
+ PyObject *module, *fromlist, *marker = PYIDENT("_is_coroutine");
446
+ fromlist = PyList_New(1);
447
+ if (unlikely(!fromlist)) return NULL;
448
+ Py_INCREF(marker);
449
+ #if CYTHON_ASSUME_SAFE_MACROS
450
+ PyList_SET_ITEM(fromlist, 0, marker);
451
+ #else
452
+ if (unlikely(PyList_SetItem(fromlist, 0, marker) < 0)) {
453
+ Py_DECREF(marker);
454
+ Py_DECREF(fromlist);
455
+ return NULL;
456
+ }
457
+ #endif
458
+ module = PyImport_ImportModuleLevelObject(PYIDENT("asyncio.coroutines"), NULL, NULL, fromlist, 0);
459
+ Py_DECREF(fromlist);
460
+ if (unlikely(!module)) goto ignore;
461
+ op->func_is_coroutine = __Pyx_PyObject_GetAttrStr(module, marker);
462
+ Py_DECREF(module);
463
+ if (likely(op->func_is_coroutine)) {
464
+ return __Pyx_NewRef(op->func_is_coroutine);
465
+ }
466
+ ignore:
467
+ PyErr_Clear();
468
+ }
469
+ #endif
470
+
471
+ op->func_is_coroutine = __Pyx_PyBool_FromLong(is_coroutine);
472
+ return __Pyx_NewRef(op->func_is_coroutine);
473
+ }
474
+
475
+ //#if PY_VERSION_HEX >= 0x030400C1
476
+ //static PyObject *
477
+ //__Pyx_CyFunction_get_signature(__pyx_CyFunctionObject *op, void *context) {
478
+ // PyObject *inspect_module, *signature_class, *signature;
479
+ // CYTHON_UNUSED_VAR(context);
480
+ // // from inspect import Signature
481
+ // inspect_module = PyImport_ImportModuleLevelObject(PYIDENT("inspect"), NULL, NULL, NULL, 0);
482
+ // if (unlikely(!inspect_module))
483
+ // goto bad;
484
+ // signature_class = __Pyx_PyObject_GetAttrStr(inspect_module, PYIDENT("Signature"));
485
+ // Py_DECREF(inspect_module);
486
+ // if (unlikely(!signature_class))
487
+ // goto bad;
488
+ // // return Signature.from_function(op)
489
+ // signature = PyObject_CallMethodObjArgs(signature_class, PYIDENT("from_function"), op, NULL);
490
+ // Py_DECREF(signature_class);
491
+ // if (likely(signature))
492
+ // return signature;
493
+ //bad:
494
+ // // make sure we raise an AttributeError from this property on any errors
495
+ // if (!PyErr_ExceptionMatches(PyExc_AttributeError))
496
+ // PyErr_SetString(PyExc_AttributeError, "failed to calculate __signature__");
497
+ // return NULL;
498
+ //}
499
+ //#endif
500
+
501
#if CYTHON_COMPILING_IN_LIMITED_API
/* Limited-API builds keep the module on the wrapped PyCFunction object
 * (op->func), so __module__ is proxied through generic attribute access. */
static PyObject *
__Pyx_CyFunction_get_module(__pyx_CyFunctionObject *op, void *context) {
    CYTHON_UNUSED_VAR(context);
    return PyObject_GetAttrString(op->func, "__module__");
}

static int
__Pyx_CyFunction_set_module(__pyx_CyFunctionObject *op, PyObject* value, void *context) {
    CYTHON_UNUSED_VAR(context);
    return PyObject_SetAttrString(op->func, "__module__", value);
}
#endif
514
+
515
+ static PyGetSetDef __pyx_CyFunction_getsets[] = {
516
+ {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0},
517
+ {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0},
518
+ {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0},
519
+ {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0},
520
+ {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0},
521
+ {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0},
522
+ {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0},
523
+ {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0},
524
+ {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0},
525
+ {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0},
526
+ {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0},
527
+ {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0},
528
+ {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0},
529
+ {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0},
530
+ {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0},
531
+ {(char *) "__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0},
532
+ {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0},
533
+ {(char *) "_is_coroutine", (getter)__Pyx_CyFunction_get_is_coroutine, 0, 0, 0},
534
+ //#if PY_VERSION_HEX >= 0x030400C1
535
+ // {(char *) "__signature__", (getter)__Pyx_CyFunction_get_signature, 0, 0, 0},
536
+ //#endif
537
+ #if CYTHON_COMPILING_IN_LIMITED_API
538
+ {"__module__", (getter)__Pyx_CyFunction_get_module, (setter)__Pyx_CyFunction_set_module, 0, 0},
539
+ #endif
540
+ {0, 0, 0, 0, 0}
541
+ };
542
+
543
+ static PyMemberDef __pyx_CyFunction_members[] = {
544
+ #if !CYTHON_COMPILING_IN_LIMITED_API
545
+ {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), 0, 0},
546
+ #endif
547
+ #if CYTHON_USE_TYPE_SPECS
548
+ {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0},
549
+ #if CYTHON_METH_FASTCALL
550
+ #if CYTHON_BACKPORT_VECTORCALL
551
+ {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_vectorcall), READONLY, 0},
552
+ #else
553
+ #if !CYTHON_COMPILING_IN_LIMITED_API
554
+ {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(PyCFunctionObject, vectorcall), READONLY, 0},
555
+ #endif
556
+ #endif
557
+ #endif
558
+ #if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API
559
+ {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0},
560
+ #else
561
+ {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0},
562
+ #endif
563
+ #endif
564
+ {0, 0, 0, 0, 0}
565
+ };
566
+
567
+ static PyObject *
568
+ __Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, PyObject *args)
569
+ {
570
+ CYTHON_UNUSED_VAR(args);
571
+ #if PY_MAJOR_VERSION >= 3
572
+ Py_INCREF(m->func_qualname);
573
+ return m->func_qualname;
574
+ #else
575
+ return PyString_FromString(((PyCFunctionObject*)m)->m_ml->ml_name);
576
+ #endif
577
+ }
578
+
579
+ static PyMethodDef __pyx_CyFunction_methods[] = {
580
+ {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0},
581
+ {0, 0, 0, 0}
582
+ };
583
+
584
+
585
/* Where the weakref list lives depends on the Python version:
 * before 3.5 (and under the Limited API) CyFunction carries its own slot;
 * afterwards the one on PyCFunctionObject is reused. */
#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API
#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist)
#else
#define __Pyx_CyFunction_weakreflist(cyfunc) (((PyCFunctionObject*)cyfunc)->m_weakreflist)
#endif
590
+
591
+ static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname,
592
+ PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) {
593
+ #if !CYTHON_COMPILING_IN_LIMITED_API
594
+ PyCFunctionObject *cf = (PyCFunctionObject*) op;
595
+ #endif
596
+ if (unlikely(op == NULL))
597
+ return NULL;
598
+ #if CYTHON_COMPILING_IN_LIMITED_API
599
+ // Note that we end up with a circular reference to op. This isn't
600
+ // a disaster, but in an ideal world it'd be nice to avoid it.
601
+ op->func = PyCFunction_NewEx(ml, (PyObject*)op, module);
602
+ if (unlikely(!op->func)) return NULL;
603
+ #endif
604
+ op->flags = flags;
605
+ __Pyx_CyFunction_weakreflist(op) = NULL;
606
+ #if !CYTHON_COMPILING_IN_LIMITED_API
607
+ cf->m_ml = ml;
608
+ cf->m_self = (PyObject *) op;
609
+ #endif
610
+ Py_XINCREF(closure);
611
+ op->func_closure = closure;
612
+ #if !CYTHON_COMPILING_IN_LIMITED_API
613
+ Py_XINCREF(module);
614
+ cf->m_module = module;
615
+ #endif
616
+ op->func_dict = NULL;
617
+ op->func_name = NULL;
618
+ Py_INCREF(qualname);
619
+ op->func_qualname = qualname;
620
+ op->func_doc = NULL;
621
+ #if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API
622
+ op->func_classobj = NULL;
623
+ #else
624
+ ((PyCMethodObject*)op)->mm_class = NULL;
625
+ #endif
626
+ op->func_globals = globals;
627
+ Py_INCREF(op->func_globals);
628
+ Py_XINCREF(code);
629
+ op->func_code = code;
630
+ // Dynamic Default args
631
+ op->defaults_pyobjects = 0;
632
+ op->defaults_size = 0;
633
+ op->defaults = NULL;
634
+ op->defaults_tuple = NULL;
635
+ op->defaults_kwdict = NULL;
636
+ op->defaults_getter = NULL;
637
+ op->func_annotations = NULL;
638
+ op->func_is_coroutine = NULL;
639
+ #if CYTHON_METH_FASTCALL
640
+ switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS | METH_METHOD)) {
641
+ case METH_NOARGS:
642
+ __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_NOARGS;
643
+ break;
644
+ case METH_O:
645
+ __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_O;
646
+ break;
647
+ // case METH_FASTCALL is not used
648
+ case METH_METHOD | METH_FASTCALL | METH_KEYWORDS:
649
+ __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD;
650
+ break;
651
+ case METH_FASTCALL | METH_KEYWORDS:
652
+ __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS;
653
+ break;
654
+ // case METH_VARARGS is not used
655
+ case METH_VARARGS | METH_KEYWORDS:
656
+ __Pyx_CyFunction_func_vectorcall(op) = NULL;
657
+ break;
658
+ default:
659
+ PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction");
660
+ Py_DECREF(op);
661
+ return NULL;
662
+ }
663
+ #endif
664
+ return (PyObject *) op;
665
+ }
666
+
667
+ static int
668
+ __Pyx_CyFunction_clear(__pyx_CyFunctionObject *m)
669
+ {
670
+ Py_CLEAR(m->func_closure);
671
+ #if CYTHON_COMPILING_IN_LIMITED_API
672
+ Py_CLEAR(m->func);
673
+ #else
674
+ Py_CLEAR(((PyCFunctionObject*)m)->m_module);
675
+ #endif
676
+ Py_CLEAR(m->func_dict);
677
+ Py_CLEAR(m->func_name);
678
+ Py_CLEAR(m->func_qualname);
679
+ Py_CLEAR(m->func_doc);
680
+ Py_CLEAR(m->func_globals);
681
+ Py_CLEAR(m->func_code);
682
+ #if !CYTHON_COMPILING_IN_LIMITED_API
683
+ #if PY_VERSION_HEX < 0x030900B1
684
+ Py_CLEAR(__Pyx_CyFunction_GetClassObj(m));
685
+ #else
686
+ {
687
+ PyObject *cls = (PyObject*) ((PyCMethodObject *) (m))->mm_class;
688
+ ((PyCMethodObject *) (m))->mm_class = NULL;
689
+ Py_XDECREF(cls);
690
+ }
691
+ #endif
692
+ #endif
693
+ Py_CLEAR(m->defaults_tuple);
694
+ Py_CLEAR(m->defaults_kwdict);
695
+ Py_CLEAR(m->func_annotations);
696
+ Py_CLEAR(m->func_is_coroutine);
697
+
698
+ if (m->defaults) {
699
+ PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m);
700
+ int i;
701
+
702
+ for (i = 0; i < m->defaults_pyobjects; i++)
703
+ Py_XDECREF(pydefaults[i]);
704
+
705
+ PyObject_Free(m->defaults);
706
+ m->defaults = NULL;
707
+ }
708
+
709
+ return 0;
710
+ }
711
+
712
+ static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m)
713
+ {
714
+ if (__Pyx_CyFunction_weakreflist(m) != NULL)
715
+ PyObject_ClearWeakRefs((PyObject *) m);
716
+ __Pyx_CyFunction_clear(m);
717
+ __Pyx_PyHeapTypeObject_GC_Del(m);
718
+ }
719
+
720
+ static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m)
721
+ {
722
+ PyObject_GC_UnTrack(m);
723
+ __Pyx__CyFunction_dealloc(m);
724
+ }
725
+
726
+ static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg)
727
+ {
728
+ Py_VISIT(m->func_closure);
729
+ #if CYTHON_COMPILING_IN_LIMITED_API
730
+ Py_VISIT(m->func);
731
+ #else
732
+ Py_VISIT(((PyCFunctionObject*)m)->m_module);
733
+ #endif
734
+ Py_VISIT(m->func_dict);
735
+ Py_VISIT(m->func_name);
736
+ Py_VISIT(m->func_qualname);
737
+ Py_VISIT(m->func_doc);
738
+ Py_VISIT(m->func_globals);
739
+ Py_VISIT(m->func_code);
740
+ #if !CYTHON_COMPILING_IN_LIMITED_API
741
+ Py_VISIT(__Pyx_CyFunction_GetClassObj(m));
742
+ #endif
743
+ Py_VISIT(m->defaults_tuple);
744
+ Py_VISIT(m->defaults_kwdict);
745
+ Py_VISIT(m->func_is_coroutine);
746
+
747
+ if (m->defaults) {
748
+ PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m);
749
+ int i;
750
+
751
+ for (i = 0; i < m->defaults_pyobjects; i++)
752
+ Py_VISIT(pydefaults[i]);
753
+ }
754
+
755
+ return 0;
756
+ }
757
+
758
+ static PyObject*
759
+ __Pyx_CyFunction_repr(__pyx_CyFunctionObject *op)
760
+ {
761
+ #if PY_MAJOR_VERSION >= 3
762
+ return PyUnicode_FromFormat("<cyfunction %U at %p>",
763
+ op->func_qualname, (void *)op);
764
+ #else
765
+ return PyString_FromFormat("<cyfunction %s at %p>",
766
+ PyString_AsString(op->func_qualname), (void *)op);
767
+ #endif
768
+ }
769
+
770
+ static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) {
771
+ // originally copied from PyCFunction_Call() in CPython's Objects/methodobject.c
772
+ #if CYTHON_COMPILING_IN_LIMITED_API
773
+ PyObject *f = ((__pyx_CyFunctionObject*)func)->func;
774
+ PyObject *py_name = NULL;
775
+ PyCFunction meth;
776
+ int flags;
777
+ meth = PyCFunction_GetFunction(f);
778
+ if (unlikely(!meth)) return NULL;
779
+ flags = PyCFunction_GetFlags(f);
780
+ if (unlikely(flags < 0)) return NULL;
781
+ #else
782
+ PyCFunctionObject* f = (PyCFunctionObject*)func;
783
+ PyCFunction meth = f->m_ml->ml_meth;
784
+ int flags = f->m_ml->ml_flags;
785
+ #endif
786
+
787
+ Py_ssize_t size;
788
+
789
+ switch (flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) {
790
+ case METH_VARARGS:
791
+ if (likely(kw == NULL || PyDict_Size(kw) == 0))
792
+ return (*meth)(self, arg);
793
+ break;
794
+ case METH_VARARGS | METH_KEYWORDS:
795
+ return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw);
796
+ case METH_NOARGS:
797
+ if (likely(kw == NULL || PyDict_Size(kw) == 0)) {
798
+ #if CYTHON_ASSUME_SAFE_MACROS
799
+ size = PyTuple_GET_SIZE(arg);
800
+ #else
801
+ size = PyTuple_Size(arg);
802
+ if (unlikely(size < 0)) return NULL;
803
+ #endif
804
+ if (likely(size == 0))
805
+ return (*meth)(self, NULL);
806
+ #if CYTHON_COMPILING_IN_LIMITED_API
807
+ py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL);
808
+ if (!py_name) return NULL;
809
+ PyErr_Format(PyExc_TypeError,
810
+ "%.200S() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)",
811
+ py_name, size);
812
+ Py_DECREF(py_name);
813
+ #else
814
+ PyErr_Format(PyExc_TypeError,
815
+ "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)",
816
+ f->m_ml->ml_name, size);
817
+ #endif
818
+ return NULL;
819
+ }
820
+ break;
821
+ case METH_O:
822
+ if (likely(kw == NULL || PyDict_Size(kw) == 0)) {
823
+ #if CYTHON_ASSUME_SAFE_MACROS
824
+ size = PyTuple_GET_SIZE(arg);
825
+ #else
826
+ size = PyTuple_Size(arg);
827
+ if (unlikely(size < 0)) return NULL;
828
+ #endif
829
+ if (likely(size == 1)) {
830
+ PyObject *result, *arg0;
831
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
832
+ arg0 = PyTuple_GET_ITEM(arg, 0);
833
+ #else
834
+ arg0 = __Pyx_PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL;
835
+ #endif
836
+ result = (*meth)(self, arg0);
837
+ #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS)
838
+ Py_DECREF(arg0);
839
+ #endif
840
+ return result;
841
+ }
842
+ #if CYTHON_COMPILING_IN_LIMITED_API
843
+ py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL);
844
+ if (!py_name) return NULL;
845
+ PyErr_Format(PyExc_TypeError,
846
+ "%.200S() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)",
847
+ py_name, size);
848
+ Py_DECREF(py_name);
849
+ #else
850
+ PyErr_Format(PyExc_TypeError,
851
+ "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)",
852
+ f->m_ml->ml_name, size);
853
+ #endif
854
+
855
+ return NULL;
856
+ }
857
+ break;
858
+ default:
859
+ PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction");
860
+ return NULL;
861
+ }
862
+ #if CYTHON_COMPILING_IN_LIMITED_API
863
+ py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL);
864
+ if (!py_name) return NULL;
865
+ PyErr_Format(PyExc_TypeError, "%.200S() takes no keyword arguments",
866
+ py_name);
867
+ Py_DECREF(py_name);
868
+ #else
869
+ PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments",
870
+ f->m_ml->ml_name);
871
+ #endif
872
+ return NULL;
873
+ }
874
+
875
+ static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) {
876
+ PyObject *self, *result;
877
+ #if CYTHON_COMPILING_IN_LIMITED_API
878
+ // PyCFunction_GetSelf returns a borrowed reference
879
+ self = PyCFunction_GetSelf(((__pyx_CyFunctionObject*)func)->func);
880
+ if (unlikely(!self) && PyErr_Occurred()) return NULL;
881
+ #else
882
+ self = ((PyCFunctionObject*)func)->m_self;
883
+ #endif
884
+ result = __Pyx_CyFunction_CallMethod(func, self, arg, kw);
885
+ return result;
886
+ }
887
+
888
+ static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) {
889
+ PyObject *result;
890
+ __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func;
891
+
892
+ #if CYTHON_METH_FASTCALL
893
+ // Prefer vectorcall if available. This is not the typical case, as
894
+ // CPython would normally use vectorcall directly instead of tp_call.
895
+ __pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc);
896
+ if (vc) {
897
+ #if CYTHON_ASSUME_SAFE_MACROS
898
+ return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw);
899
+ #else
900
+ // avoid unused function warning
901
+ (void) &__Pyx_PyVectorcall_FastCallDict;
902
+ return PyVectorcall_Call(func, args, kw);
903
+ #endif
904
+ }
905
+ #endif
906
+
907
+ if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) {
908
+ Py_ssize_t argc;
909
+ PyObject *new_args;
910
+ PyObject *self;
911
+
912
+ #if CYTHON_ASSUME_SAFE_MACROS
913
+ argc = PyTuple_GET_SIZE(args);
914
+ #else
915
+ argc = PyTuple_Size(args);
916
+ if (unlikely(!argc) < 0) return NULL;
917
+ #endif
918
+ new_args = PyTuple_GetSlice(args, 1, argc);
919
+
920
+ if (unlikely(!new_args))
921
+ return NULL;
922
+
923
+ self = PyTuple_GetItem(args, 0);
924
+ if (unlikely(!self)) {
925
+ Py_DECREF(new_args);
926
+ #if PY_MAJOR_VERSION > 2
927
+ PyErr_Format(PyExc_TypeError,
928
+ "unbound method %.200S() needs an argument",
929
+ cyfunc->func_qualname);
930
+ #else
931
+ // %S doesn't work in PyErr_Format on Py2 and replicating
932
+ // the formatting seems more trouble than it's worth
933
+ // (so produce a less useful error message).
934
+ PyErr_SetString(PyExc_TypeError,
935
+ "unbound method needs an argument");
936
+ #endif
937
+ return NULL;
938
+ }
939
+
940
+ result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw);
941
+ Py_DECREF(new_args);
942
+ } else {
943
+ result = __Pyx_CyFunction_Call(func, args, kw);
944
+ }
945
+ return result;
946
+ }
947
+
948
+ #if CYTHON_METH_FASTCALL
949
+ // Check that kwnames is empty (if you want to allow keyword arguments,
950
+ // simply pass kwnames=NULL) and figure out what to do with "self".
951
+ // Return value:
952
+ // 1: self = args[0]
953
+ // 0: self = cyfunc->func.m_self
954
+ // -1: error
955
+ static CYTHON_INLINE int __Pyx_CyFunction_Vectorcall_CheckArgs(__pyx_CyFunctionObject *cyfunc, Py_ssize_t nargs, PyObject *kwnames)
956
+ {
957
+ int ret = 0;
958
+ if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) {
959
+ if (unlikely(nargs < 1)) {
960
+ PyErr_Format(PyExc_TypeError, "%.200s() needs an argument",
961
+ ((PyCFunctionObject*)cyfunc)->m_ml->ml_name);
962
+ return -1;
963
+ }
964
+ ret = 1;
965
+ }
966
+ if (unlikely(kwnames) && unlikely(PyTuple_GET_SIZE(kwnames))) {
967
+ PyErr_Format(PyExc_TypeError,
968
+ "%.200s() takes no keyword arguments", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name);
969
+ return -1;
970
+ }
971
+ return ret;
972
+ }
973
+
974
+ static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames)
975
+ {
976
+ __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func;
977
+ PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml;
978
+ #if CYTHON_BACKPORT_VECTORCALL
979
+ Py_ssize_t nargs = (Py_ssize_t)nargsf;
980
+ #else
981
+ Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
982
+ #endif
983
+ PyObject *self;
984
+ switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) {
985
+ case 1:
986
+ self = args[0];
987
+ args += 1;
988
+ nargs -= 1;
989
+ break;
990
+ case 0:
991
+ self = ((PyCFunctionObject*)cyfunc)->m_self;
992
+ break;
993
+ default:
994
+ return NULL;
995
+ }
996
+
997
+ if (unlikely(nargs != 0)) {
998
+ PyErr_Format(PyExc_TypeError,
999
+ "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)",
1000
+ def->ml_name, nargs);
1001
+ return NULL;
1002
+ }
1003
+ return def->ml_meth(self, NULL);
1004
+ }
1005
+
1006
+ static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames)
1007
+ {
1008
+ __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func;
1009
+ PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml;
1010
+ #if CYTHON_BACKPORT_VECTORCALL
1011
+ Py_ssize_t nargs = (Py_ssize_t)nargsf;
1012
+ #else
1013
+ Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
1014
+ #endif
1015
+ PyObject *self;
1016
+ switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) {
1017
+ case 1:
1018
+ self = args[0];
1019
+ args += 1;
1020
+ nargs -= 1;
1021
+ break;
1022
+ case 0:
1023
+ self = ((PyCFunctionObject*)cyfunc)->m_self;
1024
+ break;
1025
+ default:
1026
+ return NULL;
1027
+ }
1028
+
1029
+ if (unlikely(nargs != 1)) {
1030
+ PyErr_Format(PyExc_TypeError,
1031
+ "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)",
1032
+ def->ml_name, nargs);
1033
+ return NULL;
1034
+ }
1035
+ return def->ml_meth(self, args[0]);
1036
+ }
1037
+
1038
+ static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames)
1039
+ {
1040
+ __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func;
1041
+ PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml;
1042
+ #if CYTHON_BACKPORT_VECTORCALL
1043
+ Py_ssize_t nargs = (Py_ssize_t)nargsf;
1044
+ #else
1045
+ Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
1046
+ #endif
1047
+ PyObject *self;
1048
+ switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) {
1049
+ case 1:
1050
+ self = args[0];
1051
+ args += 1;
1052
+ nargs -= 1;
1053
+ break;
1054
+ case 0:
1055
+ self = ((PyCFunctionObject*)cyfunc)->m_self;
1056
+ break;
1057
+ default:
1058
+ return NULL;
1059
+ }
1060
+
1061
+ return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames);
1062
+ }
1063
+
1064
+ static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames)
1065
+ {
1066
+ __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func;
1067
+ PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml;
1068
+ PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc);
1069
+ #if CYTHON_BACKPORT_VECTORCALL
1070
+ Py_ssize_t nargs = (Py_ssize_t)nargsf;
1071
+ #else
1072
+ Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
1073
+ #endif
1074
+ PyObject *self;
1075
+ switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) {
1076
+ case 1:
1077
+ self = args[0];
1078
+ args += 1;
1079
+ nargs -= 1;
1080
+ break;
1081
+ case 0:
1082
+ self = ((PyCFunctionObject*)cyfunc)->m_self;
1083
+ break;
1084
+ default:
1085
+ return NULL;
1086
+ }
1087
+
1088
+ return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames);
1089
+ }
1090
+ #endif
1091
+
1092
+ #if CYTHON_USE_TYPE_SPECS
1093
+ static PyType_Slot __pyx_CyFunctionType_slots[] = {
1094
+ {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc},
1095
+ {Py_tp_repr, (void *)__Pyx_CyFunction_repr},
1096
+ {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod},
1097
+ {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse},
1098
+ {Py_tp_clear, (void *)__Pyx_CyFunction_clear},
1099
+ {Py_tp_methods, (void *)__pyx_CyFunction_methods},
1100
+ {Py_tp_members, (void *)__pyx_CyFunction_members},
1101
+ {Py_tp_getset, (void *)__pyx_CyFunction_getsets},
1102
+ {Py_tp_descr_get, (void *)__Pyx_PyMethod_New},
1103
+ {0, 0},
1104
+ };
1105
+
1106
+ static PyType_Spec __pyx_CyFunctionType_spec = {
1107
+ __PYX_TYPE_MODULE_PREFIX "cython_function_or_method",
1108
+ sizeof(__pyx_CyFunctionObject),
1109
+ 0,
1110
+ #ifdef Py_TPFLAGS_METHOD_DESCRIPTOR
1111
+ Py_TPFLAGS_METHOD_DESCRIPTOR |
1112
+ #endif
1113
+ #if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL)
1114
+ _Py_TPFLAGS_HAVE_VECTORCALL |
1115
+ #endif
1116
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, /*tp_flags*/
1117
+ __pyx_CyFunctionType_slots
1118
+ };
1119
+ #else /* CYTHON_USE_TYPE_SPECS */
1120
+
1121
+ static PyTypeObject __pyx_CyFunctionType_type = {
1122
+ PyVarObject_HEAD_INIT(0, 0)
1123
+ __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", /*tp_name*/
1124
+ sizeof(__pyx_CyFunctionObject), /*tp_basicsize*/
1125
+ 0, /*tp_itemsize*/
1126
+ (destructor) __Pyx_CyFunction_dealloc, /*tp_dealloc*/
1127
+ #if !CYTHON_METH_FASTCALL
1128
+ 0, /*tp_print*/
1129
+ #elif CYTHON_BACKPORT_VECTORCALL
1130
+ (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), /*tp_vectorcall_offset backported into tp_print*/
1131
+ #else
1132
+ offsetof(PyCFunctionObject, vectorcall), /*tp_vectorcall_offset*/
1133
+ #endif
1134
+ 0, /*tp_getattr*/
1135
+ 0, /*tp_setattr*/
1136
+ #if PY_MAJOR_VERSION < 3
1137
+ 0, /*tp_compare*/
1138
+ #else
1139
+ 0, /*tp_as_async*/
1140
+ #endif
1141
+ (reprfunc) __Pyx_CyFunction_repr, /*tp_repr*/
1142
+ 0, /*tp_as_number*/
1143
+ 0, /*tp_as_sequence*/
1144
+ 0, /*tp_as_mapping*/
1145
+ 0, /*tp_hash*/
1146
+ __Pyx_CyFunction_CallAsMethod, /*tp_call*/
1147
+ 0, /*tp_str*/
1148
+ 0, /*tp_getattro*/
1149
+ 0, /*tp_setattro*/
1150
+ 0, /*tp_as_buffer*/
1151
+ #ifdef Py_TPFLAGS_METHOD_DESCRIPTOR
1152
+ Py_TPFLAGS_METHOD_DESCRIPTOR |
1153
+ #endif
1154
+ #if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL
1155
+ _Py_TPFLAGS_HAVE_VECTORCALL |
1156
+ #endif
1157
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, /*tp_flags*/
1158
+ 0, /*tp_doc*/
1159
+ (traverseproc) __Pyx_CyFunction_traverse, /*tp_traverse*/
1160
+ (inquiry) __Pyx_CyFunction_clear, /*tp_clear*/
1161
+ 0, /*tp_richcompare*/
1162
+ #if PY_VERSION_HEX < 0x030500A0
1163
+ offsetof(__pyx_CyFunctionObject, func_weakreflist), /*tp_weaklistoffset*/
1164
+ #else
1165
+ offsetof(PyCFunctionObject, m_weakreflist), /*tp_weaklistoffset*/
1166
+ #endif
1167
+ 0, /*tp_iter*/
1168
+ 0, /*tp_iternext*/
1169
+ __pyx_CyFunction_methods, /*tp_methods*/
1170
+ __pyx_CyFunction_members, /*tp_members*/
1171
+ __pyx_CyFunction_getsets, /*tp_getset*/
1172
+ 0, /*tp_base*/
1173
+ 0, /*tp_dict*/
1174
+ __Pyx_PyMethod_New, /*tp_descr_get*/
1175
+ 0, /*tp_descr_set*/
1176
+ offsetof(__pyx_CyFunctionObject, func_dict),/*tp_dictoffset*/
1177
+ 0, /*tp_init*/
1178
+ 0, /*tp_alloc*/
1179
+ 0, /*tp_new*/
1180
+ 0, /*tp_free*/
1181
+ 0, /*tp_is_gc*/
1182
+ 0, /*tp_bases*/
1183
+ 0, /*tp_mro*/
1184
+ 0, /*tp_cache*/
1185
+ 0, /*tp_subclasses*/
1186
+ 0, /*tp_weaklist*/
1187
+ 0, /*tp_del*/
1188
+ 0, /*tp_version_tag*/
1189
+ #if PY_VERSION_HEX >= 0x030400a1
1190
+ 0, /*tp_finalize*/
1191
+ #endif
1192
+ #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
1193
+ 0, /*tp_vectorcall*/
1194
+ #endif
1195
+ #if __PYX_NEED_TP_PRINT_SLOT
1196
+ 0, /*tp_print*/
1197
+ #endif
1198
+ #if PY_VERSION_HEX >= 0x030C0000
1199
+ 0, /*tp_watched*/
1200
+ #endif
1201
+ #if PY_VERSION_HEX >= 0x030d00A4
1202
+ 0, /*tp_versions_used*/
1203
+ #endif
1204
+ #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000
1205
+ 0, /*tp_pypy_flags*/
1206
+ #endif
1207
+ };
1208
+ #endif /* CYTHON_USE_TYPE_SPECS */
1209
+
1210
+
1211
+ static int __pyx_CyFunction_init(PyObject *module) {
1212
+ #if CYTHON_USE_TYPE_SPECS
1213
+ __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL);
1214
+ #else
1215
+ CYTHON_UNUSED_VAR(module);
1216
+ __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type);
1217
+ #endif
1218
+ if (unlikely(__pyx_CyFunctionType == NULL)) {
1219
+ return -1;
1220
+ }
1221
+ return 0;
1222
+ }
1223
+
1224
+ static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) {
1225
+ __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func;
1226
+
1227
+ m->defaults = PyObject_Malloc(size);
1228
+ if (unlikely(!m->defaults))
1229
+ return PyErr_NoMemory();
1230
+ memset(m->defaults, 0, size);
1231
+ m->defaults_pyobjects = pyobjects;
1232
+ m->defaults_size = size;
1233
+ return m->defaults;
1234
+ }
1235
+
1236
+ static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) {
1237
+ __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func;
1238
+ m->defaults_tuple = tuple;
1239
+ Py_INCREF(tuple);
1240
+ }
1241
+
1242
+ static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) {
1243
+ __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func;
1244
+ m->defaults_kwdict = dict;
1245
+ Py_INCREF(dict);
1246
+ }
1247
+
1248
+ static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) {
1249
+ __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func;
1250
+ m->func_annotations = dict;
1251
+ Py_INCREF(dict);
1252
+ }
1253
+
1254
+
1255
+ //////////////////// CythonFunction.proto ////////////////////
1256
+
1257
+ static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml,
1258
+ int flags, PyObject* qualname,
1259
+ PyObject *closure,
1260
+ PyObject *module, PyObject *globals,
1261
+ PyObject* code);
1262
+
1263
+ //////////////////// CythonFunction ////////////////////
1264
+ //@requires: CythonFunctionShared
1265
+
1266
+ static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname,
1267
+ PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) {
1268
+ PyObject *op = __Pyx_CyFunction_Init(
1269
+ PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType),
1270
+ ml, flags, qualname, closure, module, globals, code
1271
+ );
1272
+ if (likely(op)) {
1273
+ PyObject_GC_Track(op);
1274
+ }
1275
+ return op;
1276
+ }
1277
+
1278
+ //////////////////// CyFunctionClassCell.proto ////////////////////
1279
+ static int __Pyx_CyFunction_InitClassCell(PyObject *cyfunctions, PyObject *classobj);/*proto*/
1280
+
1281
+ //////////////////// CyFunctionClassCell ////////////////////
1282
+ //@requires: CythonFunctionShared
1283
+
1284
+ static int __Pyx_CyFunction_InitClassCell(PyObject *cyfunctions, PyObject *classobj) {
1285
+ Py_ssize_t i, count = PyList_GET_SIZE(cyfunctions);
1286
+
1287
+ for (i = 0; i < count; i++) {
1288
+ __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *)
1289
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
1290
+ PyList_GET_ITEM(cyfunctions, i);
1291
+ #else
1292
+ PySequence_ITEM(cyfunctions, i);
1293
+ if (unlikely(!m))
1294
+ return -1;
1295
+ #endif
1296
+ __Pyx_CyFunction_SetClassObj(m, classobj);
1297
+ #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS)
1298
+ Py_DECREF((PyObject*)m);
1299
+ #endif
1300
+ }
1301
+ return 0;
1302
+ }
1303
+
1304
+
1305
+ //////////////////// FusedFunction.proto ////////////////////
1306
+
1307
+ typedef struct {
1308
+ __pyx_CyFunctionObject func;
1309
+ PyObject *__signatures__;
1310
+ PyObject *self;
1311
+ } __pyx_FusedFunctionObject;
1312
+
1313
+ static PyObject *__pyx_FusedFunction_New(PyMethodDef *ml, int flags,
1314
+ PyObject *qualname, PyObject *closure,
1315
+ PyObject *module, PyObject *globals,
1316
+ PyObject *code);
1317
+
1318
+ static int __pyx_FusedFunction_clear(__pyx_FusedFunctionObject *self);
1319
+ static int __pyx_FusedFunction_init(PyObject *module);
1320
+
1321
+ #define __Pyx_FusedFunction_USED
1322
+
1323
+ //////////////////// FusedFunction ////////////////////
1324
+ //@requires: CythonFunctionShared
1325
+
1326
+ static PyObject *
1327
+ __pyx_FusedFunction_New(PyMethodDef *ml, int flags,
1328
+ PyObject *qualname, PyObject *closure,
1329
+ PyObject *module, PyObject *globals,
1330
+ PyObject *code)
1331
+ {
1332
+ PyObject *op = __Pyx_CyFunction_Init(
1333
+ // __pyx_CyFunctionObject is correct below since that's the cast that we want.
1334
+ PyObject_GC_New(__pyx_CyFunctionObject, __pyx_FusedFunctionType),
1335
+ ml, flags, qualname, closure, module, globals, code
1336
+ );
1337
+ if (likely(op)) {
1338
+ __pyx_FusedFunctionObject *fusedfunc = (__pyx_FusedFunctionObject *) op;
1339
+ fusedfunc->__signatures__ = NULL;
1340
+ fusedfunc->self = NULL;
1341
+ PyObject_GC_Track(op);
1342
+ }
1343
+ return op;
1344
+ }
1345
+
1346
+ static void
1347
+ __pyx_FusedFunction_dealloc(__pyx_FusedFunctionObject *self)
1348
+ {
1349
+ PyObject_GC_UnTrack(self);
1350
+ Py_CLEAR(self->self);
1351
+ Py_CLEAR(self->__signatures__);
1352
+ __Pyx__CyFunction_dealloc((__pyx_CyFunctionObject *) self);
1353
+ }
1354
+
1355
+ static int
1356
+ __pyx_FusedFunction_traverse(__pyx_FusedFunctionObject *self,
1357
+ visitproc visit,
1358
+ void *arg)
1359
+ {
1360
+ Py_VISIT(self->self);
1361
+ Py_VISIT(self->__signatures__);
1362
+ return __Pyx_CyFunction_traverse((__pyx_CyFunctionObject *) self, visit, arg);
1363
+ }
1364
+
1365
+ static int
1366
+ __pyx_FusedFunction_clear(__pyx_FusedFunctionObject *self)
1367
+ {
1368
+ Py_CLEAR(self->self);
1369
+ Py_CLEAR(self->__signatures__);
1370
+ return __Pyx_CyFunction_clear((__pyx_CyFunctionObject *) self);
1371
+ }
1372
+
1373
+
1374
+ static PyObject *
1375
+ __pyx_FusedFunction_descr_get(PyObject *self, PyObject *obj, PyObject *type)
1376
+ {
1377
+ __pyx_FusedFunctionObject *func, *meth;
1378
+
1379
+ func = (__pyx_FusedFunctionObject *) self;
1380
+
1381
+ if (func->self || func->func.flags & __Pyx_CYFUNCTION_STATICMETHOD) {
1382
+ // Do not allow rebinding and don't do anything for static methods
1383
+ Py_INCREF(self);
1384
+ return self;
1385
+ }
1386
+
1387
+ if (obj == Py_None)
1388
+ obj = NULL;
1389
+
1390
+ if (func->func.flags & __Pyx_CYFUNCTION_CLASSMETHOD)
1391
+ obj = type;
1392
+
1393
+ if (obj == NULL) {
1394
+ // We aren't actually binding to anything, save the effort of rebinding
1395
+ Py_INCREF(self);
1396
+ return self;
1397
+ }
1398
+
1399
+ meth = (__pyx_FusedFunctionObject *) __pyx_FusedFunction_New(
1400
+ ((PyCFunctionObject *) func)->m_ml,
1401
+ ((__pyx_CyFunctionObject *) func)->flags,
1402
+ ((__pyx_CyFunctionObject *) func)->func_qualname,
1403
+ ((__pyx_CyFunctionObject *) func)->func_closure,
1404
+ ((PyCFunctionObject *) func)->m_module,
1405
+ ((__pyx_CyFunctionObject *) func)->func_globals,
1406
+ ((__pyx_CyFunctionObject *) func)->func_code);
1407
+ if (unlikely(!meth))
1408
+ return NULL;
1409
+
1410
+ // defaults needs copying fully rather than just copying the pointer
1411
+ // since otherwise it will be freed on destruction of meth despite
1412
+ // belonging to func rather than meth
1413
+ if (func->func.defaults) {
1414
+ PyObject **pydefaults;
1415
+ int i;
1416
+
1417
+ if (unlikely(!__Pyx_CyFunction_InitDefaults(
1418
+ (PyObject*)meth,
1419
+ func->func.defaults_size,
1420
+ func->func.defaults_pyobjects))) {
1421
+ Py_XDECREF((PyObject*)meth);
1422
+ return NULL;
1423
+ }
1424
+ memcpy(meth->func.defaults, func->func.defaults, func->func.defaults_size);
1425
+
1426
+ pydefaults = __Pyx_CyFunction_Defaults(PyObject *, meth);
1427
+ for (i = 0; i < meth->func.defaults_pyobjects; i++)
1428
+ Py_XINCREF(pydefaults[i]);
1429
+ }
1430
+
1431
+ __Pyx_CyFunction_SetClassObj(meth, __Pyx_CyFunction_GetClassObj(func));
1432
+
1433
+ Py_XINCREF(func->__signatures__);
1434
+ meth->__signatures__ = func->__signatures__;
1435
+
1436
+ Py_XINCREF(func->func.defaults_tuple);
1437
+ meth->func.defaults_tuple = func->func.defaults_tuple;
1438
+
1439
+ Py_XINCREF(obj);
1440
+ meth->self = obj;
1441
+
1442
+ return (PyObject *) meth;
1443
+ }
1444
+
1445
+ static PyObject *
1446
+ _obj_to_string(PyObject *obj)
1447
+ {
1448
+ if (PyUnicode_CheckExact(obj))
1449
+ return __Pyx_NewRef(obj);
1450
+ #if PY_MAJOR_VERSION == 2
1451
+ else if (PyString_Check(obj))
1452
+ return PyUnicode_FromEncodedObject(obj, NULL, "strict");
1453
+ #endif
1454
+ else if (PyType_Check(obj))
1455
+ return PyObject_GetAttr(obj, PYIDENT("__name__"));
1456
+ else
1457
+ return PyObject_Unicode(obj);
1458
+ }
1459
+
1460
+ static PyObject *
1461
+ __pyx_FusedFunction_getitem(__pyx_FusedFunctionObject *self, PyObject *idx)
1462
+ {
1463
+ PyObject *signature = NULL;
1464
+ PyObject *unbound_result_func;
1465
+ PyObject *result_func = NULL;
1466
+
1467
+ if (unlikely(self->__signatures__ == NULL)) {
1468
+ PyErr_SetString(PyExc_TypeError, "Function is not fused");
1469
+ return NULL;
1470
+ }
1471
+
1472
+ if (PyTuple_Check(idx)) {
1473
+ Py_ssize_t n = PyTuple_GET_SIZE(idx);
1474
+ PyObject *list = PyList_New(n);
1475
+ int i;
1476
+
1477
+ if (unlikely(!list))
1478
+ return NULL;
1479
+
1480
+ for (i = 0; i < n; i++) {
1481
+ PyObject *string;
1482
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
1483
+ PyObject *item = PyTuple_GET_ITEM(idx, i);
1484
+ #else
1485
+ PyObject *item = PySequence_ITEM(idx, i); if (unlikely(!item)) goto __pyx_err;
1486
+ #endif
1487
+ string = _obj_to_string(item);
1488
+ #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS)
1489
+ Py_DECREF(item);
1490
+ #endif
1491
+ if (unlikely(!string)) goto __pyx_err;
1492
+ PyList_SET_ITEM(list, i, string);
1493
+ }
1494
+
1495
+ signature = PyUnicode_Join(PYUNICODE("|"), list);
1496
+ __pyx_err:;
1497
+ Py_DECREF(list);
1498
+ } else {
1499
+ signature = _obj_to_string(idx);
1500
+ }
1501
+
1502
+ if (unlikely(!signature))
1503
+ return NULL;
1504
+
1505
+ unbound_result_func = PyObject_GetItem(self->__signatures__, signature);
1506
+
1507
+ if (likely(unbound_result_func)) {
1508
+ if (self->self) {
1509
+ __pyx_FusedFunctionObject *unbound = (__pyx_FusedFunctionObject *) unbound_result_func;
1510
+
1511
+ // TODO: move this to InitClassCell
1512
+ __Pyx_CyFunction_SetClassObj(unbound, __Pyx_CyFunction_GetClassObj(self));
1513
+
1514
+ result_func = __pyx_FusedFunction_descr_get(unbound_result_func,
1515
+ self->self, self->self);
1516
+ } else {
1517
+ result_func = unbound_result_func;
1518
+ Py_INCREF(result_func);
1519
+ }
1520
+ }
1521
+
1522
+ Py_DECREF(signature);
1523
+ Py_XDECREF(unbound_result_func);
1524
+
1525
+ return result_func;
1526
+ }
1527
+
1528
+ static PyObject *
1529
+ __pyx_FusedFunction_callfunction(PyObject *func, PyObject *args, PyObject *kw)
1530
+ {
1531
+ __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func;
1532
+ int static_specialized = (cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD &&
1533
+ !((__pyx_FusedFunctionObject *) func)->__signatures__);
1534
+
1535
+ if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !static_specialized) {
1536
+ return __Pyx_CyFunction_CallAsMethod(func, args, kw);
1537
+ } else {
1538
+ return __Pyx_CyFunction_Call(func, args, kw);
1539
+ }
1540
+ }
1541
+
1542
+ // Note: the 'self' from method binding is passed in in the args tuple,
1543
+ // whereas PyCFunctionObject's m_self is passed in as the first
1544
+ // argument to the C function. For extension methods we need
1545
+ // to pass 'self' as 'm_self' and not as the first element of the
1546
+ // args tuple.
1547
+
1548
+ static PyObject *
1549
+ __pyx_FusedFunction_call(PyObject *func, PyObject *args, PyObject *kw)
1550
+ {
1551
+ __pyx_FusedFunctionObject *binding_func = (__pyx_FusedFunctionObject *) func;
1552
+ Py_ssize_t argc = PyTuple_GET_SIZE(args);
1553
+ PyObject *new_args = NULL;
1554
+ __pyx_FusedFunctionObject *new_func = NULL;
1555
+ PyObject *result = NULL;
1556
+ int is_staticmethod = binding_func->func.flags & __Pyx_CYFUNCTION_STATICMETHOD;
1557
+
1558
+ if (binding_func->self) {
1559
+ // Bound method call, put 'self' in the args tuple
1560
+ PyObject *self;
1561
+ Py_ssize_t i;
1562
+ new_args = PyTuple_New(argc + 1);
1563
+ if (unlikely(!new_args))
1564
+ return NULL;
1565
+
1566
+ self = binding_func->self;
1567
+
1568
+ Py_INCREF(self);
1569
+ PyTuple_SET_ITEM(new_args, 0, self);
1570
+ self = NULL;
1571
+
1572
+ for (i = 0; i < argc; i++) {
1573
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
1574
+ PyObject *item = PyTuple_GET_ITEM(args, i);
1575
+ Py_INCREF(item);
1576
+ #else
1577
+ PyObject *item = PySequence_ITEM(args, i); if (unlikely(!item)) goto bad;
1578
+ #endif
1579
+ PyTuple_SET_ITEM(new_args, i + 1, item);
1580
+ }
1581
+
1582
+ args = new_args;
1583
+ }
1584
+
1585
+ if (binding_func->__signatures__) {
1586
+ PyObject *tup;
1587
+ if (is_staticmethod && binding_func->func.flags & __Pyx_CYFUNCTION_CCLASS) {
1588
+ // FIXME: this seems wrong, but we must currently pass the signatures dict as 'self' argument
1589
+ tup = PyTuple_Pack(3, args,
1590
+ kw == NULL ? Py_None : kw,
1591
+ binding_func->func.defaults_tuple);
1592
+ if (unlikely(!tup)) goto bad;
1593
+ new_func = (__pyx_FusedFunctionObject *) __Pyx_CyFunction_CallMethod(
1594
+ func, binding_func->__signatures__, tup, NULL);
1595
+ } else {
1596
+ tup = PyTuple_Pack(4, binding_func->__signatures__, args,
1597
+ kw == NULL ? Py_None : kw,
1598
+ binding_func->func.defaults_tuple);
1599
+ if (unlikely(!tup)) goto bad;
1600
+ new_func = (__pyx_FusedFunctionObject *) __pyx_FusedFunction_callfunction(func, tup, NULL);
1601
+ }
1602
+ Py_DECREF(tup);
1603
+
1604
+ if (unlikely(!new_func))
1605
+ goto bad;
1606
+
1607
+ __Pyx_CyFunction_SetClassObj(new_func, __Pyx_CyFunction_GetClassObj(binding_func));
1608
+
1609
+ func = (PyObject *) new_func;
1610
+ }
1611
+
1612
+ result = __pyx_FusedFunction_callfunction(func, args, kw);
1613
+ bad:
1614
+ Py_XDECREF(new_args);
1615
+ Py_XDECREF((PyObject *) new_func);
1616
+ return result;
1617
+ }
1618
+
1619
+ static PyMemberDef __pyx_FusedFunction_members[] = {
1620
+ {(char *) "__signatures__",
1621
+ T_OBJECT,
1622
+ offsetof(__pyx_FusedFunctionObject, __signatures__),
1623
+ READONLY,
1624
+ 0},
1625
+ {(char *) "__self__", T_OBJECT_EX, offsetof(__pyx_FusedFunctionObject, self), READONLY, 0},
1626
+ {0, 0, 0, 0, 0},
1627
+ };
1628
+
1629
+ static PyGetSetDef __pyx_FusedFunction_getsets[] = {
1630
+ // __doc__ is None for the fused function type, but we need it to be
1631
+ // a descriptor for the instance's __doc__, so rebuild the descriptor in our subclass
1632
+ // (all other descriptors are inherited)
1633
+ {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0},
1634
+ {0, 0, 0, 0, 0}
1635
+ };
1636
+
1637
+ #if CYTHON_USE_TYPE_SPECS
1638
+ static PyType_Slot __pyx_FusedFunctionType_slots[] = {
1639
+ {Py_tp_dealloc, (void *)__pyx_FusedFunction_dealloc},
1640
+ {Py_tp_call, (void *)__pyx_FusedFunction_call},
1641
+ {Py_tp_traverse, (void *)__pyx_FusedFunction_traverse},
1642
+ {Py_tp_clear, (void *)__pyx_FusedFunction_clear},
1643
+ {Py_tp_members, (void *)__pyx_FusedFunction_members},
1644
+ {Py_tp_getset, (void *)__pyx_FusedFunction_getsets},
1645
+ {Py_tp_descr_get, (void *)__pyx_FusedFunction_descr_get},
1646
+ {Py_mp_subscript, (void *)__pyx_FusedFunction_getitem},
1647
+ {0, 0},
1648
+ };
1649
+
1650
+ static PyType_Spec __pyx_FusedFunctionType_spec = {
1651
+ __PYX_TYPE_MODULE_PREFIX "fused_cython_function",
1652
+ sizeof(__pyx_FusedFunctionObject),
1653
+ 0,
1654
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, /*tp_flags*/
1655
+ __pyx_FusedFunctionType_slots
1656
+ };
1657
+
1658
+ #else /* !CYTHON_USE_TYPE_SPECS */
1659
+
1660
+ static PyMappingMethods __pyx_FusedFunction_mapping_methods = {
1661
+ 0,
1662
+ (binaryfunc) __pyx_FusedFunction_getitem,
1663
+ 0,
1664
+ };
1665
+
1666
+ static PyTypeObject __pyx_FusedFunctionType_type = {
1667
+ PyVarObject_HEAD_INIT(0, 0)
1668
+ __PYX_TYPE_MODULE_PREFIX "fused_cython_function", /*tp_name*/
1669
+ sizeof(__pyx_FusedFunctionObject), /*tp_basicsize*/
1670
+ 0, /*tp_itemsize*/
1671
+ (destructor) __pyx_FusedFunction_dealloc, /*tp_dealloc*/
1672
+ 0, /*tp_print*/
1673
+ 0, /*tp_getattr*/
1674
+ 0, /*tp_setattr*/
1675
+ #if PY_MAJOR_VERSION < 3
1676
+ 0, /*tp_compare*/
1677
+ #else
1678
+ 0, /*tp_as_async*/
1679
+ #endif
1680
+ 0, /*tp_repr*/
1681
+ 0, /*tp_as_number*/
1682
+ 0, /*tp_as_sequence*/
1683
+ &__pyx_FusedFunction_mapping_methods, /*tp_as_mapping*/
1684
+ 0, /*tp_hash*/
1685
+ (ternaryfunc) __pyx_FusedFunction_call, /*tp_call*/
1686
+ 0, /*tp_str*/
1687
+ 0, /*tp_getattro*/
1688
+ 0, /*tp_setattro*/
1689
+ 0, /*tp_as_buffer*/
1690
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, /*tp_flags*/
1691
+ 0, /*tp_doc*/
1692
+ (traverseproc) __pyx_FusedFunction_traverse, /*tp_traverse*/
1693
+ (inquiry) __pyx_FusedFunction_clear,/*tp_clear*/
1694
+ 0, /*tp_richcompare*/
1695
+ 0, /*tp_weaklistoffset*/
1696
+ 0, /*tp_iter*/
1697
+ 0, /*tp_iternext*/
1698
+ 0, /*tp_methods*/
1699
+ __pyx_FusedFunction_members, /*tp_members*/
1700
+ __pyx_FusedFunction_getsets, /*tp_getset*/
1701
+ // NOTE: tp_base may be changed later during module initialisation when importing CyFunction across modules.
1702
+ &__pyx_CyFunctionType_type, /*tp_base*/
1703
+ 0, /*tp_dict*/
1704
+ __pyx_FusedFunction_descr_get, /*tp_descr_get*/
1705
+ 0, /*tp_descr_set*/
1706
+ 0, /*tp_dictoffset*/
1707
+ 0, /*tp_init*/
1708
+ 0, /*tp_alloc*/
1709
+ 0, /*tp_new*/
1710
+ 0, /*tp_free*/
1711
+ 0, /*tp_is_gc*/
1712
+ 0, /*tp_bases*/
1713
+ 0, /*tp_mro*/
1714
+ 0, /*tp_cache*/
1715
+ 0, /*tp_subclasses*/
1716
+ 0, /*tp_weaklist*/
1717
+ 0, /*tp_del*/
1718
+ 0, /*tp_version_tag*/
1719
+ #if PY_VERSION_HEX >= 0x030400a1
1720
+ 0, /*tp_finalize*/
1721
+ #endif
1722
+ #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
1723
+ 0, /*tp_vectorcall*/
1724
+ #endif
1725
+ #if __PYX_NEED_TP_PRINT_SLOT
1726
+ 0, /*tp_print*/
1727
+ #endif
1728
+ #if PY_VERSION_HEX >= 0x030C0000
1729
+ 0, /*tp_watched*/
1730
+ #endif
1731
+ #if PY_VERSION_HEX >= 0x030d00A4
1732
+ 0, /*tp_versions_used*/
1733
+ #endif
1734
+ #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000
1735
+ 0, /*tp_pypy_flags*/
1736
+ #endif
1737
+ };
1738
+ #endif
1739
+
1740
+ static int __pyx_FusedFunction_init(PyObject *module) {
1741
+ #if CYTHON_USE_TYPE_SPECS
1742
+ PyObject *bases = PyTuple_Pack(1, __pyx_CyFunctionType);
1743
+ if (unlikely(!bases)) {
1744
+ return -1;
1745
+ }
1746
+ __pyx_FusedFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_FusedFunctionType_spec, bases);
1747
+ Py_DECREF(bases);
1748
+ #else
1749
+ CYTHON_UNUSED_VAR(module);
1750
+ // Set base from __Pyx_FetchCommonTypeFromSpec, in case it's different from the local static value.
1751
+ __pyx_FusedFunctionType_type.tp_base = __pyx_CyFunctionType;
1752
+ __pyx_FusedFunctionType = __Pyx_FetchCommonType(&__pyx_FusedFunctionType_type);
1753
+ #endif
1754
+ if (unlikely(__pyx_FusedFunctionType == NULL)) {
1755
+ return -1;
1756
+ }
1757
+ return 0;
1758
+ }
1759
+
1760
+ //////////////////// ClassMethod.proto ////////////////////
1761
+
1762
+ #include "descrobject.h"
1763
+ CYTHON_UNUSED static PyObject* __Pyx_Method_ClassMethod(PyObject *method); /*proto*/
1764
+
1765
+ //////////////////// ClassMethod ////////////////////
1766
+
1767
+ static PyObject* __Pyx_Method_ClassMethod(PyObject *method) {
1768
+ #if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM <= 0x05080000
1769
+ if (PyObject_TypeCheck(method, &PyWrapperDescr_Type)) {
1770
+ // cdef classes
1771
+ return PyClassMethod_New(method);
1772
+ }
1773
+ #else
1774
+ #if CYTHON_COMPILING_IN_PYPY
1775
+ // special C-API function only in PyPy >= 5.9
1776
+ if (PyMethodDescr_Check(method))
1777
+ #else
1778
+ #if PY_MAJOR_VERSION == 2
1779
+ // PyMethodDescr_Type is not exposed in the CPython C-API in Py2.
1780
+ static PyTypeObject *methoddescr_type = NULL;
1781
+ if (unlikely(methoddescr_type == NULL)) {
1782
+ PyObject *meth = PyObject_GetAttrString((PyObject*)&PyList_Type, "append");
1783
+ if (unlikely(!meth)) return NULL;
1784
+ methoddescr_type = Py_TYPE(meth);
1785
+ Py_DECREF(meth);
1786
+ }
1787
+ #else
1788
+ PyTypeObject *methoddescr_type = &PyMethodDescr_Type;
1789
+ #endif
1790
+ if (__Pyx_TypeCheck(method, methoddescr_type))
1791
+ #endif
1792
+ {
1793
+ // cdef classes
1794
+ PyMethodDescrObject *descr = (PyMethodDescrObject *)method;
1795
+ #if PY_VERSION_HEX < 0x03020000
1796
+ PyTypeObject *d_type = descr->d_type;
1797
+ #else
1798
+ PyTypeObject *d_type = descr->d_common.d_type;
1799
+ #endif
1800
+ return PyDescr_NewClassMethod(d_type, descr->d_method);
1801
+ }
1802
+ #endif
1803
+ else if (PyMethod_Check(method)) {
1804
+ // python classes
1805
+ return PyClassMethod_New(PyMethod_GET_FUNCTION(method));
1806
+ }
1807
+ else {
1808
+ return PyClassMethod_New(method);
1809
+ }
1810
+ }
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/Dataclasses.c ADDED
@@ -0,0 +1,188 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ///////////////////// ModuleLoader.proto //////////////////////////
2
+
3
+ static PyObject* __Pyx_LoadInternalModule(const char* name, const char* fallback_code); /* proto */
4
+
5
+ //////////////////// ModuleLoader ///////////////////////
6
+ //@requires: CommonStructures.c::FetchSharedCythonModule
7
+
8
+ static PyObject* __Pyx_LoadInternalModule(const char* name, const char* fallback_code) {
9
+ // We want to be able to use the contents of the standard library dataclasses module where available.
10
+ // If those objects aren't available (due to Python version) then a simple fallback is substituted
11
+ // instead, which largely just fails with a not-implemented error.
12
+ //
13
+ // The fallbacks are placed in the "shared abi module" as a convenient internal place to
14
+ // store them
15
+
16
+ PyObject *shared_abi_module = 0, *module = 0;
17
+ #if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1
18
+ PyObject *result;
19
+ #endif
20
+
21
+ shared_abi_module = __Pyx_FetchSharedCythonABIModule();
22
+ if (!shared_abi_module) return NULL;
23
+
24
+ #if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1
25
+ if (PyObject_GetOptionalAttrString(shared_abi_module, name, &result) != 0) {
26
+ Py_DECREF(shared_abi_module);
27
+ return result;
28
+ }
29
+ #else
30
+ if (PyObject_HasAttrString(shared_abi_module, name)) {
31
+ PyObject* result = PyObject_GetAttrString(shared_abi_module, name);
32
+ Py_DECREF(shared_abi_module);
33
+ return result;
34
+ }
35
+ #endif
36
+
37
+ // the best and simplest case is simply to defer to the standard library (if available)
38
+ module = PyImport_ImportModule(name);
39
+ if (!module) {
40
+ PyObject *localDict, *runValue, *builtins, *modulename;
41
+ if (!PyErr_ExceptionMatches(PyExc_ImportError)) goto bad;
42
+ PyErr_Clear(); /* this is reasonably likely (especially on older versions of Python) */
43
+ #if PY_MAJOR_VERSION < 3
44
+ modulename = PyBytes_FromFormat("_cython_" CYTHON_ABI ".%s", name);
45
+ #else
46
+ modulename = PyUnicode_FromFormat("_cython_" CYTHON_ABI ".%s", name);
47
+ #endif
48
+ if (!modulename) goto bad;
49
+ #if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_CPYTHON
50
+ module = PyImport_AddModuleObject(modulename); /* borrowed */
51
+ #else
52
+ module = PyImport_AddModule(PyBytes_AsString(modulename)); /* borrowed */
53
+ #endif
54
+ Py_DECREF(modulename);
55
+ if (!module) goto bad;
56
+ Py_INCREF(module);
57
+ if (PyObject_SetAttrString(shared_abi_module, name, module) < 0) goto bad;
58
+ localDict = PyModule_GetDict(module); /* borrowed */
59
+ if (!localDict) goto bad;
60
+ builtins = PyEval_GetBuiltins(); /* borrowed */
61
+ if (!builtins) goto bad;
62
+ if (PyDict_SetItemString(localDict, "__builtins__", builtins) <0) goto bad;
63
+
64
+ runValue = PyRun_String(fallback_code, Py_file_input, localDict, localDict);
65
+ if (!runValue) goto bad;
66
+ Py_DECREF(runValue);
67
+ }
68
+ goto shared_cleanup;
69
+
70
+ bad:
71
+ Py_CLEAR(module);
72
+ shared_cleanup:
73
+ Py_XDECREF(shared_abi_module);
74
+ return module;
75
+ }
76
+
77
+ ///////////////////// SpecificModuleLoader.proto //////////////////////
78
+ //@substitute: tempita
79
+
80
+ static PyObject* __Pyx_Load_{{cname}}_Module(void); /* proto */
81
+
82
+
83
+ //////////////////// SpecificModuleLoader ///////////////////////
84
+ //@requires: ModuleLoader
85
+
86
+ static PyObject* __Pyx_Load_{{cname}}_Module(void) {
87
+ return __Pyx_LoadInternalModule("{{cname}}", {{py_code}});
88
+ }
89
+
90
+ //////////////////// DataclassesCallHelper.proto ////////////////////////
91
+
92
+ static PyObject* __Pyx_DataclassesCallHelper(PyObject *callable, PyObject *kwds); /* proto */
93
+
94
+ //////////////////// DataclassesCallHelper ////////////////////////
95
+ //@substitute: naming
96
+
97
+ // The signature of a few of the dataclasses module functions has
98
+ // been expanded over the years. Cython always passes the full set
99
+ // of arguments from the most recent version we know of, so needs
100
+ // to remove any arguments that don't exist on earlier versions.
101
+
102
+ #if PY_MAJOR_VERSION >= 3
103
+ static int __Pyx_DataclassesCallHelper_FilterToDict(PyObject *callable, PyObject *kwds, PyObject *new_kwds, PyObject *args_list, int is_kwonly) {
104
+ Py_ssize_t size, i;
105
+ size = PySequence_Size(args_list);
106
+ if (size == -1) return -1;
107
+
108
+ for (i=0; i<size; ++i) {
109
+ PyObject *key, *value;
110
+ int setitem_result;
111
+ key = PySequence_GetItem(args_list, i);
112
+ if (!key) return -1;
113
+
114
+ if (PyUnicode_Check(key) && (
115
+ PyUnicode_CompareWithASCIIString(key, "self") == 0 ||
116
+ // namedtuple constructor in fallback code
117
+ PyUnicode_CompareWithASCIIString(key, "_cls") == 0)) {
118
+ Py_DECREF(key);
119
+ continue;
120
+ }
121
+
122
+ value = PyDict_GetItem(kwds, key);
123
+ if (!value) {
124
+ if (is_kwonly) {
125
+ Py_DECREF(key);
126
+ continue;
127
+ } else {
128
+ // The most likely reason for this is that Cython
129
+ // hasn't kept up to date with the Python dataclasses module.
130
+ // To be nice to our users, try not to fail, but ask them
131
+ // to report a bug so we can keep up to date.
132
+ value = Py_None;
133
+ if (PyErr_WarnFormat(
134
+ PyExc_RuntimeWarning, 1,
135
+ "Argument %S not passed to %R. This is likely a bug in Cython so please report it.",
136
+ key, callable) == -1) {
137
+ Py_DECREF(key);
138
+ return -1;
139
+ }
140
+ }
141
+ }
142
+ Py_INCREF(value);
143
+ setitem_result = PyDict_SetItem(new_kwds, key, value);
144
+ Py_DECREF(key);
145
+ Py_DECREF(value);
146
+ if (setitem_result == -1) return -1;
147
+ }
148
+ return 0;
149
+ }
150
+ #endif
151
+
152
+ static PyObject* __Pyx_DataclassesCallHelper(PyObject *callable, PyObject *kwds) {
153
+ #if PY_MAJOR_VERSION < 3
154
+ // We're falling back to our full replacement anyway
155
+ return PyObject_Call(callable, $empty_tuple, kwds);
156
+ #else
157
+ PyObject *new_kwds=NULL, *result=NULL;
158
+ PyObject *inspect;
159
+ PyObject *args_list=NULL, *kwonly_args_list=NULL, *getfullargspec_result=NULL;
160
+
161
+ // Going via inspect to work out what arguments to pass is unlikely to be the
162
+ // fastest thing ever. However, it is compatible, and only happens once
163
+ // at module-import time.
164
+ inspect = PyImport_ImportModule("inspect");
165
+ if (!inspect) goto bad;
166
+ getfullargspec_result = PyObject_CallMethodObjArgs(inspect, PYUNICODE("getfullargspec"), callable, NULL);
167
+ Py_DECREF(inspect);
168
+ if (!getfullargspec_result) goto bad;
169
+ args_list = PyObject_GetAttrString(getfullargspec_result, "args");
170
+ if (!args_list) goto bad;
171
+ kwonly_args_list = PyObject_GetAttrString(getfullargspec_result, "kwonlyargs");
172
+ if (!kwonly_args_list) goto bad;
173
+
174
+ new_kwds = PyDict_New();
175
+ if (!new_kwds) goto bad;
176
+
177
+ // copy over only those arguments that are in the specification
178
+ if (__Pyx_DataclassesCallHelper_FilterToDict(callable, kwds, new_kwds, args_list, 0) == -1) goto bad;
179
+ if (__Pyx_DataclassesCallHelper_FilterToDict(callable, kwds, new_kwds, kwonly_args_list, 1) == -1) goto bad;
180
+ result = PyObject_Call(callable, $empty_tuple, new_kwds);
181
+ bad:
182
+ Py_XDECREF(getfullargspec_result);
183
+ Py_XDECREF(args_list);
184
+ Py_XDECREF(kwonly_args_list);
185
+ Py_XDECREF(new_kwds);
186
+ return result;
187
+ #endif
188
+ }
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/Embed.c ADDED
@@ -0,0 +1,255 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ //////////////////// MainFunction ////////////////////
2
+
3
+ #ifdef __FreeBSD__
4
+ #include <floatingpoint.h>
5
+ #endif
6
+
7
+ #if PY_MAJOR_VERSION < 3
8
+ int %(main_method)s(int argc, char** argv)
9
+ #elif defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS)
10
+ int %(wmain_method)s(int argc, wchar_t **argv)
11
+ #else
12
+ static int __Pyx_main(int argc, wchar_t **argv)
13
+ #endif
14
+ {
15
+ /* 754 requires that FP exceptions run in "no stop" mode by default,
16
+ * and until C vendors implement C99's ways to control FP exceptions,
17
+ * Python requires non-stop mode. Alas, some platforms enable FP
18
+ * exceptions by default. Here we disable them.
19
+ */
20
+ #ifdef __FreeBSD__
21
+ fp_except_t m;
22
+
23
+ m = fpgetmask();
24
+ fpsetmask(m & ~FP_X_OFL);
25
+ #endif
26
+ #if PY_VERSION_HEX < 0x03080000
27
+ if (argc && argv)
28
+ Py_SetProgramName(argv[0]);
29
+ #endif
30
+
31
+ #if PY_MAJOR_VERSION < 3
32
+ if (PyImport_AppendInittab("%(module_name)s", init%(module_name)s) < 0) return 1;
33
+ #else
34
+ if (PyImport_AppendInittab("%(module_name)s", PyInit_%(module_name)s) < 0) return 1;
35
+ #endif
36
+
37
+ #if PY_VERSION_HEX < 0x03080000
38
+ Py_Initialize();
39
+ if (argc && argv)
40
+ PySys_SetArgv(argc, argv);
41
+ #else
42
+ {
43
+ PyStatus status;
44
+
45
+ PyConfig config;
46
+ PyConfig_InitPythonConfig(&config);
47
+ // Disable parsing command line arguments
48
+ config.parse_argv = 0;
49
+
50
+ if (argc && argv) {
51
+ status = PyConfig_SetString(&config, &config.program_name, argv[0]);
52
+ if (PyStatus_Exception(status)) {
53
+ PyConfig_Clear(&config);
54
+ return 1;
55
+ }
56
+
57
+ status = PyConfig_SetArgv(&config, argc, argv);
58
+ if (PyStatus_Exception(status)) {
59
+ PyConfig_Clear(&config);
60
+ return 1;
61
+ }
62
+ }
63
+
64
+ status = Py_InitializeFromConfig(&config);
65
+ if (PyStatus_Exception(status)) {
66
+ PyConfig_Clear(&config);
67
+ return 1;
68
+ }
69
+
70
+ PyConfig_Clear(&config);
71
+ }
72
+ #endif
73
+
74
+ { /* init module '%(module_name)s' as '__main__' */
75
+ PyObject* m = NULL;
76
+ %(module_is_main)s = 1;
77
+ m = PyImport_ImportModule("%(module_name)s");
78
+
79
+ if (!m && PyErr_Occurred()) {
80
+ PyErr_Print(); /* This exits with the right code if SystemExit. */
81
+ #if PY_MAJOR_VERSION < 3
82
+ if (Py_FlushLine()) PyErr_Clear();
83
+ #endif
84
+ return 1;
85
+ }
86
+ Py_XDECREF(m);
87
+ }
88
+ #if PY_VERSION_HEX < 0x03060000
89
+ Py_Finalize();
90
+ #else
91
+ if (Py_FinalizeEx() < 0)
92
+ return 2;
93
+ #endif
94
+ return 0;
95
+ }
96
+
97
+
98
+ #if PY_MAJOR_VERSION >= 3 && !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS)
99
+ #include <locale.h>
100
+
101
+ #if PY_VERSION_HEX < 0x03050000
102
+
103
+ static wchar_t*
104
+ __Pyx_char2wchar(char* arg)
105
+ {
106
+ wchar_t *res;
107
+ #ifdef HAVE_BROKEN_MBSTOWCS
108
+ /* Some platforms have a broken implementation of
109
+ * mbstowcs which does not count the characters that
110
+ * would result from conversion. Use an upper bound.
111
+ */
112
+ size_t argsize = strlen(arg);
113
+ #else
114
+ size_t argsize = mbstowcs(NULL, arg, 0);
115
+ #endif
116
+ size_t count;
117
+ unsigned char *in;
118
+ wchar_t *out;
119
+ #ifdef HAVE_MBRTOWC
120
+ mbstate_t mbs;
121
+ #endif
122
+ if (argsize != (size_t)-1) {
123
+ res = (wchar_t *)malloc((argsize+1)*sizeof(wchar_t));
124
+ if (!res)
125
+ goto oom;
126
+ count = mbstowcs(res, arg, argsize+1);
127
+ if (count != (size_t)-1) {
128
+ wchar_t *tmp;
129
+ /* Only use the result if it contains no
130
+ surrogate characters. */
131
+ for (tmp = res; *tmp != 0 &&
132
+ (*tmp < 0xd800 || *tmp > 0xdfff); tmp++)
133
+ ;
134
+ if (*tmp == 0)
135
+ return res;
136
+ }
137
+ free(res);
138
+ }
139
+ /* Conversion failed. Fall back to escaping with surrogateescape. */
140
+ #ifdef HAVE_MBRTOWC
141
+ /* Try conversion with mbrtwoc (C99), and escape non-decodable bytes. */
142
+
143
+ /* Overallocate; as multi-byte characters are in the argument, the
144
+ actual output could use less memory. */
145
+ argsize = strlen(arg) + 1;
146
+ res = (wchar_t *)malloc(argsize*sizeof(wchar_t));
147
+ if (!res) goto oom;
148
+ in = (unsigned char*)arg;
149
+ out = res;
150
+ memset(&mbs, 0, sizeof mbs);
151
+ while (argsize) {
152
+ size_t converted = mbrtowc(out, (char*)in, argsize, &mbs);
153
+ if (converted == 0)
154
+ /* Reached end of string; null char stored. */
155
+ break;
156
+ if (converted == (size_t)-2) {
157
+ /* Incomplete character. This should never happen,
158
+ since we provide everything that we have -
159
+ unless there is a bug in the C library, or I
160
+ misunderstood how mbrtowc works. */
161
+ fprintf(stderr, "unexpected mbrtowc result -2\\n");
162
+ free(res);
163
+ return NULL;
164
+ }
165
+ if (converted == (size_t)-1) {
166
+ /* Conversion error. Escape as UTF-8b, and start over
167
+ in the initial shift state. */
168
+ *out++ = 0xdc00 + *in++;
169
+ argsize--;
170
+ memset(&mbs, 0, sizeof mbs);
171
+ continue;
172
+ }
173
+ if (*out >= 0xd800 && *out <= 0xdfff) {
174
+ /* Surrogate character. Escape the original
175
+ byte sequence with surrogateescape. */
176
+ argsize -= converted;
177
+ while (converted--)
178
+ *out++ = 0xdc00 + *in++;
179
+ continue;
180
+ }
181
+ /* successfully converted some bytes */
182
+ in += converted;
183
+ argsize -= converted;
184
+ out++;
185
+ }
186
+ #else
187
+ /* Cannot use C locale for escaping; manually escape as if charset
188
+ is ASCII (i.e. escape all bytes > 128. This will still roundtrip
189
+ correctly in the locale's charset, which must be an ASCII superset. */
190
+ res = (wchar_t *)malloc((strlen(arg)+1)*sizeof(wchar_t));
191
+ if (!res) goto oom;
192
+ in = (unsigned char*)arg;
193
+ out = res;
194
+ while(*in)
195
+ if(*in < 128)
196
+ *out++ = *in++;
197
+ else
198
+ *out++ = 0xdc00 + *in++;
199
+ *out = 0;
200
+ #endif
201
+ return res;
202
+ oom:
203
+ fprintf(stderr, "out of memory\\n");
204
+ return NULL;
205
+ }
206
+
207
+ #endif
208
+
209
+ int
210
+ %(main_method)s(int argc, char **argv)
211
+ {
212
+ if (!argc) {
213
+ return __Pyx_main(0, NULL);
214
+ }
215
+ else {
216
+ int i, res;
217
+ wchar_t **argv_copy = (wchar_t **)malloc(sizeof(wchar_t*)*argc);
218
+ /* We need a second copy, as Python might modify the first one. */
219
+ wchar_t **argv_copy2 = (wchar_t **)malloc(sizeof(wchar_t*)*argc);
220
+ char *oldloc = strdup(setlocale(LC_ALL, NULL));
221
+ if (!argv_copy || !argv_copy2 || !oldloc) {
222
+ fprintf(stderr, "out of memory\\n");
223
+ free(argv_copy);
224
+ free(argv_copy2);
225
+ free(oldloc);
226
+ return 1;
227
+ }
228
+ res = 0;
229
+ setlocale(LC_ALL, "");
230
+ for (i = 0; i < argc; i++) {
231
+ argv_copy2[i] = argv_copy[i] =
232
+ #if PY_VERSION_HEX < 0x03050000
233
+ __Pyx_char2wchar(argv[i]);
234
+ #else
235
+ Py_DecodeLocale(argv[i], NULL);
236
+ #endif
237
+ if (!argv_copy[i]) res = 1; /* failure, but continue to simplify cleanup */
238
+ }
239
+ setlocale(LC_ALL, oldloc);
240
+ free(oldloc);
241
+ if (res == 0)
242
+ res = __Pyx_main(argc, argv_copy);
243
+ for (i = 0; i < argc; i++) {
244
+ #if PY_VERSION_HEX < 0x03050000
245
+ free(argv_copy2[i]);
246
+ #else
247
+ PyMem_RawFree(argv_copy2[i]);
248
+ #endif
249
+ }
250
+ free(argv_copy);
251
+ free(argv_copy2);
252
+ return res;
253
+ }
254
+ }
255
+ #endif
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/MemoryView_C.c ADDED
@@ -0,0 +1,987 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ////////// MemviewSliceStruct.proto //////////
2
+ //@proto_block: utility_code_proto_before_types
3
+
4
+ /* memoryview slice struct */
5
+ struct {{memview_struct_name}};
6
+
7
+ typedef struct {
8
+ struct {{memview_struct_name}} *memview;
9
+ char *data;
10
+ Py_ssize_t shape[{{max_dims}}];
11
+ Py_ssize_t strides[{{max_dims}}];
12
+ Py_ssize_t suboffsets[{{max_dims}}];
13
+ } {{memviewslice_name}};
14
+
15
+ // used for "len(memviewslice)"
16
+ #define __Pyx_MemoryView_Len(m) (m.shape[0])
17
+
18
+
19
+ /////////// Atomics.proto /////////////
20
+ //@proto_block: utility_code_proto_before_types
21
+
22
+ #include <pythread.h>
23
+
24
+ #ifndef CYTHON_ATOMICS
25
+ #define CYTHON_ATOMICS 1
26
+ #endif
27
+ // using CYTHON_ATOMICS as a cdef extern bint in the Cython memoryview code
28
+ // interacts badly with "import *". Therefore, define a helper function-like macro
29
+ #define __PYX_CYTHON_ATOMICS_ENABLED() CYTHON_ATOMICS
30
+
31
+ #define __pyx_atomic_int_type int
32
+ #define __pyx_nonatomic_int_type int
33
+
34
+ // For standard C/C++ atomics, get the headers first so we have ATOMIC_INT_LOCK_FREE
35
+ // defined when we decide to use them.
36
+ #if CYTHON_ATOMICS && (defined(__STDC_VERSION__) && \
37
+ (__STDC_VERSION__ >= 201112L) && \
38
+ !defined(__STDC_NO_ATOMICS__))
39
+ #include <stdatomic.h>
40
+ #elif CYTHON_ATOMICS && (defined(__cplusplus) && ( \
41
+ (__cplusplus >= 201103L) || \
42
+ (defined(_MSC_VER) && _MSC_VER >= 1700)))
43
+ #include <atomic>
44
+ #endif
45
+
46
+ #if CYTHON_ATOMICS && (defined(__STDC_VERSION__) && \
47
+ (__STDC_VERSION__ >= 201112L) && \
48
+ !defined(__STDC_NO_ATOMICS__) && \
49
+ ATOMIC_INT_LOCK_FREE == 2)
50
+ // C11 atomics are available and ATOMIC_INT_LOCK_FREE is definitely on
51
+ #undef __pyx_atomic_int_type
52
+ #define __pyx_atomic_int_type atomic_int
53
+ #define __pyx_atomic_incr_aligned(value) atomic_fetch_add_explicit(value, 1, memory_order_relaxed)
54
+ #define __pyx_atomic_decr_aligned(value) atomic_fetch_sub_explicit(value, 1, memory_order_acq_rel)
55
+ #if defined(__PYX_DEBUG_ATOMICS) && defined(_MSC_VER)
56
+ #pragma message ("Using standard C atomics")
57
+ #elif defined(__PYX_DEBUG_ATOMICS)
58
+ #warning "Using standard C atomics"
59
+ #endif
60
+ #elif CYTHON_ATOMICS && (defined(__cplusplus) && ( \
61
+ (__cplusplus >= 201103L) || \
62
+ /*_MSC_VER 1700 is Visual Studio 2012 */ \
63
+ (defined(_MSC_VER) && _MSC_VER >= 1700)) && \
64
+ ATOMIC_INT_LOCK_FREE == 2)
65
+ // C++11 atomics are available and ATOMIC_INT_LOCK_FREE is definitely on
66
+ #undef __pyx_atomic_int_type
67
+ #define __pyx_atomic_int_type std::atomic_int
68
+ #define __pyx_atomic_incr_aligned(value) std::atomic_fetch_add_explicit(value, 1, std::memory_order_relaxed)
69
+ #define __pyx_atomic_decr_aligned(value) std::atomic_fetch_sub_explicit(value, 1, std::memory_order_acq_rel)
70
+
71
+ #if defined(__PYX_DEBUG_ATOMICS) && defined(_MSC_VER)
72
+ #pragma message ("Using standard C++ atomics")
73
+ #elif defined(__PYX_DEBUG_ATOMICS)
74
+ #warning "Using standard C++ atomics"
75
+ #endif
76
+ #elif CYTHON_ATOMICS && (__GNUC__ >= 5 || (__GNUC__ == 4 && \
77
+ (__GNUC_MINOR__ > 1 || \
78
+ (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL__ >= 2))))
79
+ /* gcc >= 4.1.2 */
80
+ #define __pyx_atomic_incr_aligned(value) __sync_fetch_and_add(value, 1)
81
+ #define __pyx_atomic_decr_aligned(value) __sync_fetch_and_sub(value, 1)
82
+
83
+ #ifdef __PYX_DEBUG_ATOMICS
84
+ #warning "Using GNU atomics"
85
+ #endif
86
+ #elif CYTHON_ATOMICS && defined(_MSC_VER)
87
+ /* msvc */
88
+ #include <intrin.h>
89
+ #undef __pyx_atomic_int_type
90
+ #define __pyx_atomic_int_type long
91
+ #undef __pyx_nonatomic_int_type
92
+ #define __pyx_nonatomic_int_type long
93
+ #pragma intrinsic (_InterlockedExchangeAdd)
94
+ #define __pyx_atomic_incr_aligned(value) _InterlockedExchangeAdd(value, 1)
95
+ #define __pyx_atomic_decr_aligned(value) _InterlockedExchangeAdd(value, -1)
96
+
97
+ #ifdef __PYX_DEBUG_ATOMICS
98
+ #pragma message ("Using MSVC atomics")
99
+ #endif
100
+ #else
101
+ #undef CYTHON_ATOMICS
102
+ #define CYTHON_ATOMICS 0
103
+
104
+ #ifdef __PYX_DEBUG_ATOMICS
105
+ #warning "Not using atomics"
106
+ #endif
107
+ #endif
108
+
109
+ #if CYTHON_ATOMICS
110
+ #define __pyx_add_acquisition_count(memview) \
111
+ __pyx_atomic_incr_aligned(__pyx_get_slice_count_pointer(memview))
112
+ #define __pyx_sub_acquisition_count(memview) \
113
+ __pyx_atomic_decr_aligned(__pyx_get_slice_count_pointer(memview))
114
+ #else
115
+ #define __pyx_add_acquisition_count(memview) \
116
+ __pyx_add_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock)
117
+ #define __pyx_sub_acquisition_count(memview) \
118
+ __pyx_sub_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock)
119
+ #endif
120
+
121
+
122
+ /////////////// ObjectToMemviewSlice.proto ///////////////
123
+
124
+ static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *, int writable_flag);
125
+
126
+
127
+ ////////// MemviewSliceInit.proto //////////
128
+
129
+ #define __Pyx_BUF_MAX_NDIMS %(BUF_MAX_NDIMS)d
130
+
131
+ #define __Pyx_MEMVIEW_DIRECT 1
132
+ #define __Pyx_MEMVIEW_PTR 2
133
+ #define __Pyx_MEMVIEW_FULL 4
134
+ #define __Pyx_MEMVIEW_CONTIG 8
135
+ #define __Pyx_MEMVIEW_STRIDED 16
136
+ #define __Pyx_MEMVIEW_FOLLOW 32
137
+
138
+ #define __Pyx_IS_C_CONTIG 1
139
+ #define __Pyx_IS_F_CONTIG 2
140
+
141
+ static int __Pyx_init_memviewslice(
142
+ struct __pyx_memoryview_obj *memview,
143
+ int ndim,
144
+ __Pyx_memviewslice *memviewslice,
145
+ int memview_is_new_reference);
146
+
147
+ static CYTHON_INLINE int __pyx_add_acquisition_count_locked(
148
+ __pyx_atomic_int_type *acquisition_count, PyThread_type_lock lock);
149
+ static CYTHON_INLINE int __pyx_sub_acquisition_count_locked(
150
+ __pyx_atomic_int_type *acquisition_count, PyThread_type_lock lock);
151
+
152
+ #define __pyx_get_slice_count_pointer(memview) (&memview->acquisition_count)
153
+ #define __PYX_INC_MEMVIEW(slice, have_gil) __Pyx_INC_MEMVIEW(slice, have_gil, __LINE__)
154
+ #define __PYX_XCLEAR_MEMVIEW(slice, have_gil) __Pyx_XCLEAR_MEMVIEW(slice, have_gil, __LINE__)
155
+ static CYTHON_INLINE void __Pyx_INC_MEMVIEW({{memviewslice_name}} *, int, int);
156
+ static CYTHON_INLINE void __Pyx_XCLEAR_MEMVIEW({{memviewslice_name}} *, int, int);
157
+
158
+
159
+ /////////////// MemviewSliceIndex.proto ///////////////
160
+
161
+ static CYTHON_INLINE char *__pyx_memviewslice_index_full(
162
+ const char *bufp, Py_ssize_t idx, Py_ssize_t stride, Py_ssize_t suboffset);
163
+
164
+
165
+ /////////////// ObjectToMemviewSlice ///////////////
166
+ //@requires: MemviewSliceValidateAndInit
167
+
168
+ static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *obj, int writable_flag) {
169
+ {{memviewslice_name}} result = {{memslice_init}};
170
+ __Pyx_BufFmt_StackElem stack[{{struct_nesting_depth}}];
171
+ int axes_specs[] = { {{axes_specs}} };
172
+ int retcode;
173
+
174
+ if (obj == Py_None) {
175
+ /* We don't bother to refcount None */
176
+ result.memview = (struct __pyx_memoryview_obj *) Py_None;
177
+ return result;
178
+ }
179
+
180
+ retcode = __Pyx_ValidateAndInit_memviewslice(axes_specs, {{c_or_f_flag}},
181
+ {{buf_flag}} | writable_flag, {{ndim}},
182
+ &{{dtype_typeinfo}}, stack,
183
+ &result, obj);
184
+
185
+ if (unlikely(retcode == -1))
186
+ goto __pyx_fail;
187
+
188
+ return result;
189
+ __pyx_fail:
190
+ result.memview = NULL;
191
+ result.data = NULL;
192
+ return result;
193
+ }
194
+
195
+
196
+ /////////////// MemviewSliceValidateAndInit.proto ///////////////
197
+
198
+ static int __Pyx_ValidateAndInit_memviewslice(
199
+ int *axes_specs,
200
+ int c_or_f_flag,
201
+ int buf_flags,
202
+ int ndim,
203
+ __Pyx_TypeInfo *dtype,
204
+ __Pyx_BufFmt_StackElem stack[],
205
+ __Pyx_memviewslice *memviewslice,
206
+ PyObject *original_obj);
207
+
208
+ /////////////// MemviewSliceValidateAndInit ///////////////
209
+ //@requires: Buffer.c::TypeInfoCompare
210
+ //@requires: Buffer.c::BufferFormatStructs
211
+ //@requires: Buffer.c::BufferFormatCheck
212
+
213
+ static int
214
+ __pyx_check_strides(Py_buffer *buf, int dim, int ndim, int spec)
215
+ {
216
+ if (buf->shape[dim] <= 1)
217
+ return 1;
218
+
219
+ if (buf->strides) {
220
+ if (spec & __Pyx_MEMVIEW_CONTIG) {
221
+ if (spec & (__Pyx_MEMVIEW_PTR|__Pyx_MEMVIEW_FULL)) {
222
+ if (unlikely(buf->strides[dim] != sizeof(void *))) {
223
+ PyErr_Format(PyExc_ValueError,
224
+ "Buffer is not indirectly contiguous "
225
+ "in dimension %d.", dim);
226
+ goto fail;
227
+ }
228
+ } else if (unlikely(buf->strides[dim] != buf->itemsize)) {
229
+ PyErr_SetString(PyExc_ValueError,
230
+ "Buffer and memoryview are not contiguous "
231
+ "in the same dimension.");
232
+ goto fail;
233
+ }
234
+ }
235
+
236
+ if (spec & __Pyx_MEMVIEW_FOLLOW) {
237
+ Py_ssize_t stride = buf->strides[dim];
238
+ if (stride < 0)
239
+ stride = -stride;
240
+ if (unlikely(stride < buf->itemsize)) {
241
+ PyErr_SetString(PyExc_ValueError,
242
+ "Buffer and memoryview are not contiguous "
243
+ "in the same dimension.");
244
+ goto fail;
245
+ }
246
+ }
247
+ } else {
248
+ if (unlikely(spec & __Pyx_MEMVIEW_CONTIG && dim != ndim - 1)) {
249
+ PyErr_Format(PyExc_ValueError,
250
+ "C-contiguous buffer is not contiguous in "
251
+ "dimension %d", dim);
252
+ goto fail;
253
+ } else if (unlikely(spec & (__Pyx_MEMVIEW_PTR))) {
254
+ PyErr_Format(PyExc_ValueError,
255
+ "C-contiguous buffer is not indirect in "
256
+ "dimension %d", dim);
257
+ goto fail;
258
+ } else if (unlikely(buf->suboffsets)) {
259
+ PyErr_SetString(PyExc_ValueError,
260
+ "Buffer exposes suboffsets but no strides");
261
+ goto fail;
262
+ }
263
+ }
264
+
265
+ return 1;
266
+ fail:
267
+ return 0;
268
+ }
269
+
270
+ static int
271
+ __pyx_check_suboffsets(Py_buffer *buf, int dim, int ndim, int spec)
272
+ {
273
+ CYTHON_UNUSED_VAR(ndim);
274
+ // Todo: without PyBUF_INDIRECT we may not have suboffset information, i.e., the
275
+ // ptr may not be set to NULL but may be uninitialized?
276
+ if (spec & __Pyx_MEMVIEW_DIRECT) {
277
+ if (unlikely(buf->suboffsets && buf->suboffsets[dim] >= 0)) {
278
+ PyErr_Format(PyExc_ValueError,
279
+ "Buffer not compatible with direct access "
280
+ "in dimension %d.", dim);
281
+ goto fail;
282
+ }
283
+ }
284
+
285
+ if (spec & __Pyx_MEMVIEW_PTR) {
286
+ if (unlikely(!buf->suboffsets || (buf->suboffsets[dim] < 0))) {
287
+ PyErr_Format(PyExc_ValueError,
288
+ "Buffer is not indirectly accessible "
289
+ "in dimension %d.", dim);
290
+ goto fail;
291
+ }
292
+ }
293
+
294
+ return 1;
295
+ fail:
296
+ return 0;
297
+ }
298
+
299
+ static int
300
+ __pyx_verify_contig(Py_buffer *buf, int ndim, int c_or_f_flag)
301
+ {
302
+ int i;
303
+
304
+ if (c_or_f_flag & __Pyx_IS_F_CONTIG) {
305
+ Py_ssize_t stride = 1;
306
+ for (i = 0; i < ndim; i++) {
307
+ if (unlikely(stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1)) {
308
+ PyErr_SetString(PyExc_ValueError,
309
+ "Buffer not fortran contiguous.");
310
+ goto fail;
311
+ }
312
+ stride = stride * buf->shape[i];
313
+ }
314
+ } else if (c_or_f_flag & __Pyx_IS_C_CONTIG) {
315
+ Py_ssize_t stride = 1;
316
+ for (i = ndim - 1; i >- 1; i--) {
317
+ if (unlikely(stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1)) {
318
+ PyErr_SetString(PyExc_ValueError,
319
+ "Buffer not C contiguous.");
320
+ goto fail;
321
+ }
322
+ stride = stride * buf->shape[i];
323
+ }
324
+ }
325
+
326
+ return 1;
327
+ fail:
328
+ return 0;
329
+ }
330
+
331
+ static int __Pyx_ValidateAndInit_memviewslice(
332
+ int *axes_specs,
333
+ int c_or_f_flag,
334
+ int buf_flags,
335
+ int ndim,
336
+ __Pyx_TypeInfo *dtype,
337
+ __Pyx_BufFmt_StackElem stack[],
338
+ __Pyx_memviewslice *memviewslice,
339
+ PyObject *original_obj)
340
+ {
341
+ struct __pyx_memoryview_obj *memview, *new_memview;
342
+ __Pyx_RefNannyDeclarations
343
+ Py_buffer *buf;
344
+ int i, spec = 0, retval = -1;
345
+ __Pyx_BufFmt_Context ctx;
346
+ int from_memoryview = __pyx_memoryview_check(original_obj);
347
+
348
+ __Pyx_RefNannySetupContext("ValidateAndInit_memviewslice", 0);
349
+
350
+ if (from_memoryview && __pyx_typeinfo_cmp(dtype, ((struct __pyx_memoryview_obj *)
351
+ original_obj)->typeinfo)) {
352
+ /* We have a matching dtype, skip format parsing */
353
+ memview = (struct __pyx_memoryview_obj *) original_obj;
354
+ new_memview = NULL;
355
+ } else {
356
+ memview = (struct __pyx_memoryview_obj *) __pyx_memoryview_new(
357
+ original_obj, buf_flags, 0, dtype);
358
+ new_memview = memview;
359
+ if (unlikely(!memview))
360
+ goto fail;
361
+ }
362
+
363
+ buf = &memview->view;
364
+ if (unlikely(buf->ndim != ndim)) {
365
+ PyErr_Format(PyExc_ValueError,
366
+ "Buffer has wrong number of dimensions (expected %d, got %d)",
367
+ ndim, buf->ndim);
368
+ goto fail;
369
+ }
370
+
371
+ if (new_memview) {
372
+ __Pyx_BufFmt_Init(&ctx, stack, dtype);
373
+ if (unlikely(!__Pyx_BufFmt_CheckString(&ctx, buf->format))) goto fail;
374
+ }
375
+
376
+ if (unlikely((unsigned) buf->itemsize != dtype->size)) {
377
+ PyErr_Format(PyExc_ValueError,
378
+ "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "u byte%s) "
379
+ "does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "u byte%s)",
380
+ buf->itemsize,
381
+ (buf->itemsize > 1) ? "s" : "",
382
+ dtype->name,
383
+ dtype->size,
384
+ (dtype->size > 1) ? "s" : "");
385
+ goto fail;
386
+ }
387
+
388
+ /* Check axes */
389
+ if (buf->len > 0) {
390
+ // 0-sized arrays do not undergo these checks since their strides are
391
+ // irrelevant and they are always both C- and F-contiguous.
392
+ for (i = 0; i < ndim; i++) {
393
+ spec = axes_specs[i];
394
+ if (unlikely(!__pyx_check_strides(buf, i, ndim, spec)))
395
+ goto fail;
396
+ if (unlikely(!__pyx_check_suboffsets(buf, i, ndim, spec)))
397
+ goto fail;
398
+ }
399
+
400
+ /* Check contiguity */
401
+ if (unlikely(buf->strides && !__pyx_verify_contig(buf, ndim, c_or_f_flag)))
402
+ goto fail;
403
+ }
404
+
405
+ /* Initialize */
406
+ if (unlikely(__Pyx_init_memviewslice(memview, ndim, memviewslice,
407
+ new_memview != NULL) == -1)) {
408
+ goto fail;
409
+ }
410
+
411
+ retval = 0;
412
+ goto no_fail;
413
+
414
+ fail:
415
+ Py_XDECREF(new_memview);
416
+ retval = -1;
417
+
418
+ no_fail:
419
+ __Pyx_RefNannyFinishContext();
420
+ return retval;
421
+ }
422
+
423
+
424
+ ////////// MemviewSliceInit //////////
425
+
426
+ static int
427
+ __Pyx_init_memviewslice(struct __pyx_memoryview_obj *memview,
428
+ int ndim,
429
+ {{memviewslice_name}} *memviewslice,
430
+ int memview_is_new_reference)
431
+ {
432
+ __Pyx_RefNannyDeclarations
433
+ int i, retval=-1;
434
+ Py_buffer *buf = &memview->view;
435
+ __Pyx_RefNannySetupContext("init_memviewslice", 0);
436
+
437
+ if (unlikely(memviewslice->memview || memviewslice->data)) {
438
+ PyErr_SetString(PyExc_ValueError,
439
+ "memviewslice is already initialized!");
440
+ goto fail;
441
+ }
442
+
443
+ if (buf->strides) {
444
+ for (i = 0; i < ndim; i++) {
445
+ memviewslice->strides[i] = buf->strides[i];
446
+ }
447
+ } else {
448
+ Py_ssize_t stride = buf->itemsize;
449
+ for (i = ndim - 1; i >= 0; i--) {
450
+ memviewslice->strides[i] = stride;
451
+ stride *= buf->shape[i];
452
+ }
453
+ }
454
+
455
+ for (i = 0; i < ndim; i++) {
456
+ memviewslice->shape[i] = buf->shape[i];
457
+ if (buf->suboffsets) {
458
+ memviewslice->suboffsets[i] = buf->suboffsets[i];
459
+ } else {
460
+ memviewslice->suboffsets[i] = -1;
461
+ }
462
+ }
463
+
464
+ memviewslice->memview = memview;
465
+ memviewslice->data = (char *)buf->buf;
466
+ if (__pyx_add_acquisition_count(memview) == 0 && !memview_is_new_reference) {
467
+ Py_INCREF(memview);
468
+ }
469
+ retval = 0;
470
+ goto no_fail;
471
+
472
+ fail:
473
+ /* Don't decref, the memoryview may be borrowed. Let the caller do the cleanup */
474
+ /* __Pyx_XDECREF(memviewslice->memview); */
475
+ memviewslice->memview = 0;
476
+ memviewslice->data = 0;
477
+ retval = -1;
478
+ no_fail:
479
+ __Pyx_RefNannyFinishContext();
480
+ return retval;
481
+ }
482
+
483
+ #ifndef Py_NO_RETURN
484
+ // available since Py3.3
485
+ #define Py_NO_RETURN
486
+ #endif
487
+
488
+ static void __pyx_fatalerror(const char *fmt, ...) Py_NO_RETURN {
489
+ va_list vargs;
490
+ char msg[200];
491
+
492
+ #if PY_VERSION_HEX >= 0x030A0000 || defined(HAVE_STDARG_PROTOTYPES)
493
+ va_start(vargs, fmt);
494
+ #else
495
+ va_start(vargs);
496
+ #endif
497
+ vsnprintf(msg, 200, fmt, vargs);
498
+ va_end(vargs);
499
+
500
+ Py_FatalError(msg);
501
+ }
502
+
503
+ static CYTHON_INLINE int
504
+ __pyx_add_acquisition_count_locked(__pyx_atomic_int_type *acquisition_count,
505
+ PyThread_type_lock lock)
506
+ {
507
+ int result;
508
+ PyThread_acquire_lock(lock, 1);
509
+ result = (*acquisition_count)++;
510
+ PyThread_release_lock(lock);
511
+ return result;
512
+ }
513
+
514
+ static CYTHON_INLINE int
515
+ __pyx_sub_acquisition_count_locked(__pyx_atomic_int_type *acquisition_count,
516
+ PyThread_type_lock lock)
517
+ {
518
+ int result;
519
+ PyThread_acquire_lock(lock, 1);
520
+ result = (*acquisition_count)--;
521
+ PyThread_release_lock(lock);
522
+ return result;
523
+ }
524
+
525
+
526
+ static CYTHON_INLINE void
527
+ __Pyx_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno)
528
+ {
529
+ __pyx_nonatomic_int_type old_acquisition_count;
530
+ struct {{memview_struct_name}} *memview = memslice->memview;
531
+ if (unlikely(!memview || (PyObject *) memview == Py_None)) {
532
+ // Allow uninitialized memoryview assignment and do not ref-count None.
533
+ return;
534
+ }
535
+
536
+ old_acquisition_count = __pyx_add_acquisition_count(memview);
537
+ if (unlikely(old_acquisition_count <= 0)) {
538
+ if (likely(old_acquisition_count == 0)) {
539
+ // First acquisition => keep the memoryview object alive.
540
+ if (have_gil) {
541
+ Py_INCREF((PyObject *) memview);
542
+ } else {
543
+ PyGILState_STATE _gilstate = PyGILState_Ensure();
544
+ Py_INCREF((PyObject *) memview);
545
+ PyGILState_Release(_gilstate);
546
+ }
547
+ } else {
548
+ __pyx_fatalerror("Acquisition count is %d (line %d)",
549
+ old_acquisition_count+1, lineno);
550
+ }
551
+ }
552
+ }
553
+
554
+ static CYTHON_INLINE void __Pyx_XCLEAR_MEMVIEW({{memviewslice_name}} *memslice,
555
+ int have_gil, int lineno) {
556
+ __pyx_nonatomic_int_type old_acquisition_count;
557
+ struct {{memview_struct_name}} *memview = memslice->memview;
558
+
559
+ if (unlikely(!memview || (PyObject *) memview == Py_None)) {
560
+ // Do not ref-count None.
561
+ memslice->memview = NULL;
562
+ return;
563
+ }
564
+
565
+ old_acquisition_count = __pyx_sub_acquisition_count(memview);
566
+ memslice->data = NULL;
567
+ if (likely(old_acquisition_count > 1)) {
568
+ // Still other slices out there => we do not own the reference.
569
+ memslice->memview = NULL;
570
+ } else if (likely(old_acquisition_count == 1)) {
571
+ // Last slice => discard owned Python reference to memoryview object.
572
+ if (have_gil) {
573
+ Py_CLEAR(memslice->memview);
574
+ } else {
575
+ PyGILState_STATE _gilstate = PyGILState_Ensure();
576
+ Py_CLEAR(memslice->memview);
577
+ PyGILState_Release(_gilstate);
578
+ }
579
+ } else {
580
+ __pyx_fatalerror("Acquisition count is %d (line %d)",
581
+ old_acquisition_count-1, lineno);
582
+ }
583
+ }
584
+
585
+
586
+ ////////// MemviewSliceCopyTemplate.proto //////////
587
+
588
+ static {{memviewslice_name}}
589
+ __pyx_memoryview_copy_new_contig(const __Pyx_memviewslice *from_mvs,
590
+ const char *mode, int ndim,
591
+ size_t sizeof_dtype, int contig_flag,
592
+ int dtype_is_object);
593
+
594
+
595
+ ////////// MemviewSliceCopyTemplate //////////
596
+
597
+ static {{memviewslice_name}}
598
+ __pyx_memoryview_copy_new_contig(const __Pyx_memviewslice *from_mvs,
599
+ const char *mode, int ndim,
600
+ size_t sizeof_dtype, int contig_flag,
601
+ int dtype_is_object)
602
+ {
603
+ __Pyx_RefNannyDeclarations
604
+ int i;
605
+ __Pyx_memviewslice new_mvs = {{memslice_init}};
606
+ struct __pyx_memoryview_obj *from_memview = from_mvs->memview;
607
+ Py_buffer *buf = &from_memview->view;
608
+ PyObject *shape_tuple = NULL;
609
+ PyObject *temp_int = NULL;
610
+ struct __pyx_array_obj *array_obj = NULL;
611
+ struct __pyx_memoryview_obj *memview_obj = NULL;
612
+
613
+ __Pyx_RefNannySetupContext("__pyx_memoryview_copy_new_contig", 0);
614
+
615
+ for (i = 0; i < ndim; i++) {
616
+ if (unlikely(from_mvs->suboffsets[i] >= 0)) {
617
+ PyErr_Format(PyExc_ValueError, "Cannot copy memoryview slice with "
618
+ "indirect dimensions (axis %d)", i);
619
+ goto fail;
620
+ }
621
+ }
622
+
623
+ shape_tuple = PyTuple_New(ndim);
624
+ if (unlikely(!shape_tuple)) {
625
+ goto fail;
626
+ }
627
+ __Pyx_GOTREF(shape_tuple);
628
+
629
+
630
+ for(i = 0; i < ndim; i++) {
631
+ temp_int = PyInt_FromSsize_t(from_mvs->shape[i]);
632
+ if(unlikely(!temp_int)) {
633
+ goto fail;
634
+ } else {
635
+ PyTuple_SET_ITEM(shape_tuple, i, temp_int);
636
+ temp_int = NULL;
637
+ }
638
+ }
639
+
640
+ array_obj = __pyx_array_new(shape_tuple, sizeof_dtype, buf->format, (char *) mode, NULL);
641
+ if (unlikely(!array_obj)) {
642
+ goto fail;
643
+ }
644
+ __Pyx_GOTREF(array_obj);
645
+
646
+ memview_obj = (struct __pyx_memoryview_obj *) __pyx_memoryview_new(
647
+ (PyObject *) array_obj, contig_flag,
648
+ dtype_is_object,
649
+ from_mvs->memview->typeinfo);
650
+ if (unlikely(!memview_obj))
651
+ goto fail;
652
+
653
+ /* initialize new_mvs */
654
+ if (unlikely(__Pyx_init_memviewslice(memview_obj, ndim, &new_mvs, 1) < 0))
655
+ goto fail;
656
+
657
+ if (unlikely(__pyx_memoryview_copy_contents(*from_mvs, new_mvs, ndim, ndim,
658
+ dtype_is_object) < 0))
659
+ goto fail;
660
+
661
+ goto no_fail;
662
+
663
+ fail:
664
+ __Pyx_XDECREF(new_mvs.memview);
665
+ new_mvs.memview = NULL;
666
+ new_mvs.data = NULL;
667
+ no_fail:
668
+ __Pyx_XDECREF(shape_tuple);
669
+ __Pyx_XDECREF(temp_int);
670
+ __Pyx_XDECREF(array_obj);
671
+ __Pyx_RefNannyFinishContext();
672
+ return new_mvs;
673
+ }
674
+
675
+
676
+ ////////// CopyContentsUtility.proto /////////
677
+
678
+ #define {{func_cname}}(slice) \
679
+ __pyx_memoryview_copy_new_contig(&slice, "{{mode}}", {{ndim}}, \
680
+ sizeof({{dtype_decl}}), {{contig_flag}}, \
681
+ {{dtype_is_object}})
682
+
683
+
684
+ ////////// OverlappingSlices.proto //////////
685
+
686
+ static int __pyx_slices_overlap({{memviewslice_name}} *slice1,
687
+ {{memviewslice_name}} *slice2,
688
+ int ndim, size_t itemsize);
689
+
690
+
691
+ ////////// OverlappingSlices //////////
692
+
693
+ /* Based on numpy's core/src/multiarray/array_assign.c */
694
+
695
+ /* Gets a half-open range [start, end) which contains the array data */
696
+ static void
697
+ __pyx_get_array_memory_extents({{memviewslice_name}} *slice,
698
+ void **out_start, void **out_end,
699
+ int ndim, size_t itemsize)
700
+ {
701
+ char *start, *end;
702
+ int i;
703
+
704
+ start = end = slice->data;
705
+
706
+ for (i = 0; i < ndim; i++) {
707
+ Py_ssize_t stride = slice->strides[i];
708
+ Py_ssize_t extent = slice->shape[i];
709
+
710
+ if (extent == 0) {
711
+ *out_start = *out_end = start;
712
+ return;
713
+ } else {
714
+ if (stride > 0)
715
+ end += stride * (extent - 1);
716
+ else
717
+ start += stride * (extent - 1);
718
+ }
719
+ }
720
+
721
+ /* Return a half-open range */
722
+ *out_start = start;
723
+ *out_end = end + itemsize;
724
+ }
725
+
726
+ /* Returns 1 if the arrays have overlapping data, 0 otherwise */
727
+ static int
728
+ __pyx_slices_overlap({{memviewslice_name}} *slice1,
729
+ {{memviewslice_name}} *slice2,
730
+ int ndim, size_t itemsize)
731
+ {
732
+ void *start1, *end1, *start2, *end2;
733
+
734
+ __pyx_get_array_memory_extents(slice1, &start1, &end1, ndim, itemsize);
735
+ __pyx_get_array_memory_extents(slice2, &start2, &end2, ndim, itemsize);
736
+
737
+ return (start1 < end2) && (start2 < end1);
738
+ }
739
+
740
+
741
+ ////////// MemviewSliceCheckContig.proto //////////
742
+
743
+ #define __pyx_memviewslice_is_contig_{{contig_type}}{{ndim}}(slice) \
744
+ __pyx_memviewslice_is_contig(slice, '{{contig_type}}', {{ndim}})
745
+
746
+
747
+ ////////// MemviewSliceIsContig.proto //////////
748
+
749
+ static int __pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim);/*proto*/
750
+
751
+
752
+ ////////// MemviewSliceIsContig //////////
753
+
754
+ static int
755
+ __pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim)
756
+ {
757
+ int i, index, step, start;
758
+ Py_ssize_t itemsize = mvs.memview->view.itemsize;
759
+
760
+ if (order == 'F') {
761
+ step = 1;
762
+ start = 0;
763
+ } else {
764
+ step = -1;
765
+ start = ndim - 1;
766
+ }
767
+
768
+ for (i = 0; i < ndim; i++) {
769
+ index = start + step * i;
770
+ if (mvs.suboffsets[index] >= 0 || mvs.strides[index] != itemsize)
771
+ return 0;
772
+
773
+ itemsize *= mvs.shape[index];
774
+ }
775
+
776
+ return 1;
777
+ }
778
+
779
+
780
+ /////////////// MemviewSliceIndex ///////////////
781
+
782
+ static CYTHON_INLINE char *
783
+ __pyx_memviewslice_index_full(const char *bufp, Py_ssize_t idx,
784
+ Py_ssize_t stride, Py_ssize_t suboffset)
785
+ {
786
+ bufp = bufp + idx * stride;
787
+ if (suboffset >= 0) {
788
+ bufp = *((char **) bufp) + suboffset;
789
+ }
790
+ return (char *) bufp;
791
+ }
792
+
793
+
794
+ /////////////// MemviewDtypeToObject.proto ///////////////
795
+
796
+ {{if to_py_function}}
797
+ static CYTHON_INLINE PyObject *{{get_function}}(const char *itemp); /* proto */
798
+ {{endif}}
799
+
800
+ {{if from_py_function}}
801
+ static CYTHON_INLINE int {{set_function}}(const char *itemp, PyObject *obj); /* proto */
802
+ {{endif}}
803
+
804
+ /////////////// MemviewDtypeToObject ///////////////
805
+
806
+ {{#__pyx_memview_<dtype_name>_to_object}}
807
+
808
+ /* Convert a dtype to or from a Python object */
809
+
810
+ {{if to_py_function}}
811
+ static CYTHON_INLINE PyObject *{{get_function}}(const char *itemp) {
812
+ return (PyObject *) {{to_py_function}}(*({{dtype}} *) itemp);
813
+ }
814
+ {{endif}}
815
+
816
+ {{if from_py_function}}
817
+ static CYTHON_INLINE int {{set_function}}(const char *itemp, PyObject *obj) {
818
+ {{dtype}} value = {{from_py_function}}(obj);
819
+ if (unlikely({{error_condition}}))
820
+ return 0;
821
+ *({{dtype}} *) itemp = value;
822
+ return 1;
823
+ }
824
+ {{endif}}
825
+
826
+
827
+ /////////////// MemviewObjectToObject.proto ///////////////
828
+
829
+ /* Function callbacks (for memoryview object) for dtype object */
830
+ static PyObject *{{get_function}}(const char *itemp); /* proto */
831
+ static int {{set_function}}(const char *itemp, PyObject *obj); /* proto */
832
+
833
+
834
+ /////////////// MemviewObjectToObject ///////////////
835
+
836
+ static PyObject *{{get_function}}(const char *itemp) {
837
+ PyObject *result = *(PyObject **) itemp;
838
+ Py_INCREF(result);
839
+ return result;
840
+ }
841
+
842
+ static int {{set_function}}(const char *itemp, PyObject *obj) {
843
+ Py_INCREF(obj);
844
+ Py_DECREF(*(PyObject **) itemp);
845
+ *(PyObject **) itemp = obj;
846
+ return 1;
847
+ }
848
+
849
+ /////////// ToughSlice //////////
850
+
851
+ /* Dimension is indexed with 'start:stop:step' */
852
+
853
+ if (unlikely(__pyx_memoryview_slice_memviewslice(
854
+ &{{dst}},
855
+ {{src}}.shape[{{dim}}], {{src}}.strides[{{dim}}], {{src}}.suboffsets[{{dim}}],
856
+ {{dim}},
857
+ {{new_ndim}},
858
+ &{{get_suboffset_dim()}},
859
+ {{start}},
860
+ {{stop}},
861
+ {{step}},
862
+ {{int(have_start)}},
863
+ {{int(have_stop)}},
864
+ {{int(have_step)}},
865
+ 1) < 0))
866
+ {
867
+ {{error_goto}}
868
+ }
869
+
870
+
871
+ ////////// SimpleSlice //////////
872
+
873
+ /* Dimension is indexed with ':' only */
874
+
875
+ {{dst}}.shape[{{new_ndim}}] = {{src}}.shape[{{dim}}];
876
+ {{dst}}.strides[{{new_ndim}}] = {{src}}.strides[{{dim}}];
877
+
878
+ {{if access == 'direct'}}
879
+ {{dst}}.suboffsets[{{new_ndim}}] = -1;
880
+ {{else}}
881
+ {{dst}}.suboffsets[{{new_ndim}}] = {{src}}.suboffsets[{{dim}}];
882
+ if ({{src}}.suboffsets[{{dim}}] >= 0)
883
+ {{get_suboffset_dim()}} = {{new_ndim}};
884
+ {{endif}}
885
+
886
+
887
+ ////////// SliceIndex //////////
888
+
889
+ // Dimension is indexed with an integer, we could use the ToughSlice
890
+ // approach, but this is faster
891
+
892
+ {
893
+ Py_ssize_t __pyx_tmp_idx = {{idx}};
894
+
895
+ {{if wraparound or boundscheck}}
896
+ Py_ssize_t __pyx_tmp_shape = {{src}}.shape[{{dim}}];
897
+ {{endif}}
898
+
899
+ Py_ssize_t __pyx_tmp_stride = {{src}}.strides[{{dim}}];
900
+ {{if wraparound}}
901
+ if (__pyx_tmp_idx < 0)
902
+ __pyx_tmp_idx += __pyx_tmp_shape;
903
+ {{endif}}
904
+
905
+ {{if boundscheck}}
906
+ if (unlikely(!__Pyx_is_valid_index(__pyx_tmp_idx, __pyx_tmp_shape))) {
907
+ {{if not have_gil}}
908
+ #ifdef WITH_THREAD
909
+ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();
910
+ #endif
911
+ {{endif}}
912
+
913
+ PyErr_SetString(PyExc_IndexError,
914
+ "Index out of bounds (axis {{dim}})");
915
+
916
+ {{if not have_gil}}
917
+ #ifdef WITH_THREAD
918
+ PyGILState_Release(__pyx_gilstate_save);
919
+ #endif
920
+ {{endif}}
921
+
922
+ {{error_goto}}
923
+ }
924
+ {{endif}}
925
+
926
+ {{if all_dimensions_direct}}
927
+ {{dst}}.data += __pyx_tmp_idx * __pyx_tmp_stride;
928
+ {{else}}
929
+ if ({{get_suboffset_dim()}} < 0) {
930
+ {{dst}}.data += __pyx_tmp_idx * __pyx_tmp_stride;
931
+
932
+ /* This dimension is the first dimension, or is preceded by */
933
+ /* direct or indirect dimensions that are indexed away. */
934
+ /* Hence suboffset_dim must be less than zero, and we can have */
935
+ /* our data pointer refer to another block by dereferencing. */
936
+ /* slice.data -> B -> C becomes slice.data -> C */
937
+
938
+ {{if indirect}}
939
+ {
940
+ Py_ssize_t __pyx_tmp_suboffset = {{src}}.suboffsets[{{dim}}];
941
+
942
+ {{if generic}}
943
+ if (__pyx_tmp_suboffset >= 0)
944
+ {{endif}}
945
+
946
+ {{dst}}.data = *((char **) {{dst}}.data) + __pyx_tmp_suboffset;
947
+ }
948
+ {{endif}}
949
+
950
+ } else {
951
+ {{dst}}.suboffsets[{{get_suboffset_dim()}}] += __pyx_tmp_idx * __pyx_tmp_stride;
952
+
953
+ /* Note: dimension can not be indirect, the compiler will have */
954
+ /* issued an error */
955
+ }
956
+
957
+ {{endif}}
958
+ }
959
+
960
+
961
+ ////////// FillStrided1DScalar.proto //////////
962
+
963
+ static void
964
+ __pyx_fill_slice_{{dtype_name}}({{type_decl}} *p, Py_ssize_t extent, Py_ssize_t stride,
965
+ size_t itemsize, void *itemp);
966
+
967
+ ////////// FillStrided1DScalar //////////
968
+
969
+ /* Fill a slice with a scalar value. The dimension is direct and strided or contiguous */
970
+ /* This can be used as a callback for the memoryview object to efficiently assign a scalar */
971
+ /* Currently unused */
972
+ static void
973
+ __pyx_fill_slice_{{dtype_name}}({{type_decl}} *p, Py_ssize_t extent, Py_ssize_t stride,
974
+ size_t itemsize, void *itemp)
975
+ {
976
+ Py_ssize_t i;
977
+ {{type_decl}} item = *(({{type_decl}} *) itemp);
978
+ {{type_decl}} *endp;
979
+
980
+ stride /= sizeof({{type_decl}});
981
+ endp = p + stride * extent;
982
+
983
+ while (p < endp) {
984
+ *p = item;
985
+ p += stride;
986
+ }
987
+ }
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/ModuleSetupCode.c ADDED
@@ -0,0 +1,2366 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /////////////// InitLimitedAPI ///////////////
2
+
3
+ #if defined(CYTHON_LIMITED_API) && 0 /* disabled: enabling Py_LIMITED_API needs more work */
4
+ #ifndef Py_LIMITED_API
5
+ #if CYTHON_LIMITED_API+0 > 0x03030000
6
+ #define Py_LIMITED_API CYTHON_LIMITED_API
7
+ #else
8
+ #define Py_LIMITED_API 0x03030000
9
+ #endif
10
+ #endif
11
+ #endif
12
+
13
+
14
+ /////////////// CModulePreamble ///////////////
15
+
16
+ #include <stddef.h> /* For offsetof */
17
+ #ifndef offsetof
18
+ #define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
19
+ #endif
20
+
21
+ #if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS)
22
+ #ifndef __stdcall
23
+ #define __stdcall
24
+ #endif
25
+ #ifndef __cdecl
26
+ #define __cdecl
27
+ #endif
28
+ #ifndef __fastcall
29
+ #define __fastcall
30
+ #endif
31
+ #endif
32
+
33
+ #ifndef DL_IMPORT
34
+ #define DL_IMPORT(t) t
35
+ #endif
36
+ #ifndef DL_EXPORT
37
+ #define DL_EXPORT(t) t
38
+ #endif
39
+
40
+ // For use in DL_IMPORT/DL_EXPORT macros.
41
+ #define __PYX_COMMA ,
42
+
43
+ #ifndef HAVE_LONG_LONG
44
+ // CPython has required PY_LONG_LONG support for years, even if HAVE_LONG_LONG is not defined for us
45
+ #define HAVE_LONG_LONG
46
+ #endif
47
+
48
+ #ifndef PY_LONG_LONG
49
+ #define PY_LONG_LONG LONG_LONG
50
+ #endif
51
+
52
+ #ifndef Py_HUGE_VAL
53
+ #define Py_HUGE_VAL HUGE_VAL
54
+ #endif
55
+
56
+ // For the limited API it often makes sense to use Py_LIMITED_API rather than PY_VERSION_HEX
57
+ // when doing version checks.
58
+ #define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX
59
+
60
+ #if defined(GRAALVM_PYTHON)
61
+ /* For very preliminary testing purposes. Most variables are set the same as PyPy.
62
+ The existence of this section does not imply that anything works or is even tested */
63
+ // GRAALVM_PYTHON test comes before PyPy test because GraalPython unhelpfully defines PYPY_VERSION
64
+ #define CYTHON_COMPILING_IN_PYPY 0
65
+ #define CYTHON_COMPILING_IN_CPYTHON 0
66
+ #define CYTHON_COMPILING_IN_LIMITED_API 0
67
+ #define CYTHON_COMPILING_IN_GRAAL 1
68
+ #define CYTHON_COMPILING_IN_NOGIL 0
69
+
70
+ #undef CYTHON_USE_TYPE_SLOTS
71
+ #define CYTHON_USE_TYPE_SLOTS 0
72
+ #undef CYTHON_USE_TYPE_SPECS
73
+ #define CYTHON_USE_TYPE_SPECS 0
74
+ #undef CYTHON_USE_PYTYPE_LOOKUP
75
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
76
+ #if PY_VERSION_HEX < 0x03050000
77
+ #undef CYTHON_USE_ASYNC_SLOTS
78
+ #define CYTHON_USE_ASYNC_SLOTS 0
79
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
80
+ #define CYTHON_USE_ASYNC_SLOTS 1
81
+ #endif
82
+ #undef CYTHON_USE_PYLIST_INTERNALS
83
+ #define CYTHON_USE_PYLIST_INTERNALS 0
84
+ #undef CYTHON_USE_UNICODE_INTERNALS
85
+ #define CYTHON_USE_UNICODE_INTERNALS 0
86
+ #undef CYTHON_USE_UNICODE_WRITER
87
+ #define CYTHON_USE_UNICODE_WRITER 0
88
+ #undef CYTHON_USE_PYLONG_INTERNALS
89
+ #define CYTHON_USE_PYLONG_INTERNALS 0
90
+ #undef CYTHON_AVOID_BORROWED_REFS
91
+ #define CYTHON_AVOID_BORROWED_REFS 1
92
+ #undef CYTHON_ASSUME_SAFE_MACROS
93
+ #define CYTHON_ASSUME_SAFE_MACROS 0
94
+ #undef CYTHON_UNPACK_METHODS
95
+ #define CYTHON_UNPACK_METHODS 0
96
+ #undef CYTHON_FAST_THREAD_STATE
97
+ #define CYTHON_FAST_THREAD_STATE 0
98
+ #undef CYTHON_FAST_GIL
99
+ #define CYTHON_FAST_GIL 0
100
+ #undef CYTHON_METH_FASTCALL
101
+ #define CYTHON_METH_FASTCALL 0
102
+ #undef CYTHON_FAST_PYCALL
103
+ #define CYTHON_FAST_PYCALL 0
104
+ #ifndef CYTHON_PEP487_INIT_SUBCLASS
105
+ #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3)
106
+ #endif
107
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
108
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 1
109
+ #undef CYTHON_USE_MODULE_STATE
110
+ #define CYTHON_USE_MODULE_STATE 0
111
+ #undef CYTHON_USE_TP_FINALIZE
112
+ #define CYTHON_USE_TP_FINALIZE 0
113
+ #undef CYTHON_USE_DICT_VERSIONS
114
+ #define CYTHON_USE_DICT_VERSIONS 0
115
+ #undef CYTHON_USE_EXC_INFO_STACK
116
+ #define CYTHON_USE_EXC_INFO_STACK 0
117
+ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
118
+ #define CYTHON_UPDATE_DESCRIPTOR_DOC 0
119
+ #endif
120
+ #undef CYTHON_USE_FREELISTS
121
+ #define CYTHON_USE_FREELISTS 0
122
+
123
+ #elif defined(PYPY_VERSION)
124
+ #define CYTHON_COMPILING_IN_PYPY 1
125
+ #define CYTHON_COMPILING_IN_CPYTHON 0
126
+ #define CYTHON_COMPILING_IN_LIMITED_API 0
127
+ #define CYTHON_COMPILING_IN_GRAAL 0
128
+ #define CYTHON_COMPILING_IN_NOGIL 0
129
+
130
+ #undef CYTHON_USE_TYPE_SLOTS
131
+ #define CYTHON_USE_TYPE_SLOTS 0
132
+ #ifndef CYTHON_USE_TYPE_SPECS
133
+ #define CYTHON_USE_TYPE_SPECS 0
134
+ #endif
135
+ #undef CYTHON_USE_PYTYPE_LOOKUP
136
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
137
+ #if PY_VERSION_HEX < 0x03050000
138
+ #undef CYTHON_USE_ASYNC_SLOTS
139
+ #define CYTHON_USE_ASYNC_SLOTS 0
140
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
141
+ #define CYTHON_USE_ASYNC_SLOTS 1
142
+ #endif
143
+ #undef CYTHON_USE_PYLIST_INTERNALS
144
+ #define CYTHON_USE_PYLIST_INTERNALS 0
145
+ #undef CYTHON_USE_UNICODE_INTERNALS
146
+ #define CYTHON_USE_UNICODE_INTERNALS 0
147
+ #undef CYTHON_USE_UNICODE_WRITER
148
+ #define CYTHON_USE_UNICODE_WRITER 0
149
+ #undef CYTHON_USE_PYLONG_INTERNALS
150
+ #define CYTHON_USE_PYLONG_INTERNALS 0
151
+ #undef CYTHON_AVOID_BORROWED_REFS
152
+ #define CYTHON_AVOID_BORROWED_REFS 1
153
+ #undef CYTHON_ASSUME_SAFE_MACROS
154
+ #define CYTHON_ASSUME_SAFE_MACROS 0
155
+ #undef CYTHON_UNPACK_METHODS
156
+ #define CYTHON_UNPACK_METHODS 0
157
+ #undef CYTHON_FAST_THREAD_STATE
158
+ #define CYTHON_FAST_THREAD_STATE 0
159
+ #undef CYTHON_FAST_GIL
160
+ #define CYTHON_FAST_GIL 0
161
+ #undef CYTHON_METH_FASTCALL
162
+ #define CYTHON_METH_FASTCALL 0
163
+ #undef CYTHON_FAST_PYCALL
164
+ #define CYTHON_FAST_PYCALL 0
165
+ #ifndef CYTHON_PEP487_INIT_SUBCLASS
166
+ #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3)
167
+ #endif
168
+ #if PY_VERSION_HEX < 0x03090000
169
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
170
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
171
+ #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT)
172
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 1
173
+ #endif
174
+ #undef CYTHON_USE_MODULE_STATE
175
+ #define CYTHON_USE_MODULE_STATE 0
176
+ #undef CYTHON_USE_TP_FINALIZE
177
+ #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00)
178
+ #undef CYTHON_USE_DICT_VERSIONS
179
+ #define CYTHON_USE_DICT_VERSIONS 0
180
+ #undef CYTHON_USE_EXC_INFO_STACK
181
+ #define CYTHON_USE_EXC_INFO_STACK 0
182
+ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
183
+ #define CYTHON_UPDATE_DESCRIPTOR_DOC 0
184
+ #endif
185
+ #undef CYTHON_USE_FREELISTS
186
+ #define CYTHON_USE_FREELISTS 0
187
+
188
+ #elif defined(CYTHON_LIMITED_API)
189
+ // EXPERIMENTAL !!
190
+ #ifdef Py_LIMITED_API
191
+ #undef __PYX_LIMITED_VERSION_HEX
192
+ #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API
193
+ #endif
194
+ #define CYTHON_COMPILING_IN_PYPY 0
195
+ #define CYTHON_COMPILING_IN_CPYTHON 0
196
+ #define CYTHON_COMPILING_IN_LIMITED_API 1
197
+ #define CYTHON_COMPILING_IN_GRAAL 0
198
+ #define CYTHON_COMPILING_IN_NOGIL 0
199
+
200
+ // CYTHON_CLINE_IN_TRACEBACK is currently disabled for the Limited API
201
+ #undef CYTHON_CLINE_IN_TRACEBACK
202
+ #define CYTHON_CLINE_IN_TRACEBACK 0
203
+
204
+ #undef CYTHON_USE_TYPE_SLOTS
205
+ #define CYTHON_USE_TYPE_SLOTS 0
206
+ #undef CYTHON_USE_TYPE_SPECS
207
+ #define CYTHON_USE_TYPE_SPECS 1
208
+ #undef CYTHON_USE_PYTYPE_LOOKUP
209
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
210
+ #undef CYTHON_USE_ASYNC_SLOTS
211
+ #define CYTHON_USE_ASYNC_SLOTS 0
212
+ #undef CYTHON_USE_PYLIST_INTERNALS
213
+ #define CYTHON_USE_PYLIST_INTERNALS 0
214
+ #undef CYTHON_USE_UNICODE_INTERNALS
215
+ #define CYTHON_USE_UNICODE_INTERNALS 0
216
+ #ifndef CYTHON_USE_UNICODE_WRITER
217
+ #define CYTHON_USE_UNICODE_WRITER 0
218
+ #endif
219
+ #undef CYTHON_USE_PYLONG_INTERNALS
220
+ #define CYTHON_USE_PYLONG_INTERNALS 0
221
+ #ifndef CYTHON_AVOID_BORROWED_REFS
222
+ #define CYTHON_AVOID_BORROWED_REFS 0
223
+ #endif
224
+ #undef CYTHON_ASSUME_SAFE_MACROS
225
+ #define CYTHON_ASSUME_SAFE_MACROS 0
226
+ #undef CYTHON_UNPACK_METHODS
227
+ #define CYTHON_UNPACK_METHODS 0
228
+ #undef CYTHON_FAST_THREAD_STATE
229
+ #define CYTHON_FAST_THREAD_STATE 0
230
+ #undef CYTHON_FAST_GIL
231
+ #define CYTHON_FAST_GIL 0
232
+ #undef CYTHON_METH_FASTCALL
233
+ #define CYTHON_METH_FASTCALL 0
234
+ #undef CYTHON_FAST_PYCALL
235
+ #define CYTHON_FAST_PYCALL 0
236
+ #ifndef CYTHON_PEP487_INIT_SUBCLASS
237
+ #define CYTHON_PEP487_INIT_SUBCLASS 1
238
+ #endif
239
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
240
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
241
+ #undef CYTHON_USE_MODULE_STATE
242
+ #define CYTHON_USE_MODULE_STATE 1
243
+ #ifndef CYTHON_USE_TP_FINALIZE
244
+ // PyObject_CallFinalizerFromDealloc is missing and not easily replaced
245
+ #define CYTHON_USE_TP_FINALIZE 0
246
+ #endif
247
+ #undef CYTHON_USE_DICT_VERSIONS
248
+ #define CYTHON_USE_DICT_VERSIONS 0
249
+ #undef CYTHON_USE_EXC_INFO_STACK
250
+ #define CYTHON_USE_EXC_INFO_STACK 0
251
+ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
252
+ #define CYTHON_UPDATE_DESCRIPTOR_DOC 0
253
+ #endif
254
+ #undef CYTHON_USE_FREELISTS
255
+ #define CYTHON_USE_FREELISTS 0
256
+
257
+ #elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL)
258
+ #define CYTHON_COMPILING_IN_PYPY 0
259
+ #define CYTHON_COMPILING_IN_CPYTHON 0
260
+ #define CYTHON_COMPILING_IN_LIMITED_API 0
261
+ #define CYTHON_COMPILING_IN_GRAAL 0
262
+ #define CYTHON_COMPILING_IN_NOGIL 1
263
+
264
+ #ifndef CYTHON_USE_TYPE_SLOTS
265
+ #define CYTHON_USE_TYPE_SLOTS 1
266
+ #endif
267
+ #ifndef CYTHON_USE_TYPE_SPECS
268
+ #define CYTHON_USE_TYPE_SPECS 0
269
+ #endif
270
+ #undef CYTHON_USE_PYTYPE_LOOKUP
271
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
272
+ #ifndef CYTHON_USE_ASYNC_SLOTS
273
+ #define CYTHON_USE_ASYNC_SLOTS 1
274
+ #endif
275
+ #ifndef CYTHON_USE_PYLONG_INTERNALS
276
+ #define CYTHON_USE_PYLONG_INTERNALS 0
277
+ #endif
278
+ #undef CYTHON_USE_PYLIST_INTERNALS
279
+ #define CYTHON_USE_PYLIST_INTERNALS 0
280
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
281
+ #define CYTHON_USE_UNICODE_INTERNALS 1
282
+ #endif
283
+ #undef CYTHON_USE_UNICODE_WRITER
284
+ #define CYTHON_USE_UNICODE_WRITER 0
285
+ #ifndef CYTHON_AVOID_BORROWED_REFS
286
+ #define CYTHON_AVOID_BORROWED_REFS 0
287
+ #endif
288
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
289
+ #define CYTHON_ASSUME_SAFE_MACROS 1
290
+ #endif
291
+ #ifndef CYTHON_UNPACK_METHODS
292
+ #define CYTHON_UNPACK_METHODS 1
293
+ #endif
294
+ #undef CYTHON_FAST_THREAD_STATE
295
+ #define CYTHON_FAST_THREAD_STATE 0
296
+ #undef CYTHON_FAST_GIL
297
+ #define CYTHON_FAST_GIL 0
298
+ #ifndef CYTHON_METH_FASTCALL
299
+ #define CYTHON_METH_FASTCALL 1
300
+ #endif
301
+ #undef CYTHON_FAST_PYCALL
302
+ #define CYTHON_FAST_PYCALL 0
303
+ #ifndef CYTHON_PEP487_INIT_SUBCLASS
304
+ #define CYTHON_PEP487_INIT_SUBCLASS 1
305
+ #endif
306
+ #ifndef CYTHON_PEP489_MULTI_PHASE_INIT
307
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 1
308
+ #endif
309
+ #ifndef CYTHON_USE_MODULE_STATE
310
+ #define CYTHON_USE_MODULE_STATE 0
311
+ #endif
312
+ #ifndef CYTHON_USE_TP_FINALIZE
313
+ #define CYTHON_USE_TP_FINALIZE 1
314
+ #endif
315
+ #undef CYTHON_USE_DICT_VERSIONS
316
+ #define CYTHON_USE_DICT_VERSIONS 0
317
+ #undef CYTHON_USE_EXC_INFO_STACK
318
+ #define CYTHON_USE_EXC_INFO_STACK 0
319
+ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
320
+ #define CYTHON_UPDATE_DESCRIPTOR_DOC 1
321
+ #endif
322
+ #ifndef CYTHON_USE_FREELISTS
323
+ // TODO - we could probably enable CYTHON_USE_FREELISTS by default in future since
324
+ // this is just a variant of cpython now, but we'd need to be very careful to make
325
+ // them thread safe. Since it will probably work, let the user decide.
326
+ #define CYTHON_USE_FREELISTS 0
327
+ #endif
328
+
329
+ #else
330
+ #define CYTHON_COMPILING_IN_PYPY 0
331
+ #define CYTHON_COMPILING_IN_CPYTHON 1
332
+ #define CYTHON_COMPILING_IN_LIMITED_API 0
333
+ #define CYTHON_COMPILING_IN_GRAAL 0
334
+ #define CYTHON_COMPILING_IN_NOGIL 0
335
+
336
+ #ifndef CYTHON_USE_TYPE_SLOTS
337
+ #define CYTHON_USE_TYPE_SLOTS 1
338
+ #endif
339
+ #ifndef CYTHON_USE_TYPE_SPECS
340
+ #define CYTHON_USE_TYPE_SPECS 0
341
+ #endif
342
+ #ifndef CYTHON_USE_PYTYPE_LOOKUP
343
+ #define CYTHON_USE_PYTYPE_LOOKUP 1
344
+ #endif
345
+ #if PY_MAJOR_VERSION < 3
346
+ #undef CYTHON_USE_ASYNC_SLOTS
347
+ #define CYTHON_USE_ASYNC_SLOTS 0
348
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
349
+ #define CYTHON_USE_ASYNC_SLOTS 1
350
+ #endif
351
+ #ifndef CYTHON_USE_PYLONG_INTERNALS
352
+ #define CYTHON_USE_PYLONG_INTERNALS 1
353
+ #endif
354
+ #ifndef CYTHON_USE_PYLIST_INTERNALS
355
+ #define CYTHON_USE_PYLIST_INTERNALS 1
356
+ #endif
357
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
358
+ #define CYTHON_USE_UNICODE_INTERNALS 1
359
+ #endif
360
+ #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2
361
+ // Python 3.11a2 hid _PyLong_FormatAdvancedWriter and _PyFloat_FormatAdvancedWriter
362
+ // therefore disable unicode writer until a better alternative appears
363
+ #undef CYTHON_USE_UNICODE_WRITER
364
+ #define CYTHON_USE_UNICODE_WRITER 0
365
+ #elif !defined(CYTHON_USE_UNICODE_WRITER)
366
+ #define CYTHON_USE_UNICODE_WRITER 1
367
+ #endif
368
+ // CYTHON_AVOID_BORROWED_REFS - Avoid borrowed references and always request owned references directly instead.
369
+ #ifndef CYTHON_AVOID_BORROWED_REFS
370
+ #define CYTHON_AVOID_BORROWED_REFS 0
371
+ #endif
372
+ // CYTHON_ASSUME_SAFE_MACROS - Assume that macro calls do not fail and do not raise exceptions.
373
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
374
+ #define CYTHON_ASSUME_SAFE_MACROS 1
375
+ #endif
376
+ #ifndef CYTHON_UNPACK_METHODS
377
+ #define CYTHON_UNPACK_METHODS 1
378
+ #endif
379
+ #ifndef CYTHON_FAST_THREAD_STATE
380
+ #define CYTHON_FAST_THREAD_STATE 1
381
+ #endif
382
+ #ifndef CYTHON_FAST_GIL
383
+ // Py3<3.5.2 does not support _PyThreadState_UncheckedGet().
384
+ // FIXME: FastGIL can probably be supported also in CPython 3.12 but needs to be adapted.
385
+ #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6)
386
+ #endif
387
+ #ifndef CYTHON_METH_FASTCALL
388
+ // CPython 3.6 introduced METH_FASTCALL but with slightly different
389
+ // semantics. It became stable starting from CPython 3.7.
390
+ #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1)
391
+ #endif
392
+ #ifndef CYTHON_FAST_PYCALL
393
+ #define CYTHON_FAST_PYCALL 1
394
+ #endif
395
+ #ifndef CYTHON_PEP487_INIT_SUBCLASS
396
+ #define CYTHON_PEP487_INIT_SUBCLASS 1
397
+ #endif
398
+ #if PY_VERSION_HEX < 0x03050000
399
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
400
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
401
+ #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT)
402
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 1
403
+ #endif
404
+ // CYTHON_USE_MODULE_STATE - Use a module state/globals struct tied to the module object.
405
+ #ifndef CYTHON_USE_MODULE_STATE
406
+ // EXPERIMENTAL !!
407
+ #define CYTHON_USE_MODULE_STATE 0
408
+ #endif
409
+ #if PY_VERSION_HEX < 0x030400a1
410
+ #undef CYTHON_USE_TP_FINALIZE
411
+ #define CYTHON_USE_TP_FINALIZE 0
412
+ #elif !defined(CYTHON_USE_TP_FINALIZE)
413
+ #define CYTHON_USE_TP_FINALIZE 1
414
+ #endif
415
+ #if PY_VERSION_HEX < 0x030600B1
416
+ #undef CYTHON_USE_DICT_VERSIONS
417
+ #define CYTHON_USE_DICT_VERSIONS 0
418
+ #elif !defined(CYTHON_USE_DICT_VERSIONS)
419
+ // Python 3.12a5 deprecated "ma_version_tag"
420
+ #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5)
421
+ #endif
422
+ #if PY_VERSION_HEX < 0x030700A3
423
+ #undef CYTHON_USE_EXC_INFO_STACK
424
+ #define CYTHON_USE_EXC_INFO_STACK 0
425
+ #elif !defined(CYTHON_USE_EXC_INFO_STACK)
426
+ #define CYTHON_USE_EXC_INFO_STACK 1
427
+ #endif
428
+ #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
429
+ #define CYTHON_UPDATE_DESCRIPTOR_DOC 1
430
+ #endif
431
+ #ifndef CYTHON_USE_FREELISTS
432
+ #define CYTHON_USE_FREELISTS 1
433
+ #endif
434
+ #endif
435
+
436
+ #if !defined(CYTHON_FAST_PYCCALL)
437
+ #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)
438
+ #endif
439
+
440
+ #if !defined(CYTHON_VECTORCALL)
441
+ #define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1)
442
+ #endif
443
+
444
+ /* Whether to use METH_FASTCALL with a fake backported implementation of vectorcall */
445
+ #define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1)
446
+
447
+ #if CYTHON_USE_PYLONG_INTERNALS
448
+ #if PY_MAJOR_VERSION < 3
449
+ #include "longintrepr.h"
450
+ #endif
451
+ /* These short defines can easily conflict with other code */
452
+ #undef SHIFT
453
+ #undef BASE
454
+ #undef MASK
455
+ /* Compile-time sanity check that these are indeed equal. Github issue #2670. */
456
+ #ifdef SIZEOF_VOID_P
457
+ enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) };
458
+ #endif
459
+ #endif
460
+
461
+ #ifndef __has_attribute
462
+ #define __has_attribute(x) 0
463
+ #endif
464
+
465
+ #ifndef __has_cpp_attribute
466
+ #define __has_cpp_attribute(x) 0
467
+ #endif
468
+
469
+ // restrict
470
+ #ifndef CYTHON_RESTRICT
471
+ #if defined(__GNUC__)
472
+ #define CYTHON_RESTRICT __restrict__
473
+ #elif defined(_MSC_VER) && _MSC_VER >= 1400
474
+ #define CYTHON_RESTRICT __restrict
475
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
476
+ #define CYTHON_RESTRICT restrict
477
+ #else
478
+ #define CYTHON_RESTRICT
479
+ #endif
480
+ #endif
481
+
482
+ // unused attribute
483
+ #ifndef CYTHON_UNUSED
484
+ #if defined(__cplusplus)
485
+ /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17
486
+ * but leads to warnings with -pedantic, since it is a C++17 feature */
487
+ #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L)
488
+ #if __has_cpp_attribute(maybe_unused)
489
+ #define CYTHON_UNUSED [[maybe_unused]]
490
+ #endif
491
+ #endif
492
+ #endif
493
+ #endif
494
+ #ifndef CYTHON_UNUSED
495
+ # if defined(__GNUC__)
496
+ # if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
497
+ # define CYTHON_UNUSED __attribute__ ((__unused__))
498
+ # else
499
+ # define CYTHON_UNUSED
500
+ # endif
501
+ # elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
502
+ # define CYTHON_UNUSED __attribute__ ((__unused__))
503
+ # else
504
+ # define CYTHON_UNUSED
505
+ # endif
506
+ #endif
507
+
508
+ #ifndef CYTHON_UNUSED_VAR
509
+ # if defined(__cplusplus)
510
+ template<class T> void CYTHON_UNUSED_VAR( const T& ) { }
511
+ # else
512
+ # define CYTHON_UNUSED_VAR(x) (void)(x)
513
+ # endif
514
+ #endif
515
+
516
+ #ifndef CYTHON_MAYBE_UNUSED_VAR
517
+ #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x)
518
+ #endif
519
+
520
+ #ifndef CYTHON_NCP_UNUSED
521
+ # if CYTHON_COMPILING_IN_CPYTHON
522
+ # define CYTHON_NCP_UNUSED
523
+ # else
524
+ # define CYTHON_NCP_UNUSED CYTHON_UNUSED
525
+ # endif
526
+ #endif
527
+
528
+ #ifndef CYTHON_USE_CPP_STD_MOVE
529
+ // msvc doesn't set __cplusplus to a useful value
530
+ #if defined(__cplusplus) && ( \
531
+ __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600))
532
+ #define CYTHON_USE_CPP_STD_MOVE 1
533
+ #else
534
+ #define CYTHON_USE_CPP_STD_MOVE 0
535
+ #endif
536
+ #endif
537
+
538
+ #define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
539
+
540
+ #ifdef _MSC_VER
541
+ #ifndef _MSC_STDINT_H_
542
+ #if _MSC_VER < 1300
543
+ typedef unsigned char uint8_t;
544
+ typedef unsigned short uint16_t;
545
+ typedef unsigned int uint32_t;
546
+ #else
547
+ typedef unsigned __int8 uint8_t;
548
+ typedef unsigned __int16 uint16_t;
549
+ typedef unsigned __int32 uint32_t;
550
+ #endif
551
+ #endif
552
+ #if _MSC_VER < 1300
553
+ #ifdef _WIN64
554
+ typedef unsigned long long __pyx_uintptr_t;
555
+ #else
556
+ typedef unsigned int __pyx_uintptr_t;
557
+ #endif
558
+ #else
559
+ #ifdef _WIN64
560
+ typedef unsigned __int64 __pyx_uintptr_t;
561
+ #else
562
+ typedef unsigned __int32 __pyx_uintptr_t;
563
+ #endif
564
+ #endif
565
+ #else
566
+ #include <stdint.h>
567
+ typedef uintptr_t __pyx_uintptr_t;
568
+ #endif
569
+
570
+
571
+ #ifndef CYTHON_FALLTHROUGH
572
+ #if defined(__cplusplus)
573
+ /* for clang __has_cpp_attribute(fallthrough) is true even before C++17
574
+ * but leads to warnings with -pedantic, since it is a C++17 feature */
575
+ #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L)
576
+ #if __has_cpp_attribute(fallthrough)
577
+ #define CYTHON_FALLTHROUGH [[fallthrough]]
578
+ #endif
579
+ #endif
580
+
581
+ #ifndef CYTHON_FALLTHROUGH
582
+ #if __has_cpp_attribute(clang::fallthrough)
583
+ #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
584
+ #elif __has_cpp_attribute(gnu::fallthrough)
585
+ #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
586
+ #endif
587
+ #endif
588
+ #endif
589
+
590
+ #ifndef CYTHON_FALLTHROUGH
591
+ #if __has_attribute(fallthrough)
592
+ #define CYTHON_FALLTHROUGH __attribute__((fallthrough))
593
+ #else
594
+ #define CYTHON_FALLTHROUGH
595
+ #endif
596
+ #endif
597
+
598
+ #if defined(__clang__) && defined(__apple_build_version__)
599
+ #if __apple_build_version__ < 7000000 /* Xcode < 7.0 */
600
+ #undef CYTHON_FALLTHROUGH
601
+ #define CYTHON_FALLTHROUGH
602
+ #endif
603
+ #endif
604
+ #endif
605
+
606
+ #ifdef __cplusplus
607
+ template <typename T>
608
+ struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);};
609
+ #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL<type>::value)
610
+ #else
611
+ #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0)
612
+ #endif
613
+
614
+ #if CYTHON_COMPILING_IN_PYPY == 1
615
+ #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000)
616
+ #else
617
+ #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000)
618
+ #endif
619
+ // reinterpret
620
+
621
+ // TODO: refactor existing code to use those macros
622
+ #define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer))
623
+ // #define __PYX_REINTERPRET_POINTER(pointer_type, pointer) ((pointer_type)(void *)(pointer))
624
+ // #define __PYX_RUNTIME_REINTERPRET(type, var) (*(type *)(&var))
625
+
626
+
627
+ /////////////// CInitCode ///////////////
628
+
629
+ // inline attribute
630
+ #ifndef CYTHON_INLINE
631
+ #if defined(__clang__)
632
+ #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
633
+ #elif defined(__GNUC__)
634
+ #define CYTHON_INLINE __inline__
635
+ #elif defined(_MSC_VER)
636
+ #define CYTHON_INLINE __inline
637
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
638
+ #define CYTHON_INLINE inline
639
+ #else
640
+ #define CYTHON_INLINE
641
+ #endif
642
+ #endif
643
+
644
+
645
+ /////////////// CppInitCode ///////////////
646
+
647
+ #ifndef __cplusplus
648
+ #error "Cython files generated with the C++ option must be compiled with a C++ compiler."
649
+ #endif
650
+
651
+ // inline attribute
652
+ #ifndef CYTHON_INLINE
653
+ #if defined(__clang__)
654
+ #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
655
+ #else
656
+ #define CYTHON_INLINE inline
657
+ #endif
658
+ #endif
659
+
660
+ // Work around clang bug https://stackoverflow.com/questions/21847816/c-invoke-nested-template-class-destructor
661
+ template<typename T>
662
+ void __Pyx_call_destructor(T& x) {
663
+ x.~T();
664
+ }
665
+
666
+ // Used for temporary variables of "reference" type.
667
+ template<typename T>
668
+ class __Pyx_FakeReference {
669
+ public:
670
+ __Pyx_FakeReference() : ptr(NULL) { }
671
+ // __Pyx_FakeReference(T& ref) : ptr(&ref) { }
672
+ // Const version needed as Cython doesn't know about const overloads (e.g. for stl containers).
673
+ __Pyx_FakeReference(const T& ref) : ptr(const_cast<T*>(&ref)) { }
674
+ T *operator->() { return ptr; }
675
+ T *operator&() { return ptr; }
676
+ operator T&() { return *ptr; }
677
+ // TODO(robertwb): Delegate all operators (or auto-generate unwrapping code where needed).
678
+ template<typename U> bool operator ==(const U& other) const { return *ptr == other; }
679
+ template<typename U> bool operator !=(const U& other) const { return *ptr != other; }
680
+ template<typename U> bool operator==(const __Pyx_FakeReference<U>& other) const { return *ptr == *other.ptr; }
681
+ template<typename U> bool operator!=(const __Pyx_FakeReference<U>& other) const { return *ptr != *other.ptr; }
682
+ private:
683
+ T *ptr;
684
+ };
685
+
686
+
687
+ /////////////// PythonCompatibility ///////////////
688
+
689
+ #define __PYX_BUILD_PY_SSIZE_T "n"
690
+ #define CYTHON_FORMAT_SSIZE_T "z"
691
+
692
+ #if PY_MAJOR_VERSION < 3
693
+ #define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
694
+ #define __Pyx_DefaultClassType PyClass_Type
695
+ #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \
696
+ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
697
+ #else
698
+ #define __Pyx_BUILTIN_MODULE_NAME "builtins"
699
+ #define __Pyx_DefaultClassType PyType_Type
700
+ #if CYTHON_COMPILING_IN_LIMITED_API
701
+ // Note that the limited API doesn't know about PyCodeObject, so the type of this
702
+ // is PyObject (unlike for the main API)
703
+ static CYTHON_INLINE PyObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f,
704
+ PyObject *code, PyObject *c, PyObject* n, PyObject *v,
705
+ PyObject *fv, PyObject *cell, PyObject* fn,
706
+ PyObject *name, int fline, PyObject *lnos) {
707
+ // Backout option for generating a code object.
708
+ // PyCode_NewEmpty isn't in the limited API. Therefore the two options are
709
+ // 1. Python call of the code type with a long list of positional args.
710
+ // 2. Generate a code object by compiling some trivial code, and customize.
711
+ // We use the second because it's less sensitive to changes in the code type
712
+ // constructor with version.
713
+ PyObject *exception_table = NULL;
714
+ PyObject *types_module=NULL, *code_type=NULL, *result=NULL;
715
+ #if __PYX_LIMITED_VERSION_HEX < 0x030B0000
716
+ PyObject *version_info; /* borrowed */
717
+ PyObject *py_minor_version = NULL;
718
+ #endif
719
+ long minor_version = 0;
720
+ PyObject *type, *value, *traceback;
721
+
722
+ // we must be able to call this while an exception is happening - thus clear then restore the state
723
+ PyErr_Fetch(&type, &value, &traceback);
724
+
725
+ #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000
726
+ minor_version = 11;
727
+ // we don't yet need to distinguish between versions > 11
728
+ // Note that from 3.13, when we do, we can use Py_Version
729
+ #else
730
+ if (!(version_info = PySys_GetObject("version_info"))) goto end;
731
+ if (!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end;
732
+ minor_version = PyLong_AsLong(py_minor_version);
733
+ Py_DECREF(py_minor_version);
734
+ if (minor_version == -1 && PyErr_Occurred()) goto end;
735
+ #endif
736
+
737
+ if (!(types_module = PyImport_ImportModule("types"))) goto end;
738
+ if (!(code_type = PyObject_GetAttrString(types_module, "CodeType"))) goto end;
739
+
740
+ if (minor_version <= 7) {
741
+ // 3.7:
742
+ // code(argcount, kwonlyargcount, nlocals, stacksize, flags, codestring,
743
+ // constants, names, varnames, filename, name, firstlineno,
744
+ // lnotab[, freevars[, cellvars]])
745
+ (void)p;
746
+ result = PyObject_CallFunction(code_type, "iiiiiOOOOOOiOO", a, k, l, s, f, code,
747
+ c, n, v, fn, name, fline, lnos, fv, cell);
748
+ } else if (minor_version <= 10) {
749
+ // 3.8, 3.9, 3.10
750
+ // code(argcount, posonlyargcount, kwonlyargcount, nlocals, stacksize,
751
+ // flags, codestring, constants, names, varnames, filename, name,
752
+ // firstlineno, lnotab[, freevars[, cellvars]])
753
+ // 3.10 switches lnotab for linetable, but is otherwise the same
754
+ result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOiOO", a,p, k, l, s, f, code,
755
+ c, n, v, fn, name, fline, lnos, fv, cell);
756
+ } else {
757
+ // 3.11, 3.12
758
+ // code(argcount, posonlyargcount, kwonlyargcount, nlocals, stacksize,
759
+ // flags, codestring, constants, names, varnames, filename, name,
760
+ // qualname, firstlineno, linetable, exceptiontable, freevars=(), cellvars=(), /)
761
+ // We use name and qualname for simplicity
762
+ if (!(exception_table = PyBytes_FromStringAndSize(NULL, 0))) goto end;
763
+ result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOOiOO", a,p, k, l, s, f, code,
764
+ c, n, v, fn, name, name, fline, lnos, exception_table, fv, cell);
765
+ }
766
+
767
+ end:
768
+ Py_XDECREF(code_type);
769
+ Py_XDECREF(exception_table);
770
+ Py_XDECREF(types_module);
771
+ if (type) {
772
+ PyErr_Restore(type, value, traceback);
773
+ }
774
+ return result;
775
+ }
776
+
777
+ // Cython uses these constants but they are not available in the limited API.
778
+ // (it'd be nice if there was a more robust way of looking these up)
779
+ #ifndef CO_OPTIMIZED
780
+ #define CO_OPTIMIZED 0x0001
781
+ #endif
782
+ #ifndef CO_NEWLOCALS
783
+ #define CO_NEWLOCALS 0x0002
784
+ #endif
785
+ #ifndef CO_VARARGS
786
+ #define CO_VARARGS 0x0004
787
+ #endif
788
+ #ifndef CO_VARKEYWORDS
789
+ #define CO_VARKEYWORDS 0x0008
790
+ #endif
791
+ #ifndef CO_ASYNC_GENERATOR
792
+ #define CO_ASYNC_GENERATOR 0x0200
793
+ #endif
794
+ #ifndef CO_GENERATOR
795
+ #define CO_GENERATOR 0x0020
796
+ #endif
797
+ #ifndef CO_COROUTINE
798
+ #define CO_COROUTINE 0x0080
799
+ #endif
800
+ #elif PY_VERSION_HEX >= 0x030B0000
801
+ static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f,
802
+ PyObject *code, PyObject *c, PyObject* n, PyObject *v,
803
+ PyObject *fv, PyObject *cell, PyObject* fn,
804
+ PyObject *name, int fline, PyObject *lnos) {
805
+ // As earlier versions, but
806
+ // 1. pass an empty bytes string as exception_table
807
+ // 2. pass name as qualname (TODO this might implementing properly in future)
808
+ PyCodeObject *result;
809
+ PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); /* we don't have access to __pyx_empty_bytes here */
810
+ if (!empty_bytes) return NULL;
811
+ result =
812
+ #if PY_VERSION_HEX >= 0x030C0000
813
+ PyUnstable_Code_NewWithPosOnlyArgs
814
+ #else
815
+ PyCode_NewWithPosOnlyArgs
816
+ #endif
817
+ (a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, name, fline, lnos, empty_bytes);
818
+ Py_DECREF(empty_bytes);
819
+ return result;
820
+ }
821
+ #elif PY_VERSION_HEX >= 0x030800B2 && !CYTHON_COMPILING_IN_PYPY
822
+ #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \
823
+ PyCode_NewWithPosOnlyArgs(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
824
+ #else
825
+ #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \
826
+ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
827
+ #endif
828
+ #endif
829
+
830
+ #if PY_VERSION_HEX >= 0x030900A4 || defined(Py_IS_TYPE)
831
+ #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type)
832
+ #else
833
+ #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type))
834
+ #endif
835
+
836
+ #if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is)
837
+ #define __Pyx_Py_Is(x, y) Py_Is(x, y)
838
+ #else
839
+ #define __Pyx_Py_Is(x, y) ((x) == (y))
840
+ #endif
841
+ #if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone)
842
+ #define __Pyx_Py_IsNone(ob) Py_IsNone(ob)
843
+ #else
844
+ #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None)
845
+ #endif
846
+ #if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue)
847
+ #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob)
848
+ #else
849
+ #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True)
850
+ #endif
851
+ #if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse)
852
+ #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob)
853
+ #else
854
+ #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False)
855
+ #endif
856
+ #define __Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj))
857
+
858
+ #if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY
859
+ #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o)
860
+ #else
861
+ #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o)
862
+ #endif
863
+
864
+ #ifndef CO_COROUTINE
865
+ #define CO_COROUTINE 0x80
866
+ #endif
867
+ #ifndef CO_ASYNC_GENERATOR
868
+ #define CO_ASYNC_GENERATOR 0x200
869
+ #endif
870
+
871
+ #ifndef Py_TPFLAGS_CHECKTYPES
872
+ #define Py_TPFLAGS_CHECKTYPES 0
873
+ #endif
874
+ #ifndef Py_TPFLAGS_HAVE_INDEX
875
+ #define Py_TPFLAGS_HAVE_INDEX 0
876
+ #endif
877
+ #ifndef Py_TPFLAGS_HAVE_NEWBUFFER
878
+ #define Py_TPFLAGS_HAVE_NEWBUFFER 0
879
+ #endif
880
+ #ifndef Py_TPFLAGS_HAVE_FINALIZE
881
+ #define Py_TPFLAGS_HAVE_FINALIZE 0
882
+ #endif
883
+ #ifndef Py_TPFLAGS_SEQUENCE
884
+ #define Py_TPFLAGS_SEQUENCE 0
885
+ #endif
886
+ #ifndef Py_TPFLAGS_MAPPING
887
+ #define Py_TPFLAGS_MAPPING 0
888
+ #endif
889
+
890
+ #ifndef METH_STACKLESS
891
+ // already defined for Stackless Python (all versions) and C-Python >= 3.7
892
+ // value if defined: Stackless Python < 3.6: 0x80 else 0x100
893
+ #define METH_STACKLESS 0
894
+ #endif
895
+ #if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)
896
+ // new in CPython 3.6, but changed in 3.7 - see
897
+ // positional-only parameters:
898
+ // https://bugs.python.org/issue29464
899
+ // const args:
900
+ // https://bugs.python.org/issue32240
901
+ #ifndef METH_FASTCALL
902
+ #define METH_FASTCALL 0x80
903
+ #endif
904
+ typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);
905
+ // new in CPython 3.7, used to be old signature of _PyCFunctionFast() in 3.6
906
+ typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,
907
+ Py_ssize_t nargs, PyObject *kwnames);
908
+ #else
909
+ #if PY_VERSION_HEX >= 0x030d00A4
910
+ # define __Pyx_PyCFunctionFast PyCFunctionFast
911
+ # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords
912
+ #else
913
+ # define __Pyx_PyCFunctionFast _PyCFunctionFast
914
+ # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords
915
+ #endif
916
+ #endif
917
+
918
+ #if CYTHON_METH_FASTCALL
919
+ #define __Pyx_METH_FASTCALL METH_FASTCALL
920
+ #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast
921
+ #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords
922
+ #else
923
+ #define __Pyx_METH_FASTCALL METH_VARARGS
924
+ #define __Pyx_PyCFunction_FastCall PyCFunction
925
+ #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords
926
+ #endif
927
+
928
+ #if CYTHON_VECTORCALL
929
+ #define __pyx_vectorcallfunc vectorcallfunc
930
+ #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET
931
+ #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n))
932
+ #elif CYTHON_BACKPORT_VECTORCALL
933
+ typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args,
934
+ size_t nargsf, PyObject *kwnames);
935
+ #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1))
936
+ #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET))
937
+ #else
938
+ #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0
939
+ #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n))
940
+ #endif
941
+
942
+ // These PyCFunction related macros get redefined in CythonFunction.c.
943
+ // We need our own copies because the inline functions in CPython have a type-check assert
944
+ // that breaks with a CyFunction in debug mode.
945
+ #if PY_MAJOR_VERSION >= 0x030900B1
946
+ #define __Pyx_PyCFunction_CheckExact(func) PyCFunction_CheckExact(func)
947
+ #else
948
+ #define __Pyx_PyCFunction_CheckExact(func) PyCFunction_Check(func)
949
+ #endif
950
+ #define __Pyx_CyOrPyCFunction_Check(func) PyCFunction_Check(func)
951
+
952
+ #if CYTHON_COMPILING_IN_CPYTHON
953
+ #define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) (((PyCFunctionObject*)(func))->m_ml->ml_meth)
954
+ #elif !CYTHON_COMPILING_IN_LIMITED_API
955
+ // It's probably easier for non-CPythons to support PyCFunction_GET_FUNCTION() than the object struct layout.
956
+ #define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) PyCFunction_GET_FUNCTION(func)
957
+ // Unused in CYTHON_COMPILING_IN_LIMITED_API.
958
+ #endif
959
+ #if CYTHON_COMPILING_IN_CPYTHON
960
+ #define __Pyx_CyOrPyCFunction_GET_FLAGS(func) (((PyCFunctionObject*)(func))->m_ml->ml_flags)
961
+ static CYTHON_INLINE PyObject* __Pyx_CyOrPyCFunction_GET_SELF(PyObject *func) {
962
+ return (__Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_STATIC) ? NULL : ((PyCFunctionObject*)func)->m_self;
963
+ }
964
+ // Only used if CYTHON_COMPILING_IN_CPYTHON.
965
+ #endif
966
+ static CYTHON_INLINE int __Pyx__IsSameCFunction(PyObject *func, void *cfunc) {
967
+ #if CYTHON_COMPILING_IN_LIMITED_API
968
+ return PyCFunction_Check(func) && PyCFunction_GetFunction(func) == (PyCFunction) cfunc;
969
+ #else
970
+ return PyCFunction_Check(func) && PyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc;
971
+ #endif
972
+ }
973
+ #define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCFunction(func, cfunc)
974
+
975
+ // PEP-573: PyCFunction holds reference to defining class (PyCMethodObject)
976
+ #if __PYX_LIMITED_VERSION_HEX < 0x030900B1
977
+ #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b))
978
+ typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *);
979
+ #else
980
+ #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b)
981
+ #define __Pyx_PyCMethod PyCMethod
982
+ #endif
983
+ #ifndef METH_METHOD
984
+ #define METH_METHOD 0x200
985
+ #endif
986
+
987
+ #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
988
+ #define PyObject_Malloc(s) PyMem_Malloc(s)
989
+ #define PyObject_Free(p) PyMem_Free(p)
990
+ #define PyObject_Realloc(p) PyMem_Realloc(p)
991
+ #endif
992
+
993
+ #if CYTHON_COMPILING_IN_LIMITED_API
994
+ #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0)
995
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno)
996
+ #else
997
+ #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0)
998
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno)
999
+ #endif
1000
+
1001
+ #if CYTHON_COMPILING_IN_LIMITED_API
1002
+ #define __Pyx_PyThreadState_Current PyThreadState_Get()
1003
+ #elif !CYTHON_FAST_THREAD_STATE
1004
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
1005
+ #elif PY_VERSION_HEX >= 0x030d00A1
1006
+ //#elif PY_VERSION_HEX >= 0x03050200
1007
+ // Actually added in 3.5.2, but compiling against that does not guarantee that we get imported there.
1008
+ #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked()
1009
+ #elif PY_VERSION_HEX >= 0x03060000
1010
+ //#elif PY_VERSION_HEX >= 0x03050200
1011
+ // Actually added in 3.5.2, but compiling against that does not guarantee that we get imported there.
1012
+ #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet()
1013
+ #elif PY_VERSION_HEX >= 0x03000000
1014
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
1015
+ #else
1016
+ #define __Pyx_PyThreadState_Current _PyThreadState_Current
1017
+ #endif
1018
+
1019
+ #if CYTHON_COMPILING_IN_LIMITED_API
1020
+ static CYTHON_INLINE void *__Pyx_PyModule_GetState(PyObject *op)
1021
+ {
1022
+ void *result;
1023
+
1024
+ result = PyModule_GetState(op);
1025
+ if (!result)
1026
+ Py_FatalError("Couldn't find the module state");
1027
+ return result;
1028
+ }
1029
+ #endif
1030
+
1031
+ #define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE(obj), name, func_ctype)
1032
+ #if CYTHON_COMPILING_IN_LIMITED_API
1033
+ #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name))
1034
+ #else
1035
+ #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name)
1036
+ #endif
1037
+
1038
+ // TSS (Thread Specific Storage) API
1039
+ #if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT)
1040
+ #include "pythread.h"
1041
+ #define Py_tss_NEEDS_INIT 0
1042
+ typedef int Py_tss_t;
1043
+ static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
1044
+ *key = PyThread_create_key();
1045
+ return 0; /* PyThread_create_key reports success always */
1046
+ }
1047
+ static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
1048
+ Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
1049
+ *key = Py_tss_NEEDS_INIT;
1050
+ return key;
1051
+ }
1052
+ static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) {
1053
+ PyObject_Free(key);
1054
+ }
1055
+ static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) {
1056
+ return *key != Py_tss_NEEDS_INIT;
1057
+ }
1058
+ static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) {
1059
+ PyThread_delete_key(*key);
1060
+ *key = Py_tss_NEEDS_INIT;
1061
+ }
1062
+ static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
1063
+ return PyThread_set_key_value(*key, value);
1064
+ }
1065
+ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
1066
+ return PyThread_get_key_value(*key);
1067
+ }
1068
+ // PyThread_delete_key_value(key) is equivalent to PyThread_set_key_value(key, NULL)
1069
+ // PyThread_ReInitTLS() is a no-op
1070
+ #endif /* TSS (Thread Specific Storage) API */
1071
+
1072
+
1073
+ #if PY_MAJOR_VERSION < 3
1074
+ #if CYTHON_COMPILING_IN_PYPY
1075
+ #if PYPY_VERSION_NUM < 0x07030600
1076
+ #if defined(__cplusplus) && __cplusplus >= 201402L
1077
+ [[deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")]]
1078
+ #elif defined(__GNUC__) || defined(__clang__)
1079
+ __attribute__ ((__deprecated__("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")))
1080
+ #elif defined(_MSC_VER)
1081
+ __declspec(deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6"))
1082
+ #endif
1083
+ static CYTHON_INLINE int PyGILState_Check(void) {
1084
+ // PyGILState_Check is used to decide whether to release the GIL when we don't
1085
+ // know that we have it. For PyPy2 it isn't possible to check.
1086
+ // Therefore assume that we don't have the GIL (which causes us not to release it,
1087
+ // but is "safe")
1088
+ return 0;
1089
+ }
1090
+ #else // PYPY_VERSION_NUM < 0x07030600
1091
+ // PyPy2 >= 7.3.6 has PyGILState_Check
1092
+ #endif // PYPY_VERSION_NUM < 0x07030600
1093
+ #else
1094
+ // https://stackoverflow.com/a/25666624
1095
+ static CYTHON_INLINE int PyGILState_Check(void) {
1096
+ PyThreadState * tstate = _PyThreadState_Current;
1097
+ return tstate && (tstate == PyGILState_GetThisThreadState());
1098
+ }
1099
+ #endif
1100
+ #endif
1101
+
1102
+ #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000 || defined(_PyDict_NewPresized)
1103
+ #define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
1104
+ #else
1105
+ #define __Pyx_PyDict_NewPresized(n) PyDict_New()
1106
+ #endif
1107
+
1108
+ #if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION
1109
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
1110
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
1111
+ #else
1112
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)
1113
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
1114
+ #endif
1115
+
1116
+ #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B4 && PY_VERSION_HEX < 0x030d0000 && CYTHON_USE_UNICODE_INTERNALS
1117
+ // _PyDict_GetItem_KnownHash() existed from CPython 3.5 to 3.12, but it was
1118
+ // dropping exceptions in 3.5. Since 3.6, exceptions are kept.
1119
+ #define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)
1120
+ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) {
1121
+ PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name);
1122
+ if (res == NULL) PyErr_Clear();
1123
+ return res;
1124
+ }
1125
+ #elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000)
1126
+ #define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError
1127
+ #define __Pyx_PyDict_GetItemStr PyDict_GetItem
1128
+ #else
1129
+ static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) {
1130
+ // This is tricky - we should return a borrowed reference but not swallow non-KeyError exceptions. 8-|
1131
+ // But: this function is only used in Py2 and older PyPys,
1132
+ // and currently only for argument parsing and other non-correctness-critical lookups
1133
+ // and we know that 'name' is an interned 'str' with pre-calculated hash value (only comparisons can fail),
1134
+ // thus, performance matters more than correctness here, especially in the "not found" case.
1135
+ #if CYTHON_COMPILING_IN_PYPY
1136
+ // So we ignore any exceptions in old PyPys ...
1137
+ return PyDict_GetItem(dict, name);
1138
+ #else
1139
+ // and hack together a stripped-down and modified PyDict_GetItem() in CPython 2.
1140
+ PyDictEntry *ep;
1141
+ PyDictObject *mp = (PyDictObject*) dict;
1142
+ long hash = ((PyStringObject *) name)->ob_shash;
1143
+ assert(hash != -1); /* hash values of interned strings are always initialised */
1144
+ ep = (mp->ma_lookup)(mp, name, hash);
1145
+ if (ep == NULL) {
1146
+ // error occurred
1147
+ return NULL;
1148
+ }
1149
+ // found or not found
1150
+ return ep->me_value;
1151
+ #endif
1152
+ }
1153
+ #define __Pyx_PyDict_GetItemStr PyDict_GetItem
1154
+ #endif
1155
+
1156
+ /* Type slots */
1157
+
1158
+ #if CYTHON_USE_TYPE_SLOTS
1159
+ #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags)
1160
+ #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0)
1161
+ #define __Pyx_PyObject_GetIterNextFunc(obj) (Py_TYPE(obj)->tp_iternext)
1162
+ #else
1163
+ #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp))
1164
+ #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature)
1165
+ #define __Pyx_PyObject_GetIterNextFunc(obj) PyIter_Next
1166
+ #endif
1167
+
1168
+ #if CYTHON_COMPILING_IN_LIMITED_API
1169
+ // Using PyObject_GenericSetAttr to bypass types immutability protection feels
1170
+ // a little hacky, but it does work in the limited API .
1171
+ // (It doesn't work on PyPy but that probably isn't a bug.)
1172
+ #define __Pyx_SetItemOnTypeDict(tp, k, v) PyObject_GenericSetAttr((PyObject*)tp, k, v)
1173
+ #else
1174
+ #define __Pyx_SetItemOnTypeDict(tp, k, v) PyDict_SetItem(tp->tp_dict, k, v)
1175
+ #endif
1176
+
1177
+ #if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000
1178
+ // In Py3.8+, instances of heap types need to decref their type on deallocation.
1179
+ // https://bugs.python.org/issue35810
1180
+ #define __Pyx_PyHeapTypeObject_GC_Del(obj) { \
1181
+ PyTypeObject *type = Py_TYPE((PyObject*)obj); \
1182
+ assert(__Pyx_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)); \
1183
+ PyObject_GC_Del(obj); \
1184
+ Py_DECREF(type); \
1185
+ }
1186
+ #else
1187
+ #define __Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj)
1188
+ #endif
1189
+
1190
+ #if CYTHON_COMPILING_IN_LIMITED_API
1191
+ #define CYTHON_PEP393_ENABLED 1
1192
+ #define __Pyx_PyUnicode_READY(op) (0)
1193
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GetLength(u)
1194
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i)
1195
+ #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U)
1196
+ #define __Pyx_PyUnicode_KIND(u) ((void)u, (0))
1197
+ // __Pyx_PyUnicode_DATA() and __Pyx_PyUnicode_READ() must go together, e.g. for iteration.
1198
+ #define __Pyx_PyUnicode_DATA(u) ((void*)u)
1199
+ #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i))
1200
+ //#define __Pyx_PyUnicode_WRITE(k, d, i, ch) /* not available */
1201
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u))
1202
+ #elif PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
1203
+ /* new Py3.3 unicode type (PEP 393) */
1204
+ #define CYTHON_PEP393_ENABLED 1
1205
+
1206
+ #if PY_VERSION_HEX >= 0x030C0000
1207
+ // Py3.12 / PEP-623 removed wstr type unicode strings and all of the PyUnicode_READY() machinery.
1208
+ #define __Pyx_PyUnicode_READY(op) (0)
1209
+ #else
1210
+ #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ? \
1211
+ 0 : _PyUnicode_Ready((PyObject *)(op)))
1212
+ #endif
1213
+
1214
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
1215
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
1216
+ #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
1217
+ #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u))
1218
+ #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
1219
+ #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
1220
+ #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch)
1221
+ #if PY_VERSION_HEX >= 0x030C0000
1222
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u))
1223
+ #else
1224
+ #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000
1225
+ // Avoid calling deprecated C-API functions in Py3.9+ that PEP-623 schedules for removal in Py3.12.
1226
+ // https://www.python.org/dev/peps/pep-0623/
1227
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length))
1228
+ #else
1229
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
1230
+ #endif
1231
+ #endif
1232
+ #else
1233
+ #define CYTHON_PEP393_ENABLED 0
1234
+ #define PyUnicode_1BYTE_KIND 1
1235
+ #define PyUnicode_2BYTE_KIND 2
1236
+ #define PyUnicode_4BYTE_KIND 4
1237
+ #define __Pyx_PyUnicode_READY(op) (0)
1238
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u)
1239
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i]))
1240
+ #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535U : 1114111U)
1241
+ #define __Pyx_PyUnicode_KIND(u) ((int)sizeof(Py_UNICODE))
1242
+ #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u))
1243
+ // (void)(k) => avoid unused variable warning due to macro:
1244
+ #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i]))
1245
+ #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = (Py_UNICODE) ch)
1246
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u))
1247
+ #endif
1248
+
1249
+ #if CYTHON_COMPILING_IN_PYPY
1250
+ #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b)
1251
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b)
1252
+ #else
1253
+ #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b)
1254
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ? \
1255
+ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b))
1256
+ #endif
1257
+
1258
+ #if CYTHON_COMPILING_IN_PYPY
1259
+ #if !defined(PyUnicode_DecodeUnicodeEscape)
1260
+ #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors)
1261
+ #endif
1262
+ #if !defined(PyUnicode_Contains) || (PY_MAJOR_VERSION == 2 && PYPY_VERSION_NUM < 0x07030500)
1263
+ #undef PyUnicode_Contains
1264
+ #define PyUnicode_Contains(u, s) PySequence_Contains(u, s)
1265
+ #endif
1266
+ #if !defined(PyByteArray_Check)
1267
+ #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type)
1268
+ #endif
1269
+ #if !defined(PyObject_Format)
1270
+ #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt)
1271
+ #endif
1272
+ #endif
1273
+
1274
+ // ("..." % x) must call PyNumber_Remainder() if x is a string subclass that implements "__rmod__()".
1275
+ #define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b))
1276
+ #define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b))
1277
+
1278
+ #if PY_MAJOR_VERSION >= 3
1279
+ #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b)
1280
+ #else
1281
+ #define __Pyx_PyString_Format(a, b) PyString_Format(a, b)
1282
+ #endif
1283
+
1284
+ #if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII)
1285
+ #define PyObject_ASCII(o) PyObject_Repr(o)
1286
+ #endif
1287
+
1288
+ #if PY_MAJOR_VERSION >= 3
1289
+ #define PyBaseString_Type PyUnicode_Type
1290
+ #define PyStringObject PyUnicodeObject
1291
+ #define PyString_Type PyUnicode_Type
1292
+ #define PyString_Check PyUnicode_Check
1293
+ #define PyString_CheckExact PyUnicode_CheckExact
1294
+ // PyPy3 used to define "PyObject_Unicode"
1295
+ #ifndef PyObject_Unicode
1296
+ #define PyObject_Unicode PyObject_Str
1297
+ #endif
1298
+ #endif
1299
+
1300
+ #if PY_MAJOR_VERSION >= 3
1301
+ #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
1302
+ #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
1303
+ #else
1304
+ #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj))
1305
+ #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj))
1306
+ #endif
1307
+
1308
+ #if CYTHON_COMPILING_IN_CPYTHON
1309
+ #define __Pyx_PySequence_ListKeepNew(obj) \
1310
+ (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? __Pyx_NewRef(obj) : PySequence_List(obj))
1311
+ #else
1312
+ #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj)
1313
+ #endif
1314
+
1315
+ #ifndef PySet_CheckExact
1316
+ #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type)
1317
+ #endif
1318
+
1319
+ #if PY_VERSION_HEX >= 0x030900A4
1320
+ #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt)
1321
+ #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size)
1322
+ #else
1323
+ #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt)
1324
+ #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size)
1325
+ #endif
1326
+
1327
+ #if CYTHON_ASSUME_SAFE_MACROS
1328
+ #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i)
1329
+ #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
1330
+ #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0))
1331
+ #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0))
1332
+ #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o)
1333
+ #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o)
1334
+ #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o)
1335
+ #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o)
1336
+ #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o)
1337
+ #else
1338
+ #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i)
1339
+ // NOTE: might fail with exception => check for -1
1340
+ #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq)
1341
+ // Note that this doesn't leak a reference to whatever's at o[i]
1342
+ #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v)
1343
+ #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v)
1344
+ #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o)
1345
+ #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o)
1346
+ #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o)
1347
+ #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o)
1348
+ #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o)
1349
+ #endif
1350
+
1351
+ #if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1
1352
+ #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name)
1353
+ #else
1354
+ static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) {
1355
+ PyObject *module = PyImport_AddModule(name);
1356
+ Py_XINCREF(module);
1357
+ return module;
1358
+ }
1359
+ #endif
1360
+
1361
+ #if PY_MAJOR_VERSION >= 3
1362
+ #define PyIntObject PyLongObject
1363
+ #define PyInt_Type PyLong_Type
1364
+ #define PyInt_Check(op) PyLong_Check(op)
1365
+ #define PyInt_CheckExact(op) PyLong_CheckExact(op)
1366
+ #define __Pyx_Py3Int_Check(op) PyLong_Check(op)
1367
+ #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op)
1368
+ #define PyInt_FromString PyLong_FromString
1369
+ #define PyInt_FromUnicode PyLong_FromUnicode
1370
+ #define PyInt_FromLong PyLong_FromLong
1371
+ #define PyInt_FromSize_t PyLong_FromSize_t
1372
+ #define PyInt_FromSsize_t PyLong_FromSsize_t
1373
+ #define PyInt_AsLong PyLong_AsLong
1374
+ #define PyInt_AS_LONG PyLong_AS_LONG
1375
+ #define PyInt_AsSsize_t PyLong_AsSsize_t
1376
+ #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask
1377
+ #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
1378
+ #define PyNumber_Int PyNumber_Long
1379
+ #else
1380
+ #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op))
1381
+ #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op))
1382
+ #endif
1383
+
1384
+ #if PY_MAJOR_VERSION >= 3
1385
+ #define PyBoolObject PyLongObject
1386
+ #endif
1387
+
1388
+ #if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY
1389
+ #ifndef PyUnicode_InternFromString
1390
+ #define PyUnicode_InternFromString(s) PyUnicode_FromString(s)
1391
+ #endif
1392
+ #endif
1393
+
1394
+ #if PY_VERSION_HEX < 0x030200A4
1395
+ typedef long Py_hash_t;
1396
+ #define __Pyx_PyInt_FromHash_t PyInt_FromLong
1397
+ #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t
1398
+ #else
1399
+ #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
1400
+ #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t
1401
+ #endif
1402
+
1403
+ // backport of PyAsyncMethods from Py3.5 to older Py3.x versions
1404
+ // (mis-)using the "tp_reserved" type slot which is re-activated as "tp_as_async" in Py3.5
1405
+ #if CYTHON_USE_ASYNC_SLOTS
1406
+ #if PY_VERSION_HEX >= 0x030500B1
1407
+ #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
1408
+ #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
1409
+ #else
1410
+ #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
1411
+ #endif
1412
+ #else
1413
+ #define __Pyx_PyType_AsAsync(obj) NULL
1414
+ #endif
1415
+ #ifndef __Pyx_PyAsyncMethodsStruct
1416
+ typedef struct {
1417
+ unaryfunc am_await;
1418
+ unaryfunc am_aiter;
1419
+ unaryfunc am_anext;
1420
+ } __Pyx_PyAsyncMethodsStruct;
1421
+ #endif
1422
+
1423
+
1424
+ /////////////// IncludeStructmemberH.proto ///////////////
1425
+
1426
+ #include <structmember.h>
1427
+
1428
+
1429
+ /////////////// SmallCodeConfig.proto ///////////////
1430
+
1431
+ #ifndef CYTHON_SMALL_CODE
1432
+ #if defined(__clang__)
1433
+ #define CYTHON_SMALL_CODE
1434
+ #elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))
1435
+ #define CYTHON_SMALL_CODE __attribute__((cold))
1436
+ #else
1437
+ #define CYTHON_SMALL_CODE
1438
+ #endif
1439
+ #endif
1440
+
1441
+
1442
+ /////////////// PyModInitFuncType.proto ///////////////
1443
+
1444
+ #ifndef CYTHON_NO_PYINIT_EXPORT
1445
+ #define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
1446
+
1447
+ #elif PY_MAJOR_VERSION < 3
1448
+ // Py2: define this to void manually because PyMODINIT_FUNC adds __declspec(dllexport) to it's definition.
1449
+ #ifdef __cplusplus
1450
+ #define __Pyx_PyMODINIT_FUNC extern "C" void
1451
+ #else
1452
+ #define __Pyx_PyMODINIT_FUNC void
1453
+ #endif
1454
+
1455
+ #else
1456
+ // Py3+: define this to PyObject * manually because PyMODINIT_FUNC adds __declspec(dllexport) to it's definition.
1457
+ #ifdef __cplusplus
1458
+ #define __Pyx_PyMODINIT_FUNC extern "C" PyObject *
1459
+ #else
1460
+ #define __Pyx_PyMODINIT_FUNC PyObject *
1461
+ #endif
1462
+ #endif
1463
+
1464
+
1465
+ /////////////// FastTypeChecks.proto ///////////////
1466
+
1467
+ #if CYTHON_COMPILING_IN_CPYTHON
1468
+ #define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)
1469
+ #define __Pyx_TypeCheck2(obj, type1, type2) __Pyx_IsAnySubtype2(Py_TYPE(obj), (PyTypeObject *)type1, (PyTypeObject *)type2)
1470
+ static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);/*proto*/
1471
+ static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b);/*proto*/
1472
+ static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);/*proto*/
1473
+ static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);/*proto*/
1474
+ #else
1475
+ #define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
1476
+ #define __Pyx_TypeCheck2(obj, type1, type2) (PyObject_TypeCheck(obj, (PyTypeObject *)type1) || PyObject_TypeCheck(obj, (PyTypeObject *)type2))
1477
+ #define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)
1478
+ #define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))
1479
+ #endif
1480
+ #define __Pyx_PyErr_ExceptionMatches2(err1, err2) __Pyx_PyErr_GivenExceptionMatches2(__Pyx_PyErr_CurrentExceptionType(), err1, err2)
1481
+
1482
+ #define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)
1483
+
1484
+ /////////////// FastTypeChecks ///////////////
1485
+ //@requires: Exceptions.c::PyThreadStateGet
1486
+ //@requires: Exceptions.c::PyErrFetchRestore
1487
+
1488
+ #if CYTHON_COMPILING_IN_CPYTHON
1489
+ static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) {
1490
+ while (a) {
1491
+ a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*);
1492
+ if (a == b)
1493
+ return 1;
1494
+ }
1495
+ return b == &PyBaseObject_Type;
1496
+ }
1497
+
1498
+ static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) {
1499
+ PyObject *mro;
1500
+ if (a == b) return 1;
1501
+ mro = a->tp_mro;
1502
+ if (likely(mro)) {
1503
+ Py_ssize_t i, n;
1504
+ n = PyTuple_GET_SIZE(mro);
1505
+ for (i = 0; i < n; i++) {
1506
+ if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b)
1507
+ return 1;
1508
+ }
1509
+ return 0;
1510
+ }
1511
+ // should only get here for incompletely initialised types, i.e. never under normal usage patterns
1512
+ return __Pyx_InBases(a, b);
1513
+ }
1514
+
1515
+ static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) {
1516
+ PyObject *mro;
1517
+ if (cls == a || cls == b) return 1;
1518
+ mro = cls->tp_mro;
1519
+ if (likely(mro)) {
1520
+ Py_ssize_t i, n;
1521
+ n = PyTuple_GET_SIZE(mro);
1522
+ for (i = 0; i < n; i++) {
1523
+ PyObject *base = PyTuple_GET_ITEM(mro, i);
1524
+ if (base == (PyObject *)a || base == (PyObject *)b)
1525
+ return 1;
1526
+ }
1527
+ return 0;
1528
+ }
1529
+ // should only get here for incompletely initialised types, i.e. never under normal usage patterns
1530
+ return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b);
1531
+ }
1532
+
1533
+
1534
+ #if PY_MAJOR_VERSION == 2
1535
+ static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) {
1536
+ // PyObject_IsSubclass() can recurse and therefore is not safe
1537
+ PyObject *exception, *value, *tb;
1538
+ int res;
1539
+ __Pyx_PyThreadState_declare
1540
+ __Pyx_PyThreadState_assign
1541
+ __Pyx_ErrFetch(&exception, &value, &tb);
1542
+
1543
+ res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0;
1544
+ // This function must not fail, so print the error here (which also clears it)
1545
+ if (unlikely(res == -1)) {
1546
+ PyErr_WriteUnraisable(err);
1547
+ res = 0;
1548
+ }
1549
+ if (!res) {
1550
+ res = PyObject_IsSubclass(err, exc_type2);
1551
+ // This function must not fail, so print the error here (which also clears it)
1552
+ if (unlikely(res == -1)) {
1553
+ PyErr_WriteUnraisable(err);
1554
+ res = 0;
1555
+ }
1556
+ }
1557
+
1558
+ __Pyx_ErrRestore(exception, value, tb);
1559
+ return res;
1560
+ }
1561
+ #else
1562
+ static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) {
1563
+ if (exc_type1) {
1564
+ return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2);
1565
+ } else {
1566
+ return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);
1567
+ }
1568
+ }
1569
+ #endif
1570
+
1571
+ // so far, we only call PyErr_GivenExceptionMatches() with an exception type (not instance) as first argument
1572
+ // => optimise for that case
1573
+
1574
+ static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
1575
+ Py_ssize_t i, n;
1576
+ assert(PyExceptionClass_Check(exc_type));
1577
+ n = PyTuple_GET_SIZE(tuple);
1578
+ #if PY_MAJOR_VERSION >= 3
1579
+ // the tighter subtype checking in Py3 allows faster out-of-order comparison
1580
+ for (i=0; i<n; i++) {
1581
+ if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
1582
+ }
1583
+ #endif
1584
+ for (i=0; i<n; i++) {
1585
+ PyObject *t = PyTuple_GET_ITEM(tuple, i);
1586
+ #if PY_MAJOR_VERSION < 3
1587
+ if (likely(exc_type == t)) return 1;
1588
+ #endif
1589
+ if (likely(PyExceptionClass_Check(t))) {
1590
+ if (__Pyx_inner_PyErr_GivenExceptionMatches2(exc_type, NULL, t)) return 1;
1591
+ } else {
1592
+ // FIXME: Py3: PyErr_SetString(PyExc_TypeError, "catching classes that do not inherit from BaseException is not allowed");
1593
+ }
1594
+ }
1595
+ return 0;
1596
+ }
1597
+
1598
+ static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) {
1599
+ if (likely(err == exc_type)) return 1;
1600
+ if (likely(PyExceptionClass_Check(err))) {
1601
+ if (likely(PyExceptionClass_Check(exc_type))) {
1602
+ return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type);
1603
+ } else if (likely(PyTuple_Check(exc_type))) {
1604
+ return __Pyx_PyErr_GivenExceptionMatchesTuple(err, exc_type);
1605
+ } else {
1606
+ // FIXME: Py3: PyErr_SetString(PyExc_TypeError, "catching classes that do not inherit from BaseException is not allowed");
1607
+ }
1608
+ }
1609
+ return PyErr_GivenExceptionMatches(err, exc_type);
1610
+ }
1611
+
1612
+ static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) {
1613
+ // Only used internally with known exception types => pure safety check assertions.
1614
+ assert(PyExceptionClass_Check(exc_type1));
1615
+ assert(PyExceptionClass_Check(exc_type2));
1616
+ if (likely(err == exc_type1 || err == exc_type2)) return 1;
1617
+ if (likely(PyExceptionClass_Check(err))) {
1618
+ return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2);
1619
+ }
1620
+ return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2));
1621
+ }
1622
+
1623
+ #endif
1624
+
1625
+
1626
+ /////////////// MathInitCode ///////////////
1627
+
1628
+ #if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS)
1629
+ #if !defined(_USE_MATH_DEFINES)
1630
+ #define _USE_MATH_DEFINES
1631
+ #endif
1632
+ #endif
1633
+ #include <math.h>
1634
+
1635
+ #ifdef NAN
1636
+ #define __PYX_NAN() ((float) NAN)
1637
+ #else
1638
+ static CYTHON_INLINE float __PYX_NAN() {
1639
+ // Initialize NaN. The sign is irrelevant, an exponent with all bits 1 and
1640
+ // a nonzero mantissa means NaN. If the first bit in the mantissa is 1, it is
1641
+ // a quiet NaN.
1642
+ float value;
1643
+ memset(&value, 0xFF, sizeof(value));
1644
+ return value;
1645
+ }
1646
+ #endif
1647
+
1648
+ #if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL)
1649
+ #define __Pyx_truncl trunc
1650
+ #else
1651
+ #define __Pyx_truncl truncl
1652
+ #endif
1653
+
1654
+
1655
+ /////////////// UtilityFunctionPredeclarations.proto ///////////////
1656
+
1657
+ typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding;
1658
+ const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/
1659
+
1660
+ /////////////// ForceInitThreads.proto ///////////////
1661
+ //@proto_block: utility_code_proto_before_types
1662
+
1663
+ #ifndef __PYX_FORCE_INIT_THREADS
1664
+ #define __PYX_FORCE_INIT_THREADS 0
1665
+ #endif
1666
+
1667
+ /////////////// InitThreads.init ///////////////
1668
+
1669
+ #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0
1670
+ PyEval_InitThreads();
1671
+ #endif
1672
+
1673
+
1674
+ /////////////// ModuleCreationPEP489 ///////////////
1675
+ //@substitute: naming
1676
+
1677
+ //#if CYTHON_PEP489_MULTI_PHASE_INIT
1678
/* Guard against loading this extension into more than one (sub-)interpreter:
 * the module's C-level globals cannot be shared safely.  Remembers the first
 * interpreter seen and returns -1 (with ImportError set) for any other.
 * NOTE: both preprocessor branches intentionally end in a dangling
 * "else if (...)" whose body is the shared block after the #endif. */
static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) {
    #if PY_VERSION_HEX >= 0x030700A1
    /* 3.7+: interpreters carry a stable 64-bit ID. */
    static PY_INT64_T main_interpreter_id = -1;
    PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp);
    if (main_interpreter_id == -1) {
        main_interpreter_id = current_id;
        /* PyInterpreterState_GetID() returns -1 on error. */
        return (unlikely(current_id == -1)) ? -1 : 0;
    } else if (unlikely(main_interpreter_id != current_id))
    #else
    /* Older CPython: compare the interpreter state pointer itself. */
    static PyInterpreterState *main_interpreter = NULL;
    PyInterpreterState *current_interpreter = PyThreadState_Get()->interp;
    if (!main_interpreter) {
        main_interpreter = current_interpreter;
    } else if (unlikely(main_interpreter != current_interpreter))
    #endif

    /* Shared error branch of the dangling "else if" in both variants above. */
    {
        PyErr_SetString(
            PyExc_ImportError,
            "Interpreter change detected - this module can only be loaded into one interpreter per process.");
        return -1;
    }
    return 0;
}
1703
+
1704
+ #if CYTHON_COMPILING_IN_LIMITED_API
1705
+ static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *module, const char* from_name, const char* to_name, int allow_none)
1706
+ #else
1707
+ static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none)
1708
+ #endif
1709
+ {
1710
+ PyObject *value = PyObject_GetAttrString(spec, from_name);
1711
+ int result = 0;
1712
+ if (likely(value)) {
1713
+ if (allow_none || value != Py_None) {
1714
+ #if CYTHON_COMPILING_IN_LIMITED_API
1715
+ result = PyModule_AddObject(module, to_name, value);
1716
+ #else
1717
+ result = PyDict_SetItemString(moddict, to_name, value);
1718
+ #endif
1719
+ }
1720
+ Py_DECREF(value);
1721
+ } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
1722
+ PyErr_Clear();
1723
+ } else {
1724
+ result = -1;
1725
+ }
1726
+ return result;
1727
+ }
1728
+
1729
/* PEP 489 "create" slot: build the module object from its spec and copy the
 * standard spec attributes into it.  Enforces single-interpreter use and
 * returns the already-existing module instance, if any. */
static CYTHON_SMALL_CODE PyObject* ${pymodule_create_func_cname}(PyObject *spec, PyModuleDef *def) {
    PyObject *module = NULL, *moddict, *modname;
    CYTHON_UNUSED_VAR(def);

    // For now, we only have exactly one module instance.
    if (__Pyx_check_single_interpreter())
        return NULL;
    if (${module_cname})
        return __Pyx_NewRef(${module_cname});

    modname = PyObject_GetAttrString(spec, "name");
    if (unlikely(!modname)) goto bad;

    module = PyModule_NewObject(modname);
    Py_DECREF(modname);
    if (unlikely(!module)) goto bad;

    #if CYTHON_COMPILING_IN_LIMITED_API
    // Limited API: attributes are set on the module object itself.
    moddict = module;
    #else
    moddict = PyModule_GetDict(module);
    if (unlikely(!moddict)) goto bad;
    // moddict is a borrowed reference
    #endif

    // "__path__" uses allow_none=0, i.e. a None value is not copied.
    if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad;
    if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad;
    if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad;
    if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad;

    return module;
bad:
    Py_XDECREF(module);
    return NULL;
}
1764
+ //#endif
1765
+
1766
+
1767
+ /////////////// CodeObjectCache.proto ///////////////
1768
+
1769
+ #if !CYTHON_COMPILING_IN_LIMITED_API
1770
+ typedef struct {
1771
+ PyCodeObject* code_object;
1772
+ int code_line;
1773
+ } __Pyx_CodeObjectCacheEntry;
1774
+
1775
+ struct __Pyx_CodeObjectCache {
1776
+ int count;
1777
+ int max_count;
1778
+ __Pyx_CodeObjectCacheEntry* entries;
1779
+ };
1780
+
1781
+ static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL};
1782
+
1783
+ static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line);
1784
+ static PyCodeObject *__pyx_find_code_object(int code_line);
1785
+ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
1786
+ #endif
1787
+
1788
+ /////////////// CodeObjectCache ///////////////
1789
+ // Note that errors are simply ignored in the code below.
1790
+ // This is just a cache, if a lookup or insertion fails - so what?
1791
+
1792
+ #if !CYTHON_COMPILING_IN_LIMITED_API
1793
+ static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) {
1794
+ int start = 0, mid = 0, end = count - 1;
1795
+ if (end >= 0 && code_line > entries[end].code_line) {
1796
+ return count;
1797
+ }
1798
+ while (start < end) {
1799
+ mid = start + (end - start) / 2;
1800
+ if (code_line < entries[mid].code_line) {
1801
+ end = mid;
1802
+ } else if (code_line > entries[mid].code_line) {
1803
+ start = mid + 1;
1804
+ } else {
1805
+ return mid;
1806
+ }
1807
+ }
1808
+ if (code_line <= entries[mid].code_line) {
1809
+ return mid;
1810
+ } else {
1811
+ return mid + 1;
1812
+ }
1813
+ }
1814
+
1815
+ static PyCodeObject *__pyx_find_code_object(int code_line) {
1816
+ PyCodeObject* code_object;
1817
+ int pos;
1818
+ if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) {
1819
+ return NULL;
1820
+ }
1821
+ pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
1822
+ if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) {
1823
+ return NULL;
1824
+ }
1825
+ code_object = __pyx_code_cache.entries[pos].code_object;
1826
+ Py_INCREF(code_object);
1827
+ return code_object;
1828
+ }
1829
+
1830
+ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
1831
+ int pos, i;
1832
+ __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries;
1833
+ if (unlikely(!code_line)) {
1834
+ return;
1835
+ }
1836
+ if (unlikely(!entries)) {
1837
+ entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry));
1838
+ if (likely(entries)) {
1839
+ __pyx_code_cache.entries = entries;
1840
+ __pyx_code_cache.max_count = 64;
1841
+ __pyx_code_cache.count = 1;
1842
+ entries[0].code_line = code_line;
1843
+ entries[0].code_object = code_object;
1844
+ Py_INCREF(code_object);
1845
+ }
1846
+ return;
1847
+ }
1848
+ pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
1849
+ if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) {
1850
+ PyCodeObject* tmp = entries[pos].code_object;
1851
+ entries[pos].code_object = code_object;
1852
+ Py_DECREF(tmp);
1853
+ return;
1854
+ }
1855
+ if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
1856
+ int new_max = __pyx_code_cache.max_count + 64;
1857
+ entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
1858
+ __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry));
1859
+ if (unlikely(!entries)) {
1860
+ return;
1861
+ }
1862
+ __pyx_code_cache.entries = entries;
1863
+ __pyx_code_cache.max_count = new_max;
1864
+ }
1865
+ for (i=__pyx_code_cache.count; i>pos; i--) {
1866
+ entries[i] = entries[i-1];
1867
+ }
1868
+ entries[pos].code_line = code_line;
1869
+ entries[pos].code_object = code_object;
1870
+ __pyx_code_cache.count++;
1871
+ Py_INCREF(code_object);
1872
+ }
1873
+ #endif
1874
+
1875
+ /////////////// CodeObjectCache.cleanup ///////////////
1876
+
1877
#if !CYTHON_COMPILING_IN_LIMITED_API
/* Module-cleanup fragment: detach the code object cache first, then release
 * all cached code objects and free the entry table. */
if (__pyx_code_cache.entries) {
    __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries;
    int i, count = __pyx_code_cache.count;
    /* Reset the cache to an empty state before freeing anything. */
    __pyx_code_cache.count = 0;
    __pyx_code_cache.max_count = 0;
    __pyx_code_cache.entries = NULL;
    for (i=0; i<count; i++) {
        Py_DECREF(entries[i].code_object);
    }
    PyMem_Free(entries);
}
#endif
1890
+
1891
+ /////////////// CheckBinaryVersion.proto ///////////////
1892
+
1893
+ static unsigned long __Pyx_get_runtime_version(void);
1894
+ static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer);
1895
+
1896
+ /////////////// CheckBinaryVersion ///////////////
1897
+
1898
/* Determine the running Python's version, packed PY_VERSION_HEX-style
 * (major/minor/micro in the top three bytes, release bits zeroed).
 * We will probably never need the alpha/beta status, so we avoid the
 * complexity of parsing it. */
static unsigned long __Pyx_get_runtime_version(void) {
#if __PYX_LIMITED_VERSION_HEX >= 0x030B00A4
    /* 3.11+: Py_Version already has this layout; mask the release bits. */
    return Py_Version & ~0xFFUL;
#else
    /* Parse "major.minor.micro ..." out of Py_GetVersion(), packing each
     * dot-separated number into successively lower bytes of the result. */
    const char* version_str = Py_GetVersion();
    unsigned long packed = 0;
    unsigned long byte_factor = 0x01000000UL;
    unsigned int number = 0;
    int pos = 0;
    while (byte_factor) {
        while ('0' <= version_str[pos] && version_str[pos] <= '9') {
            number = number * 10 + (unsigned int) (version_str[pos] - '0');
            ++pos;
        }
        packed += byte_factor * number;
        if (version_str[pos] != '.')
            break;
        number = 0;
        byte_factor >>= 8;
        ++pos;
    }
    return packed;
#endif
}
1923
+
1924
/* Compare compile-time vs. runtime Python versions (major.minor only).
 * Returns 0 for an exact match, 1 for an accepted newer runtime, and
 * otherwise the result of PyErr_WarnEx() (0, or -1 if the warning raised). */
static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) {
    // Mask selecting the major.minor bytes of the packed version words.
    const unsigned long MAJOR_MINOR = 0xFFFF0000UL;
    if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR))
        return 0;
    if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR)))
        return 1;

    {
        char message[200];
        PyOS_snprintf(message, sizeof(message),
                      "compile time Python version %d.%d "
                      "of module '%.100s' "
                      "%s "
                      "runtime version %d.%d",
                      (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF),
                      __Pyx_MODULE_NAME,
                      (allow_newer) ? "was newer than" : "does not match",
                      (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF)
        );
        // returns 0 or -1
        return PyErr_WarnEx(NULL, message, 1);
    }
}
1948
+
1949
+ /////////////// IsLittleEndian.proto ///////////////
1950
+
1951
+ static CYTHON_INLINE int __Pyx_Is_Little_Endian(void);
1952
+
1953
+ /////////////// IsLittleEndian ///////////////
1954
+
1955
+ static CYTHON_INLINE int __Pyx_Is_Little_Endian(void)
1956
+ {
1957
+ union {
1958
+ uint32_t u32;
1959
+ uint8_t u8[4];
1960
+ } S;
1961
+ S.u32 = 0x01020304;
1962
+ return S.u8[0] == 4;
1963
+ }
1964
+
1965
+ /////////////// Refnanny.proto ///////////////
1966
+
1967
/* Reference-count debugging hooks.  When CYTHON_REFNANNY is enabled, all
 * INCREF/DECREF style macros route through a function table imported from
 * the "refnanny" module; otherwise they collapse to the plain CPython
 * macros (or to nothing, for the pure tracking ones).
 * Fix: __Pyx_RefNannyFinishContextNogil() was defined a third time,
 * unconditionally, after the WITH_THREAD #if/#else/#endif - a macro
 * redefinition that clashed with the non-threaded definition.  The
 * duplicate is removed; each branch now defines it exactly once. */
#ifndef CYTHON_REFNANNY
  #define CYTHON_REFNANNY 0
#endif

#if CYTHON_REFNANNY
  /* Function table exported by the refnanny module (see Refnanny impl). */
  typedef struct {
    void (*INCREF)(void*, PyObject*, Py_ssize_t);
    void (*DECREF)(void*, PyObject*, Py_ssize_t);
    void (*GOTREF)(void*, PyObject*, Py_ssize_t);
    void (*GIVEREF)(void*, PyObject*, Py_ssize_t);
    void* (*SetupContext)(const char*, Py_ssize_t, const char*);
    void (*FinishContext)(void**);
  } __Pyx_RefNannyAPIStruct;
  static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
  static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); /*proto*/
  #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
#ifdef WITH_THREAD
  /* The nogil variants must (re-)acquire the GIL around the refnanny call. */
  #define __Pyx_RefNannySetupContext(name, acquire_gil) \
          if (acquire_gil) { \
              PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure(); \
              __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__)); \
              PyGILState_Release(__pyx_gilstate_save); \
          } else { \
              __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__)); \
          }
  #define __Pyx_RefNannyFinishContextNogil() { \
              PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure(); \
              __Pyx_RefNannyFinishContext(); \
              PyGILState_Release(__pyx_gilstate_save); \
          }
#else
  /* No threads: no GIL handling needed. */
  #define __Pyx_RefNannySetupContext(name, acquire_gil) \
          __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__))
  #define __Pyx_RefNannyFinishContextNogil() __Pyx_RefNannyFinishContext()
#endif
  #define __Pyx_RefNannyFinishContext() \
          __Pyx_RefNanny->FinishContext(&__pyx_refnanny)
  #define __Pyx_INCREF(r)  __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), (__LINE__))
  #define __Pyx_DECREF(r)  __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), (__LINE__))
  #define __Pyx_GOTREF(r)  __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), (__LINE__))
  #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), (__LINE__))
  #define __Pyx_XINCREF(r)  do { if((r) == NULL); else {__Pyx_INCREF(r); }} while(0)
  #define __Pyx_XDECREF(r)  do { if((r) == NULL); else {__Pyx_DECREF(r); }} while(0)
  #define __Pyx_XGOTREF(r)  do { if((r) == NULL); else {__Pyx_GOTREF(r); }} while(0)
  #define __Pyx_XGIVEREF(r) do { if((r) == NULL); else {__Pyx_GIVEREF(r);}} while(0)
#else
  /* Refnanny disabled: plain CPython refcounting, tracking macros vanish. */
  #define __Pyx_RefNannyDeclarations
  #define __Pyx_RefNannySetupContext(name, acquire_gil)
  #define __Pyx_RefNannyFinishContextNogil()
  #define __Pyx_RefNannyFinishContext()
  #define __Pyx_INCREF(r) Py_INCREF(r)
  #define __Pyx_DECREF(r) Py_DECREF(r)
  #define __Pyx_GOTREF(r)
  #define __Pyx_GIVEREF(r)
  #define __Pyx_XINCREF(r) Py_XINCREF(r)
  #define __Pyx_XDECREF(r) Py_XDECREF(r)
  #define __Pyx_XGOTREF(r)
  #define __Pyx_XGIVEREF(r)
#endif /* CYTHON_REFNANNY */

/* Assign-and-release helpers: the new value is stored before the old one is
 * released, so the slot never holds a freed pointer. */
#define __Pyx_Py_XDECREF_SET(r, v) do {                         \
        PyObject *tmp = (PyObject *) r;                         \
        r = v; Py_XDECREF(tmp);                                 \
    } while (0)
#define __Pyx_XDECREF_SET(r, v) do {                            \
        PyObject *tmp = (PyObject *) r;                         \
        r = v; __Pyx_XDECREF(tmp);                              \
    } while (0)
#define __Pyx_DECREF_SET(r, v) do {                             \
        PyObject *tmp = (PyObject *) r;                         \
        r = v; __Pyx_DECREF(tmp);                               \
    } while (0)

#define __Pyx_CLEAR(r)  do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0)
#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0)
2047
+
2048
+ /////////////// Refnanny ///////////////
2049
+
2050
#if CYTHON_REFNANNY
/* Import 'modname' and fetch its "RefNannyAPI" attribute, interpreted as the
 * address of the API function table.  Returns NULL (with an exception set)
 * on failure. */
static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
    PyObject *module = NULL, *api_obj = NULL;
    void *api_ptr = NULL;
    module = PyImport_ImportModule(modname);
    if (!module) goto end;
    api_obj = PyObject_GetAttrString(module, "RefNannyAPI");
    if (!api_obj) goto end;
    api_ptr = PyLong_AsVoidPtr(api_obj);
end:
    Py_XDECREF(api_obj);
    Py_XDECREF(module);
    return (__Pyx_RefNannyAPIStruct *)api_ptr;
}
#endif /* CYTHON_REFNANNY */
2065
+
2066
+
2067
+ /////////////// ImportRefnannyAPI ///////////////
2068
+
2069
+ #if CYTHON_REFNANNY
2070
+ __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
2071
+ if (!__Pyx_RefNanny) {
2072
+ PyErr_Clear();
2073
+ __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
2074
+ if (!__Pyx_RefNanny)
2075
+ Py_FatalError("failed to import 'refnanny' module");
2076
+ }
2077
+ #endif
2078
+
2079
+
2080
+ /////////////// RegisterModuleCleanup.proto ///////////////
2081
+ //@substitute: naming
2082
+
2083
+ static void ${cleanup_cname}(PyObject *self); /*proto*/
2084
+
2085
+ #if PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY
2086
+ static int __Pyx_RegisterCleanup(void); /*proto*/
2087
+ #else
2088
+ #define __Pyx_RegisterCleanup() (0)
2089
+ #endif
2090
+
2091
+ /////////////// RegisterModuleCleanup ///////////////
2092
+ //@substitute: naming
2093
+
2094
+ #if PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY
2095
+ static PyObject* ${cleanup_cname}_atexit(PyObject *module, PyObject *unused) {
2096
+ CYTHON_UNUSED_VAR(unused);
2097
+ ${cleanup_cname}(module);
2098
+ Py_INCREF(Py_None); return Py_None;
2099
+ }
2100
+
2101
/* Register the module cleanup function with the "atexit" module.
 * Returns 0 on success, -1 on failure (with an exception set). */
static int __Pyx_RegisterCleanup(void) {
    // Don't use Py_AtExit because that has a 32-call limit and is called
    // after python finalization.
    // Also, we try to prepend the cleanup function to "atexit._exithandlers"
    // in Py2 because CPython runs them last-to-first. Being run last allows
    // user exit code to run before us that may depend on the globals
    // and cached objects that we are about to clean up.

    static PyMethodDef cleanup_def = {
        "__cleanup", (PyCFunction)${cleanup_cname}_atexit, METH_NOARGS, 0};

    PyObject *cleanup_func = 0;
    PyObject *atexit = 0;
    PyObject *reg = 0;
    PyObject *args = 0;
    PyObject *res = 0;
    int ret = -1;

    cleanup_func = PyCFunction_New(&cleanup_def, 0);
    if (!cleanup_func)
        goto bad;

    atexit = PyImport_ImportModule("atexit");
    if (!atexit)
        goto bad;
    // Preferred path: prepend a (func, args, kwargs) triple directly to the
    // private handler list so our cleanup runs *last*.
    reg = PyObject_GetAttrString(atexit, "_exithandlers");
    if (reg && PyList_Check(reg)) {
        PyObject *a, *kw;
        a = PyTuple_New(0);
        kw = PyDict_New();
        if (!a || !kw) {
            Py_XDECREF(a);
            Py_XDECREF(kw);
            goto bad;
        }
        args = PyTuple_Pack(3, cleanup_func, a, kw);
        Py_DECREF(a);
        Py_DECREF(kw);
        if (!args)
            goto bad;
        ret = PyList_Insert(reg, 0, args);
    } else {
        // Fallback: use the public atexit.register() API.
        if (!reg)
            PyErr_Clear();
        Py_XDECREF(reg);
        reg = PyObject_GetAttrString(atexit, "register");
        if (!reg)
            goto bad;
        args = PyTuple_Pack(1, cleanup_func);
        if (!args)
            goto bad;
        res = PyObject_CallObject(reg, args);
        if (!res)
            goto bad;
        ret = 0;
    }
bad:
    Py_XDECREF(cleanup_func);
    Py_XDECREF(atexit);
    Py_XDECREF(reg);
    Py_XDECREF(args);
    Py_XDECREF(res);
    return ret;
}
2165
+ #endif
2166
+
2167
+ /////////////// FastGil.init ///////////////
2168
+ #ifdef WITH_THREAD
2169
+ __Pyx_FastGilFuncInit();
2170
+ #endif
2171
+
2172
+ /////////////// NoFastGil.proto ///////////////
2173
+ //@proto_block: utility_code_proto_before_types
2174
+
2175
+ #define __Pyx_PyGILState_Ensure PyGILState_Ensure
2176
+ #define __Pyx_PyGILState_Release PyGILState_Release
2177
+ #define __Pyx_FastGIL_Remember()
2178
+ #define __Pyx_FastGIL_Forget()
2179
+ #define __Pyx_FastGilFuncInit()
2180
+
2181
+ /////////////// FastGil.proto ///////////////
2182
+ //@proto_block: utility_code_proto_before_types
2183
+
2184
+ #if CYTHON_FAST_GIL
2185
+
2186
+ struct __Pyx_FastGilVtab {
2187
+ PyGILState_STATE (*Fast_PyGILState_Ensure)(void);
2188
+ void (*Fast_PyGILState_Release)(PyGILState_STATE oldstate);
2189
+ void (*FastGIL_Remember)(void);
2190
+ void (*FastGIL_Forget)(void);
2191
+ };
2192
+
2193
+ static void __Pyx_FastGIL_Noop(void) {}
2194
+ static struct __Pyx_FastGilVtab __Pyx_FastGilFuncs = {
2195
+ PyGILState_Ensure,
2196
+ PyGILState_Release,
2197
+ __Pyx_FastGIL_Noop,
2198
+ __Pyx_FastGIL_Noop
2199
+ };
2200
+
2201
+ static void __Pyx_FastGilFuncInit(void);
2202
+
2203
+ #define __Pyx_PyGILState_Ensure __Pyx_FastGilFuncs.Fast_PyGILState_Ensure
2204
+ #define __Pyx_PyGILState_Release __Pyx_FastGilFuncs.Fast_PyGILState_Release
2205
+ #define __Pyx_FastGIL_Remember __Pyx_FastGilFuncs.FastGIL_Remember
2206
+ #define __Pyx_FastGIL_Forget __Pyx_FastGilFuncs.FastGIL_Forget
2207
+
2208
+ #ifdef WITH_THREAD
2209
+ #ifndef CYTHON_THREAD_LOCAL
2210
+ #if defined(__cplusplus) && __cplusplus >= 201103L
2211
+ #define CYTHON_THREAD_LOCAL thread_local
2212
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112
2213
+ #define CYTHON_THREAD_LOCAL _Thread_local
2214
+ #elif defined(__GNUC__)
2215
+ #define CYTHON_THREAD_LOCAL __thread
2216
+ #elif defined(_MSC_VER)
2217
+ #define CYTHON_THREAD_LOCAL __declspec(thread)
2218
+ #endif
2219
+ #endif
2220
+ #endif
2221
+
2222
+ #else
2223
+ #define __Pyx_PyGILState_Ensure PyGILState_Ensure
2224
+ #define __Pyx_PyGILState_Release PyGILState_Release
2225
+ #define __Pyx_FastGIL_Remember()
2226
+ #define __Pyx_FastGIL_Forget()
2227
+ #define __Pyx_FastGilFuncInit()
2228
+ #endif
2229
+
2230
+ /////////////// FastGil ///////////////
2231
+ // The implementations of PyGILState_Ensure/Release calls PyThread_get_key_value
2232
+ // several times which is turns out to be quite slow (slower in fact than
2233
+ // acquiring the GIL itself). Simply storing it in a thread local for the
2234
+ // common case is much faster.
2235
+ // To make optimal use of this thread local, we attempt to share it between
2236
+ // modules.
2237
+
2238
+ #if CYTHON_FAST_GIL
2239
+
2240
+ #define __Pyx_FastGIL_ABI_module __PYX_ABI_MODULE_NAME
2241
+ #define __Pyx_FastGIL_PyCapsuleName "FastGilFuncs"
2242
+ #define __Pyx_FastGIL_PyCapsule \
2243
+ __Pyx_FastGIL_ABI_module "." __Pyx_FastGIL_PyCapsuleName
2244
+
2245
+ #ifdef CYTHON_THREAD_LOCAL
2246
+
2247
+ #include "pythread.h"
2248
+ #include "pystate.h"
2249
+
2250
+ static CYTHON_THREAD_LOCAL PyThreadState *__Pyx_FastGil_tcur = NULL;
2251
+ static CYTHON_THREAD_LOCAL int __Pyx_FastGil_tcur_depth = 0;
2252
+ static int __Pyx_FastGil_autoTLSkey = -1;
2253
+
2254
+ static CYTHON_INLINE void __Pyx_FastGIL_Remember0(void) {
2255
+ ++__Pyx_FastGil_tcur_depth;
2256
+ }
2257
+
2258
+ static CYTHON_INLINE void __Pyx_FastGIL_Forget0(void) {
2259
+ if (--__Pyx_FastGil_tcur_depth == 0) {
2260
+ __Pyx_FastGil_tcur = NULL;
2261
+ }
2262
+ }
2263
+
2264
+ static CYTHON_INLINE PyThreadState *__Pyx_FastGil_get_tcur(void) {
2265
+ PyThreadState *tcur = __Pyx_FastGil_tcur;
2266
+ if (tcur == NULL) {
2267
+ tcur = __Pyx_FastGil_tcur = (PyThreadState*)PyThread_get_key_value(__Pyx_FastGil_autoTLSkey);
2268
+ }
2269
+ return tcur;
2270
+ }
2271
+
2272
+ static PyGILState_STATE __Pyx_FastGil_PyGILState_Ensure(void) {
2273
+ int current;
2274
+ PyThreadState *tcur;
2275
+ __Pyx_FastGIL_Remember0();
2276
+ tcur = __Pyx_FastGil_get_tcur();
2277
+ if (tcur == NULL) {
2278
+ // Uninitialized, need to initialize now.
2279
+ return PyGILState_Ensure();
2280
+ }
2281
+ current = tcur == __Pyx_PyThreadState_Current;
2282
+ if (current == 0) {
2283
+ PyEval_RestoreThread(tcur);
2284
+ }
2285
+ ++tcur->gilstate_counter;
2286
+ return current ? PyGILState_LOCKED : PyGILState_UNLOCKED;
2287
+ }
2288
+
2289
+ static void __Pyx_FastGil_PyGILState_Release(PyGILState_STATE oldstate) {
2290
+ PyThreadState *tcur = __Pyx_FastGil_get_tcur();
2291
+ __Pyx_FastGIL_Forget0();
2292
+ if (tcur->gilstate_counter == 1) {
2293
+ // This is the last lock, do all the cleanup as well.
2294
+ PyGILState_Release(oldstate);
2295
+ } else {
2296
+ --tcur->gilstate_counter;
2297
+ if (oldstate == PyGILState_UNLOCKED) {
2298
+ PyEval_SaveThread();
2299
+ }
2300
+ }
2301
+ }
2302
+
2303
/* Probe for CPython's internal auto-TLS key (the one whose value for this
 * thread equals PyGILState_GetThisThreadState()).  If found, install the
 * fast GIL functions and publish them in a capsule on the shared ABI module
 * so other Cython modules can reuse the same function table. */
static void __Pyx_FastGilFuncInit0(void) {
    /* Try to detect autoTLSkey. */
    int key;
    void* this_thread_state = (void*) PyGILState_GetThisThreadState();
    // Probe the first 100 TLS keys; give up (leaving the slow defaults) if
    // none of them maps to this thread's state.
    for (key = 0; key < 100; key++) {
        if (PyThread_get_key_value(key) == this_thread_state) {
            __Pyx_FastGil_autoTLSkey = key;
            break;
        }
    }
    if (__Pyx_FastGil_autoTLSkey != -1) {
        PyObject* capsule = NULL;
        PyObject* abi_module = NULL;
        // Swap the function table entries over to the fast implementations.
        __Pyx_PyGILState_Ensure = __Pyx_FastGil_PyGILState_Ensure;
        __Pyx_PyGILState_Release = __Pyx_FastGil_PyGILState_Release;
        __Pyx_FastGIL_Remember = __Pyx_FastGIL_Remember0;
        __Pyx_FastGIL_Forget = __Pyx_FastGIL_Forget0;
        // Publish the table for other modules; failures here are ignored
        // (each module then simply initialises its own copy).
        capsule = PyCapsule_New(&__Pyx_FastGilFuncs, __Pyx_FastGIL_PyCapsule, NULL);
        if (capsule) {
            abi_module = __Pyx_PyImport_AddModuleRef(__Pyx_FastGIL_ABI_module);
            if (abi_module) {
                PyObject_SetAttrString(abi_module, __Pyx_FastGIL_PyCapsuleName, capsule);
                Py_DECREF(abi_module);
            }
        }
        Py_XDECREF(capsule);
    }
}
2331
+
2332
+ #else
2333
+
2334
+ static void __Pyx_FastGilFuncInit0(void) {
2335
+ }
2336
+
2337
+ #endif
2338
+
2339
+ static void __Pyx_FastGilFuncInit(void) {
2340
+ struct __Pyx_FastGilVtab* shared = (struct __Pyx_FastGilVtab*)PyCapsule_Import(__Pyx_FastGIL_PyCapsule, 1);
2341
+ if (shared) {
2342
+ __Pyx_FastGilFuncs = *shared;
2343
+ } else {
2344
+ PyErr_Clear();
2345
+ __Pyx_FastGilFuncInit0();
2346
+ }
2347
+ }
2348
+
2349
+ #endif
2350
+
2351
+ ///////////////////// UtilityCodePragmas /////////////////////////
2352
+
2353
+ #ifdef _MSC_VER
2354
+ #pragma warning( push )
2355
+ /* Warning 4127: conditional expression is constant
2356
+ * Cython uses constant conditional expressions to allow in inline functions to be optimized at
2357
+ * compile-time, so this warning is not useful
2358
+ */
2359
+ #pragma warning( disable : 4127 )
2360
+ #endif
2361
+
2362
+ ///////////////////// UtilityCodePragmasEnd //////////////////////
2363
+
2364
+ #ifdef _MSC_VER
2365
+ #pragma warning( pop ) /* undo whatever Cython has done to warnings */
2366
+ #endif
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/ObjectHandling.c ADDED
The diff for this file is too large to render. See raw diff
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/TestCyUtilityLoader.pyx ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ ########## TestCyUtilityLoader ##########
2
+ #@requires: OtherUtility
3
+
4
+ test {{cy_loader}} impl
5
+
6
+
7
+ ########## OtherUtility ##########
8
+ req {{cy_loader}} impl
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/TestCythonScope.pyx ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
########## TestClass ##########
# These utilities are for testing purposes

# The "cythonscope" test calls METH_O functions with their (self, arg) signature.
# cython: always_allow_keywords=False

from __future__ import print_function

cdef extern from *:
    # Forward declaration; the implementation lives in the TestDep
    # utility section below (cname '__pyx_test_dep').
    cdef object __pyx_test_dep(object)

@cname('__pyx_TestClass')
cdef class TestClass(object):
    cdef public int value

    def __init__(self, int value):
        self.value = value

    def __str__(self):
        return f'TestClass({self.value})'

    cdef cdef_method(self, int value):
        print('Hello from cdef_method', value)

    cpdef cpdef_method(self, int value):
        print('Hello from cpdef_method', value)

    def def_method(self, int value):
        print('Hello from def_method', value)

    @cname('cdef_cname')
    cdef cdef_cname_method(self, int value):
        print("Hello from cdef_cname_method", value)

    @cname('cpdef_cname')
    cpdef cpdef_cname_method(self, int value):
        print("Hello from cpdef_cname_method", value)

    @cname('def_cname')
    def def_cname_method(self, int value):
        print("Hello from def_cname_method", value)

# Module-level helper exercising a call into another utility section.
@cname('__pyx_test_call_other_cy_util')
cdef test_call(obj):
    print('test_call')
    __pyx_test_dep(obj)

# C-level constructor for TestClass, callable from generated C code.
@cname('__pyx_TestClass_New')
cdef _testclass_new(int value):
    return TestClass(value)

########### TestDep ##########

from __future__ import print_function

@cname('__pyx_test_dep')
cdef test_dep(obj):
    print('test_dep', obj)

########## TestScope ##########

@cname('__pyx_testscope')
cdef object _testscope(int value):
    return f"hello from cython scope, value={value}"

########## View.TestScope ##########

@cname('__pyx_view_testscope')
cdef object _testscope(int value):
    return f"hello from cython.view scope, value={value}"
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/TestUtilityLoader.c ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ////////// TestUtilityLoader.proto //////////
2
+ test {{loader}} prototype
3
+
4
+ ////////// TestUtilityLoader //////////
5
+ //@requires: OtherUtility
6
+ test {{loader}} impl
7
+
8
+ ////////// OtherUtility.proto //////////
9
+ req {{loader}} proto
10
+
11
+ ////////// OtherUtility //////////
12
+ req {{loader}} impl
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/__pycache__/Dataclasses.cpython-311.pyc ADDED
Binary file (5.17 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utility/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (2.21 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (2.14 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc ADDED
Binary file (4.59 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc ADDED
Binary file (20.9 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/debug.cpython-311.pyc ADDED
Binary file (6.74 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc ADDED
Binary file (1.74 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc ADDED
Binary file (8.63 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/ext.cpython-311.pyc ADDED
Binary file (43.4 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/filters.cpython-311.pyc ADDED
Binary file (76.2 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc ADDED
Binary file (19.6 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc ADDED
Binary file (35.6 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/nativetypes.cpython-311.pyc ADDED
Binary file (7.98 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc ADDED
Binary file (64.5 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc ADDED
Binary file (2.87 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/parser.cpython-311.pyc ADDED
Binary file (59.4 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/sandbox.cpython-311.pyc ADDED
Binary file (18.8 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/tests.cpython-311.pyc ADDED
Binary file (9.26 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/__pycache__/utils.cpython-311.pyc ADDED
Binary file (37.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/bccache.py ADDED
@@ -0,0 +1,406 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """The optional bytecode cache system. This is useful if you have very
2
+ complex template situations and the compilation of all those templates
3
+ slows down your application too much.
4
+
5
+ Situations where this is useful are often forking web applications that
6
+ are initialized on the first request.
7
+ """
8
+ import errno
9
+ import fnmatch
10
+ import marshal
11
+ import os
12
+ import pickle
13
+ import stat
14
+ import sys
15
+ import tempfile
16
+ import typing as t
17
+ from hashlib import sha1
18
+ from io import BytesIO
19
+ from types import CodeType
20
+
21
if t.TYPE_CHECKING:
    import typing_extensions as te
    from .environment import Environment

    class _MemcachedClient(te.Protocol):
        # Structural (duck) type describing the minimal memcached client
        # interface that MemcachedBytecodeCache requires: ``get`` and
        # ``set``.  Only used for static type checking.
        def get(self, key: str) -> bytes:
            ...

        def set(self, key: str, value: bytes, timeout: t.Optional[int] = None) -> None:
            ...
31
+
32
+
33
bc_version = 5
# Magic bytes identifying Jinja bytecode cache files.  The Python major
# and minor versions are baked in so bytecode written under one Python
# release is never loaded by an incompatible one.
bc_magic = (
    b"j2"
    + pickle.dumps(bc_version, 2)
    + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2)
)


class Bucket:
    """Container for the compiled bytecode of a single template.

    Buckets are created and initialized by the bytecode cache and passed
    to the loading functions.  Each bucket carries a checksum assigned by
    the cache, which is used to reject outdated cache material
    automatically, so concrete cache subclasses never have to deal with
    invalidation themselves.
    """

    def __init__(self, environment: "Environment", key: str, checksum: str) -> None:
        self.environment = environment
        self.key = key
        self.checksum = checksum
        self.reset()

    def reset(self) -> None:
        """Reset the bucket (unload the bytecode)."""
        self.code: t.Optional[CodeType] = None

    def load_bytecode(self, f: t.BinaryIO) -> None:
        """Load bytecode from a file or file-like object."""
        # A wrong magic header means the entry was written by a different
        # Jinja/Python combination -- treat it as a cache miss.
        if f.read(len(bc_magic)) != bc_magic:
            self.reset()
            return
        # The template source changed since this entry was written.
        if pickle.load(f) != self.checksum:
            self.reset()
            return
        # A failing marshal.load also means the entry must be rebuilt.
        try:
            self.code = marshal.load(f)
        except (EOFError, ValueError, TypeError):
            self.reset()

    def write_bytecode(self, f: t.IO[bytes]) -> None:
        """Dump the bytecode into the file or file-like object passed."""
        if self.code is None:
            raise TypeError("can't write empty bucket")
        f.write(bc_magic)
        pickle.dump(self.checksum, f, 2)
        marshal.dump(self.code, f)

    def bytecode_from_string(self, string: bytes) -> None:
        """Load bytecode from bytes."""
        self.load_bytecode(BytesIO(string))

    def bytecode_to_string(self) -> bytes:
        """Return the bytecode as bytes."""
        out_buffer = BytesIO()
        self.write_bytecode(out_buffer)
        return out_buffer.getvalue()
99
+
100
+
101
class BytecodeCache:
    """To implement your own bytecode cache you have to subclass this class
    and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of
    these methods are passed a :class:`~jinja2.bccache.Bucket`.

    A very basic bytecode cache that saves the bytecode on the file system::

        from os import path

        class MyCache(BytecodeCache):

            def __init__(self, directory):
                self.directory = directory

            def load_bytecode(self, bucket):
                filename = path.join(self.directory, bucket.key)
                if path.exists(filename):
                    with open(filename, 'rb') as f:
                        bucket.load_bytecode(f)

            def dump_bytecode(self, bucket):
                filename = path.join(self.directory, bucket.key)
                with open(filename, 'wb') as f:
                    bucket.write_bytecode(f)

    A more advanced version of a filesystem based bytecode cache is part of
    Jinja.
    """

    def load_bytecode(self, bucket: "Bucket") -> None:
        """Subclasses have to override this method to load bytecode into a
        bucket. If they are not able to find code in the cache for the
        bucket, it must not do anything.
        """
        raise NotImplementedError()

    def dump_bytecode(self, bucket: "Bucket") -> None:
        """Subclasses have to override this method to write the bytecode
        from a bucket back to the cache. If it unable to do so it must not
        fail silently but raise an exception.
        """
        raise NotImplementedError()

    def clear(self) -> None:
        """Clears the cache. This method is not used by Jinja but should be
        implemented to allow applications to clear the bytecode cache used
        by a particular environment.
        """

    def get_cache_key(self, name: str, filename: t.Optional[str] = None) -> str:
        """Return the unique hash key for this template name and filename.

        When *filename* is given it is mixed into the key so that two
        templates sharing a name but loaded from different files do not
        collide in the cache.
        """
        # Local renamed from ``hash`` to avoid shadowing the builtin.
        key_hash = sha1(name.encode("utf-8"))

        if filename is not None:
            # Fix: the filename must participate in the key.  Previously a
            # constant string was hashed here, which made every template
            # with the same name map to the same cache entry regardless of
            # which file it came from.
            key_hash.update(f"|{filename}".encode())

        return key_hash.hexdigest()

    def get_source_checksum(self, source: str) -> str:
        """Return a checksum for the template source."""
        return sha1(source.encode("utf-8")).hexdigest()

    def get_bucket(
        self,
        environment: "Environment",
        name: str,
        filename: t.Optional[str],
        source: str,
    ) -> "Bucket":
        """Return a cache bucket for the given template. All arguments are
        mandatory but filename may be `None`.
        """
        key = self.get_cache_key(name, filename)
        checksum = self.get_source_checksum(source)
        bucket = Bucket(environment, key, checksum)
        self.load_bytecode(bucket)
        return bucket

    def set_bucket(self, bucket: "Bucket") -> None:
        """Put the bucket into the cache (delegates to :meth:`dump_bytecode`)."""
        self.dump_bytecode(bucket)
+ self.dump_bytecode(bucket)
184
+
185
+
186
class FileSystemBytecodeCache(BytecodeCache):
    """A bytecode cache that stores bytecode on the filesystem. It accepts
    two arguments: The directory where the cache items are stored and a
    pattern string that is used to build the filename.

    If no directory is specified a default cache directory is selected. On
    Windows the user's temp directory is used, on UNIX systems a directory
    is created for the user in the system temp directory.

    The pattern can be used to have multiple separate caches operate on the
    same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s``
    is replaced with the cache key.

    >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')

    This bytecode cache supports clearing of the cache using the clear method.
    """

    def __init__(
        self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache"
    ) -> None:
        if directory is None:
            directory = self._get_default_cache_dir()
        self.directory = directory
        self.pattern = pattern

    def _get_default_cache_dir(self) -> str:
        # Pick (creating it if necessary) a per-user cache directory under
        # the system temp dir, rejecting any directory whose owner or
        # permissions could let other users tamper with cached bytecode.
        def _unsafe_dir() -> "te.NoReturn":
            raise RuntimeError(
                "Cannot determine safe temp directory.  You "
                "need to explicitly provide one."
            )

        tmpdir = tempfile.gettempdir()

        # On windows the temporary directory is used specific unless
        # explicitly forced otherwise.  We can just use that.
        if os.name == "nt":
            return tmpdir
        if not hasattr(os, "getuid"):
            # No way to identify the current user, so no way to build a
            # private per-user directory safely.
            _unsafe_dir()

        dirname = f"_jinja2-cache-{os.getuid()}"
        actual_dir = os.path.join(tmpdir, dirname)

        try:
            # Create with owner-only permissions (0700); an existing
            # directory is tolerated and validated below.
            os.mkdir(actual_dir, stat.S_IRWXU)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        try:
            # Force owner-only permissions, then verify the directory is
            # really a directory owned by us with mode 0700 (rejects a
            # pre-created directory with wrong owner/mode, e.g. a symlink
            # planted by another user).
            os.chmod(actual_dir, stat.S_IRWXU)
            actual_dir_stat = os.lstat(actual_dir)
            if (
                actual_dir_stat.st_uid != os.getuid()
                or not stat.S_ISDIR(actual_dir_stat.st_mode)
                or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
            ):
                _unsafe_dir()
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        # NOTE(review): the owner/mode check is deliberately repeated after
        # the chmod block above -- presumably to re-validate the directory
        # even when the chmod path raised EEXIST; confirm against upstream
        # history before simplifying.
        actual_dir_stat = os.lstat(actual_dir)
        if (
            actual_dir_stat.st_uid != os.getuid()
            or not stat.S_ISDIR(actual_dir_stat.st_mode)
            or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
        ):
            _unsafe_dir()

        return actual_dir

    def _get_cache_filename(self, bucket: Bucket) -> str:
        # Expand the filename pattern with the bucket's cache key.
        return os.path.join(self.directory, self.pattern % (bucket.key,))

    def load_bytecode(self, bucket: Bucket) -> None:
        filename = self._get_cache_filename(bucket)

        # Don't test for existence before opening the file, since the
        # file could disappear after the test before the open.
        try:
            f = open(filename, "rb")
        except (FileNotFoundError, IsADirectoryError, PermissionError):
            # PermissionError can occur on Windows when an operation is
            # in progress, such as calling clear().
            return

        with f:
            bucket.load_bytecode(f)

    def dump_bytecode(self, bucket: Bucket) -> None:
        # Write to a temporary file, then rename to the real name after
        # writing.  This avoids another process reading the file before
        # it is fully written.
        name = self._get_cache_filename(bucket)
        f = tempfile.NamedTemporaryFile(
            mode="wb",
            dir=os.path.dirname(name),
            prefix=os.path.basename(name),
            suffix=".tmp",
            delete=False,
        )

        def remove_silent() -> None:
            # Best-effort cleanup of the temporary file.
            try:
                os.remove(f.name)
            except OSError:
                # Another process may have called clear().  On Windows,
                # another program may be holding the file open.
                pass

        try:
            with f:
                bucket.write_bytecode(f)
        except BaseException:
            remove_silent()
            raise

        try:
            # Atomic replace so readers only ever see a complete file.
            os.replace(f.name, name)
        except OSError:
            # Another process may have called clear().  On Windows,
            # another program may be holding the file open.
            remove_silent()
        except BaseException:
            remove_silent()
            raise

    def clear(self) -> None:
        # imported lazily here because google app-engine doesn't support
        # write access on the file system and the function does not exist
        # normally.
        from os import remove

        files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",))
        for filename in files:
            try:
                remove(os.path.join(self.directory, filename))
            except OSError:
                # Best-effort: ignore entries that vanished or are locked.
                pass
327
+
328
+
329
class MemcachedBytecodeCache(BytecodeCache):
    """This class implements a bytecode cache that uses a memcache cache for
    storing the information. It does not enforce a specific memcache library
    (tummy's memcache or cmemcache) but will accept any class that provides
    the minimal interface required.

    Libraries compatible with this class:

    -   `cachelib <https://github.com/pallets/cachelib>`_
    -   `python-memcached <https://pypi.org/project/python-memcached/>`_

    (Unfortunately the django cache interface is not compatible because it
    does not support storing binary data, only text. You can however pass
    the underlying cache client to the bytecode cache which is available
    as `django.core.cache.cache._client`.)

    The minimal interface for the client passed to the constructor is this:

    .. class:: MinimalClientInterface

        .. method:: set(key, value[, timeout])

            Stores the bytecode in the cache. `value` is a string and
            `timeout` the timeout of the key. If timeout is not provided
            a default timeout or no timeout should be assumed, if it's
            provided it's an integer with the number of seconds the cache
            item should exist.

        .. method:: get(key)

            Returns the value for the cache key. If the item does not
            exist in the cache the return value must be `None`.

    The other arguments to the constructor are the prefix for all keys that
    is added before the actual cache key and the timeout for the bytecode in
    the cache system. We recommend a high (or no) timeout.

    This bytecode cache does not support clearing of used items in the cache.
    The clear method is a no-operation function.

    .. versionadded:: 2.7
       Added support for ignoring memcache errors through the
       `ignore_memcache_errors` parameter.
    """

    def __init__(
        self,
        client: "_MemcachedClient",
        prefix: str = "jinja2/bytecode/",
        timeout: t.Optional[int] = None,
        ignore_memcache_errors: bool = True,
    ):
        self.client = client
        self.prefix = prefix
        self.timeout = timeout
        # When True (the default), memcache failures are swallowed and the
        # cache simply behaves as a miss / no-op.
        self.ignore_memcache_errors = ignore_memcache_errors

    def load_bytecode(self, bucket: Bucket) -> None:
        try:
            code = self.client.get(self.prefix + bucket.key)
        except Exception:
            if not self.ignore_memcache_errors:
                raise
        else:
            # Only reached when the client call succeeded.  A ``None``
            # result (cache miss) yields an empty BytesIO in
            # ``Bucket.bytecode_from_string``, which fails the magic-header
            # check and leaves the bucket reset.
            bucket.bytecode_from_string(code)

    def dump_bytecode(self, bucket: Bucket) -> None:
        key = self.prefix + bucket.key
        value = bucket.bytecode_to_string()

        try:
            # Only pass the timeout through when one was configured, since
            # some clients treat the argument as mandatory when present.
            if self.timeout is not None:
                self.client.set(key, value, self.timeout)
            else:
                self.client.set(key, value)
        except Exception:
            if not self.ignore_memcache_errors:
                raise
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/compiler.py ADDED
@@ -0,0 +1,1956 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Compiles nodes from the parser into Python code."""
2
+ import typing as t
3
+ from contextlib import contextmanager
4
+ from functools import update_wrapper
5
+ from io import StringIO
6
+ from itertools import chain
7
+ from keyword import iskeyword as is_python_keyword
8
+
9
+ from markupsafe import escape
10
+ from markupsafe import Markup
11
+
12
+ from . import nodes
13
+ from .exceptions import TemplateAssertionError
14
+ from .idtracking import Symbols
15
+ from .idtracking import VAR_LOAD_ALIAS
16
+ from .idtracking import VAR_LOAD_PARAMETER
17
+ from .idtracking import VAR_LOAD_RESOLVE
18
+ from .idtracking import VAR_LOAD_UNDEFINED
19
+ from .nodes import EvalContext
20
+ from .optimizer import Optimizer
21
+ from .utils import _PassArg
22
+ from .utils import concat
23
+ from .visitor import NodeVisitor
24
+
25
+ if t.TYPE_CHECKING:
26
+ import typing_extensions as te
27
+ from .environment import Environment
28
+
29
# TypeVar used by decorators below that wrap a callable and return it
# with an unchanged type.
F = t.TypeVar("F", bound=t.Callable[..., t.Any])

# Maps comparison operator node names (as produced by the parser) to the
# Python source-code operators the code generator writes out.
operators = {
    "eq": "==",
    "ne": "!=",
    "gt": ">",
    "gteq": ">=",
    "lt": "<",
    "lteq": "<=",
    "in": "in",
    "notin": "not in",
}
41
+
42
+
43
def optimizeconst(f: F) -> F:
    """Decorator for ``CodeGenerator`` visit methods.

    Before calling the wrapped method, it asks the generator's optimizer
    (when present) to simplify the node; if a different node comes back,
    that node is visited instead of emitting code for the original one.
    """

    def new_func(
        self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any
    ) -> t.Any:
        # Only optimize if the frame is not volatile
        if self.optimizer is not None and not frame.eval_ctx.volatile:
            new_node = self.optimizer.visit(node, frame.eval_ctx)

            if new_node != node:
                # The optimizer simplified the node: generate code for the
                # simplified version instead.
                return self.visit(new_node, frame)

        return f(self, node, frame, **kwargs)

    return update_wrapper(t.cast(F, new_func), f)
57
+
58
+
59
def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]:
    """Build a ``visit_*`` method that emits code for the binary operator
    *op*.  In a sandboxed environment, intercepted operators are routed
    through ``environment.call_binop`` instead of being written inline.
    """

    @optimizeconst
    def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None:
        if (
            self.environment.sandboxed
            and op in self.environment.intercepted_binops  # type: ignore
        ):
            # Emit a runtime call so the sandbox can veto the operation.
            self.write(f"environment.call_binop(context, {op!r}, ")
            self.visit(node.left, frame)
            self.write(", ")
            self.visit(node.right, frame)
        else:
            # Emit the plain parenthesized Python expression.
            self.write("(")
            self.visit(node.left, frame)
            self.write(f" {op} ")
            self.visit(node.right, frame)

        # Closes either the call or the parenthesized expression.
        self.write(")")

    return visitor
79
+
80
+
81
def _make_unop(
    op: str,
) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]:
    """Build a ``visit_*`` method that emits code for the unary operator
    *op*.  In a sandboxed environment, intercepted operators are routed
    through ``environment.call_unop`` instead of being written inline.
    """

    @optimizeconst
    def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None:
        if (
            self.environment.sandboxed
            and op in self.environment.intercepted_unops  # type: ignore
        ):
            # Emit a runtime call so the sandbox can veto the operation.
            self.write(f"environment.call_unop(context, {op!r}, ")
            self.visit(node.node, frame)
        else:
            # Emit the plain parenthesized Python expression.
            self.write("(" + op)
            self.visit(node.node, frame)

        # Closes either the call or the parenthesized expression.
        self.write(")")

    return visitor
99
+
100
+
101
def generate(
    node: nodes.Template,
    environment: "Environment",
    name: t.Optional[str],
    filename: t.Optional[str],
    stream: t.Optional[t.TextIO] = None,
    defer_init: bool = False,
    optimized: bool = True,
) -> t.Optional[str]:
    """Generate the python source for a node tree.

    :param node: the root :class:`nodes.Template` to compile.
    :param environment: environment whose ``code_generator_class`` does
        the actual code generation.
    :param name: template name handed to the code generator.
    :param filename: template filename handed to the code generator.
    :param stream: when given, the source is written to it and ``None``
        is returned; otherwise the generated source is returned as a
        string.
    :param defer_init: forwarded to the code generator.
    :param optimized: forwarded to the code generator.
    :raises TypeError: if *node* is not a :class:`nodes.Template`.
    """
    if not isinstance(node, nodes.Template):
        raise TypeError("Can't compile non template nodes")

    generator = environment.code_generator_class(
        environment, name, filename, stream, defer_init, optimized
    )
    generator.visit(node)

    if stream is None:
        # No stream supplied: the generator buffered everything itself.
        return generator.stream.getvalue()  # type: ignore

    return None
123
+
124
+
125
def has_safe_repr(value: t.Any) -> bool:
    """Does the node have a safe representation?

    Only exact built-in scalar and container types (plus ``Markup``)
    whose ``repr`` round-trips are considered safe; containers are
    checked recursively.
    """
    if value is None or value is NotImplemented or value is Ellipsis:
        return True

    value_type = type(value)

    if value_type in {bool, int, float, complex, range, str, Markup}:
        return True

    if value_type in {tuple, list, set, frozenset}:
        return all(has_safe_repr(item) for item in value)

    if value_type is dict:
        return all(
            has_safe_repr(key) and has_safe_repr(val) for key, val in value.items()
        )

    return False
140
+
141
+
142
def find_undeclared(
    nodes: t.Iterable[nodes.Node], names: t.Iterable[str]
) -> t.Set[str]:
    """Check if the names passed are accessed undeclared. The return value
    is a set of all the undeclared names from the sequence of names found.
    """
    visitor = UndeclaredNameVisitor(names)
    try:
        for node in nodes:
            visitor.visit(node)
    except VisitorExit:
        # The visitor bails out early once every watched name has been
        # seen undeclared -- nothing more to learn.
        pass
    return visitor.undeclared
155
+
156
+
157
class MacroRef:
    """Compile-time record for a macro or call block.

    Holds the node plus flags recording whether the body accesses the
    special ``caller``, ``kwargs`` and ``varargs`` names; all flags start
    out ``False``.
    """

    def __init__(self, node: t.Union["nodes.Macro", "nodes.CallBlock"]) -> None:
        self.node = node
        # Flags are flipped externally once the corresponding special
        # name is seen being accessed.
        self.accesses_caller = False
        self.accesses_kwargs = False
        self.accesses_varargs = False
163
+
164
+
165
class Frame:
    """Holds compile time information for us.

    A frame bundles the symbol table of one compilation scope with flags
    describing the construct being compiled (buffering, block/loop
    membership, soft/root status).  Child frames inherit most of that
    state from *parent*.
    """

    def __init__(
        self,
        eval_ctx: EvalContext,
        parent: t.Optional["Frame"] = None,
        level: t.Optional[int] = None,
    ) -> None:
        self.eval_ctx = eval_ctx

        # the parent of this frame
        self.parent = parent

        if parent is None:
            self.symbols = Symbols(level=level)

            # in some dynamic inheritance situations the compiler needs to add
            # write tests around output statements.
            self.require_output_check = False

            # inside some tags we are using a buffer rather than yield statements.
            # this for example affects {% filter %} or {% macro %}. If a frame
            # is buffered this variable points to the name of the list used as
            # buffer.
            self.buffer: t.Optional[str] = None

            # the name of the block we're in, otherwise None.
            self.block: t.Optional[str] = None

        else:
            # Child frame: chain the symbol table and inherit the flags.
            self.symbols = Symbols(parent.symbols, level=level)
            self.require_output_check = parent.require_output_check
            self.buffer = parent.buffer
            self.block = parent.block

        # a toplevel frame is the root + soft frames such as if conditions.
        self.toplevel = False

        # the root frame is basically just the outermost frame, so no if
        # conditions. This information is used to optimize inheritance
        # situations.
        self.rootlevel = False

        # variables set inside of loops and blocks should not affect outer frames,
        # but they still needs to be kept track of as part of the active context.
        self.loop_frame = False
        self.block_frame = False

        # track whether the frame is being used in an if-statement or conditional
        # expression as it determines which errors should be raised during runtime
        # or compile time.
        self.soft_frame = False

    def copy(self) -> "Frame":
        """Create a copy of the current one."""
        rv = object.__new__(self.__class__)
        rv.__dict__.update(self.__dict__)
        # The symbol table must not be shared with the original frame.
        rv.symbols = self.symbols.copy()
        return rv

    def inner(self, isolated: bool = False) -> "Frame":
        """Return an inner frame."""
        if isolated:
            # An isolated frame starts a fresh symbol table one level deeper
            # instead of chaining to this frame's symbols.
            return Frame(self.eval_ctx, level=self.symbols.level + 1)
        return Frame(self.eval_ctx, self)

    def soft(self) -> "Frame":
        """Return a soft frame. A soft frame may not be modified as
        standalone thing as it shares the resources with the frame it
        was created of, but it's not a rootlevel frame any longer.

        This is only used to implement if-statements and conditional
        expressions.
        """
        rv = self.copy()
        rv.rootlevel = False
        rv.soft_frame = True
        return rv

    # Support for copy.copy().
    __copy__ = copy
246
+
247
+
248
+ class VisitorExit(RuntimeError):
249
+ """Exception used by the `UndeclaredNameVisitor` to signal a stop."""
250
+
251
+
252
+ class DependencyFinderVisitor(NodeVisitor):
253
+ """A visitor that collects filter and test calls."""
254
+
255
+ def __init__(self) -> None:
256
+ self.filters: t.Set[str] = set()
257
+ self.tests: t.Set[str] = set()
258
+
259
+ def visit_Filter(self, node: nodes.Filter) -> None:
260
+ self.generic_visit(node)
261
+ self.filters.add(node.name)
262
+
263
+ def visit_Test(self, node: nodes.Test) -> None:
264
+ self.generic_visit(node)
265
+ self.tests.add(node.name)
266
+
267
+ def visit_Block(self, node: nodes.Block) -> None:
268
+ """Stop visiting at blocks."""
269
+
270
+
271
+ class UndeclaredNameVisitor(NodeVisitor):
272
+ """A visitor that checks if a name is accessed without being
273
+ declared. This is different from the frame visitor as it will
274
+ not stop at closure frames.
275
+ """
276
+
277
+ def __init__(self, names: t.Iterable[str]) -> None:
278
+ self.names = set(names)
279
+ self.undeclared: t.Set[str] = set()
280
+
281
+ def visit_Name(self, node: nodes.Name) -> None:
282
+ if node.ctx == "load" and node.name in self.names:
283
+ self.undeclared.add(node.name)
284
+ if self.undeclared == self.names:
285
+ raise VisitorExit()
286
+ else:
287
+ self.names.discard(node.name)
288
+
289
+ def visit_Block(self, node: nodes.Block) -> None:
290
+ """Stop visiting a blocks."""
291
+
292
+
293
+ class CompilerExit(Exception):
294
+ """Raised if the compiler encountered a situation where it just
295
+ doesn't make sense to further process the code. Any block that
296
+ raises such an exception is not further processed.
297
+ """
298
+
299
+
300
+ class CodeGenerator(NodeVisitor):
301
+ def __init__(
302
+ self,
303
+ environment: "Environment",
304
+ name: t.Optional[str],
305
+ filename: t.Optional[str],
306
+ stream: t.Optional[t.TextIO] = None,
307
+ defer_init: bool = False,
308
+ optimized: bool = True,
309
+ ) -> None:
310
+ if stream is None:
311
+ stream = StringIO()
312
+ self.environment = environment
313
+ self.name = name
314
+ self.filename = filename
315
+ self.stream = stream
316
+ self.created_block_context = False
317
+ self.defer_init = defer_init
318
+ self.optimizer: t.Optional[Optimizer] = None
319
+
320
+ if optimized:
321
+ self.optimizer = Optimizer(environment)
322
+
323
+ # aliases for imports
324
+ self.import_aliases: t.Dict[str, str] = {}
325
+
326
+ # a registry for all blocks. Because blocks are moved out
327
+ # into the global python scope they are registered here
328
+ self.blocks: t.Dict[str, nodes.Block] = {}
329
+
330
+ # the number of extends statements so far
331
+ self.extends_so_far = 0
332
+
333
+ # some templates have a rootlevel extends. In this case we
334
+ # can safely assume that we're a child template and do some
335
+ # more optimizations.
336
+ self.has_known_extends = False
337
+
338
+ # the current line number
339
+ self.code_lineno = 1
340
+
341
+ # registry of all filters and tests (global, not block local)
342
+ self.tests: t.Dict[str, str] = {}
343
+ self.filters: t.Dict[str, str] = {}
344
+
345
+ # the debug information
346
+ self.debug_info: t.List[t.Tuple[int, int]] = []
347
+ self._write_debug_info: t.Optional[int] = None
348
+
349
+ # the number of new lines before the next write()
350
+ self._new_lines = 0
351
+
352
+ # the line number of the last written statement
353
+ self._last_line = 0
354
+
355
+ # true if nothing was written so far.
356
+ self._first_write = True
357
+
358
+ # used by the `temporary_identifier` method to get new
359
+ # unique, temporary identifier
360
+ self._last_identifier = 0
361
+
362
+ # the current indentation
363
+ self._indentation = 0
364
+
365
+ # Tracks toplevel assignments
366
+ self._assign_stack: t.List[t.Set[str]] = []
367
+
368
+ # Tracks parameter definition blocks
369
+ self._param_def_block: t.List[t.Set[str]] = []
370
+
371
+ # Tracks the current context.
372
+ self._context_reference_stack = ["context"]
373
+
374
+ @property
375
+ def optimized(self) -> bool:
376
+ return self.optimizer is not None
377
+
378
+ # -- Various compilation helpers
379
+
380
+ def fail(self, msg: str, lineno: int) -> "te.NoReturn":
381
+ """Fail with a :exc:`TemplateAssertionError`."""
382
+ raise TemplateAssertionError(msg, lineno, self.name, self.filename)
383
+
384
+ def temporary_identifier(self) -> str:
385
+ """Get a new unique identifier."""
386
+ self._last_identifier += 1
387
+ return f"t_{self._last_identifier}"
388
+
389
+ def buffer(self, frame: Frame) -> None:
390
+ """Enable buffering for the frame from that point onwards."""
391
+ frame.buffer = self.temporary_identifier()
392
+ self.writeline(f"{frame.buffer} = []")
393
+
394
+ def return_buffer_contents(
395
+ self, frame: Frame, force_unescaped: bool = False
396
+ ) -> None:
397
+ """Return the buffer contents of the frame."""
398
+ if not force_unescaped:
399
+ if frame.eval_ctx.volatile:
400
+ self.writeline("if context.eval_ctx.autoescape:")
401
+ self.indent()
402
+ self.writeline(f"return Markup(concat({frame.buffer}))")
403
+ self.outdent()
404
+ self.writeline("else:")
405
+ self.indent()
406
+ self.writeline(f"return concat({frame.buffer})")
407
+ self.outdent()
408
+ return
409
+ elif frame.eval_ctx.autoescape:
410
+ self.writeline(f"return Markup(concat({frame.buffer}))")
411
+ return
412
+ self.writeline(f"return concat({frame.buffer})")
413
+
414
+ def indent(self) -> None:
415
+ """Indent by one."""
416
+ self._indentation += 1
417
+
418
+ def outdent(self, step: int = 1) -> None:
419
+ """Outdent by step."""
420
+ self._indentation -= step
421
+
422
+ def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None:
423
+ """Yield or write into the frame buffer."""
424
+ if frame.buffer is None:
425
+ self.writeline("yield ", node)
426
+ else:
427
+ self.writeline(f"{frame.buffer}.append(", node)
428
+
429
+ def end_write(self, frame: Frame) -> None:
430
+ """End the writing process started by `start_write`."""
431
+ if frame.buffer is not None:
432
+ self.write(")")
433
+
434
+ def simple_write(
435
+ self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None
436
+ ) -> None:
437
+ """Simple shortcut for start_write + write + end_write."""
438
+ self.start_write(frame, node)
439
+ self.write(s)
440
+ self.end_write(frame)
441
+
442
+ def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None:
443
+ """Visit a list of nodes as block in a frame. If the current frame
444
+ is no buffer a dummy ``if 0: yield None`` is written automatically.
445
+ """
446
+ try:
447
+ self.writeline("pass")
448
+ for node in nodes:
449
+ self.visit(node, frame)
450
+ except CompilerExit:
451
+ pass
452
+
453
+ def write(self, x: str) -> None:
454
+ """Write a string into the output stream."""
455
+ if self._new_lines:
456
+ if not self._first_write:
457
+ self.stream.write("\n" * self._new_lines)
458
+ self.code_lineno += self._new_lines
459
+ if self._write_debug_info is not None:
460
+ self.debug_info.append((self._write_debug_info, self.code_lineno))
461
+ self._write_debug_info = None
462
+ self._first_write = False
463
+ self.stream.write(" " * self._indentation)
464
+ self._new_lines = 0
465
+ self.stream.write(x)
466
+
467
+ def writeline(
468
+ self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0
469
+ ) -> None:
470
+ """Combination of newline and write."""
471
+ self.newline(node, extra)
472
+ self.write(x)
473
+
474
+ def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None:
475
+ """Add one or more newlines before the next write."""
476
+ self._new_lines = max(self._new_lines, 1 + extra)
477
+ if node is not None and node.lineno != self._last_line:
478
+ self._write_debug_info = node.lineno
479
+ self._last_line = node.lineno
480
+
481
+ def signature(
482
+ self,
483
+ node: t.Union[nodes.Call, nodes.Filter, nodes.Test],
484
+ frame: Frame,
485
+ extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
486
+ ) -> None:
487
+ """Writes a function call to the stream for the current node.
488
+ A leading comma is added automatically. The extra keyword
489
+ arguments may not include python keywords otherwise a syntax
490
+ error could occur. The extra keyword arguments should be given
491
+ as python dict.
492
+ """
493
+ # if any of the given keyword arguments is a python keyword
494
+ # we have to make sure that no invalid call is created.
495
+ kwarg_workaround = any(
496
+ is_python_keyword(t.cast(str, k))
497
+ for k in chain((x.key for x in node.kwargs), extra_kwargs or ())
498
+ )
499
+
500
+ for arg in node.args:
501
+ self.write(", ")
502
+ self.visit(arg, frame)
503
+
504
+ if not kwarg_workaround:
505
+ for kwarg in node.kwargs:
506
+ self.write(", ")
507
+ self.visit(kwarg, frame)
508
+ if extra_kwargs is not None:
509
+ for key, value in extra_kwargs.items():
510
+ self.write(f", {key}={value}")
511
+ if node.dyn_args:
512
+ self.write(", *")
513
+ self.visit(node.dyn_args, frame)
514
+
515
+ if kwarg_workaround:
516
+ if node.dyn_kwargs is not None:
517
+ self.write(", **dict({")
518
+ else:
519
+ self.write(", **{")
520
+ for kwarg in node.kwargs:
521
+ self.write(f"{kwarg.key!r}: ")
522
+ self.visit(kwarg.value, frame)
523
+ self.write(", ")
524
+ if extra_kwargs is not None:
525
+ for key, value in extra_kwargs.items():
526
+ self.write(f"{key!r}: {value}, ")
527
+ if node.dyn_kwargs is not None:
528
+ self.write("}, **")
529
+ self.visit(node.dyn_kwargs, frame)
530
+ self.write(")")
531
+ else:
532
+ self.write("}")
533
+
534
+ elif node.dyn_kwargs is not None:
535
+ self.write(", **")
536
+ self.visit(node.dyn_kwargs, frame)
537
+
538
+ def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None:
539
+ """Find all filter and test names used in the template and
540
+ assign them to variables in the compiled namespace. Checking
541
+ that the names are registered with the environment is done when
542
+ compiling the Filter and Test nodes. If the node is in an If or
543
+ CondExpr node, the check is done at runtime instead.
544
+
545
+ .. versionchanged:: 3.0
546
+ Filters and tests in If and CondExpr nodes are checked at
547
+ runtime instead of compile time.
548
+ """
549
+ visitor = DependencyFinderVisitor()
550
+
551
+ for node in nodes:
552
+ visitor.visit(node)
553
+
554
+ for id_map, names, dependency in (self.filters, visitor.filters, "filters"), (
555
+ self.tests,
556
+ visitor.tests,
557
+ "tests",
558
+ ):
559
+ for name in sorted(names):
560
+ if name not in id_map:
561
+ id_map[name] = self.temporary_identifier()
562
+
563
+ # add check during runtime that dependencies used inside of executed
564
+ # blocks are defined, as this step may be skipped during compile time
565
+ self.writeline("try:")
566
+ self.indent()
567
+ self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]")
568
+ self.outdent()
569
+ self.writeline("except KeyError:")
570
+ self.indent()
571
+ self.writeline("@internalcode")
572
+ self.writeline(f"def {id_map[name]}(*unused):")
573
+ self.indent()
574
+ self.writeline(
575
+ f'raise TemplateRuntimeError("No {dependency[:-1]}'
576
+ f' named {name!r} found.")'
577
+ )
578
+ self.outdent()
579
+ self.outdent()
580
+
581
+ def enter_frame(self, frame: Frame) -> None:
582
+ undefs = []
583
+ for target, (action, param) in frame.symbols.loads.items():
584
+ if action == VAR_LOAD_PARAMETER:
585
+ pass
586
+ elif action == VAR_LOAD_RESOLVE:
587
+ self.writeline(f"{target} = {self.get_resolve_func()}({param!r})")
588
+ elif action == VAR_LOAD_ALIAS:
589
+ self.writeline(f"{target} = {param}")
590
+ elif action == VAR_LOAD_UNDEFINED:
591
+ undefs.append(target)
592
+ else:
593
+ raise NotImplementedError("unknown load instruction")
594
+ if undefs:
595
+ self.writeline(f"{' = '.join(undefs)} = missing")
596
+
597
+ def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None:
598
+ if not with_python_scope:
599
+ undefs = []
600
+ for target in frame.symbols.loads:
601
+ undefs.append(target)
602
+ if undefs:
603
+ self.writeline(f"{' = '.join(undefs)} = missing")
604
+
605
+ def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str:
606
+ return async_value if self.environment.is_async else sync_value
607
+
608
+ def func(self, name: str) -> str:
609
+ return f"{self.choose_async()}def {name}"
610
+
611
+ def macro_body(
612
+ self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame
613
+ ) -> t.Tuple[Frame, MacroRef]:
614
+ """Dump the function def of a macro or call block."""
615
+ frame = frame.inner()
616
+ frame.symbols.analyze_node(node)
617
+ macro_ref = MacroRef(node)
618
+
619
+ explicit_caller = None
620
+ skip_special_params = set()
621
+ args = []
622
+
623
+ for idx, arg in enumerate(node.args):
624
+ if arg.name == "caller":
625
+ explicit_caller = idx
626
+ if arg.name in ("kwargs", "varargs"):
627
+ skip_special_params.add(arg.name)
628
+ args.append(frame.symbols.ref(arg.name))
629
+
630
+ undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs"))
631
+
632
+ if "caller" in undeclared:
633
+ # In older Jinja versions there was a bug that allowed caller
634
+ # to retain the special behavior even if it was mentioned in
635
+ # the argument list. However thankfully this was only really
636
+ # working if it was the last argument. So we are explicitly
637
+ # checking this now and error out if it is anywhere else in
638
+ # the argument list.
639
+ if explicit_caller is not None:
640
+ try:
641
+ node.defaults[explicit_caller - len(node.args)]
642
+ except IndexError:
643
+ self.fail(
644
+ "When defining macros or call blocks the "
645
+ 'special "caller" argument must be omitted '
646
+ "or be given a default.",
647
+ node.lineno,
648
+ )
649
+ else:
650
+ args.append(frame.symbols.declare_parameter("caller"))
651
+ macro_ref.accesses_caller = True
652
+ if "kwargs" in undeclared and "kwargs" not in skip_special_params:
653
+ args.append(frame.symbols.declare_parameter("kwargs"))
654
+ macro_ref.accesses_kwargs = True
655
+ if "varargs" in undeclared and "varargs" not in skip_special_params:
656
+ args.append(frame.symbols.declare_parameter("varargs"))
657
+ macro_ref.accesses_varargs = True
658
+
659
+ # macros are delayed, they never require output checks
660
+ frame.require_output_check = False
661
+ frame.symbols.analyze_node(node)
662
+ self.writeline(f"{self.func('macro')}({', '.join(args)}):", node)
663
+ self.indent()
664
+
665
+ self.buffer(frame)
666
+ self.enter_frame(frame)
667
+
668
+ self.push_parameter_definitions(frame)
669
+ for idx, arg in enumerate(node.args):
670
+ ref = frame.symbols.ref(arg.name)
671
+ self.writeline(f"if {ref} is missing:")
672
+ self.indent()
673
+ try:
674
+ default = node.defaults[idx - len(node.args)]
675
+ except IndexError:
676
+ self.writeline(
677
+ f'{ref} = undefined("parameter {arg.name!r} was not provided",'
678
+ f" name={arg.name!r})"
679
+ )
680
+ else:
681
+ self.writeline(f"{ref} = ")
682
+ self.visit(default, frame)
683
+ self.mark_parameter_stored(ref)
684
+ self.outdent()
685
+ self.pop_parameter_definitions()
686
+
687
+ self.blockvisit(node.body, frame)
688
+ self.return_buffer_contents(frame, force_unescaped=True)
689
+ self.leave_frame(frame, with_python_scope=True)
690
+ self.outdent()
691
+
692
+ return frame, macro_ref
693
+
694
+ def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None:
695
+ """Dump the macro definition for the def created by macro_body."""
696
+ arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args)
697
+ name = getattr(macro_ref.node, "name", None)
698
+ if len(macro_ref.node.args) == 1:
699
+ arg_tuple += ","
700
+ self.write(
701
+ f"Macro(environment, macro, {name!r}, ({arg_tuple}),"
702
+ f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r},"
703
+ f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)"
704
+ )
705
+
706
+ def position(self, node: nodes.Node) -> str:
707
+ """Return a human readable position for the node."""
708
+ rv = f"line {node.lineno}"
709
+ if self.name is not None:
710
+ rv = f"{rv} in {self.name!r}"
711
+ return rv
712
+
713
+ def dump_local_context(self, frame: Frame) -> str:
714
+ items_kv = ", ".join(
715
+ f"{name!r}: {target}"
716
+ for name, target in frame.symbols.dump_stores().items()
717
+ )
718
+ return f"{{{items_kv}}}"
719
+
720
+ def write_commons(self) -> None:
721
+ """Writes a common preamble that is used by root and block functions.
722
+ Primarily this sets up common local helpers and enforces a generator
723
+ through a dead branch.
724
+ """
725
+ self.writeline("resolve = context.resolve_or_missing")
726
+ self.writeline("undefined = environment.undefined")
727
+ self.writeline("concat = environment.concat")
728
+ # always use the standard Undefined class for the implicit else of
729
+ # conditional expressions
730
+ self.writeline("cond_expr_undefined = Undefined")
731
+ self.writeline("if 0: yield None")
732
+
733
+ def push_parameter_definitions(self, frame: Frame) -> None:
734
+ """Pushes all parameter targets from the given frame into a local
735
+ stack that permits tracking of yet to be assigned parameters. In
736
+ particular this enables the optimization from `visit_Name` to skip
737
+ undefined expressions for parameters in macros as macros can reference
738
+ otherwise unbound parameters.
739
+ """
740
+ self._param_def_block.append(frame.symbols.dump_param_targets())
741
+
742
+ def pop_parameter_definitions(self) -> None:
743
+ """Pops the current parameter definitions set."""
744
+ self._param_def_block.pop()
745
+
746
+ def mark_parameter_stored(self, target: str) -> None:
747
+ """Marks a parameter in the current parameter definitions as stored.
748
+ This will skip the enforced undefined checks.
749
+ """
750
+ if self._param_def_block:
751
+ self._param_def_block[-1].discard(target)
752
+
753
+ def push_context_reference(self, target: str) -> None:
754
+ self._context_reference_stack.append(target)
755
+
756
+ def pop_context_reference(self) -> None:
757
+ self._context_reference_stack.pop()
758
+
759
+ def get_context_ref(self) -> str:
760
+ return self._context_reference_stack[-1]
761
+
762
+ def get_resolve_func(self) -> str:
763
+ target = self._context_reference_stack[-1]
764
+ if target == "context":
765
+ return "resolve"
766
+ return f"{target}.resolve"
767
+
768
+ def derive_context(self, frame: Frame) -> str:
769
+ return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})"
770
+
771
+ def parameter_is_undeclared(self, target: str) -> bool:
772
+ """Checks if a given target is an undeclared parameter."""
773
+ if not self._param_def_block:
774
+ return False
775
+ return target in self._param_def_block[-1]
776
+
777
+ def push_assign_tracking(self) -> None:
778
+ """Pushes a new layer for assignment tracking."""
779
+ self._assign_stack.append(set())
780
+
781
+ def pop_assign_tracking(self, frame: Frame) -> None:
782
+ """Pops the topmost level for assignment tracking and updates the
783
+ context variables if necessary.
784
+ """
785
+ vars = self._assign_stack.pop()
786
+ if (
787
+ not frame.block_frame
788
+ and not frame.loop_frame
789
+ and not frame.toplevel
790
+ or not vars
791
+ ):
792
+ return
793
+ public_names = [x for x in vars if x[:1] != "_"]
794
+ if len(vars) == 1:
795
+ name = next(iter(vars))
796
+ ref = frame.symbols.ref(name)
797
+ if frame.loop_frame:
798
+ self.writeline(f"_loop_vars[{name!r}] = {ref}")
799
+ return
800
+ if frame.block_frame:
801
+ self.writeline(f"_block_vars[{name!r}] = {ref}")
802
+ return
803
+ self.writeline(f"context.vars[{name!r}] = {ref}")
804
+ else:
805
+ if frame.loop_frame:
806
+ self.writeline("_loop_vars.update({")
807
+ elif frame.block_frame:
808
+ self.writeline("_block_vars.update({")
809
+ else:
810
+ self.writeline("context.vars.update({")
811
+ for idx, name in enumerate(vars):
812
+ if idx:
813
+ self.write(", ")
814
+ ref = frame.symbols.ref(name)
815
+ self.write(f"{name!r}: {ref}")
816
+ self.write("})")
817
+ if not frame.block_frame and not frame.loop_frame and public_names:
818
+ if len(public_names) == 1:
819
+ self.writeline(f"context.exported_vars.add({public_names[0]!r})")
820
+ else:
821
+ names_str = ", ".join(map(repr, public_names))
822
+ self.writeline(f"context.exported_vars.update(({names_str}))")
823
+
824
+ # -- Statement Visitors
825
+
826
+ def visit_Template(
827
+ self, node: nodes.Template, frame: t.Optional[Frame] = None
828
+ ) -> None:
829
+ assert frame is None, "no root frame allowed"
830
+ eval_ctx = EvalContext(self.environment, self.name)
831
+
832
+ from .runtime import exported, async_exported
833
+
834
+ if self.environment.is_async:
835
+ exported_names = sorted(exported + async_exported)
836
+ else:
837
+ exported_names = sorted(exported)
838
+
839
+ self.writeline("from jinja2.runtime import " + ", ".join(exported_names))
840
+
841
+ # if we want a deferred initialization we cannot move the
842
+ # environment into a local name
843
+ envenv = "" if self.defer_init else ", environment=environment"
844
+
845
+ # do we have an extends tag at all? If not, we can save some
846
+ # overhead by just not processing any inheritance code.
847
+ have_extends = node.find(nodes.Extends) is not None
848
+
849
+ # find all blocks
850
+ for block in node.find_all(nodes.Block):
851
+ if block.name in self.blocks:
852
+ self.fail(f"block {block.name!r} defined twice", block.lineno)
853
+ self.blocks[block.name] = block
854
+
855
+ # find all imports and import them
856
+ for import_ in node.find_all(nodes.ImportedName):
857
+ if import_.importname not in self.import_aliases:
858
+ imp = import_.importname
859
+ self.import_aliases[imp] = alias = self.temporary_identifier()
860
+ if "." in imp:
861
+ module, obj = imp.rsplit(".", 1)
862
+ self.writeline(f"from {module} import {obj} as {alias}")
863
+ else:
864
+ self.writeline(f"import {imp} as {alias}")
865
+
866
+ # add the load name
867
+ self.writeline(f"name = {self.name!r}")
868
+
869
+ # generate the root render function.
870
+ self.writeline(
871
+ f"{self.func('root')}(context, missing=missing{envenv}):", extra=1
872
+ )
873
+ self.indent()
874
+ self.write_commons()
875
+
876
+ # process the root
877
+ frame = Frame(eval_ctx)
878
+ if "self" in find_undeclared(node.body, ("self",)):
879
+ ref = frame.symbols.declare_parameter("self")
880
+ self.writeline(f"{ref} = TemplateReference(context)")
881
+ frame.symbols.analyze_node(node)
882
+ frame.toplevel = frame.rootlevel = True
883
+ frame.require_output_check = have_extends and not self.has_known_extends
884
+ if have_extends:
885
+ self.writeline("parent_template = None")
886
+ self.enter_frame(frame)
887
+ self.pull_dependencies(node.body)
888
+ self.blockvisit(node.body, frame)
889
+ self.leave_frame(frame, with_python_scope=True)
890
+ self.outdent()
891
+
892
+ # make sure that the parent root is called.
893
+ if have_extends:
894
+ if not self.has_known_extends:
895
+ self.indent()
896
+ self.writeline("if parent_template is not None:")
897
+ self.indent()
898
+ if not self.environment.is_async:
899
+ self.writeline("yield from parent_template.root_render_func(context)")
900
+ else:
901
+ self.writeline(
902
+ "async for event in parent_template.root_render_func(context):"
903
+ )
904
+ self.indent()
905
+ self.writeline("yield event")
906
+ self.outdent()
907
+ self.outdent(1 + (not self.has_known_extends))
908
+
909
+ # at this point we now have the blocks collected and can visit them too.
910
+ for name, block in self.blocks.items():
911
+ self.writeline(
912
+ f"{self.func('block_' + name)}(context, missing=missing{envenv}):",
913
+ block,
914
+ 1,
915
+ )
916
+ self.indent()
917
+ self.write_commons()
918
+ # It's important that we do not make this frame a child of the
919
+ # toplevel template. This would cause a variety of
920
+ # interesting issues with identifier tracking.
921
+ block_frame = Frame(eval_ctx)
922
+ block_frame.block_frame = True
923
+ undeclared = find_undeclared(block.body, ("self", "super"))
924
+ if "self" in undeclared:
925
+ ref = block_frame.symbols.declare_parameter("self")
926
+ self.writeline(f"{ref} = TemplateReference(context)")
927
+ if "super" in undeclared:
928
+ ref = block_frame.symbols.declare_parameter("super")
929
+ self.writeline(f"{ref} = context.super({name!r}, block_{name})")
930
+ block_frame.symbols.analyze_node(block)
931
+ block_frame.block = name
932
+ self.writeline("_block_vars = {}")
933
+ self.enter_frame(block_frame)
934
+ self.pull_dependencies(block.body)
935
+ self.blockvisit(block.body, block_frame)
936
+ self.leave_frame(block_frame, with_python_scope=True)
937
+ self.outdent()
938
+
939
+ blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks)
940
+ self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1)
941
+ debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info)
942
+ self.writeline(f"debug_info = {debug_kv_str!r}")
943
+
944
+ def visit_Block(self, node: nodes.Block, frame: Frame) -> None:
945
+ """Call a block and register it for the template."""
946
+ level = 0
947
+ if frame.toplevel:
948
+ # if we know that we are a child template, there is no need to
949
+ # check if we are one
950
+ if self.has_known_extends:
951
+ return
952
+ if self.extends_so_far > 0:
953
+ self.writeline("if parent_template is None:")
954
+ self.indent()
955
+ level += 1
956
+
957
+ if node.scoped:
958
+ context = self.derive_context(frame)
959
+ else:
960
+ context = self.get_context_ref()
961
+
962
+ if node.required:
963
+ self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node)
964
+ self.indent()
965
+ self.writeline(
966
+ f'raise TemplateRuntimeError("Required block {node.name!r} not found")',
967
+ node,
968
+ )
969
+ self.outdent()
970
+
971
+ if not self.environment.is_async and frame.buffer is None:
972
+ self.writeline(
973
+ f"yield from context.blocks[{node.name!r}][0]({context})", node
974
+ )
975
+ else:
976
+ self.writeline(
977
+ f"{self.choose_async()}for event in"
978
+ f" context.blocks[{node.name!r}][0]({context}):",
979
+ node,
980
+ )
981
+ self.indent()
982
+ self.simple_write("event", frame)
983
+ self.outdent()
984
+
985
+ self.outdent(level)
986
+
987
+ def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None:
988
+ """Calls the extender."""
989
+ if not frame.toplevel:
990
+ self.fail("cannot use extend from a non top-level scope", node.lineno)
991
+
992
+ # if the number of extends statements in general is zero so
993
+ # far, we don't have to add a check if something extended
994
+ # the template before this one.
995
+ if self.extends_so_far > 0:
996
+ # if we have a known extends we just add a template runtime
997
+ # error into the generated code. We could catch that at compile
998
+ # time too, but i welcome it not to confuse users by throwing the
999
+ # same error at different times just "because we can".
1000
+ if not self.has_known_extends:
1001
+ self.writeline("if parent_template is not None:")
1002
+ self.indent()
1003
+ self.writeline('raise TemplateRuntimeError("extended multiple times")')
1004
+
1005
+ # if we have a known extends already we don't need that code here
1006
+ # as we know that the template execution will end here.
1007
+ if self.has_known_extends:
1008
+ raise CompilerExit()
1009
+ else:
1010
+ self.outdent()
1011
+
1012
+ self.writeline("parent_template = environment.get_template(", node)
1013
+ self.visit(node.template, frame)
1014
+ self.write(f", {self.name!r})")
1015
+ self.writeline("for name, parent_block in parent_template.blocks.items():")
1016
+ self.indent()
1017
+ self.writeline("context.blocks.setdefault(name, []).append(parent_block)")
1018
+ self.outdent()
1019
+
1020
+ # if this extends statement was in the root level we can take
1021
+ # advantage of that information and simplify the generated code
1022
+ # in the top level from this point onwards
1023
+ if frame.rootlevel:
1024
+ self.has_known_extends = True
1025
+
1026
+ # and now we have one more
1027
+ self.extends_so_far += 1
1028
+
1029
+ def visit_Include(self, node: nodes.Include, frame: Frame) -> None:
1030
+ """Handles includes."""
1031
+ if node.ignore_missing:
1032
+ self.writeline("try:")
1033
+ self.indent()
1034
+
1035
+ func_name = "get_or_select_template"
1036
+ if isinstance(node.template, nodes.Const):
1037
+ if isinstance(node.template.value, str):
1038
+ func_name = "get_template"
1039
+ elif isinstance(node.template.value, (tuple, list)):
1040
+ func_name = "select_template"
1041
+ elif isinstance(node.template, (nodes.Tuple, nodes.List)):
1042
+ func_name = "select_template"
1043
+
1044
+ self.writeline(f"template = environment.{func_name}(", node)
1045
+ self.visit(node.template, frame)
1046
+ self.write(f", {self.name!r})")
1047
+ if node.ignore_missing:
1048
+ self.outdent()
1049
+ self.writeline("except TemplateNotFound:")
1050
+ self.indent()
1051
+ self.writeline("pass")
1052
+ self.outdent()
1053
+ self.writeline("else:")
1054
+ self.indent()
1055
+
1056
+ skip_event_yield = False
1057
+ if node.with_context:
1058
+ self.writeline(
1059
+ f"{self.choose_async()}for event in template.root_render_func("
1060
+ "template.new_context(context.get_all(), True,"
1061
+ f" {self.dump_local_context(frame)})):"
1062
+ )
1063
+ elif self.environment.is_async:
1064
+ self.writeline(
1065
+ "for event in (await template._get_default_module_async())"
1066
+ "._body_stream:"
1067
+ )
1068
+ else:
1069
+ self.writeline("yield from template._get_default_module()._body_stream")
1070
+ skip_event_yield = True
1071
+
1072
+ if not skip_event_yield:
1073
+ self.indent()
1074
+ self.simple_write("event", frame)
1075
+ self.outdent()
1076
+
1077
+ if node.ignore_missing:
1078
+ self.outdent()
1079
+
1080
+ def _import_common(
1081
+ self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame
1082
+ ) -> None:
1083
+ self.write(f"{self.choose_async('await ')}environment.get_template(")
1084
+ self.visit(node.template, frame)
1085
+ self.write(f", {self.name!r}).")
1086
+
1087
+ if node.with_context:
1088
+ f_name = f"make_module{self.choose_async('_async')}"
1089
+ self.write(
1090
+ f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})"
1091
+ )
1092
+ else:
1093
+ self.write(f"_get_default_module{self.choose_async('_async')}(context)")
1094
+
1095
+ def visit_Import(self, node: nodes.Import, frame: Frame) -> None:
1096
+ """Visit regular imports."""
1097
+ self.writeline(f"{frame.symbols.ref(node.target)} = ", node)
1098
+ if frame.toplevel:
1099
+ self.write(f"context.vars[{node.target!r}] = ")
1100
+
1101
+ self._import_common(node, frame)
1102
+
1103
+ if frame.toplevel and not node.target.startswith("_"):
1104
+ self.writeline(f"context.exported_vars.discard({node.target!r})")
1105
+
1106
+ def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None:
1107
+ """Visit named imports."""
1108
+ self.newline(node)
1109
+ self.write("included_template = ")
1110
+ self._import_common(node, frame)
1111
+ var_names = []
1112
+ discarded_names = []
1113
+ for name in node.names:
1114
+ if isinstance(name, tuple):
1115
+ name, alias = name
1116
+ else:
1117
+ alias = name
1118
+ self.writeline(
1119
+ f"{frame.symbols.ref(alias)} ="
1120
+ f" getattr(included_template, {name!r}, missing)"
1121
+ )
1122
+ self.writeline(f"if {frame.symbols.ref(alias)} is missing:")
1123
+ self.indent()
1124
+ message = (
1125
+ "the template {included_template.__name__!r}"
1126
+ f" (imported on {self.position(node)})"
1127
+ f" does not export the requested name {name!r}"
1128
+ )
1129
+ self.writeline(
1130
+ f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})"
1131
+ )
1132
+ self.outdent()
1133
+ if frame.toplevel:
1134
+ var_names.append(alias)
1135
+ if not alias.startswith("_"):
1136
+ discarded_names.append(alias)
1137
+
1138
+ if var_names:
1139
+ if len(var_names) == 1:
1140
+ name = var_names[0]
1141
+ self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}")
1142
+ else:
1143
+ names_kv = ", ".join(
1144
+ f"{name!r}: {frame.symbols.ref(name)}" for name in var_names
1145
+ )
1146
+ self.writeline(f"context.vars.update({{{names_kv}}})")
1147
+ if discarded_names:
1148
+ if len(discarded_names) == 1:
1149
+ self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})")
1150
+ else:
1151
+ names_str = ", ".join(map(repr, discarded_names))
1152
+ self.writeline(
1153
+ f"context.exported_vars.difference_update(({names_str}))"
1154
+ )
1155
+
1156
+ def visit_For(self, node: nodes.For, frame: Frame) -> None:
1157
+ loop_frame = frame.inner()
1158
+ loop_frame.loop_frame = True
1159
+ test_frame = frame.inner()
1160
+ else_frame = frame.inner()
1161
+
1162
+ # try to figure out if we have an extended loop. An extended loop
1163
+ # is necessary if the loop is in recursive mode if the special loop
1164
+ # variable is accessed in the body if the body is a scoped block.
1165
+ extended_loop = (
1166
+ node.recursive
1167
+ or "loop"
1168
+ in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",))
1169
+ or any(block.scoped for block in node.find_all(nodes.Block))
1170
+ )
1171
+
1172
+ loop_ref = None
1173
+ if extended_loop:
1174
+ loop_ref = loop_frame.symbols.declare_parameter("loop")
1175
+
1176
+ loop_frame.symbols.analyze_node(node, for_branch="body")
1177
+ if node.else_:
1178
+ else_frame.symbols.analyze_node(node, for_branch="else")
1179
+
1180
+ if node.test:
1181
+ loop_filter_func = self.temporary_identifier()
1182
+ test_frame.symbols.analyze_node(node, for_branch="test")
1183
+ self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test)
1184
+ self.indent()
1185
+ self.enter_frame(test_frame)
1186
+ self.writeline(self.choose_async("async for ", "for "))
1187
+ self.visit(node.target, loop_frame)
1188
+ self.write(" in ")
1189
+ self.write(self.choose_async("auto_aiter(fiter)", "fiter"))
1190
+ self.write(":")
1191
+ self.indent()
1192
+ self.writeline("if ", node.test)
1193
+ self.visit(node.test, test_frame)
1194
+ self.write(":")
1195
+ self.indent()
1196
+ self.writeline("yield ")
1197
+ self.visit(node.target, loop_frame)
1198
+ self.outdent(3)
1199
+ self.leave_frame(test_frame, with_python_scope=True)
1200
+
1201
+ # if we don't have an recursive loop we have to find the shadowed
1202
+ # variables at that point. Because loops can be nested but the loop
1203
+ # variable is a special one we have to enforce aliasing for it.
1204
+ if node.recursive:
1205
+ self.writeline(
1206
+ f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node
1207
+ )
1208
+ self.indent()
1209
+ self.buffer(loop_frame)
1210
+
1211
+ # Use the same buffer for the else frame
1212
+ else_frame.buffer = loop_frame.buffer
1213
+
1214
+ # make sure the loop variable is a special one and raise a template
1215
+ # assertion error if a loop tries to write to loop
1216
+ if extended_loop:
1217
+ self.writeline(f"{loop_ref} = missing")
1218
+
1219
+ for name in node.find_all(nodes.Name):
1220
+ if name.ctx == "store" and name.name == "loop":
1221
+ self.fail(
1222
+ "Can't assign to special loop variable in for-loop target",
1223
+ name.lineno,
1224
+ )
1225
+
1226
+ if node.else_:
1227
+ iteration_indicator = self.temporary_identifier()
1228
+ self.writeline(f"{iteration_indicator} = 1")
1229
+
1230
+ self.writeline(self.choose_async("async for ", "for "), node)
1231
+ self.visit(node.target, loop_frame)
1232
+ if extended_loop:
1233
+ self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(")
1234
+ else:
1235
+ self.write(" in ")
1236
+
1237
+ if node.test:
1238
+ self.write(f"{loop_filter_func}(")
1239
+ if node.recursive:
1240
+ self.write("reciter")
1241
+ else:
1242
+ if self.environment.is_async and not extended_loop:
1243
+ self.write("auto_aiter(")
1244
+ self.visit(node.iter, frame)
1245
+ if self.environment.is_async and not extended_loop:
1246
+ self.write(")")
1247
+ if node.test:
1248
+ self.write(")")
1249
+
1250
+ if node.recursive:
1251
+ self.write(", undefined, loop_render_func, depth):")
1252
+ else:
1253
+ self.write(", undefined):" if extended_loop else ":")
1254
+
1255
+ self.indent()
1256
+ self.enter_frame(loop_frame)
1257
+
1258
+ self.writeline("_loop_vars = {}")
1259
+ self.blockvisit(node.body, loop_frame)
1260
+ if node.else_:
1261
+ self.writeline(f"{iteration_indicator} = 0")
1262
+ self.outdent()
1263
+ self.leave_frame(
1264
+ loop_frame, with_python_scope=node.recursive and not node.else_
1265
+ )
1266
+
1267
+ if node.else_:
1268
+ self.writeline(f"if {iteration_indicator}:")
1269
+ self.indent()
1270
+ self.enter_frame(else_frame)
1271
+ self.blockvisit(node.else_, else_frame)
1272
+ self.leave_frame(else_frame)
1273
+ self.outdent()
1274
+
1275
+ # if the node was recursive we have to return the buffer contents
1276
+ # and start the iteration code
1277
+ if node.recursive:
1278
+ self.return_buffer_contents(loop_frame)
1279
+ self.outdent()
1280
+ self.start_write(frame, node)
1281
+ self.write(f"{self.choose_async('await ')}loop(")
1282
+ if self.environment.is_async:
1283
+ self.write("auto_aiter(")
1284
+ self.visit(node.iter, frame)
1285
+ if self.environment.is_async:
1286
+ self.write(")")
1287
+ self.write(", loop)")
1288
+ self.end_write(frame)
1289
+
1290
+ # at the end of the iteration, clear any assignments made in the
1291
+ # loop from the top level
1292
+ if self._assign_stack:
1293
+ self._assign_stack[-1].difference_update(loop_frame.symbols.stores)
1294
+
1295
+ def visit_If(self, node: nodes.If, frame: Frame) -> None:
1296
+ if_frame = frame.soft()
1297
+ self.writeline("if ", node)
1298
+ self.visit(node.test, if_frame)
1299
+ self.write(":")
1300
+ self.indent()
1301
+ self.blockvisit(node.body, if_frame)
1302
+ self.outdent()
1303
+ for elif_ in node.elif_:
1304
+ self.writeline("elif ", elif_)
1305
+ self.visit(elif_.test, if_frame)
1306
+ self.write(":")
1307
+ self.indent()
1308
+ self.blockvisit(elif_.body, if_frame)
1309
+ self.outdent()
1310
+ if node.else_:
1311
+ self.writeline("else:")
1312
+ self.indent()
1313
+ self.blockvisit(node.else_, if_frame)
1314
+ self.outdent()
1315
+
1316
+ def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None:
1317
+ macro_frame, macro_ref = self.macro_body(node, frame)
1318
+ self.newline()
1319
+ if frame.toplevel:
1320
+ if not node.name.startswith("_"):
1321
+ self.write(f"context.exported_vars.add({node.name!r})")
1322
+ self.writeline(f"context.vars[{node.name!r}] = ")
1323
+ self.write(f"{frame.symbols.ref(node.name)} = ")
1324
+ self.macro_def(macro_ref, macro_frame)
1325
+
1326
+ def visit_CallBlock(self, node: nodes.CallBlock, frame: Frame) -> None:
1327
+ call_frame, macro_ref = self.macro_body(node, frame)
1328
+ self.writeline("caller = ")
1329
+ self.macro_def(macro_ref, call_frame)
1330
+ self.start_write(frame, node)
1331
+ self.visit_Call(node.call, frame, forward_caller=True)
1332
+ self.end_write(frame)
1333
+
1334
+ def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None:
1335
+ filter_frame = frame.inner()
1336
+ filter_frame.symbols.analyze_node(node)
1337
+ self.enter_frame(filter_frame)
1338
+ self.buffer(filter_frame)
1339
+ self.blockvisit(node.body, filter_frame)
1340
+ self.start_write(frame, node)
1341
+ self.visit_Filter(node.filter, filter_frame)
1342
+ self.end_write(frame)
1343
+ self.leave_frame(filter_frame)
1344
+
1345
+ def visit_With(self, node: nodes.With, frame: Frame) -> None:
1346
+ with_frame = frame.inner()
1347
+ with_frame.symbols.analyze_node(node)
1348
+ self.enter_frame(with_frame)
1349
+ for target, expr in zip(node.targets, node.values):
1350
+ self.newline()
1351
+ self.visit(target, with_frame)
1352
+ self.write(" = ")
1353
+ self.visit(expr, frame)
1354
+ self.blockvisit(node.body, with_frame)
1355
+ self.leave_frame(with_frame)
1356
+
1357
+ def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None:
1358
+ self.newline(node)
1359
+ self.visit(node.node, frame)
1360
+
1361
+ class _FinalizeInfo(t.NamedTuple):
1362
+ const: t.Optional[t.Callable[..., str]]
1363
+ src: t.Optional[str]
1364
+
1365
+ @staticmethod
1366
+ def _default_finalize(value: t.Any) -> t.Any:
1367
+ """The default finalize function if the environment isn't
1368
+ configured with one. Or, if the environment has one, this is
1369
+ called on that function's output for constants.
1370
+ """
1371
+ return str(value)
1372
+
1373
+ _finalize: t.Optional[_FinalizeInfo] = None
1374
+
1375
+ def _make_finalize(self) -> _FinalizeInfo:
1376
+ """Build the finalize function to be used on constants and at
1377
+ runtime. Cached so it's only created once for all output nodes.
1378
+
1379
+ Returns a ``namedtuple`` with the following attributes:
1380
+
1381
+ ``const``
1382
+ A function to finalize constant data at compile time.
1383
+
1384
+ ``src``
1385
+ Source code to output around nodes to be evaluated at
1386
+ runtime.
1387
+ """
1388
+ if self._finalize is not None:
1389
+ return self._finalize
1390
+
1391
+ finalize: t.Optional[t.Callable[..., t.Any]]
1392
+ finalize = default = self._default_finalize
1393
+ src = None
1394
+
1395
+ if self.environment.finalize:
1396
+ src = "environment.finalize("
1397
+ env_finalize = self.environment.finalize
1398
+ pass_arg = {
1399
+ _PassArg.context: "context",
1400
+ _PassArg.eval_context: "context.eval_ctx",
1401
+ _PassArg.environment: "environment",
1402
+ }.get(
1403
+ _PassArg.from_obj(env_finalize) # type: ignore
1404
+ )
1405
+ finalize = None
1406
+
1407
+ if pass_arg is None:
1408
+
1409
+ def finalize(value: t.Any) -> t.Any: # noqa: F811
1410
+ return default(env_finalize(value))
1411
+
1412
+ else:
1413
+ src = f"{src}{pass_arg}, "
1414
+
1415
+ if pass_arg == "environment":
1416
+
1417
+ def finalize(value: t.Any) -> t.Any: # noqa: F811
1418
+ return default(env_finalize(self.environment, value))
1419
+
1420
+ self._finalize = self._FinalizeInfo(finalize, src)
1421
+ return self._finalize
1422
+
1423
+ def _output_const_repr(self, group: t.Iterable[t.Any]) -> str:
1424
+ """Given a group of constant values converted from ``Output``
1425
+ child nodes, produce a string to write to the template module
1426
+ source.
1427
+ """
1428
+ return repr(concat(group))
1429
+
1430
+ def _output_child_to_const(
1431
+ self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo
1432
+ ) -> str:
1433
+ """Try to optimize a child of an ``Output`` node by trying to
1434
+ convert it to constant, finalized data at compile time.
1435
+
1436
+ If :exc:`Impossible` is raised, the node is not constant and
1437
+ will be evaluated at runtime. Any other exception will also be
1438
+ evaluated at runtime for easier debugging.
1439
+ """
1440
+ const = node.as_const(frame.eval_ctx)
1441
+
1442
+ if frame.eval_ctx.autoescape:
1443
+ const = escape(const)
1444
+
1445
+ # Template data doesn't go through finalize.
1446
+ if isinstance(node, nodes.TemplateData):
1447
+ return str(const)
1448
+
1449
+ return finalize.const(const) # type: ignore
1450
+
1451
+ def _output_child_pre(
1452
+ self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo
1453
+ ) -> None:
1454
+ """Output extra source code before visiting a child of an
1455
+ ``Output`` node.
1456
+ """
1457
+ if frame.eval_ctx.volatile:
1458
+ self.write("(escape if context.eval_ctx.autoescape else str)(")
1459
+ elif frame.eval_ctx.autoescape:
1460
+ self.write("escape(")
1461
+ else:
1462
+ self.write("str(")
1463
+
1464
+ if finalize.src is not None:
1465
+ self.write(finalize.src)
1466
+
1467
+ def _output_child_post(
1468
+ self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo
1469
+ ) -> None:
1470
+ """Output extra source code after visiting a child of an
1471
+ ``Output`` node.
1472
+ """
1473
+ self.write(")")
1474
+
1475
+ if finalize.src is not None:
1476
+ self.write(")")
1477
+
1478
+ def visit_Output(self, node: nodes.Output, frame: Frame) -> None:
1479
+ # If an extends is active, don't render outside a block.
1480
+ if frame.require_output_check:
1481
+ # A top-level extends is known to exist at compile time.
1482
+ if self.has_known_extends:
1483
+ return
1484
+
1485
+ self.writeline("if parent_template is None:")
1486
+ self.indent()
1487
+
1488
+ finalize = self._make_finalize()
1489
+ body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = []
1490
+
1491
+ # Evaluate constants at compile time if possible. Each item in
1492
+ # body will be either a list of static data or a node to be
1493
+ # evaluated at runtime.
1494
+ for child in node.nodes:
1495
+ try:
1496
+ if not (
1497
+ # If the finalize function requires runtime context,
1498
+ # constants can't be evaluated at compile time.
1499
+ finalize.const
1500
+ # Unless it's basic template data that won't be
1501
+ # finalized anyway.
1502
+ or isinstance(child, nodes.TemplateData)
1503
+ ):
1504
+ raise nodes.Impossible()
1505
+
1506
+ const = self._output_child_to_const(child, frame, finalize)
1507
+ except (nodes.Impossible, Exception):
1508
+ # The node was not constant and needs to be evaluated at
1509
+ # runtime. Or another error was raised, which is easier
1510
+ # to debug at runtime.
1511
+ body.append(child)
1512
+ continue
1513
+
1514
+ if body and isinstance(body[-1], list):
1515
+ body[-1].append(const)
1516
+ else:
1517
+ body.append([const])
1518
+
1519
+ if frame.buffer is not None:
1520
+ if len(body) == 1:
1521
+ self.writeline(f"{frame.buffer}.append(")
1522
+ else:
1523
+ self.writeline(f"{frame.buffer}.extend((")
1524
+
1525
+ self.indent()
1526
+
1527
+ for item in body:
1528
+ if isinstance(item, list):
1529
+ # A group of constant data to join and output.
1530
+ val = self._output_const_repr(item)
1531
+
1532
+ if frame.buffer is None:
1533
+ self.writeline("yield " + val)
1534
+ else:
1535
+ self.writeline(val + ",")
1536
+ else:
1537
+ if frame.buffer is None:
1538
+ self.writeline("yield ", item)
1539
+ else:
1540
+ self.newline(item)
1541
+
1542
+ # A node to be evaluated at runtime.
1543
+ self._output_child_pre(item, frame, finalize)
1544
+ self.visit(item, frame)
1545
+ self._output_child_post(item, frame, finalize)
1546
+
1547
+ if frame.buffer is not None:
1548
+ self.write(",")
1549
+
1550
+ if frame.buffer is not None:
1551
+ self.outdent()
1552
+ self.writeline(")" if len(body) == 1 else "))")
1553
+
1554
+ if frame.require_output_check:
1555
+ self.outdent()
1556
+
1557
+ def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None:
1558
+ self.push_assign_tracking()
1559
+ self.newline(node)
1560
+ self.visit(node.target, frame)
1561
+ self.write(" = ")
1562
+ self.visit(node.node, frame)
1563
+ self.pop_assign_tracking(frame)
1564
+
1565
+ def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None:
1566
+ self.push_assign_tracking()
1567
+ block_frame = frame.inner()
1568
+ # This is a special case. Since a set block always captures we
1569
+ # will disable output checks. This way one can use set blocks
1570
+ # toplevel even in extended templates.
1571
+ block_frame.require_output_check = False
1572
+ block_frame.symbols.analyze_node(node)
1573
+ self.enter_frame(block_frame)
1574
+ self.buffer(block_frame)
1575
+ self.blockvisit(node.body, block_frame)
1576
+ self.newline(node)
1577
+ self.visit(node.target, frame)
1578
+ self.write(" = (Markup if context.eval_ctx.autoescape else identity)(")
1579
+ if node.filter is not None:
1580
+ self.visit_Filter(node.filter, block_frame)
1581
+ else:
1582
+ self.write(f"concat({block_frame.buffer})")
1583
+ self.write(")")
1584
+ self.pop_assign_tracking(frame)
1585
+ self.leave_frame(block_frame)
1586
+
1587
+ # -- Expression Visitors
1588
+
1589
+ def visit_Name(self, node: nodes.Name, frame: Frame) -> None:
1590
+ if node.ctx == "store" and (
1591
+ frame.toplevel or frame.loop_frame or frame.block_frame
1592
+ ):
1593
+ if self._assign_stack:
1594
+ self._assign_stack[-1].add(node.name)
1595
+ ref = frame.symbols.ref(node.name)
1596
+
1597
+ # If we are looking up a variable we might have to deal with the
1598
+ # case where it's undefined. We can skip that case if the load
1599
+ # instruction indicates a parameter which are always defined.
1600
+ if node.ctx == "load":
1601
+ load = frame.symbols.find_load(ref)
1602
+ if not (
1603
+ load is not None
1604
+ and load[0] == VAR_LOAD_PARAMETER
1605
+ and not self.parameter_is_undeclared(ref)
1606
+ ):
1607
+ self.write(
1608
+ f"(undefined(name={node.name!r}) if {ref} is missing else {ref})"
1609
+ )
1610
+ return
1611
+
1612
+ self.write(ref)
1613
+
1614
+ def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None:
1615
+ # NSRefs can only be used to store values; since they use the normal
1616
+ # `foo.bar` notation they will be parsed as a normal attribute access
1617
+ # when used anywhere but in a `set` context
1618
+ ref = frame.symbols.ref(node.name)
1619
+ self.writeline(f"if not isinstance({ref}, Namespace):")
1620
+ self.indent()
1621
+ self.writeline(
1622
+ "raise TemplateRuntimeError"
1623
+ '("cannot assign attribute on non-namespace object")'
1624
+ )
1625
+ self.outdent()
1626
+ self.writeline(f"{ref}[{node.attr!r}]")
1627
+
1628
+ def visit_Const(self, node: nodes.Const, frame: Frame) -> None:
1629
+ val = node.as_const(frame.eval_ctx)
1630
+ if isinstance(val, float):
1631
+ self.write(str(val))
1632
+ else:
1633
+ self.write(repr(val))
1634
+
1635
+ def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None:
1636
+ try:
1637
+ self.write(repr(node.as_const(frame.eval_ctx)))
1638
+ except nodes.Impossible:
1639
+ self.write(
1640
+ f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})"
1641
+ )
1642
+
1643
+ def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None:
1644
+ self.write("(")
1645
+ idx = -1
1646
+ for idx, item in enumerate(node.items):
1647
+ if idx:
1648
+ self.write(", ")
1649
+ self.visit(item, frame)
1650
+ self.write(",)" if idx == 0 else ")")
1651
+
1652
+ def visit_List(self, node: nodes.List, frame: Frame) -> None:
1653
+ self.write("[")
1654
+ for idx, item in enumerate(node.items):
1655
+ if idx:
1656
+ self.write(", ")
1657
+ self.visit(item, frame)
1658
+ self.write("]")
1659
+
1660
+ def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None:
1661
+ self.write("{")
1662
+ for idx, item in enumerate(node.items):
1663
+ if idx:
1664
+ self.write(", ")
1665
+ self.visit(item.key, frame)
1666
+ self.write(": ")
1667
+ self.visit(item.value, frame)
1668
+ self.write("}")
1669
+
1670
+ visit_Add = _make_binop("+")
1671
+ visit_Sub = _make_binop("-")
1672
+ visit_Mul = _make_binop("*")
1673
+ visit_Div = _make_binop("/")
1674
+ visit_FloorDiv = _make_binop("//")
1675
+ visit_Pow = _make_binop("**")
1676
+ visit_Mod = _make_binop("%")
1677
+ visit_And = _make_binop("and")
1678
+ visit_Or = _make_binop("or")
1679
+ visit_Pos = _make_unop("+")
1680
+ visit_Neg = _make_unop("-")
1681
+ visit_Not = _make_unop("not ")
1682
+
1683
+ @optimizeconst
1684
+ def visit_Concat(self, node: nodes.Concat, frame: Frame) -> None:
1685
+ if frame.eval_ctx.volatile:
1686
+ func_name = "(markup_join if context.eval_ctx.volatile else str_join)"
1687
+ elif frame.eval_ctx.autoescape:
1688
+ func_name = "markup_join"
1689
+ else:
1690
+ func_name = "str_join"
1691
+ self.write(f"{func_name}((")
1692
+ for arg in node.nodes:
1693
+ self.visit(arg, frame)
1694
+ self.write(", ")
1695
+ self.write("))")
1696
+
1697
+ @optimizeconst
1698
+ def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None:
1699
+ self.write("(")
1700
+ self.visit(node.expr, frame)
1701
+ for op in node.ops:
1702
+ self.visit(op, frame)
1703
+ self.write(")")
1704
+
1705
+ def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None:
1706
+ self.write(f" {operators[node.op]} ")
1707
+ self.visit(node.expr, frame)
1708
+
1709
+ @optimizeconst
1710
+ def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None:
1711
+ if self.environment.is_async:
1712
+ self.write("(await auto_await(")
1713
+
1714
+ self.write("environment.getattr(")
1715
+ self.visit(node.node, frame)
1716
+ self.write(f", {node.attr!r})")
1717
+
1718
+ if self.environment.is_async:
1719
+ self.write("))")
1720
+
1721
+ @optimizeconst
1722
+ def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None:
1723
+ # slices bypass the environment getitem method.
1724
+ if isinstance(node.arg, nodes.Slice):
1725
+ self.visit(node.node, frame)
1726
+ self.write("[")
1727
+ self.visit(node.arg, frame)
1728
+ self.write("]")
1729
+ else:
1730
+ if self.environment.is_async:
1731
+ self.write("(await auto_await(")
1732
+
1733
+ self.write("environment.getitem(")
1734
+ self.visit(node.node, frame)
1735
+ self.write(", ")
1736
+ self.visit(node.arg, frame)
1737
+ self.write(")")
1738
+
1739
+ if self.environment.is_async:
1740
+ self.write("))")
1741
+
1742
+ def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None:
1743
+ if node.start is not None:
1744
+ self.visit(node.start, frame)
1745
+ self.write(":")
1746
+ if node.stop is not None:
1747
+ self.visit(node.stop, frame)
1748
+ if node.step is not None:
1749
+ self.write(":")
1750
+ self.visit(node.step, frame)
1751
+
1752
+ @contextmanager
1753
+ def _filter_test_common(
1754
+ self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool
1755
+ ) -> t.Iterator[None]:
1756
+ if self.environment.is_async:
1757
+ self.write("(await auto_await(")
1758
+
1759
+ if is_filter:
1760
+ self.write(f"{self.filters[node.name]}(")
1761
+ func = self.environment.filters.get(node.name)
1762
+ else:
1763
+ self.write(f"{self.tests[node.name]}(")
1764
+ func = self.environment.tests.get(node.name)
1765
+
1766
+ # When inside an If or CondExpr frame, allow the filter to be
1767
+ # undefined at compile time and only raise an error if it's
1768
+ # actually called at runtime. See pull_dependencies.
1769
+ if func is None and not frame.soft_frame:
1770
+ type_name = "filter" if is_filter else "test"
1771
+ self.fail(f"No {type_name} named {node.name!r}.", node.lineno)
1772
+
1773
+ pass_arg = {
1774
+ _PassArg.context: "context",
1775
+ _PassArg.eval_context: "context.eval_ctx",
1776
+ _PassArg.environment: "environment",
1777
+ }.get(
1778
+ _PassArg.from_obj(func) # type: ignore
1779
+ )
1780
+
1781
+ if pass_arg is not None:
1782
+ self.write(f"{pass_arg}, ")
1783
+
1784
+ # Back to the visitor function to handle visiting the target of
1785
+ # the filter or test.
1786
+ yield
1787
+
1788
+ self.signature(node, frame)
1789
+ self.write(")")
1790
+
1791
+ if self.environment.is_async:
1792
+ self.write("))")
1793
+
1794
+ @optimizeconst
1795
+ def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None:
1796
+ with self._filter_test_common(node, frame, True):
1797
+ # if the filter node is None we are inside a filter block
1798
+ # and want to write to the current buffer
1799
+ if node.node is not None:
1800
+ self.visit(node.node, frame)
1801
+ elif frame.eval_ctx.volatile:
1802
+ self.write(
1803
+ f"(Markup(concat({frame.buffer}))"
1804
+ f" if context.eval_ctx.autoescape else concat({frame.buffer}))"
1805
+ )
1806
+ elif frame.eval_ctx.autoescape:
1807
+ self.write(f"Markup(concat({frame.buffer}))")
1808
+ else:
1809
+ self.write(f"concat({frame.buffer})")
1810
+
1811
+ @optimizeconst
1812
+ def visit_Test(self, node: nodes.Test, frame: Frame) -> None:
1813
+ with self._filter_test_common(node, frame, False):
1814
+ self.visit(node.node, frame)
1815
+
1816
+ @optimizeconst
1817
+ def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None:
1818
+ frame = frame.soft()
1819
+
1820
+ def write_expr2() -> None:
1821
+ if node.expr2 is not None:
1822
+ self.visit(node.expr2, frame)
1823
+ return
1824
+
1825
+ self.write(
1826
+ f'cond_expr_undefined("the inline if-expression on'
1827
+ f" {self.position(node)} evaluated to false and no else"
1828
+ f' section was defined.")'
1829
+ )
1830
+
1831
+ self.write("(")
1832
+ self.visit(node.expr1, frame)
1833
+ self.write(" if ")
1834
+ self.visit(node.test, frame)
1835
+ self.write(" else ")
1836
+ write_expr2()
1837
+ self.write(")")
1838
+
1839
+ @optimizeconst
1840
+ def visit_Call(
1841
+ self, node: nodes.Call, frame: Frame, forward_caller: bool = False
1842
+ ) -> None:
1843
+ if self.environment.is_async:
1844
+ self.write("(await auto_await(")
1845
+ if self.environment.sandboxed:
1846
+ self.write("environment.call(context, ")
1847
+ else:
1848
+ self.write("context.call(")
1849
+ self.visit(node.node, frame)
1850
+ extra_kwargs = {"caller": "caller"} if forward_caller else None
1851
+ loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {}
1852
+ block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {}
1853
+ if extra_kwargs:
1854
+ extra_kwargs.update(loop_kwargs, **block_kwargs)
1855
+ elif loop_kwargs or block_kwargs:
1856
+ extra_kwargs = dict(loop_kwargs, **block_kwargs)
1857
+ self.signature(node, frame, extra_kwargs)
1858
+ self.write(")")
1859
+ if self.environment.is_async:
1860
+ self.write("))")
1861
+
1862
+ def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None:
1863
+ self.write(node.key + "=")
1864
+ self.visit(node.value, frame)
1865
+
1866
+ # -- Unused nodes for extensions
1867
+
1868
+ def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None:
1869
+ self.write("Markup(")
1870
+ self.visit(node.expr, frame)
1871
+ self.write(")")
1872
+
1873
+ def visit_MarkSafeIfAutoescape(
1874
+ self, node: nodes.MarkSafeIfAutoescape, frame: Frame
1875
+ ) -> None:
1876
+ self.write("(Markup if context.eval_ctx.autoescape else identity)(")
1877
+ self.visit(node.expr, frame)
1878
+ self.write(")")
1879
+
1880
+ def visit_EnvironmentAttribute(
1881
+ self, node: nodes.EnvironmentAttribute, frame: Frame
1882
+ ) -> None:
1883
+ self.write("environment." + node.name)
1884
+
1885
+ def visit_ExtensionAttribute(
1886
+ self, node: nodes.ExtensionAttribute, frame: Frame
1887
+ ) -> None:
1888
+ self.write(f"environment.extensions[{node.identifier!r}].{node.name}")
1889
+
1890
+ def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None:
1891
+ self.write(self.import_aliases[node.importname])
1892
+
1893
+ def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None:
1894
+ self.write(node.name)
1895
+
1896
+ def visit_ContextReference(
1897
+ self, node: nodes.ContextReference, frame: Frame
1898
+ ) -> None:
1899
+ self.write("context")
1900
+
1901
+ def visit_DerivedContextReference(
1902
+ self, node: nodes.DerivedContextReference, frame: Frame
1903
+ ) -> None:
1904
+ self.write(self.derive_context(frame))
1905
+
1906
+ def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None:
1907
+ self.writeline("continue", node)
1908
+
1909
+ def visit_Break(self, node: nodes.Break, frame: Frame) -> None:
1910
+ self.writeline("break", node)
1911
+
1912
+ def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None:
1913
+ scope_frame = frame.inner()
1914
+ scope_frame.symbols.analyze_node(node)
1915
+ self.enter_frame(scope_frame)
1916
+ self.blockvisit(node.body, scope_frame)
1917
+ self.leave_frame(scope_frame)
1918
+
1919
+ def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None:
1920
+ ctx = self.temporary_identifier()
1921
+ self.writeline(f"{ctx} = {self.derive_context(frame)}")
1922
+ self.writeline(f"{ctx}.vars = ")
1923
+ self.visit(node.context, frame)
1924
+ self.push_context_reference(ctx)
1925
+
1926
+ scope_frame = frame.inner(isolated=True)
1927
+ scope_frame.symbols.analyze_node(node)
1928
+ self.enter_frame(scope_frame)
1929
+ self.blockvisit(node.body, scope_frame)
1930
+ self.leave_frame(scope_frame)
1931
+ self.pop_context_reference()
1932
+
1933
+ def visit_EvalContextModifier(
1934
+ self, node: nodes.EvalContextModifier, frame: Frame
1935
+ ) -> None:
1936
+ for keyword in node.options:
1937
+ self.writeline(f"context.eval_ctx.{keyword.key} = ")
1938
+ self.visit(keyword.value, frame)
1939
+ try:
1940
+ val = keyword.value.as_const(frame.eval_ctx)
1941
+ except nodes.Impossible:
1942
+ frame.eval_ctx.volatile = True
1943
+ else:
1944
+ setattr(frame.eval_ctx, keyword.key, val)
1945
+
1946
+ def visit_ScopedEvalContextModifier(
1947
+ self, node: nodes.ScopedEvalContextModifier, frame: Frame
1948
+ ) -> None:
1949
+ old_ctx_name = self.temporary_identifier()
1950
+ saved_ctx = frame.eval_ctx.save()
1951
+ self.writeline(f"{old_ctx_name} = context.eval_ctx.save()")
1952
+ self.visit_EvalContextModifier(node, frame)
1953
+ for child in node.body:
1954
+ self.visit(child, frame)
1955
+ frame.eval_ctx.revert(saved_ctx)
1956
+ self.writeline(f"context.eval_ctx.revert({old_ctx_name})")
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/debug.py ADDED
@@ -0,0 +1,191 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ import typing as t
3
+ from types import CodeType
4
+ from types import TracebackType
5
+
6
+ from .exceptions import TemplateSyntaxError
7
+ from .utils import internal_code
8
+ from .utils import missing
9
+
10
+ if t.TYPE_CHECKING:
11
+ from .runtime import Context
12
+
13
+
14
+ def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException:
15
+ """Rewrite the current exception to replace any tracebacks from
16
+ within compiled template code with tracebacks that look like they
17
+ came from the template source.
18
+
19
+ This must be called within an ``except`` block.
20
+
21
+ :param source: For ``TemplateSyntaxError``, the original source if
22
+ known.
23
+ :return: The original exception with the rewritten traceback.
24
+ """
25
+ _, exc_value, tb = sys.exc_info()
26
+ exc_value = t.cast(BaseException, exc_value)
27
+ tb = t.cast(TracebackType, tb)
28
+
29
+ if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated:
30
+ exc_value.translated = True
31
+ exc_value.source = source
32
+ # Remove the old traceback, otherwise the frames from the
33
+ # compiler still show up.
34
+ exc_value.with_traceback(None)
35
+ # Outside of runtime, so the frame isn't executing template
36
+ # code, but it still needs to point at the template.
37
+ tb = fake_traceback(
38
+ exc_value, None, exc_value.filename or "<unknown>", exc_value.lineno
39
+ )
40
+ else:
41
+ # Skip the frame for the render function.
42
+ tb = tb.tb_next
43
+
44
+ stack = []
45
+
46
+ # Build the stack of traceback object, replacing any in template
47
+ # code with the source file and line information.
48
+ while tb is not None:
49
+ # Skip frames decorated with @internalcode. These are internal
50
+ # calls that aren't useful in template debugging output.
51
+ if tb.tb_frame.f_code in internal_code:
52
+ tb = tb.tb_next
53
+ continue
54
+
55
+ template = tb.tb_frame.f_globals.get("__jinja_template__")
56
+
57
+ if template is not None:
58
+ lineno = template.get_corresponding_lineno(tb.tb_lineno)
59
+ fake_tb = fake_traceback(exc_value, tb, template.filename, lineno)
60
+ stack.append(fake_tb)
61
+ else:
62
+ stack.append(tb)
63
+
64
+ tb = tb.tb_next
65
+
66
+ tb_next = None
67
+
68
+ # Assign tb_next in reverse to avoid circular references.
69
+ for tb in reversed(stack):
70
+ tb.tb_next = tb_next
71
+ tb_next = tb
72
+
73
+ return exc_value.with_traceback(tb_next)
74
+
75
+
76
+ def fake_traceback( # type: ignore
77
+ exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int
78
+ ) -> TracebackType:
79
+ """Produce a new traceback object that looks like it came from the
80
+ template source instead of the compiled code. The filename, line
81
+ number, and location name will point to the template, and the local
82
+ variables will be the current template context.
83
+
84
+ :param exc_value: The original exception to be re-raised to create
85
+ the new traceback.
86
+ :param tb: The original traceback to get the local variables and
87
+ code info from.
88
+ :param filename: The template filename.
89
+ :param lineno: The line number in the template source.
90
+ """
91
+ if tb is not None:
92
+ # Replace the real locals with the context that would be
93
+ # available at that point in the template.
94
+ locals = get_template_locals(tb.tb_frame.f_locals)
95
+ locals.pop("__jinja_exception__", None)
96
+ else:
97
+ locals = {}
98
+
99
+ globals = {
100
+ "__name__": filename,
101
+ "__file__": filename,
102
+ "__jinja_exception__": exc_value,
103
+ }
104
+ # Raise an exception at the correct line number.
105
+ code: CodeType = compile(
106
+ "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec"
107
+ )
108
+
109
+ # Build a new code object that points to the template file and
110
+ # replaces the location with a block name.
111
+ location = "template"
112
+
113
+ if tb is not None:
114
+ function = tb.tb_frame.f_code.co_name
115
+
116
+ if function == "root":
117
+ location = "top-level template code"
118
+ elif function.startswith("block_"):
119
+ location = f"block {function[6:]!r}"
120
+
121
+ if sys.version_info >= (3, 8):
122
+ code = code.replace(co_name=location)
123
+ else:
124
+ code = CodeType(
125
+ code.co_argcount,
126
+ code.co_kwonlyargcount,
127
+ code.co_nlocals,
128
+ code.co_stacksize,
129
+ code.co_flags,
130
+ code.co_code,
131
+ code.co_consts,
132
+ code.co_names,
133
+ code.co_varnames,
134
+ code.co_filename,
135
+ location,
136
+ code.co_firstlineno,
137
+ code.co_lnotab,
138
+ code.co_freevars,
139
+ code.co_cellvars,
140
+ )
141
+
142
+ # Execute the new code, which is guaranteed to raise, and return
143
+ # the new traceback without this frame.
144
+ try:
145
+ exec(code, globals, locals)
146
+ except BaseException:
147
+ return sys.exc_info()[2].tb_next # type: ignore
148
+
149
+
150
+ def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]:
151
+ """Based on the runtime locals, get the context that would be
152
+ available at that point in the template.
153
+ """
154
+ # Start with the current template context.
155
+ ctx: "t.Optional[Context]" = real_locals.get("context")
156
+
157
+ if ctx is not None:
158
+ data: t.Dict[str, t.Any] = ctx.get_all().copy()
159
+ else:
160
+ data = {}
161
+
162
+ # Might be in a derived context that only sets local variables
163
+ # rather than pushing a context. Local variables follow the scheme
164
+ # l_depth_name. Find the highest-depth local that has a value for
165
+ # each name.
166
+ local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {}
167
+
168
+ for name, value in real_locals.items():
169
+ if not name.startswith("l_") or value is missing:
170
+ # Not a template variable, or no longer relevant.
171
+ continue
172
+
173
+ try:
174
+ _, depth_str, name = name.split("_", 2)
175
+ depth = int(depth_str)
176
+ except ValueError:
177
+ continue
178
+
179
+ cur_depth = local_overrides.get(name, (-1,))[0]
180
+
181
+ if cur_depth < depth:
182
+ local_overrides[name] = (depth, value)
183
+
184
+ # Modify the context with any derived context.
185
+ for name, (_, value) in local_overrides.items():
186
+ if value is missing:
187
+ data.pop(name, None)
188
+ else:
189
+ data[name] = value
190
+
191
+ return data
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/defaults.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import typing as t
2
+
3
+ from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401
4
+ from .tests import TESTS as DEFAULT_TESTS # noqa: F401
5
+ from .utils import Cycler
6
+ from .utils import generate_lorem_ipsum
7
+ from .utils import Joiner
8
+ from .utils import Namespace
9
+
10
+ if t.TYPE_CHECKING:
11
+ import typing_extensions as te
12
+
13
+ # defaults for the parser / lexer
14
+ BLOCK_START_STRING = "{%"
15
+ BLOCK_END_STRING = "%}"
16
+ VARIABLE_START_STRING = "{{"
17
+ VARIABLE_END_STRING = "}}"
18
+ COMMENT_START_STRING = "{#"
19
+ COMMENT_END_STRING = "#}"
20
+ LINE_STATEMENT_PREFIX: t.Optional[str] = None
21
+ LINE_COMMENT_PREFIX: t.Optional[str] = None
22
+ TRIM_BLOCKS = False
23
+ LSTRIP_BLOCKS = False
24
+ NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n"
25
+ KEEP_TRAILING_NEWLINE = False
26
+
27
+ # default filters, tests and namespace
28
+
29
+ DEFAULT_NAMESPACE = {
30
+ "range": range,
31
+ "dict": dict,
32
+ "lipsum": generate_lorem_ipsum,
33
+ "cycler": Cycler,
34
+ "joiner": Joiner,
35
+ "namespace": Namespace,
36
+ }
37
+
38
+ # default policies
39
+ DEFAULT_POLICIES: t.Dict[str, t.Any] = {
40
+ "compiler.ascii_str": True,
41
+ "urlize.rel": "noopener",
42
+ "urlize.target": None,
43
+ "urlize.extra_schemes": None,
44
+ "truncate.leeway": 5,
45
+ "json.dumps_function": None,
46
+ "json.dumps_kwargs": {"sort_keys": True},
47
+ "ext.i18n.trimmed": False,
48
+ }
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/exceptions.py ADDED
@@ -0,0 +1,166 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import typing as t
2
+
3
+ if t.TYPE_CHECKING:
4
+ from .runtime import Undefined
5
+
6
+
7
+ class TemplateError(Exception):
8
+ """Baseclass for all template errors."""
9
+
10
+ def __init__(self, message: t.Optional[str] = None) -> None:
11
+ super().__init__(message)
12
+
13
+ @property
14
+ def message(self) -> t.Optional[str]:
15
+ return self.args[0] if self.args else None
16
+
17
+
18
+ class TemplateNotFound(IOError, LookupError, TemplateError):
19
+ """Raised if a template does not exist.
20
+
21
+ .. versionchanged:: 2.11
22
+ If the given name is :class:`Undefined` and no message was
23
+ provided, an :exc:`UndefinedError` is raised.
24
+ """
25
+
26
+ # Silence the Python warning about message being deprecated since
27
+ # it's not valid here.
28
+ message: t.Optional[str] = None
29
+
30
+ def __init__(
31
+ self,
32
+ name: t.Optional[t.Union[str, "Undefined"]],
33
+ message: t.Optional[str] = None,
34
+ ) -> None:
35
+ IOError.__init__(self, name)
36
+
37
+ if message is None:
38
+ from .runtime import Undefined
39
+
40
+ if isinstance(name, Undefined):
41
+ name._fail_with_undefined_error()
42
+
43
+ message = name
44
+
45
+ self.message = message
46
+ self.name = name
47
+ self.templates = [name]
48
+
49
+ def __str__(self) -> str:
50
+ return str(self.message)
51
+
52
+
53
+ class TemplatesNotFound(TemplateNotFound):
54
+ """Like :class:`TemplateNotFound` but raised if multiple templates
55
+ are selected. This is a subclass of :class:`TemplateNotFound`
56
+ exception, so just catching the base exception will catch both.
57
+
58
+ .. versionchanged:: 2.11
59
+ If a name in the list of names is :class:`Undefined`, a message
60
+ about it being undefined is shown rather than the empty string.
61
+
62
+ .. versionadded:: 2.2
63
+ """
64
+
65
+ def __init__(
66
+ self,
67
+ names: t.Sequence[t.Union[str, "Undefined"]] = (),
68
+ message: t.Optional[str] = None,
69
+ ) -> None:
70
+ if message is None:
71
+ from .runtime import Undefined
72
+
73
+ parts = []
74
+
75
+ for name in names:
76
+ if isinstance(name, Undefined):
77
+ parts.append(name._undefined_message)
78
+ else:
79
+ parts.append(name)
80
+
81
+ parts_str = ", ".join(map(str, parts))
82
+ message = f"none of the templates given were found: {parts_str}"
83
+
84
+ super().__init__(names[-1] if names else None, message)
85
+ self.templates = list(names)
86
+
87
+
88
+ class TemplateSyntaxError(TemplateError):
89
+ """Raised to tell the user that there is a problem with the template."""
90
+
91
+ def __init__(
92
+ self,
93
+ message: str,
94
+ lineno: int,
95
+ name: t.Optional[str] = None,
96
+ filename: t.Optional[str] = None,
97
+ ) -> None:
98
+ super().__init__(message)
99
+ self.lineno = lineno
100
+ self.name = name
101
+ self.filename = filename
102
+ self.source: t.Optional[str] = None
103
+
104
+ # this is set to True if the debug.translate_syntax_error
105
+ # function translated the syntax error into a new traceback
106
+ self.translated = False
107
+
108
+ def __str__(self) -> str:
109
+ # for translated errors we only return the message
110
+ if self.translated:
111
+ return t.cast(str, self.message)
112
+
113
+ # otherwise attach some stuff
114
+ location = f"line {self.lineno}"
115
+ name = self.filename or self.name
116
+ if name:
117
+ location = f'File "{name}", {location}'
118
+ lines = [t.cast(str, self.message), " " + location]
119
+
120
+ # if the source is set, add the line to the output
121
+ if self.source is not None:
122
+ try:
123
+ line = self.source.splitlines()[self.lineno - 1]
124
+ except IndexError:
125
+ pass
126
+ else:
127
+ lines.append(" " + line.strip())
128
+
129
+ return "\n".join(lines)
130
+
131
+ def __reduce__(self): # type: ignore
132
+ # https://bugs.python.org/issue1692335 Exceptions that take
133
+ # multiple required arguments have problems with pickling.
134
+ # Without this, raises TypeError: __init__() missing 1 required
135
+ # positional argument: 'lineno'
136
+ return self.__class__, (self.message, self.lineno, self.name, self.filename)
137
+
138
+
139
+ class TemplateAssertionError(TemplateSyntaxError):
140
+ """Like a template syntax error, but covers cases where something in the
141
+ template caused an error at compile time that wasn't necessarily caused
142
+ by a syntax error. However it's a direct subclass of
143
+ :exc:`TemplateSyntaxError` and has the same attributes.
144
+ """
145
+
146
+
147
+ class TemplateRuntimeError(TemplateError):
148
+ """A generic runtime error in the template engine. Under some situations
149
+ Jinja may raise this exception.
150
+ """
151
+
152
+
153
+ class UndefinedError(TemplateRuntimeError):
154
+ """Raised if a template tries to operate on :class:`Undefined`."""
155
+
156
+
157
+ class SecurityError(TemplateRuntimeError):
158
+ """Raised if a template tries to do something insecure if the
159
+ sandbox is enabled.
160
+ """
161
+
162
+
163
+ class FilterArgumentError(TemplateRuntimeError):
164
+ """This error is raised if a filter was called with inappropriate
165
+ arguments
166
+ """
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/filters.py ADDED
@@ -0,0 +1,1854 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Built-in template filters used with the ``|`` operator."""
2
+ import math
3
+ import random
4
+ import re
5
+ import typing
6
+ import typing as t
7
+ from collections import abc
8
+ from itertools import chain
9
+ from itertools import groupby
10
+
11
+ from markupsafe import escape
12
+ from markupsafe import Markup
13
+ from markupsafe import soft_str
14
+
15
+ from .async_utils import async_variant
16
+ from .async_utils import auto_aiter
17
+ from .async_utils import auto_await
18
+ from .async_utils import auto_to_list
19
+ from .exceptions import FilterArgumentError
20
+ from .runtime import Undefined
21
+ from .utils import htmlsafe_json_dumps
22
+ from .utils import pass_context
23
+ from .utils import pass_environment
24
+ from .utils import pass_eval_context
25
+ from .utils import pformat
26
+ from .utils import url_quote
27
+ from .utils import urlize
28
+
29
+ if t.TYPE_CHECKING:
30
+ import typing_extensions as te
31
+ from .environment import Environment
32
+ from .nodes import EvalContext
33
+ from .runtime import Context
34
+ from .sandbox import SandboxedEnvironment # noqa: F401
35
+
36
+ class HasHTML(te.Protocol):
37
+ def __html__(self) -> str:
38
+ pass
39
+
40
+
41
+ F = t.TypeVar("F", bound=t.Callable[..., t.Any])
42
+ K = t.TypeVar("K")
43
+ V = t.TypeVar("V")
44
+
45
+
46
+ def ignore_case(value: V) -> V:
47
+ """For use as a postprocessor for :func:`make_attrgetter`. Converts strings
48
+ to lowercase and returns other types as-is."""
49
+ if isinstance(value, str):
50
+ return t.cast(V, value.lower())
51
+
52
+ return value
53
+
54
+
55
+ def make_attrgetter(
56
+ environment: "Environment",
57
+ attribute: t.Optional[t.Union[str, int]],
58
+ postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None,
59
+ default: t.Optional[t.Any] = None,
60
+ ) -> t.Callable[[t.Any], t.Any]:
61
+ """Returns a callable that looks up the given attribute from a
62
+ passed object with the rules of the environment. Dots are allowed
63
+ to access attributes of attributes. Integer parts in paths are
64
+ looked up as integers.
65
+ """
66
+ parts = _prepare_attribute_parts(attribute)
67
+
68
+ def attrgetter(item: t.Any) -> t.Any:
69
+ for part in parts:
70
+ item = environment.getitem(item, part)
71
+
72
+ if default is not None and isinstance(item, Undefined):
73
+ item = default
74
+
75
+ if postprocess is not None:
76
+ item = postprocess(item)
77
+
78
+ return item
79
+
80
+ return attrgetter
81
+
82
+
83
+ def make_multi_attrgetter(
84
+ environment: "Environment",
85
+ attribute: t.Optional[t.Union[str, int]],
86
+ postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None,
87
+ ) -> t.Callable[[t.Any], t.List[t.Any]]:
88
+ """Returns a callable that looks up the given comma separated
89
+ attributes from a passed object with the rules of the environment.
90
+ Dots are allowed to access attributes of each attribute. Integer
91
+ parts in paths are looked up as integers.
92
+
93
+ The value returned by the returned callable is a list of extracted
94
+ attribute values.
95
+
96
+ Examples of attribute: "attr1,attr2", "attr1.inner1.0,attr2.inner2.0", etc.
97
+ """
98
+ if isinstance(attribute, str):
99
+ split: t.Sequence[t.Union[str, int, None]] = attribute.split(",")
100
+ else:
101
+ split = [attribute]
102
+
103
+ parts = [_prepare_attribute_parts(item) for item in split]
104
+
105
+ def attrgetter(item: t.Any) -> t.List[t.Any]:
106
+ items = [None] * len(parts)
107
+
108
+ for i, attribute_part in enumerate(parts):
109
+ item_i = item
110
+
111
+ for part in attribute_part:
112
+ item_i = environment.getitem(item_i, part)
113
+
114
+ if postprocess is not None:
115
+ item_i = postprocess(item_i)
116
+
117
+ items[i] = item_i
118
+
119
+ return items
120
+
121
+ return attrgetter
122
+
123
+
124
+ def _prepare_attribute_parts(
125
+ attr: t.Optional[t.Union[str, int]]
126
+ ) -> t.List[t.Union[str, int]]:
127
+ if attr is None:
128
+ return []
129
+
130
+ if isinstance(attr, str):
131
+ return [int(x) if x.isdigit() else x for x in attr.split(".")]
132
+
133
+ return [attr]
134
+
135
+
136
+ def do_forceescape(value: "t.Union[str, HasHTML]") -> Markup:
137
+ """Enforce HTML escaping. This will probably double escape variables."""
138
+ if hasattr(value, "__html__"):
139
+ value = t.cast("HasHTML", value).__html__()
140
+
141
+ return escape(str(value))
142
+
143
+
144
+ def do_urlencode(
145
+ value: t.Union[str, t.Mapping[str, t.Any], t.Iterable[t.Tuple[str, t.Any]]]
146
+ ) -> str:
147
+ """Quote data for use in a URL path or query using UTF-8.
148
+
149
+ Basic wrapper around :func:`urllib.parse.quote` when given a
150
+ string, or :func:`urllib.parse.urlencode` for a dict or iterable.
151
+
152
+ :param value: Data to quote. A string will be quoted directly. A
153
+ dict or iterable of ``(key, value)`` pairs will be joined as a
154
+ query string.
155
+
156
+ When given a string, "/" is not quoted. HTTP servers treat "/" and
157
+ "%2F" equivalently in paths. If you need quoted slashes, use the
158
+ ``|replace("/", "%2F")`` filter.
159
+
160
+ .. versionadded:: 2.7
161
+ """
162
+ if isinstance(value, str) or not isinstance(value, abc.Iterable):
163
+ return url_quote(value)
164
+
165
+ if isinstance(value, dict):
166
+ items: t.Iterable[t.Tuple[str, t.Any]] = value.items()
167
+ else:
168
+ items = value # type: ignore
169
+
170
+ return "&".join(
171
+ f"{url_quote(k, for_qs=True)}={url_quote(v, for_qs=True)}" for k, v in items
172
+ )
173
+
174
+
175
+ @pass_eval_context
176
+ def do_replace(
177
+ eval_ctx: "EvalContext", s: str, old: str, new: str, count: t.Optional[int] = None
178
+ ) -> str:
179
+ """Return a copy of the value with all occurrences of a substring
180
+ replaced with a new one. The first argument is the substring
181
+ that should be replaced, the second is the replacement string.
182
+ If the optional third argument ``count`` is given, only the first
183
+ ``count`` occurrences are replaced:
184
+
185
+ .. sourcecode:: jinja
186
+
187
+ {{ "Hello World"|replace("Hello", "Goodbye") }}
188
+ -> Goodbye World
189
+
190
+ {{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
191
+ -> d'oh, d'oh, aaargh
192
+ """
193
+ if count is None:
194
+ count = -1
195
+
196
+ if not eval_ctx.autoescape:
197
+ return str(s).replace(str(old), str(new), count)
198
+
199
+ if (
200
+ hasattr(old, "__html__")
201
+ or hasattr(new, "__html__")
202
+ and not hasattr(s, "__html__")
203
+ ):
204
+ s = escape(s)
205
+ else:
206
+ s = soft_str(s)
207
+
208
+ return s.replace(soft_str(old), soft_str(new), count)
209
+
210
+
211
+ def do_upper(s: str) -> str:
212
+ """Convert a value to uppercase."""
213
+ return soft_str(s).upper()
214
+
215
+
216
+ def do_lower(s: str) -> str:
217
+ """Convert a value to lowercase."""
218
+ return soft_str(s).lower()
219
+
220
+
221
+ def do_items(value: t.Union[t.Mapping[K, V], Undefined]) -> t.Iterator[t.Tuple[K, V]]:
222
+ """Return an iterator over the ``(key, value)`` items of a mapping.
223
+
224
+ ``x|items`` is the same as ``x.items()``, except if ``x`` is
225
+ undefined an empty iterator is returned.
226
+
227
+ This filter is useful if you expect the template to be rendered with
228
+ an implementation of Jinja in another programming language that does
229
+ not have a ``.items()`` method on its mapping type.
230
+
231
+ .. code-block:: html+jinja
232
+
233
+ <dl>
234
+ {% for key, value in my_dict|items %}
235
+ <dt>{{ key }}
236
+ <dd>{{ value }}
237
+ {% endfor %}
238
+ </dl>
239
+
240
+ .. versionadded:: 3.1
241
+ """
242
+ if isinstance(value, Undefined):
243
+ return
244
+
245
+ if not isinstance(value, abc.Mapping):
246
+ raise TypeError("Can only get item pairs from a mapping.")
247
+
248
+ yield from value.items()
249
+
250
+
251
+ _space_re = re.compile(r"\s", flags=re.ASCII)
252
+
253
+
254
+ @pass_eval_context
255
+ def do_xmlattr(
256
+ eval_ctx: "EvalContext", d: t.Mapping[str, t.Any], autospace: bool = True
257
+ ) -> str:
258
+ """Create an SGML/XML attribute string based on the items in a dict.
259
+
260
+ If any key contains a space, this fails with a ``ValueError``. Values that
261
+ are neither ``none`` nor ``undefined`` are automatically escaped.
262
+
263
+ .. sourcecode:: html+jinja
264
+
265
+ <ul{{ {'class': 'my_list', 'missing': none,
266
+ 'id': 'list-%d'|format(variable)}|xmlattr }}>
267
+ ...
268
+ </ul>
269
+
270
+ Results in something like this:
271
+
272
+ .. sourcecode:: html
273
+
274
+ <ul class="my_list" id="list-42">
275
+ ...
276
+ </ul>
277
+
278
+ As you can see it automatically prepends a space in front of the item
279
+ if the filter returned something unless the second parameter is false.
280
+
281
+ .. versionchanged:: 3.1.3
282
+ Keys with spaces are not allowed.
283
+ """
284
+ items = []
285
+
286
+ for key, value in d.items():
287
+ if value is None or isinstance(value, Undefined):
288
+ continue
289
+
290
+ if _space_re.search(key) is not None:
291
+ raise ValueError(f"Spaces are not allowed in attributes: '{key}'")
292
+
293
+ items.append(f'{escape(key)}="{escape(value)}"')
294
+
295
+ rv = " ".join(items)
296
+
297
+ if autospace and rv:
298
+ rv = " " + rv
299
+
300
+ if eval_ctx.autoescape:
301
+ rv = Markup(rv)
302
+
303
+ return rv
304
+
305
+
306
+ def do_capitalize(s: str) -> str:
307
+ """Capitalize a value. The first character will be uppercase, all others
308
+ lowercase.
309
+ """
310
+ return soft_str(s).capitalize()
311
+
312
+
313
+ _word_beginning_split_re = re.compile(r"([-\s({\[<]+)")
314
+
315
+
316
+ def do_title(s: str) -> str:
317
+ """Return a titlecased version of the value. I.e. words will start with
318
+ uppercase letters, all remaining characters are lowercase.
319
+ """
320
+ return "".join(
321
+ [
322
+ item[0].upper() + item[1:].lower()
323
+ for item in _word_beginning_split_re.split(soft_str(s))
324
+ if item
325
+ ]
326
+ )
327
+
328
+
329
+ def do_dictsort(
330
+ value: t.Mapping[K, V],
331
+ case_sensitive: bool = False,
332
+ by: 'te.Literal["key", "value"]' = "key",
333
+ reverse: bool = False,
334
+ ) -> t.List[t.Tuple[K, V]]:
335
+ """Sort a dict and yield (key, value) pairs. Python dicts may not
336
+ be in the order you want to display them in, so sort them first.
337
+
338
+ .. sourcecode:: jinja
339
+
340
+ {% for key, value in mydict|dictsort %}
341
+ sort the dict by key, case insensitive
342
+
343
+ {% for key, value in mydict|dictsort(reverse=true) %}
344
+ sort the dict by key, case insensitive, reverse order
345
+
346
+ {% for key, value in mydict|dictsort(true) %}
347
+ sort the dict by key, case sensitive
348
+
349
+ {% for key, value in mydict|dictsort(false, 'value') %}
350
+ sort the dict by value, case insensitive
351
+ """
352
+ if by == "key":
353
+ pos = 0
354
+ elif by == "value":
355
+ pos = 1
356
+ else:
357
+ raise FilterArgumentError('You can only sort by either "key" or "value"')
358
+
359
+ def sort_func(item: t.Tuple[t.Any, t.Any]) -> t.Any:
360
+ value = item[pos]
361
+
362
+ if not case_sensitive:
363
+ value = ignore_case(value)
364
+
365
+ return value
366
+
367
+ return sorted(value.items(), key=sort_func, reverse=reverse)
368
+
369
+
370
+ @pass_environment
371
+ def do_sort(
372
+ environment: "Environment",
373
+ value: "t.Iterable[V]",
374
+ reverse: bool = False,
375
+ case_sensitive: bool = False,
376
+ attribute: t.Optional[t.Union[str, int]] = None,
377
+ ) -> "t.List[V]":
378
+ """Sort an iterable using Python's :func:`sorted`.
379
+
380
+ .. sourcecode:: jinja
381
+
382
+ {% for city in cities|sort %}
383
+ ...
384
+ {% endfor %}
385
+
386
+ :param reverse: Sort descending instead of ascending.
387
+ :param case_sensitive: When sorting strings, sort upper and lower
388
+ case separately.
389
+ :param attribute: When sorting objects or dicts, an attribute or
390
+ key to sort by. Can use dot notation like ``"address.city"``.
391
+ Can be a list of attributes like ``"age,name"``.
392
+
393
+ The sort is stable, it does not change the relative order of
394
+ elements that compare equal. This makes it is possible to chain
395
+ sorts on different attributes and ordering.
396
+
397
+ .. sourcecode:: jinja
398
+
399
+ {% for user in users|sort(attribute="name")
400
+ |sort(reverse=true, attribute="age") %}
401
+ ...
402
+ {% endfor %}
403
+
404
+ As a shortcut to chaining when the direction is the same for all
405
+ attributes, pass a comma separate list of attributes.
406
+
407
+ .. sourcecode:: jinja
408
+
409
+ {% for user in users|sort(attribute="age,name") %}
410
+ ...
411
+ {% endfor %}
412
+
413
+ .. versionchanged:: 2.11.0
414
+ The ``attribute`` parameter can be a comma separated list of
415
+ attributes, e.g. ``"age,name"``.
416
+
417
+ .. versionchanged:: 2.6
418
+ The ``attribute`` parameter was added.
419
+ """
420
+ key_func = make_multi_attrgetter(
421
+ environment, attribute, postprocess=ignore_case if not case_sensitive else None
422
+ )
423
+ return sorted(value, key=key_func, reverse=reverse)
424
+
425
+
426
+ @pass_environment
427
+ def do_unique(
428
+ environment: "Environment",
429
+ value: "t.Iterable[V]",
430
+ case_sensitive: bool = False,
431
+ attribute: t.Optional[t.Union[str, int]] = None,
432
+ ) -> "t.Iterator[V]":
433
+ """Returns a list of unique items from the given iterable.
434
+
435
+ .. sourcecode:: jinja
436
+
437
+ {{ ['foo', 'bar', 'foobar', 'FooBar']|unique|list }}
438
+ -> ['foo', 'bar', 'foobar']
439
+
440
+ The unique items are yielded in the same order as their first occurrence in
441
+ the iterable passed to the filter.
442
+
443
+ :param case_sensitive: Treat upper and lower case strings as distinct.
444
+ :param attribute: Filter objects with unique values for this attribute.
445
+ """
446
+ getter = make_attrgetter(
447
+ environment, attribute, postprocess=ignore_case if not case_sensitive else None
448
+ )
449
+ seen = set()
450
+
451
+ for item in value:
452
+ key = getter(item)
453
+
454
+ if key not in seen:
455
+ seen.add(key)
456
+ yield item
457
+
458
+
459
+ def _min_or_max(
460
+ environment: "Environment",
461
+ value: "t.Iterable[V]",
462
+ func: "t.Callable[..., V]",
463
+ case_sensitive: bool,
464
+ attribute: t.Optional[t.Union[str, int]],
465
+ ) -> "t.Union[V, Undefined]":
466
+ it = iter(value)
467
+
468
+ try:
469
+ first = next(it)
470
+ except StopIteration:
471
+ return environment.undefined("No aggregated item, sequence was empty.")
472
+
473
+ key_func = make_attrgetter(
474
+ environment, attribute, postprocess=ignore_case if not case_sensitive else None
475
+ )
476
+ return func(chain([first], it), key=key_func)
477
+
478
+
479
+ @pass_environment
480
+ def do_min(
481
+ environment: "Environment",
482
+ value: "t.Iterable[V]",
483
+ case_sensitive: bool = False,
484
+ attribute: t.Optional[t.Union[str, int]] = None,
485
+ ) -> "t.Union[V, Undefined]":
486
+ """Return the smallest item from the sequence.
487
+
488
+ .. sourcecode:: jinja
489
+
490
+ {{ [1, 2, 3]|min }}
491
+ -> 1
492
+
493
+ :param case_sensitive: Treat upper and lower case strings as distinct.
494
+ :param attribute: Get the object with the min value of this attribute.
495
+ """
496
+ return _min_or_max(environment, value, min, case_sensitive, attribute)
497
+
498
+
499
+ @pass_environment
500
+ def do_max(
501
+ environment: "Environment",
502
+ value: "t.Iterable[V]",
503
+ case_sensitive: bool = False,
504
+ attribute: t.Optional[t.Union[str, int]] = None,
505
+ ) -> "t.Union[V, Undefined]":
506
+ """Return the largest item from the sequence.
507
+
508
+ .. sourcecode:: jinja
509
+
510
+ {{ [1, 2, 3]|max }}
511
+ -> 3
512
+
513
+ :param case_sensitive: Treat upper and lower case strings as distinct.
514
+ :param attribute: Get the object with the max value of this attribute.
515
+ """
516
+ return _min_or_max(environment, value, max, case_sensitive, attribute)
517
+
518
+
519
+ def do_default(
520
+ value: V,
521
+ default_value: V = "", # type: ignore
522
+ boolean: bool = False,
523
+ ) -> V:
524
+ """If the value is undefined it will return the passed default value,
525
+ otherwise the value of the variable:
526
+
527
+ .. sourcecode:: jinja
528
+
529
+ {{ my_variable|default('my_variable is not defined') }}
530
+
531
+ This will output the value of ``my_variable`` if the variable was
532
+ defined, otherwise ``'my_variable is not defined'``. If you want
533
+ to use default with variables that evaluate to false you have to
534
+ set the second parameter to `true`:
535
+
536
+ .. sourcecode:: jinja
537
+
538
+ {{ ''|default('the string was empty', true) }}
539
+
540
+ .. versionchanged:: 2.11
541
+ It's now possible to configure the :class:`~jinja2.Environment` with
542
+ :class:`~jinja2.ChainableUndefined` to make the `default` filter work
543
+ on nested elements and attributes that may contain undefined values
544
+ in the chain without getting an :exc:`~jinja2.UndefinedError`.
545
+ """
546
+ if isinstance(value, Undefined) or (boolean and not value):
547
+ return default_value
548
+
549
+ return value
550
+
551
+
552
+ @pass_eval_context
553
+ def sync_do_join(
554
+ eval_ctx: "EvalContext",
555
+ value: t.Iterable,
556
+ d: str = "",
557
+ attribute: t.Optional[t.Union[str, int]] = None,
558
+ ) -> str:
559
+ """Return a string which is the concatenation of the strings in the
560
+ sequence. The separator between elements is an empty string per
561
+ default, you can define it with the optional parameter:
562
+
563
+ .. sourcecode:: jinja
564
+
565
+ {{ [1, 2, 3]|join('|') }}
566
+ -> 1|2|3
567
+
568
+ {{ [1, 2, 3]|join }}
569
+ -> 123
570
+
571
+ It is also possible to join certain attributes of an object:
572
+
573
+ .. sourcecode:: jinja
574
+
575
+ {{ users|join(', ', attribute='username') }}
576
+
577
+ .. versionadded:: 2.6
578
+ The `attribute` parameter was added.
579
+ """
580
+ if attribute is not None:
581
+ value = map(make_attrgetter(eval_ctx.environment, attribute), value)
582
+
583
+ # no automatic escaping? joining is a lot easier then
584
+ if not eval_ctx.autoescape:
585
+ return str(d).join(map(str, value))
586
+
587
+ # if the delimiter doesn't have an html representation we check
588
+ # if any of the items has. If yes we do a coercion to Markup
589
+ if not hasattr(d, "__html__"):
590
+ value = list(value)
591
+ do_escape = False
592
+
593
+ for idx, item in enumerate(value):
594
+ if hasattr(item, "__html__"):
595
+ do_escape = True
596
+ else:
597
+ value[idx] = str(item)
598
+
599
+ if do_escape:
600
+ d = escape(d)
601
+ else:
602
+ d = str(d)
603
+
604
+ return d.join(value)
605
+
606
+ # no html involved, to normal joining
607
+ return soft_str(d).join(map(soft_str, value))
608
+
609
+
610
+ @async_variant(sync_do_join) # type: ignore
611
+ async def do_join(
612
+ eval_ctx: "EvalContext",
613
+ value: t.Union[t.AsyncIterable, t.Iterable],
614
+ d: str = "",
615
+ attribute: t.Optional[t.Union[str, int]] = None,
616
+ ) -> str:
617
+ return sync_do_join(eval_ctx, await auto_to_list(value), d, attribute)
618
+
619
+
620
+ def do_center(value: str, width: int = 80) -> str:
621
+ """Centers the value in a field of a given width."""
622
+ return soft_str(value).center(width)
623
+
624
+
625
+ @pass_environment
626
+ def sync_do_first(
627
+ environment: "Environment", seq: "t.Iterable[V]"
628
+ ) -> "t.Union[V, Undefined]":
629
+ """Return the first item of a sequence."""
630
+ try:
631
+ return next(iter(seq))
632
+ except StopIteration:
633
+ return environment.undefined("No first item, sequence was empty.")
634
+
635
+
636
+ @async_variant(sync_do_first) # type: ignore
637
+ async def do_first(
638
+ environment: "Environment", seq: "t.Union[t.AsyncIterable[V], t.Iterable[V]]"
639
+ ) -> "t.Union[V, Undefined]":
640
+ try:
641
+ return await auto_aiter(seq).__anext__()
642
+ except StopAsyncIteration:
643
+ return environment.undefined("No first item, sequence was empty.")
644
+
645
+
646
+ @pass_environment
647
+ def do_last(
648
+ environment: "Environment", seq: "t.Reversible[V]"
649
+ ) -> "t.Union[V, Undefined]":
650
+ """Return the last item of a sequence.
651
+
652
+ Note: Does not work with generators. You may want to explicitly
653
+ convert it to a list:
654
+
655
+ .. sourcecode:: jinja
656
+
657
+ {{ data | selectattr('name', '==', 'Jinja') | list | last }}
658
+ """
659
+ try:
660
+ return next(iter(reversed(seq)))
661
+ except StopIteration:
662
+ return environment.undefined("No last item, sequence was empty.")
663
+
664
+
665
+ # No async do_last, it may not be safe in async mode.
666
+
667
+
668
+ @pass_context
669
+ def do_random(context: "Context", seq: "t.Sequence[V]") -> "t.Union[V, Undefined]":
670
+ """Return a random item from the sequence."""
671
+ try:
672
+ return random.choice(seq)
673
+ except IndexError:
674
+ return context.environment.undefined("No random item, sequence was empty.")
675
+
676
+
677
def do_filesizeformat(value: t.Union[str, float, int], binary: bool = False) -> str:
    """Format the value like a 'human-readable' file size (i.e. 13 kB,
    4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
    Giga, etc.), if the second parameter is set to `True` the binary
    prefixes are used (Mebi, Gibi).
    """
    size = float(value)
    base = 1024 if binary else 1000

    if binary:
        prefixes = ["KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]
    else:
        prefixes = ["kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]

    if size == 1:
        return "1 Byte"

    if size < base:
        return f"{int(size)} Bytes"

    # Find the first unit the value fits under; if it exceeds even the
    # largest prefix, the loop falls through with the last unit/prefix.
    for exponent, prefix in enumerate(prefixes, start=2):
        unit = base**exponent

        if size < unit:
            break

    return f"{base * size / unit:.1f} {prefix}"
708
+
709
+
710
def do_pprint(value: t.Any) -> str:
    """Pretty print a variable. Useful for debugging."""
    # Delegate straight to :func:`pprint.pformat`.
    return pformat(value)
713
+
714
+
715
+ _uri_scheme_re = re.compile(r"^([\w.+-]{2,}:(/){0,2})$")
716
+
717
+
718
@pass_eval_context
def do_urlize(
    eval_ctx: "EvalContext",
    value: str,
    trim_url_limit: t.Optional[int] = None,
    nofollow: bool = False,
    target: t.Optional[str] = None,
    rel: t.Optional[str] = None,
    extra_schemes: t.Optional[t.Iterable[str]] = None,
) -> str:
    """Convert URLs in text into clickable links.

    This may not recognize links in some situations. Usually, a more
    comprehensive formatter, such as a Markdown library, is a better
    choice.

    Works on ``http://``, ``https://``, ``www.``, ``mailto:``, and email
    addresses. Links with trailing punctuation (periods, commas, closing
    parentheses) and leading punctuation (opening parentheses) are
    recognized excluding the punctuation. Email addresses that include
    header fields are not recognized (for example,
    ``mailto:address@example.com?cc=copy@example.com``).

    :param value: Original text containing URLs to link.
    :param trim_url_limit: Shorten displayed URL values to this length.
    :param nofollow: Add the ``rel=nofollow`` attribute to links.
    :param target: Add the ``target`` attribute to links.
    :param rel: Add the ``rel`` attribute to links.
    :param extra_schemes: Recognize URLs that start with these schemes
        in addition to the default behavior. Defaults to
        ``env.policies["urlize.extra_schemes"]``, which defaults to no
        extra schemes.

    .. versionchanged:: 3.0
        The ``extra_schemes`` parameter was added.

    .. versionchanged:: 3.0
        Generate ``https://`` links for URLs without a scheme.

    .. versionchanged:: 3.0
        The parsing rules were updated. Recognize email addresses with
        or without the ``mailto:`` scheme. Validate IP addresses. Ignore
        parentheses and brackets in more cases.

    .. versionchanged:: 2.8
        The ``target`` parameter was added.
    """
    policies = eval_ctx.environment.policies

    # Merge the caller's ``rel``, the ``nofollow`` flag, and the
    # environment policy into one sorted, space-separated attribute value.
    rel_parts = set((rel or "").split())

    if nofollow:
        rel_parts.add("nofollow")

    rel_parts.update((policies["urlize.rel"] or "").split())
    rel = " ".join(sorted(rel_parts)) or None

    if target is None:
        target = policies["urlize.target"]

    if extra_schemes is None:
        extra_schemes = policies["urlize.extra_schemes"] or ()

    # Reject malformed scheme prefixes early with a clear error.
    for scheme in extra_schemes:
        if _uri_scheme_re.fullmatch(scheme) is None:
            raise FilterArgumentError(f"{scheme!r} is not a valid URI scheme prefix.")

    rv = urlize(
        value,
        trim_url_limit=trim_url_limit,
        rel=rel,
        target=target,
        extra_schemes=extra_schemes,
    )

    # Only wrap as Markup when autoescaping is active, so the generated
    # anchor tags are not themselves escaped.
    if eval_ctx.autoescape:
        rv = Markup(rv)

    return rv
796
+
797
+
798
def do_indent(
    s: str, width: t.Union[int, str] = 4, first: bool = False, blank: bool = False
) -> str:
    """Return a copy of the string with each line indented by 4 spaces. The
    first line and blank lines are not indented by default.

    :param width: Number of spaces, or a string, to indent by.
    :param first: Don't skip indenting the first line.
    :param blank: Don't skip indenting empty lines.

    .. versionchanged:: 3.0
        ``width`` can be a string.

    .. versionchanged:: 2.10
        Blank lines are not indented by default.

        Rename the ``indentfirst`` argument to ``first``.
    """
    indention = width if isinstance(width, str) else " " * width
    newline = "\n"

    # If the input is markup-safe, the joined pieces must be too, so the
    # final result keeps its escaping guarantees.
    if isinstance(s, Markup):
        indention = Markup(indention)
        newline = Markup(newline)

    s += newline  # this quirk is necessary for splitlines method

    if blank:
        rv = (newline + indention).join(s.splitlines())
    else:
        lines = s.splitlines()
        rv = lines.pop(0)

        if lines:
            rv += newline + newline.join(
                indention + line if line else line for line in lines
            )

    if first:
        rv = indention + rv

    return rv
844
+
845
+
846
@pass_environment
def do_truncate(
    env: "Environment",
    s: str,
    length: int = 255,
    killwords: bool = False,
    end: str = "...",
    leeway: t.Optional[int] = None,
) -> str:
    """Return a truncated copy of the string. The length is specified
    with the first parameter which defaults to ``255``. If the second
    parameter is ``true`` the filter will cut the text at length. Otherwise
    it will discard the last word. If the text was in fact
    truncated it will append an ellipsis sign (``"..."``). If you want a
    different ellipsis sign than ``"..."`` you can specify it using the
    third parameter. Strings that only exceed the length by the tolerance
    margin given in the fourth parameter will not be truncated.

    .. sourcecode:: jinja

        {{ "foo bar baz qux"|truncate(9) }}
            -> "foo..."
        {{ "foo bar baz qux"|truncate(9, True) }}
            -> "foo ba..."
        {{ "foo bar baz qux"|truncate(11) }}
            -> "foo bar baz qux"
        {{ "foo bar baz qux"|truncate(11, False, '...', 0) }}
            -> "foo bar..."

    The default leeway on newer Jinja versions is 5 and was 0 before but
    can be reconfigured globally.
    """
    if leeway is None:
        leeway = env.policies["truncate.leeway"]

    assert length >= len(end), f"expected length >= {len(end)}, got {length}"
    assert leeway >= 0, f"expected leeway >= 0, got {leeway}"

    # Within the tolerance margin: return the string untouched.
    if len(s) <= length + leeway:
        return s

    # Hard cut: slice so that the ellipsis fits inside ``length``.
    if killwords:
        return s[: length - len(end)] + end

    # Soft cut: drop the (possibly partial) last word before appending.
    return s[: length - len(end)].rsplit(" ", 1)[0] + end
892
+
893
+
894
@pass_environment
def do_wordwrap(
    environment: "Environment",
    s: str,
    width: int = 79,
    break_long_words: bool = True,
    wrapstring: t.Optional[str] = None,
    break_on_hyphens: bool = True,
) -> str:
    """Wrap a string to the given width. Existing newlines are treated
    as paragraphs to be wrapped separately.

    :param s: Original text to wrap.
    :param width: Maximum length of wrapped lines.
    :param break_long_words: If a word is longer than ``width``, break
        it across lines.
    :param break_on_hyphens: If a word contains hyphens, it may be split
        across lines.
    :param wrapstring: String to join each wrapped line. Defaults to
        :attr:`Environment.newline_sequence`.

    .. versionchanged:: 2.11
        Existing newlines are treated as paragraphs wrapped separately.

    .. versionchanged:: 2.11
        Added the ``break_on_hyphens`` parameter.

    .. versionchanged:: 2.7
        Added the ``wrapstring`` parameter.
    """
    import textwrap

    if wrapstring is None:
        wrapstring = environment.newline_sequence

    # textwrap.wrap doesn't consider existing newlines when wrapping.
    # If the string has a newline before width, wrap will still insert
    # a newline at width, resulting in a short line. Instead, split and
    # wrap each paragraph individually.
    wrapped_paragraphs = (
        wrapstring.join(
            textwrap.wrap(
                line,
                width=width,
                expand_tabs=False,
                replace_whitespace=False,
                break_long_words=break_long_words,
                break_on_hyphens=break_on_hyphens,
            )
        )
        for line in s.splitlines()
    )
    return wrapstring.join(wrapped_paragraphs)
948
+
949
+
950
+ _word_re = re.compile(r"\w+")
951
+
952
+
953
def do_wordcount(s: str) -> int:
    """Count the words in that string."""
    # Coerce to text first so Markup and other string-likes behave.
    words = _word_re.findall(soft_str(s))
    return len(words)
956
+
957
+
958
def do_int(value: t.Any, default: int = 0, base: int = 10) -> int:
    """Convert the value into an integer. If the
    conversion doesn't work it will return ``0``. You can
    override this default using the first parameter. You
    can also override the default base (10) in the second
    parameter, which handles input with prefixes such as
    0b, 0o and 0x for bases 2, 8 and 16 respectively.
    The base is ignored for decimal numbers and non-string values.
    """
    try:
        # ``base`` only applies to strings; ``int()`` rejects it otherwise.
        return int(value, base) if isinstance(value, str) else int(value)
    except (TypeError, ValueError):
        # this quirk is necessary so that "42.23"|int gives 42.
        try:
            return int(float(value))
        except (TypeError, ValueError):
            return default
978
+
979
+
980
def do_float(value: t.Any, default: float = 0.0) -> float:
    """Convert the value into a floating point number. If the
    conversion doesn't work it will return ``0.0``. You can
    override this default using the first parameter.
    """
    # EAFP: attempt the conversion and fall back on failure.
    try:
        return float(value)
    except (TypeError, ValueError):
        return default
989
+
990
+
991
def do_format(value: str, *args: t.Any, **kwargs: t.Any) -> str:
    """Apply the given values to a `printf-style`_ format string, like
    ``string % values``.

    .. sourcecode:: jinja

        {{ "%s, %s!"|format(greeting, name) }}
        Hello, World!

    In most cases it should be more convenient and efficient to use the
    ``%`` operator or :meth:`str.format`.

    .. code-block:: text

        {{ "%s, %s!" % (greeting, name) }}
        {{ "{}, {}!".format(greeting, name) }}

    .. _printf-style: https://docs.python.org/library/stdtypes.html
        #printf-style-string-formatting
    """
    # ``%`` takes either a tuple or a mapping, never both at once.
    if args and kwargs:
        raise FilterArgumentError(
            "can't handle positional and keyword arguments at the same time"
        )

    return soft_str(value) % (kwargs or args)
1017
+
1018
+
1019
def do_trim(value: str, chars: t.Optional[str] = None) -> str:
    """Strip leading and trailing characters, by default whitespace."""
    # Coerce first so Markup and similar string-likes work too.
    return soft_str(value).strip(chars)
1022
+
1023
+
1024
def do_striptags(value: "t.Union[str, HasHTML]") -> str:
    """Strip SGML/XML tags and replace adjacent whitespace by one space."""
    # Prefer the object's own HTML representation when it provides one.
    if hasattr(value, "__html__"):
        value = t.cast("HasHTML", value).__html__()

    return Markup(str(value)).striptags()
1030
+
1031
+
1032
def sync_do_slice(
    value: "t.Collection[V]", slices: int, fill_with: "t.Optional[V]" = None
) -> "t.Iterator[t.List[V]]":
    """Slice an iterator and return a list of lists containing
    those items. Useful if you want to create a div containing
    three ul tags that represent columns:

    .. sourcecode:: html+jinja

        <div class="columnwrapper">
        {%- for column in items|slice(3) %}
            <ul class="column-{{ loop.index }}">
            {%- for item in column %}
                <li>{{ item }}</li>
            {%- endfor %}
            </ul>
        {%- endfor %}
        </div>

    If you pass it a second argument it's used to fill missing
    values on the last iteration.
    """
    items = list(value)
    length = len(items)
    # Each slice gets the base amount; the first ``remainder`` slices
    # receive one extra item, tracked via a running offset.
    per_slice = length // slices
    remainder = length % slices
    offset = 0

    for index in range(slices):
        start = offset + index * per_slice

        if index < remainder:
            offset += 1

        end = offset + (index + 1) * per_slice
        chunk = items[start:end]

        # Pad only the shorter (non-extra) slices when a filler is given.
        if fill_with is not None and index >= remainder:
            chunk.append(fill_with)

        yield chunk
1073
+
1074
+
1075
@async_variant(sync_do_slice)  # type: ignore
async def do_slice(
    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
    slices: int,
    fill_with: t.Optional[t.Any] = None,
) -> "t.Iterator[t.List[V]]":
    # Materialize the (possibly async) iterable, then reuse the sync
    # implementation for the actual slicing logic.
    return sync_do_slice(await auto_to_list(value), slices, fill_with)
1082
+
1083
+
1084
def do_batch(
    value: "t.Iterable[V]", linecount: int, fill_with: "t.Optional[V]" = None
) -> "t.Iterator[t.List[V]]":
    """
    A filter that batches items. It works pretty much like `slice`
    just the other way round. It returns a list of lists with the
    given number of items. If you provide a second parameter this
    is used to fill up missing items. See this example:

    .. sourcecode:: html+jinja

        <table>
        {%- for row in items|batch(3, '&nbsp;') %}
          <tr>
          {%- for column in row %}
            <td>{{ column }}</td>
          {%- endfor %}
          </tr>
        {%- endfor %}
        </table>
    """
    bucket: "t.List[V]" = []

    for item in value:
        # Emit a full bucket before starting the next one.
        if len(bucket) == linecount:
            yield bucket
            bucket = []

        bucket.append(item)

    # Flush the final, possibly short, bucket — padded if requested.
    if bucket:
        if fill_with is not None and len(bucket) < linecount:
            bucket += [fill_with] * (linecount - len(bucket))

        yield bucket
1119
+
1120
+
1121
def do_round(
    value: float,
    precision: int = 0,
    method: 'te.Literal["common", "ceil", "floor"]' = "common",
) -> float:
    """Round the number to a given precision. The first
    parameter specifies the precision (default is ``0``), the
    second the rounding method:

    - ``'common'`` rounds either up or down
    - ``'ceil'`` always rounds up
    - ``'floor'`` always rounds down

    If you don't specify a method ``'common'`` is used.

    .. sourcecode:: jinja

        {{ 42.55|round }}
            -> 43.0
        {{ 42.55|round(1, 'floor') }}
            -> 42.5

    Note that even if rounded to 0 precision, a float is returned. If
    you need a real integer, pipe it through `int`:

    .. sourcecode:: jinja

        {{ 42.55|round|int }}
            -> 43
    """
    if method not in {"common", "ceil", "floor"}:
        raise FilterArgumentError("method must be common, ceil or floor")

    if method == "common":
        return round(value, precision)

    # Scale up, apply math.ceil/math.floor, then scale back down.
    rounder = getattr(math, method)
    scale = 10**precision
    return t.cast(float, rounder(value * scale) / scale)
1159
+
1160
+
1161
+ class _GroupTuple(t.NamedTuple):
1162
+ grouper: t.Any
1163
+ list: t.List
1164
+
1165
+ # Use the regular tuple repr to hide this subclass if users print
1166
+ # out the value during debugging.
1167
+ def __repr__(self) -> str:
1168
+ return tuple.__repr__(self)
1169
+
1170
+ def __str__(self) -> str:
1171
+ return tuple.__str__(self)
1172
+
1173
+
1174
@pass_environment
def sync_do_groupby(
    environment: "Environment",
    value: "t.Iterable[V]",
    attribute: t.Union[str, int],
    default: t.Optional[t.Any] = None,
    case_sensitive: bool = False,
) -> "t.List[_GroupTuple]":
    """Group a sequence of objects by an attribute using Python's
    :func:`itertools.groupby`. The attribute can use dot notation for
    nested access, like ``"address.city"``. Unlike Python's ``groupby``,
    the values are sorted first so only one group is returned for each
    unique value.

    For example, a list of ``User`` objects with a ``city`` attribute
    can be rendered in groups. In this example, ``grouper`` refers to
    the ``city`` value of the group.

    .. sourcecode:: html+jinja

        <ul>{% for city, items in users|groupby("city") %}
          <li>{{ city }}
            <ul>{% for user in items %}
              <li>{{ user.name }}
            {% endfor %}</ul>
          </li>
        {% endfor %}</ul>

    ``groupby`` yields namedtuples of ``(grouper, list)``, which
    can be used instead of the tuple unpacking above. ``grouper`` is the
    value of the attribute, and ``list`` is the items with that value.

    .. sourcecode:: html+jinja

        <ul>{% for group in users|groupby("city") %}
          <li>{{ group.grouper }}: {{ group.list|join(", ") }}
        {% endfor %}</ul>

    You can specify a ``default`` value to use if an object in the list
    does not have the given attribute.

    .. sourcecode:: jinja

        <ul>{% for city, items in users|groupby("city", default="NY") %}
          <li>{{ city }}: {{ items|map(attribute="name")|join(", ") }}</li>
        {% endfor %}</ul>

    Like the :func:`~jinja-filters.sort` filter, sorting and grouping is
    case-insensitive by default. The ``key`` for each group will have
    the case of the first item in that group of values. For example, if
    a list of users has cities ``["CA", "NY", "ca"]``, the "CA" group
    will have two values. This can be disabled by passing
    ``case_sensitive=True``.

    .. versionchanged:: 3.1
        Added the ``case_sensitive`` parameter. Sorting and grouping is
        case-insensitive by default, matching other filters that do
        comparisons.

    .. versionchanged:: 3.0
        Added the ``default`` parameter.

    .. versionchanged:: 2.6
        The attribute supports dot notation for nested access.
    """
    # Key extractor used both for sorting and for grouping; lowercases
    # string keys unless case sensitivity was requested.
    expr = make_attrgetter(
        environment,
        attribute,
        postprocess=ignore_case if not case_sensitive else None,
        default=default,
    )
    out = [
        _GroupTuple(key, list(values))
        for key, values in groupby(sorted(value, key=expr), expr)
    ]

    if not case_sensitive:
        # Return the real key from the first value instead of the lowercase key.
        output_expr = make_attrgetter(environment, attribute, default=default)
        out = [_GroupTuple(output_expr(values[0]), values) for _, values in out]

    return out
1256
+
1257
+
1258
@async_variant(sync_do_groupby)  # type: ignore
async def do_groupby(
    environment: "Environment",
    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
    attribute: t.Union[str, int],
    default: t.Optional[t.Any] = None,
    case_sensitive: bool = False,
) -> "t.List[_GroupTuple]":
    # Same algorithm as ``sync_do_groupby``, but the input iterable (and
    # each group) may be asynchronous and is materialized first.
    expr = make_attrgetter(
        environment,
        attribute,
        postprocess=ignore_case if not case_sensitive else None,
        default=default,
    )
    out = [
        _GroupTuple(key, await auto_to_list(values))
        for key, values in groupby(sorted(await auto_to_list(value), key=expr), expr)
    ]

    if not case_sensitive:
        # Return the real key from the first value instead of the lowercase key.
        output_expr = make_attrgetter(environment, attribute, default=default)
        out = [_GroupTuple(output_expr(values[0]), values) for _, values in out]

    return out
1283
+
1284
+
1285
@pass_environment
def sync_do_sum(
    environment: "Environment",
    iterable: "t.Iterable[V]",
    attribute: t.Optional[t.Union[str, int]] = None,
    start: V = 0,  # type: ignore
) -> V:
    """Returns the sum of a sequence of numbers plus the value of parameter
    'start' (which defaults to 0).  When the sequence is empty it returns
    start.

    It is also possible to sum up only certain attributes:

    .. sourcecode:: jinja

        Total: {{ items|sum(attribute='price') }}

    .. versionchanged:: 2.6
       The ``attribute`` parameter was added to allow summing up over
       attributes.  Also the ``start`` parameter was moved on to the right.
    """
    # When an attribute is named, sum the extracted values instead of
    # the items themselves.
    if attribute is not None:
        iterable = map(make_attrgetter(environment, attribute), iterable)

    return sum(iterable, start)  # type: ignore[no-any-return, call-overload]
1310
+
1311
+
1312
@async_variant(sync_do_sum)  # type: ignore
async def do_sum(
    environment: "Environment",
    iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
    attribute: t.Optional[t.Union[str, int]] = None,
    start: V = 0,  # type: ignore
) -> V:
    # Accumulate manually since ``sum`` can't consume an async iterator.
    rv = start

    if attribute is not None:
        func = make_attrgetter(environment, attribute)
    else:

        def func(x: V) -> V:
            # Identity: sum the items themselves.
            return x

    async for item in auto_aiter(iterable):
        rv += func(item)

    return rv
1332
+
1333
+
1334
def sync_do_list(value: "t.Iterable[V]") -> "t.List[V]":
    """Convert the value into a list.  If it was a string the returned list
    will be a list of characters.
    """
    return list(value)
1339
+
1340
+
1341
@async_variant(sync_do_list)  # type: ignore
async def do_list(value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]") -> "t.List[V]":
    # Works for sync and async iterables alike.
    return await auto_to_list(value)
1344
+
1345
+
1346
def do_mark_safe(value: str) -> Markup:
    """Mark the value as safe which means that in an environment with automatic
    escaping enabled this variable will not be escaped.
    """
    return Markup(value)
1351
+
1352
+
1353
def do_mark_unsafe(value: str) -> str:
    """Mark a value as unsafe.  This is the reverse operation for :func:`safe`."""
    # Converting to plain ``str`` drops any Markup wrapper.
    return str(value)
1356
+
1357
+
1358
@typing.overload
def do_reverse(value: str) -> str:
    ...


@typing.overload
def do_reverse(value: "t.Iterable[V]") -> "t.Iterable[V]":
    ...


def do_reverse(value: t.Union[str, t.Iterable[V]]) -> t.Union[str, t.Iterable[V]]:
    """Reverse the object or return an iterator that iterates over it the other
    way round.
    """
    # Strings reverse with a slice.
    if isinstance(value, str):
        return value[::-1]

    # Sequences reverse lazily; other iterables are materialized first.
    try:
        return reversed(value)  # type: ignore
    except TypeError:
        try:
            return list(value)[::-1]
        except TypeError as e:
            raise FilterArgumentError("argument must be iterable") from e
1384
+
1385
+
1386
@pass_environment
def do_attr(
    environment: "Environment", obj: t.Any, name: str
) -> t.Union[Undefined, t.Any]:
    """Get an attribute of an object. ``foo|attr("bar")`` works like
    ``foo.bar`` just that always an attribute is returned and items are not
    looked up.

    See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
    """
    # Normalize the attribute name to text; on failure fall through to
    # the undefined result at the bottom.
    try:
        name = str(name)
    except UnicodeError:
        pass
    else:
        try:
            value = getattr(obj, name)
        except AttributeError:
            pass
        else:
            # In a sandbox, unsafe attributes are replaced by a special
            # undefined object rather than exposed.
            if environment.sandboxed:
                environment = t.cast("SandboxedEnvironment", environment)

                if not environment.is_safe_attribute(obj, name, value):
                    return environment.unsafe_undefined(obj, name)

            return value

    return environment.undefined(obj=obj, name=name)
1415
+
1416
+
1417
@typing.overload
def sync_do_map(
    context: "Context", value: t.Iterable, name: str, *args: t.Any, **kwargs: t.Any
) -> t.Iterable:
    ...


@typing.overload
def sync_do_map(
    context: "Context",
    value: t.Iterable,
    *,
    attribute: str = ...,
    default: t.Optional[t.Any] = None,
) -> t.Iterable:
    ...


@pass_context
def sync_do_map(
    context: "Context", value: t.Iterable, *args: t.Any, **kwargs: t.Any
) -> t.Iterable:
    """Applies a filter on a sequence of objects or looks up an attribute.
    This is useful when dealing with lists of objects but you are really
    only interested in a certain value of it.

    The basic usage is mapping on an attribute. Imagine you have a list
    of users but you are only interested in a list of usernames:

    .. sourcecode:: jinja

        Users on this page: {{ users|map(attribute='username')|join(', ') }}

    You can specify a ``default`` value to use if an object in the list
    does not have the given attribute.

    .. sourcecode:: jinja

        {{ users|map(attribute="username", default="Anonymous")|join(", ") }}

    Alternatively you can let it invoke a filter by passing the name of the
    filter and the arguments afterwards. A good example would be applying a
    text conversion filter on a sequence:

    .. sourcecode:: jinja

        Users on this page: {{ titles|map('lower')|join(', ') }}

    Similar to a generator comprehension such as:

    .. code-block:: python

        (u.username for u in users)
        (getattr(u, "username", "Anonymous") for u in users)
        (do_lower(x) for x in titles)

    .. versionchanged:: 2.11.0
        Added the ``default`` parameter.

    .. versionadded:: 2.7
    """
    # Only build the mapper when there is something to map; an empty or
    # falsy value yields nothing at all.
    if value:
        func = prepare_map(context, args, kwargs)
        yield from map(func, value)
1483
+
1484
+
1485
@typing.overload
def do_map(
    context: "Context",
    value: t.Union[t.AsyncIterable, t.Iterable],
    name: str,
    *args: t.Any,
    **kwargs: t.Any,
) -> t.Iterable:
    ...


@typing.overload
def do_map(
    context: "Context",
    value: t.Union[t.AsyncIterable, t.Iterable],
    *,
    attribute: str = ...,
    default: t.Optional[t.Any] = None,
) -> t.Iterable:
    ...


@async_variant(sync_do_map)  # type: ignore
async def do_map(
    context: "Context",
    value: t.Union[t.AsyncIterable, t.Iterable],
    *args: t.Any,
    **kwargs: t.Any,
) -> t.AsyncIterable:
    # Async mirror of ``sync_do_map``: the mapper result may itself be
    # awaitable, so each item is passed through ``auto_await``.
    if value:
        func = prepare_map(context, args, kwargs)

        async for item in auto_aiter(value):
            yield await auto_await(func(item))
1519
+
1520
+
1521
@pass_context
def sync_do_select(
    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any
) -> "t.Iterator[V]":
    """Filters a sequence of objects by applying a test to each object,
    and only selecting the objects with the test succeeding.

    If no test is specified, each object will be evaluated as a boolean.

    Example usage:

    .. sourcecode:: jinja

        {{ numbers|select("odd") }}
        {{ numbers|select("odd") }}
        {{ numbers|select("divisibleby", 3) }}
        {{ numbers|select("lessthan", 42) }}
        {{ strings|select("equalto", "mystring") }}

    Similar to a generator comprehension such as:

    .. code-block:: python

        (n for n in numbers if test_odd(n))
        (n for n in numbers if test_divisibleby(n, 3))

    .. versionadded:: 2.7
    """
    # Identity modifier (keep truthy test results), no attribute lookup.
    return select_or_reject(context, value, args, kwargs, lambda x: x, False)
1550
+
1551
+
1552
@async_variant(sync_do_select)  # type: ignore
async def do_select(
    context: "Context",
    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
    *args: t.Any,
    **kwargs: t.Any,
) -> "t.AsyncIterator[V]":
    # Async counterpart: identity modifier, no attribute lookup.
    return async_select_or_reject(context, value, args, kwargs, lambda x: x, False)
1560
+
1561
+
1562
@pass_context
def sync_do_reject(
    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any
) -> "t.Iterator[V]":
    """Filters a sequence of objects by applying a test to each object,
    and rejecting the objects with the test succeeding.

    If no test is specified, each object will be evaluated as a boolean.

    Example usage:

    .. sourcecode:: jinja

        {{ numbers|reject("odd") }}

    Similar to a generator comprehension such as:

    .. code-block:: python

        (n for n in numbers if not test_odd(n))

    .. versionadded:: 2.7
    """
    # Negating modifier (keep falsy test results), no attribute lookup.
    return select_or_reject(context, value, args, kwargs, lambda x: not x, False)
1586
+
1587
+
1588
@async_variant(sync_do_reject)  # type: ignore
async def do_reject(
    context: "Context",
    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
    *args: t.Any,
    **kwargs: t.Any,
) -> "t.AsyncIterator[V]":
    # Async counterpart: negating modifier, no attribute lookup.
    return async_select_or_reject(context, value, args, kwargs, lambda x: not x, False)
1596
+
1597
+
1598
@pass_context
def sync_do_selectattr(
    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any
) -> "t.Iterator[V]":
    """Filters a sequence of objects by applying a test to the specified
    attribute of each object, and only selecting the objects with the
    test succeeding.

    If no test is specified, the attribute's value will be evaluated as
    a boolean.

    Example usage:

    .. sourcecode:: jinja

        {{ users|selectattr("is_active") }}
        {{ users|selectattr("email", "none") }}

    Similar to a generator comprehension such as:

    .. code-block:: python

        (u for user in users if user.is_active)
        (u for user in users if test_none(user.email))

    .. versionadded:: 2.7
    """
    # Identity modifier, with attribute lookup enabled.
    return select_or_reject(context, value, args, kwargs, lambda x: x, True)
1626
+
1627
+
1628
@async_variant(sync_do_selectattr)  # type: ignore
async def do_selectattr(
    context: "Context",
    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
    *args: t.Any,
    **kwargs: t.Any,
) -> "t.AsyncIterator[V]":
    # Async counterpart: identity modifier, attribute lookup enabled.
    return async_select_or_reject(context, value, args, kwargs, lambda x: x, True)
1636
+
1637
+
1638
@pass_context
def sync_do_rejectattr(
    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any
) -> "t.Iterator[V]":
    """Filters a sequence of objects by applying a test to the specified
    attribute of each object, and rejecting the objects with the test
    succeeding.

    If no test is specified, the attribute's value will be evaluated as
    a boolean.

    .. sourcecode:: jinja

        {{ users|rejectattr("is_active") }}
        {{ users|rejectattr("email", "none") }}

    Similar to a generator comprehension such as:

    .. code-block:: python

        (u for user in users if not user.is_active)
        (u for user in users if not test_none(user.email))

    .. versionadded:: 2.7
    """
    # Negating modifier, with attribute lookup enabled.
    return select_or_reject(context, value, args, kwargs, lambda x: not x, True)
1664
+
1665
+
1666
@async_variant(sync_do_rejectattr)  # type: ignore
async def do_rejectattr(
    context: "Context",
    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
    *args: t.Any,
    **kwargs: t.Any,
) -> "t.AsyncIterator[V]":
    # Async counterpart: negating modifier, attribute lookup enabled.
    return async_select_or_reject(context, value, args, kwargs, lambda x: not x, True)
1674
+
1675
+
1676
@pass_eval_context
def do_tojson(
    eval_ctx: "EvalContext", value: t.Any, indent: t.Optional[int] = None
) -> Markup:
    """Serialize an object to a string of JSON, and mark it safe to
    render in HTML. This filter is only for use in HTML documents.

    The returned string is safe to render in HTML documents and
    ``<script>`` tags. The exception is in HTML attributes that are
    double quoted; either use single quotes or the ``|forceescape``
    filter.

    :param value: The object to serialize to JSON.
    :param indent: The ``indent`` parameter passed to ``dumps``, for
        pretty-printing the value.

    .. versionadded:: 2.9
    """
    policies = eval_ctx.environment.policies
    dumps = policies["json.dumps_function"]
    kwargs = policies["json.dumps_kwargs"]

    # Never mutate the shared policy dict; merge the indent into a copy.
    if indent is not None:
        kwargs = {**kwargs, "indent": indent}

    return htmlsafe_json_dumps(value, dumps=dumps, **kwargs)
1703
+
1704
+
1705
def prepare_map(
    context: "Context", args: t.Tuple, kwargs: t.Dict[str, t.Any]
) -> t.Callable[[t.Any], t.Any]:
    """Build the per-item callable used by the ``map`` filter.

    With an ``attribute`` keyword (and no positional args) the callable
    is an attribute getter; otherwise the first positional argument
    names a filter that is applied to each item.
    """
    if not args and "attribute" in kwargs:
        attribute = kwargs.pop("attribute")
        default = kwargs.pop("default", None)

        if kwargs:
            extra = next(iter(kwargs))
            raise FilterArgumentError(f"Unexpected keyword argument {extra!r}")

        return make_attrgetter(context.environment, attribute, default=default)

    try:
        # RHS is fully evaluated first, so an empty tuple raises before
        # anything is rebound.
        name, args = args[0], args[1:]
    except LookupError:
        raise FilterArgumentError("map requires a filter argument") from None

    def apply_filter(item: t.Any) -> t.Any:
        return context.environment.call_filter(
            name, item, args, kwargs, context=context
        )

    return apply_filter
1731
+
1732
+
1733
def prepare_select_or_reject(
    context: "Context",
    args: t.Tuple,
    kwargs: t.Dict[str, t.Any],
    modfunc: t.Callable[[t.Any], t.Any],
    lookup_attr: bool,
) -> t.Callable[[t.Any], t.Any]:
    """Build the predicate shared by select/reject/selectattr/rejectattr.

    The returned callable computes ``modfunc(test(getter(item)))`` where
    ``getter`` is an attrgetter when ``lookup_attr`` is set, and ``test``
    falls back to ``bool`` when no test name was supplied.
    """
    if lookup_attr:
        try:
            attr = args[0]
        except LookupError:
            raise FilterArgumentError("Missing parameter for attribute name") from None

        transfunc = make_attrgetter(context.environment, attr)
        off = 1
    else:
        off = 0

        def transfunc(x: V) -> V:
            return x

    try:
        name = args[off]
        test_args = args[off + 1 :]
    except LookupError:
        # No test name given: truthiness of the (transformed) item decides.
        func: t.Callable[[t.Any], t.Any] = bool  # type: ignore
    else:

        def func(item: t.Any) -> t.Any:
            return context.environment.call_test(name, item, test_args, kwargs)

    return lambda item: modfunc(func(transfunc(item)))
1765
+
1766
+
1767
def select_or_reject(
    context: "Context",
    value: "t.Iterable[V]",
    args: t.Tuple,
    kwargs: t.Dict[str, t.Any],
    modfunc: t.Callable[[t.Any], t.Any],
    lookup_attr: bool,
) -> "t.Iterator[V]":
    """Yield the items of *value* for which the prepared predicate holds.

    A falsy *value* yields nothing and, importantly, never builds the
    predicate — so missing test arguments are not reported in that case.
    """
    if not value:
        return

    pred = prepare_select_or_reject(context, args, kwargs, modfunc, lookup_attr)

    for item in value:
        if pred(item):
            yield item
1781
+
1782
+
1783
async def async_select_or_reject(
    context: "Context",
    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
    args: t.Tuple,
    kwargs: t.Dict[str, t.Any],
    modfunc: t.Callable[[t.Any], t.Any],
    lookup_attr: bool,
) -> "t.AsyncIterator[V]":
    """Async counterpart of :func:`select_or_reject`; accepts sync or
    async iterables via ``auto_aiter``."""
    if not value:
        return

    pred = prepare_select_or_reject(context, args, kwargs, modfunc, lookup_attr)

    async for item in auto_aiter(value):
        if pred(item):
            yield item
1797
+
1798
+
1799
# Default filter registry: template filter name -> implementation.
# ``abs``, ``count``/``length`` and ``e``/``escape`` reuse builtins or
# markupsafe helpers directly; everything else is defined in this module.
FILTERS = {
    "abs": abs,
    "attr": do_attr,
    "batch": do_batch,
    "capitalize": do_capitalize,
    "center": do_center,
    "count": len,
    "d": do_default,
    "default": do_default,
    "dictsort": do_dictsort,
    "e": escape,
    "escape": escape,
    "filesizeformat": do_filesizeformat,
    "first": do_first,
    "float": do_float,
    "forceescape": do_forceescape,
    "format": do_format,
    "groupby": do_groupby,
    "indent": do_indent,
    "int": do_int,
    "join": do_join,
    "last": do_last,
    "length": len,
    "list": do_list,
    "lower": do_lower,
    "items": do_items,
    "map": do_map,
    "min": do_min,
    "max": do_max,
    "pprint": do_pprint,
    "random": do_random,
    "reject": do_reject,
    "rejectattr": do_rejectattr,
    "replace": do_replace,
    "reverse": do_reverse,
    "round": do_round,
    "safe": do_mark_safe,
    "select": do_select,
    "selectattr": do_selectattr,
    "slice": do_slice,
    "sort": do_sort,
    "string": soft_str,
    "striptags": do_striptags,
    "sum": do_sum,
    "title": do_title,
    "trim": do_trim,
    "truncate": do_truncate,
    "unique": do_unique,
    "upper": do_upper,
    "urlencode": do_urlencode,
    "urlize": do_urlize,
    "wordcount": do_wordcount,
    "wordwrap": do_wordwrap,
    "xmlattr": do_xmlattr,
    "tojson": do_tojson,
}
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/idtracking.py ADDED
@@ -0,0 +1,318 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import typing as t
2
+
3
+ from . import nodes
4
+ from .visitor import NodeVisitor
5
+
6
+ VAR_LOAD_PARAMETER = "param"
7
+ VAR_LOAD_RESOLVE = "resolve"
8
+ VAR_LOAD_ALIAS = "alias"
9
+ VAR_LOAD_UNDEFINED = "undefined"
10
+
11
+
12
def find_symbols(
    nodes: t.Iterable[nodes.Node], parent_symbols: t.Optional["Symbols"] = None
) -> "Symbols":
    """Run a :class:`FrameSymbolVisitor` over *nodes* and return the
    resulting symbol table (optionally chained to *parent_symbols*)."""
    symbols = Symbols(parent=parent_symbols)
    visitor = FrameSymbolVisitor(symbols)

    for node in nodes:
        visitor.visit(node)

    return symbols
20
+
21
+
22
def symbols_for_node(
    node: nodes.Node, parent_symbols: t.Optional["Symbols"] = None
) -> "Symbols":
    """Analyze a single *node* as a scope root and return its symbol
    table (optionally chained to *parent_symbols*)."""
    symbols = Symbols(parent=parent_symbols)
    symbols.analyze_node(node)
    return symbols
28
+
29
+
30
class Symbols:
    """Per-frame symbol table for the code generator.

    Tracks, for one nesting ``level``: the mapping of template names to
    generated identifiers (``refs``), how each identifier is initialized
    (``loads``), and which names the frame assigns (``stores``). Lookups
    fall back to the ``parent`` chain.
    """

    def __init__(
        self, parent: t.Optional["Symbols"] = None, level: t.Optional[int] = None
    ) -> None:
        if level is None:
            level = 0 if parent is None else parent.level + 1

        self.level: int = level
        self.parent = parent
        self.refs: t.Dict[str, str] = {}
        self.loads: t.Dict[str, t.Any] = {}
        self.stores: t.Set[str] = set()

    def analyze_node(self, node: nodes.Node, **kwargs: t.Any) -> None:
        """Populate this table from *node* via a :class:`RootVisitor`."""
        RootVisitor(self).visit(node, **kwargs)

    def _define_ref(
        self, name: str, load: t.Optional[t.Tuple[str, t.Optional[str]]] = None
    ) -> str:
        # Identifiers embed the level so inner scopes never collide with
        # outer ones.
        target = f"l_{self.level}_{name}"
        self.refs[name] = target

        if load is not None:
            self.loads[target] = load

        return target

    def find_load(self, target: str) -> t.Optional[t.Any]:
        """Look up a load instruction for *target*, walking up parents."""
        scope: t.Optional["Symbols"] = self

        while scope is not None:
            if target in scope.loads:
                return scope.loads[target]

            scope = scope.parent

        return None

    def find_ref(self, name: str) -> t.Optional[str]:
        """Look up the identifier for *name*, walking up parents."""
        scope: t.Optional["Symbols"] = self

        while scope is not None:
            if name in scope.refs:
                return scope.refs[name]

            scope = scope.parent

        return None

    def ref(self, name: str) -> str:
        """Like :meth:`find_ref` but fail loudly for unknown names."""
        ident = self.find_ref(name)

        if ident is None:
            raise AssertionError(
                "Tried to resolve a name to a reference that was"
                f" unknown to the frame ({name!r})"
            )

        return ident

    def copy(self) -> "Symbols":
        """Shallow copy with independent ref/load/store containers."""
        clone = object.__new__(self.__class__)
        clone.__dict__.update(self.__dict__)
        clone.refs = self.refs.copy()
        clone.loads = self.loads.copy()
        clone.stores = self.stores.copy()
        return clone

    def store(self, name: str) -> None:
        """Record an assignment to *name* in this frame."""
        self.stores.add(name)

        # Already referenced here: nothing more to decide.
        if name in self.refs:
            return

        # A parent that knows the name means the local identifier starts
        # out aliasing the outer one; otherwise it starts undefined.
        if self.parent is not None:
            outer_ref = self.parent.find_ref(name)

            if outer_ref is not None:
                self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref))
                return

        self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))

    def declare_parameter(self, name: str) -> str:
        """Register *name* as a parameter of this frame."""
        self.stores.add(name)
        return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None))

    def load(self, name: str) -> None:
        """Record a read of *name*; resolves from context if unknown."""
        if self.find_ref(name) is None:
            self._define_ref(name, load=(VAR_LOAD_RESOLVE, name))

    def branch_update(self, branch_symbols: t.Sequence["Symbols"]) -> None:
        """Merge the symbol tables of conditional branches back in.

        Names not stored by *every* branch get a load fixed up to an
        outer alias or a context resolve, since they may be unset at
        runtime.
        """
        seen_in: t.Dict[str, int] = {}

        for branch in branch_symbols:
            for target in branch.stores:
                if target not in self.stores:
                    seen_in[target] = seen_in.get(target, 0) + 1

        for branch in branch_symbols:
            self.refs.update(branch.refs)
            self.loads.update(branch.loads)
            self.stores.update(branch.stores)

        for name, branch_count in seen_in.items():
            if branch_count == len(branch_symbols):
                # Stored on every path: always defined afterwards.
                continue

            target = self.find_ref(name)  # type: ignore
            assert target is not None, "should not happen"

            if self.parent is not None:
                outer_target = self.parent.find_ref(name)

                if outer_target is not None:
                    self.loads[target] = (VAR_LOAD_ALIAS, outer_target)
                    continue

            self.loads[target] = (VAR_LOAD_RESOLVE, name)

    def dump_stores(self) -> t.Dict[str, str]:
        """Map every stored name (including parents') to its identifier."""
        mapping: t.Dict[str, str] = {}
        scope: t.Optional["Symbols"] = self

        while scope is not None:
            for name in sorted(scope.stores):
                if name not in mapping:
                    mapping[name] = self.find_ref(name)  # type: ignore

            scope = scope.parent

        return mapping

    def dump_param_targets(self) -> t.Set[str]:
        """Collect identifiers that are loaded as parameters."""
        targets = set()
        scope: t.Optional["Symbols"] = self

        while scope is not None:
            # NOTE(review): this reads ``self.loads`` on every iteration
            # (never ``scope.loads``), matching the original code — the
            # parent walk only repeats the same scan. Preserved as-is.
            for target, (instr, _) in self.loads.items():
                if instr == VAR_LOAD_PARAMETER:
                    targets.add(target)

            scope = scope.parent

        return targets
172
+
173
+
174
class RootVisitor(NodeVisitor):
    """Visits a scope-root node and forwards its children to a
    :class:`FrameSymbolVisitor`, which does the actual name tracking."""

    def __init__(self, symbols: "Symbols") -> None:
        self.sym_visitor = FrameSymbolVisitor(symbols)

    def _simple_visit(self, node: nodes.Node, **kwargs: t.Any) -> None:
        # Default behavior for scope roots: analyze all children.
        for child in node.iter_child_nodes():
            self.sym_visitor.visit(child)

    visit_Template = _simple_visit
    visit_Block = _simple_visit
    visit_Macro = _simple_visit
    visit_FilterBlock = _simple_visit
    visit_Scope = _simple_visit
    visit_If = _simple_visit
    visit_ScopedEvalContextModifier = _simple_visit

    def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None:
        # Only the body belongs to the inner scope, not the target.
        for child in node.body:
            self.sym_visitor.visit(child)

    def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None:
        # The call expression itself is evaluated in the outer scope.
        for child in node.iter_child_nodes(exclude=("call",)):
            self.sym_visitor.visit(child)

    def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None:
        for child in node.body:
            self.sym_visitor.visit(child)

    def visit_For(
        self, node: nodes.For, for_branch: str = "body", **kwargs: t.Any
    ) -> None:
        """Analyze one branch of a for loop, selected by *for_branch*."""
        if for_branch not in ("body", "else", "test"):
            raise RuntimeError("Unknown for branch")

        # The loop target is a parameter of the body and test branches.
        if for_branch != "else":
            self.sym_visitor.visit(node.target, store_as_param=True)

        if for_branch == "test":
            if node.test is not None:
                self.sym_visitor.visit(node.test)
            return

        branch = node.body if for_branch == "body" else node.else_

        for item in branch or ():
            self.sym_visitor.visit(item)

    def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None:
        for target in node.targets:
            self.sym_visitor.visit(target)
        for child in node.body:
            self.sym_visitor.visit(child)

    def generic_visit(self, node: nodes.Node, *args: t.Any, **kwargs: t.Any) -> None:
        # Reaching here means a node type was used as a scope root that
        # this visitor does not know how to analyze.
        raise NotImplementedError(f"Cannot find symbols for {type(node).__name__!r}")
230
+
231
+
232
class FrameSymbolVisitor(NodeVisitor):
    """A visitor for `Frame.inspect`.

    Walks the nodes of one frame and records parameter declarations,
    stores and loads on the shared :class:`Symbols` table, stopping at
    constructs that open a new scope.
    """

    def __init__(self, symbols: "Symbols") -> None:
        self.symbols = symbols

    def visit_Name(
        self, node: nodes.Name, store_as_param: bool = False, **kwargs: t.Any
    ) -> None:
        """All assignments to names go through this function."""
        if store_as_param or node.ctx == "param":
            self.symbols.declare_parameter(node.name)
        elif node.ctx == "store":
            self.symbols.store(node.name)
        elif node.ctx == "load":
            self.symbols.load(node.name)

    def visit_NSRef(self, node: nodes.NSRef, **kwargs: t.Any) -> None:
        # Namespace attribute assignment reads the namespace object.
        self.symbols.load(node.name)

    def visit_If(self, node: nodes.If, **kwargs: t.Any) -> None:
        self.visit(node.test, **kwargs)
        original_symbols = self.symbols

        def visit_branch(branch_nodes: t.Iterable[nodes.Node]) -> "Symbols":
            # Every branch works on its own copy so that stores in one
            # branch are invisible to the others until branch_update
            # reconciles them.
            self.symbols = branch_symbols = original_symbols.copy()

            for subnode in branch_nodes:
                self.visit(subnode, **kwargs)

            self.symbols = original_symbols
            return branch_symbols

        body_symbols = visit_branch(node.body)
        elif_symbols = visit_branch(node.elif_)
        else_symbols = visit_branch(node.else_ or ())
        self.symbols.branch_update([body_symbols, elif_symbols, else_symbols])

    def visit_Macro(self, node: nodes.Macro, **kwargs: t.Any) -> None:
        # Only the macro's name is stored here; its body is a new frame.
        self.symbols.store(node.name)

    def visit_Import(self, node: nodes.Import, **kwargs: t.Any) -> None:
        self.generic_visit(node, **kwargs)
        self.symbols.store(node.target)

    def visit_FromImport(self, node: nodes.FromImport, **kwargs: t.Any) -> None:
        self.generic_visit(node, **kwargs)

        for name in node.names:
            # ``(orig, alias)`` tuples store under the alias.
            self.symbols.store(name[1] if isinstance(name, tuple) else name)

    def visit_Assign(self, node: nodes.Assign, **kwargs: t.Any) -> None:
        """Visit assignments in the correct order."""
        # Right-hand side first: loads there must not see the new store.
        self.visit(node.node, **kwargs)
        self.visit(node.target, **kwargs)

    def visit_For(self, node: nodes.For, **kwargs: t.Any) -> None:
        """Visiting stops at for blocks. However the block sequence
        is visited as part of the outer scope.
        """
        self.visit(node.iter, **kwargs)

    def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None:
        # Only the call expression runs in this frame.
        self.visit(node.call, **kwargs)

    def visit_FilterBlock(self, node: nodes.FilterBlock, **kwargs: t.Any) -> None:
        # Only the filter expression runs in this frame.
        self.visit(node.filter, **kwargs)

    def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None:
        # The assigned values are evaluated in the outer scope; the
        # targets and body belong to the new one.
        for value in node.values:
            self.visit(value)

    def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None:
        """Stop visiting at block assigns."""
        self.visit(node.target, **kwargs)

    def visit_Scope(self, node: nodes.Scope, **kwargs: t.Any) -> None:
        """Stop visiting at scopes."""

    def visit_Block(self, node: nodes.Block, **kwargs: t.Any) -> None:
        """Stop visiting at blocks."""

    def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None:
        """Do not visit into overlay scopes."""
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/lexer.py ADDED
@@ -0,0 +1,866 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Implements a Jinja / Python combination lexer. The ``Lexer`` class
2
+ is used to do some preprocessing. It filters out invalid operators like
3
+ the bitshift operators we don't allow in templates. It separates
4
+ template code and python code in expressions.
5
+ """
6
+ import re
7
+ import typing as t
8
+ from ast import literal_eval
9
+ from collections import deque
10
+ from sys import intern
11
+
12
+ from ._identifier import pattern as name_re
13
+ from .exceptions import TemplateSyntaxError
14
+ from .utils import LRUCache
15
+
16
+ if t.TYPE_CHECKING:
17
+ import typing_extensions as te
18
+ from .environment import Environment
19
+
20
# Cache of compiled lexers keyed by the environment's lexer-relevant
# settings (see ``get_lexer``), so environments sharing the same syntax
# configuration share one Lexer instance.
_lexer_cache: t.MutableMapping[t.Tuple, "Lexer"] = LRUCache(50)  # type: ignore
23
+
24
# Static regular expressions shared by all lexers. The patterns are part
# of the template language's surface syntax and must not change.
whitespace_re = re.compile(r"\s+")
newline_re = re.compile(r"(\r\n|\r|\n)")

# Single- or double-quoted string with backslash escapes.
string_re = re.compile(
    r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S
)

# Integer literals with optional ``_`` digit separators in binary,
# octal, hex and decimal notation.
integer_re = re.compile(
    r"""
    (
        0b(_?[0-1])+ # binary
    |
        0o(_?[0-7])+ # octal
    |
        0x(_?[\da-f])+ # hex
    |
        [1-9](_?\d)* # decimal
    |
        0(_?0)* # decimal zero
    )
    """,
    re.IGNORECASE | re.VERBOSE,
)

# Float literals: require a fractional part or an exponent so plain
# integers are not matched.
float_re = re.compile(
    r"""
    (?<!\.) # doesn't start with a .
    (\d+_)*\d+ # digits, possibly _ separated
    (
        (\.(\d+_)*\d+)? # optional fractional part
        e[+\-]?(\d+_)*\d+ # exponent part
    |
        \.(\d+_)*\d+ # required fractional part
    )
    """,
    re.IGNORECASE | re.VERBOSE,
)
59
+
60
+ # internal the tokens and keep references to them
61
# Token type names. They are interned so token-type comparisons can use
# identity (``is``) instead of string equality throughout the lexer.
TOKEN_ADD = intern("add")
TOKEN_ASSIGN = intern("assign")
TOKEN_COLON = intern("colon")
TOKEN_COMMA = intern("comma")
TOKEN_DIV = intern("div")
TOKEN_DOT = intern("dot")
TOKEN_EQ = intern("eq")
TOKEN_FLOORDIV = intern("floordiv")
TOKEN_GT = intern("gt")
TOKEN_GTEQ = intern("gteq")
TOKEN_LBRACE = intern("lbrace")
TOKEN_LBRACKET = intern("lbracket")
TOKEN_LPAREN = intern("lparen")
TOKEN_LT = intern("lt")
TOKEN_LTEQ = intern("lteq")
TOKEN_MOD = intern("mod")
TOKEN_MUL = intern("mul")
TOKEN_NE = intern("ne")
TOKEN_PIPE = intern("pipe")
TOKEN_POW = intern("pow")
TOKEN_RBRACE = intern("rbrace")
TOKEN_RBRACKET = intern("rbracket")
TOKEN_RPAREN = intern("rparen")
TOKEN_SEMICOLON = intern("semicolon")
TOKEN_SUB = intern("sub")
TOKEN_TILDE = intern("tilde")
TOKEN_WHITESPACE = intern("whitespace")
TOKEN_FLOAT = intern("float")
TOKEN_INTEGER = intern("integer")
TOKEN_NAME = intern("name")
TOKEN_STRING = intern("string")
TOKEN_OPERATOR = intern("operator")
TOKEN_BLOCK_BEGIN = intern("block_begin")
TOKEN_BLOCK_END = intern("block_end")
TOKEN_VARIABLE_BEGIN = intern("variable_begin")
TOKEN_VARIABLE_END = intern("variable_end")
TOKEN_RAW_BEGIN = intern("raw_begin")
TOKEN_RAW_END = intern("raw_end")
TOKEN_COMMENT_BEGIN = intern("comment_begin")
TOKEN_COMMENT_END = intern("comment_end")
TOKEN_COMMENT = intern("comment")
TOKEN_LINESTATEMENT_BEGIN = intern("linestatement_begin")
TOKEN_LINESTATEMENT_END = intern("linestatement_end")
TOKEN_LINECOMMENT_BEGIN = intern("linecomment_begin")
TOKEN_LINECOMMENT_END = intern("linecomment_end")
TOKEN_LINECOMMENT = intern("linecomment")
TOKEN_DATA = intern("data")
TOKEN_INITIAL = intern("initial")
TOKEN_EOF = intern("eof")
110
+
111
# Bind operator source text to token types.
operators = {
    "+": TOKEN_ADD,
    "-": TOKEN_SUB,
    "/": TOKEN_DIV,
    "//": TOKEN_FLOORDIV,
    "*": TOKEN_MUL,
    "%": TOKEN_MOD,
    "**": TOKEN_POW,
    "~": TOKEN_TILDE,
    "[": TOKEN_LBRACKET,
    "]": TOKEN_RBRACKET,
    "(": TOKEN_LPAREN,
    ")": TOKEN_RPAREN,
    "{": TOKEN_LBRACE,
    "}": TOKEN_RBRACE,
    "==": TOKEN_EQ,
    "!=": TOKEN_NE,
    ">": TOKEN_GT,
    ">=": TOKEN_GTEQ,
    "<": TOKEN_LT,
    "<=": TOKEN_LTEQ,
    "=": TOKEN_ASSIGN,
    ".": TOKEN_DOT,
    ":": TOKEN_COLON,
    "|": TOKEN_PIPE,
    ",": TOKEN_COMMA,
    ";": TOKEN_SEMICOLON,
}

reverse_operators = {v: k for k, v in operators.items()}
# A collision would mean two operators share a token type.
assert len(operators) == len(reverse_operators), "operators dropped"
# Longest operators first so e.g. ``**`` wins over ``*`` in alternation.
operator_re = re.compile(
    f"({'|'.join(re.escape(x) for x in sorted(operators, key=len, reverse=True))})"
)

# Token types dropped entirely from the parser's token stream.
ignored_tokens = frozenset(
    [
        TOKEN_COMMENT_BEGIN,
        TOKEN_COMMENT,
        TOKEN_COMMENT_END,
        TOKEN_WHITESPACE,
        TOKEN_LINECOMMENT_BEGIN,
        TOKEN_LINECOMMENT_END,
        TOKEN_LINECOMMENT,
    ]
)
# Token types dropped only when their value is empty.
ignore_if_empty = frozenset(
    [TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT, TOKEN_LINECOMMENT]
)
161
+
162
+
163
def _describe_token_type(token_type: str) -> str:
    """Return a human readable description for a token type, falling
    back to the type name itself."""
    if token_type in reverse_operators:
        return reverse_operators[token_type]

    descriptions = {
        TOKEN_COMMENT_BEGIN: "begin of comment",
        TOKEN_COMMENT_END: "end of comment",
        TOKEN_COMMENT: "comment",
        TOKEN_LINECOMMENT: "comment",
        TOKEN_BLOCK_BEGIN: "begin of statement block",
        TOKEN_BLOCK_END: "end of statement block",
        TOKEN_VARIABLE_BEGIN: "begin of print statement",
        TOKEN_VARIABLE_END: "end of print statement",
        TOKEN_LINESTATEMENT_BEGIN: "begin of line statement",
        TOKEN_LINESTATEMENT_END: "end of line statement",
        TOKEN_DATA: "template data / text",
        TOKEN_EOF: "end of template",
    }
    return descriptions.get(token_type, token_type)
181
+
182
+
183
def describe_token(token: "Token") -> str:
    """Returns a description of the token."""
    # Name tokens are best described by their actual value.
    return token.value if token.type == TOKEN_NAME else _describe_token_type(token.type)
189
+
190
+
191
def describe_token_expr(expr: str) -> str:
    """Like `describe_token` but for token expressions of the form
    ``type`` or ``type:value``."""
    if ":" not in expr:
        return _describe_token_type(expr)

    type_, value = expr.split(":", 1)
    # For name expressions the value itself is the clearest description.
    if type_ == TOKEN_NAME:
        return value

    return _describe_token_type(type_)
202
+
203
+
204
def count_newlines(value: str) -> int:
    """Count the number of newline characters in the string. This is
    useful for extensions that filter a stream.
    """
    return sum(1 for _ in newline_re.finditer(value))
209
+
210
+
211
def compile_rules(environment: "Environment") -> t.List[t.Tuple[str, str]]:
    """Compiles all the rules from the environment into a list of rules."""
    escape = re.escape
    rules = [
        (len(delim), token, escape(delim))
        for token, delim in (
            (TOKEN_COMMENT_BEGIN, environment.comment_start_string),
            (TOKEN_BLOCK_BEGIN, environment.block_start_string),
            (TOKEN_VARIABLE_BEGIN, environment.variable_start_string),
        )
    ]

    if environment.line_statement_prefix is not None:
        prefix = environment.line_statement_prefix
        rules.append(
            (len(prefix), TOKEN_LINESTATEMENT_BEGIN, r"^[ \t\v]*" + escape(prefix))
        )
    if environment.line_comment_prefix is not None:
        prefix = environment.line_comment_prefix
        rules.append(
            (
                len(prefix),
                TOKEN_LINECOMMENT_BEGIN,
                r"(?:^|(?<=\S))[^\S\r\n]*" + escape(prefix),
            )
        )

    # Longest delimiters first so short markers never shadow longer ones
    # that share a prefix; the length is only used for sorting.
    return [rule[1:] for rule in sorted(rules, reverse=True)]
250
+
251
+
252
class Failure:
    """Class that raises a `TemplateSyntaxError` if called.
    Used by the `Lexer` to specify known errors.
    """

    def __init__(
        self, message: str, cls: t.Type[TemplateSyntaxError] = TemplateSyntaxError
    ) -> None:
        self.message = message
        self.error_class = cls

    def __call__(self, lineno: int, filename: str) -> "te.NoReturn":
        # Raising is deferred to the point where the offending input is
        # actually matched, so the error carries the right position.
        raise self.error_class(self.message, lineno, filename)
265
+
266
+
267
class Token(t.NamedTuple):
    """A single lexed token: position, interned type name and raw value."""

    lineno: int
    type: str
    value: str

    def __str__(self) -> str:
        return describe_token(self)

    def test(self, expr: str) -> bool:
        """Test a token against a token expression. This can either be a
        token type or ``'token_type:token_value'``. This can only test
        against string values and types.
        """
        # here we do a regular string equality check as test_any is usually
        # passed an iterable of not interned strings.
        if self.type == expr:
            return True

        if ":" in expr:
            want_type, want_value = expr.split(":", 1)
            return want_type == self.type and want_value == self.value

        return False

    def test_any(self, *iterable: str) -> bool:
        """Test against multiple token expressions."""
        return any(map(self.test, iterable))
293
+
294
+
295
class TokenStreamIterator:
    """The iterator for tokenstreams. Iterate over the stream
    until the eof token is reached.
    """

    def __init__(self, stream: "TokenStream") -> None:
        self.stream = stream

    def __iter__(self) -> "TokenStreamIterator":
        return self

    def __next__(self) -> Token:
        token = self.stream.current

        # The EOF token terminates iteration and closes the stream.
        if token.type is TOKEN_EOF:
            self.stream.close()
            raise StopIteration

        next(self.stream)
        return token
315
+
316
+
317
class TokenStream:
    """A token stream is an iterable that yields :class:`Token`\\s. The
    parser however does not iterate over it but calls :meth:`next` to go
    one token ahead. The current active token is stored as :attr:`current`.
    """

    def __init__(
        self,
        generator: t.Iterable[Token],
        name: t.Optional[str],
        filename: t.Optional[str],
    ):
        self._iter = iter(generator)
        self._pushed: "te.Deque[Token]" = deque()
        self.name = name
        self.filename = filename
        self.closed = False
        self.current = Token(1, TOKEN_INITIAL, "")
        # Advance once so ``current`` holds the first real token.
        next(self)

    def __iter__(self) -> TokenStreamIterator:
        return TokenStreamIterator(self)

    def __bool__(self) -> bool:
        return bool(self._pushed) or self.current.type is not TOKEN_EOF

    @property
    def eos(self) -> bool:
        """Are we at the end of the stream?"""
        return not self

    def push(self, token: Token) -> None:
        """Push a token back to the stream."""
        self._pushed.append(token)

    def look(self) -> Token:
        """Look at the next token without consuming it."""
        previous = next(self)
        peeked = self.current
        # Restore order: the peeked token goes back on the queue and the
        # previously current one becomes current again.
        self.push(peeked)
        self.current = previous
        return peeked

    def skip(self, n: int = 1) -> None:
        """Advance the stream by ``n`` tokens."""
        for _ in range(n):
            next(self)

    def next_if(self, expr: str) -> t.Optional[Token]:
        """Perform the token test and return the token if it matched.
        Otherwise the return value is `None`.
        """
        return next(self) if self.current.test(expr) else None

    def skip_if(self, expr: str) -> bool:
        """Like :meth:`next_if` but only returns `True` or `False`."""
        return self.next_if(expr) is not None

    def __next__(self) -> Token:
        """Go one token ahead and return the old one.

        Use the built-in :func:`next` instead of calling this directly.
        """
        rv = self.current

        if self._pushed:
            self.current = self._pushed.popleft()
        elif rv.type is not TOKEN_EOF:
            try:
                self.current = next(self._iter)
            except StopIteration:
                self.close()

        return rv

    def close(self) -> None:
        """Close the stream."""
        self.current = Token(self.current.lineno, TOKEN_EOF, "")
        self._iter = iter(())
        self.closed = True

    def expect(self, expr: str) -> Token:
        """Expect a given token type and return it. This accepts the same
        argument as :meth:`jinja2.lexer.Token.test`.
        """
        if not self.current.test(expr):
            expr = describe_token_expr(expr)

            if self.current.type is TOKEN_EOF:
                raise TemplateSyntaxError(
                    f"unexpected end of template, expected {expr!r}.",
                    self.current.lineno,
                    self.name,
                    self.filename,
                )

            raise TemplateSyntaxError(
                f"expected token {expr!r}, got {describe_token(self.current)!r}",
                self.current.lineno,
                self.name,
                self.filename,
            )

        return next(self)
424
+
425
+
426
def get_lexer(environment: "Environment") -> "Lexer":
    """Return a lexer which is probably cached."""
    # Every setting that influences lexing is part of the cache key.
    key = (
        environment.block_start_string,
        environment.block_end_string,
        environment.variable_start_string,
        environment.variable_end_string,
        environment.comment_start_string,
        environment.comment_end_string,
        environment.line_statement_prefix,
        environment.line_comment_prefix,
        environment.trim_blocks,
        environment.lstrip_blocks,
        environment.newline_sequence,
        environment.keep_trailing_newline,
    )
    lexer = _lexer_cache.get(key)

    if lexer is not None:
        return lexer

    lexer = _lexer_cache[key] = Lexer(environment)
    return lexer
448
+
449
+
450
class OptionalLStrip(tuple):
    """A special tuple for marking a point in the state that can have
    lstrip applied.
    """

    __slots__ = ()

    # tuple subclasses receive the varargs as a single iterable, so a
    # custom __new__ is required for ``OptionalLStrip(a, b)`` to work.
    def __new__(cls, *members, **kwargs):  # type: ignore
        return super().__new__(cls, members)
461
+
462
+
463
class _Rule(t.NamedTuple):
    """One lexer state rule: a compiled pattern, the token type(s) it
    produces (or a :class:`Failure`), and an optional state command."""

    pattern: t.Pattern[str]
    tokens: t.Union[str, t.Tuple[str, ...], t.Tuple[Failure]]
    command: t.Optional[str]
467
+
468
+
469
class Lexer:
    """Class that implements a lexer for a given environment. Automatically
    created by the environment class, usually you don't have to do that.

    Note that the lexer is not automatically bound to an environment.
    Multiple environments can share the same lexer.
    """

    def __init__(self, environment: "Environment") -> None:
        # shortcuts
        e = re.escape

        def c(x: str) -> t.Pattern[str]:
            # All rule patterns are multiline and dot-matches-newline.
            return re.compile(x, re.M | re.S)

        # lexing rules for tags
        tag_rules: t.List[_Rule] = [
            _Rule(whitespace_re, TOKEN_WHITESPACE, None),
            _Rule(float_re, TOKEN_FLOAT, None),
            _Rule(integer_re, TOKEN_INTEGER, None),
            _Rule(name_re, TOKEN_NAME, None),
            _Rule(string_re, TOKEN_STRING, None),
            _Rule(operator_re, TOKEN_OPERATOR, None),
        ]

        # assemble the root lexing rule. because "|" is ungreedy
        # we have to sort by length so that the lexer continues working
        # as expected when we have parsing rules like <% for block and
        # <%= for variables. (if someone wants asp like syntax)
        # variables are just part of the rules if variable processing
        # is required.
        root_tag_rules = compile_rules(environment)

        block_start_re = e(environment.block_start_string)
        block_end_re = e(environment.block_end_string)
        comment_end_re = e(environment.comment_end_string)
        variable_end_re = e(environment.variable_end_string)

        # block suffix if trimming is enabled
        block_suffix_re = "\\n?" if environment.trim_blocks else ""

        self.lstrip_blocks = environment.lstrip_blocks

        self.newline_sequence = environment.newline_sequence
        self.keep_trailing_newline = environment.keep_trailing_newline

        # {% raw %} needs its own root-level pattern so its body bypasses
        # normal tag lexing entirely.
        root_raw_re = (
            rf"(?P<raw_begin>{block_start_re}(\-|\+|)\s*raw\s*"
            rf"(?:\-{block_end_re}\s*|{block_end_re}))"
        )
        root_parts_re = "|".join(
            [root_raw_re] + [rf"(?P<{n}>{r}(\-|\+|))" for n, r in root_tag_rules]
        )

        # global lexing rules
        self.rules: t.Dict[str, t.List[_Rule]] = {
            "root": [
                # directives
                _Rule(
                    c(rf"(.*?)(?:{root_parts_re})"),
                    OptionalLStrip(TOKEN_DATA, "#bygroup"),  # type: ignore
                    "#bygroup",
                ),
                # data
                _Rule(c(".+"), TOKEN_DATA, None),
            ],
            # comments
            TOKEN_COMMENT_BEGIN: [
                _Rule(
                    c(
                        rf"(.*?)((?:\+{comment_end_re}|\-{comment_end_re}\s*"
                        rf"|{comment_end_re}{block_suffix_re}))"
                    ),
                    (TOKEN_COMMENT, TOKEN_COMMENT_END),
                    "#pop",
                ),
                _Rule(c(r"(.)"), (Failure("Missing end of comment tag"),), None),
            ],
            # blocks
            TOKEN_BLOCK_BEGIN: [
                _Rule(
                    c(
                        rf"(?:\+{block_end_re}|\-{block_end_re}\s*"
                        rf"|{block_end_re}{block_suffix_re})"
                    ),
                    TOKEN_BLOCK_END,
                    "#pop",
                ),
            ]
            + tag_rules,
            # variables
            TOKEN_VARIABLE_BEGIN: [
                _Rule(
                    c(rf"\-{variable_end_re}\s*|{variable_end_re}"),
                    TOKEN_VARIABLE_END,
                    "#pop",
                )
            ]
            + tag_rules,
            # raw block
            TOKEN_RAW_BEGIN: [
                _Rule(
                    c(
                        rf"(.*?)((?:{block_start_re}(\-|\+|))\s*endraw\s*"
                        rf"(?:\+{block_end_re}|\-{block_end_re}\s*"
                        rf"|{block_end_re}{block_suffix_re}))"
                    ),
                    OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END),  # type: ignore
                    "#pop",
                ),
                _Rule(c(r"(.)"), (Failure("Missing end of raw directive"),), None),
            ],
            # line statements
            TOKEN_LINESTATEMENT_BEGIN: [
                _Rule(c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop")
            ]
            + tag_rules,
            # line comments
            TOKEN_LINECOMMENT_BEGIN: [
                _Rule(
                    c(r"(.*?)()(?=\n|$)"),
                    (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END),
                    "#pop",
                )
            ],
        }

    def _normalize_newlines(self, value: str) -> str:
        """Replace all newlines with the configured sequence in strings
        and template data.
        """
        return newline_re.sub(self.newline_sequence, value)

    def tokenize(
        self,
        source: str,
        name: t.Optional[str] = None,
        filename: t.Optional[str] = None,
        state: t.Optional[str] = None,
    ) -> TokenStream:
        """Calls tokeniter + tokenize and wraps it in a token stream.

        This is the main entry point: it produces the raw token triples
        via :meth:`tokeniter`, converts them with :meth:`wrap`, and
        returns a :class:`TokenStream` over the result.
        """
        stream = self.tokeniter(source, name, filename, state)
        return TokenStream(self.wrap(stream, name, filename), name, filename)

    def wrap(
        self,
        stream: t.Iterable[t.Tuple[int, str, str]],
        name: t.Optional[str] = None,
        filename: t.Optional[str] = None,
    ) -> t.Iterator[Token]:
        """This is called with the stream as returned by `tokenize` and wraps
        every token in a :class:`Token` and converts the value.

        :raises TemplateSyntaxError: for invalid identifiers or
            un-unescapable string literals.
        """
        for lineno, token, value_str in stream:
            if token in ignored_tokens:
                continue

            value: t.Any = value_str

            if token == TOKEN_LINESTATEMENT_BEGIN:
                token = TOKEN_BLOCK_BEGIN
            elif token == TOKEN_LINESTATEMENT_END:
                token = TOKEN_BLOCK_END
            # we are not interested in those tokens in the parser
            elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END):
                continue
            elif token == TOKEN_DATA:
                value = self._normalize_newlines(value_str)
            elif token == "keyword":
                token = value_str
            elif token == TOKEN_NAME:
                value = value_str

                if not value.isidentifier():
                    raise TemplateSyntaxError(
                        "Invalid character in identifier", lineno, name, filename
                    )
            elif token == TOKEN_STRING:
                # try to unescape string
                try:
                    value = (
                        self._normalize_newlines(value_str[1:-1])
                        .encode("ascii", "backslashreplace")
                        .decode("unicode-escape")
                    )
                except Exception as e:
                    msg = str(e).split(":")[-1].strip()
                    raise TemplateSyntaxError(msg, lineno, name, filename) from e
            elif token == TOKEN_INTEGER:
                # base 0 lets int() handle 0x/0o/0b prefixes
                value = int(value_str.replace("_", ""), 0)
            elif token == TOKEN_FLOAT:
                # remove all "_" first to support more Python versions
                value = literal_eval(value_str.replace("_", ""))
            elif token == TOKEN_OPERATOR:
                token = operators[value_str]

            yield Token(lineno, token, value)

    def tokeniter(
        self,
        source: str,
        name: t.Optional[str],
        filename: t.Optional[str] = None,
        state: t.Optional[str] = None,
    ) -> t.Iterator[t.Tuple[int, str, str]]:
        """This method tokenizes the text and returns the tokens in a
        generator. Use this method if you just want to tokenize a template.

        .. versionchanged:: 3.0
            Only ``\\n``, ``\\r\\n`` and ``\\r`` are treated as line
            breaks.
        """
        lines = newline_re.split(source)[::2]

        if not self.keep_trailing_newline and lines[-1] == "":
            del lines[-1]

        source = "\n".join(lines)
        pos = 0
        lineno = 1
        stack = ["root"]

        if state is not None and state != "root":
            assert state in ("variable", "block"), "invalid state"
            stack.append(state + "_begin")

        statetokens = self.rules[stack[-1]]
        source_length = len(source)
        balancing_stack: t.List[str] = []
        newlines_stripped = 0
        line_starting = True

        while True:
            # tokenizer loop
            for regex, tokens, new_state in statetokens:
                m = regex.match(source, pos)

                # if no match we try again with the next rule
                if m is None:
                    continue

                # we only match blocks and variables if braces / parentheses
                # are balanced. continue parsing with the lower rule which
                # is the operator rule. do this only if the end tags look
                # like operators
                if balancing_stack and tokens in (
                    TOKEN_VARIABLE_END,
                    TOKEN_BLOCK_END,
                    TOKEN_LINESTATEMENT_END,
                ):
                    continue

                # tuples support more options
                if isinstance(tokens, tuple):
                    groups: t.Sequence[str] = m.groups()

                    if isinstance(tokens, OptionalLStrip):
                        # Rule supports lstrip. Match will look like
                        # text, block type, whitespace control, type, control, ...
                        text = groups[0]
                        # Skipping the text and first type, every other group is the
                        # whitespace control for each type. One of the groups will be
                        # -, +, or empty string instead of None.
                        strip_sign = next(g for g in groups[2::2] if g is not None)

                        if strip_sign == "-":
                            # Strip all whitespace between the text and the tag.
                            stripped = text.rstrip()
                            newlines_stripped = text[len(stripped) :].count("\n")
                            groups = [stripped, *groups[1:]]
                        elif (
                            # Not marked for preserving whitespace.
                            strip_sign != "+"
                            # lstrip is enabled.
                            and self.lstrip_blocks
                            # Not a variable expression.
                            and not m.groupdict().get(TOKEN_VARIABLE_BEGIN)
                        ):
                            # The start of text between the last newline and the tag.
                            l_pos = text.rfind("\n") + 1

                            if l_pos > 0 or line_starting:
                                # If there's only whitespace between the newline and the
                                # tag, strip it.
                                if whitespace_re.fullmatch(text, l_pos):
                                    groups = [text[:l_pos], *groups[1:]]

                    for idx, token in enumerate(tokens):
                        # failure group
                        if token.__class__ is Failure:
                            raise token(lineno, filename)
                        # bygroup is a bit more complex, in that case we
                        # yield for the current token the first named
                        # group that matched
                        elif token == "#bygroup":
                            for key, value in m.groupdict().items():
                                if value is not None:
                                    yield lineno, key, value
                                    lineno += value.count("\n")
                                    break
                            else:
                                raise RuntimeError(
                                    f"{regex!r} wanted to resolve the token dynamically"
                                    " but no group matched"
                                )
                        # normal group
                        else:
                            data = groups[idx]

                            if data or token not in ignore_if_empty:
                                yield lineno, token, data

                            lineno += data.count("\n") + newlines_stripped
                            newlines_stripped = 0

                # strings as token just are yielded as it.
                else:
                    data = m.group()

                    # update brace/parentheses balance
                    if tokens == TOKEN_OPERATOR:
                        if data == "{":
                            balancing_stack.append("}")
                        elif data == "(":
                            balancing_stack.append(")")
                        elif data == "[":
                            balancing_stack.append("]")
                        elif data in ("}", ")", "]"):
                            if not balancing_stack:
                                raise TemplateSyntaxError(
                                    f"unexpected '{data}'", lineno, name, filename
                                )

                            expected_op = balancing_stack.pop()

                            if expected_op != data:
                                raise TemplateSyntaxError(
                                    f"unexpected '{data}', expected '{expected_op}'",
                                    lineno,
                                    name,
                                    filename,
                                )

                    # yield items
                    if data or tokens not in ignore_if_empty:
                        yield lineno, tokens, data

                    lineno += data.count("\n")

                line_starting = m.group()[-1:] == "\n"
                # fetch new position into new variable so that we can check
                # if there is a internal parsing error which would result
                # in an infinite loop
                pos2 = m.end()

                # handle state changes
                if new_state is not None:
                    # remove the uppermost state
                    if new_state == "#pop":
                        stack.pop()
                    # resolve the new state by group checking
                    elif new_state == "#bygroup":
                        for key, value in m.groupdict().items():
                            if value is not None:
                                stack.append(key)
                                break
                        else:
                            raise RuntimeError(
                                f"{regex!r} wanted to resolve the new state dynamically"
                                f" but no group matched"
                            )
                    # direct state name given
                    else:
                        stack.append(new_state)

                    statetokens = self.rules[stack[-1]]
                # we are still at the same position and no stack change.
                # this means a loop without break condition, avoid that and
                # raise error
                elif pos2 == pos:
                    raise RuntimeError(
                        f"{regex!r} yielded empty string without stack change"
                    )

                # publish new function and start again
                pos = pos2
                break
            # if loop terminated without break we haven't found a single match
            # either we are at the end of the file or we have a problem
            else:
                # end of text
                if pos >= source_length:
                    return

                # something went wrong
                raise TemplateSyntaxError(
                    f"unexpected char {source[pos]!r} at {pos}", lineno, name, filename
                )
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/meta.py ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions that expose information about templates that might be
2
+ interesting for introspection.
3
+ """
4
+ import typing as t
5
+
6
+ from . import nodes
7
+ from .compiler import CodeGenerator
8
+ from .compiler import Frame
9
+
10
+ if t.TYPE_CHECKING:
11
+ from .environment import Environment
12
+
13
+
14
class TrackingCodeGenerator(CodeGenerator):
    """We abuse the code generator for introspection.

    Visiting a template AST with this generator records, instead of
    emitting code, every name that would be resolved from the render
    context at runtime.
    """

    def __init__(self, environment: "Environment") -> None:
        super().__init__(environment, "<introspection>", "<introspection>")
        # Names that will be looked up in the context at runtime.
        self.undeclared_identifiers: t.Set[str] = set()

    def write(self, x: str) -> None:
        """Don't write."""

    def enter_frame(self, frame: Frame) -> None:
        """Remember all undeclared identifiers."""
        super().enter_frame(frame)

        # A "resolve" load means the name comes from the context; names
        # provided by the environment's globals are not "undeclared".
        for _, (action, param) in frame.symbols.loads.items():
            if action == "resolve" and param not in self.environment.globals:
                self.undeclared_identifiers.add(param)
31
+
32
+
33
def find_undeclared_variables(ast: nodes.Template) -> t.Set[str]:
    """Returns a set of all variables in the AST that will be looked up from
    the context at runtime. Because at compile time it's not known which
    variables will be used depending on the path the execution takes at
    runtime, all variables are returned.

    >>> from jinja2 import Environment, meta
    >>> env = Environment()
    >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
    >>> meta.find_undeclared_variables(ast) == {'bar'}
    True

    .. admonition:: Implementation

        Internally the code generator is used for finding undeclared
        variables. This is good to know because the code generator might
        raise a :exc:`TemplateAssertionError` during compilation and as a
        matter of fact this function can currently raise that exception as
        well.
    """
    # Walking the AST with the tracking generator records every name
    # that would be resolved from the render context.
    tracker = TrackingCodeGenerator(ast.environment)  # type: ignore
    tracker.visit(ast)
    return tracker.undeclared_identifiers
55
+
56
+
57
# Node types that reference another template, plus the matching static
# union type for annotations.
_ref_types = (nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include)
_RefType = t.Union[nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include]
59
+
60
+
61
def find_referenced_templates(ast: nodes.Template) -> t.Iterator[t.Optional[str]]:
    """Finds all the referenced templates from the AST. This will return an
    iterator over all the hardcoded template extensions, inclusions and
    imports. If dynamic inheritance or inclusion is used, `None` will be
    yielded.

    >>> from jinja2 import Environment, meta
    >>> env = Environment()
    >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
    >>> list(meta.find_referenced_templates(ast))
    ['layout.html', None]

    This function is useful for dependency tracking. For example if you want
    to rebuild parts of the website after a layout template has changed.
    """
    template_name: t.Any

    for node in ast.find_all(_ref_types):
        template: nodes.Expr = node.template  # type: ignore

        if not isinstance(template, nodes.Const):
            # a tuple with some non consts in there
            if isinstance(template, (nodes.Tuple, nodes.List)):
                for template_name in template.items:
                    # something const, only yield the strings and ignore
                    # non-string consts that really just make no sense
                    if isinstance(template_name, nodes.Const):
                        if isinstance(template_name.value, str):
                            yield template_name.value
                    # something dynamic in there
                    else:
                        yield None
            # something dynamic we don't know about here
            else:
                yield None
            continue
        # constant is a basestring, direct template name
        if isinstance(template.value, str):
            yield template.value
        # a tuple or list (latter *should* not happen) made of consts,
        # yield the consts that are strings. We could warn here for
        # non string values
        elif isinstance(node, nodes.Include) and isinstance(
            template.value, (tuple, list)
        ):
            for template_name in template.value:
                if isinstance(template_name, str):
                    yield template_name
        # something else we don't care about, we could warn here
        else:
            yield None
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/nativetypes.py ADDED
@@ -0,0 +1,130 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import typing as t
2
+ from ast import literal_eval
3
+ from ast import parse
4
+ from itertools import chain
5
+ from itertools import islice
6
+ from types import GeneratorType
7
+
8
+ from . import nodes
9
+ from .compiler import CodeGenerator
10
+ from .compiler import Frame
11
+ from .compiler import has_safe_repr
12
+ from .environment import Environment
13
+ from .environment import Template
14
+
15
+
16
def native_concat(values: t.Iterable[t.Any]) -> t.Optional[t.Any]:
    """Return a native Python type from the list of compiled nodes. If
    the result is a single node, its value is returned. Otherwise, the
    nodes are concatenated as strings. If the result can be parsed with
    :func:`ast.literal_eval`, the parsed value is returned. Otherwise,
    the string is returned.

    :param values: Iterable of outputs to concatenate.
    """
    # Peek at up to two items to distinguish the empty / single-value /
    # multi-value cases without materializing everything.
    first_two = list(islice(values, 2))

    if len(first_two) == 0:
        return None

    if len(first_two) == 1:
        raw = first_two[0]
        # A lone non-string output is already a native value.
        if not isinstance(raw, str):
            return raw
    else:
        # Re-attach the consumed prefix when the input is a generator,
        # then stringify and join everything.
        if isinstance(values, GeneratorType):
            values = chain(first_two, values)
        raw = "".join(map(str, values))

    try:
        return literal_eval(
            # In Python 3.10+ ast.literal_eval removes leading spaces/tabs
            # from the given string. For backwards compatibility we need to
            # parse the string ourselves without removing leading spaces/tabs.
            parse(raw, mode="eval")
        )
    except (ValueError, SyntaxError, MemoryError):
        return raw
48
+
49
+
50
class NativeCodeGenerator(CodeGenerator):
    """A code generator which renders Python types by not adding
    ``str()`` around output nodes.
    """

    @staticmethod
    def _default_finalize(value: t.Any) -> t.Any:
        # Identity finalize: leave the value untouched instead of
        # coercing it to a string.
        return value

    def _output_const_repr(self, group: t.Iterable[t.Any]) -> str:
        # Constant groups are still joined as one string literal.
        return repr("".join([str(v) for v in group]))

    def _output_child_to_const(
        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
    ) -> t.Any:
        const = node.as_const(frame.eval_ctx)

        # Only fold values whose repr() round-trips safely.
        if not has_safe_repr(const):
            raise nodes.Impossible()

        if isinstance(node, nodes.TemplateData):
            return const

        return finalize.const(const)  # type: ignore

    def _output_child_pre(
        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
    ) -> None:
        # Emit the finalize call prefix (if any) but no str() wrapper.
        if finalize.src is not None:
            self.write(finalize.src)

    def _output_child_post(
        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
    ) -> None:
        # Close the finalize call opened in ``_output_child_pre``.
        if finalize.src is not None:
            self.write(")")
86
+
87
+
88
class NativeEnvironment(Environment):
    """An environment that renders templates to native Python types."""

    # Swap in the non-stringifying code generator and concatenation.
    code_generator_class = NativeCodeGenerator
    concat = staticmethod(native_concat)  # type: ignore
93
+
94
+
95
class NativeTemplate(Template):
    environment_class = NativeEnvironment

    def render(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
        """Render the template to produce a native Python type. If the
        result is a single node, its value is returned. Otherwise, the
        nodes are concatenated as strings. If the result can be parsed
        with :func:`ast.literal_eval`, the parsed value is returned.
        Otherwise, the string is returned.
        """
        ctx = self.new_context(dict(*args, **kwargs))

        try:
            # environment_class.concat is native_concat, so the output
            # is folded into a native value rather than a string.
            return self.environment_class.concat(  # type: ignore
                self.root_render_func(ctx)
            )
        except Exception:
            return self.environment.handle_exception()

    async def render_async(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
        # Async variant of :meth:`render`; requires an async-enabled
        # environment.
        if not self.environment.is_async:
            raise RuntimeError(
                "The environment was not created with async mode enabled."
            )

        ctx = self.new_context(dict(*args, **kwargs))

        try:
            return self.environment_class.concat(  # type: ignore
                [n async for n in self.root_render_func(ctx)]  # type: ignore
            )
        except Exception:
            return self.environment.handle_exception()
128
+
129
+
130
+ NativeEnvironment.template_class = NativeTemplate
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/nodes.py ADDED
@@ -0,0 +1,1204 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """AST nodes generated by the parser for the compiler. Also provides
2
+ some node tree helper functions used by the parser and compiler in order
3
+ to normalize nodes.
4
+ """
5
+ import inspect
6
+ import operator
7
+ import typing as t
8
+ from collections import deque
9
+
10
+ from markupsafe import Markup
11
+
12
+ from .utils import _PassArg
13
+
14
+ if t.TYPE_CHECKING:
15
+ import typing_extensions as te
16
+ from .environment import Environment
17
+
18
+ _NodeBound = t.TypeVar("_NodeBound", bound="Node")
19
+
20
+ _binop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {
21
+ "*": operator.mul,
22
+ "/": operator.truediv,
23
+ "//": operator.floordiv,
24
+ "**": operator.pow,
25
+ "%": operator.mod,
26
+ "+": operator.add,
27
+ "-": operator.sub,
28
+ }
29
+
30
+ _uaop_to_func: t.Dict[str, t.Callable[[t.Any], t.Any]] = {
31
+ "not": operator.not_,
32
+ "+": operator.pos,
33
+ "-": operator.neg,
34
+ }
35
+
36
+ _cmpop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {
37
+ "eq": operator.eq,
38
+ "ne": operator.ne,
39
+ "gt": operator.gt,
40
+ "gteq": operator.ge,
41
+ "lt": operator.lt,
42
+ "lteq": operator.le,
43
+ "in": lambda a, b: a in b,
44
+ "notin": lambda a, b: a not in b,
45
+ }
46
+
47
+
48
class Impossible(Exception):
    """Raised if the node could not perform a requested action.

    Most notably signalled during compile-time constant folding when a
    node cannot be evaluated to a constant.
    """
50
+
51
+
52
class NodeType(type):
    """Metaclass for node classes.

    Concatenates the ``fields`` and ``attributes`` tuples declared on the
    (single) base class with those declared on the new class, so child
    nodes automatically inherit their parent's layout. Also defaults the
    ``abstract`` flag to ``False``.
    """

    def __new__(mcs, name, bases, d):  # type: ignore
        parent = bases[0] if bases else object
        for attr in ("fields", "attributes"):
            merged: t.List[str] = []
            merged.extend(getattr(parent, attr, ()))
            merged.extend(d.get(attr, ()))
            assert len(bases) <= 1, "multiple inheritance not allowed"
            assert len(merged) == len(set(merged)), "layout conflict"
            d[attr] = tuple(merged)
        d.setdefault("abstract", False)
        return type.__new__(mcs, name, bases, d)
67
+
68
+
69
class EvalContext:
    """Holds evaluation time information. Custom attributes can be attached
    to it in extensions.
    """

    def __init__(
        self, environment: "Environment", template_name: t.Optional[str] = None
    ) -> None:
        self.environment = environment
        # ``autoescape`` may be a plain flag or a callable that decides
        # per template name.
        autoescape = environment.autoescape
        self.autoescape = (
            autoescape(template_name) if callable(autoescape) else autoescape
        )
        self.volatile = False

    def save(self) -> t.Mapping[str, t.Any]:
        """Snapshot the current state as a plain mapping."""
        return dict(self.__dict__)

    def revert(self, old: t.Mapping[str, t.Any]) -> None:
        """Restore a state previously captured with :meth:`save`."""
        self.__dict__.clear()
        self.__dict__.update(old)
90
+
91
+
92
+ def get_eval_context(node: "Node", ctx: t.Optional[EvalContext]) -> EvalContext:
93
+ if ctx is None:
94
+ if node.environment is None:
95
+ raise RuntimeError(
96
+ "if no eval context is passed, the node must have an"
97
+ " attached environment."
98
+ )
99
+ return EvalContext(node.environment)
100
+ return ctx
101
+
102
+
103
class Node(metaclass=NodeType):
    """Baseclass for all Jinja nodes. There are a number of nodes available
    of different types. There are four major types:

    - :class:`Stmt`: statements
    - :class:`Expr`: expressions
    - :class:`Helper`: helper nodes
    - :class:`Template`: the outermost wrapper node

    All nodes have fields and attributes. Fields may be other nodes, lists,
    or arbitrary values. Fields are passed to the constructor as regular
    positional arguments, attributes as keyword arguments. Each node has
    two attributes: `lineno` (the line number of the node) and `environment`.
    The `environment` attribute is set at the end of the parsing process for
    all nodes automatically.
    """

    # Subclasses extend these via the NodeType metaclass.
    fields: t.Tuple[str, ...] = ()
    attributes: t.Tuple[str, ...] = ("lineno", "environment")
    abstract = True

    lineno: int
    environment: t.Optional["Environment"]

    def __init__(self, *fields: t.Any, **attributes: t.Any) -> None:
        if self.abstract:
            raise TypeError("abstract nodes are not instantiable")
        # Fields are all-or-nothing: either no positional args, or one
        # per declared field.
        if fields:
            if len(fields) != len(self.fields):
                if not self.fields:
                    raise TypeError(f"{type(self).__name__!r} takes 0 arguments")
                raise TypeError(
                    f"{type(self).__name__!r} takes 0 or {len(self.fields)}"
                    f" argument{'s' if len(self.fields) != 1 else ''}"
                )
            for name, arg in zip(self.fields, fields):
                setattr(self, name, arg)
        # Unspecified attributes default to None; leftovers are errors.
        for attr in self.attributes:
            setattr(self, attr, attributes.pop(attr, None))
        if attributes:
            raise TypeError(f"unknown attribute {next(iter(attributes))!r}")

    def iter_fields(
        self,
        exclude: t.Optional[t.Container[str]] = None,
        only: t.Optional[t.Container[str]] = None,
    ) -> t.Iterator[t.Tuple[str, t.Any]]:
        """This method iterates over all fields that are defined and yields
        ``(key, value)`` tuples. Per default all fields are returned, but
        it's possible to limit that to some fields by providing the `only`
        parameter or to exclude some using the `exclude` parameter. Both
        should be sets or tuples of field names.
        """
        for name in self.fields:
            if (
                (exclude is None and only is None)
                or (exclude is not None and name not in exclude)
                or (only is not None and name in only)
            ):
                try:
                    yield name, getattr(self, name)
                except AttributeError:
                    # Fields may be unset when no positional args were given.
                    pass

    def iter_child_nodes(
        self,
        exclude: t.Optional[t.Container[str]] = None,
        only: t.Optional[t.Container[str]] = None,
    ) -> t.Iterator["Node"]:
        """Iterates over all direct child nodes of the node. This iterates
        over all fields and yields the values of they are nodes. If the value
        of a field is a list all the nodes in that list are returned.
        """
        for _, item in self.iter_fields(exclude, only):
            if isinstance(item, list):
                for n in item:
                    if isinstance(n, Node):
                        yield n
            elif isinstance(item, Node):
                yield item

    def find(self, node_type: t.Type[_NodeBound]) -> t.Optional[_NodeBound]:
        """Find the first node of a given type. If no such node exists the
        return value is `None`.
        """
        for result in self.find_all(node_type):
            return result

        return None

    def find_all(
        self, node_type: t.Union[t.Type[_NodeBound], t.Tuple[t.Type[_NodeBound], ...]]
    ) -> t.Iterator[_NodeBound]:
        """Find all the nodes of a given type. If the type is a tuple,
        the check is performed for any of the tuple items.
        """
        for child in self.iter_child_nodes():
            if isinstance(child, node_type):
                yield child  # type: ignore
            yield from child.find_all(node_type)

    def set_ctx(self, ctx: str) -> "Node":
        """Reset the context of a node and all child nodes. Per default the
        parser will all generate nodes that have a 'load' context as it's the
        most common one. This method is used in the parser to set assignment
        targets and other nodes to a store context.
        """
        # Breadth-first traversal over the subtree.
        todo = deque([self])
        while todo:
            node = todo.popleft()
            if "ctx" in node.fields:
                node.ctx = ctx  # type: ignore
            todo.extend(node.iter_child_nodes())
        return self

    def set_lineno(self, lineno: int, override: bool = False) -> "Node":
        """Set the line numbers of the node and children."""
        todo = deque([self])
        while todo:
            node = todo.popleft()
            if "lineno" in node.attributes:
                # Only fill in missing line numbers unless override is set.
                if node.lineno is None or override:
                    node.lineno = lineno
            todo.extend(node.iter_child_nodes())
        return self

    def set_environment(self, environment: "Environment") -> "Node":
        """Set the environment for all nodes."""
        todo = deque([self])
        while todo:
            node = todo.popleft()
            node.environment = environment
            todo.extend(node.iter_child_nodes())
        return self

    def __eq__(self, other: t.Any) -> bool:
        # Nodes compare equal only for the exact same type and fields.
        if type(self) is not type(other):
            return NotImplemented

        return tuple(self.iter_fields()) == tuple(other.iter_fields())

    __hash__ = object.__hash__

    def __repr__(self) -> str:
        args_str = ", ".join(f"{a}={getattr(self, a, None)!r}" for a in self.fields)
        return f"{type(self).__name__}({args_str})"

    def dump(self) -> str:
        # Render the node tree as a nested constructor expression, e.g.
        # ``nodes.Template([nodes.Output([...])])``.
        def _dump(node: t.Union[Node, t.Any]) -> None:
            if not isinstance(node, Node):
                buf.append(repr(node))
                return

            buf.append(f"nodes.{type(node).__name__}(")
            if not node.fields:
                buf.append(")")
                return
            for idx, field in enumerate(node.fields):
                if idx:
                    buf.append(", ")
                value = getattr(node, field)
                if isinstance(value, list):
                    buf.append("[")
                    for idx, item in enumerate(value):
                        if idx:
                            buf.append(", ")
                        _dump(item)
                    buf.append("]")
                else:
                    _dump(value)
            buf.append(")")

        buf: t.List[str] = []
        _dump(self)
        return "".join(buf)
278
+
279
+
280
+ class Stmt(Node):
281
+ """Base node for all statements."""
282
+
283
+ abstract = True
284
+
285
+
286
+ class Helper(Node):
287
+ """Nodes that exist in a specific context only."""
288
+
289
+ abstract = True
290
+
291
+
292
+ class Template(Node):
293
+ """Node that represents a template. This must be the outermost node that
294
+ is passed to the compiler.
295
+ """
296
+
297
+ fields = ("body",)
298
+ body: t.List[Node]
299
+
300
+
301
+ class Output(Stmt):
302
+ """A node that holds multiple expressions which are then printed out.
303
+ This is used both for the `print` statement and the regular template data.
304
+ """
305
+
306
+ fields = ("nodes",)
307
+ nodes: t.List["Expr"]
308
+
309
+
310
+ class Extends(Stmt):
311
+ """Represents an extends statement."""
312
+
313
+ fields = ("template",)
314
+ template: "Expr"
315
+
316
+
317
+ class For(Stmt):
318
+ """The for loop. `target` is the target for the iteration (usually a
319
+ :class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list
320
+ of nodes that are used as loop-body, and `else_` a list of nodes for the
321
+ `else` block. If no else node exists it has to be an empty list.
322
+
323
+ For filtered nodes an expression can be stored as `test`, otherwise `None`.
324
+ """
325
+
326
+ fields = ("target", "iter", "body", "else_", "test", "recursive")
327
+ target: Node
328
+ iter: Node
329
+ body: t.List[Node]
330
+ else_: t.List[Node]
331
+ test: t.Optional[Node]
332
+ recursive: bool
333
+
334
+
335
+ class If(Stmt):
336
+ """If `test` is true, `body` is rendered, else `else_`."""
337
+
338
+ fields = ("test", "body", "elif_", "else_")
339
+ test: Node
340
+ body: t.List[Node]
341
+ elif_: t.List["If"]
342
+ else_: t.List[Node]
343
+
344
+
345
+ class Macro(Stmt):
346
+ """A macro definition. `name` is the name of the macro, `args` a list of
347
+ arguments and `defaults` a list of defaults if there are any. `body` is
348
+ a list of nodes for the macro body.
349
+ """
350
+
351
+ fields = ("name", "args", "defaults", "body")
352
+ name: str
353
+ args: t.List["Name"]
354
+ defaults: t.List["Expr"]
355
+ body: t.List[Node]
356
+
357
+
358
+ class CallBlock(Stmt):
359
+ """Like a macro without a name but a call instead. `call` is called with
360
+ the unnamed macro as `caller` argument this node holds.
361
+ """
362
+
363
+ fields = ("call", "args", "defaults", "body")
364
+ call: "Call"
365
+ args: t.List["Name"]
366
+ defaults: t.List["Expr"]
367
+ body: t.List[Node]
368
+
369
+
370
+ class FilterBlock(Stmt):
371
+ """Node for filter sections."""
372
+
373
+ fields = ("body", "filter")
374
+ body: t.List[Node]
375
+ filter: "Filter"
376
+
377
+
378
+ class With(Stmt):
379
+ """Specific node for with statements. In older versions of Jinja the
380
+ with statement was implemented on the base of the `Scope` node instead.
381
+
382
+ .. versionadded:: 2.9.3
383
+ """
384
+
385
+ fields = ("targets", "values", "body")
386
+ targets: t.List["Expr"]
387
+ values: t.List["Expr"]
388
+ body: t.List[Node]
389
+
390
+
391
+ class Block(Stmt):
392
+ """A node that represents a block.
393
+
394
+ .. versionchanged:: 3.0.0
395
+ the `required` field was added.
396
+ """
397
+
398
+ fields = ("name", "body", "scoped", "required")
399
+ name: str
400
+ body: t.List[Node]
401
+ scoped: bool
402
+ required: bool
403
+
404
+
405
+ class Include(Stmt):
406
+ """A node that represents the include tag."""
407
+
408
+ fields = ("template", "with_context", "ignore_missing")
409
+ template: "Expr"
410
+ with_context: bool
411
+ ignore_missing: bool
412
+
413
+
414
+ class Import(Stmt):
415
+ """A node that represents the import tag."""
416
+
417
+ fields = ("template", "target", "with_context")
418
+ template: "Expr"
419
+ target: str
420
+ with_context: bool
421
+
422
+
423
+ class FromImport(Stmt):
424
+ """A node that represents the from import tag. It's important to not
425
+ pass unsafe names to the name attribute. The compiler translates the
426
+ attribute lookups directly into getattr calls and does *not* use the
427
+ subscript callback of the interface. As exported variables may not
428
+ start with double underscores (which the parser asserts) this is not a
429
+ problem for regular Jinja code, but if this node is used in an extension
430
+ extra care must be taken.
431
+
432
+ The list of names may contain tuples if aliases are wanted.
433
+ """
434
+
435
+ fields = ("template", "names", "with_context")
436
+ template: "Expr"
437
+ names: t.List[t.Union[str, t.Tuple[str, str]]]
438
+ with_context: bool
439
+
440
+
441
+ class ExprStmt(Stmt):
442
+ """A statement that evaluates an expression and discards the result."""
443
+
444
+ fields = ("node",)
445
+ node: Node
446
+
447
+
448
+ class Assign(Stmt):
449
+ """Assigns an expression to a target."""
450
+
451
+ fields = ("target", "node")
452
+ target: "Expr"
453
+ node: Node
454
+
455
+
456
+ class AssignBlock(Stmt):
457
+ """Assigns a block to a target."""
458
+
459
+ fields = ("target", "filter", "body")
460
+ target: "Expr"
461
+ filter: t.Optional["Filter"]
462
+ body: t.List[Node]
463
+
464
+
465
+ class Expr(Node):
466
+ """Baseclass for all expressions."""
467
+
468
+ abstract = True
469
+
470
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
471
+ """Return the value of the expression as constant or raise
472
+ :exc:`Impossible` if this was not possible.
473
+
474
+ An :class:`EvalContext` can be provided, if none is given
475
+ a default context is created which requires the nodes to have
476
+ an attached environment.
477
+
478
+ .. versionchanged:: 2.4
479
+ the `eval_ctx` parameter was added.
480
+ """
481
+ raise Impossible()
482
+
483
+ def can_assign(self) -> bool:
484
+ """Check if it's possible to assign something to this node."""
485
+ return False
486
+
487
+
488
+ class BinExpr(Expr):
489
+ """Baseclass for all binary expressions."""
490
+
491
+ fields = ("left", "right")
492
+ left: Expr
493
+ right: Expr
494
+ operator: str
495
+ abstract = True
496
+
497
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
498
+ eval_ctx = get_eval_context(self, eval_ctx)
499
+
500
+ # intercepted operators cannot be folded at compile time
501
+ if (
502
+ eval_ctx.environment.sandboxed
503
+ and self.operator in eval_ctx.environment.intercepted_binops # type: ignore
504
+ ):
505
+ raise Impossible()
506
+ f = _binop_to_func[self.operator]
507
+ try:
508
+ return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
509
+ except Exception as e:
510
+ raise Impossible() from e
511
+
512
+
513
+ class UnaryExpr(Expr):
514
+ """Baseclass for all unary expressions."""
515
+
516
+ fields = ("node",)
517
+ node: Expr
518
+ operator: str
519
+ abstract = True
520
+
521
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
522
+ eval_ctx = get_eval_context(self, eval_ctx)
523
+
524
+ # intercepted operators cannot be folded at compile time
525
+ if (
526
+ eval_ctx.environment.sandboxed
527
+ and self.operator in eval_ctx.environment.intercepted_unops # type: ignore
528
+ ):
529
+ raise Impossible()
530
+ f = _uaop_to_func[self.operator]
531
+ try:
532
+ return f(self.node.as_const(eval_ctx))
533
+ except Exception as e:
534
+ raise Impossible() from e
535
+
536
+
537
+ class Name(Expr):
538
+ """Looks up a name or stores a value in a name.
539
+ The `ctx` of the node can be one of the following values:
540
+
541
+ - `store`: store a value in the name
542
+ - `load`: load that name
543
+ - `param`: like `store` but if the name was defined as function parameter.
544
+ """
545
+
546
+ fields = ("name", "ctx")
547
+ name: str
548
+ ctx: str
549
+
550
+ def can_assign(self) -> bool:
551
+ return self.name not in {"true", "false", "none", "True", "False", "None"}
552
+
553
+
554
+ class NSRef(Expr):
555
+ """Reference to a namespace value assignment"""
556
+
557
+ fields = ("name", "attr")
558
+ name: str
559
+ attr: str
560
+
561
+ def can_assign(self) -> bool:
562
+ # We don't need any special checks here; NSRef assignments have a
563
+ # runtime check to ensure the target is a namespace object which will
564
+ # have been checked already as it is created using a normal assignment
565
+ # which goes through a `Name` node.
566
+ return True
567
+
568
+
569
+ class Literal(Expr):
570
+ """Baseclass for literals."""
571
+
572
+ abstract = True
573
+
574
+
575
+ class Const(Literal):
576
+ """All constant values. The parser will return this node for simple
577
+ constants such as ``42`` or ``"foo"`` but it can be used to store more
578
+ complex values such as lists too. Only constants with a safe
579
+ representation (objects where ``eval(repr(x)) == x`` is true).
580
+ """
581
+
582
+ fields = ("value",)
583
+ value: t.Any
584
+
585
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
586
+ return self.value
587
+
588
+ @classmethod
589
+ def from_untrusted(
590
+ cls,
591
+ value: t.Any,
592
+ lineno: t.Optional[int] = None,
593
+ environment: "t.Optional[Environment]" = None,
594
+ ) -> "Const":
595
+ """Return a const object if the value is representable as
596
+ constant value in the generated code, otherwise it will raise
597
+ an `Impossible` exception.
598
+ """
599
+ from .compiler import has_safe_repr
600
+
601
+ if not has_safe_repr(value):
602
+ raise Impossible()
603
+ return cls(value, lineno=lineno, environment=environment)
604
+
605
+
606
+ class TemplateData(Literal):
607
+ """A constant template string."""
608
+
609
+ fields = ("data",)
610
+ data: str
611
+
612
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str:
613
+ eval_ctx = get_eval_context(self, eval_ctx)
614
+ if eval_ctx.volatile:
615
+ raise Impossible()
616
+ if eval_ctx.autoescape:
617
+ return Markup(self.data)
618
+ return self.data
619
+
620
+
621
+ class Tuple(Literal):
622
+ """For loop unpacking and some other things like multiple arguments
623
+ for subscripts. Like for :class:`Name` `ctx` specifies if the tuple
624
+ is used for loading the names or storing.
625
+ """
626
+
627
+ fields = ("items", "ctx")
628
+ items: t.List[Expr]
629
+ ctx: str
630
+
631
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[t.Any, ...]:
632
+ eval_ctx = get_eval_context(self, eval_ctx)
633
+ return tuple(x.as_const(eval_ctx) for x in self.items)
634
+
635
+ def can_assign(self) -> bool:
636
+ for item in self.items:
637
+ if not item.can_assign():
638
+ return False
639
+ return True
640
+
641
+
642
+ class List(Literal):
643
+ """Any list literal such as ``[1, 2, 3]``"""
644
+
645
+ fields = ("items",)
646
+ items: t.List[Expr]
647
+
648
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.List[t.Any]:
649
+ eval_ctx = get_eval_context(self, eval_ctx)
650
+ return [x.as_const(eval_ctx) for x in self.items]
651
+
652
+
653
+ class Dict(Literal):
654
+ """Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
655
+ :class:`Pair` nodes.
656
+ """
657
+
658
+ fields = ("items",)
659
+ items: t.List["Pair"]
660
+
661
+ def as_const(
662
+ self, eval_ctx: t.Optional[EvalContext] = None
663
+ ) -> t.Dict[t.Any, t.Any]:
664
+ eval_ctx = get_eval_context(self, eval_ctx)
665
+ return dict(x.as_const(eval_ctx) for x in self.items)
666
+
667
+
668
+ class Pair(Helper):
669
+ """A key, value pair for dicts."""
670
+
671
+ fields = ("key", "value")
672
+ key: Expr
673
+ value: Expr
674
+
675
+ def as_const(
676
+ self, eval_ctx: t.Optional[EvalContext] = None
677
+ ) -> t.Tuple[t.Any, t.Any]:
678
+ eval_ctx = get_eval_context(self, eval_ctx)
679
+ return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
680
+
681
+
682
+ class Keyword(Helper):
683
+ """A key, value pair for keyword arguments where key is a string."""
684
+
685
+ fields = ("key", "value")
686
+ key: str
687
+ value: Expr
688
+
689
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[str, t.Any]:
690
+ eval_ctx = get_eval_context(self, eval_ctx)
691
+ return self.key, self.value.as_const(eval_ctx)
692
+
693
+
694
+ class CondExpr(Expr):
695
+ """A conditional expression (inline if expression). (``{{
696
+ foo if bar else baz }}``)
697
+ """
698
+
699
+ fields = ("test", "expr1", "expr2")
700
+ test: Expr
701
+ expr1: Expr
702
+ expr2: t.Optional[Expr]
703
+
704
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
705
+ eval_ctx = get_eval_context(self, eval_ctx)
706
+ if self.test.as_const(eval_ctx):
707
+ return self.expr1.as_const(eval_ctx)
708
+
709
+ # if we evaluate to an undefined object, we better do that at runtime
710
+ if self.expr2 is None:
711
+ raise Impossible()
712
+
713
+ return self.expr2.as_const(eval_ctx)
714
+
715
+
716
+ def args_as_const(
717
+ node: t.Union["_FilterTestCommon", "Call"], eval_ctx: t.Optional[EvalContext]
718
+ ) -> t.Tuple[t.List[t.Any], t.Dict[t.Any, t.Any]]:
719
+ args = [x.as_const(eval_ctx) for x in node.args]
720
+ kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs)
721
+
722
+ if node.dyn_args is not None:
723
+ try:
724
+ args.extend(node.dyn_args.as_const(eval_ctx))
725
+ except Exception as e:
726
+ raise Impossible() from e
727
+
728
+ if node.dyn_kwargs is not None:
729
+ try:
730
+ kwargs.update(node.dyn_kwargs.as_const(eval_ctx))
731
+ except Exception as e:
732
+ raise Impossible() from e
733
+
734
+ return args, kwargs
735
+
736
+
737
+ class _FilterTestCommon(Expr):
738
+ fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
739
+ node: Expr
740
+ name: str
741
+ args: t.List[Expr]
742
+ kwargs: t.List[Pair]
743
+ dyn_args: t.Optional[Expr]
744
+ dyn_kwargs: t.Optional[Expr]
745
+ abstract = True
746
+ _is_filter = True
747
+
748
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
749
+ eval_ctx = get_eval_context(self, eval_ctx)
750
+
751
+ if eval_ctx.volatile:
752
+ raise Impossible()
753
+
754
+ if self._is_filter:
755
+ env_map = eval_ctx.environment.filters
756
+ else:
757
+ env_map = eval_ctx.environment.tests
758
+
759
+ func = env_map.get(self.name)
760
+ pass_arg = _PassArg.from_obj(func) # type: ignore
761
+
762
+ if func is None or pass_arg is _PassArg.context:
763
+ raise Impossible()
764
+
765
+ if eval_ctx.environment.is_async and (
766
+ getattr(func, "jinja_async_variant", False) is True
767
+ or inspect.iscoroutinefunction(func)
768
+ ):
769
+ raise Impossible()
770
+
771
+ args, kwargs = args_as_const(self, eval_ctx)
772
+ args.insert(0, self.node.as_const(eval_ctx))
773
+
774
+ if pass_arg is _PassArg.eval_context:
775
+ args.insert(0, eval_ctx)
776
+ elif pass_arg is _PassArg.environment:
777
+ args.insert(0, eval_ctx.environment)
778
+
779
+ try:
780
+ return func(*args, **kwargs)
781
+ except Exception as e:
782
+ raise Impossible() from e
783
+
784
+
785
+ class Filter(_FilterTestCommon):
786
+ """Apply a filter to an expression. ``name`` is the name of the
787
+ filter, the other fields are the same as :class:`Call`.
788
+
789
+ If ``node`` is ``None``, the filter is being used in a filter block
790
+ and is applied to the content of the block.
791
+ """
792
+
793
+ node: t.Optional[Expr] # type: ignore
794
+
795
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
796
+ if self.node is None:
797
+ raise Impossible()
798
+
799
+ return super().as_const(eval_ctx=eval_ctx)
800
+
801
+
802
+ class Test(_FilterTestCommon):
803
+ """Apply a test to an expression. ``name`` is the name of the test,
804
+ the other field are the same as :class:`Call`.
805
+
806
+ .. versionchanged:: 3.0
807
+ ``as_const`` shares the same logic for filters and tests. Tests
808
+ check for volatile, async, and ``@pass_context`` etc.
809
+ decorators.
810
+ """
811
+
812
+ _is_filter = False
813
+
814
+
815
+ class Call(Expr):
816
+ """Calls an expression. `args` is a list of arguments, `kwargs` a list
817
+ of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
818
+ and `dyn_kwargs` has to be either `None` or a node that is used as
819
+ node for dynamic positional (``*args``) or keyword (``**kwargs``)
820
+ arguments.
821
+ """
822
+
823
+ fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs")
824
+ node: Expr
825
+ args: t.List[Expr]
826
+ kwargs: t.List[Keyword]
827
+ dyn_args: t.Optional[Expr]
828
+ dyn_kwargs: t.Optional[Expr]
829
+
830
+
831
+ class Getitem(Expr):
832
+ """Get an attribute or item from an expression and prefer the item."""
833
+
834
+ fields = ("node", "arg", "ctx")
835
+ node: Expr
836
+ arg: Expr
837
+ ctx: str
838
+
839
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
840
+ if self.ctx != "load":
841
+ raise Impossible()
842
+
843
+ eval_ctx = get_eval_context(self, eval_ctx)
844
+
845
+ try:
846
+ return eval_ctx.environment.getitem(
847
+ self.node.as_const(eval_ctx), self.arg.as_const(eval_ctx)
848
+ )
849
+ except Exception as e:
850
+ raise Impossible() from e
851
+
852
+
853
+ class Getattr(Expr):
854
+ """Get an attribute or item from an expression that is a ascii-only
855
+ bytestring and prefer the attribute.
856
+ """
857
+
858
+ fields = ("node", "attr", "ctx")
859
+ node: Expr
860
+ attr: str
861
+ ctx: str
862
+
863
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
864
+ if self.ctx != "load":
865
+ raise Impossible()
866
+
867
+ eval_ctx = get_eval_context(self, eval_ctx)
868
+
869
+ try:
870
+ return eval_ctx.environment.getattr(self.node.as_const(eval_ctx), self.attr)
871
+ except Exception as e:
872
+ raise Impossible() from e
873
+
874
+
875
+ class Slice(Expr):
876
+ """Represents a slice object. This must only be used as argument for
877
+ :class:`Subscript`.
878
+ """
879
+
880
+ fields = ("start", "stop", "step")
881
+ start: t.Optional[Expr]
882
+ stop: t.Optional[Expr]
883
+ step: t.Optional[Expr]
884
+
885
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> slice:
886
+ eval_ctx = get_eval_context(self, eval_ctx)
887
+
888
+ def const(obj: t.Optional[Expr]) -> t.Optional[t.Any]:
889
+ if obj is None:
890
+ return None
891
+ return obj.as_const(eval_ctx)
892
+
893
+ return slice(const(self.start), const(self.stop), const(self.step))
894
+
895
+
896
+ class Concat(Expr):
897
+ """Concatenates the list of expressions provided after converting
898
+ them to strings.
899
+ """
900
+
901
+ fields = ("nodes",)
902
+ nodes: t.List[Expr]
903
+
904
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str:
905
+ eval_ctx = get_eval_context(self, eval_ctx)
906
+ return "".join(str(x.as_const(eval_ctx)) for x in self.nodes)
907
+
908
+
909
+ class Compare(Expr):
910
+ """Compares an expression with some other expressions. `ops` must be a
911
+ list of :class:`Operand`\\s.
912
+ """
913
+
914
+ fields = ("expr", "ops")
915
+ expr: Expr
916
+ ops: t.List["Operand"]
917
+
918
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
919
+ eval_ctx = get_eval_context(self, eval_ctx)
920
+ result = value = self.expr.as_const(eval_ctx)
921
+
922
+ try:
923
+ for op in self.ops:
924
+ new_value = op.expr.as_const(eval_ctx)
925
+ result = _cmpop_to_func[op.op](value, new_value)
926
+
927
+ if not result:
928
+ return False
929
+
930
+ value = new_value
931
+ except Exception as e:
932
+ raise Impossible() from e
933
+
934
+ return result
935
+
936
+
937
+ class Operand(Helper):
938
+ """Holds an operator and an expression."""
939
+
940
+ fields = ("op", "expr")
941
+ op: str
942
+ expr: Expr
943
+
944
+
945
+ class Mul(BinExpr):
946
+ """Multiplies the left with the right node."""
947
+
948
+ operator = "*"
949
+
950
+
951
+ class Div(BinExpr):
952
+ """Divides the left by the right node."""
953
+
954
+ operator = "/"
955
+
956
+
957
+ class FloorDiv(BinExpr):
958
+ """Divides the left by the right node and converts the
959
+ result into an integer by truncating.
960
+ """
961
+
962
+ operator = "//"
963
+
964
+
965
+ class Add(BinExpr):
966
+ """Add the left to the right node."""
967
+
968
+ operator = "+"
969
+
970
+
971
+ class Sub(BinExpr):
972
+ """Subtract the right from the left node."""
973
+
974
+ operator = "-"
975
+
976
+
977
+ class Mod(BinExpr):
978
+ """Left modulo right."""
979
+
980
+ operator = "%"
981
+
982
+
983
+ class Pow(BinExpr):
984
+ """Left to the power of right."""
985
+
986
+ operator = "**"
987
+
988
+
989
+ class And(BinExpr):
990
+ """Short circuited AND."""
991
+
992
+ operator = "and"
993
+
994
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
995
+ eval_ctx = get_eval_context(self, eval_ctx)
996
+ return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
997
+
998
+
999
+ class Or(BinExpr):
1000
+ """Short circuited OR."""
1001
+
1002
+ operator = "or"
1003
+
1004
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
1005
+ eval_ctx = get_eval_context(self, eval_ctx)
1006
+ return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
1007
+
1008
+
1009
+ class Not(UnaryExpr):
1010
+ """Negate the expression."""
1011
+
1012
+ operator = "not"
1013
+
1014
+
1015
+ class Neg(UnaryExpr):
1016
+ """Make the expression negative."""
1017
+
1018
+ operator = "-"
1019
+
1020
+
1021
+ class Pos(UnaryExpr):
1022
+ """Make the expression positive (noop for most expressions)"""
1023
+
1024
+ operator = "+"
1025
+
1026
+
1027
+ # Helpers for extensions
1028
+
1029
+
1030
+ class EnvironmentAttribute(Expr):
1031
+ """Loads an attribute from the environment object. This is useful for
1032
+ extensions that want to call a callback stored on the environment.
1033
+ """
1034
+
1035
+ fields = ("name",)
1036
+ name: str
1037
+
1038
+
1039
+ class ExtensionAttribute(Expr):
1040
+ """Returns the attribute of an extension bound to the environment.
1041
+ The identifier is the identifier of the :class:`Extension`.
1042
+
1043
+ This node is usually constructed by calling the
1044
+ :meth:`~jinja2.ext.Extension.attr` method on an extension.
1045
+ """
1046
+
1047
+ fields = ("identifier", "name")
1048
+ identifier: str
1049
+ name: str
1050
+
1051
+
1052
+ class ImportedName(Expr):
1053
+ """If created with an import name the import name is returned on node
1054
+ access. For example ``ImportedName('cgi.escape')`` returns the `escape`
1055
+ function from the cgi module on evaluation. Imports are optimized by the
1056
+ compiler so there is no need to assign them to local variables.
1057
+ """
1058
+
1059
+ fields = ("importname",)
1060
+ importname: str
1061
+
1062
+
1063
+ class InternalName(Expr):
1064
+ """An internal name in the compiler. You cannot create these nodes
1065
+ yourself but the parser provides a
1066
+ :meth:`~jinja2.parser.Parser.free_identifier` method that creates
1067
+ a new identifier for you. This identifier is not available from the
1068
+ template and is not treated specially by the compiler.
1069
+ """
1070
+
1071
+ fields = ("name",)
1072
+ name: str
1073
+
1074
+ def __init__(self) -> None:
1075
+ raise TypeError(
1076
+ "Can't create internal names. Use the "
1077
+ "`free_identifier` method on a parser."
1078
+ )
1079
+
1080
+
1081
+ class MarkSafe(Expr):
1082
+ """Mark the wrapped expression as safe (wrap it as `Markup`)."""
1083
+
1084
+ fields = ("expr",)
1085
+ expr: Expr
1086
+
1087
+ def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> Markup:
1088
+ eval_ctx = get_eval_context(self, eval_ctx)
1089
+ return Markup(self.expr.as_const(eval_ctx))
1090
+
1091
+
1092
+ class MarkSafeIfAutoescape(Expr):
1093
+ """Mark the wrapped expression as safe (wrap it as `Markup`) but
1094
+ only if autoescaping is active.
1095
+
1096
+ .. versionadded:: 2.5
1097
+ """
1098
+
1099
+ fields = ("expr",)
1100
+ expr: Expr
1101
+
1102
+ def as_const(
1103
+ self, eval_ctx: t.Optional[EvalContext] = None
1104
+ ) -> t.Union[Markup, t.Any]:
1105
+ eval_ctx = get_eval_context(self, eval_ctx)
1106
+ if eval_ctx.volatile:
1107
+ raise Impossible()
1108
+ expr = self.expr.as_const(eval_ctx)
1109
+ if eval_ctx.autoescape:
1110
+ return Markup(expr)
1111
+ return expr
1112
+
1113
+
1114
+ class ContextReference(Expr):
1115
+ """Returns the current template context. It can be used like a
1116
+ :class:`Name` node, with a ``'load'`` ctx and will return the
1117
+ current :class:`~jinja2.runtime.Context` object.
1118
+
1119
+ Here an example that assigns the current template name to a
1120
+ variable named `foo`::
1121
+
1122
+ Assign(Name('foo', ctx='store'),
1123
+ Getattr(ContextReference(), 'name'))
1124
+
1125
+ This is basically equivalent to using the
1126
+ :func:`~jinja2.pass_context` decorator when using the high-level
1127
+ API, which causes a reference to the context to be passed as the
1128
+ first argument to a function.
1129
+ """
1130
+
1131
+
1132
+ class DerivedContextReference(Expr):
1133
+ """Return the current template context including locals. Behaves
1134
+ exactly like :class:`ContextReference`, but includes local
1135
+ variables, such as from a ``for`` loop.
1136
+
1137
+ .. versionadded:: 2.11
1138
+ """
1139
+
1140
+
1141
+ class Continue(Stmt):
1142
+ """Continue a loop."""
1143
+
1144
+
1145
+ class Break(Stmt):
1146
+ """Break a loop."""
1147
+
1148
+
1149
+ class Scope(Stmt):
1150
+ """An artificial scope."""
1151
+
1152
+ fields = ("body",)
1153
+ body: t.List[Node]
1154
+
1155
+
1156
+ class OverlayScope(Stmt):
1157
+ """An overlay scope for extensions. This is a largely unoptimized scope
1158
+ that however can be used to introduce completely arbitrary variables into
1159
+ a sub scope from a dictionary or dictionary like object. The `context`
1160
+ field has to evaluate to a dictionary object.
1161
+
1162
+ Example usage::
1163
+
1164
+ OverlayScope(context=self.call_method('get_context'),
1165
+ body=[...])
1166
+
1167
+ .. versionadded:: 2.10
1168
+ """
1169
+
1170
+ fields = ("context", "body")
1171
+ context: Expr
1172
+ body: t.List[Node]
1173
+
1174
+
1175
+ class EvalContextModifier(Stmt):
1176
+ """Modifies the eval context. For each option that should be modified,
1177
+ a :class:`Keyword` has to be added to the :attr:`options` list.
1178
+
1179
+ Example to change the `autoescape` setting::
1180
+
1181
+ EvalContextModifier(options=[Keyword('autoescape', Const(True))])
1182
+ """
1183
+
1184
+ fields = ("options",)
1185
+ options: t.List[Keyword]
1186
+
1187
+
1188
+ class ScopedEvalContextModifier(EvalContextModifier):
1189
+ """Modifies the eval context and reverts it later. Works exactly like
1190
+ :class:`EvalContextModifier` but will only modify the
1191
+ :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
1192
+ """
1193
+
1194
+ fields = ("body",)
1195
+ body: t.List[Node]
1196
+
1197
+
1198
+ # make sure nobody creates custom nodes
1199
+ def _failing_new(*args: t.Any, **kwargs: t.Any) -> "te.NoReturn":
1200
+ raise TypeError("can't create custom node types")
1201
+
1202
+
1203
+ NodeType.__new__ = staticmethod(_failing_new) # type: ignore
1204
+ del _failing_new
tuning-competition-baseline/.venv/lib/python3.11/site-packages/jinja2/optimizer.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """The optimizer tries to constant fold expressions and modify the AST
2
+ in place so that it should be faster to evaluate.
3
+
4
+ Because the AST does not contain all the scoping information and the
5
+ compiler has to find that out, we cannot do all the optimizations we
6
+ want. For example, loop unrolling doesn't work because unrolled loops
7
+ would have a different scope. The solution would be a second syntax tree
8
+ that stored the scoping rules.
9
+ """
10
+ import typing as t
11
+
12
+ from . import nodes
13
+ from .visitor import NodeTransformer
14
+
15
+ if t.TYPE_CHECKING:
16
+ from .environment import Environment
17
+
18
+
19
+ def optimize(node: nodes.Node, environment: "Environment") -> nodes.Node:
20
+ """The context hint can be used to perform an static optimization
21
+ based on the context given."""
22
+ optimizer = Optimizer(environment)
23
+ return t.cast(nodes.Node, optimizer.visit(node))
24
+
25
+
26
+ class Optimizer(NodeTransformer):
27
+ def __init__(self, environment: "t.Optional[Environment]") -> None:
28
+ self.environment = environment
29
+
30
+ def generic_visit(
31
+ self, node: nodes.Node, *args: t.Any, **kwargs: t.Any
32
+ ) -> nodes.Node:
33
+ node = super().generic_visit(node, *args, **kwargs)
34
+
35
+ # Do constant folding. Some other nodes besides Expr have
36
+ # as_const, but folding them causes errors later on.
37
+ if isinstance(node, nodes.Expr):
38
+ try:
39
+ return nodes.Const.from_untrusted(
40
+ node.as_const(args[0] if args else None),
41
+ lineno=node.lineno,
42
+ environment=self.environment,
43
+ )
44
+ except nodes.Impossible:
45
+ pass
46
+
47
+ return node