koichi12 commited on
Commit
8728cfe
·
verified ·
1 Parent(s): 8b76dd8

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
Files changed (50) hide show
  1. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Debugging.py +20 -0
  2. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/atomic.pxd +59 -0
  3. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/bit.pxd +29 -0
  4. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/cmath.pxd +518 -0
  5. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/list.pxd +117 -0
  6. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/map.pxd +252 -0
  7. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/numeric.pxd +131 -0
  8. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/pair.pxd +1 -0
  9. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/set.pxd +228 -0
  10. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/typeinfo.pxd +10 -0
  11. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/utility.pxd +30 -0
  12. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/vector.pxd +167 -0
  13. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/openmp.pxd +50 -0
  14. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Shadow.py +609 -0
  15. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Shadow.pyi +102 -0
  16. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/StringIOTree.cpython-311-x86_64-linux-gnu.so +0 -0
  17. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/StringIOTree.py +174 -0
  18. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/TestUtils.py +398 -0
  19. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utils.py +721 -0
  20. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/__init__.py +12 -0
  21. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/__pycache__/nx_latex.cpython-311.pyc +0 -0
  22. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/nx_agraph.py +469 -0
  23. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__pycache__/test_agraph.cpython-311.pyc +0 -0
  24. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/test_latex.py +292 -0
  25. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/test_pylab.py +791 -0
  26. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/generators/tests/__pycache__/test_classic.cpython-311.pyc +0 -0
  27. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/generators/tests/__pycache__/test_cographs.cpython-311.pyc +0 -0
  28. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/__pycache__/__init__.cpython-311.pyc +0 -0
  29. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/__pycache__/test_lazy_imports.cpython-311.pyc +0 -0
  30. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/test_convert_numpy.py +395 -0
  31. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/test_exceptions.py +40 -0
  32. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/test_relabel.py +347 -0
  33. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/__init__.py +6 -0
  34. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/__pycache__/backends.cpython-311.pyc +0 -0
  35. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/tests/__pycache__/test_misc.cpython-311.pyc +0 -0
  36. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc +0 -0
  37. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/index_command.cpython-311.pyc +0 -0
  38. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc +0 -0
  39. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc +0 -0
  40. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc +0 -0
  41. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc +0 -0
  42. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc +0 -0
  43. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc +0 -0
  44. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/autocompletion.py +176 -0
  45. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc +0 -0
  46. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/cache.cpython-311.pyc +0 -0
  47. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/check.cpython-311.pyc +0 -0
  48. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/completion.cpython-311.pyc +0 -0
  49. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/debug.cpython-311.pyc +0 -0
  50. tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/download.cpython-311.pyc +0 -0
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Debugging.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
###############################################
#
# Odds and ends for debugging
#
###############################################

def print_call_chain(*args):
    """Print *args* on one line, then one "Called from:" line per frame
    of the current Python call stack (innermost caller first), followed
    by a row of dashes.  Frame labels are qualified with the class name
    of a truthy local ``self`` when one is present."""
    import sys
    print(" ".join(str(arg) for arg in args))
    frame = sys._getframe(1)  # skip this function's own frame
    while frame:
        label = frame.f_code.co_name
        holder = frame.f_locals.get('self', None)
        if holder:
            klass = getattr(holder, "__class__", None)
            if klass:
                label = "%s.%s" % (klass.__name__, label)
        print("Called from: %s %s" % (label, frame.f_lineno))
        frame = frame.f_back
    print("-" * 70)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/atomic.pxd ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Cython declarations for the C++11 <atomic> header (std namespace, nogil).
# Exposes std::memory_order and the std::atomic<T> class template.
# `bint` maps to C++ bool; overload sets mirror the C++ API.

cdef extern from "<atomic>" namespace "std" nogil:

    # memory ordering constraints accepted by atomic operations
    cdef enum memory_order:
        memory_order_relaxed
        memory_order_consume
        memory_order_acquire
        memory_order_release
        memory_order_acq_rel
        memory_order_seq_cst

    cdef cppclass atomic[T]:
        atomic()
        atomic(T)

        bint is_lock_free()
        void store(T)
        void store(T, memory_order)
        T load()
        T load(memory_order)
        T exchange(T)
        T exchange(T, memory_order)

        # CAS: first argument is the expected value (updated on failure),
        # second is the desired value; returns whether the swap happened
        bint compare_exchange_weak(T&, T, memory_order, memory_order)
        bint compare_exchange_weak(T&, T, memory_order)
        bint compare_exchange_weak(T&, T)
        bint compare_exchange_strong(T&, T, memory_order, memory_order)
        bint compare_exchange_strong(T&, T, memory_order)
        bint compare_exchange_strong(T&, T)

        # fetch-and-modify operations return the PREVIOUS value
        T fetch_add(T, memory_order)
        T fetch_add(T)
        T fetch_sub(T, memory_order)
        T fetch_sub(T)
        T fetch_and(T, memory_order)
        T fetch_and(T)
        T fetch_or(T, memory_order)
        T fetch_or(T)
        T fetch_xor(T, memory_order)
        T fetch_xor(T)

        T operator++()
        T operator++(int)
        T operator--()
        T operator--(int)

        # modify-in-place operators not yet supported by Cython:
        # T operator+=(T)
        # T operator-=(T)
        # T operator&=(T)
        # T operator|=(T)
        # T operator^=(T)

        bint operator==(atomic[T]&, atomic[T]&)
        bint operator==(atomic[T]&, T&)
        bint operator==(T&, atomic[T]&)
        bint operator!=(atomic[T]&, atomic[T]&)
        bint operator!=(atomic[T]&, T&)
        bint operator!=(T&, atomic[T]&)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/bit.pxd ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Cython declarations for the C++20 <bit> header (std namespace, nogil).
# Compiler-version comments below record the minimum gcc/clang releases
# that ship each group of functions.

cdef extern from "<bit>" namespace "std" nogil:
    # bit_cast (gcc >= 11.0, clang >= 14.0)
    cdef To bit_cast[To, From](From&)

    # byteswap (C++23)
    #cdef T byteswap[T](T)

    # integral powers of 2 (gcc >= 10.0, clang >= 12.0)
    cdef bint has_single_bit[T](T)
    cdef T bit_ceil[T](T)
    cdef T bit_floor[T](T)
    cdef int bit_width[T](T)

    # rotating (gcc >= 9.0, clang >= 9.0)
    cdef T rotl[T](T, int shift)
    cdef T rotr[T](T, int shift)

    # counting (gcc >= 9.0, clang >= 9.0)
    cdef int countl_zero[T](T)
    cdef int countl_one[T](T)
    cdef int countr_zero[T](T)
    cdef int countr_one[T](T)
    cdef int popcount[T](T)

    # endian
    cpdef enum class endian(int):
        little,
        big,
        native
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/cmath.pxd ADDED
@@ -0,0 +1,518 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Cython declarations for the C++ <cmath> header (std namespace, nogil).
# Overload sets cover float / double / long double plus the C99
# f/l-suffixed variants.  Declarations carry `except +` so a C++
# exception raised at run time is converted into a Python exception
# (the C++20 lerp overloads below omit it).

cdef extern from "<cmath>" namespace "std" nogil:
    # all C99 functions

    # inverse trigonometric functions
    float acos(float x) except +
    double acos(double x) except +
    long double acos(long double x) except +
    float acosf(float x) except +
    long double acosl(long double x) except +

    float asin(float x) except +
    double asin(double x) except +
    long double asin(long double x) except +
    float asinf(float x) except +
    long double asinl(long double x) except +

    float atan(float x) except +
    double atan(double x) except +
    long double atan(long double x) except +
    float atanf(float x) except +
    long double atanl(long double x) except +

    # quadrant-correct two-argument arctangent
    float atan2(float y, float x) except +
    double atan2(double y, double x) except +
    long double atan2(long double y, long double x) except +
    float atan2f(float y, float x) except +
    long double atan2l(long double y, long double x) except +

    # trigonometric functions
    float cos(float x) except +
    double cos(double x) except +
    long double cos(long double x) except +
    float cosf(float x) except +
    long double cosl(long double x) except +

    float sin(float x) except +
    double sin(double x) except +
    long double sin(long double x) except +
    float sinf(float x) except +
    long double sinl(long double x) except +

    float tan(float x) except +
    double tan(double x) except +
    long double tan(long double x) except +
    float tanf(float x) except +
    long double tanl(long double x) except +

    # inverse hyperbolic functions
    float acosh(float x) except +
    double acosh(double x) except +
    long double acosh(long double x) except +
    float acoshf(float x) except +
    long double acoshl(long double x) except +

    float asinh(float x) except +
    double asinh(double x) except +
    long double asinh(long double x) except +
    float asinhf(float x) except +
    long double asinhl(long double x) except +

    float atanh(float x) except +
    double atanh(double x) except +
    long double atanh(long double x) except +
    float atanhf(float x) except +
    long double atanhl(long double x) except +

    # hyperbolic functions
    float cosh(float x) except +
    double cosh(double x) except +
    long double cosh(long double x) except +
    float coshf(float x) except +
    long double coshl(long double x) except +

    float sinh(float x) except +
    double sinh(double x) except +
    long double sinh(long double x) except +
    float sinhf(float x) except +
    long double sinhl(long double x) except +

    float tanh(float x) except +
    double tanh(double x) except +
    long double tanh(long double x) except +
    float tanhf(float x) except +
    long double tanhl(long double x) except +

    # exponential functions
    float exp(float x) except +
    double exp(double x) except +
    long double exp(long double x) except +
    float expf(float x) except +
    long double expl(long double x) except +

    float exp2(float x) except +
    double exp2(double x) except +
    long double exp2(long double x) except +
    float exp2f(float x) except +
    long double exp2l(long double x) except +

    # expm1(x) == exp(x) - 1, accurate near zero
    float expm1(float x) except +
    double expm1(double x) except +
    long double expm1(long double x) except +
    float expm1f(float x) except +
    long double expm1l(long double x) except +

    # decompose value into normalized fraction and power of two
    float frexp(float value, int* exp) except +
    double frexp(double value, int* exp) except +
    long double frexp(long double value, int* exp) except +
    float frexpf(float value, int* exp) except +
    long double frexpl(long double value, int* exp) except +

    # unbiased exponent of the argument as a signed int
    int ilogb(float x) except +
    int ilogb(double x) except +
    int ilogb(long double x) except +
    int ilogbf(float x) except +
    int ilogbl(long double x) except +

    # x * 2**exp
    float ldexp(float x, int exp) except +
    double ldexp(double x, int exp) except +
    long double ldexp(long double x, int exp) except +
    float ldexpf(float x, int exp) except +
    long double ldexpl(long double x, int exp) except +

    # logarithms
    float log(float x) except +
    double log(double x) except +
    long double log(long double x) except +
    float logf(float x) except +
    long double logl(long double x) except +

    float log10(float x) except +
    double log10(double x) except +
    long double log10(long double x) except +
    float log10f(float x) except +
    long double log10l(long double x) except +

    # log1p(x) == log(1 + x), accurate near zero
    float log1p(float x) except +
    double log1p(double x) except +
    long double log1p(long double x) except +
    float log1pf(float x) except +
    long double log1pl(long double x) except +

    float log2(float x) except +
    double log2(double x) except +
    long double log2(long double x) except +
    float log2f(float x) except +
    long double log2l(long double x) except +

    float logb(float x) except +
    double logb(double x) except +
    long double logb(long double x) except +
    float logbf(float x) except +
    long double logbl(long double x) except +

    # split value into integral and fractional parts
    float modf(float value, float* iptr) except +
    double modf(double value, double* iptr) except +
    long double modf(long double value, long double* iptr) except +
    float modff(float value, float* iptr) except +
    long double modfl(long double value, long double* iptr) except +

    # x * FLT_RADIX**n
    float scalbn(float x, int n) except +
    double scalbn(double x, int n) except +
    long double scalbn(long double x, int n) except +
    float scalbnf(float x, int n) except +
    long double scalbnl(long double x, int n) except +

    float scalbln(float x, long int n) except +
    double scalbln(double x, long int n) except +
    long double scalbln(long double x, long int n) except +
    float scalblnf(float x, long int n) except +
    long double scalblnl(long double x, long int n) except +

    # cube root
    float cbrt(float x) except +
    double cbrt(double x) except +
    long double cbrt(long double x) except +
    float cbrtf(float x) except +
    long double cbrtl(long double x) except +

    # absolute values
    int abs(int j) except +
    long int abs(long int j) except +
    long long int abs(long long int j) except +
    float abs(float j) except +
    double abs(double j) except +
    long double abs(long double j) except +

    float fabs(float x) except +
    double fabs(double x) except +
    long double fabs(long double x) except +
    float fabsf(float x) except +
    long double fabsl(long double x) except +

    # hypotenuse: sqrt(x*x + y*y) without undue overflow
    float hypot(float x, float y) except +
    double hypot(double x, double y) except +
    long double hypot(long double x, long double y) except +
    float hypotf(float x, float y) except +
    long double hypotl(long double x, long double y) except +

    # C++17 three-dimensional hypotenuse
    float hypot(float x, float y, float z) except +
    double hypot(double x, double y, double z) except +
    long double hypot(long double x, long double y, long double z) except +

    float pow(float x, float y) except +
    double pow(double x, double y) except +
    long double pow(long double x, long double y) except +
    float powf(float x, float y) except +
    long double powl(long double x, long double y) except +

    float sqrt(float x) except +
    double sqrt(double x) except +
    long double sqrt(long double x) except +
    float sqrtf(float x) except +
    long double sqrtl(long double x) except +

    # error functions
    float erf(float x) except +
    double erf(double x) except +
    long double erf(long double x) except +
    float erff(float x) except +
    long double erfl(long double x) except +

    float erfc(float x) except +
    double erfc(double x) except +
    long double erfc(long double x) except +
    float erfcf(float x) except +
    long double erfcl(long double x) except +

    # gamma functions: log-gamma and true gamma
    float lgamma(float x) except +
    double lgamma(double x) except +
    long double lgamma(long double x) except +
    float lgammaf(float x) except +
    long double lgammal(long double x) except +

    float tgamma(float x) except +
    double tgamma(double x) except +
    long double tgamma(long double x) except +
    float tgammaf(float x) except +
    long double tgammal(long double x) except +

    # rounding functions
    float ceil(float x) except +
    double ceil(double x) except +
    long double ceil(long double x) except +
    float ceilf(float x) except +
    long double ceill(long double x) except +

    float floor(float x) except +
    double floor(double x) except +
    long double floor(long double x) except +
    float floorf(float x) except +
    long double floorl(long double x) except +

    float nearbyint(float x) except +
    double nearbyint(double x) except +
    long double nearbyint(long double x) except +
    float nearbyintf(float x) except +
    long double nearbyintl(long double x) except +

    float rint(float x) except +
    double rint(double x) except +
    long double rint(long double x) except +
    float rintf(float x) except +
    long double rintl(long double x) except +

    long int lrint(float x) except +
    long int lrint(double x) except +
    long int lrint(long double x) except +
    long int lrintf(float x) except +
    long int lrintl(long double x) except +

    long long int llrint(float x) except +
    long long int llrint(double x) except +
    long long int llrint(long double x) except +
    long long int llrintf(float x) except +
    long long int llrintl(long double x) except +

    float round(float x) except +
    double round(double x) except +
    long double round(long double x) except +
    float roundf(float x) except +
    long double roundl(long double x) except +

    long int lround(float x) except +
    long int lround(double x) except +
    long int lround(long double x) except +
    long int lroundf(float x) except +
    long int lroundl(long double x) except +

    long long int llround(float x) except +
    long long int llround(double x) except +
    long long int llround(long double x) except +
    long long int llroundf(float x) except +
    long long int llroundl(long double x) except +

    float trunc(float x) except +
    double trunc(double x) except +
    long double trunc(long double x) except +
    float truncf(float x) except +
    long double truncl(long double x) except +

    # remainder functions
    float fmod(float x, float y) except +
    double fmod(double x, double y) except +
    long double fmod(long double x, long double y) except +
    float fmodf(float x, float y) except +
    long double fmodl(long double x, long double y) except +

    float remainder(float x, float y) except +
    double remainder(double x, double y) except +
    long double remainder(long double x, long double y) except +
    float remainderf(float x, float y) except +
    long double remainderl(long double x, long double y) except +

    float remquo(float x, float y, int* quo) except +
    double remquo(double x, double y, int* quo) except +
    long double remquo(long double x, long double y, int* quo) except +
    float remquof(float x, float y, int* quo) except +
    long double remquol(long double x, long double y, int* quo) except +

    # floating-point manipulation
    float copysign(float x, float y) except +
    double copysign(double x, double y) except +
    long double copysign(long double x, long double y) except +
    float copysignf(float x, float y) except +
    long double copysignl(long double x, long double y) except +

    double nan(const char* tagp) except +
    float nanf(const char* tagp) except +
    long double nanl(const char* tagp) except +

    float nextafter(float x, float y) except +
    double nextafter(double x, double y) except +
    long double nextafter(long double x, long double y) except +
    float nextafterf(float x, float y) except +
    long double nextafterl(long double x, long double y) except +

    float nexttoward(float x, long double y) except +
    double nexttoward(double x, long double y) except +
    long double nexttoward(long double x, long double y) except +
    float nexttowardf(float x, long double y) except +
    long double nexttowardl(long double x, long double y) except +

    # positive difference, min, max, fused multiply-add
    float fdim(float x, float y) except +
    double fdim(double x, double y) except +
    long double fdim(long double x, long double y) except +
    float fdimf(float x, float y) except +
    long double fdiml(long double x, long double y) except +

    float fmax(float x, float y) except +
    double fmax(double x, double y) except +
    long double fmax(long double x, long double y) except +
    float fmaxf(float x, float y) except +
    long double fmaxl(long double x, long double y) except +

    float fmin(float x, float y) except +
    double fmin(double x, double y) except +
    long double fmin(long double x, long double y) except +
    float fminf(float x, float y) except +
    long double fminl(long double x, long double y) except +

    float fma(float x, float y, float z) except +
    double fma(double x, double y, double z) except +
    long double fma(long double x, long double y, long double z) except +
    float fmaf(float x, float y, float z) except +
    long double fmal(long double x, long double y, long double z) except +

    # C++20 linear interpolation
    float lerp(float a, float b, float t)
    double lerp(double a, double b, double t)
    long double lerp(long double a, long double b, long double t)

    # classification / comparison functions
    int fpclassify(float x) except +
    int fpclassify(double x) except +
    int fpclassify(long double x) except +

    bint isfinite(float x) except +
    bint isfinite(double x) except +
    bint isfinite(long double x) except +

    bint isinf(float x) except +
    bint isinf(double x) except +
    bint isinf(long double x) except +

    bint isnan(float x) except +
    bint isnan(double x) except +
    bint isnan(long double x) except +

    bint isnormal(float x) except +
    bint isnormal(double x) except +
    bint isnormal(long double x) except +

    bint signbit(float x) except +
    bint signbit(double x) except +
    bint signbit(long double x) except +

    bint isgreater(float x, float y) except +
    bint isgreater(double x, double y) except +
    bint isgreater(long double x, long double y) except +

    bint isgreaterequal(float x, float y) except +
    bint isgreaterequal(double x, double y) except +
    bint isgreaterequal(long double x, long double y) except +

    bint isless(float x, float y) except +
    bint isless(double x, double y) except +
    bint isless(long double x, long double y) except +

    bint islessequal(float x, float y) except +
    bint islessequal(double x, double y) except +
    bint islessequal(long double x, long double y) except +

    bint islessgreater(float x, float y) except +
    bint islessgreater(double x, double y) except +
    bint islessgreater(long double x, long double y) except +

    bint isunordered(float x, float y) except +
    bint isunordered(double x, double y) except +
    bint isunordered(long double x, long double y) except +

    # C++17 mathematical special functions

    # associated Laguerre polynomials
    double assoc_laguerre(unsigned int n, unsigned int m, double x) except +
    float assoc_laguerref(unsigned int n, unsigned int m, float x) except +
    long double assoc_laguerrel(unsigned int n, unsigned int m, long double x) except +

    # associated Legendre functions
    double assoc_legendre(unsigned int l, unsigned int m, double x) except +
    float assoc_legendref(unsigned int l, unsigned int m, float x) except +
    long double assoc_legendrel(unsigned int l, unsigned int m, long double x) except +

    # beta function
    double beta(double x, double y) except +
    float betaf(float x, float y) except +
    long double betal(long double x, long double y) except +

    # complete elliptic integral of the first kind
    double comp_ellint_1(double k) except +
    float comp_ellint_1f(float k) except +
    long double comp_ellint_1l(long double k) except +

    # complete elliptic integral of the second kind
    double comp_ellint_2(double k) except +
    float comp_ellint_2f(float k) except +
    long double comp_ellint_2l(long double k) except +

    # complete elliptic integral of the third kind
    double comp_ellint_3(double k, double nu) except +
    float comp_ellint_3f(float k, float nu) except +
    long double comp_ellint_3l(long double k, long double nu) except +

    # regular modified cylindrical Bessel functions
    double cyl_bessel_i(double nu, double x) except +
    float cyl_bessel_if(float nu, float x) except +
    long double cyl_bessel_il(long double nu, long double x) except +

    # cylindrical Bessel functions of the first kind
    double cyl_bessel_j(double nu, double x) except +
    float cyl_bessel_jf(float nu, float x) except +
    long double cyl_bessel_jl(long double nu, long double x) except +

    # irregular modified cylindrical Bessel functions
    double cyl_bessel_k(double nu, double x) except +
    float cyl_bessel_kf(float nu, float x) except +
    long double cyl_bessel_kl(long double nu, long double x) except +

    # cylindrical Neumann functions
    # cylindrical Bessel functions of the second kind
    double cyl_neumann(double nu, double x) except +
    float cyl_neumannf(float nu, float x) except +
    long double cyl_neumannl(long double nu, long double x) except +

    # incomplete elliptic integral of the first kind
    double ellint_1(double k, double phi) except +
    float ellint_1f(float k, float phi) except +
    long double ellint_1l(long double k, long double phi) except +

    # incomplete elliptic integral of the second kind
    double ellint_2(double k, double phi) except +
    float ellint_2f(float k, float phi) except +
    long double ellint_2l(long double k, long double phi) except +

    # incomplete elliptic integral of the third kind
    double ellint_3(double k, double nu, double phi) except +
    float ellint_3f(float k, float nu, float phi) except +
    long double ellint_3l(long double k, long double nu, long double phi) except +

    # exponential integral
    double expint(double x) except +
    float expintf(float x) except +
    long double expintl(long double x) except +

    # Hermite polynomials
    double hermite(unsigned int n, double x) except +
    float hermitef(unsigned int n, float x) except +
    long double hermitel(unsigned int n, long double x) except +

    # Laguerre polynomials
    double laguerre(unsigned int n, double x) except +
    float laguerref(unsigned int n, float x) except +
    long double laguerrel(unsigned int n, long double x) except +

    # Legendre polynomials
    double legendre(unsigned int l, double x) except +
    float legendref(unsigned int l, float x) except +
    long double legendrel(unsigned int l, long double x) except +

    # Riemann zeta function
    double riemann_zeta(double x) except +
    float riemann_zetaf(float x) except +
    long double riemann_zetal(long double x) except +

    # spherical Bessel functions of the first kind
    double sph_bessel(unsigned int n, double x) except +
    float sph_besself(unsigned int n, float x) except +
    long double sph_bessell(unsigned int n, long double x) except +

    # spherical associated Legendre functions
    double sph_legendre(unsigned int l, unsigned int m, double theta) except +
    float sph_legendref(unsigned int l, unsigned int m, float theta) except +
    long double sph_legendrel(unsigned int l, unsigned int m, long double theta) except +

    # spherical Neumann functions
    # spherical Bessel functions of the second kind
    double sph_neumann(unsigned int n, double x) except +
    float sph_neumannf(unsigned int n, float x) except +
    long double sph_neumannl(unsigned int n, long double x) except +
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/list.pxd ADDED
@@ -0,0 +1,117 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ cdef extern from "<list>" namespace "std" nogil:
2
+ cdef cppclass list[T,ALLOCATOR=*]:
3
+ ctypedef T value_type
4
+ ctypedef ALLOCATOR allocator_type
5
+
6
+ # these should really be allocator_type.size_type and
7
+ # allocator_type.difference_type to be true to the C++ definition
8
+ # but cython doesn't support deferred access on template arguments
9
+ ctypedef size_t size_type
10
+ ctypedef ptrdiff_t difference_type
11
+
12
+ cppclass const_iterator
13
+ cppclass iterator:
14
+ iterator() except +
15
+ iterator(iterator&) except +
16
+ value_type& operator*()
17
+ iterator operator++()
18
+ iterator operator--()
19
+ iterator operator++(int)
20
+ iterator operator--(int)
21
+ bint operator==(iterator)
22
+ bint operator==(const_iterator)
23
+ bint operator!=(iterator)
24
+ bint operator!=(const_iterator)
25
+ cppclass const_iterator:
26
+ const_iterator() except +
27
+ const_iterator(iterator&) except +
28
+ const_iterator(const_iterator&) except +
29
+ operator=(iterator&) except +
30
+ const value_type& operator*()
31
+ const_iterator operator++()
32
+ const_iterator operator--()
33
+ const_iterator operator++(int)
34
+ const_iterator operator--(int)
35
+ bint operator==(iterator)
36
+ bint operator==(const_iterator)
37
+ bint operator!=(iterator)
38
+ bint operator!=(const_iterator)
39
+
40
+ cppclass const_reverse_iterator
41
+ cppclass reverse_iterator:
42
+ reverse_iterator() except +
43
+ reverse_iterator(reverse_iterator&) except +
44
+ value_type& operator*()
45
+ reverse_iterator operator++()
46
+ reverse_iterator operator--()
47
+ reverse_iterator operator++(int)
48
+ reverse_iterator operator--(int)
49
+ bint operator==(reverse_iterator)
50
+ bint operator==(const_reverse_iterator)
51
+ bint operator!=(reverse_iterator)
52
+ bint operator!=(const_reverse_iterator)
53
+ cppclass const_reverse_iterator:
54
+ const_reverse_iterator() except +
55
+ const_reverse_iterator(reverse_iterator&) except +
56
+ operator=(reverse_iterator&) except +
57
+ const value_type& operator*()
58
+ const_reverse_iterator operator++()
59
+ const_reverse_iterator operator--()
60
+ const_reverse_iterator operator++(int)
61
+ const_reverse_iterator operator--(int)
62
+ bint operator==(reverse_iterator)
63
+ bint operator==(const_reverse_iterator)
64
+ bint operator!=(reverse_iterator)
65
+ bint operator!=(const_reverse_iterator)
66
+
67
+ list() except +
68
+ list(list&) except +
69
+ list(size_t, T&) except +
70
+ #list operator=(list&)
71
+ bint operator==(list&, list&)
72
+ bint operator!=(list&, list&)
73
+ bint operator<(list&, list&)
74
+ bint operator>(list&, list&)
75
+ bint operator<=(list&, list&)
76
+ bint operator>=(list&, list&)
77
+ void assign(size_t, T&) except +
78
+ T& back()
79
+ iterator begin()
80
+ const_iterator const_begin "begin"()
81
+ const_iterator cbegin()
82
+ void clear()
83
+ bint empty()
84
+ iterator end()
85
+ const_iterator const_end "end"()
86
+ const_iterator cend()
87
+ iterator erase(iterator)
88
+ iterator erase(iterator, iterator)
89
+ T& front()
90
+ iterator insert(iterator, T&)
91
+ void insert(iterator, size_t, T&)
92
+ size_t max_size()
93
+ void merge(list&) except +
94
+ #void merge(list&, BinPred)
95
+ void pop_back()
96
+ void pop_front()
97
+ void push_back(T&) except +
98
+ void push_front(T&) except +
99
+ reverse_iterator rbegin()
100
+ const_reverse_iterator const_rbegin "rbegin"()
101
+ const_reverse_iterator crbegin()
102
+ void remove(T&) except +
103
+ #void remove_if(UnPred)
104
+ reverse_iterator rend()
105
+ const_reverse_iterator const_rend "rend"()
106
+ const_reverse_iterator crend()
107
+ void resize(size_t, T&) except +
108
+ void reverse()
109
+ size_t size()
110
+ void sort() except +
111
+ #void sort(BinPred)
112
+ void splice(iterator, list&)
113
+ void splice(iterator, list&, iterator)
114
+ void splice(iterator, list&, iterator, iterator)
115
+ void swap(list&)
116
+ void unique()
117
+ #void unique(BinPred)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/map.pxd ADDED
@@ -0,0 +1,252 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .utility cimport pair
2
+
3
+ cdef extern from "<map>" namespace "std" nogil:
4
+ cdef cppclass map[T, U, COMPARE=*, ALLOCATOR=*]:
5
+ ctypedef T key_type
6
+ ctypedef U mapped_type
7
+ ctypedef pair[const T, U] value_type
8
+ ctypedef COMPARE key_compare
9
+ ctypedef ALLOCATOR allocator_type
10
+
11
+ # these should really be allocator_type.size_type and
12
+ # allocator_type.difference_type to be true to the C++ definition
13
+ # but cython doesn't support deferred access on template arguments
14
+ ctypedef size_t size_type
15
+ ctypedef ptrdiff_t difference_type
16
+
17
+ cppclass const_iterator
18
+ cppclass iterator:
19
+ iterator() except +
20
+ iterator(iterator&) except +
21
+ # correct would be value_type& but this does not work
22
+ # well with cython's code gen
23
+ pair[T, U]& operator*()
24
+ iterator operator++()
25
+ iterator operator--()
26
+ iterator operator++(int)
27
+ iterator operator--(int)
28
+ bint operator==(iterator)
29
+ bint operator==(const_iterator)
30
+ bint operator!=(iterator)
31
+ bint operator!=(const_iterator)
32
+ cppclass const_iterator:
33
+ const_iterator() except +
34
+ const_iterator(iterator&) except +
35
+ const_iterator(const_iterator&) except +
36
+ operator=(iterator&) except +
37
+ # correct would be const value_type& but this does not work
38
+ # well with cython's code gen
39
+ const pair[T, U]& operator*()
40
+ const_iterator operator++()
41
+ const_iterator operator--()
42
+ const_iterator operator++(int)
43
+ const_iterator operator--(int)
44
+ bint operator==(iterator)
45
+ bint operator==(const_iterator)
46
+ bint operator!=(iterator)
47
+ bint operator!=(const_iterator)
48
+
49
+ cppclass const_reverse_iterator
50
+ cppclass reverse_iterator:
51
+ reverse_iterator() except +
52
+ reverse_iterator(reverse_iterator&) except +
53
+ # correct would be value_type& but this does not work
54
+ # well with cython's code gen
55
+ pair[T, U]& operator*()
56
+ reverse_iterator operator++()
57
+ reverse_iterator operator--()
58
+ reverse_iterator operator++(int)
59
+ reverse_iterator operator--(int)
60
+ bint operator==(reverse_iterator)
61
+ bint operator==(const_reverse_iterator)
62
+ bint operator!=(reverse_iterator)
63
+ bint operator!=(const_reverse_iterator)
64
+ cppclass const_reverse_iterator:
65
+ const_reverse_iterator() except +
66
+ const_reverse_iterator(reverse_iterator&) except +
67
+ operator=(reverse_iterator&) except +
68
+ # correct would be const value_type& but this does not work
69
+ # well with cython's code gen
70
+ const pair[T, U]& operator*()
71
+ const_reverse_iterator operator++()
72
+ const_reverse_iterator operator--()
73
+ const_reverse_iterator operator++(int)
74
+ const_reverse_iterator operator--(int)
75
+ bint operator==(reverse_iterator)
76
+ bint operator==(const_reverse_iterator)
77
+ bint operator!=(reverse_iterator)
78
+ bint operator!=(const_reverse_iterator)
79
+
80
+ map() except +
81
+ map(map&) except +
82
+ #map(key_compare&)
83
+ U& operator[](const T&)
84
+ #map& operator=(map&)
85
+ bint operator==(map&, map&)
86
+ bint operator!=(map&, map&)
87
+ bint operator<(map&, map&)
88
+ bint operator>(map&, map&)
89
+ bint operator<=(map&, map&)
90
+ bint operator>=(map&, map&)
91
+ U& at(const T&) except +
92
+ const U& const_at "at"(const T&) except +
93
+ iterator begin()
94
+ const_iterator const_begin "begin" ()
95
+ const_iterator cbegin()
96
+ void clear()
97
+ size_t count(const T&)
98
+ bint empty()
99
+ iterator end()
100
+ const_iterator const_end "end" ()
101
+ const_iterator cend()
102
+ pair[iterator, iterator] equal_range(const T&)
103
+ pair[const_iterator, const_iterator] const_equal_range "equal_range"(const T&)
104
+ iterator erase(iterator)
105
+ iterator const_erase "erase"(const_iterator)
106
+ iterator erase(const_iterator, const_iterator)
107
+ size_t erase(const T&)
108
+ iterator find(const T&)
109
+ const_iterator const_find "find" (const T&)
110
+ pair[iterator, bint] insert(const pair[T, U]&) except +
111
+ iterator insert(const_iterator, const pair[T, U]&) except +
112
+ void insert[InputIt](InputIt, InputIt) except +
113
+ #key_compare key_comp()
114
+ iterator lower_bound(const T&)
115
+ const_iterator const_lower_bound "lower_bound"(const T&)
116
+ size_t max_size()
117
+ reverse_iterator rbegin()
118
+ const_reverse_iterator const_rbegin "rbegin"()
119
+ const_reverse_iterator crbegin()
120
+ reverse_iterator rend()
121
+ const_reverse_iterator const_rend "rend"()
122
+ const_reverse_iterator crend()
123
+ size_t size()
124
+ void swap(map&)
125
+ iterator upper_bound(const T&)
126
+ const_iterator const_upper_bound "upper_bound"(const T&)
127
+ #value_compare value_comp()
128
+ # C++20
129
+ bint contains(const T&)
130
+
131
+ cdef cppclass multimap[T, U, COMPARE=*, ALLOCATOR=*]:
132
+ ctypedef T key_type
133
+ ctypedef U mapped_type
134
+ ctypedef pair[const T, U] value_type
135
+ ctypedef COMPARE key_compare
136
+ ctypedef ALLOCATOR allocator_type
137
+
138
+ # these should really be allocator_type.size_type and
139
+ # allocator_type.difference_type to be true to the C++ definition
140
+ # but cython doesn't support deferred access on template arguments
141
+ ctypedef size_t size_type
142
+ ctypedef ptrdiff_t difference_type
143
+
144
+ cppclass const_iterator
145
+ cppclass iterator:
146
+ iterator() except +
147
+ iterator(iterator&) except +
148
+ # correct would be value_type& but this does not work
149
+ # well with cython's code gen
150
+ pair[T, U]& operator*()
151
+ iterator operator++()
152
+ iterator operator--()
153
+ iterator operator++(int)
154
+ iterator operator--(int)
155
+ bint operator==(iterator)
156
+ bint operator==(const_iterator)
157
+ bint operator!=(iterator)
158
+ bint operator!=(const_iterator)
159
+ cppclass const_iterator:
160
+ const_iterator() except +
161
+ const_iterator(iterator&) except +
162
+ const_iterator(const_iterator&) except +
163
+ operator=(iterator&) except +
164
+ # correct would be const value_type& but this does not work
165
+ # well with cython's code gen
166
+ const pair[T, U]& operator*()
167
+ const_iterator operator++()
168
+ const_iterator operator--()
169
+ const_iterator operator++(int)
170
+ const_iterator operator--(int)
171
+ bint operator==(iterator)
172
+ bint operator==(const_iterator)
173
+ bint operator!=(iterator)
174
+ bint operator!=(const_iterator)
175
+
176
+ cppclass const_reverse_iterator
177
+ cppclass reverse_iterator:
178
+ reverse_iterator() except +
179
+ reverse_iterator(reverse_iterator&) except +
180
+ # correct would be value_type& but this does not work
181
+ # well with cython's code gen
182
+ pair[T, U]& operator*()
183
+ reverse_iterator operator++()
184
+ reverse_iterator operator--()
185
+ reverse_iterator operator++(int)
186
+ reverse_iterator operator--(int)
187
+ bint operator==(reverse_iterator)
188
+ bint operator==(const_reverse_iterator)
189
+ bint operator!=(reverse_iterator)
190
+ bint operator!=(const_reverse_iterator)
191
+ cppclass const_reverse_iterator:
192
+ const_reverse_iterator() except +
193
+ const_reverse_iterator(reverse_iterator&) except +
194
+ operator=(reverse_iterator&) except +
195
+ # correct would be const value_type& but this does not work
196
+ # well with cython's code gen
197
+ const pair[T, U]& operator*()
198
+ const_reverse_iterator operator++()
199
+ const_reverse_iterator operator--()
200
+ const_reverse_iterator operator++(int)
201
+ const_reverse_iterator operator--(int)
202
+ bint operator==(reverse_iterator)
203
+ bint operator==(const_reverse_iterator)
204
+ bint operator!=(reverse_iterator)
205
+ bint operator!=(const_reverse_iterator)
206
+
207
+ multimap() except +
208
+ multimap(const multimap&) except +
209
+ #multimap(key_compare&)
210
+ #multimap& operator=(multimap&)
211
+ bint operator==(const multimap&, const multimap&)
212
+ bint operator!=(const multimap&, const multimap&)
213
+ bint operator<(const multimap&, const multimap&)
214
+ bint operator>(const multimap&, const multimap&)
215
+ bint operator<=(const multimap&, const multimap&)
216
+ bint operator>=(const multimap&, const multimap&)
217
+ iterator begin()
218
+ const_iterator const_begin "begin"()
219
+ const_iterator cbegin()
220
+ void clear()
221
+ size_t count(const T&)
222
+ bint empty()
223
+ iterator end()
224
+ const_iterator const_end "end"()
225
+ const_iterator cend()
226
+ pair[iterator, iterator] equal_range(const T&)
227
+ pair[const_iterator, const_iterator] const_equal_range "equal_range"(const T&)
228
+ iterator erase(iterator)
229
+ iterator const_erase "erase"(const_iterator)
230
+ iterator erase(const_iterator, const_iterator)
231
+ size_t erase(const T&)
232
+ iterator find(const T&)
233
+ const_iterator const_find "find"(const T&)
234
+ iterator insert(const pair[T, U]&) except +
235
+ iterator insert(const_iterator, const pair[T, U]&) except +
236
+ void insert[InputIt](InputIt, InputIt) except +
237
+ #key_compare key_comp()
238
+ iterator lower_bound(const T&)
239
+ const_iterator const_lower_bound "lower_bound"(const T&)
240
+ size_t max_size()
241
+ reverse_iterator rbegin()
242
+ const_reverse_iterator const_rbegin "rbegin"()
243
+ const_reverse_iterator crbegin()
244
+ reverse_iterator rend()
245
+ const_reverse_iterator const_rend "rend"()
246
+ const_reverse_iterator crend()
247
+ size_t size()
248
+ void swap(multimap&)
249
+ iterator upper_bound(const T&)
250
+ const_iterator const_upper_bound "upper_bound"(const T&)
251
+ #value_compare value_comp()
252
+ bint contains(const T&)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/numeric.pxd ADDED
@@ -0,0 +1,131 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ cdef extern from "<numeric>" namespace "std" nogil:
2
+ T inner_product[InputIt1, InputIt2, T](InputIt1 first1, InputIt1 last1, InputIt2 first2, T init)
3
+
4
+ T inner_product[InputIt1, InputIt2, T, BinaryOperation1, BinaryOperation2](InputIt1 first1, InputIt1 last1,
5
+ InputIt2 first2, T init,
6
+ BinaryOperation1 op1,
7
+ BinaryOperation2 op2)
8
+
9
+ void iota[ForwardIt, T](ForwardIt first, ForwardIt last, T value)
10
+
11
+ T accumulate[InputIt, T](InputIt first, InputIt last, T init)
12
+
13
+ T accumulate[InputIt, T, BinaryOperation](InputIt first, InputIt last, T init, BinaryOperation op)
14
+
15
+ void adjacent_difference[InputIt, OutputIt](InputIt in_first, InputIt in_last, OutputIt out_first)
16
+
17
+ void adjacent_difference[InputIt, OutputIt, BinaryOperation](InputIt in_first, InputIt in_last, OutputIt out_first,
18
+ BinaryOperation op)
19
+
20
+ void partial_sum[InputIt, OutputIt](InputIt in_first, OutputIt in_last, OutputIt out_first)
21
+
22
+ void partial_sum[InputIt, OutputIt, BinaryOperation](InputIt in_first, InputIt in_last, OutputIt out_first,
23
+ BinaryOperation op)
24
+
25
+
26
+ T reduce[InputIt, T](InputIt first, InputIt last, T init)
27
+
28
+ # ambiguous with next overload
29
+ #T reduce[ExecutionPolicy, ForwardIt, T](ExecutionPolicy&& policy,
30
+ # ForwardIt first, ForwardIt last, T init)
31
+
32
+ T reduce[InputIt, T, BinaryOp](InputIt first, InputIt last, T init, BinaryOp binary_op)
33
+
34
+ T reduce[ExecutionPolicy, ForwardIt, T, BinaryOp](ExecutionPolicy&& policy,
35
+ ForwardIt first, ForwardIt last, T init, BinaryOp binary_op)
36
+
37
+ T transform_reduce[InputIt1, InputIt2, T](InputIt1 first1, InputIt1 last1,
38
+ InputIt2 first2, T init)
39
+
40
+ T transform_reduce[InputIt1, InputIt2, T, BinaryReductionOp, BinaryTransformOp](
41
+ InputIt1 first1, InputIt1 last1, InputIt2 first2, T init,
42
+ BinaryReductionOp reduce, BinaryTransformOp transform)
43
+
44
+ T transform_reduce[InputIt, T, BinaryReductionOp, UnaryTransformOp](
45
+ InputIt first, InputIt last, T init, BinaryReductionOp reduce,
46
+ UnaryTransformOp transform)
47
+
48
+ # ambiguous with previous overload
49
+ #T transform_reduce[ExecutionPolicy, ForwardIt1, ForwardIt2, T](
50
+ # ExecutionPolicy&& policy, ForwardIt1 first1, ForwardIt1 last1,
51
+ # ForwardIt2 first2, T init)
52
+
53
+ T transform_reduce[ExecutionPolicy, ForwardIt1, ForwardIt2, T, BinaryReductionOp, BinaryTransformOp](
54
+ ExecutionPolicy&& policy, ForwardIt1 first1, ForwardIt1 last1, ForwardIt2 first2, T init,
55
+ BinaryReductionOp reduce, BinaryTransformOp transform)
56
+
57
+ # ambiguous with second overload
58
+ #T transform_reduce[ExecutionPolicy, ForwardIt, T, BinaryReductionOp, UnaryTransformOp](
59
+ # ExecutionPolicy&& policy, ForwardIt first, ForwardIt last, T init, BinaryReductionOp reduce,
60
+ # UnaryTransformOp transform)
61
+
62
+ OutputIt inclusive_scan[InputIt, OutputIt](InputIt first, InputIt last, OutputIt d_first)
63
+
64
+ # ambiguous with next overload
65
+ # ForwardIt2 inclusive_scan[ExecutionPolicy, ForwardIt1, ForwardIt2](
66
+ # ExecutionPolicy&& policy, ForwardIt1 first, ForwardIt1 last,
67
+ # ForwardIt2 d_first)
68
+
69
+ OutputIt inclusive_scan[InputIt, OutputIt, BinaryOperation](
70
+ InputIt first, InputIt last, OutputIt d_first, BinaryOperation binary_op)
71
+
72
+ # ambiguous with next overload
73
+ # ForwardIt2 inclusive_scan[ExecutionPolicy, ForwardIt1, ForwardIt2, BinaryOperation](
74
+ # ExecutionPolicy&& policy, ForwardIt1 first, ForwardIt1 last, ForwardIt2 d_first,
75
+ # BinaryOperation binary_op)
76
+
77
+ OutputIt inclusive_scan[InputIt, OutputIt, BinaryOperation, T](
78
+ InputIt first, InputIt last, OutputIt d_first, BinaryOperation binary_op,
79
+ T init)
80
+
81
+ #
82
+ # ForwardIt2 inclusive_scan[ExecutionPolicy, ForwardIt1, ForwardIt2, BinaryOperation, T](
83
+ # ExecutionPolicy&& policy, ForwardIt1 first, ForwardIt1 last, ForwardIt2 d_first,
84
+ # BinaryOperation binary_op, T init)
85
+
86
+ OutputIt exclusive_scan[InputIt, OutputIt, T](InputIt first, InputIt last,
87
+ OutputIt d_first, T init)
88
+
89
+ # ambiguous with next overload
90
+ #ForwardIt2 exclusive_scan[ExecutionPolicy, ForwardIt1, ForwardIt2, T](
91
+ # ExecutionPolicy&& policy, ForwardIt1 first, ForwardIt1 last,
92
+ # ForwardIt2 d_first, T init)
93
+
94
+ OutputIt exclusive_scan[InputIt, OutputIt, T, BinaryOperation](
95
+ InputIt first, InputIt last, OutputIt d_first, T init, BinaryOperation binary_op)
96
+
97
+ ForwardIt2 exclusive_scan[ExecutionPolicy, ForwardIt1, ForwardIt2, T, BinaryOperation](
98
+ ExecutionPolicy&& policy, ForwardIt1 first, ForwardIt1 last, ForwardIt2 d_first,
99
+ T init, BinaryOperation binary_op)
100
+
101
+ OutputIt transform_inclusive_scan[InputIt, OutputIt, BinaryOperation, UnaryOperation](
102
+ InputIt first, InputIt last, OutputIt d_first, BinaryOperation binary_op,
103
+ UnaryOperation unary_op)
104
+
105
+ # ambiguous with next overload
106
+ # ForwardIt2 transform_inclusive_scan[ExecutionPolicy, ForwardIt1, ForwardIt2, BinaryOperation, UnaryOperation](
107
+ # ExecutionPolicy&& policy, ForwardIt1 first, ForwardIt1 last, ForwardIt2 d_first,
108
+ # BinaryOperation binary_op, UnaryOperation unary_op)
109
+
110
+ OutputIt transform_inclusive_scan[InputIt, OutputIt, BinaryOperation, UnaryOperation, T](
111
+ InputIt first, InputIt last, OutputIt d_first, BinaryOperation binary_op,
112
+ UnaryOperation unary_op, T init)
113
+
114
+ ForwardIt2 transform_inclusive_scan[ExecutionPolicy, ForwardIt1, ForwardIt2, BinaryOperation, UnaryOperation, T](
115
+ ExecutionPolicy&& policy, ForwardIt1 first, ForwardIt1 last, ForwardIt2 d_first,
116
+ BinaryOperation binary_op, UnaryOperation unary_op, T init)
117
+
118
+ OutputIt transform_exclusive_scan[InputIt, OutputIt, T, BinaryOperation, UnaryOperation](
119
+ InputIt first, InputIt last, OutputIt d_first, T init, BinaryOperation binary_op,
120
+ UnaryOperation unary_op)
121
+
122
+ ForwardIt2 transform_exclusive_scan[ExecutionPolicy, ForwardIt1, ForwardIt2, T, BinaryOperation, UnaryOperation](
123
+ ExecutionPolicy&& policy, ForwardIt1 first, ForwardIt1 last, ForwardIt2 d_first,
124
+ T init, BinaryOperation binary_op, UnaryOperation unary_op)
125
+
126
+ # C++17
127
+ T gcd[T](T a, T b)
128
+ T lcm[T](T a, T b)
129
+
130
+ # C++20
131
+ T midpoint[T](T a, T b) except +
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/pair.pxd ADDED
@@ -0,0 +1 @@
 
 
1
+ from .utility cimport pair
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/set.pxd ADDED
@@ -0,0 +1,228 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .utility cimport pair
2
+
3
+ cdef extern from "<set>" namespace "std" nogil:
4
+ cdef cppclass set[T]:
5
+ ctypedef T value_type
6
+
7
+ # these should really be allocator_type.size_type and
8
+ # allocator_type.difference_type to be true to the C++ definition
9
+ # but cython doesn't support deferred access on template arguments
10
+ ctypedef size_t size_type
11
+ ctypedef ptrdiff_t difference_type
12
+
13
+ cppclass const_iterator
14
+ cppclass iterator:
15
+ iterator() except +
16
+ iterator(iterator&) except +
17
+ value_type& operator*()
18
+ iterator operator++()
19
+ iterator operator--()
20
+ iterator operator++(int)
21
+ iterator operator--(int)
22
+ bint operator==(iterator)
23
+ bint operator==(const_iterator)
24
+ bint operator!=(iterator)
25
+ bint operator!=(const_iterator)
26
+ cppclass const_iterator:
27
+ const_iterator() except +
28
+ const_iterator(iterator&) except +
29
+ const_iterator(const_iterator&) except +
30
+ operator=(iterator&) except +
31
+ const value_type& operator*()
32
+ const_iterator operator++()
33
+ const_iterator operator--()
34
+ const_iterator operator++(int)
35
+ const_iterator operator--(int)
36
+ bint operator==(iterator)
37
+ bint operator==(const_iterator)
38
+ bint operator!=(iterator)
39
+ bint operator!=(const_iterator)
40
+
41
+ cppclass const_reverse_iterator
42
+ cppclass reverse_iterator:
43
+ reverse_iterator() except +
44
+ reverse_iterator(reverse_iterator&) except +
45
+ value_type& operator*()
46
+ reverse_iterator operator++()
47
+ reverse_iterator operator--()
48
+ reverse_iterator operator++(int)
49
+ reverse_iterator operator--(int)
50
+ bint operator==(reverse_iterator)
51
+ bint operator==(const_reverse_iterator)
52
+ bint operator!=(reverse_iterator)
53
+ bint operator!=(const_reverse_iterator)
54
+ cppclass const_reverse_iterator:
55
+ const_reverse_iterator() except +
56
+ const_reverse_iterator(reverse_iterator&) except +
57
+ operator=(reverse_iterator&) except +
58
+ const value_type& operator*()
59
+ const_reverse_iterator operator++()
60
+ const_reverse_iterator operator--()
61
+ const_reverse_iterator operator++(int)
62
+ const_reverse_iterator operator--(int)
63
+ bint operator==(reverse_iterator)
64
+ bint operator==(const_reverse_iterator)
65
+ bint operator!=(reverse_iterator)
66
+ bint operator!=(const_reverse_iterator)
67
+
68
+ set() except +
69
+ set(set&) except +
70
+ #set(key_compare&)
71
+ #set& operator=(set&)
72
+ bint operator==(set&, set&)
73
+ bint operator!=(set&, set&)
74
+ bint operator<(set&, set&)
75
+ bint operator>(set&, set&)
76
+ bint operator<=(set&, set&)
77
+ bint operator>=(set&, set&)
78
+ iterator begin()
79
+ const_iterator const_begin "begin"()
80
+ const_iterator cbegin()
81
+ void clear()
82
+ size_t count(const T&)
83
+ bint empty()
84
+ iterator end()
85
+ const_iterator const_end "end"()
86
+ const_iterator cend()
87
+ pair[iterator, iterator] equal_range(const T&)
88
+ pair[const_iterator, const_iterator] const_equal_range "equal_range"(const T&)
89
+ iterator erase(iterator)
90
+ iterator const_erase "erase"(const_iterator)
91
+ iterator erase(const_iterator, const_iterator)
92
+ size_t erase(const T&)
93
+ iterator find(const T&)
94
+ const_iterator const_find "find"(const T&)
95
+ pair[iterator, bint] insert(const T&) except +
96
+ iterator insert(iterator, const T&) except +
97
+ iterator insert(const_iterator, const T&) except +
98
+ iterator const_insert "insert"(const_iterator, const T&) except +
99
+ void insert[InputIt](InputIt, InputIt) except +
100
+ #key_compare key_comp()
101
+ iterator lower_bound(const T&)
102
+ const_iterator const_lower_bound "lower_bound"(const T&)
103
+ size_t max_size()
104
+ reverse_iterator rbegin()
105
+ const_reverse_iterator const_rbegin "rbegin"()
106
+ const_reverse_iterator crbegin()
107
+ reverse_iterator rend()
108
+ const_reverse_iterator const_rend "rend"()
109
+ const_reverse_iterator crend()
110
+ size_t size()
111
+ void swap(set&)
112
+ iterator upper_bound(const T&)
113
+ const_iterator const_upper_bound "upper_bound"(const T&)
114
+ #value_compare value_comp()
115
+ # C++20
116
+ bint contains(const T&)
117
+
118
+ cdef cppclass multiset[T]:
119
+ ctypedef T value_type
120
+
121
+ # these should really be allocator_type.size_type and
122
+ # allocator_type.difference_type to be true to the C++ definition
123
+ # but cython doesn't support deferred access on template arguments
124
+ ctypedef size_t size_type
125
+ ctypedef ptrdiff_t difference_type
126
+
127
+ cppclass const_iterator
128
+ cppclass iterator:
129
+ iterator() except +
130
+ iterator(iterator&) except +
131
+ value_type& operator*()
132
+ iterator operator++()
133
+ iterator operator--()
134
+ iterator operator++(int)
135
+ iterator operator--(int)
136
+ bint operator==(iterator)
137
+ bint operator==(const_iterator)
138
+ bint operator!=(iterator)
139
+ bint operator!=(const_iterator)
140
+ cppclass const_iterator:
141
+ const_iterator() except +
142
+ const_iterator(iterator&) except +
143
+ const_iterator(const_iterator&) except +
144
+ operator=(iterator&) except +
145
+ const value_type& operator*()
146
+ const_iterator operator++()
147
+ const_iterator operator--()
148
+ const_iterator operator++(int)
149
+ const_iterator operator--(int)
150
+ bint operator==(iterator)
151
+ bint operator==(const_iterator)
152
+ bint operator!=(iterator)
153
+ bint operator!=(const_iterator)
154
+
155
+ cppclass const_reverse_iterator
156
+ cppclass reverse_iterator:
157
+ reverse_iterator() except +
158
+ reverse_iterator(reverse_iterator&) except +
159
+ value_type& operator*()
160
+ reverse_iterator operator++()
161
+ reverse_iterator operator--()
162
+ reverse_iterator operator++(int)
163
+ reverse_iterator operator--(int)
164
+ bint operator==(reverse_iterator)
165
+ bint operator==(const_reverse_iterator)
166
+ bint operator!=(reverse_iterator)
167
+ bint operator!=(const_reverse_iterator)
168
+ cppclass const_reverse_iterator:
169
+ const_reverse_iterator() except +
170
+ const_reverse_iterator(reverse_iterator&) except +
171
+ operator=(reverse_iterator&) except +
172
+ const value_type& operator*()
173
+ const_reverse_iterator operator++()
174
+ const_reverse_iterator operator--()
175
+ const_reverse_iterator operator++(int)
176
+ const_reverse_iterator operator--(int)
177
+ bint operator==(reverse_iterator)
178
+ bint operator==(const_reverse_iterator)
179
+ bint operator!=(reverse_iterator)
180
+ bint operator!=(const_reverse_iterator)
181
+
182
+ multiset() except +
183
+ multiset(multiset&) except +
184
+ #multiset(key_compare&)
185
+ #multiset& operator=(multiset&)
186
+ bint operator==(multiset&, multiset&)
187
+ bint operator!=(multiset&, multiset&)
188
+ bint operator<(multiset&, multiset&)
189
+ bint operator>(multiset&, multiset&)
190
+ bint operator<=(multiset&, multiset&)
191
+ bint operator>=(multiset&, multiset&)
192
+ iterator begin()
193
+ const_iterator const_begin "begin"()
194
+ const_iterator cbegin()
195
+ void clear()
196
+ size_t count(const T&)
197
+ bint empty()
198
+ iterator end()
199
+ const_iterator const_end "end"()
200
+ const_iterator cend()
201
+ pair[iterator, iterator] equal_range(const T&)
202
+ pair[const_iterator, const_iterator] const_equal_range "equal_range"(const T&)
203
+ iterator erase(iterator)
204
+ iterator const_erase "erase"(const_iterator)
205
+ iterator erase(const_iterator, const_iterator)
206
+ size_t erase(const T&)
207
+ iterator find(const T&)
208
+ const_iterator const_find "find"(const T&)
209
+ iterator insert(const T&) except +
210
+ iterator insert(iterator, const T&) except +
211
+ iterator const_insert "insert"(const_iterator, const T&) except +
212
+ void insert[InputIt](InputIt, InputIt) except +
213
+ #key_compare key_comp()
214
+ iterator lower_bound(const T&)
215
+ const_iterator const_lower_bound "lower_bound"(const T&)
216
+ size_t max_size()
217
+ reverse_iterator rbegin()
218
+ const_reverse_iterator const_rbegin "rbegin"()
219
+ const_reverse_iterator crbegin()
220
+ reverse_iterator rend()
221
+ const_reverse_iterator const_rend "rend"()
222
+ const_reverse_iterator crend()
223
+ size_t size()
224
+ void swap(multiset&)
225
+ iterator upper_bound(const T&)
226
+ const_iterator const_upper_bound "upper_bound"(const T&)
227
+ # C++20
228
+ bint contains(const T&)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/typeinfo.pxd ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ from libcpp cimport bool
2
+
3
+ cdef extern from "<typeinfo>" namespace "std" nogil:
4
+ cdef cppclass type_info:
5
+ const char* name()
6
+ int before(const type_info&)
7
+ bool operator==(const type_info&)
8
+ bool operator!=(const type_info&)
9
+ # C++11-only
10
+ size_t hash_code()
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/utility.pxd ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ cdef extern from "<utility>" namespace "std" nogil:
2
+ cdef cppclass pair[T, U]:
3
+ ctypedef T first_type
4
+ ctypedef U second_type
5
+ T first
6
+ U second
7
+ pair() except +
8
+ pair(pair&) except +
9
+ pair(T&, U&) except +
10
+ bint operator==(pair&, pair&)
11
+ bint operator!=(pair&, pair&)
12
+ bint operator<(pair&, pair&)
13
+ bint operator>(pair&, pair&)
14
+ bint operator<=(pair&, pair&)
15
+ bint operator>=(pair&, pair&)
16
+
17
+ cdef extern from * namespace "cython_std" nogil:
18
+ """
19
+ #if __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)
20
+ // move should be defined for these versions of MSVC, but __cplusplus isn't set usefully
21
+ #include <type_traits>
22
+
23
+ namespace cython_std {
24
+ template <typename T> typename std::remove_reference<T>::type&& move(T& t) noexcept { return std::move(t); }
25
+ template <typename T> typename std::remove_reference<T>::type&& move(T&& t) noexcept { return std::move(t); }
26
+ }
27
+
28
+ #endif
29
+ """
30
+ cdef T move[T](T)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/vector.pxd ADDED
@@ -0,0 +1,167 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ cdef extern from "<vector>" namespace "std" nogil:
2
+ cdef cppclass vector[T,ALLOCATOR=*]:
3
+ ctypedef T value_type
4
+ ctypedef ALLOCATOR allocator_type
5
+
6
+ # these should really be allocator_type.size_type and
7
+ # allocator_type.difference_type to be true to the C++ definition
8
+ # but cython doesn't support deferred access on template arguments
9
+ ctypedef size_t size_type
10
+ ctypedef ptrdiff_t difference_type
11
+
12
+ cppclass const_iterator
13
+ cppclass iterator:
14
+ iterator() except +
15
+ iterator(iterator&) except +
16
+ T& operator*()
17
+ iterator operator++()
18
+ iterator operator--()
19
+ iterator operator++(int)
20
+ iterator operator--(int)
21
+ iterator operator+(size_type)
22
+ iterator operator-(size_type)
23
+ difference_type operator-(iterator)
24
+ difference_type operator-(const_iterator)
25
+ bint operator==(iterator)
26
+ bint operator==(const_iterator)
27
+ bint operator!=(iterator)
28
+ bint operator!=(const_iterator)
29
+ bint operator<(iterator)
30
+ bint operator<(const_iterator)
31
+ bint operator>(iterator)
32
+ bint operator>(const_iterator)
33
+ bint operator<=(iterator)
34
+ bint operator<=(const_iterator)
35
+ bint operator>=(iterator)
36
+ bint operator>=(const_iterator)
37
+ cppclass const_iterator:
38
+ const_iterator() except +
39
+ const_iterator(iterator&) except +
40
+ const_iterator(const_iterator&) except +
41
+ operator=(iterator&) except +
42
+ const T& operator*()
43
+ const_iterator operator++()
44
+ const_iterator operator--()
45
+ const_iterator operator++(int)
46
+ const_iterator operator--(int)
47
+ const_iterator operator+(size_type)
48
+ const_iterator operator-(size_type)
49
+ difference_type operator-(iterator)
50
+ difference_type operator-(const_iterator)
51
+ bint operator==(iterator)
52
+ bint operator==(const_iterator)
53
+ bint operator!=(iterator)
54
+ bint operator!=(const_iterator)
55
+ bint operator<(iterator)
56
+ bint operator<(const_iterator)
57
+ bint operator>(iterator)
58
+ bint operator>(const_iterator)
59
+ bint operator<=(iterator)
60
+ bint operator<=(const_iterator)
61
+ bint operator>=(iterator)
62
+ bint operator>=(const_iterator)
63
+
64
+ cppclass const_reverse_iterator
65
+ cppclass reverse_iterator:
66
+ reverse_iterator() except +
67
+ reverse_iterator(reverse_iterator&) except +
68
+ T& operator*()
69
+ reverse_iterator operator++()
70
+ reverse_iterator operator--()
71
+ reverse_iterator operator++(int)
72
+ reverse_iterator operator--(int)
73
+ reverse_iterator operator+(size_type)
74
+ reverse_iterator operator-(size_type)
75
+ difference_type operator-(iterator)
76
+ difference_type operator-(const_iterator)
77
+ bint operator==(reverse_iterator)
78
+ bint operator==(const_reverse_iterator)
79
+ bint operator!=(reverse_iterator)
80
+ bint operator!=(const_reverse_iterator)
81
+ bint operator<(reverse_iterator)
82
+ bint operator<(const_reverse_iterator)
83
+ bint operator>(reverse_iterator)
84
+ bint operator>(const_reverse_iterator)
85
+ bint operator<=(reverse_iterator)
86
+ bint operator<=(const_reverse_iterator)
87
+ bint operator>=(reverse_iterator)
88
+ bint operator>=(const_reverse_iterator)
89
+ cppclass const_reverse_iterator:
90
+ const_reverse_iterator() except +
91
+ const_reverse_iterator(reverse_iterator&) except +
92
+ operator=(reverse_iterator&) except +
93
+ const T& operator*()
94
+ const_reverse_iterator operator++()
95
+ const_reverse_iterator operator--()
96
+ const_reverse_iterator operator++(int)
97
+ const_reverse_iterator operator--(int)
98
+ const_reverse_iterator operator+(size_type)
99
+ const_reverse_iterator operator-(size_type)
100
+ difference_type operator-(iterator)
101
+ difference_type operator-(const_iterator)
102
+ bint operator==(reverse_iterator)
103
+ bint operator==(const_reverse_iterator)
104
+ bint operator!=(reverse_iterator)
105
+ bint operator!=(const_reverse_iterator)
106
+ bint operator<(reverse_iterator)
107
+ bint operator<(const_reverse_iterator)
108
+ bint operator>(reverse_iterator)
109
+ bint operator>(const_reverse_iterator)
110
+ bint operator<=(reverse_iterator)
111
+ bint operator<=(const_reverse_iterator)
112
+ bint operator>=(reverse_iterator)
113
+ bint operator>=(const_reverse_iterator)
114
+
115
+ vector() except +
116
+ vector(vector&) except +
117
+ vector(size_type) except +
118
+ vector(size_type, T&) except +
119
+ #vector[InputIt](InputIt, InputIt)
120
+ T& operator[](size_type)
121
+ #vector& operator=(vector&)
122
+ bint operator==(vector&, vector&)
123
+ bint operator!=(vector&, vector&)
124
+ bint operator<(vector&, vector&)
125
+ bint operator>(vector&, vector&)
126
+ bint operator<=(vector&, vector&)
127
+ bint operator>=(vector&, vector&)
128
+ void assign(size_type, const T&)
129
+ void assign[InputIt](InputIt, InputIt) except +
130
+ T& at(size_type) except +
131
+ T& back()
132
+ iterator begin()
133
+ const_iterator const_begin "begin"()
134
+ const_iterator cbegin()
135
+ size_type capacity()
136
+ void clear()
137
+ bint empty()
138
+ iterator end()
139
+ const_iterator const_end "end"()
140
+ const_iterator cend()
141
+ iterator erase(iterator)
142
+ iterator erase(iterator, iterator)
143
+ T& front()
144
+ iterator insert(iterator, const T&) except +
145
+ iterator insert(iterator, size_type, const T&) except +
146
+ iterator insert[InputIt](iterator, InputIt, InputIt) except +
147
+ size_type max_size()
148
+ void pop_back()
149
+ void push_back(T&) except +
150
+ reverse_iterator rbegin()
151
+ const_reverse_iterator const_rbegin "rbegin"()
152
+ const_reverse_iterator crbegin()
153
+ reverse_iterator rend()
154
+ const_reverse_iterator const_rend "rend"()
155
+ const_reverse_iterator crend()
156
+ void reserve(size_type) except +
157
+ void resize(size_type) except +
158
+ void resize(size_type, T&) except +
159
+ size_type size()
160
+ void swap(vector&)
161
+
162
+ # C++11 methods
163
+ T* data()
164
+ const T* const_data "data"()
165
+ void shrink_to_fit() except +
166
+ iterator emplace(const_iterator, ...) except +
167
+ T& emplace_back(...) except +
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/openmp.pxd ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ cdef extern from "<omp.h>":
2
+ ctypedef struct omp_lock_t:
3
+ pass
4
+ ctypedef struct omp_nest_lock_t:
5
+ pass
6
+
7
+ ctypedef enum omp_sched_t:
8
+ omp_sched_static = 1,
9
+ omp_sched_dynamic = 2,
10
+ omp_sched_guided = 3,
11
+ omp_sched_auto = 4
12
+
13
+ extern void omp_set_num_threads(int) nogil
14
+ extern int omp_get_num_threads() nogil
15
+ extern int omp_get_max_threads() nogil
16
+ extern int omp_get_thread_num() nogil
17
+ extern int omp_get_num_procs() nogil
18
+
19
+ extern int omp_in_parallel() nogil
20
+
21
+ extern void omp_set_dynamic(int) nogil
22
+ extern int omp_get_dynamic() nogil
23
+
24
+ extern void omp_set_nested(int) nogil
25
+ extern int omp_get_nested() nogil
26
+
27
+ extern void omp_init_lock(omp_lock_t *) nogil
28
+ extern void omp_destroy_lock(omp_lock_t *) nogil
29
+ extern void omp_set_lock(omp_lock_t *) nogil
30
+ extern void omp_unset_lock(omp_lock_t *) nogil
31
+ extern int omp_test_lock(omp_lock_t *) nogil
32
+
33
+ extern void omp_init_nest_lock(omp_nest_lock_t *) nogil
34
+ extern void omp_destroy_nest_lock(omp_nest_lock_t *) nogil
35
+ extern void omp_set_nest_lock(omp_nest_lock_t *) nogil
36
+ extern void omp_unset_nest_lock(omp_nest_lock_t *) nogil
37
+ extern int omp_test_nest_lock(omp_nest_lock_t *) nogil
38
+
39
+ extern double omp_get_wtime() nogil
40
+ extern double omp_get_wtick() nogil
41
+
42
+ void omp_set_schedule(omp_sched_t, int) nogil
43
+ void omp_get_schedule(omp_sched_t *, int *) nogil
44
+ int omp_get_thread_limit() nogil
45
+ void omp_set_max_active_levels(int) nogil
46
+ int omp_get_max_active_levels() nogil
47
+ int omp_get_level() nogil
48
+ int omp_get_ancestor_thread_num(int) nogil
49
+ int omp_get_team_size(int) nogil
50
+ int omp_get_active_level() nogil
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Shadow.py ADDED
@@ -0,0 +1,609 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # cython.* namespace for pure mode.
2
+ from __future__ import absolute_import
3
+
4
+ # Possible version formats: "3.1.0", "3.1.0a1", "3.1.0a1.dev0"
5
+ __version__ = "3.0.11"
6
+
7
+ try:
8
+ from __builtin__ import basestring
9
+ except ImportError:
10
+ basestring = str
11
+
12
+
13
+ # BEGIN shameless copy from Cython/minivect/minitypes.py
14
+
15
+ class _ArrayType(object):
16
+
17
+ is_array = True
18
+ subtypes = ['dtype']
19
+
20
+ def __init__(self, dtype, ndim, is_c_contig=False, is_f_contig=False,
21
+ inner_contig=False, broadcasting=None):
22
+ self.dtype = dtype
23
+ self.ndim = ndim
24
+ self.is_c_contig = is_c_contig
25
+ self.is_f_contig = is_f_contig
26
+ self.inner_contig = inner_contig or is_c_contig or is_f_contig
27
+ self.broadcasting = broadcasting
28
+
29
+ def __repr__(self):
30
+ axes = [":"] * self.ndim
31
+ if self.is_c_contig:
32
+ axes[-1] = "::1"
33
+ elif self.is_f_contig:
34
+ axes[0] = "::1"
35
+
36
+ return "%s[%s]" % (self.dtype, ", ".join(axes))
37
+
38
+
39
+ def index_type(base_type, item):
40
+ """
41
+ Support array type creation by slicing, e.g. double[:, :] specifies
42
+ a 2D strided array of doubles. The syntax is the same as for
43
+ Cython memoryviews.
44
+ """
45
+ class InvalidTypeSpecification(Exception):
46
+ pass
47
+
48
+ def verify_slice(s):
49
+ if s.start or s.stop or s.step not in (None, 1):
50
+ raise InvalidTypeSpecification(
51
+ "Only a step of 1 may be provided to indicate C or "
52
+ "Fortran contiguity")
53
+
54
+ if isinstance(item, tuple):
55
+ step_idx = None
56
+ for idx, s in enumerate(item):
57
+ verify_slice(s)
58
+ if s.step and (step_idx or idx not in (0, len(item) - 1)):
59
+ raise InvalidTypeSpecification(
60
+ "Step may only be provided once, and only in the "
61
+ "first or last dimension.")
62
+
63
+ if s.step == 1:
64
+ step_idx = idx
65
+
66
+ return _ArrayType(base_type, len(item),
67
+ is_c_contig=step_idx == len(item) - 1,
68
+ is_f_contig=step_idx == 0)
69
+ elif isinstance(item, slice):
70
+ verify_slice(item)
71
+ return _ArrayType(base_type, 1, is_c_contig=bool(item.step))
72
+ else:
73
+ # int[8] etc.
74
+ assert int(item) == item # array size must be a plain integer
75
+ return array(base_type, item)
76
+
77
+ # END shameless copy
78
+
79
+
80
+ compiled = False
81
+
82
+ _Unspecified = object()
83
+
84
+ # Function decorators
85
+
86
+ def _empty_decorator(x):
87
+ return x
88
+
89
+ def locals(**arg_types):
90
+ return _empty_decorator
91
+
92
+ def test_assert_path_exists(*paths):
93
+ return _empty_decorator
94
+
95
+ def test_fail_if_path_exists(*paths):
96
+ return _empty_decorator
97
+
98
+ class _EmptyDecoratorAndManager(object):
99
+ def __call__(self, x):
100
+ return x
101
+ def __enter__(self):
102
+ pass
103
+ def __exit__(self, exc_type, exc_value, traceback):
104
+ pass
105
+
106
+ class _Optimization(object):
107
+ pass
108
+
109
+ cclass = ccall = cfunc = _EmptyDecoratorAndManager()
110
+
111
+ annotation_typing = returns = wraparound = boundscheck = initializedcheck = \
112
+ nonecheck = embedsignature = cdivision = cdivision_warnings = \
113
+ always_allow_keywords = profile = linetrace = infer_types = \
114
+ unraisable_tracebacks = freelist = auto_pickle = cpow = trashcan = \
115
+ auto_cpdef = c_api_binop_methods = \
116
+ allow_none_for_extension_args = callspec = show_performance_hints = \
117
+ cpp_locals = py2_import = iterable_coroutine = remove_unreachable = \
118
+ lambda _: _EmptyDecoratorAndManager()
119
+
120
+ # Note that fast_getattr is untested and undocumented!
121
+ fast_getattr = lambda _: _EmptyDecoratorAndManager()
122
+
123
+ exceptval = lambda _=None, check=True: _EmptyDecoratorAndManager()
124
+
125
+ overflowcheck = lambda _: _EmptyDecoratorAndManager()
126
+ optimize = _Optimization()
127
+
128
+
129
+ embedsignature.format = overflowcheck.fold = optimize.use_switch = \
130
+ optimize.unpack_method_calls = lambda arg: _EmptyDecoratorAndManager()
131
+
132
+ final = internal = type_version_tag = no_gc_clear = no_gc = total_ordering = \
133
+ ufunc = _empty_decorator
134
+
135
+ binding = lambda _: _empty_decorator
136
+
137
+ class warn:
138
+ undeclared = unreachable = maybe_uninitialized = unused = \
139
+ unused_arg = unused_result = \
140
+ lambda _: _EmptyDecoratorAndManager()
141
+
142
+
143
+ _cython_inline = None
144
+ def inline(f, *args, **kwds):
145
+ if isinstance(f, basestring):
146
+ global _cython_inline
147
+ if _cython_inline is None:
148
+ from Cython.Build.Inline import cython_inline as _cython_inline
149
+ return _cython_inline(f, *args, **kwds)
150
+ else:
151
+ assert len(args) == len(kwds) == 0
152
+ return f
153
+
154
+
155
+ def compile(f):
156
+ from Cython.Build.Inline import RuntimeCompiledFunction
157
+ return RuntimeCompiledFunction(f)
158
+
159
+
160
+ # Special functions
161
+
162
+ def cdiv(a, b):
163
+ if a < 0:
164
+ a = -a
165
+ b = -b
166
+ if b < 0:
167
+ return (a + b + 1) // b
168
+ return a // b
169
+
170
+ def cmod(a, b):
171
+ r = a % b
172
+ if (a * b) < 0 and r:
173
+ r -= b
174
+ return r
175
+
176
+
177
+ # Emulated language constructs
178
+
179
+ def cast(t, *args, **kwargs):
180
+ kwargs.pop('typecheck', None)
181
+ assert not kwargs
182
+
183
+ if isinstance(t, typedef):
184
+ return t(*args)
185
+ elif isinstance(t, type): # Doesn't work with old-style classes of Python 2.x
186
+ if len(args) != 1 or not (args[0] is None or isinstance(args[0], t)):
187
+ return t(*args)
188
+
189
+ return args[0]
190
+
191
+ def sizeof(arg):
192
+ return 1
193
+
194
+ def typeof(arg):
195
+ return arg.__class__.__name__
196
+ # return type(arg)
197
+
198
+ def address(arg):
199
+ return pointer(type(arg))([arg])
200
+
201
+ def _is_value_type(t):
202
+ if isinstance(t, typedef):
203
+ return _is_value_type(t._basetype)
204
+
205
+ return isinstance(t, type) and issubclass(t, (StructType, UnionType, ArrayType))
206
+
207
+ def declare(t=None, value=_Unspecified, **kwds):
208
+ if value is not _Unspecified:
209
+ return cast(t, value)
210
+ elif _is_value_type(t):
211
+ return t()
212
+ else:
213
+ return None
214
+
215
+ class _nogil(object):
216
+ """Support for 'with nogil' statement and @nogil decorator.
217
+ """
218
+ def __call__(self, x):
219
+ if callable(x):
220
+ # Used as function decorator => return the function unchanged.
221
+ return x
222
+ # Used as conditional context manager or to create an "@nogil(True/False)" decorator => keep going.
223
+ return self
224
+
225
+ def __enter__(self):
226
+ pass
227
+ def __exit__(self, exc_class, exc, tb):
228
+ return exc_class is None
229
+
230
+ nogil = _nogil()
231
+ gil = _nogil()
232
+ with_gil = _nogil() # Actually not a context manager, but compilation will give the right error.
233
+ del _nogil
234
+
235
+
236
+ # Emulated types
237
+
238
+ class CythonMetaType(type):
239
+
240
+ def __getitem__(type, ix):
241
+ return array(type, ix)
242
+
243
+ CythonTypeObject = CythonMetaType('CythonTypeObject', (object,), {})
244
+
245
+ class CythonType(CythonTypeObject):
246
+
247
+ def _pointer(self, n=1):
248
+ for i in range(n):
249
+ self = pointer(self)
250
+ return self
251
+
252
+ class PointerType(CythonType):
253
+
254
+ def __init__(self, value=None):
255
+ if isinstance(value, (ArrayType, PointerType)):
256
+ self._items = [cast(self._basetype, a) for a in value._items]
257
+ elif isinstance(value, list):
258
+ self._items = [cast(self._basetype, a) for a in value]
259
+ elif value is None or value == 0:
260
+ self._items = []
261
+ else:
262
+ raise ValueError
263
+
264
+ def __getitem__(self, ix):
265
+ if ix < 0:
266
+ raise IndexError("negative indexing not allowed in C")
267
+ return self._items[ix]
268
+
269
+ def __setitem__(self, ix, value):
270
+ if ix < 0:
271
+ raise IndexError("negative indexing not allowed in C")
272
+ self._items[ix] = cast(self._basetype, value)
273
+
274
+ def __eq__(self, value):
275
+ if value is None and not self._items:
276
+ return True
277
+ elif type(self) != type(value):
278
+ return False
279
+ else:
280
+ return not self._items and not value._items
281
+
282
+ def __repr__(self):
283
+ return "%s *" % (self._basetype,)
284
+
285
+ class ArrayType(PointerType):
286
+
287
+ def __init__(self, value=None):
288
+ if value is None:
289
+ self._items = [None] * self._n
290
+ else:
291
+ super(ArrayType, self).__init__(value)
292
+
293
+
294
+ class StructType(CythonType):
295
+
296
+ def __init__(self, *posargs, **data):
297
+ if not (posargs or data):
298
+ return
299
+ if posargs and data:
300
+ raise ValueError('Cannot accept both positional and keyword arguments.')
301
+
302
+ # Allow 'cast_from' as single positional or keyword argument.
303
+ if data and len(data) == 1 and 'cast_from' in data:
304
+ cast_from = data.pop('cast_from')
305
+ elif len(posargs) == 1 and type(posargs[0]) is type(self):
306
+ cast_from, posargs = posargs[0], ()
307
+ elif posargs:
308
+ for key, arg in zip(self._members, posargs):
309
+ setattr(self, key, arg)
310
+ return
311
+ else:
312
+ for key, value in data.items():
313
+ if key not in self._members:
314
+ raise ValueError("Invalid struct attribute for %s: %s" % (
315
+ self.__class__.__name__, key))
316
+ setattr(self, key, value)
317
+ return
318
+
319
+ # do cast
320
+ if data:
321
+ raise ValueError('Cannot accept keyword arguments when casting.')
322
+ if type(cast_from) is not type(self):
323
+ raise ValueError('Cannot cast from %s' % cast_from)
324
+ for key, value in cast_from.__dict__.items():
325
+ setattr(self, key, value)
326
+
327
+ def __setattr__(self, key, value):
328
+ if key in self._members:
329
+ self.__dict__[key] = cast(self._members[key], value)
330
+ else:
331
+ raise AttributeError("Struct has no member '%s'" % key)
332
+
333
+
334
+ class UnionType(CythonType):
335
+
336
+ def __init__(self, cast_from=_Unspecified, **data):
337
+ if cast_from is not _Unspecified:
338
+ # do type cast
339
+ if len(data) > 0:
340
+ raise ValueError('Cannot accept keyword arguments when casting.')
341
+ if isinstance(cast_from, dict):
342
+ datadict = cast_from
343
+ elif type(cast_from) is type(self):
344
+ datadict = cast_from.__dict__
345
+ else:
346
+ raise ValueError('Cannot cast from %s' % cast_from)
347
+ else:
348
+ datadict = data
349
+ if len(datadict) > 1:
350
+ raise AttributeError("Union can only store one field at a time.")
351
+ for key, value in datadict.items():
352
+ setattr(self, key, value)
353
+
354
+ def __setattr__(self, key, value):
355
+ if key == '__dict__':
356
+ CythonType.__setattr__(self, key, value)
357
+ elif key in self._members:
358
+ self.__dict__ = {key: cast(self._members[key], value)}
359
+ else:
360
+ raise AttributeError("Union has no member '%s'" % key)
361
+
362
+ def pointer(basetype):
363
+ class PointerInstance(PointerType):
364
+ _basetype = basetype
365
+ return PointerInstance
366
+
367
+ def array(basetype, n):
368
+ class ArrayInstance(ArrayType):
369
+ _basetype = basetype
370
+ _n = n
371
+ return ArrayInstance
372
+
373
+ def struct(**members):
374
+ class StructInstance(StructType):
375
+ _members = members
376
+ for key in members:
377
+ setattr(StructInstance, key, None)
378
+ return StructInstance
379
+
380
+ def union(**members):
381
+ class UnionInstance(UnionType):
382
+ _members = members
383
+ for key in members:
384
+ setattr(UnionInstance, key, None)
385
+ return UnionInstance
386
+
387
+ class typedef(CythonType):
388
+
389
+ def __init__(self, type, name=None):
390
+ self._basetype = type
391
+ self.name = name
392
+
393
+ def __call__(self, *arg):
394
+ value = cast(self._basetype, *arg)
395
+ return value
396
+
397
+ def __repr__(self):
398
+ return self.name or str(self._basetype)
399
+
400
+ __getitem__ = index_type
401
+
402
+ class _FusedType(CythonType):
403
+ __getitem__ = index_type
404
+
405
+
406
+ def fused_type(*args):
407
+ if not args:
408
+ raise TypeError("Expected at least one type as argument")
409
+
410
+ # Find the numeric type with biggest rank if all types are numeric
411
+ rank = -1
412
+ for type in args:
413
+ if type not in (py_int, py_long, py_float, py_complex):
414
+ break
415
+
416
+ if type_ordering.index(type) > rank:
417
+ result_type = type
418
+ else:
419
+ return result_type
420
+
421
+ # Not a simple numeric type, return a fused type instance. The result
422
+ # isn't really meant to be used, as we can't keep track of the context in
423
+ # pure-mode. Casting won't do anything in this case.
424
+ return _FusedType()
425
+
426
+
427
+ def _specialized_from_args(signatures, args, kwargs):
428
+ "Perhaps this should be implemented in a TreeFragment in Cython code"
429
+ raise Exception("yet to be implemented")
430
+
431
+
432
+ py_int = typedef(int, "int")
433
+ try:
434
+ py_long = typedef(long, "long")
435
+ except NameError: # Py3
436
+ py_long = typedef(int, "long")
437
+ py_float = typedef(float, "float")
438
+ py_complex = typedef(complex, "double complex")
439
+
440
+
441
+ # Predefined types
442
+
443
+ int_types = [
444
+ 'char',
445
+ 'short',
446
+ 'Py_UNICODE',
447
+ 'int',
448
+ 'Py_UCS4',
449
+ 'long',
450
+ 'longlong',
451
+ 'Py_hash_t',
452
+ 'Py_ssize_t',
453
+ 'size_t',
454
+ 'ssize_t',
455
+ 'ptrdiff_t',
456
+ ]
457
+ float_types = [
458
+ 'longdouble',
459
+ 'double',
460
+ 'float',
461
+ ]
462
+ complex_types = [
463
+ 'longdoublecomplex',
464
+ 'doublecomplex',
465
+ 'floatcomplex',
466
+ 'complex',
467
+ ]
468
+ other_types = [
469
+ 'bint',
470
+ 'void',
471
+ 'Py_tss_t',
472
+ ]
473
+
474
+ to_repr = {
475
+ 'longlong': 'long long',
476
+ 'longdouble': 'long double',
477
+ 'longdoublecomplex': 'long double complex',
478
+ 'doublecomplex': 'double complex',
479
+ 'floatcomplex': 'float complex',
480
+ }.get
481
+
482
+ gs = globals()
483
+
484
+ # note: cannot simply name the unicode type here as 2to3 gets in the way and replaces it by str
485
+ try:
486
+ import __builtin__ as builtins
487
+ except ImportError: # Py3
488
+ import builtins
489
+
490
+ gs['unicode'] = typedef(getattr(builtins, 'unicode', str), 'unicode')
491
+ del builtins
492
+
493
+ for name in int_types:
494
+ reprname = to_repr(name, name)
495
+ gs[name] = typedef(py_int, reprname)
496
+ if name not in ('Py_UNICODE', 'Py_UCS4', 'Py_hash_t', 'ptrdiff_t') and not name.endswith('size_t'):
497
+ gs['u'+name] = typedef(py_int, "unsigned " + reprname)
498
+ gs['s'+name] = typedef(py_int, "signed " + reprname)
499
+
500
+ for name in float_types:
501
+ gs[name] = typedef(py_float, to_repr(name, name))
502
+
503
+ for name in complex_types:
504
+ gs[name] = typedef(py_complex, to_repr(name, name))
505
+
506
+ bint = typedef(bool, "bint")
507
+ void = typedef(None, "void")
508
+ Py_tss_t = typedef(None, "Py_tss_t")
509
+
510
+ for t in int_types:
511
+ for i in range(1, 4):
512
+ gs["%s_%s" % ('p'*i, t)] = gs[t]._pointer(i)
513
+ if 'u'+t in gs:
514
+ gs["%s_u%s" % ('p'*i, t)] = gs['u'+t]._pointer(i)
515
+ gs["%s_s%s" % ('p'*i, t)] = gs['s'+t]._pointer(i)
516
+
517
+ for t in float_types + complex_types + other_types:
518
+ for i in range(1, 4):
519
+ gs["%s_%s" % ('p'*i, t)] = gs[t]._pointer(i)
520
+
521
+ del t, i
522
+
523
+ NULL = gs['p_void'](0)
524
+
525
+ # looks like 'gs' has some users out there by now...
526
+ #del gs
527
+
528
+ integral = floating = numeric = _FusedType()
529
+
530
+ type_ordering = [py_int, py_long, py_float, py_complex]
531
+
532
+ class CythonDotParallel(object):
533
+ """
534
+ The cython.parallel module.
535
+ """
536
+
537
+ __all__ = ['parallel', 'prange', 'threadid']
538
+
539
+ def parallel(self, num_threads=None):
540
+ return nogil
541
+
542
+ def prange(self, start=0, stop=None, step=1, nogil=False, schedule=None, chunksize=None, num_threads=None):
543
+ if stop is None:
544
+ stop = start
545
+ start = 0
546
+ return range(start, stop, step)
547
+
548
+ def threadid(self):
549
+ return 0
550
+
551
+ # def threadsavailable(self):
552
+ # return 1
553
+
554
+ class CythonDotImportedFromElsewhere(object):
555
+ """
556
+ cython.dataclasses just shadows the standard library modules of the same name
557
+ """
558
+ def __init__(self, module):
559
+ self.__path__ = []
560
+ self.__file__ = None
561
+ self.__name__ = module
562
+ self.__package__ = module
563
+
564
+ def __getattr__(self, attr):
565
+ # we typically only expect this to be called once
566
+ from importlib import import_module
567
+ import sys
568
+ try:
569
+ mod = import_module(self.__name__)
570
+ except ImportError:
571
+ # but if they don't exist (Python is not sufficiently up-to-date) then
572
+ # you can't use them
573
+ raise AttributeError("%s: the standard library module %s is not available" %
574
+ (attr, self.__name__))
575
+ sys.modules['cython.%s' % self.__name__] = mod
576
+ return getattr(mod, attr)
577
+
578
+ class CythonCImports(object):
579
+ """
580
+ Simplistic module mock to make cimports sort-of work in Python code.
581
+ """
582
+ def __init__(self, module):
583
+ self.__path__ = []
584
+ self.__file__ = None
585
+ self.__name__ = module
586
+ self.__package__ = module
587
+
588
+ def __getattr__(self, item):
589
+ if item.startswith('__') and item.endswith('__'):
590
+ raise AttributeError(item)
591
+ try:
592
+ return __import__(item)
593
+ except ImportError:
594
+ import sys
595
+ ex = AttributeError(item)
596
+ if sys.version_info >= (3, 0):
597
+ ex.__cause__ = None
598
+ raise ex
599
+
600
+
601
+ import math, sys
602
+ sys.modules['cython.parallel'] = CythonDotParallel()
603
+ sys.modules['cython.cimports'] = CythonCImports('cython.cimports')
604
+ sys.modules['cython.cimports.libc'] = CythonCImports('cython.cimports.libc')
605
+ sys.modules['cython.cimports.libc.math'] = math
606
+ # In pure Python mode @cython.dataclasses.dataclass and dataclass field should just
607
+ # shadow the standard library ones (if they are available)
608
+ dataclasses = sys.modules['cython.dataclasses'] = CythonDotImportedFromElsewhere('dataclasses')
609
+ del math, sys
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Shadow.pyi ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from builtins import (int as py_int, float as py_float,
2
+ bool as py_bool, str as py_str, complex as py_complex)
3
+ from typing import (Union, Dict, Any, Sequence, Optional,
4
+ List, TypeVar, Type, Generic)
5
+
6
+ int = py_int
7
+ long = py_int
8
+ longlong = py_int
9
+ short = py_int
10
+ char = py_int
11
+ sint = py_int
12
+ slong = py_int
13
+ slonglong = py_int
14
+ sshort = py_int
15
+ schar = py_int
16
+ uint = py_int
17
+ ulong = py_int
18
+ ulonglong = py_int
19
+ ushort = py_int
20
+ uchar = py_int
21
+ size_t = py_int
22
+ Py_ssize_t = py_int
23
+ Py_UCS4 = Union[py_int, str]
24
+ Py_UNICODE = Union[py_int, str]
25
+ float = py_float
26
+ double = py_float
27
+ longdouble = py_float
28
+ complex = py_complex
29
+ floatcomplex = py_complex
30
+ doublecomplex = py_complex
31
+ longdoublecomplex = py_complex
32
+ bint = py_bool
33
+ void = Union[None]
34
+ basestring = py_str
35
+ unicode = py_str
36
+
37
+ gs: Dict[str, Any] # Should match the return type of globals()
38
+
39
+ _T = TypeVar('_T')
40
+
41
+ class _ArrayType(object, Generic[_T]):
42
+ is_array: bool
43
+ subtypes: Sequence[str]
44
+ dtype: _T
45
+ ndim: int
46
+ is_c_contig: bool
47
+ is_f_contig: bool
48
+ inner_contig: bool
49
+ broadcasting: Any
50
+
51
+ # broadcasting is not used, so it's not clear about its type
52
+ def __init__(self, dtype: _T, ndim: int, is_c_contig: bool = ...,
53
+ is_f_contig: bool = ..., inner_contig: bool = ...,
54
+ broadcasting: Any = ...) -> None: ...
55
+ def __repr__(self) -> str: ...
56
+
57
+ class CythonTypeObject(object):
58
+ ...
59
+
60
+ class CythonType(CythonTypeObject):
61
+ ...
62
+
63
+ class PointerType(CythonType, Generic[_T]):
64
+ def __init__(
65
+ self,
66
+ value: Optional[Union[ArrayType[_T], PointerType[_T], List[_T], int]] = ...
67
+ ) -> None: ...
68
+ def __getitem__(self, ix: int) -> _T: ...
69
+ def __setitem__(self, ix: int, value: _T) -> None: ...
70
+ def __eq__(self, value: object) -> bool: ...
71
+ def __repr__(self) -> str: ...
72
+
73
+ class ArrayType(PointerType[_T]):
74
+ def __init__(self) -> None: ...
75
+
76
+ #class StructType(CythonType, Generic[_T]):
77
+ # def __init__(
78
+ # self,
79
+ # value: List[Type[_T]] = ...
80
+ # ) -> None: ...
81
+
82
+ def index_type(
83
+ base_type: _T, item: Union[tuple, slice, int]) -> _ArrayType[_T]: ...
84
+
85
+ def pointer(basetype: _T) -> Type[PointerType[_T]]: ...
86
+
87
+ def array(basetype: _T, n: int) -> Type[ArrayType[_T]]: ...
88
+
89
+ #def struct(basetype: _T) -> Type[StructType[_T]]: ...
90
+
91
+ class typedef(CythonType, Generic[_T]):
92
+ name: str
93
+
94
+ def __init__(self, type: _T, name: Optional[str] = ...) -> None: ...
95
+ def __call__(self, *arg: Any) -> _T: ...
96
+ def __repr__(self) -> str: ...
97
+ __getitem__ = index_type
98
+
99
+ #class _FusedType(CythonType, Generic[_T]):
100
+ # def __init__(self) -> None: ...
101
+
102
+ #def fused_type(*args: Tuple[_T]) -> Type[FusedType[_T]]: ...
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/StringIOTree.cpython-311-x86_64-linux-gnu.so ADDED
Binary file (89.9 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/StringIOTree.py ADDED
@@ -0,0 +1,174 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # cython: auto_pickle=False
2
+
3
+ r"""
4
+ Implements a buffer with insertion points. When you know you need to
5
+ "get back" to a place and write more later, simply call insertion_point()
6
+ at that spot and get a new StringIOTree object that is "left behind".
7
+
8
+ EXAMPLE:
9
+
10
+ >>> a = StringIOTree()
11
+ >>> _= a.write('first\n')
12
+ >>> b = a.insertion_point()
13
+ >>> _= a.write('third\n')
14
+ >>> _= b.write('second\n')
15
+ >>> a.getvalue().split()
16
+ ['first', 'second', 'third']
17
+
18
+ >>> c = b.insertion_point()
19
+ >>> d = c.insertion_point()
20
+ >>> _= d.write('alpha\n')
21
+ >>> _= b.write('gamma\n')
22
+ >>> _= c.write('beta\n')
23
+ >>> b.getvalue().split()
24
+ ['second', 'alpha', 'beta', 'gamma']
25
+
26
+ >>> try: from cStringIO import StringIO
27
+ ... except ImportError: from io import StringIO
28
+
29
+ >>> i = StringIOTree()
30
+ >>> d.insert(i)
31
+ >>> _= i.write('inserted\n')
32
+ >>> out = StringIO()
33
+ >>> a.copyto(out)
34
+ >>> out.getvalue().split()
35
+ ['first', 'second', 'alpha', 'inserted', 'beta', 'gamma', 'third']
36
+ """
37
+
38
+ from __future__ import absolute_import #, unicode_literals
39
+
40
+ try:
41
+ # Prefer cStringIO since io.StringIO() does not support writing 'str' in Py2.
42
+ from cStringIO import StringIO
43
+ except ImportError:
44
+ from io import StringIO
45
+
46
+
47
class StringIOTree(object):
    """
    A writable buffer tree with insertion points (see module docs).

    Each node owns one flat ``stream`` plus a list of child trees whose
    content is emitted *before* the node's own stream content.
    """

    def __init__(self, stream=None):
        # Children render before this node's own stream content.
        self.prepended_children = []
        self.stream = StringIO() if stream is None else stream
        self.write = self.stream.write
        self.markers = []

    def empty(self):
        """Return True if neither this node nor any child holds content."""
        if self.stream.tell():
            return False
        # all() of an empty sequence is True, which is exactly right here.
        return all(child.empty() for child in self.prepended_children)

    def getvalue(self):
        """Return the full contents (children first) as one string."""
        chunks = []
        self._collect_in(chunks)
        return "".join(chunks)

    def _collect_in(self, target_list):
        # Depth-first: every child contributes before our own stream.
        for child in self.prepended_children:
            child._collect_in(target_list)
        own_content = self.stream.getvalue()
        if own_content:
            target_list.append(own_content)

    def copyto(self, target):
        """Potentially cheaper than getvalue as no string concatenation
        needs to happen."""
        for child in self.prepended_children:
            child.copyto(target)
        own_content = self.stream.getvalue()
        if own_content:
            target.write(own_content)

    def commit(self):
        """Freeze what was written so far into a child node, leaving the
        current stream empty and ready for insertion."""
        if not self.stream.tell():
            return
        frozen = StringIOTree(self.stream)
        frozen.markers = self.markers
        self.prepended_children.append(frozen)
        self.markers = []
        self.stream = StringIO()
        self.write = self.stream.write

    def reset(self):
        """Discard all content, children and markers."""
        self.prepended_children = []
        self.markers = []
        self.stream = StringIO()
        self.write = self.stream.write

    def insert(self, iotree):
        """
        Insert a StringIOTree (and all of its contents) at this location.
        Further writing to self appears after what is inserted.
        """
        self.commit()
        self.prepended_children.append(iotree)

    def insertion_point(self):
        """
        Returns a new StringIOTree, which is left behind at the current position
        (it what is written to the result will appear right before whatever is
        next written to self).

        Calling getvalue() or copyto() on the result will only return the
        contents written to it.
        """
        # Commit first so that getvalue() on the fork does not include
        # what has already been written to self.
        self.commit()
        fork = StringIOTree()
        self.prepended_children.append(fork)
        return fork

    def allmarkers(self):
        """Return all markers in emission order (children first)."""
        collected = []
        for child in self.prepended_children:
            collected.extend(child.allmarkers())
        collected.extend(self.markers)
        return collected

    """
    # Print the result of allmarkers in a nice human-readable form. Use it only for debugging.
    # Prints e.g.
    # /path/to/source.pyx:
    # cython line 2 maps to 3299-3343
    # cython line 4 maps to 2236-2245 2306 3188-3201
    # /path/to/othersource.pyx:
    # cython line 3 maps to 1234-1270
    # ...
    # Note: In the example above, 3343 maps to line 2, 3344 does not.
    def print_hr_allmarkers(self):
        from collections import defaultdict
        markers = self.allmarkers()
        totmap = defaultdict(lambda: defaultdict(list))
        for c_lineno, (cython_desc, cython_lineno) in enumerate(markers):
            if cython_lineno > 0 and cython_desc.filename is not None:
                totmap[cython_desc.filename][cython_lineno].append(c_lineno + 1)
        reprstr = ""
        if totmap == 0:
            reprstr += "allmarkers is empty\n"
        try:
            sorted(totmap.items())
        except:
            print(totmap)
            print(totmap.items())
        for cython_path, filemap in sorted(totmap.items()):
            reprstr += cython_path + ":\n"
            for cython_lineno, c_linenos in sorted(filemap.items()):
                reprstr += "\tcython line " + str(cython_lineno) + " maps to "
                i = 0
                while i < len(c_linenos):
                    reprstr += str(c_linenos[i])
                    flag = False
                    while i+1 < len(c_linenos) and c_linenos[i+1] == c_linenos[i]+1:
                        i += 1
                        flag = True
                    if flag:
                        reprstr += "-" + str(c_linenos[i]) + " "
                    i += 1
                reprstr += "\n"

        import sys
        sys.stdout.write(reprstr)
    """
+ """
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/TestUtils.py ADDED
@@ -0,0 +1,398 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import absolute_import
2
+
3
+ import os
4
+ import re
5
+ import unittest
6
+ import shlex
7
+ import sys
8
+ import tempfile
9
+ import textwrap
10
+ from io import open
11
+ from functools import partial
12
+
13
+ from .Compiler import Errors
14
+ from .CodeWriter import CodeWriter
15
+ from .Compiler.TreeFragment import TreeFragment, strip_common_indent
16
+ from .Compiler.Visitor import TreeVisitor, VisitorTransform
17
+ from .Compiler import TreePath
18
+
19
+
20
class NodeTypeWriter(TreeVisitor):
    """Collects an indented textual outline of a tree's node class names."""

    def __init__(self):
        super(NodeTypeWriter, self).__init__()
        self._indents = 0
        self.result = []

    def visit_Node(self, node):
        # Derive a label from the access path; the root has no path entry.
        if self.access_path:
            last_step = self.access_path[-1]
            if last_step[2] is not None:
                name = u"%s[%d]" % last_step[1:3]
            else:
                name = last_step[1]
        else:
            name = u"(root)"

        self.result.append(
            u" " * self._indents + u"%s: %s" % (name, node.__class__.__name__))
        self._indents += 1
        self.visitchildren(node)
        self._indents -= 1
41
+
42
+
43
def treetypes(root):
    """Returns a string representing the tree by class names.
    There's a leading and trailing whitespace so that it can be
    compared by simple string comparison while still making test
    cases look ok."""
    writer = NodeTypeWriter()
    writer.visit(root)
    lines = [u""]
    lines.extend(writer.result)
    lines.append(u"")
    return u"\n".join(lines)
51
+
52
+
53
class CythonTest(unittest.TestCase):
    """Base class for Cython compiler tests.

    Provides tree-fragment parsing plus line-by-line string/tree/code
    comparison helpers.
    """

    def setUp(self):
        # (Re-)initialise per-thread error state so tests are isolated.
        Errors.init_thread()

    def tearDown(self):
        Errors.init_thread()

    def assertLines(self, expected, result):
        "Checks that the given strings or lists of strings are equal line by line"
        if not isinstance(expected, list):
            expected = expected.split(u"\n")
        if not isinstance(result, list):
            result = result.split(u"\n")
        for idx, (expected_line, result_line) in enumerate(zip(expected, result)):
            self.assertEqual(expected_line, result_line,
                             "Line %d:\nExp: %s\nGot: %s" % (idx, expected_line, result_line))
        # BUG FIX: the two joined strings were swapped, so the failure
        # message showed the expected lines under "Got:" and vice versa.
        self.assertEqual(len(expected), len(result),
                         "Unmatched lines. Got:\n%s\nExpected:\n%s" % (
                             u"\n".join(result), u"\n".join(expected)))

    def codeToLines(self, tree):
        """Serialize a parse tree back to a list of code lines."""
        writer = CodeWriter()
        writer.write(tree)
        return writer.result.lines

    def codeToString(self, tree):
        """Serialize a parse tree back to a single code string."""
        return "\n".join(self.codeToLines(tree))

    def assertCode(self, expected, result_tree):
        """Serialize result_tree and compare it line by line against the
        (common-indent-stripped) expected code string."""
        result_lines = self.codeToLines(result_tree)

        expected_lines = strip_common_indent(expected.split("\n"))

        for idx, (line, expected_line) in enumerate(zip(result_lines, expected_lines)):
            self.assertEqual(expected_line, line,
                             "Line %d:\nGot: %s\nExp: %s" % (idx, line, expected_line))
        self.assertEqual(len(result_lines), len(expected_lines),
                         "Unmatched lines. Got:\n%s\nExpected:\n%s" % ("\n".join(result_lines), expected))

    def assertNodeExists(self, path, result_tree):
        """Assert that a TreePath pattern matches somewhere in result_tree."""
        self.assertNotEqual(TreePath.find_first(result_tree, path), None,
                            "Path '%s' not found in result tree" % path)

    def fragment(self, code, pxds=None, pipeline=None):
        "Simply create a tree fragment using the name of the test-case in parse errors."
        if pxds is None:
            pxds = {}
        if pipeline is None:
            pipeline = []
        name = self.id()
        if name.startswith("__main__."):
            name = name[len("__main__."):]
        name = name.replace(".", "_")
        return TreeFragment(code, name, pxds, pipeline=pipeline)

    def treetypes(self, root):
        """Return a string representation of `root` by node class names."""
        return treetypes(root)

    def should_fail(self, func, exc_type=Exception):
        """Calls "func" and fails if it doesn't raise the right exception
        (any exception by default). Also returns the exception in question.
        """
        # BUG FIX: self.fail() raises AssertionError, which is a subclass of
        # the default exc_type=Exception, so the old code caught its own
        # failure and silently passed when func() did NOT raise.  The fail()
        # call must therefore live outside the try block.
        try:
            func()
        except exc_type as e:
            return e
        self.fail("Expected an exception of type %r" % exc_type)

    def should_not_fail(self, func):
        """Calls func and succeeds if and only if no exception is raised
        (i.e. converts exception raising into a failed testcase). Returns
        the return value of func."""
        try:
            return func()
        except Exception as exc:
            self.fail(str(exc))
130
+
131
+
132
class TransformTest(CythonTest):
    """
    Utility base class for transform unit tests. It is based around constructing
    test trees (either explicitly or by parsing a Cython code string); running
    the transform, serialize it using a customized Cython serializer (with
    special markup for nodes that cannot be represented in Cython),
    and do a string-comparison line-by-line of the result.

    To create a test case:
     - Call run_pipeline. The pipeline should at least contain the transform you
     are testing; pyx should be either a string (passed to the parser to
     create a post-parse tree) or a node representing input to pipeline.
     The result will be a transformed result.

     - Check that the tree is correct. If wanted, assertCode can be used, which
     takes a code string as expected, and a ModuleNode in result_tree
     (it serializes the ModuleNode to a string and compares line-by-line).

    All code strings are first stripped for whitespace lines and then common
    indentation.

    Plans: One could have a pxd dictionary parameter to run_pipeline.
    """

    def run_pipeline(self, pipeline, pyx, pxds=None):
        """Parse `pyx` into a fragment tree and push it through every
        stage of `pipeline` in turn, returning the final tree."""
        pxds = {} if pxds is None else pxds
        tree = self.fragment(pyx, pxds).root
        for stage in pipeline:
            tree = stage(tree)
        return tree
164
+
165
+
166
+ # For the test C code validation, we have to take care that the test directives (and thus
167
+ # the match strings) do not just appear in (multiline) C code comments containing the original
168
+ # Cython source code. Thus, we discard the comments before matching.
169
+ # This seems a prime case for re.VERBOSE, but it seems to match some of the whitespace.
170
+ _strip_c_comments = partial(re.compile(
171
+ re.sub(r'\s+', '', r'''
172
+ /[*] (
173
+ (?: [^*\n] | [*][^/] )*
174
+ [\n]
175
+ (?: [^*] | [*][^/] )*
176
+ ) [*]/
177
+ ''')
178
+ ).sub, '')
179
+
180
+ _strip_cython_code_from_html = partial(re.compile(
181
+ re.sub(r'\s\s+', '', r'''
182
+ (?:
183
+ <pre class=["'][^"']*cython\s+line[^"']*["']\s*>
184
+ (?:[^<]|<(?!/pre))+
185
+ </pre>
186
+ )|(?:
187
+ <style[^>]*>
188
+ (?:[^<]|<(?!/style))+
189
+ </style>
190
+ )
191
+ ''')
192
+ ).sub, '')
193
+
194
+
195
+ def _parse_pattern(pattern):
196
+ start = end = None
197
+ if pattern.startswith('/'):
198
+ start, pattern = re.split(r"(?<!\\)/", pattern[1:], maxsplit=1)
199
+ pattern = pattern.strip()
200
+ if pattern.startswith(':'):
201
+ pattern = pattern[1:].strip()
202
+ if pattern.startswith("/"):
203
+ end, pattern = re.split(r"(?<!\\)/", pattern[1:], maxsplit=1)
204
+ pattern = pattern.strip()
205
+ return start, end, pattern
206
+
207
+
208
class TreeAssertVisitor(VisitorTransform):
    """Pipeline stage that evaluates the test_* compiler directives:
    tree-path assertions are checked immediately, C-code patterns are
    collected and later checked by the callable from
    create_c_file_validator().
    """
    # actually, a TreeVisitor would be enough, but this needs to run
    # as part of the compiler pipeline

    def __init__(self):
        super(TreeAssertVisitor, self).__init__()
        self._module_pos = None        # pos of the module node, for error reporting
        self._c_patterns = []          # regexes that MUST appear in the C file
        self._c_antipatterns = []      # regexes that must NOT appear

    def create_c_file_validator(self):
        """Return a callable(result) that checks the generated C file (and,
        if current, its annotated HTML) against the collected patterns."""
        patterns, antipatterns = self._c_patterns, self._c_antipatterns

        def fail(pos, pattern, found, file_path):
            Errors.error(pos, "Pattern '%s' %s found in %s" %(
                pattern,
                'was' if found else 'was not',
                file_path,
            ))

        def extract_section(file_path, content, start, end):
            # Narrow `content` to the part between the optional start/end
            # regexes; a missing delimiter is itself reported as an error.
            if start:
                split = re.search(start, content)
                if split:
                    content = content[split.end():]
                else:
                    fail(self._module_pos, start, found=False, file_path=file_path)
            if end:
                split = re.search(end, content)
                if split:
                    content = content[:split.start()]
                else:
                    fail(self._module_pos, end, found=False, file_path=file_path)
            return content

        def validate_file_content(file_path, content):
            for pattern in patterns:
                #print("Searching pattern '%s'" % pattern)
                start, end, pattern = _parse_pattern(pattern)
                section = extract_section(file_path, content, start, end)
                if not re.search(pattern, section):
                    fail(self._module_pos, pattern, found=False, file_path=file_path)

            for antipattern in antipatterns:
                #print("Searching antipattern '%s'" % antipattern)
                start, end, antipattern = _parse_pattern(antipattern)
                section = extract_section(file_path, content, start, end)
                if re.search(antipattern, section):
                    fail(self._module_pos, antipattern, found=True, file_path=file_path)

        def validate_c_file(result):
            c_file = result.c_file
            if not (patterns or antipatterns):
                #print("No patterns defined for %s" % c_file)
                return result

            # Comments are stripped first so that patterns cannot match the
            # Cython source echoed in C comments.
            with open(c_file, encoding='utf8') as f:
                content = f.read()
            content = _strip_c_comments(content)
            validate_file_content(c_file, content)

            # Only check the annotated HTML if it is at least as new as the
            # C file (i.e. belongs to this compilation run).
            html_file = os.path.splitext(c_file)[0] + ".html"
            if os.path.exists(html_file) and os.path.getmtime(c_file) <= os.path.getmtime(html_file):
                with open(html_file, encoding='utf8') as f:
                    content = f.read()
                content = _strip_cython_code_from_html(content)
                validate_file_content(html_file, content)

        return validate_c_file

    def _check_directives(self, node):
        # Evaluate path assertions in place; queue up C-code patterns for
        # the post-generation validator.
        directives = node.directives
        if 'test_assert_path_exists' in directives:
            for path in directives['test_assert_path_exists']:
                if TreePath.find_first(node, path) is None:
                    Errors.error(
                        node.pos,
                        "Expected path '%s' not found in result tree" % path)
        if 'test_fail_if_path_exists' in directives:
            for path in directives['test_fail_if_path_exists']:
                first_node = TreePath.find_first(node, path)
                if first_node is not None:
                    Errors.error(
                        first_node.pos,
                        "Unexpected path '%s' found in result tree" % path)
        if 'test_assert_c_code_has' in directives:
            self._c_patterns.extend(directives['test_assert_c_code_has'])
        if 'test_fail_if_c_code_has' in directives:
            self._c_antipatterns.extend(directives['test_fail_if_c_code_has'])

    def visit_ModuleNode(self, node):
        self._module_pos = node.pos
        self._check_directives(node)
        self.visitchildren(node)
        return node

    def visit_CompilerDirectivesNode(self, node):
        self._check_directives(node)
        self.visitchildren(node)
        return node

    visit_Node = VisitorTransform.recurse_to_children
310
+
311
+
312
def unpack_source_tree(tree_file, workdir, cython_root):
    """Unpack a .srctree test file into `workdir`.

    Lines before the first '#####' marker form the header: each non-comment
    line is parsed as a command (PYTHON/CYTHON/CYTHONIZE are expanded to
    interpreter invocations).  Every '##### path #####' marker starts a new
    file whose subsequent lines are written verbatim.

    Returns (workdir, header) where header is a list of command argv lists.
    """
    programs = {
        'PYTHON': [sys.executable],
        'CYTHON': [sys.executable, os.path.join(cython_root, 'cython.py')],
        'CYTHONIZE': [sys.executable, os.path.join(cython_root, 'cythonize.py')]
    }

    if workdir is None:
        workdir = tempfile.mkdtemp()
    header = []
    cur_file = None
    with open(tree_file, 'rb') as f:
        try:
            for line in f:
                if line[:5] == b'#####':
                    # Start of a new embedded file.
                    filename = line.strip().strip(b'#').strip().decode('utf8').replace('/', os.path.sep)
                    path = os.path.join(workdir, filename)
                    if not os.path.exists(os.path.dirname(path)):
                        os.makedirs(os.path.dirname(path))
                    if cur_file is not None:
                        to_close, cur_file = cur_file, None
                        to_close.close()
                    cur_file = open(path, 'wb')
                elif cur_file is not None:
                    cur_file.write(line)
                elif line.strip() and not line.lstrip().startswith(b'#'):
                    if line.strip() not in (b'"""', b"'''"):
                        command = shlex.split(line.decode('utf8'))
                        if not command:
                            continue
                        prog, args = command[0], command[1:]
                        expansion = programs.get(prog)
                        header.append(command if expansion is None else expansion + args)
        finally:
            if cur_file is not None:
                cur_file.close()
    return workdir, header
350
+
351
+
352
def write_file(file_path, content, dedent=False, encoding=None):
    r"""Write some content (text or bytes) to the file
    at `file_path` without translating `'\n'` into `os.linesep`.

    The default encoding is `'utf-8'`.
    """
    if isinstance(content, bytes):
        # Binary mode takes neither an encoding nor a newline argument.
        mode, newline, default_encoding = "wb", None, None
    else:
        # newline="\n" keeps written "\n" characters untranslated
        # (no mapping to the system default line separator, os.linesep).
        mode, newline, default_encoding = "w", "\n", "utf-8"

    if encoding is None:
        encoding = default_encoding

    if dedent:
        content = textwrap.dedent(content)

    with open(file_path, mode=mode, encoding=encoding, newline=newline) as f:
        f.write(content)
380
+
381
+
382
def write_newer_file(file_path, newer_than, content, dedent=False, encoding=None):
    r"""
    Write `content` to the file `file_path` without translating `'\n'`
    into `os.linesep` and make sure it is newer than the file `newer_than`.

    The default encoding is `'utf-8'` (same as for `write_file`).
    """
    write_file(file_path, content, dedent=dedent, encoding=encoding)

    try:
        other_time = os.path.getmtime(newer_than)
    except OSError:
        # BUG FIX: a freshly written file is always newer than a
        # non-existent one, so there is nothing left to do.  The previous
        # loop condition ("other_time is None or ...") could never become
        # false in this case and spun forever.
        return

    while other_time >= os.path.getmtime(file_path):
        # Rewrite until the filesystem timestamp actually moves past the
        # reference file's (relevant on coarse-granularity filesystems).
        write_file(file_path, content, dedent=dedent, encoding=encoding)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Utils.py ADDED
@@ -0,0 +1,721 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Cython -- Things that don't belong anywhere else in particular
3
+ """
4
+
5
+ from __future__ import absolute_import
6
+
7
+ import cython
8
+
9
+ cython.declare(
10
+ basestring=object,
11
+ os=object, sys=object, re=object, io=object, codecs=object, glob=object, shutil=object, tempfile=object,
12
+ cython_version=object,
13
+ _function_caches=list, _parse_file_version=object, _match_file_encoding=object,
14
+ )
15
+
16
+ try:
17
+ from __builtin__ import basestring
18
+ except ImportError:
19
+ basestring = str
20
+
21
+ try:
22
+ FileNotFoundError
23
+ except NameError:
24
+ FileNotFoundError = OSError
25
+
26
+ import os
27
+ import sys
28
+ import re
29
+ import io
30
+ import codecs
31
+ import glob
32
+ import shutil
33
+ import tempfile
34
+ from functools import wraps
35
+
36
+ from . import __version__ as cython_version
37
+
38
+ PACKAGE_FILES = ("__init__.py", "__init__.pyc", "__init__.pyx", "__init__.pxd")
39
+
40
+ _build_cache_name = "__{0}_cache".format
41
+ _CACHE_NAME_PATTERN = re.compile(r"^__(.+)_cache$")
42
+
43
+ modification_time = os.path.getmtime
44
+
45
+ GENERATED_BY_MARKER = "/* Generated by Cython %s */" % cython_version
46
+ GENERATED_BY_MARKER_BYTES = GENERATED_BY_MARKER.encode('us-ascii')
47
+
48
+
49
+ class _TryFinallyGeneratorContextManager(object):
50
+ """
51
+ Fast, bare minimum @contextmanager, only for try-finally, not for exception handling.
52
+ """
53
+ def __init__(self, gen):
54
+ self._gen = gen
55
+
56
+ def __enter__(self):
57
+ return next(self._gen)
58
+
59
+ def __exit__(self, exc_type, exc_val, exc_tb):
60
+ try:
61
+ next(self._gen)
62
+ except (StopIteration, GeneratorExit):
63
+ pass
64
+
65
+
66
def try_finally_contextmanager(gen_func):
    """Decorator: a stripped-down contextlib.contextmanager that only
    supports plain try/finally generators (no exception injection)."""
    @wraps(gen_func)
    def wrapper(*args, **kwargs):
        return _TryFinallyGeneratorContextManager(gen_func(*args, **kwargs))
    return wrapper
71
+
72
+
73
+ _function_caches = []
74
+
75
+
76
def clear_function_caches():
    """Drop every memoized result created via @cached_function."""
    for memo in _function_caches:
        memo.clear()
79
+
80
+
81
def cached_function(f):
    """Memoize `f` on its positional arguments.

    The cache is registered in the module-level `_function_caches` list so
    clear_function_caches() can empty it; the unwrapped function remains
    reachable as the `.uncached` attribute.
    """
    memo = {}
    _function_caches.append(memo)
    missing = object()  # sentinel: None may be a legitimate cached value

    @wraps(f)
    def wrapper(*args):
        value = memo.get(args, missing)
        if value is missing:
            value = memo[args] = f(*args)
        return value

    wrapper.uncached = f
    return wrapper
95
+
96
+
97
def _find_cache_attributes(obj):
    """The function iterates over the attributes of the object and,
    if it finds the name of the cache, it returns it and the corresponding method name.
    The method may not be present in the object.
    """
    for attr_name in dir(obj):
        found = _CACHE_NAME_PATTERN.match(attr_name)
        if found:
            yield attr_name, found.group(1)
106
+
107
+
108
def clear_method_caches(obj):
    """Removes every cache found in the object,
    if a corresponding method exists for that cache.
    """
    for cache_name, method_name in _find_cache_attributes(obj):
        # Only drop attributes whose originating cached method is still
        # present; anything else probably was not created by cached_method().
        if hasattr(obj, method_name):
            delattr(obj, cache_name)
117
+
118
+
119
def cached_method(f):
    """Memoize a method per instance, keyed by its positional arguments.

    The cache is stored on the instance under '__<name>_cache' so that
    clear_method_caches() can find and remove it.
    """
    cache_name = _build_cache_name(f.__name__)

    # CONSISTENCY FIX: cached_function() preserves the wrapped function's
    # name/docstring via functools.wraps; cached_method() did not, so
    # decorated methods lost their metadata (breaking introspection/help()).
    @wraps(f)
    def wrapper(self, *args):
        cache = getattr(self, cache_name, None)
        if cache is None:
            cache = {}
            setattr(self, cache_name, cache)
        if args in cache:
            return cache[args]
        res = cache[args] = f(self, *args)
        return res

    return wrapper
133
+
134
+
135
def replace_suffix(path, newsuf):
    """Return `path` with its extension replaced by `newsuf`
    (which should include the leading dot)."""
    root, _old_suffix = os.path.splitext(path)
    return root + newsuf
138
+
139
+
140
def open_new_file(path):
    """Open `path` for writing as a guaranteed-new file."""
    if os.path.exists(path):
        # Make sure to create a new file here so we can
        # safely hard link the output files.
        os.unlink(path)

    # we use the ISO-8859-1 encoding here because we only write pure
    # ASCII strings or (e.g. for file names) byte encoded strings as
    # Unicode, so we need a direct mapping from the first 256 Unicode
    # characters to a byte sequence, which ISO-8859-1 provides

    # note: can't use io.open() in Py2 as we may be writing str objects
    return codecs.open(path, "w", encoding="ISO-8859-1")
153
+
154
+
155
def castrate_file(path, st):
    """Replace the junk contents of a failed compilation's output file with
    a single #error line, and push its timestamps back to those in the stat
    struct `st` (if given) so build tools treat it as out of date."""
    if not is_cython_generated_file(path, allow_failed=True, if_not_found=False):
        # Never clobber a file we did not generate ourselves.
        return

    try:
        f = open_new_file(path)
    except EnvironmentError:
        pass
    else:
        f.write(
            "#error Do not use this file, it is the result of a failed Cython compilation.\n")
        f.close()
        if st:
            # mtime-1 keeps the file strictly older than its source.
            os.utime(path, (st.st_atime, st.st_mtime-1))
173
+
174
+
175
def is_cython_generated_file(path, allow_failed=False, if_not_found=True):
    """Decide whether `path` looks like (or may be overwritten as) a
    Cython output file.

    Missing files yield `if_not_found`; empty files are always considered
    overwritable; `allow_failed` additionally accepts the #error marker
    left behind by a previous failed compilation.
    """
    failure_marker = b"#error Do not use this file, it is the result of a failed Cython compilation."
    head = None
    if os.path.exists(path):
        try:
            with open(path, "rb") as f:
                head = f.read(len(failure_marker))
        except (OSError, IOError):
            pass  # Probably just doesn't exist any more

    if head is None:
        # file does not exist (yet)
        return if_not_found
    if not head:
        # Empty files may have resulted from previous failures; allow overwriting.
        return True
    if head.startswith(b"/* Generated by Cython "):
        # Regular Cython C output file.
        return True
    # Cython output file after previous failures?
    return allow_failed and head == failure_marker
197
+
198
+
199
def file_generated_by_this_cython(path):
    """True-ish if `path` starts with the generation marker of exactly
    this Cython version."""
    head = b''
    if os.path.exists(path):
        try:
            with open(path, "rb") as f:
                head = f.read(len(GENERATED_BY_MARKER_BYTES))
        except (OSError, IOError):
            pass  # Probably just doesn't exist any more
    return head and head.startswith(GENERATED_BY_MARKER_BYTES)
208
+
209
+
210
def file_newer_than(path, time):
    """True if the file at `path` was modified after timestamp `time`."""
    return modification_time(path) > time
213
+
214
+
215
def safe_makedirs(path):
    """os.makedirs() that tolerates the directory already existing."""
    try:
        os.makedirs(path)
    except OSError:
        # Re-raise unless some concurrent creator beat us to it.
        if not os.path.isdir(path):
            raise
221
+
222
+
223
def copy_file_to_dir_if_newer(sourcefile, destdir):
    """
    Copy file sourcefile to directory destdir (creating it if needed),
    preserving metadata. If the destination file exists and is not
    older than the source file, the copying is skipped.
    """
    destfile = os.path.join(destdir, os.path.basename(sourcefile))
    try:
        desttime = modification_time(destfile)
    except OSError:
        # Destination missing entirely; its directory may be missing too.
        safe_makedirs(destdir)
    else:
        if not file_newer_than(sourcefile, desttime):
            return  # destination is already up to date
    shutil.copy2(sourcefile, destfile)
240
+
241
+
242
@cached_function
def find_root_package_dir(file_path):
    """Walk upwards from `file_path` to the first directory that is not
    itself a package (i.e. the directory containing the package root)."""
    parent = os.path.dirname(file_path)
    if file_path == parent:
        # Reached the filesystem root.
        return parent
    if is_package_dir(parent):
        return find_root_package_dir(parent)
    return parent
251
+
252
+
253
@cached_function
def check_package_dir(dir_path, package_names):
    """Resolve `package_names` below `dir_path`.

    Returns (final_dir_path, namespace) where namespace stays True only if
    no level along the way had an __init__ file (PEP 420-style package).
    """
    namespace = True
    for package_name in package_names:
        dir_path = os.path.join(dir_path, package_name)
        if contains_init(dir_path):
            namespace = False
    return dir_path, namespace
262
+
263
+
264
@cached_function
def contains_init(dir_path):
    """Return 1 if `dir_path` holds any recognised package __init__ file,
    else None (falsy)."""
    for filename in PACKAGE_FILES:
        if path_exists(os.path.join(dir_path, filename)):
            return 1
270
+
271
+
272
def is_package_dir(dir_path):
    """Return 1 if `dir_path` is a package directory (has an __init__
    file), else None (falsy)."""
    if contains_init(dir_path):
        return 1
275
+
276
+
277
@cached_function
def path_exists(path):
    """Check whether a source path exists, looking at the real filesystem
    first and then inside the zip archive if Cython itself was imported
    from one (PEP 302 loader with an 'archive' attribute)."""
    # try on the filesystem first
    if os.path.exists(path):
        return True
    # figure out if a PEP 302 loader is around
    try:
        loader = __loader__
        # XXX the code below assumes a 'zipimport.zipimporter' instance
        # XXX should be easy to generalize, but too lazy right now to write it
        archive_path = getattr(loader, 'archive', None)
        if archive_path:
            normpath = os.path.normpath(path)
            if normpath.startswith(archive_path):
                # Strip "<archive>/" to obtain the intra-archive name.
                arcname = normpath[len(archive_path)+1:]
                try:
                    loader.get_data(arcname)
                    return True
                except IOError:
                    return False
    except NameError:
        # No module-level __loader__ (e.g. frozen or direct execution).
        pass
    return False
300
+
301
+
302
+ _parse_file_version = re.compile(r".*[.]cython-([0-9]+)[.][^./\\]+$").findall
303
+
304
+
305
@cached_function
def find_versioned_file(directory, filename, suffix,
                        _current_version=int(re.sub(r"^([0-9]+)[.]([0-9]+).*", r"\1\2", cython_version))):
    """
    Search a directory for versioned pxd files, e.g. "lib.cython-30.pxd" for a Cython 3.0+ version.

    @param directory: the directory to search
    @param filename: the filename without suffix
    @param suffix: the filename extension including the dot, e.g. ".pxd"
    @return: the file path if found, or None
    """
    # NOTE: _current_version is computed once at import time from
    # cython_version (e.g. "3.0.12" -> 30); it is a private default
    # argument, not part of the public interface.
    assert not suffix or suffix[:1] == '.'
    path_prefix = os.path.join(directory, filename)

    # glob.escape() only exists in Py3.4+; otherwise neutralise glob
    # metacharacters manually by wrapping them in [...] sets.
    matching_files = glob.glob(
        (glob.escape(path_prefix) if sys.version_info >= (3, 4) else
         ''.join([ '['+c+']' if c in '[*?' else c for c in path_prefix]))
        + ".cython-*" + suffix)
    path = path_prefix + suffix
    if not os.path.exists(path):
        path = None
    best_match = (-1, path)  # last resort, if we do not have versioned .pxd files

    for path in matching_files:
        versions = _parse_file_version(path)
        if versions:
            int_version = int(versions[0])
            # Let's assume no duplicates.
            # Highest version that does not exceed the running Cython wins.
            if best_match[0] < int_version <= _current_version:
                best_match = (int_version, path)
    return best_match[1]
336
+
337
+
338
+ # file name encodings
339
+
340
def decode_filename(filename):
    """Decode a bytes filename using the filesystem (or default) encoding.

    Already-decoded strings and undecodable byte strings come back unchanged.
    """
    if not isinstance(filename, bytes):
        return filename
    encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
    try:
        return filename.decode(encoding)
    except UnicodeDecodeError:
        return filename
350
+
351
+
352
+ # support for source file encoding detection
353
+
354
# PEP 263 style declaration, e.g. "# -*- coding: utf-8 -*-"
_match_file_encoding = re.compile(br"(\w*coding)[:=]\s*([-\w.]+)").search


def detect_opened_file_encoding(f, default='UTF-8'):
    """Sniff the source encoding from the first two lines of the open binary
    file *f*, following PEPs 263 and 3120.  Returns *default* if none is found.
    """
    # The first two lines almost always fit into the first few hundred bytes,
    # so a bulk read/split beats reading line by line.
    buffer = b''
    parts = ()
    while len(parts) < 3:
        chunk = f.read(500)
        buffer += chunk
        parts = buffer.split(b"\n")
        if not chunk:
            break  # EOF before three lines were seen

    match = _match_file_encoding(parts[0])
    if match and match.group(1) != b'c_string_encoding':
        return match.group(2).decode('iso8859-1')
    if len(parts) > 1:
        match = _match_file_encoding(parts[1])
        if match:
            return match.group(2).decode('iso8859-1')
    return default
378
+
379
+
380
def skip_bom(f):
    """
    Read past a BOM at the beginning of a source file.
    This could be added to the scanner, but it's *substantially* easier
    to keep it at this level.
    """
    first_char = f.read(1)
    if first_char != u'\uFEFF':
        # No BOM: rewind so the caller sees the full content.
        f.seek(0)
388
+
389
+
390
def open_source_file(source_filename, encoding=None, error_handling=None):
    """Open a source file as a text stream, detecting the encoding if none is
    given and skipping a leading BOM.

    Falls back to a PEP 302 zip loader when the file is not on the
    filesystem.  Raises FileNotFoundError if it cannot be found either way.
    """
    stream = None
    try:
        if encoding is None:
            # Most of the time the encoding is not specified, so try hard to open the file only once.
            f = io.open(source_filename, 'rb')
            encoding = detect_opened_file_encoding(f)
            f.seek(0)
            stream = io.TextIOWrapper(f, encoding=encoding, errors=error_handling)
        else:
            stream = io.open(source_filename, encoding=encoding, errors=error_handling)

    except OSError:
        if os.path.exists(source_filename):
            raise  # File is there, but something went wrong reading from it.
        # Allow source files to be in zip files etc.
        try:
            loader = __loader__
            if source_filename.startswith(loader.archive):
                stream = open_source_from_loader(
                    loader, source_filename,
                    encoding, error_handling)
        except (NameError, AttributeError):
            # no __loader__, or it is not a zipimporter-like loader
            pass

    if stream is None:
        raise FileNotFoundError(source_filename)
    # drop a UTF BOM if present so the scanner never sees it
    skip_bom(stream)
    return stream
419
+
420
+
421
def open_source_from_loader(loader,
                            source_filename,
                            encoding=None, error_handling=None):
    """Open *source_filename* from a PEP 302 zip loader as a text stream."""
    # in-archive name: path relative to the archive root
    arcname = os.path.normpath(source_filename)[len(loader.archive) + 1:]
    raw_bytes = loader.get_data(arcname)
    return io.TextIOWrapper(io.BytesIO(raw_bytes),
                            encoding=encoding,
                            errors=error_handling)
430
+
431
+
432
def str_to_number(value):
    """Convert a parser-accepted numeric literal string (with an optional
    leading '-') into a Python int, handling hex, binary and both Py2/Py3
    octal notations.
    """
    negative = value.startswith('-')
    if negative:
        value = value[1:]

    if len(value) < 2 or value[0] != '0':
        number = int(value, 0)
    else:
        marker = value[1]  # 'x'/'o'/'b' of '0x…', '0o…', '0b…', or a digit
        if marker in 'xX':
            # hex notation ('0x1AF'), possibly with a Py2 'L' suffix
            number = int(strip_py2_long_suffix(value)[2:], 16)
        elif marker in 'oO':
            # Py3 octal notation ('0o136')
            number = int(value[2:], 8)
        elif marker in 'bB':
            # Py3 binary notation ('0b101')
            number = int(value[2:], 2)
        else:
            # Py2 octal notation ('0136')
            number = int(value, 8)
    return -number if negative else number


def strip_py2_long_suffix(value_str):
    """
    Python 2 likes to append 'L' to stringified numbers
    which in then can't process when converting them to numbers.
    """
    return value_str[:-1] if value_str[-1] in 'lL' else value_str
469
+
470
+
471
def long_literal(value):
    """Return True if *value* (an int, or a numeric literal string) does not
    fit into a signed 32-bit integer.
    """
    if isinstance(value, basestring):
        value = str_to_number(value)
    return value < -2**31 or value >= 2**31
475
+
476
+
477
@cached_function
def get_cython_cache_dir():
    r"""
    Return the base directory containing Cython's caches.

    Priority:

    1. CYTHON_CACHE_DIR
    2. (OS X): ~/Library/Caches/Cython
       (posix not OS X): XDG_CACHE_HOME/cython if XDG_CACHE_HOME defined
    3. ~/.cython

    """
    explicit_dir = os.environ.get('CYTHON_CACHE_DIR')
    if explicit_dir is not None:
        return explicit_dir

    parent = None
    if os.name == 'posix':
        if sys.platform == 'darwin':
            parent = os.path.expanduser('~/Library/Caches')
        else:
            # this could fallback on ~/.cache
            parent = os.environ.get('XDG_CACHE_HOME')

    if parent and os.path.isdir(parent):
        return os.path.join(parent, 'cython')

    # last fallback: ~/.cython
    return os.path.expanduser(os.path.join('~', '.cython'))
506
+
507
+
508
@try_finally_contextmanager
def captured_fd(stream=2, encoding=None):
    """Context manager that temporarily redirects OS-level file descriptor
    *stream* (default 2, i.e. stderr) into a temporary file.

    Yields a zero-argument function returning everything captured so far,
    decoded with *encoding* if one is given, otherwise as bytes.
    """
    orig_stream = os.dup(stream)  # keep copy of original stream
    try:
        with tempfile.TemporaryFile(mode="a+b") as temp_file:
            # The mutable default keeps the last captured output alive after
            # the temp file is closed, so get_output() still works once the
            # context manager has exited.
            def read_output(_output=[b'']):
                if not temp_file.closed:
                    temp_file.seek(0)
                    _output[0] = temp_file.read()
                return _output[0]

            os.dup2(temp_file.fileno(), stream)  # replace stream by copy of pipe
            def get_output():
                result = read_output()
                return result.decode(encoding) if encoding else result

            yield get_output
            # note: @contextlib.contextmanager requires try-finally here
            os.dup2(orig_stream, stream)  # restore original stream
            read_output()  # keep the output in case it's used after closing the context manager
    finally:
        os.close(orig_stream)
530
+
531
+
532
def get_encoding_candidates():
    """Return the encodings worth trying when decoding captured output:
    the default encoding plus any distinct encodings of the std streams.
    """
    candidates = [sys.getdefaultencoding()]
    for stream in (sys.stdout, sys.stdin, sys.__stdout__, sys.__stdin__):
        stream_encoding = getattr(stream, 'encoding', None)
        # encoding might be None (e.g. somebody redirects stdout):
        if stream_encoding is not None and stream_encoding not in candidates:
            candidates.append(stream_encoding)
    return candidates


def prepare_captured(captured):
    """Decode captured output bytes to text, trying likely encodings in turn.

    Returns None when nothing but whitespace was captured.
    """
    stripped = captured.strip()
    if not stripped:
        return None
    for candidate in get_encoding_candidates():
        try:
            return stripped.decode(candidate)
        except UnicodeDecodeError:
            pass
    # last resort: print at least the readable ascii parts correctly.
    return stripped.decode('latin-1')


def print_captured(captured, output, header_line=None):
    """Decode *captured* bytes and write them to *output*, preceded by
    *header_line* when there is anything to print.
    """
    text = prepare_captured(captured)
    if not text:
        return
    if header_line:
        output.write(header_line)
    output.write(text)
561
+
562
+
563
def print_bytes(s, header_text=None, end=b'\n', file=sys.stdout, flush=True):
    """Write the byte string *s* (plus *end*) to *file*'s underlying binary
    buffer, optionally preceded by the text string *header_text*.
    """
    if header_text:
        # header is text => goes through the text layer of *file*
        file.write(header_text)
        file.flush()
    # Py3 text streams expose the raw binary stream as '.buffer';
    # otherwise assume *file* accepts bytes directly.
    out = getattr(file, 'buffer', file)
    out.write(s)
    if end:
        out.write(end)
    if flush:
        out.flush()
576
+
577
+
578
class OrderedSet(object):
    """A set that remembers the insertion order of its elements."""

    def __init__(self, elements=()):
        self._list = []   # elements in insertion order
        self._set = set()  # fast membership test
        self.update(elements)

    def __iter__(self):
        # iterate in insertion order
        return iter(self._list)

    def update(self, elements):
        for element in elements:
            self.add(element)

    def add(self, element):
        if element in self._set:
            return
        self._set.add(element)
        self._list.append(element)

    def __bool__(self):
        return bool(self._set)

    __nonzero__ = __bool__
600
+
601
+
602
+ # Class decorator that adds a metaclass and recreates the class with it.
603
+ # Copied from 'six'.
604
# Class decorator that adds a metaclass and recreates the class with it.
# Copied from 'six'.
def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""
    def wrapper(cls):
        namespace = dict(cls.__dict__)
        # Slot attributes live on the class as descriptors; drop them so the
        # metaclass recreates them from __slots__ itself.
        slots = namespace.get('__slots__')
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            for slot_name in slots:
                namespace.pop(slot_name)
        namespace.pop('__dict__', None)
        namespace.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, namespace)
    return wrapper
618
+
619
+
620
def raise_error_if_module_name_forbidden(full_module_name):
    """Reject 'cython' (and its submodules) as a user module name, failing early."""
    # it is a bad idea to call a pyx-file cython.pyx
    is_forbidden = (full_module_name == 'cython'
                    or full_module_name.startswith('cython.'))
    if is_forbidden:
        raise ValueError('cython is a special module, cannot be used as a module name')
624
+
625
+
626
def build_hex_version(version_string):
    """
    Parse and translate public version identifier like '4.3a1' into the readable hex representation '0x040300A1' (like PY_VERSION_HEX).

    SEE: https://peps.python.org/pep-0440/#public-version-identifiers
    """
    # '4.12a1' becomes [4, 12, 0, 0xA1]; .dev/.pre/.post segments are ignored.
    status_nibble = 0xF0  # final release unless a pre-release marker is seen
    parts = []
    for token in re.split(r'(\D+)', version_string):
        if token in ('a', 'b', 'rc'):
            status_nibble = {'a': 0xA0, 'b': 0xB0, 'rc': 0xC0}[token]
            parts = (parts + [0, 0])[:3]  # 1.2a1 -> 1.2.0a1
        elif token in ('.dev', '.pre', '.post'):
            break  # those are always the last segments
        elif token != '.':
            parts.append(int(token))

    parts = (parts + [0] * 3)[:4]
    parts[3] += status_nibble

    # Build a single hex value, two hex digits per version part.
    hex_number = 0
    for part in parts:
        hex_number = (hex_number << 8) + part

    return '0x%08X' % hex_number
654
+
655
+
656
def write_depfile(target, source, dependencies):
    """Write a Make-style dependency file '<target>.dep' listing the
    *dependencies* of *source*.

    Paths below the source directory are written relative to the current
    directory; all other paths are written as absolute paths.
    """
    cwd = os.getcwd()
    source_dir = os.path.dirname(source)
    if not source_dir.endswith(os.sep):
        source_dir += os.sep

    paths = []
    for dep in dependencies:
        if dep.startswith(source_dir):
            try:
                dep_path = os.path.relpath(dep, cwd)
            except ValueError:
                # if they are on different Windows drives, absolute is fine
                dep_path = os.path.abspath(dep)
        else:
            dep_path = os.path.abspath(dep)
        paths.append(dep_path)

    depline = os.path.relpath(target, cwd) + ": \\\n  "
    depline += " \\\n  ".join(paths) + "\n"

    with open(target + '.dep', 'w') as dep_file:
        dep_file.write(depline)
679
+
680
+
681
def print_version():
    """Print the Cython version to stdout, and also to stderr when stderr
    appears to go to a different destination (legacy tools read it there).
    """
    print("Cython version %s" % cython_version)
    # For legacy reasons, the version also goes to stderr: new tools should
    # expect it on stdout, but existing ones still pipe from stderr or both.
    # The fstat comparison is somewhat unsafe since sys.stdout/err might not
    # really be linked to fds 1/2, but in most *relevant* cases (Cython run
    # as an external tool) they are.
    duplicate_to_stderr = not (
        sys.stderr.isatty()
        or sys.stdout == sys.stderr
        or os.fstat(1) == os.fstat(2)
    )
    if duplicate_to_stderr:
        sys.stderr.write("Cython version %s\n" % cython_version)
692
+
693
+
694
def normalise_float_repr(float_str):
    """
    Generate a 'normalised', simple digits string representation of a float value
    to allow string comparisons.  Examples: '.123', '123.456', '123.'

    The result has no exponent and no leading zeros; trailing zeros after the
    dot are stripped ('0' normalises to '.0').
    """
    str_value = float_str.lower().lstrip('0')

    # After .lower() the exponent marker can only be 'e' — the original
    # "'E' in str_value" test here was dead code and has been removed.
    exp = 0
    if 'e' in str_value:
        str_value, exp = str_value.split('e', 1)
        exp = int(exp)

    if '.' in str_value:
        num_int_digits = str_value.index('.')
        # drop the dot; its position is folded into 'exp' below
        str_value = str_value[:num_int_digits] + str_value[num_int_digits + 1:]
    else:
        num_int_digits = len(str_value)
    exp += num_int_digits

    # Re-insert the dot at digit position 'exp', zero-padding either side.
    # Note: a negative 'exp' both trims the slice and sizes the '0' padding.
    result = (
        str_value[:exp]
        + '0' * (exp - len(str_value))
        + '.'
        + '0' * -exp
        + str_value[exp:]
    ).rstrip('0')

    return result if result != '.' else '.0'
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/__init__.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import absolute_import
2
+
3
+ from .Shadow import __version__
4
+
5
+ # Void cython.* directives (for case insensitive operating systems).
6
+ from .Shadow import *
7
+
8
+
9
def load_ipython_extension(ip):
    """Load the extension in IPython.

    Registers Cython's cell magics (%%cython etc.) with the IPython shell *ip*.
    """
    # Imported lazily to avoid a cyclic import at package load time.
    from .Build.IpythonMagic import CythonMagics  # pylint: disable=cyclic-import
    ip.register_magics(CythonMagics)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/__pycache__/nx_latex.cpython-311.pyc ADDED
Binary file (25.7 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/nx_agraph.py ADDED
@@ -0,0 +1,469 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ***************
3
+ Graphviz AGraph
4
+ ***************
5
+
6
+ Interface to pygraphviz AGraph class.
7
+
8
+ Examples
9
+ --------
10
+ >>> G = nx.complete_graph(5)
11
+ >>> A = nx.nx_agraph.to_agraph(G)
12
+ >>> H = nx.nx_agraph.from_agraph(A)
13
+
14
+ See Also
15
+ --------
16
+ - Pygraphviz: http://pygraphviz.github.io/
17
+ - Graphviz: https://www.graphviz.org
18
+ - DOT Language: http://www.graphviz.org/doc/info/lang.html
19
+ """
20
+ import os
21
+ import tempfile
22
+
23
+ import networkx as nx
24
+
25
+ __all__ = [
26
+ "from_agraph",
27
+ "to_agraph",
28
+ "write_dot",
29
+ "read_dot",
30
+ "graphviz_layout",
31
+ "pygraphviz_layout",
32
+ "view_pygraphviz",
33
+ ]
34
+
35
+
36
@nx._dispatch(graphs=None)
def from_agraph(A, create_using=None):
    """Returns a NetworkX Graph or DiGraph from a PyGraphviz graph.

    Parameters
    ----------
    A : PyGraphviz AGraph
      A graph created with PyGraphviz

    create_using : NetworkX graph constructor, optional (default=None)
       Graph type to create. If graph instance, then cleared before populated.
       If `None`, then the appropriate Graph type is inferred from `A`.

    Examples
    --------
    >>> K5 = nx.complete_graph(5)
    >>> A = nx.nx_agraph.to_agraph(K5)
    >>> G = nx.nx_agraph.from_agraph(A)

    Notes
    -----
    The Graph G will have a dictionary G.graph_attr containing
    the default graphviz attributes for graphs, nodes and edges.

    Default node attributes will be in the dictionary G.node_attr
    which is keyed by node.

    Edge attributes will be returned as edge data in G. With
    edge_attr=False the edge data will be the Graphviz edge weight
    attribute or the value 1 if no edge weight attribute is found.

    """
    if create_using is None:
        # Infer the graph type from A: directedness, and strictness
        # (a strict graph has no parallel edges -> non-multi graph class).
        if A.is_directed():
            if A.is_strict():
                create_using = nx.DiGraph
            else:
                create_using = nx.MultiDiGraph
        else:
            if A.is_strict():
                create_using = nx.Graph
            else:
                create_using = nx.MultiGraph

    # assign defaults
    N = nx.empty_graph(0, create_using)
    if A.name is not None:
        N.name = A.name

    # add graph attributes
    N.graph.update(A.graph_attr)

    # add nodes, attributes to N.node_attr
    # (node names and attribute keys are coerced to str)
    for n in A.nodes():
        str_attr = {str(k): v for k, v in n.attr.items()}
        N.add_node(str(n), **str_attr)

    # add edges, assign edge data as dictionary of attributes
    for e in A.edges():
        u, v = str(e[0]), str(e[1])
        attr = dict(e.attr)
        str_attr = {str(k): v for k, v in attr.items()}
        if not N.is_multigraph():
            if e.name is not None:
                # non-multigraph: keep the Graphviz edge key as plain edge data
                str_attr["key"] = e.name
            N.add_edge(u, v, **str_attr)
        else:
            N.add_edge(u, v, key=e.name, **str_attr)

    # add default attributes for graph, nodes, and edges
    # hang them on N.graph_attr
    N.graph["graph"] = dict(A.graph_attr)
    N.graph["node"] = dict(A.node_attr)
    N.graph["edge"] = dict(A.edge_attr)
    return N
111
+
112
+
113
def to_agraph(N):
    """Returns a pygraphviz graph from a NetworkX graph N.

    Parameters
    ----------
    N : NetworkX graph
      A graph created with NetworkX

    Examples
    --------
    >>> K5 = nx.complete_graph(5)
    >>> A = nx.nx_agraph.to_agraph(K5)

    Notes
    -----
    If N has an dict N.graph_attr an attempt will be made first
    to copy properties attached to the graph (see from_agraph)
    and then updated with the calling arguments if any.

    """
    try:
        import pygraphviz
    except ImportError as err:
        raise ImportError(
            "requires pygraphviz " "http://pygraphviz.github.io/"
        ) from err
    directed = N.is_directed()
    # strict = no self-loops and no parallel edges
    strict = nx.number_of_selfloops(N) == 0 and not N.is_multigraph()

    # NOTE(review): this loop mutates the *input* graph N, rewriting each
    # node's "pos" attribute into Graphviz's "x,y!" pinned-position string —
    # confirm callers do not rely on keeping their original tuple values.
    for node in N:
        if "pos" in N.nodes[node]:
            N.nodes[node]["pos"] = "{},{}!".format(
                N.nodes[node]["pos"][0], N.nodes[node]["pos"][1]
            )

    A = pygraphviz.AGraph(name=N.name, strict=strict, directed=directed)

    # default graph attributes
    A.graph_attr.update(N.graph.get("graph", {}))
    A.node_attr.update(N.graph.get("node", {}))
    A.edge_attr.update(N.graph.get("edge", {}))

    # remaining top-level graph attributes override the defaults above
    A.graph_attr.update(
        (k, v) for k, v in N.graph.items() if k not in ("graph", "node", "edge")
    )

    # add nodes
    for n, nodedata in N.nodes(data=True):
        A.add_node(n)
        # Add node data (Graphviz attributes are strings)
        a = A.get_node(n)
        a.attr.update({k: str(v) for k, v in nodedata.items()})

    # loop over edges
    if N.is_multigraph():
        for u, v, key, edgedata in N.edges(data=True, keys=True):
            str_edgedata = {k: str(v) for k, v in edgedata.items() if k != "key"}
            A.add_edge(u, v, key=str(key))
            # Add edge data
            a = A.get_edge(u, v)
            a.attr.update(str_edgedata)

    else:
        for u, v, edgedata in N.edges(data=True):
            str_edgedata = {k: str(v) for k, v in edgedata.items()}
            A.add_edge(u, v)
            # Add edge data
            a = A.get_edge(u, v)
            a.attr.update(str_edgedata)

    return A
184
+
185
+
186
def write_dot(G, path):
    """Write NetworkX graph G to Graphviz dot format on path.

    Parameters
    ----------
    G : graph
       A networkx graph
    path : filename
       Filename or file handle to write

    Notes
    -----
    To use a specific graph layout, call ``A.layout`` prior to `write_dot`.
    Note that some graphviz layouts are not guaranteed to be deterministic,
    see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info.
    """
    agraph = to_agraph(G)
    agraph.write(path)
    agraph.clear()
206
+
207
+
208
@nx._dispatch(name="agraph_read_dot", graphs=None)
def read_dot(path):
    """Returns a NetworkX graph from a dot file on path.

    Parameters
    ----------
    path : file or string
       File name or file handle to read.
    """
    try:
        import pygraphviz
    except ImportError as err:
        raise ImportError(
            "read_dot() requires pygraphviz http://pygraphviz.github.io/"
        ) from err
    dot_graph = pygraphviz.AGraph(file=path)
    graph = from_agraph(dot_graph)
    dot_graph.clear()
    return graph
227
+
228
+
229
def graphviz_layout(G, prog="neato", root=None, args=""):
    """Create node positions for G using Graphviz.

    Parameters
    ----------
    G : NetworkX graph
      A graph created with NetworkX
    prog : string
      Name of Graphviz layout program
    root : string, optional
      Root node for twopi layout
    args : string, optional
      Extra arguments to Graphviz layout program

    Returns
    -------
    Dictionary of x, y, positions keyed by node.

    Examples
    --------
    >>> G = nx.petersen_graph()
    >>> pos = nx.nx_agraph.graphviz_layout(G)
    >>> pos = nx.nx_agraph.graphviz_layout(G, prog="dot")

    Notes
    -----
    This is a wrapper for pygraphviz_layout.

    Note that some graphviz layouts are not guaranteed to be deterministic,
    see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info.
    """
    # Thin alias kept for backwards compatibility; all work happens below.
    return pygraphviz_layout(G, prog=prog, root=root, args=args)
261
+
262
+
263
def pygraphviz_layout(G, prog="neato", root=None, args=""):
    """Create node positions for G using Graphviz.

    Parameters
    ----------
    G : NetworkX graph
      A graph created with NetworkX
    prog : string
      Name of Graphviz layout program
    root : string, optional
      Root node for twopi layout
    args : string, optional
      Extra arguments to Graphviz layout program

    Returns
    -------
    node_pos : dict
      Dictionary of x, y, positions keyed by node.

    Examples
    --------
    >>> G = nx.petersen_graph()
    >>> pos = nx.nx_agraph.graphviz_layout(G)
    >>> pos = nx.nx_agraph.graphviz_layout(G, prog="dot")

    Notes
    -----
    If you use complex node objects, they may have the same string
    representation and GraphViz could treat them as the same node.
    The layout may assign both nodes a single location. See Issue #1568
    If this occurs in your case, consider relabeling the nodes just
    for the layout computation using something similar to::

        >>> H = nx.convert_node_labels_to_integers(G, label_attribute="node_label")
        >>> H_layout = nx.nx_agraph.pygraphviz_layout(G, prog="dot")
        >>> G_layout = {H.nodes[n]["node_label"]: p for n, p in H_layout.items()}

    Note that some graphviz layouts are not guaranteed to be deterministic,
    see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info.
    """
    try:
        import pygraphviz
    except ImportError as err:
        raise ImportError(
            "requires pygraphviz " "http://pygraphviz.github.io/"
        ) from err
    if root is not None:
        # Separate the option from any caller-supplied args with a space;
        # previously it was glued onto the last argument when args != "".
        args += f" -Groot={root}"
    A = to_agraph(G)
    A.layout(prog=prog, args=args)
    node_pos = {}
    for n in G:
        node = pygraphviz.Node(A, n)
        try:
            xs = node.attr["pos"].split(",")
            node_pos[n] = tuple(float(x) for x in xs)
        except (KeyError, ValueError, AttributeError):
            # Missing or unparsable position.  (The previous bare "except:"
            # also swallowed KeyboardInterrupt/SystemExit.)
            print("no position for node", n)
            node_pos[n] = (0.0, 0.0)
    return node_pos
323
+
324
+
325
@nx.utils.open_file(5, "w+b")
def view_pygraphviz(
    G, edgelabel=None, prog="dot", args="", suffix="", path=None, show=True
):
    """Views the graph G using the specified layout algorithm.

    Parameters
    ----------
    G : NetworkX graph
        The machine to draw.
    edgelabel : str, callable, None
        If a string, then it specifies the edge attribute to be displayed
        on the edge labels. If a callable, then it is called for each
        edge and it should return the string to be displayed on the edges.
        The function signature of `edgelabel` should be edgelabel(data),
        where `data` is the edge attribute dictionary.
    prog : string
        Name of Graphviz layout program.
    args : str
        Additional arguments to pass to the Graphviz layout program.
    suffix : str
        If `filename` is None, we save to a temporary file.  The value of
        `suffix` will appear at the tail end of the temporary filename.
    path : str, None
        The filename used to save the image.  If None, save to a temporary
        file.  File formats are the same as those from pygraphviz.agraph.draw.
    show : bool, default = True
        Whether to display the graph with :mod:`PIL.Image.show`,
        default is `True`. If `False`, the rendered graph is still available
        at `path`.

    Returns
    -------
    path : str
        The filename of the generated image.
    A : PyGraphviz graph
        The PyGraphviz graph instance used to generate the image.

    Notes
    -----
    If this function is called in succession too quickly, sometimes the
    image is not displayed. So you might consider time.sleep(.5) between
    calls if you experience problems.

    Note that some graphviz layouts are not guaranteed to be deterministic,
    see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info.

    """
    if not len(G):
        raise nx.NetworkXException("An empty graph cannot be drawn.")

    # If we are providing default values for graphviz, these must be set
    # before any nodes or edges are added to the PyGraphviz graph object.
    # The reason for this is that default values only affect incoming objects.
    # If you change the default values after the objects have been added,
    # then they inherit no value and are set only if explicitly set.

    # to_agraph() uses these values.
    attrs = ["edge", "node", "graph"]
    for attr in attrs:
        if attr not in G.graph:
            G.graph[attr] = {}

    # These are the default values.
    edge_attrs = {"fontsize": "10"}
    node_attrs = {
        "style": "filled",
        "fillcolor": "#0000FF40",
        "height": "0.75",
        "width": "0.75",
        "shape": "circle",
    }
    graph_attrs = {}

    def update_attrs(which, attrs):
        # Update graph attributes. Return list of those which were added.
        added = []
        for k, v in attrs.items():
            if k not in G.graph[which]:
                G.graph[which][k] = v
                added.append(k)
        # BUG FIX: the 'added' list was previously never returned, and the
        # cleanup below deleted the full defaults dict instead.
        return added

    def clean_attrs(which, added):
        # Remove only the attributes this function added itself.
        for attr in added:
            del G.graph[which][attr]
        if not G.graph[which]:
            del G.graph[which]

    # Update all default values, remembering exactly what we added.
    added_edge = update_attrs("edge", edge_attrs)
    added_node = update_attrs("node", node_attrs)
    added_graph = update_attrs("graph", graph_attrs)

    # Convert to agraph, so we inherit default values
    A = to_agraph(G)

    # Remove *only* the values we added to the original graph.  Passing the
    # whole defaults dict here (as before) also deleted user-supplied
    # attributes that happened to share a key with a default.
    clean_attrs("edge", added_edge)
    clean_attrs("node", added_node)
    clean_attrs("graph", added_graph)

    # If the user passed in an edgelabel, we update the labels for all edges.
    if edgelabel is not None:
        if not callable(edgelabel):

            def func(data):
                return "".join(["  ", str(data[edgelabel]), "  "])

        else:
            func = edgelabel

        # update all the edge labels
        if G.is_multigraph():
            for u, v, key, data in G.edges(keys=True, data=True):
                # PyGraphviz doesn't convert the key to a string. See #339
                edge = A.get_edge(u, v, str(key))
                edge.attr["label"] = str(func(data))
        else:
            for u, v, data in G.edges(data=True):
                edge = A.get_edge(u, v)
                edge.attr["label"] = str(func(data))

    if path is None:
        ext = "png"
        if suffix:
            suffix = f"_{suffix}.{ext}"
        else:
            suffix = f".{ext}"
        path = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
    else:
        # Assume the decorator worked and it is a file-object.
        pass

    # Write graph to file
    A.draw(path=path, format=None, prog=prog, args=args)
    path.close()

    # Show graph in a new window (depends on platform configuration)
    if show:
        from PIL import Image

        Image.open(path.name).show()

    return path.name, A
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__pycache__/test_agraph.cpython-311.pyc ADDED
Binary file (20.6 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/test_latex.py ADDED
@@ -0,0 +1,292 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+
5
+
6
+ def test_tikz_attributes():
7
+ G = nx.path_graph(4, create_using=nx.DiGraph)
8
+ pos = {n: (n, n) for n in G}
9
+
10
+ G.add_edge(0, 0)
11
+ G.edges[(0, 0)]["label"] = "Loop"
12
+ G.edges[(0, 0)]["label_options"] = "midway"
13
+
14
+ G.nodes[0]["style"] = "blue"
15
+ G.nodes[1]["style"] = "line width=3,draw"
16
+ G.nodes[2]["style"] = "circle,draw,blue!50"
17
+ G.nodes[3]["label"] = "Stop"
18
+ G.edges[(0, 1)]["label"] = "1st Step"
19
+ G.edges[(0, 1)]["label_options"] = "near end"
20
+ G.edges[(2, 3)]["label"] = "3rd Step"
21
+ G.edges[(2, 3)]["label_options"] = "near start"
22
+ G.edges[(2, 3)]["style"] = "bend left,green"
23
+ G.edges[(1, 2)]["label"] = "2nd"
24
+ G.edges[(1, 2)]["label_options"] = "pos=0.5"
25
+ G.edges[(1, 2)]["style"] = ">->,bend right,line width=3,green!90"
26
+
27
+ output_tex = nx.to_latex(
28
+ G,
29
+ pos=pos,
30
+ as_document=False,
31
+ tikz_options="[scale=3]",
32
+ node_options="style",
33
+ edge_options="style",
34
+ node_label="label",
35
+ edge_label="label",
36
+ edge_label_options="label_options",
37
+ )
38
+ expected_tex = r"""\begin{figure}
39
+ \begin{tikzpicture}[scale=3]
40
+ \draw
41
+ (0, 0) node[blue] (0){0}
42
+ (1, 1) node[line width=3,draw] (1){1}
43
+ (2, 2) node[circle,draw,blue!50] (2){2}
44
+ (3, 3) node (3){Stop};
45
+ \begin{scope}[->]
46
+ \draw (0) to node[near end] {1st Step} (1);
47
+ \draw[loop,] (0) to node[midway] {Loop} (0);
48
+ \draw[>->,bend right,line width=3,green!90] (1) to node[pos=0.5] {2nd} (2);
49
+ \draw[bend left,green] (2) to node[near start] {3rd Step} (3);
50
+ \end{scope}
51
+ \end{tikzpicture}
52
+ \end{figure}"""
53
+
54
+ assert output_tex == expected_tex
55
+ # print(output_tex)
56
+ # # Pretty way to assert that A.to_document() == expected_tex
57
+ # content_same = True
58
+ # for aa, bb in zip(expected_tex.split("\n"), output_tex.split("\n")):
59
+ # if aa != bb:
60
+ # content_same = False
61
+ # print(f"-{aa}|\n+{bb}|")
62
+ # assert content_same
63
+
64
+
65
+ def test_basic_multiple_graphs():
66
+ H1 = nx.path_graph(4)
67
+ H2 = nx.complete_graph(4)
68
+ H3 = nx.path_graph(8)
69
+ H4 = nx.complete_graph(8)
70
+ captions = [
71
+ "Path on 4 nodes",
72
+ "Complete graph on 4 nodes",
73
+ "Path on 8 nodes",
74
+ "Complete graph on 8 nodes",
75
+ ]
76
+ labels = ["fig2a", "fig2b", "fig2c", "fig2d"]
77
+ latex_code = nx.to_latex(
78
+ [H1, H2, H3, H4],
79
+ n_rows=2,
80
+ sub_captions=captions,
81
+ sub_labels=labels,
82
+ )
83
+ # print(latex_code)
84
+ assert "begin{document}" in latex_code
85
+ assert "begin{figure}" in latex_code
86
+ assert latex_code.count("begin{subfigure}") == 4
87
+ assert latex_code.count("tikzpicture") == 8
88
+ assert latex_code.count("[-]") == 4
89
+
90
+
91
+ def test_basic_tikz():
92
+ expected_tex = r"""\documentclass{report}
93
+ \usepackage{tikz}
94
+ \usepackage{subcaption}
95
+
96
+ \begin{document}
97
+ \begin{figure}
98
+ \begin{subfigure}{0.5\textwidth}
99
+ \begin{tikzpicture}[scale=2]
100
+ \draw[gray!90]
101
+ (0.749, 0.702) node[red!90] (0){0}
102
+ (1.0, -0.014) node[red!90] (1){1}
103
+ (-0.777, -0.705) node (2){2}
104
+ (-0.984, 0.042) node (3){3}
105
+ (-0.028, 0.375) node[cyan!90] (4){4}
106
+ (-0.412, 0.888) node (5){5}
107
+ (0.448, -0.856) node (6){6}
108
+ (0.003, -0.431) node[cyan!90] (7){7};
109
+ \begin{scope}[->,gray!90]
110
+ \draw (0) to (4);
111
+ \draw (0) to (5);
112
+ \draw (0) to (6);
113
+ \draw (0) to (7);
114
+ \draw (1) to (4);
115
+ \draw (1) to (5);
116
+ \draw (1) to (6);
117
+ \draw (1) to (7);
118
+ \draw (2) to (4);
119
+ \draw (2) to (5);
120
+ \draw (2) to (6);
121
+ \draw (2) to (7);
122
+ \draw (3) to (4);
123
+ \draw (3) to (5);
124
+ \draw (3) to (6);
125
+ \draw (3) to (7);
126
+ \end{scope}
127
+ \end{tikzpicture}
128
+ \caption{My tikz number 1 of 2}\label{tikz_1_2}
129
+ \end{subfigure}
130
+ \begin{subfigure}{0.5\textwidth}
131
+ \begin{tikzpicture}[scale=2]
132
+ \draw[gray!90]
133
+ (0.749, 0.702) node[green!90] (0){0}
134
+ (1.0, -0.014) node[green!90] (1){1}
135
+ (-0.777, -0.705) node (2){2}
136
+ (-0.984, 0.042) node (3){3}
137
+ (-0.028, 0.375) node[purple!90] (4){4}
138
+ (-0.412, 0.888) node (5){5}
139
+ (0.448, -0.856) node (6){6}
140
+ (0.003, -0.431) node[purple!90] (7){7};
141
+ \begin{scope}[->,gray!90]
142
+ \draw (0) to (4);
143
+ \draw (0) to (5);
144
+ \draw (0) to (6);
145
+ \draw (0) to (7);
146
+ \draw (1) to (4);
147
+ \draw (1) to (5);
148
+ \draw (1) to (6);
149
+ \draw (1) to (7);
150
+ \draw (2) to (4);
151
+ \draw (2) to (5);
152
+ \draw (2) to (6);
153
+ \draw (2) to (7);
154
+ \draw (3) to (4);
155
+ \draw (3) to (5);
156
+ \draw (3) to (6);
157
+ \draw (3) to (7);
158
+ \end{scope}
159
+ \end{tikzpicture}
160
+ \caption{My tikz number 2 of 2}\label{tikz_2_2}
161
+ \end{subfigure}
162
+ \caption{A graph generated with python and latex.}
163
+ \end{figure}
164
+ \end{document}"""
165
+
166
+ edges = [
167
+ (0, 4),
168
+ (0, 5),
169
+ (0, 6),
170
+ (0, 7),
171
+ (1, 4),
172
+ (1, 5),
173
+ (1, 6),
174
+ (1, 7),
175
+ (2, 4),
176
+ (2, 5),
177
+ (2, 6),
178
+ (2, 7),
179
+ (3, 4),
180
+ (3, 5),
181
+ (3, 6),
182
+ (3, 7),
183
+ ]
184
+ G = nx.DiGraph()
185
+ G.add_nodes_from(range(8))
186
+ G.add_edges_from(edges)
187
+ pos = {
188
+ 0: (0.7490296171687696, 0.702353520257394),
189
+ 1: (1.0, -0.014221357723796535),
190
+ 2: (-0.7765783344161441, -0.7054170966808919),
191
+ 3: (-0.9842690223417624, 0.04177547602465483),
192
+ 4: (-0.02768523817180917, 0.3745724439551441),
193
+ 5: (-0.41154855146767433, 0.8880106515525136),
194
+ 6: (0.44780153389148264, -0.8561492709269164),
195
+ 7: (0.0032499953371383505, -0.43092436645809945),
196
+ }
197
+
198
+ rc_node_color = {0: "red!90", 1: "red!90", 4: "cyan!90", 7: "cyan!90"}
199
+ gp_node_color = {0: "green!90", 1: "green!90", 4: "purple!90", 7: "purple!90"}
200
+
201
+ H = G.copy()
202
+ nx.set_node_attributes(G, rc_node_color, "color")
203
+ nx.set_node_attributes(H, gp_node_color, "color")
204
+
205
+ sub_captions = ["My tikz number 1 of 2", "My tikz number 2 of 2"]
206
+ sub_labels = ["tikz_1_2", "tikz_2_2"]
207
+
208
+ output_tex = nx.to_latex(
209
+ [G, H],
210
+ [pos, pos],
211
+ tikz_options="[scale=2]",
212
+ default_node_options="gray!90",
213
+ default_edge_options="gray!90",
214
+ node_options="color",
215
+ sub_captions=sub_captions,
216
+ sub_labels=sub_labels,
217
+ caption="A graph generated with python and latex.",
218
+ n_rows=2,
219
+ as_document=True,
220
+ )
221
+
222
+ assert output_tex == expected_tex
223
+ # print(output_tex)
224
+ # # Pretty way to assert that A.to_document() == expected_tex
225
+ # content_same = True
226
+ # for aa, bb in zip(expected_tex.split("\n"), output_tex.split("\n")):
227
+ # if aa != bb:
228
+ # content_same = False
229
+ # print(f"-{aa}|\n+{bb}|")
230
+ # assert content_same
231
+
232
+
233
+ def test_exception_pos_single_graph(to_latex=nx.to_latex):
234
+ # smoke test that pos can be a string
235
+ G = nx.path_graph(4)
236
+ to_latex(G, pos="pos")
237
+
238
+ # must include all nodes
239
+ pos = {0: (1, 2), 1: (0, 1), 2: (2, 1)}
240
+ with pytest.raises(nx.NetworkXError):
241
+ to_latex(G, pos)
242
+
243
+ # must have 2 values
244
+ pos[3] = (1, 2, 3)
245
+ with pytest.raises(nx.NetworkXError):
246
+ to_latex(G, pos)
247
+ pos[3] = 2
248
+ with pytest.raises(nx.NetworkXError):
249
+ to_latex(G, pos)
250
+
251
+ # check that passes with 2 values
252
+ pos[3] = (3, 2)
253
+ to_latex(G, pos)
254
+
255
+
256
+ def test_exception_multiple_graphs(to_latex=nx.to_latex):
257
+ G = nx.path_graph(3)
258
+ pos_bad = {0: (1, 2), 1: (0, 1)}
259
+ pos_OK = {0: (1, 2), 1: (0, 1), 2: (2, 1)}
260
+ fourG = [G, G, G, G]
261
+ fourpos = [pos_OK, pos_OK, pos_OK, pos_OK]
262
+
263
+ # input single dict to use for all graphs
264
+ to_latex(fourG, pos_OK)
265
+ with pytest.raises(nx.NetworkXError):
266
+ to_latex(fourG, pos_bad)
267
+
268
+ # input list of dicts to use for all graphs
269
+ to_latex(fourG, fourpos)
270
+ with pytest.raises(nx.NetworkXError):
271
+ to_latex(fourG, [pos_bad, pos_bad, pos_bad, pos_bad])
272
+
273
+ # every pos dict must include all nodes
274
+ with pytest.raises(nx.NetworkXError):
275
+ to_latex(fourG, [pos_OK, pos_OK, pos_bad, pos_OK])
276
+
277
+ # test sub_captions and sub_labels (len must match Gbunch)
278
+ with pytest.raises(nx.NetworkXError):
279
+ to_latex(fourG, fourpos, sub_captions=["hi", "hi"])
280
+
281
+ with pytest.raises(nx.NetworkXError):
282
+ to_latex(fourG, fourpos, sub_labels=["hi", "hi"])
283
+
284
+ # all pass
285
+ to_latex(fourG, fourpos, sub_captions=["hi"] * 4, sub_labels=["lbl"] * 4)
286
+
287
+
288
+ def test_exception_multigraph():
289
+ G = nx.path_graph(4, create_using=nx.MultiGraph)
290
+ G.add_edge(1, 2)
291
+ with pytest.raises(nx.NetworkXNotImplemented):
292
+ nx.to_latex(G)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/test_pylab.py ADDED
@@ -0,0 +1,791 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for matplotlib drawing functions."""
2
+ import itertools
3
+ import os
4
+ import warnings
5
+
6
+ import pytest
7
+
8
+ mpl = pytest.importorskip("matplotlib")
9
+ np = pytest.importorskip("numpy")
10
+ mpl.use("PS")
11
+ plt = pytest.importorskip("matplotlib.pyplot")
12
+ plt.rcParams["text.usetex"] = False
13
+
14
+
15
+ import networkx as nx
16
+
17
+ barbell = nx.barbell_graph(4, 6)
18
+
19
+
20
+ def test_draw():
21
+ try:
22
+ functions = [
23
+ nx.draw_circular,
24
+ nx.draw_kamada_kawai,
25
+ nx.draw_planar,
26
+ nx.draw_random,
27
+ nx.draw_spectral,
28
+ nx.draw_spring,
29
+ nx.draw_shell,
30
+ ]
31
+ options = [{"node_color": "black", "node_size": 100, "width": 3}]
32
+ for function, option in itertools.product(functions, options):
33
+ function(barbell, **option)
34
+ plt.savefig("test.ps")
35
+
36
+ finally:
37
+ try:
38
+ os.unlink("test.ps")
39
+ except OSError:
40
+ pass
41
+
42
+
43
+ def test_draw_shell_nlist():
44
+ try:
45
+ nlist = [list(range(4)), list(range(4, 10)), list(range(10, 14))]
46
+ nx.draw_shell(barbell, nlist=nlist)
47
+ plt.savefig("test.ps")
48
+ finally:
49
+ try:
50
+ os.unlink("test.ps")
51
+ except OSError:
52
+ pass
53
+
54
+
55
+ def test_edge_colormap():
56
+ colors = range(barbell.number_of_edges())
57
+ nx.draw_spring(
58
+ barbell, edge_color=colors, width=4, edge_cmap=plt.cm.Blues, with_labels=True
59
+ )
60
+ # plt.show()
61
+
62
+
63
+ def test_arrows():
64
+ nx.draw_spring(barbell.to_directed())
65
+ # plt.show()
66
+
67
+
68
+ @pytest.mark.parametrize(
69
+ ("edge_color", "expected"),
70
+ (
71
+ (None, "black"), # Default
72
+ ("r", "red"), # Non-default color string
73
+ (["r"], "red"), # Single non-default color in a list
74
+ ((1.0, 1.0, 0.0), "yellow"), # single color as rgb tuple
75
+ ([(1.0, 1.0, 0.0)], "yellow"), # single color as rgb tuple in list
76
+ ((0, 1, 0, 1), "lime"), # single color as rgba tuple
77
+ ([(0, 1, 0, 1)], "lime"), # single color as rgba tuple in list
78
+ ("#0000ff", "blue"), # single color hex code
79
+ (["#0000ff"], "blue"), # hex code in list
80
+ ),
81
+ )
82
+ @pytest.mark.parametrize("edgelist", (None, [(0, 1)]))
83
+ def test_single_edge_color_undirected(edge_color, expected, edgelist):
84
+ """Tests ways of specifying all edges have a single color for edges
85
+ drawn with a LineCollection"""
86
+
87
+ G = nx.path_graph(3)
88
+ drawn_edges = nx.draw_networkx_edges(
89
+ G, pos=nx.random_layout(G), edgelist=edgelist, edge_color=edge_color
90
+ )
91
+ assert mpl.colors.same_color(drawn_edges.get_color(), expected)
92
+
93
+
94
+ @pytest.mark.parametrize(
95
+ ("edge_color", "expected"),
96
+ (
97
+ (None, "black"), # Default
98
+ ("r", "red"), # Non-default color string
99
+ (["r"], "red"), # Single non-default color in a list
100
+ ((1.0, 1.0, 0.0), "yellow"), # single color as rgb tuple
101
+ ([(1.0, 1.0, 0.0)], "yellow"), # single color as rgb tuple in list
102
+ ((0, 1, 0, 1), "lime"), # single color as rgba tuple
103
+ ([(0, 1, 0, 1)], "lime"), # single color as rgba tuple in list
104
+ ("#0000ff", "blue"), # single color hex code
105
+ (["#0000ff"], "blue"), # hex code in list
106
+ ),
107
+ )
108
+ @pytest.mark.parametrize("edgelist", (None, [(0, 1)]))
109
+ def test_single_edge_color_directed(edge_color, expected, edgelist):
110
+ """Tests ways of specifying all edges have a single color for edges drawn
111
+ with FancyArrowPatches"""
112
+
113
+ G = nx.path_graph(3, create_using=nx.DiGraph)
114
+ drawn_edges = nx.draw_networkx_edges(
115
+ G, pos=nx.random_layout(G), edgelist=edgelist, edge_color=edge_color
116
+ )
117
+ for fap in drawn_edges:
118
+ assert mpl.colors.same_color(fap.get_edgecolor(), expected)
119
+
120
+
121
+ def test_edge_color_tuple_interpretation():
122
+ """If edge_color is a sequence with the same length as edgelist, then each
123
+ value in edge_color is mapped onto each edge via colormap."""
124
+ G = nx.path_graph(6, create_using=nx.DiGraph)
125
+ pos = {n: (n, n) for n in range(len(G))}
126
+
127
+ # num edges != 3 or 4 --> edge_color interpreted as rgb(a)
128
+ for ec in ((0, 0, 1), (0, 0, 1, 1)):
129
+ # More than 4 edges
130
+ drawn_edges = nx.draw_networkx_edges(G, pos, edge_color=ec)
131
+ for fap in drawn_edges:
132
+ assert mpl.colors.same_color(fap.get_edgecolor(), ec)
133
+ # Fewer than 3 edges
134
+ drawn_edges = nx.draw_networkx_edges(
135
+ G, pos, edgelist=[(0, 1), (1, 2)], edge_color=ec
136
+ )
137
+ for fap in drawn_edges:
138
+ assert mpl.colors.same_color(fap.get_edgecolor(), ec)
139
+
140
+ # num edges == 3, len(edge_color) == 4: interpreted as rgba
141
+ drawn_edges = nx.draw_networkx_edges(
142
+ G, pos, edgelist=[(0, 1), (1, 2), (2, 3)], edge_color=(0, 0, 1, 1)
143
+ )
144
+ for fap in drawn_edges:
145
+ assert mpl.colors.same_color(fap.get_edgecolor(), "blue")
146
+
147
+ # num edges == 4, len(edge_color) == 3: interpreted as rgb
148
+ drawn_edges = nx.draw_networkx_edges(
149
+ G, pos, edgelist=[(0, 1), (1, 2), (2, 3), (3, 4)], edge_color=(0, 0, 1)
150
+ )
151
+ for fap in drawn_edges:
152
+ assert mpl.colors.same_color(fap.get_edgecolor(), "blue")
153
+
154
+ # num edges == len(edge_color) == 3: interpreted with cmap, *not* as rgb
155
+ drawn_edges = nx.draw_networkx_edges(
156
+ G, pos, edgelist=[(0, 1), (1, 2), (2, 3)], edge_color=(0, 0, 1)
157
+ )
158
+ assert mpl.colors.same_color(
159
+ drawn_edges[0].get_edgecolor(), drawn_edges[1].get_edgecolor()
160
+ )
161
+ for fap in drawn_edges:
162
+ assert not mpl.colors.same_color(fap.get_edgecolor(), "blue")
163
+
164
+ # num edges == len(edge_color) == 4: interpreted with cmap, *not* as rgba
165
+ drawn_edges = nx.draw_networkx_edges(
166
+ G, pos, edgelist=[(0, 1), (1, 2), (2, 3), (3, 4)], edge_color=(0, 0, 1, 1)
167
+ )
168
+ assert mpl.colors.same_color(
169
+ drawn_edges[0].get_edgecolor(), drawn_edges[1].get_edgecolor()
170
+ )
171
+ assert mpl.colors.same_color(
172
+ drawn_edges[2].get_edgecolor(), drawn_edges[3].get_edgecolor()
173
+ )
174
+ for fap in drawn_edges:
175
+ assert not mpl.colors.same_color(fap.get_edgecolor(), "blue")
176
+
177
+
178
+ def test_fewer_edge_colors_than_num_edges_directed():
179
+ """Test that the edge colors are cycled when there are fewer specified
180
+ colors than edges."""
181
+ G = barbell.to_directed()
182
+ pos = nx.random_layout(barbell)
183
+ edgecolors = ("r", "g", "b")
184
+ drawn_edges = nx.draw_networkx_edges(G, pos, edge_color=edgecolors)
185
+ for fap, expected in zip(drawn_edges, itertools.cycle(edgecolors)):
186
+ assert mpl.colors.same_color(fap.get_edgecolor(), expected)
187
+
188
+
189
+ def test_more_edge_colors_than_num_edges_directed():
190
+ """Test that extra edge colors are ignored when there are more specified
191
+ colors than edges."""
192
+ G = nx.path_graph(4, create_using=nx.DiGraph) # 3 edges
193
+ pos = nx.random_layout(barbell)
194
+ edgecolors = ("r", "g", "b", "c") # 4 edge colors
195
+ drawn_edges = nx.draw_networkx_edges(G, pos, edge_color=edgecolors)
196
+ for fap, expected in zip(drawn_edges, edgecolors[:-1]):
197
+ assert mpl.colors.same_color(fap.get_edgecolor(), expected)
198
+
199
+
200
+ def test_edge_color_string_with_global_alpha_undirected():
201
+ edge_collection = nx.draw_networkx_edges(
202
+ barbell,
203
+ pos=nx.random_layout(barbell),
204
+ edgelist=[(0, 1), (1, 2)],
205
+ edge_color="purple",
206
+ alpha=0.2,
207
+ )
208
+ ec = edge_collection.get_color().squeeze() # as rgba tuple
209
+ assert len(edge_collection.get_paths()) == 2
210
+ assert mpl.colors.same_color(ec[:-1], "purple")
211
+ assert ec[-1] == 0.2
212
+
213
+
214
+ def test_edge_color_string_with_global_alpha_directed():
215
+ drawn_edges = nx.draw_networkx_edges(
216
+ barbell.to_directed(),
217
+ pos=nx.random_layout(barbell),
218
+ edgelist=[(0, 1), (1, 2)],
219
+ edge_color="purple",
220
+ alpha=0.2,
221
+ )
222
+ assert len(drawn_edges) == 2
223
+ for fap in drawn_edges:
224
+ ec = fap.get_edgecolor() # As rgba tuple
225
+ assert mpl.colors.same_color(ec[:-1], "purple")
226
+ assert ec[-1] == 0.2
227
+
228
+
229
+ @pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph))
230
+ def test_edge_width_default_value(graph_type):
231
+ """Test the default linewidth for edges drawn either via LineCollection or
232
+ FancyArrowPatches."""
233
+ G = nx.path_graph(2, create_using=graph_type)
234
+ pos = {n: (n, n) for n in range(len(G))}
235
+ drawn_edges = nx.draw_networkx_edges(G, pos)
236
+ if isinstance(drawn_edges, list): # directed case: list of FancyArrowPatch
237
+ drawn_edges = drawn_edges[0]
238
+ assert drawn_edges.get_linewidth() == 1
239
+
240
+
241
+ @pytest.mark.parametrize(
242
+ ("edgewidth", "expected"),
243
+ (
244
+ (3, 3), # single-value, non-default
245
+ ([3], 3), # Single value as a list
246
+ ),
247
+ )
248
+ def test_edge_width_single_value_undirected(edgewidth, expected):
249
+ G = nx.path_graph(4)
250
+ pos = {n: (n, n) for n in range(len(G))}
251
+ drawn_edges = nx.draw_networkx_edges(G, pos, width=edgewidth)
252
+ assert len(drawn_edges.get_paths()) == 3
253
+ assert drawn_edges.get_linewidth() == expected
254
+
255
+
256
+ @pytest.mark.parametrize(
257
+ ("edgewidth", "expected"),
258
+ (
259
+ (3, 3), # single-value, non-default
260
+ ([3], 3), # Single value as a list
261
+ ),
262
+ )
263
+ def test_edge_width_single_value_directed(edgewidth, expected):
264
+ G = nx.path_graph(4, create_using=nx.DiGraph)
265
+ pos = {n: (n, n) for n in range(len(G))}
266
+ drawn_edges = nx.draw_networkx_edges(G, pos, width=edgewidth)
267
+ assert len(drawn_edges) == 3
268
+ for fap in drawn_edges:
269
+ assert fap.get_linewidth() == expected
270
+
271
+
272
+ @pytest.mark.parametrize(
273
+ "edgelist",
274
+ (
275
+ [(0, 1), (1, 2), (2, 3)], # one width specification per edge
276
+ None, # fewer widths than edges - widths cycle
277
+ [(0, 1), (1, 2)], # More widths than edges - unused widths ignored
278
+ ),
279
+ )
280
+ def test_edge_width_sequence(edgelist):
281
+ G = barbell.to_directed()
282
+ pos = nx.random_layout(G)
283
+ widths = (0.5, 2.0, 12.0)
284
+ drawn_edges = nx.draw_networkx_edges(G, pos, edgelist=edgelist, width=widths)
285
+ for fap, expected_width in zip(drawn_edges, itertools.cycle(widths)):
286
+ assert fap.get_linewidth() == expected_width
287
+
288
+
289
+ def test_edge_color_with_edge_vmin_vmax():
290
+ """Test that edge_vmin and edge_vmax properly set the dynamic range of the
291
+ color map when num edges == len(edge_colors)."""
292
+ G = nx.path_graph(3, create_using=nx.DiGraph)
293
+ pos = nx.random_layout(G)
294
+ # Extract colors from the original (unscaled) colormap
295
+ drawn_edges = nx.draw_networkx_edges(G, pos, edge_color=[0, 1.0])
296
+ orig_colors = [e.get_edgecolor() for e in drawn_edges]
297
+ # Colors from scaled colormap
298
+ drawn_edges = nx.draw_networkx_edges(
299
+ G, pos, edge_color=[0.2, 0.8], edge_vmin=0.2, edge_vmax=0.8
300
+ )
301
+ scaled_colors = [e.get_edgecolor() for e in drawn_edges]
302
+ assert mpl.colors.same_color(orig_colors, scaled_colors)
303
+
304
+
305
+ def test_directed_edges_linestyle_default():
306
+ """Test default linestyle for edges drawn with FancyArrowPatches."""
307
+ G = nx.path_graph(4, create_using=nx.DiGraph) # Graph with 3 edges
308
+ pos = {n: (n, n) for n in range(len(G))}
309
+
310
+ # edge with default style
311
+ drawn_edges = nx.draw_networkx_edges(G, pos)
312
+ assert len(drawn_edges) == 3
313
+ for fap in drawn_edges:
314
+ assert fap.get_linestyle() == "solid"
315
+
316
+
317
+ @pytest.mark.parametrize(
318
+ "style",
319
+ (
320
+ "dashed", # edge with string style
321
+ "--", # edge with simplified string style
322
+ (1, (1, 1)), # edge with (offset, onoffseq) style
323
+ ),
324
+ )
325
+ def test_directed_edges_linestyle_single_value(style):
326
+ """Tests support for specifying linestyles with a single value to be applied to
327
+ all edges in ``draw_networkx_edges`` for FancyArrowPatch outputs
328
+ (e.g. directed edges)."""
329
+
330
+ G = nx.path_graph(4, create_using=nx.DiGraph) # Graph with 3 edges
331
+ pos = {n: (n, n) for n in range(len(G))}
332
+
333
+ drawn_edges = nx.draw_networkx_edges(G, pos, style=style)
334
+ assert len(drawn_edges) == 3
335
+ for fap in drawn_edges:
336
+ assert fap.get_linestyle() == style
337
+
338
+
339
+ @pytest.mark.parametrize(
340
+ "style_seq",
341
+ (
342
+ ["dashed"], # edge with string style in list
343
+ ["--"], # edge with simplified string style in list
344
+ [(1, (1, 1))], # edge with (offset, onoffseq) style in list
345
+ ["--", "-", ":"], # edges with styles for each edge
346
+ ["--", "-"], # edges with fewer styles than edges (styles cycle)
347
+ ["--", "-", ":", "-."], # edges with more styles than edges (extra unused)
348
+ ),
349
+ )
350
+ def test_directed_edges_linestyle_sequence(style_seq):
351
+ """Tests support for specifying linestyles with sequences in
352
+ ``draw_networkx_edges`` for FancyArrowPatch outputs (e.g. directed edges)."""
353
+
354
+ G = nx.path_graph(4, create_using=nx.DiGraph) # Graph with 3 edges
355
+ pos = {n: (n, n) for n in range(len(G))}
356
+
357
+ drawn_edges = nx.draw_networkx_edges(G, pos, style=style_seq)
358
+ assert len(drawn_edges) == 3
359
+ for fap, style in zip(drawn_edges, itertools.cycle(style_seq)):
360
+ assert fap.get_linestyle() == style
361
+
362
+
363
+ def test_labels_and_colors():
364
+ G = nx.cubical_graph()
365
+ pos = nx.spring_layout(G) # positions for all nodes
366
+ # nodes
367
+ nx.draw_networkx_nodes(
368
+ G, pos, nodelist=[0, 1, 2, 3], node_color="r", node_size=500, alpha=0.75
369
+ )
370
+ nx.draw_networkx_nodes(
371
+ G,
372
+ pos,
373
+ nodelist=[4, 5, 6, 7],
374
+ node_color="b",
375
+ node_size=500,
376
+ alpha=[0.25, 0.5, 0.75, 1.0],
377
+ )
378
+ # edges
379
+ nx.draw_networkx_edges(G, pos, width=1.0, alpha=0.5)
380
+ nx.draw_networkx_edges(
381
+ G,
382
+ pos,
383
+ edgelist=[(0, 1), (1, 2), (2, 3), (3, 0)],
384
+ width=8,
385
+ alpha=0.5,
386
+ edge_color="r",
387
+ )
388
+ nx.draw_networkx_edges(
389
+ G,
390
+ pos,
391
+ edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)],
392
+ width=8,
393
+ alpha=0.5,
394
+ edge_color="b",
395
+ )
396
+ nx.draw_networkx_edges(
397
+ G,
398
+ pos,
399
+ edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)],
400
+ arrows=True,
401
+ min_source_margin=0.5,
402
+ min_target_margin=0.75,
403
+ width=8,
404
+ edge_color="b",
405
+ )
406
+ # some math labels
407
+ labels = {}
408
+ labels[0] = r"$a$"
409
+ labels[1] = r"$b$"
410
+ labels[2] = r"$c$"
411
+ labels[3] = r"$d$"
412
+ labels[4] = r"$\alpha$"
413
+ labels[5] = r"$\beta$"
414
+ labels[6] = r"$\gamma$"
415
+ labels[7] = r"$\delta$"
416
+ nx.draw_networkx_labels(G, pos, labels, font_size=16)
417
+ nx.draw_networkx_edge_labels(G, pos, edge_labels=None, rotate=False)
418
+ nx.draw_networkx_edge_labels(G, pos, edge_labels={(4, 5): "4-5"})
419
+ # plt.show()
420
+
421
+
422
+ @pytest.mark.mpl_image_compare
423
+ def test_house_with_colors():
424
+ G = nx.house_graph()
425
+ # explicitly set positions
426
+ fig, ax = plt.subplots()
427
+ pos = {0: (0, 0), 1: (1, 0), 2: (0, 1), 3: (1, 1), 4: (0.5, 2.0)}
428
+
429
+ # Plot nodes with different properties for the "wall" and "roof" nodes
430
+ nx.draw_networkx_nodes(
431
+ G,
432
+ pos,
433
+ node_size=3000,
434
+ nodelist=[0, 1, 2, 3],
435
+ node_color="tab:blue",
436
+ )
437
+ nx.draw_networkx_nodes(
438
+ G, pos, node_size=2000, nodelist=[4], node_color="tab:orange"
439
+ )
440
+ nx.draw_networkx_edges(G, pos, alpha=0.5, width=6)
441
+ # Customize axes
442
+ ax.margins(0.11)
443
+ plt.tight_layout()
444
+ plt.axis("off")
445
+ return fig
446
+
447
+
448
+ def test_axes():
449
+ fig, ax = plt.subplots()
450
+ nx.draw(barbell, ax=ax)
451
+ nx.draw_networkx_edge_labels(barbell, nx.circular_layout(barbell), ax=ax)
452
+
453
+
454
+ def test_empty_graph():
455
+ G = nx.Graph()
456
+ nx.draw(G)
457
+
458
+
459
+ def test_draw_empty_nodes_return_values():
460
+ # See Issue #3833
461
+ import matplotlib.collections # call as mpl.collections
462
+
463
+ G = nx.Graph([(1, 2), (2, 3)])
464
+ DG = nx.DiGraph([(1, 2), (2, 3)])
465
+ pos = nx.circular_layout(G)
466
+ assert isinstance(
467
+ nx.draw_networkx_nodes(G, pos, nodelist=[]), mpl.collections.PathCollection
468
+ )
469
+ assert isinstance(
470
+ nx.draw_networkx_nodes(DG, pos, nodelist=[]), mpl.collections.PathCollection
471
+ )
472
+
473
+ # drawing empty edges used to return an empty LineCollection or empty list.
474
+ # Now it is always an empty list (because edges are now lists of FancyArrows)
475
+ assert nx.draw_networkx_edges(G, pos, edgelist=[], arrows=True) == []
476
+ assert nx.draw_networkx_edges(G, pos, edgelist=[], arrows=False) == []
477
+ assert nx.draw_networkx_edges(DG, pos, edgelist=[], arrows=False) == []
478
+ assert nx.draw_networkx_edges(DG, pos, edgelist=[], arrows=True) == []
479
+
480
+
481
+ def test_multigraph_edgelist_tuples():
482
+ # See Issue #3295
483
+ G = nx.path_graph(3, create_using=nx.MultiDiGraph)
484
+ nx.draw_networkx(G, edgelist=[(0, 1, 0)])
485
+ nx.draw_networkx(G, edgelist=[(0, 1, 0)], node_size=[10, 20, 0])
486
+
487
+
488
+ def test_alpha_iter():
489
+ pos = nx.random_layout(barbell)
490
+ fig = plt.figure()
491
+ # with fewer alpha elements than nodes
492
+ fig.add_subplot(131) # Each test in a new axis object
493
+ nx.draw_networkx_nodes(barbell, pos, alpha=[0.1, 0.2])
494
+ # with equal alpha elements and nodes
495
+ num_nodes = len(barbell.nodes)
496
+ alpha = [x / num_nodes for x in range(num_nodes)]
497
+ colors = range(num_nodes)
498
+ fig.add_subplot(132)
499
+ nx.draw_networkx_nodes(barbell, pos, node_color=colors, alpha=alpha)
500
+ # with more alpha elements than nodes
501
+ alpha.append(1)
502
+ fig.add_subplot(133)
503
+ nx.draw_networkx_nodes(barbell, pos, alpha=alpha)
504
+
505
+
506
+ def test_error_invalid_kwds():
507
+ with pytest.raises(ValueError, match="Received invalid argument"):
508
+ nx.draw(barbell, foo="bar")
509
+
510
+
511
+ def test_draw_networkx_arrowsize_incorrect_size():
512
+ G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 3)])
513
+ arrowsize = [1, 2, 3]
514
+ with pytest.raises(
515
+ ValueError, match="arrowsize should have the same length as edgelist"
516
+ ):
517
+ nx.draw(G, arrowsize=arrowsize)
518
+
519
+
520
+ @pytest.mark.parametrize("arrowsize", (30, [10, 20, 30]))
521
+ def test_draw_edges_arrowsize(arrowsize):
522
+ G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
523
+ pos = {0: (0, 0), 1: (0, 1), 2: (1, 0)}
524
+ edges = nx.draw_networkx_edges(G, pos=pos, arrowsize=arrowsize)
525
+
526
+ arrowsize = itertools.repeat(arrowsize) if isinstance(arrowsize, int) else arrowsize
527
+
528
+ for fap, expected in zip(edges, arrowsize):
529
+ assert isinstance(fap, mpl.patches.FancyArrowPatch)
530
+ assert fap.get_mutation_scale() == expected
531
+
532
+
533
+ def test_np_edgelist():
534
+ # see issue #4129
535
+ nx.draw_networkx(barbell, edgelist=np.array([(0, 2), (0, 3)]))
536
+
537
+
538
+ def test_draw_nodes_missing_node_from_position():
539
+ G = nx.path_graph(3)
540
+ pos = {0: (0, 0), 1: (1, 1)} # No position for node 2
541
+ with pytest.raises(nx.NetworkXError, match="has no position"):
542
+ nx.draw_networkx_nodes(G, pos)
543
+
544
+
545
+ # NOTE: parametrizing on marker to test both branches of internal
546
+ # nx.draw_networkx_edges.to_marker_edge function
547
+ @pytest.mark.parametrize("node_shape", ("o", "s"))
548
+ def test_draw_edges_min_source_target_margins(node_shape):
549
+ """Test that there is a wider gap between the node and the start of an
550
+ incident edge when min_source_margin is specified.
551
+
552
+ This test checks that the use of min_{source/target}_margin kwargs result
553
+ in shorter (more padding) between the edges and source and target nodes.
554
+ As a crude visual example, let 's' and 't' represent source and target
555
+ nodes, respectively:
556
+
557
+ Default:
558
+ s-----------------------------t
559
+
560
+ With margins:
561
+ s ----------------------- t
562
+
563
+ """
564
+ # Create a single axis object to get consistent pixel coords across
565
+ # multiple draws
566
+ fig, ax = plt.subplots()
567
+ G = nx.DiGraph([(0, 1)])
568
+ pos = {0: (0, 0), 1: (1, 0)} # horizontal layout
569
+ # Get leftmost and rightmost points of the FancyArrowPatch object
570
+ # representing the edge between nodes 0 and 1 (in pixel coordinates)
571
+ default_patch = nx.draw_networkx_edges(G, pos, ax=ax, node_shape=node_shape)[0]
572
+ default_extent = default_patch.get_extents().corners()[::2, 0]
573
+ # Now, do the same but with "padding" for the source and target via the
574
+ # min_{source/target}_margin kwargs
575
+ padded_patch = nx.draw_networkx_edges(
576
+ G,
577
+ pos,
578
+ ax=ax,
579
+ node_shape=node_shape,
580
+ min_source_margin=100,
581
+ min_target_margin=100,
582
+ )[0]
583
+ padded_extent = padded_patch.get_extents().corners()[::2, 0]
584
+
585
+ # With padding, the left-most extent of the edge should be further to the
586
+ # right
587
+ assert padded_extent[0] > default_extent[0]
588
+ # And the rightmost extent of the edge, further to the left
589
+ assert padded_extent[1] < default_extent[1]
590
+
591
+
592
+ def test_nonzero_selfloop_with_single_node():
593
+ """Ensure that selfloop extent is non-zero when there is only one node."""
594
+ # Create explicit axis object for test
595
+ fig, ax = plt.subplots()
596
+ # Graph with single node + self loop
597
+ G = nx.DiGraph()
598
+ G.add_node(0)
599
+ G.add_edge(0, 0)
600
+ # Draw
601
+ patch = nx.draw_networkx_edges(G, {0: (0, 0)})[0]
602
+ # The resulting patch must have non-zero extent
603
+ bbox = patch.get_extents()
604
+ assert bbox.width > 0 and bbox.height > 0
605
+ # Cleanup
606
+ plt.delaxes(ax)
607
+
608
+
609
+ def test_nonzero_selfloop_with_single_edge_in_edgelist():
610
+ """Ensure that selfloop extent is non-zero when only a single edge is
611
+ specified in the edgelist.
612
+ """
613
+ # Create explicit axis object for test
614
+ fig, ax = plt.subplots()
615
+ # Graph with selfloop
616
+ G = nx.path_graph(2, create_using=nx.DiGraph)
617
+ G.add_edge(1, 1)
618
+ pos = {n: (n, n) for n in G.nodes}
619
+ # Draw only the selfloop edge via the `edgelist` kwarg
620
+ patch = nx.draw_networkx_edges(G, pos, edgelist=[(1, 1)])[0]
621
+ # The resulting patch must have non-zero extent
622
+ bbox = patch.get_extents()
623
+ assert bbox.width > 0 and bbox.height > 0
624
+ # Cleanup
625
+ plt.delaxes(ax)
626
+
627
+
628
+ def test_apply_alpha():
629
+ """Test apply_alpha when there is a mismatch between the number of
630
+ supplied colors and elements.
631
+ """
632
+ nodelist = [0, 1, 2]
633
+ colorlist = ["r", "g", "b"]
634
+ alpha = 0.5
635
+ rgba_colors = nx.drawing.nx_pylab.apply_alpha(colorlist, alpha, nodelist)
636
+ assert all(rgba_colors[:, -1] == alpha)
637
+
638
+
639
+ def test_draw_edges_toggling_with_arrows_kwarg():
640
+ """
641
+ The `arrows` keyword argument is used as a 3-way switch to select which
642
+ type of object to use for drawing edges:
643
+ - ``arrows=None`` -> default (FancyArrowPatches for directed, else LineCollection)
644
+ - ``arrows=True`` -> FancyArrowPatches
645
+ - ``arrows=False`` -> LineCollection
646
+ """
647
+ import matplotlib.collections
648
+ import matplotlib.patches
649
+
650
+ UG = nx.path_graph(3)
651
+ DG = nx.path_graph(3, create_using=nx.DiGraph)
652
+ pos = {n: (n, n) for n in UG}
653
+
654
+ # Use FancyArrowPatches when arrows=True, regardless of graph type
655
+ for G in (UG, DG):
656
+ edges = nx.draw_networkx_edges(G, pos, arrows=True)
657
+ assert len(edges) == len(G.edges)
658
+ assert isinstance(edges[0], mpl.patches.FancyArrowPatch)
659
+
660
+ # Use LineCollection when arrows=False, regardless of graph type
661
+ for G in (UG, DG):
662
+ edges = nx.draw_networkx_edges(G, pos, arrows=False)
663
+ assert isinstance(edges, mpl.collections.LineCollection)
664
+
665
+ # Default behavior when arrows=None: FAPs for directed, LC's for undirected
666
+ edges = nx.draw_networkx_edges(UG, pos)
667
+ assert isinstance(edges, mpl.collections.LineCollection)
668
+ edges = nx.draw_networkx_edges(DG, pos)
669
+ assert len(edges) == len(G.edges)
670
+ assert isinstance(edges[0], mpl.patches.FancyArrowPatch)
671
+
672
+
673
+ @pytest.mark.parametrize("drawing_func", (nx.draw, nx.draw_networkx))
674
+ def test_draw_networkx_arrows_default_undirected(drawing_func):
675
+ import matplotlib.collections
676
+
677
+ G = nx.path_graph(3)
678
+ fig, ax = plt.subplots()
679
+ drawing_func(G, ax=ax)
680
+ assert any(isinstance(c, mpl.collections.LineCollection) for c in ax.collections)
681
+ assert not ax.patches
682
+ plt.delaxes(ax)
683
+
684
+
685
+ @pytest.mark.parametrize("drawing_func", (nx.draw, nx.draw_networkx))
686
+ def test_draw_networkx_arrows_default_directed(drawing_func):
687
+ import matplotlib.collections
688
+
689
+ G = nx.path_graph(3, create_using=nx.DiGraph)
690
+ fig, ax = plt.subplots()
691
+ drawing_func(G, ax=ax)
692
+ assert not any(
693
+ isinstance(c, mpl.collections.LineCollection) for c in ax.collections
694
+ )
695
+ assert ax.patches
696
+ plt.delaxes(ax)
697
+
698
+
699
+ def test_edgelist_kwarg_not_ignored():
700
+ # See gh-4994
701
+ G = nx.path_graph(3)
702
+ G.add_edge(0, 0)
703
+ fig, ax = plt.subplots()
704
+ nx.draw(G, edgelist=[(0, 1), (1, 2)], ax=ax) # Exclude self-loop from edgelist
705
+ assert not ax.patches
706
+ plt.delaxes(ax)
707
+
708
+
709
+ def test_draw_networkx_edge_label_multiedge_exception():
710
+ """
711
+ draw_networkx_edge_labels should raise an informative error message when
712
+ the edge label includes keys
713
+ """
714
+ exception_msg = "draw_networkx_edge_labels does not support multiedges"
715
+ G = nx.MultiGraph()
716
+ G.add_edge(0, 1, weight=10)
717
+ G.add_edge(0, 1, weight=20)
718
+ edge_labels = nx.get_edge_attributes(G, "weight") # Includes edge keys
719
+ pos = {n: (n, n) for n in G}
720
+ with pytest.raises(nx.NetworkXError, match=exception_msg):
721
+ nx.draw_networkx_edge_labels(G, pos, edge_labels=edge_labels)
722
+
723
+
724
+ def test_draw_networkx_edge_label_empty_dict():
725
+ """Regression test for draw_networkx_edge_labels with empty dict. See
726
+ gh-5372."""
727
+ G = nx.path_graph(3)
728
+ pos = {n: (n, n) for n in G.nodes}
729
+ assert nx.draw_networkx_edge_labels(G, pos, edge_labels={}) == {}
730
+
731
+
732
+ def test_draw_networkx_edges_undirected_selfloop_colors():
733
+ """When an edgelist is supplied along with a sequence of colors, check that
734
+ the self-loops have the correct colors."""
735
+ fig, ax = plt.subplots()
736
+ # Edge list and corresponding colors
737
+ edgelist = [(1, 3), (1, 2), (2, 3), (1, 1), (3, 3), (2, 2)]
738
+ edge_colors = ["pink", "cyan", "black", "red", "blue", "green"]
739
+
740
+ G = nx.Graph(edgelist)
741
+ pos = {n: (n, n) for n in G.nodes}
742
+ nx.draw_networkx_edges(G, pos, ax=ax, edgelist=edgelist, edge_color=edge_colors)
743
+
744
+ # Verify that there are three fancy arrow patches (1 per self loop)
745
+ assert len(ax.patches) == 3
746
+
747
+ # These are points that should be contained in the self loops. For example,
748
+ # sl_points[0] will be (1, 1.1), which is inside the "path" of the first
749
+ # self-loop but outside the others
750
+ sl_points = np.array(edgelist[-3:]) + np.array([0, 0.1])
751
+
752
+ # Check that the mapping between self-loop locations and their colors is
753
+ # correct
754
+ for fap, clr, slp in zip(ax.patches, edge_colors[-3:], sl_points):
755
+ assert fap.get_path().contains_point(slp)
756
+ assert mpl.colors.same_color(fap.get_edgecolor(), clr)
757
+ plt.delaxes(ax)
758
+
759
+
760
+ @pytest.mark.parametrize(
761
+ "fap_only_kwarg", # Non-default values for kwargs that only apply to FAPs
762
+ (
763
+ {"arrowstyle": "-"},
764
+ {"arrowsize": 20},
765
+ {"connectionstyle": "arc3,rad=0.2"},
766
+ {"min_source_margin": 10},
767
+ {"min_target_margin": 10},
768
+ ),
769
+ )
770
+ def test_user_warnings_for_unused_edge_drawing_kwargs(fap_only_kwarg):
771
+ """Users should get a warning when they specify a non-default value for
772
+ one of the kwargs that applies only to edges drawn with FancyArrowPatches,
773
+ but FancyArrowPatches aren't being used under the hood."""
774
+ G = nx.path_graph(3)
775
+ pos = {n: (n, n) for n in G}
776
+ fig, ax = plt.subplots()
777
+ # By default, an undirected graph will use LineCollection to represent
778
+ # the edges
779
+ kwarg_name = list(fap_only_kwarg.keys())[0]
780
+ with pytest.warns(
781
+ UserWarning, match=f"\n\nThe {kwarg_name} keyword argument is not applicable"
782
+ ):
783
+ nx.draw_networkx_edges(G, pos, ax=ax, **fap_only_kwarg)
784
+ # FancyArrowPatches are always used when `arrows=True` is specified.
785
+ # Check that warnings are *not* raised in this case
786
+ with warnings.catch_warnings():
787
+ # Escalate warnings -> errors so tests fail if warnings are raised
788
+ warnings.simplefilter("error")
789
+ nx.draw_networkx_edges(G, pos, ax=ax, arrows=True, **fap_only_kwarg)
790
+
791
+ plt.delaxes(ax)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/generators/tests/__pycache__/test_classic.cpython-311.pyc ADDED
Binary file (46.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/generators/tests/__pycache__/test_cographs.cpython-311.pyc ADDED
Binary file (1.11 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (215 Bytes). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/__pycache__/test_lazy_imports.cpython-311.pyc ADDED
Binary file (4.22 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/test_convert_numpy.py ADDED
@@ -0,0 +1,395 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ np = pytest.importorskip("numpy")
4
+ npt = pytest.importorskip("numpy.testing")
5
+
6
+ import networkx as nx
7
+ from networkx.generators.classic import barbell_graph, cycle_graph, path_graph
8
+ from networkx.utils import graphs_equal
9
+
10
+
11
+ class TestConvertNumpyArray:
12
+ def setup_method(self):
13
+ self.G1 = barbell_graph(10, 3)
14
+ self.G2 = cycle_graph(10, create_using=nx.DiGraph)
15
+ self.G3 = self.create_weighted(nx.Graph())
16
+ self.G4 = self.create_weighted(nx.DiGraph())
17
+
18
+ def create_weighted(self, G):
19
+ g = cycle_graph(4)
20
+ G.add_nodes_from(g)
21
+ G.add_weighted_edges_from((u, v, 10 + u) for u, v in g.edges())
22
+ return G
23
+
24
+ def assert_equal(self, G1, G2):
25
+ assert sorted(G1.nodes()) == sorted(G2.nodes())
26
+ assert sorted(G1.edges()) == sorted(G2.edges())
27
+
28
+ def identity_conversion(self, G, A, create_using):
29
+ assert A.sum() > 0
30
+ GG = nx.from_numpy_array(A, create_using=create_using)
31
+ self.assert_equal(G, GG)
32
+ GW = nx.to_networkx_graph(A, create_using=create_using)
33
+ self.assert_equal(G, GW)
34
+ GI = nx.empty_graph(0, create_using).__class__(A)
35
+ self.assert_equal(G, GI)
36
+
37
+ def test_shape(self):
38
+ "Conversion from non-square array."
39
+ A = np.array([[1, 2, 3], [4, 5, 6]])
40
+ pytest.raises(nx.NetworkXError, nx.from_numpy_array, A)
41
+
42
+ def test_identity_graph_array(self):
43
+ "Conversion from graph to array to graph."
44
+ A = nx.to_numpy_array(self.G1)
45
+ self.identity_conversion(self.G1, A, nx.Graph())
46
+
47
+ def test_identity_digraph_array(self):
48
+ """Conversion from digraph to array to digraph."""
49
+ A = nx.to_numpy_array(self.G2)
50
+ self.identity_conversion(self.G2, A, nx.DiGraph())
51
+
52
+ def test_identity_weighted_graph_array(self):
53
+ """Conversion from weighted graph to array to weighted graph."""
54
+ A = nx.to_numpy_array(self.G3)
55
+ self.identity_conversion(self.G3, A, nx.Graph())
56
+
57
+ def test_identity_weighted_digraph_array(self):
58
+ """Conversion from weighted digraph to array to weighted digraph."""
59
+ A = nx.to_numpy_array(self.G4)
60
+ self.identity_conversion(self.G4, A, nx.DiGraph())
61
+
62
+ def test_nodelist(self):
63
+ """Conversion from graph to array to graph with nodelist."""
64
+ P4 = path_graph(4)
65
+ P3 = path_graph(3)
66
+ nodelist = list(P3)
67
+ A = nx.to_numpy_array(P4, nodelist=nodelist)
68
+ GA = nx.Graph(A)
69
+ self.assert_equal(GA, P3)
70
+
71
+ # Make nodelist ambiguous by containing duplicates.
72
+ nodelist += [nodelist[0]]
73
+ pytest.raises(nx.NetworkXError, nx.to_numpy_array, P3, nodelist=nodelist)
74
+
75
+ # Make nodelist invalid by including nonexistent nodes
76
+ nodelist = [-1, 0, 1]
77
+ with pytest.raises(
78
+ nx.NetworkXError,
79
+ match=f"Nodes {nodelist - P3.nodes} in nodelist is not in G",
80
+ ):
81
+ nx.to_numpy_array(P3, nodelist=nodelist)
82
+
83
+ def test_weight_keyword(self):
84
+ WP4 = nx.Graph()
85
+ WP4.add_edges_from((n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3))
86
+ P4 = path_graph(4)
87
+ A = nx.to_numpy_array(P4)
88
+ np.testing.assert_equal(A, nx.to_numpy_array(WP4, weight=None))
89
+ np.testing.assert_equal(0.5 * A, nx.to_numpy_array(WP4))
90
+ np.testing.assert_equal(0.3 * A, nx.to_numpy_array(WP4, weight="other"))
91
+
92
+ def test_from_numpy_array_type(self):
93
+ A = np.array([[1]])
94
+ G = nx.from_numpy_array(A)
95
+ assert type(G[0][0]["weight"]) == int
96
+
97
+ A = np.array([[1]]).astype(float)
98
+ G = nx.from_numpy_array(A)
99
+ assert type(G[0][0]["weight"]) == float
100
+
101
+ A = np.array([[1]]).astype(str)
102
+ G = nx.from_numpy_array(A)
103
+ assert type(G[0][0]["weight"]) == str
104
+
105
+ A = np.array([[1]]).astype(bool)
106
+ G = nx.from_numpy_array(A)
107
+ assert type(G[0][0]["weight"]) == bool
108
+
109
+ A = np.array([[1]]).astype(complex)
110
+ G = nx.from_numpy_array(A)
111
+ assert type(G[0][0]["weight"]) == complex
112
+
113
+ A = np.array([[1]]).astype(object)
114
+ pytest.raises(TypeError, nx.from_numpy_array, A)
115
+
116
+ A = np.array([[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]])
117
+ with pytest.raises(
118
+ nx.NetworkXError, match=f"Input array must be 2D, not {A.ndim}"
119
+ ):
120
+ g = nx.from_numpy_array(A)
121
+
122
+ def test_from_numpy_array_dtype(self):
123
+ dt = [("weight", float), ("cost", int)]
124
+ A = np.array([[(1.0, 2)]], dtype=dt)
125
+ G = nx.from_numpy_array(A)
126
+ assert type(G[0][0]["weight"]) == float
127
+ assert type(G[0][0]["cost"]) == int
128
+ assert G[0][0]["cost"] == 2
129
+ assert G[0][0]["weight"] == 1.0
130
+
131
+ def test_from_numpy_array_parallel_edges(self):
132
+ """Tests that the :func:`networkx.from_numpy_array` function
133
+ interprets integer weights as the number of parallel edges when
134
+ creating a multigraph.
135
+
136
+ """
137
+ A = np.array([[1, 1], [1, 2]])
138
+ # First, with a simple graph, each integer entry in the adjacency
139
+ # matrix is interpreted as the weight of a single edge in the graph.
140
+ expected = nx.DiGraph()
141
+ edges = [(0, 0), (0, 1), (1, 0)]
142
+ expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges])
143
+ expected.add_edge(1, 1, weight=2)
144
+ actual = nx.from_numpy_array(A, parallel_edges=True, create_using=nx.DiGraph)
145
+ assert graphs_equal(actual, expected)
146
+ actual = nx.from_numpy_array(A, parallel_edges=False, create_using=nx.DiGraph)
147
+ assert graphs_equal(actual, expected)
148
+ # Now each integer entry in the adjacency matrix is interpreted as the
149
+ # number of parallel edges in the graph if the appropriate keyword
150
+ # argument is specified.
151
+ edges = [(0, 0), (0, 1), (1, 0), (1, 1), (1, 1)]
152
+ expected = nx.MultiDiGraph()
153
+ expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges])
154
+ actual = nx.from_numpy_array(
155
+ A, parallel_edges=True, create_using=nx.MultiDiGraph
156
+ )
157
+ assert graphs_equal(actual, expected)
158
+ expected = nx.MultiDiGraph()
159
+ expected.add_edges_from(set(edges), weight=1)
160
+ # The sole self-loop (edge 0) on vertex 1 should have weight 2.
161
+ expected[1][1][0]["weight"] = 2
162
+ actual = nx.from_numpy_array(
163
+ A, parallel_edges=False, create_using=nx.MultiDiGraph
164
+ )
165
+ assert graphs_equal(actual, expected)
166
+
167
+ @pytest.mark.parametrize(
168
+ "dt",
169
+ (
170
+ None, # default
171
+ int, # integer dtype
172
+ np.dtype(
173
+ [("weight", "f8"), ("color", "i1")]
174
+ ), # Structured dtype with named fields
175
+ ),
176
+ )
177
+ def test_from_numpy_array_no_edge_attr(self, dt):
178
+ A = np.array([[0, 1], [1, 0]], dtype=dt)
179
+ G = nx.from_numpy_array(A, edge_attr=None)
180
+ assert "weight" not in G.edges[0, 1]
181
+ assert len(G.edges[0, 1]) == 0
182
+
183
+ def test_from_numpy_array_multiedge_no_edge_attr(self):
184
+ A = np.array([[0, 2], [2, 0]])
185
+ G = nx.from_numpy_array(A, create_using=nx.MultiDiGraph, edge_attr=None)
186
+ assert all("weight" not in e for _, e in G[0][1].items())
187
+ assert len(G[0][1][0]) == 0
188
+
189
+ def test_from_numpy_array_custom_edge_attr(self):
190
+ A = np.array([[0, 2], [3, 0]])
191
+ G = nx.from_numpy_array(A, edge_attr="cost")
192
+ assert "weight" not in G.edges[0, 1]
193
+ assert G.edges[0, 1]["cost"] == 3
194
+
195
+ def test_symmetric(self):
196
+ """Tests that a symmetric array has edges added only once to an
197
+ undirected multigraph when using :func:`networkx.from_numpy_array`.
198
+
199
+ """
200
+ A = np.array([[0, 1], [1, 0]])
201
+ G = nx.from_numpy_array(A, create_using=nx.MultiGraph)
202
+ expected = nx.MultiGraph()
203
+ expected.add_edge(0, 1, weight=1)
204
+ assert graphs_equal(G, expected)
205
+
206
+ def test_dtype_int_graph(self):
207
+ """Test that setting dtype int actually gives an integer array.
208
+
209
+ For more information, see GitHub pull request #1363.
210
+
211
+ """
212
+ G = nx.complete_graph(3)
213
+ A = nx.to_numpy_array(G, dtype=int)
214
+ assert A.dtype == int
215
+
216
+ def test_dtype_int_multigraph(self):
217
+ """Test that setting dtype int actually gives an integer array.
218
+
219
+ For more information, see GitHub pull request #1363.
220
+
221
+ """
222
+ G = nx.MultiGraph(nx.complete_graph(3))
223
+ A = nx.to_numpy_array(G, dtype=int)
224
+ assert A.dtype == int
225
+
226
+
227
+ @pytest.fixture
228
+ def multigraph_test_graph():
229
+ G = nx.MultiGraph()
230
+ G.add_edge(1, 2, weight=7)
231
+ G.add_edge(1, 2, weight=70)
232
+ return G
233
+
234
+
235
+ @pytest.mark.parametrize(("operator", "expected"), ((sum, 77), (min, 7), (max, 70)))
236
+ def test_numpy_multigraph(multigraph_test_graph, operator, expected):
237
+ A = nx.to_numpy_array(multigraph_test_graph, multigraph_weight=operator)
238
+ assert A[1, 0] == expected
239
+
240
+
241
+ def test_to_numpy_array_multigraph_nodelist(multigraph_test_graph):
242
+ G = multigraph_test_graph
243
+ G.add_edge(0, 1, weight=3)
244
+ A = nx.to_numpy_array(G, nodelist=[1, 2])
245
+ assert A.shape == (2, 2)
246
+ assert A[1, 0] == 77
247
+
248
+
249
+ @pytest.mark.parametrize(
250
+ "G, expected",
251
+ [
252
+ (nx.Graph(), np.array([[0, 1 + 2j], [1 + 2j, 0]], dtype=complex)),
253
+ (nx.DiGraph(), np.array([[0, 1 + 2j], [0, 0]], dtype=complex)),
254
+ ],
255
+ )
256
+ def test_to_numpy_array_complex_weights(G, expected):
257
+ G.add_edge(0, 1, weight=1 + 2j)
258
+ A = nx.to_numpy_array(G, dtype=complex)
259
+ npt.assert_array_equal(A, expected)
260
+
261
+
262
+ def test_to_numpy_array_arbitrary_weights():
263
+ G = nx.DiGraph()
264
+ w = 922337203685477580102 # Out of range for int64
265
+ G.add_edge(0, 1, weight=922337203685477580102) # val not representable by int64
266
+ A = nx.to_numpy_array(G, dtype=object)
267
+ expected = np.array([[0, w], [0, 0]], dtype=object)
268
+ npt.assert_array_equal(A, expected)
269
+
270
+ # Undirected
271
+ A = nx.to_numpy_array(G.to_undirected(), dtype=object)
272
+ expected = np.array([[0, w], [w, 0]], dtype=object)
273
+ npt.assert_array_equal(A, expected)
274
+
275
+
276
+ @pytest.mark.parametrize(
277
+ "func, expected",
278
+ ((min, -1), (max, 10), (sum, 11), (np.mean, 11 / 3), (np.median, 2)),
279
+ )
280
+ def test_to_numpy_array_multiweight_reduction(func, expected):
281
+ """Test various functions for reducing multiedge weights."""
282
+ G = nx.MultiDiGraph()
283
+ weights = [-1, 2, 10.0]
284
+ for w in weights:
285
+ G.add_edge(0, 1, weight=w)
286
+ A = nx.to_numpy_array(G, multigraph_weight=func, dtype=float)
287
+ assert np.allclose(A, [[0, expected], [0, 0]])
288
+
289
+ # Undirected case
290
+ A = nx.to_numpy_array(G.to_undirected(), multigraph_weight=func, dtype=float)
291
+ assert np.allclose(A, [[0, expected], [expected, 0]])
292
+
293
+
294
+ @pytest.mark.parametrize(
295
+ ("G, expected"),
296
+ [
297
+ (nx.Graph(), [[(0, 0), (10, 5)], [(10, 5), (0, 0)]]),
298
+ (nx.DiGraph(), [[(0, 0), (10, 5)], [(0, 0), (0, 0)]]),
299
+ ],
300
+ )
301
+ def test_to_numpy_array_structured_dtype_attrs_from_fields(G, expected):
302
+ """When `dtype` is structured (i.e. has names) and `weight` is None, use
303
+ the named fields of the dtype to look up edge attributes."""
304
+ G.add_edge(0, 1, weight=10, cost=5.0)
305
+ dtype = np.dtype([("weight", int), ("cost", int)])
306
+ A = nx.to_numpy_array(G, dtype=dtype, weight=None)
307
+ expected = np.asarray(expected, dtype=dtype)
308
+ npt.assert_array_equal(A, expected)
309
+
310
+
311
+ def test_to_numpy_array_structured_dtype_single_attr_default():
312
+ G = nx.path_graph(3)
313
+ dtype = np.dtype([("weight", float)]) # A single named field
314
+ A = nx.to_numpy_array(G, dtype=dtype, weight=None)
315
+ expected = np.array([[0, 1, 0], [1, 0, 1], [0, 1, 0]], dtype=float)
316
+ npt.assert_array_equal(A["weight"], expected)
317
+
318
+
319
+ @pytest.mark.parametrize(
320
+ ("field_name", "expected_attr_val"),
321
+ [
322
+ ("weight", 1),
323
+ ("cost", 3),
324
+ ],
325
+ )
326
+ def test_to_numpy_array_structured_dtype_single_attr(field_name, expected_attr_val):
327
+ G = nx.Graph()
328
+ G.add_edge(0, 1, cost=3)
329
+ dtype = np.dtype([(field_name, float)])
330
+ A = nx.to_numpy_array(G, dtype=dtype, weight=None)
331
+ expected = np.array([[0, expected_attr_val], [expected_attr_val, 0]], dtype=float)
332
+ npt.assert_array_equal(A[field_name], expected)
333
+
334
+
335
+ @pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph))
336
+ @pytest.mark.parametrize(
337
+ "edge",
338
+ [
339
+ (0, 1), # No edge attributes
340
+ (0, 1, {"weight": 10}), # One edge attr
341
+ (0, 1, {"weight": 5, "flow": -4}), # Multiple but not all edge attrs
342
+ (0, 1, {"weight": 2.0, "cost": 10, "flow": -45}), # All attrs
343
+ ],
344
+ )
345
+ def test_to_numpy_array_structured_dtype_multiple_fields(graph_type, edge):
346
+ G = graph_type([edge])
347
+ dtype = np.dtype([("weight", float), ("cost", float), ("flow", float)])
348
+ A = nx.to_numpy_array(G, dtype=dtype, weight=None)
349
+ for attr in dtype.names:
350
+ expected = nx.to_numpy_array(G, dtype=float, weight=attr)
351
+ npt.assert_array_equal(A[attr], expected)
352
+
353
+
354
+ @pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph()))
355
+ def test_to_numpy_array_structured_dtype_scalar_nonedge(G):
356
+ G.add_edge(0, 1, weight=10)
357
+ dtype = np.dtype([("weight", float), ("cost", float)])
358
+ A = nx.to_numpy_array(G, dtype=dtype, weight=None, nonedge=np.nan)
359
+ for attr in dtype.names:
360
+ expected = nx.to_numpy_array(G, dtype=float, weight=attr, nonedge=np.nan)
361
+ npt.assert_array_equal(A[attr], expected)
362
+
363
+
364
+ @pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph()))
365
+ def test_to_numpy_array_structured_dtype_nonedge_ary(G):
366
+ """Similar to the scalar case, except has a different non-edge value for
367
+ each named field."""
368
+ G.add_edge(0, 1, weight=10)
369
+ dtype = np.dtype([("weight", float), ("cost", float)])
370
+ nonedges = np.array([(0, np.inf)], dtype=dtype)
371
+ A = nx.to_numpy_array(G, dtype=dtype, weight=None, nonedge=nonedges)
372
+ for attr in dtype.names:
373
+ nonedge = nonedges[attr]
374
+ expected = nx.to_numpy_array(G, dtype=float, weight=attr, nonedge=nonedge)
375
+ npt.assert_array_equal(A[attr], expected)
376
+
377
+
378
+ def test_to_numpy_array_structured_dtype_with_weight_raises():
379
+ """Using both a structured dtype (with named fields) and specifying a `weight`
380
+ parameter is ambiguous."""
381
+ G = nx.path_graph(3)
382
+ dtype = np.dtype([("weight", int), ("cost", int)])
383
+ exception_msg = "Specifying `weight` not supported for structured dtypes"
384
+ with pytest.raises(ValueError, match=exception_msg):
385
+ nx.to_numpy_array(G, dtype=dtype) # Default is weight="weight"
386
+ with pytest.raises(ValueError, match=exception_msg):
387
+ nx.to_numpy_array(G, dtype=dtype, weight="cost")
388
+
389
+
390
+ @pytest.mark.parametrize("graph_type", (nx.MultiGraph, nx.MultiDiGraph))
391
+ def test_to_numpy_array_structured_multigraph_raises(graph_type):
392
+ G = nx.path_graph(3, create_using=graph_type)
393
+ dtype = np.dtype([("weight", int), ("cost", int)])
394
+ with pytest.raises(nx.NetworkXError, match="Structured arrays are not supported"):
395
+ nx.to_numpy_array(G, dtype=dtype, weight=None)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/test_exceptions.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+
5
+ # smoke tests for exceptions
6
+
7
+
8
+ def test_raises_networkxexception():
9
+ with pytest.raises(nx.NetworkXException):
10
+ raise nx.NetworkXException
11
+
12
+
13
+ def test_raises_networkxerr():
14
+ with pytest.raises(nx.NetworkXError):
15
+ raise nx.NetworkXError
16
+
17
+
18
+ def test_raises_networkx_pointless_concept():
19
+ with pytest.raises(nx.NetworkXPointlessConcept):
20
+ raise nx.NetworkXPointlessConcept
21
+
22
+
23
+ def test_raises_networkxalgorithmerr():
24
+ with pytest.raises(nx.NetworkXAlgorithmError):
25
+ raise nx.NetworkXAlgorithmError
26
+
27
+
28
+ def test_raises_networkx_unfeasible():
29
+ with pytest.raises(nx.NetworkXUnfeasible):
30
+ raise nx.NetworkXUnfeasible
31
+
32
+
33
+ def test_raises_networkx_no_path():
34
+ with pytest.raises(nx.NetworkXNoPath):
35
+ raise nx.NetworkXNoPath
36
+
37
+
38
+ def test_raises_networkx_unbounded():
39
+ with pytest.raises(nx.NetworkXUnbounded):
40
+ raise nx.NetworkXUnbounded
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/test_relabel.py ADDED
@@ -0,0 +1,347 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.generators.classic import empty_graph
5
+ from networkx.utils import edges_equal, nodes_equal
6
+
7
+
8
+ class TestRelabel:
9
+ def test_convert_node_labels_to_integers(self):
10
+ # test that empty graph converts fine for all options
11
+ G = empty_graph()
12
+ H = nx.convert_node_labels_to_integers(G, 100)
13
+ assert list(H.nodes()) == []
14
+ assert list(H.edges()) == []
15
+
16
+ for opt in ["default", "sorted", "increasing degree", "decreasing degree"]:
17
+ G = empty_graph()
18
+ H = nx.convert_node_labels_to_integers(G, 100, ordering=opt)
19
+ assert list(H.nodes()) == []
20
+ assert list(H.edges()) == []
21
+
22
+ G = empty_graph()
23
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
24
+ H = nx.convert_node_labels_to_integers(G)
25
+ degH = (d for n, d in H.degree())
26
+ degG = (d for n, d in G.degree())
27
+ assert sorted(degH) == sorted(degG)
28
+
29
+ H = nx.convert_node_labels_to_integers(G, 1000)
30
+ degH = (d for n, d in H.degree())
31
+ degG = (d for n, d in G.degree())
32
+ assert sorted(degH) == sorted(degG)
33
+ assert nodes_equal(H.nodes(), [1000, 1001, 1002, 1003])
34
+
35
+ H = nx.convert_node_labels_to_integers(G, ordering="increasing degree")
36
+ degH = (d for n, d in H.degree())
37
+ degG = (d for n, d in G.degree())
38
+ assert sorted(degH) == sorted(degG)
39
+ assert H.degree(0) == 1
40
+ assert H.degree(1) == 2
41
+ assert H.degree(2) == 2
42
+ assert H.degree(3) == 3
43
+
44
+ H = nx.convert_node_labels_to_integers(G, ordering="decreasing degree")
45
+ degH = (d for n, d in H.degree())
46
+ degG = (d for n, d in G.degree())
47
+ assert sorted(degH) == sorted(degG)
48
+ assert H.degree(0) == 3
49
+ assert H.degree(1) == 2
50
+ assert H.degree(2) == 2
51
+ assert H.degree(3) == 1
52
+
53
+ H = nx.convert_node_labels_to_integers(
54
+ G, ordering="increasing degree", label_attribute="label"
55
+ )
56
+ degH = (d for n, d in H.degree())
57
+ degG = (d for n, d in G.degree())
58
+ assert sorted(degH) == sorted(degG)
59
+ assert H.degree(0) == 1
60
+ assert H.degree(1) == 2
61
+ assert H.degree(2) == 2
62
+ assert H.degree(3) == 3
63
+
64
+ # check mapping
65
+ assert H.nodes[3]["label"] == "C"
66
+ assert H.nodes[0]["label"] == "D"
67
+ assert H.nodes[1]["label"] == "A" or H.nodes[2]["label"] == "A"
68
+ assert H.nodes[1]["label"] == "B" or H.nodes[2]["label"] == "B"
69
+
70
+ def test_convert_to_integers2(self):
71
+ G = empty_graph()
72
+ G.add_edges_from([("C", "D"), ("A", "B"), ("A", "C"), ("B", "C")])
73
+ H = nx.convert_node_labels_to_integers(G, ordering="sorted")
74
+ degH = (d for n, d in H.degree())
75
+ degG = (d for n, d in G.degree())
76
+ assert sorted(degH) == sorted(degG)
77
+
78
+ H = nx.convert_node_labels_to_integers(
79
+ G, ordering="sorted", label_attribute="label"
80
+ )
81
+ assert H.nodes[0]["label"] == "A"
82
+ assert H.nodes[1]["label"] == "B"
83
+ assert H.nodes[2]["label"] == "C"
84
+ assert H.nodes[3]["label"] == "D"
85
+
86
+ def test_convert_to_integers_raise(self):
87
+ with pytest.raises(nx.NetworkXError):
88
+ G = nx.Graph()
89
+ H = nx.convert_node_labels_to_integers(G, ordering="increasing age")
90
+
91
+ def test_relabel_nodes_copy(self):
92
+ G = nx.empty_graph()
93
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
94
+ mapping = {"A": "aardvark", "B": "bear", "C": "cat", "D": "dog"}
95
+ H = nx.relabel_nodes(G, mapping)
96
+ assert nodes_equal(H.nodes(), ["aardvark", "bear", "cat", "dog"])
97
+
98
+ def test_relabel_nodes_function(self):
99
+ G = nx.empty_graph()
100
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
101
+ # function mapping no longer encouraged but works
102
+
103
+ def mapping(n):
104
+ return ord(n)
105
+
106
+ H = nx.relabel_nodes(G, mapping)
107
+ assert nodes_equal(H.nodes(), [65, 66, 67, 68])
108
+
109
+ def test_relabel_nodes_callable_type(self):
110
+ G = nx.path_graph(4)
111
+ H = nx.relabel_nodes(G, str)
112
+ assert nodes_equal(H.nodes, ["0", "1", "2", "3"])
113
+
114
+ @pytest.mark.parametrize("non_mc", ("0123", ["0", "1", "2", "3"]))
115
+ def test_relabel_nodes_non_mapping_or_callable(self, non_mc):
116
+ """If `mapping` is neither a Callable or a Mapping, an exception
117
+ should be raised."""
118
+ G = nx.path_graph(4)
119
+ with pytest.raises(AttributeError):
120
+ nx.relabel_nodes(G, non_mc)
121
+
122
+ def test_relabel_nodes_graph(self):
123
+ G = nx.Graph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
124
+ mapping = {"A": "aardvark", "B": "bear", "C": "cat", "D": "dog"}
125
+ H = nx.relabel_nodes(G, mapping)
126
+ assert nodes_equal(H.nodes(), ["aardvark", "bear", "cat", "dog"])
127
+
128
+ def test_relabel_nodes_orderedgraph(self):
129
+ G = nx.Graph()
130
+ G.add_nodes_from([1, 2, 3])
131
+ G.add_edges_from([(1, 3), (2, 3)])
132
+ mapping = {1: "a", 2: "b", 3: "c"}
133
+ H = nx.relabel_nodes(G, mapping)
134
+ assert list(H.nodes) == ["a", "b", "c"]
135
+
136
+ def test_relabel_nodes_digraph(self):
137
+ G = nx.DiGraph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
138
+ mapping = {"A": "aardvark", "B": "bear", "C": "cat", "D": "dog"}
139
+ H = nx.relabel_nodes(G, mapping, copy=False)
140
+ assert nodes_equal(H.nodes(), ["aardvark", "bear", "cat", "dog"])
141
+
142
+ def test_relabel_nodes_multigraph(self):
143
+ G = nx.MultiGraph([("a", "b"), ("a", "b")])
144
+ mapping = {"a": "aardvark", "b": "bear"}
145
+ G = nx.relabel_nodes(G, mapping, copy=False)
146
+ assert nodes_equal(G.nodes(), ["aardvark", "bear"])
147
+ assert edges_equal(G.edges(), [("aardvark", "bear"), ("aardvark", "bear")])
148
+
149
+ def test_relabel_nodes_multidigraph(self):
150
+ G = nx.MultiDiGraph([("a", "b"), ("a", "b")])
151
+ mapping = {"a": "aardvark", "b": "bear"}
152
+ G = nx.relabel_nodes(G, mapping, copy=False)
153
+ assert nodes_equal(G.nodes(), ["aardvark", "bear"])
154
+ assert edges_equal(G.edges(), [("aardvark", "bear"), ("aardvark", "bear")])
155
+
156
+ def test_relabel_isolated_nodes_to_same(self):
157
+ G = nx.Graph()
158
+ G.add_nodes_from(range(4))
159
+ mapping = {1: 1}
160
+ H = nx.relabel_nodes(G, mapping, copy=False)
161
+ assert nodes_equal(H.nodes(), list(range(4)))
162
+
163
+ def test_relabel_nodes_missing(self):
164
+ G = nx.Graph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
165
+ mapping = {0: "aardvark"}
166
+ # copy=True
167
+ H = nx.relabel_nodes(G, mapping, copy=True)
168
+ assert nodes_equal(H.nodes, G.nodes)
169
+ # copy=False
170
+ GG = G.copy()
171
+ nx.relabel_nodes(G, mapping, copy=False)
172
+ assert nodes_equal(G.nodes, GG.nodes)
173
+
174
+ def test_relabel_copy_name(self):
175
+ G = nx.Graph()
176
+ H = nx.relabel_nodes(G, {}, copy=True)
177
+ assert H.graph == G.graph
178
+ H = nx.relabel_nodes(G, {}, copy=False)
179
+ assert H.graph == G.graph
180
+ G.name = "first"
181
+ H = nx.relabel_nodes(G, {}, copy=True)
182
+ assert H.graph == G.graph
183
+ H = nx.relabel_nodes(G, {}, copy=False)
184
+ assert H.graph == G.graph
185
+
186
+ def test_relabel_toposort(self):
187
+ K5 = nx.complete_graph(4)
188
+ G = nx.complete_graph(4)
189
+ G = nx.relabel_nodes(G, {i: i + 1 for i in range(4)}, copy=False)
190
+ assert nx.is_isomorphic(K5, G)
191
+ G = nx.complete_graph(4)
192
+ G = nx.relabel_nodes(G, {i: i - 1 for i in range(4)}, copy=False)
193
+ assert nx.is_isomorphic(K5, G)
194
+
195
+ def test_relabel_selfloop(self):
196
+ G = nx.DiGraph([(1, 1), (1, 2), (2, 3)])
197
+ G = nx.relabel_nodes(G, {1: "One", 2: "Two", 3: "Three"}, copy=False)
198
+ assert nodes_equal(G.nodes(), ["One", "Three", "Two"])
199
+ G = nx.MultiDiGraph([(1, 1), (1, 2), (2, 3)])
200
+ G = nx.relabel_nodes(G, {1: "One", 2: "Two", 3: "Three"}, copy=False)
201
+ assert nodes_equal(G.nodes(), ["One", "Three", "Two"])
202
+ G = nx.MultiDiGraph([(1, 1)])
203
+ G = nx.relabel_nodes(G, {1: 0}, copy=False)
204
+ assert nodes_equal(G.nodes(), [0])
205
+
206
+ def test_relabel_multidigraph_inout_merge_nodes(self):
207
+ for MG in (nx.MultiGraph, nx.MultiDiGraph):
208
+ for cc in (True, False):
209
+ G = MG([(0, 4), (1, 4), (4, 2), (4, 3)])
210
+ G[0][4][0]["value"] = "a"
211
+ G[1][4][0]["value"] = "b"
212
+ G[4][2][0]["value"] = "c"
213
+ G[4][3][0]["value"] = "d"
214
+ G.add_edge(0, 4, key="x", value="e")
215
+ G.add_edge(4, 3, key="x", value="f")
216
+ mapping = {0: 9, 1: 9, 2: 9, 3: 9}
217
+ H = nx.relabel_nodes(G, mapping, copy=cc)
218
+ # No ordering on keys enforced
219
+ assert {"value": "a"} in H[9][4].values()
220
+ assert {"value": "b"} in H[9][4].values()
221
+ assert {"value": "c"} in H[4][9].values()
222
+ assert len(H[4][9]) == 3 if G.is_directed() else 6
223
+ assert {"value": "d"} in H[4][9].values()
224
+ assert {"value": "e"} in H[9][4].values()
225
+ assert {"value": "f"} in H[4][9].values()
226
+ assert len(H[9][4]) == 3 if G.is_directed() else 6
227
+
228
+ def test_relabel_multigraph_merge_inplace(self):
229
+ G = nx.MultiGraph([(0, 1), (0, 2), (0, 3), (0, 1), (0, 2), (0, 3)])
230
+ G[0][1][0]["value"] = "a"
231
+ G[0][2][0]["value"] = "b"
232
+ G[0][3][0]["value"] = "c"
233
+ mapping = {1: 4, 2: 4, 3: 4}
234
+ nx.relabel_nodes(G, mapping, copy=False)
235
+ # No ordering on keys enforced
236
+ assert {"value": "a"} in G[0][4].values()
237
+ assert {"value": "b"} in G[0][4].values()
238
+ assert {"value": "c"} in G[0][4].values()
239
+
240
+ def test_relabel_multidigraph_merge_inplace(self):
241
+ G = nx.MultiDiGraph([(0, 1), (0, 2), (0, 3)])
242
+ G[0][1][0]["value"] = "a"
243
+ G[0][2][0]["value"] = "b"
244
+ G[0][3][0]["value"] = "c"
245
+ mapping = {1: 4, 2: 4, 3: 4}
246
+ nx.relabel_nodes(G, mapping, copy=False)
247
+ # No ordering on keys enforced
248
+ assert {"value": "a"} in G[0][4].values()
249
+ assert {"value": "b"} in G[0][4].values()
250
+ assert {"value": "c"} in G[0][4].values()
251
+
252
+ def test_relabel_multidigraph_inout_copy(self):
253
+ G = nx.MultiDiGraph([(0, 4), (1, 4), (4, 2), (4, 3)])
254
+ G[0][4][0]["value"] = "a"
255
+ G[1][4][0]["value"] = "b"
256
+ G[4][2][0]["value"] = "c"
257
+ G[4][3][0]["value"] = "d"
258
+ G.add_edge(0, 4, key="x", value="e")
259
+ G.add_edge(4, 3, key="x", value="f")
260
+ mapping = {0: 9, 1: 9, 2: 9, 3: 9}
261
+ H = nx.relabel_nodes(G, mapping, copy=True)
262
+ # No ordering on keys enforced
263
+ assert {"value": "a"} in H[9][4].values()
264
+ assert {"value": "b"} in H[9][4].values()
265
+ assert {"value": "c"} in H[4][9].values()
266
+ assert len(H[4][9]) == 3
267
+ assert {"value": "d"} in H[4][9].values()
268
+ assert {"value": "e"} in H[9][4].values()
269
+ assert {"value": "f"} in H[4][9].values()
270
+ assert len(H[9][4]) == 3
271
+
272
+ def test_relabel_multigraph_merge_copy(self):
273
+ G = nx.MultiGraph([(0, 1), (0, 2), (0, 3)])
274
+ G[0][1][0]["value"] = "a"
275
+ G[0][2][0]["value"] = "b"
276
+ G[0][3][0]["value"] = "c"
277
+ mapping = {1: 4, 2: 4, 3: 4}
278
+ H = nx.relabel_nodes(G, mapping, copy=True)
279
+ assert {"value": "a"} in H[0][4].values()
280
+ assert {"value": "b"} in H[0][4].values()
281
+ assert {"value": "c"} in H[0][4].values()
282
+
283
+ def test_relabel_multidigraph_merge_copy(self):
284
+ G = nx.MultiDiGraph([(0, 1), (0, 2), (0, 3)])
285
+ G[0][1][0]["value"] = "a"
286
+ G[0][2][0]["value"] = "b"
287
+ G[0][3][0]["value"] = "c"
288
+ mapping = {1: 4, 2: 4, 3: 4}
289
+ H = nx.relabel_nodes(G, mapping, copy=True)
290
+ assert {"value": "a"} in H[0][4].values()
291
+ assert {"value": "b"} in H[0][4].values()
292
+ assert {"value": "c"} in H[0][4].values()
293
+
294
+ def test_relabel_multigraph_nonnumeric_key(self):
295
+ for MG in (nx.MultiGraph, nx.MultiDiGraph):
296
+ for cc in (True, False):
297
+ G = nx.MultiGraph()
298
+ G.add_edge(0, 1, key="I", value="a")
299
+ G.add_edge(0, 2, key="II", value="b")
300
+ G.add_edge(0, 3, key="II", value="c")
301
+ mapping = {1: 4, 2: 4, 3: 4}
302
+ nx.relabel_nodes(G, mapping, copy=False)
303
+ assert {"value": "a"} in G[0][4].values()
304
+ assert {"value": "b"} in G[0][4].values()
305
+ assert {"value": "c"} in G[0][4].values()
306
+ assert 0 in G[0][4]
307
+ assert "I" in G[0][4]
308
+ assert "II" in G[0][4]
309
+
310
+ def test_relabel_circular(self):
311
+ G = nx.path_graph(3)
312
+ mapping = {0: 1, 1: 0}
313
+ H = nx.relabel_nodes(G, mapping, copy=True)
314
+ with pytest.raises(nx.NetworkXUnfeasible):
315
+ H = nx.relabel_nodes(G, mapping, copy=False)
316
+
317
+ def test_relabel_preserve_node_order_full_mapping_with_copy_true(self):
318
+ G = nx.path_graph(3)
319
+ original_order = list(G.nodes())
320
+ mapping = {2: "a", 1: "b", 0: "c"} # dictionary keys out of order on purpose
321
+ H = nx.relabel_nodes(G, mapping, copy=True)
322
+ new_order = list(H.nodes())
323
+ assert [mapping.get(i, i) for i in original_order] == new_order
324
+
325
+ def test_relabel_preserve_node_order_full_mapping_with_copy_false(self):
326
+ G = nx.path_graph(3)
327
+ original_order = list(G)
328
+ mapping = {2: "a", 1: "b", 0: "c"} # dictionary keys out of order on purpose
329
+ H = nx.relabel_nodes(G, mapping, copy=False)
330
+ new_order = list(H)
331
+ assert [mapping.get(i, i) for i in original_order] == new_order
332
+
333
+ def test_relabel_preserve_node_order_partial_mapping_with_copy_true(self):
334
+ G = nx.path_graph(3)
335
+ original_order = list(G)
336
+ mapping = {1: "a", 0: "b"} # partial mapping and keys out of order on purpose
337
+ H = nx.relabel_nodes(G, mapping, copy=True)
338
+ new_order = list(H)
339
+ assert [mapping.get(i, i) for i in original_order] == new_order
340
+
341
+ def test_relabel_preserve_node_order_partial_mapping_with_copy_false(self):
342
+ G = nx.path_graph(3)
343
+ original_order = list(G)
344
+ mapping = {1: "a", 0: "b"} # partial mapping and keys out of order on purpose
345
+ H = nx.relabel_nodes(G, mapping, copy=False)
346
+ new_order = list(H)
347
+ assert [mapping.get(i, i) for i in original_order] != new_order
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/__init__.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ from networkx.utils.misc import *
2
+ from networkx.utils.decorators import *
3
+ from networkx.utils.random_sequence import *
4
+ from networkx.utils.union_find import *
5
+ from networkx.utils.rcm import *
6
+ from networkx.utils.heaps import *
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/__pycache__/backends.cpython-311.pyc ADDED
Binary file (39.6 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/tests/__pycache__/test_misc.cpython-311.pyc ADDED
Binary file (18.8 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc ADDED
Binary file (16.4 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/index_command.cpython-311.pyc ADDED
Binary file (7.95 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc ADDED
Binary file (2.61 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc ADDED
Binary file (5.55 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc ADDED
Binary file (17.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc ADDED
Binary file (4.63 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc ADDED
Binary file (13.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc ADDED
Binary file (399 Bytes). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/autocompletion.py ADDED
@@ -0,0 +1,176 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Logic that powers autocompletion installed by ``pip completion``.
2
+ """
3
+
4
+ import optparse
5
+ import os
6
+ import sys
7
+ from itertools import chain
8
+ from typing import Any, Iterable, List, Optional
9
+
10
+ from pip._internal.cli.main_parser import create_main_parser
11
+ from pip._internal.commands import commands_dict, create_command
12
+ from pip._internal.metadata import get_default_environment
13
+
14
+
15
+ def autocomplete() -> None:
16
+ """Entry Point for completion of main and subcommand options."""
17
+ # Don't complete if user hasn't sourced bash_completion file.
18
+ if "PIP_AUTO_COMPLETE" not in os.environ:
19
+ return
20
+ # Don't complete if autocompletion environment variables
21
+ # are not present
22
+ if not os.environ.get("COMP_WORDS") or not os.environ.get("COMP_CWORD"):
23
+ return
24
+ cwords = os.environ["COMP_WORDS"].split()[1:]
25
+ cword = int(os.environ["COMP_CWORD"])
26
+ try:
27
+ current = cwords[cword - 1]
28
+ except IndexError:
29
+ current = ""
30
+
31
+ parser = create_main_parser()
32
+ subcommands = list(commands_dict)
33
+ options = []
34
+
35
+ # subcommand
36
+ subcommand_name: Optional[str] = None
37
+ for word in cwords:
38
+ if word in subcommands:
39
+ subcommand_name = word
40
+ break
41
+ # subcommand options
42
+ if subcommand_name is not None:
43
+ # special case: 'help' subcommand has no options
44
+ if subcommand_name == "help":
45
+ sys.exit(1)
46
+ # special case: list locally installed dists for show and uninstall
47
+ should_list_installed = not current.startswith("-") and subcommand_name in [
48
+ "show",
49
+ "uninstall",
50
+ ]
51
+ if should_list_installed:
52
+ env = get_default_environment()
53
+ lc = current.lower()
54
+ installed = [
55
+ dist.canonical_name
56
+ for dist in env.iter_installed_distributions(local_only=True)
57
+ if dist.canonical_name.startswith(lc)
58
+ and dist.canonical_name not in cwords[1:]
59
+ ]
60
+ # if there are no dists installed, fall back to option completion
61
+ if installed:
62
+ for dist in installed:
63
+ print(dist)
64
+ sys.exit(1)
65
+
66
+ should_list_installables = (
67
+ not current.startswith("-") and subcommand_name == "install"
68
+ )
69
+ if should_list_installables:
70
+ for path in auto_complete_paths(current, "path"):
71
+ print(path)
72
+ sys.exit(1)
73
+
74
+ subcommand = create_command(subcommand_name)
75
+
76
+ for opt in subcommand.parser.option_list_all:
77
+ if opt.help != optparse.SUPPRESS_HELP:
78
+ options += [
79
+ (opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts
80
+ ]
81
+
82
+ # filter out previously specified options from available options
83
+ prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
84
+ options = [(x, v) for (x, v) in options if x not in prev_opts]
85
+ # filter options by current input
86
+ options = [(k, v) for k, v in options if k.startswith(current)]
87
+ # get completion type given cwords and available subcommand options
88
+ completion_type = get_path_completion_type(
89
+ cwords,
90
+ cword,
91
+ subcommand.parser.option_list_all,
92
+ )
93
+ # get completion files and directories if ``completion_type`` is
94
+ # ``<file>``, ``<dir>`` or ``<path>``
95
+ if completion_type:
96
+ paths = auto_complete_paths(current, completion_type)
97
+ options = [(path, 0) for path in paths]
98
+ for option in options:
99
+ opt_label = option[0]
100
+ # append '=' to options which require args
101
+ if option[1] and option[0][:2] == "--":
102
+ opt_label += "="
103
+ print(opt_label)
104
+ else:
105
+ # show main parser options only when necessary
106
+
107
+ opts = [i.option_list for i in parser.option_groups]
108
+ opts.append(parser.option_list)
109
+ flattened_opts = chain.from_iterable(opts)
110
+ if current.startswith("-"):
111
+ for opt in flattened_opts:
112
+ if opt.help != optparse.SUPPRESS_HELP:
113
+ subcommands += opt._long_opts + opt._short_opts
114
+ else:
115
+ # get completion type given cwords and all available options
116
+ completion_type = get_path_completion_type(cwords, cword, flattened_opts)
117
+ if completion_type:
118
+ subcommands = list(auto_complete_paths(current, completion_type))
119
+
120
+ print(" ".join([x for x in subcommands if x.startswith(current)]))
121
+ sys.exit(1)
122
+
123
+
124
+ def get_path_completion_type(
125
+ cwords: List[str], cword: int, opts: Iterable[Any]
126
+ ) -> Optional[str]:
127
+ """Get the type of path completion (``file``, ``dir``, ``path`` or None)
128
+
129
+ :param cwords: same as the environmental variable ``COMP_WORDS``
130
+ :param cword: same as the environmental variable ``COMP_CWORD``
131
+ :param opts: The available options to check
132
+ :return: path completion type (``file``, ``dir``, ``path`` or None)
133
+ """
134
+ if cword < 2 or not cwords[cword - 2].startswith("-"):
135
+ return None
136
+ for opt in opts:
137
+ if opt.help == optparse.SUPPRESS_HELP:
138
+ continue
139
+ for o in str(opt).split("/"):
140
+ if cwords[cword - 2].split("=")[0] == o:
141
+ if not opt.metavar or any(
142
+ x in ("path", "file", "dir") for x in opt.metavar.split("/")
143
+ ):
144
+ return opt.metavar
145
+ return None
146
+
147
+
148
+ def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
149
+ """If ``completion_type`` is ``file`` or ``path``, list all regular files
150
+ and directories starting with ``current``; otherwise only list directories
151
+ starting with ``current``.
152
+
153
+ :param current: The word to be completed
154
+ :param completion_type: path completion type(``file``, ``path`` or ``dir``)
155
+ :return: A generator of regular files and/or directories
156
+ """
157
+ directory, filename = os.path.split(current)
158
+ current_path = os.path.abspath(directory)
159
+ # Don't complete paths if they can't be accessed
160
+ if not os.access(current_path, os.R_OK):
161
+ return
162
+ filename = os.path.normcase(filename)
163
+ # list all files that start with ``filename``
164
+ file_list = (
165
+ x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
166
+ )
167
+ for f in file_list:
168
+ opt = os.path.join(current_path, f)
169
+ comp_file = os.path.normcase(os.path.join(directory, f))
170
+ # complete regular files when there is not ``<dir>`` after option
171
+ # complete directories when there is ``<file>``, ``<path>`` or
172
+ # ``<dir>``after option
173
+ if completion_type != "dir" and os.path.isfile(opt):
174
+ yield comp_file
175
+ elif os.path.isdir(opt):
176
+ yield os.path.join(comp_file, "")
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (4.48 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/cache.cpython-311.pyc ADDED
Binary file (10.9 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/check.cpython-311.pyc ADDED
Binary file (3.03 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/completion.cpython-311.pyc ADDED
Binary file (5.65 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/debug.cpython-311.pyc ADDED
Binary file (12.2 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/download.cpython-311.pyc ADDED
Binary file (7.87 kB). View file