koichi12 commited on
Commit
223f59f
·
verified ·
1 Parent(s): 67bb8ac

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50) hide show
  1. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/CodeWriter.py +820 -0
  2. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/cast.pxd +12 -0
  3. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/deque.pxd +165 -0
  4. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/forward_list.pxd +63 -0
  5. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/iterator.pxd +34 -0
  6. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/limits.pxd +61 -0
  7. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/memory.pxd +115 -0
  8. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/numbers.pxd +15 -0
  9. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/random.pxd +166 -0
  10. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/typeindex.pxd +15 -0
  11. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/unordered_map.pxd +193 -0
  12. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/chains.cpython-311.pyc +0 -0
  13. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/matching.cpython-311.pyc +0 -0
  14. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/reciprocity.cpython-311.pyc +0 -0
  15. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/__init__.py +11 -0
  16. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/connectivity.py +826 -0
  17. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/disjoint_paths.py +412 -0
  18. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/edge_augmentation.py +1269 -0
  19. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/edge_kcomponents.py +584 -0
  20. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/kcomponents.py +222 -0
  21. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/kcutsets.py +233 -0
  22. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/stoerwagner.py +150 -0
  23. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/__init__.cpython-311.pyc +0 -0
  24. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_boundary.cpython-311.pyc +0 -0
  25. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_graph_hashing.cpython-311.pyc +0 -0
  26. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_lowest_common_ancestors.cpython-311.pyc +0 -0
  27. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_simple_paths.cpython-311.pyc +0 -0
  28. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_swap.cpython-311.pyc +0 -0
  29. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_threshold.cpython-311.pyc +0 -0
  30. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_time_dependent.cpython-311.pyc +0 -0
  31. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_tournament.cpython-311.pyc +0 -0
  32. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_max_weight_clique.py +181 -0
  33. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_moral.py +15 -0
  34. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_non_randomness.py +37 -0
  35. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_regular.py +86 -0
  36. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_similarity.py +923 -0
  37. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_tournament.py +162 -0
  38. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_walks.py +54 -0
  39. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/__init__.py +7 -0
  40. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/__pycache__/layout.cpython-311.pyc +0 -0
  41. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/__pycache__/nx_agraph.cpython-311.pyc +0 -0
  42. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/nx_pylab.py +1594 -0
  43. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__pycache__/__init__.cpython-311.pyc +0 -0
  44. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__pycache__/test_pylab.cpython-311.pyc +0 -0
  45. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/baseline/test_house_with_colors.png +0 -0
  46. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/test_agraph.py +254 -0
  47. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/test_layout.py +469 -0
  48. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/generators/tests/__init__.py +0 -0
  49. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/generators/tests/__pycache__/test_atlas.cpython-311.pyc +0 -0
  50. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/generators/tests/__pycache__/test_nonisomorphic_trees.cpython-311.pyc +0 -0
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/CodeWriter.py ADDED
@@ -0,0 +1,820 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Serializes a Cython code tree to Cython code. This is primarily useful for
3
+ debugging and testing purposes.
4
+ The output is in a strict format, no whitespace or comments from the input
5
+ are preserved (and they could not be, as they are not present in the code tree).
6
+ """
7
+
8
+ from __future__ import absolute_import, print_function
9
+
10
+ from .Compiler.Visitor import TreeVisitor
11
+ from .Compiler.ExprNodes import *
12
+ from .Compiler.Nodes import CSimpleBaseTypeNode
13
+
14
+
15
+ class LinesResult(object):
16
+ def __init__(self):
17
+ self.lines = []
18
+ self.s = u""
19
+
20
+ def put(self, s):
21
+ self.s += s
22
+
23
+ def newline(self):
24
+ self.lines.append(self.s)
25
+ self.s = u""
26
+
27
+ def putline(self, s):
28
+ self.put(s)
29
+ self.newline()
30
+
31
+
32
+ class DeclarationWriter(TreeVisitor):
33
+ """
34
+ A Cython code writer that is limited to declarations nodes.
35
+ """
36
+
37
+ indent_string = u" "
38
+
39
+ def __init__(self, result=None):
40
+ super(DeclarationWriter, self).__init__()
41
+ if result is None:
42
+ result = LinesResult()
43
+ self.result = result
44
+ self.numindents = 0
45
+ self.tempnames = {}
46
+ self.tempblockindex = 0
47
+
48
+ def write(self, tree):
49
+ self.visit(tree)
50
+ return self.result
51
+
52
+ def indent(self):
53
+ self.numindents += 1
54
+
55
+ def dedent(self):
56
+ self.numindents -= 1
57
+
58
+ def startline(self, s=u""):
59
+ self.result.put(self.indent_string * self.numindents + s)
60
+
61
+ def put(self, s):
62
+ self.result.put(s)
63
+
64
+ def putline(self, s):
65
+ self.result.putline(self.indent_string * self.numindents + s)
66
+
67
+ def endline(self, s=u""):
68
+ self.result.putline(s)
69
+
70
+ def line(self, s):
71
+ self.startline(s)
72
+ self.endline()
73
+
74
+ def comma_separated_list(self, items, output_rhs=False):
75
+ if len(items) > 0:
76
+ for item in items[:-1]:
77
+ self.visit(item)
78
+ if output_rhs and item.default is not None:
79
+ self.put(u" = ")
80
+ self.visit(item.default)
81
+ self.put(u", ")
82
+ self.visit(items[-1])
83
+ if output_rhs and items[-1].default is not None:
84
+ self.put(u" = ")
85
+ self.visit(items[-1].default)
86
+
87
+ def _visit_indented(self, node):
88
+ self.indent()
89
+ self.visit(node)
90
+ self.dedent()
91
+
92
+ def visit_Node(self, node):
93
+ raise AssertionError("Node not handled by serializer: %r" % node)
94
+
95
+ def visit_ModuleNode(self, node):
96
+ self.visitchildren(node)
97
+
98
+ def visit_StatListNode(self, node):
99
+ self.visitchildren(node)
100
+
101
+ def visit_CDefExternNode(self, node):
102
+ if node.include_file is None:
103
+ file = u'*'
104
+ else:
105
+ file = u'"%s"' % node.include_file
106
+ self.putline(u"cdef extern from %s:" % file)
107
+ self._visit_indented(node.body)
108
+
109
+ def visit_CPtrDeclaratorNode(self, node):
110
+ self.put('*')
111
+ self.visit(node.base)
112
+
113
+ def visit_CReferenceDeclaratorNode(self, node):
114
+ self.put('&')
115
+ self.visit(node.base)
116
+
117
+ def visit_CArrayDeclaratorNode(self, node):
118
+ self.visit(node.base)
119
+ self.put(u'[')
120
+ if node.dimension is not None:
121
+ self.visit(node.dimension)
122
+ self.put(u']')
123
+
124
+ def visit_CFuncDeclaratorNode(self, node):
125
+ # TODO: except, gil, etc.
126
+ self.visit(node.base)
127
+ self.put(u'(')
128
+ self.comma_separated_list(node.args)
129
+ self.endline(u')')
130
+
131
+ def visit_CNameDeclaratorNode(self, node):
132
+ self.put(node.name)
133
+
134
+ def visit_CSimpleBaseTypeNode(self, node):
135
+ # See Parsing.p_sign_and_longness
136
+ if node.is_basic_c_type:
137
+ self.put(("unsigned ", "", "signed ")[node.signed])
138
+ if node.longness < 0:
139
+ self.put("short " * -node.longness)
140
+ elif node.longness > 0:
141
+ self.put("long " * node.longness)
142
+ if node.name is not None:
143
+ self.put(node.name)
144
+
145
+ def visit_CComplexBaseTypeNode(self, node):
146
+ self.visit(node.base_type)
147
+ self.visit(node.declarator)
148
+
149
+ def visit_CNestedBaseTypeNode(self, node):
150
+ self.visit(node.base_type)
151
+ self.put(u'.')
152
+ self.put(node.name)
153
+
154
+ def visit_TemplatedTypeNode(self, node):
155
+ self.visit(node.base_type_node)
156
+ self.put(u'[')
157
+ self.comma_separated_list(node.positional_args + node.keyword_args.key_value_pairs)
158
+ self.put(u']')
159
+
160
+ def visit_CVarDefNode(self, node):
161
+ self.startline(u"cdef ")
162
+ self.visit(node.base_type)
163
+ self.put(u" ")
164
+ self.comma_separated_list(node.declarators, output_rhs=True)
165
+ self.endline()
166
+
167
+ def _visit_container_node(self, node, decl, extras, attributes):
168
+ # TODO: visibility
169
+ self.startline(decl)
170
+ if node.name:
171
+ self.put(u' ')
172
+ self.put(node.name)
173
+ if node.cname is not None:
174
+ self.put(u' "%s"' % node.cname)
175
+ if extras:
176
+ self.put(extras)
177
+ self.endline(':')
178
+ self.indent()
179
+ if not attributes:
180
+ self.putline('pass')
181
+ else:
182
+ for attribute in attributes:
183
+ self.visit(attribute)
184
+ self.dedent()
185
+
186
+ def visit_CStructOrUnionDefNode(self, node):
187
+ if node.typedef_flag:
188
+ decl = u'ctypedef '
189
+ else:
190
+ decl = u'cdef '
191
+ if node.visibility == 'public':
192
+ decl += u'public '
193
+ if node.packed:
194
+ decl += u'packed '
195
+ decl += node.kind
196
+ self._visit_container_node(node, decl, None, node.attributes)
197
+
198
+ def visit_CppClassNode(self, node):
199
+ extras = ""
200
+ if node.templates:
201
+ extras = u"[%s]" % ", ".join(node.templates)
202
+ if node.base_classes:
203
+ extras += "(%s)" % ", ".join(node.base_classes)
204
+ self._visit_container_node(node, u"cdef cppclass", extras, node.attributes)
205
+
206
+ def visit_CEnumDefNode(self, node):
207
+ self._visit_container_node(node, u"cdef enum", None, node.items)
208
+
209
+ def visit_CEnumDefItemNode(self, node):
210
+ self.startline(node.name)
211
+ if node.cname:
212
+ self.put(u' "%s"' % node.cname)
213
+ if node.value:
214
+ self.put(u" = ")
215
+ self.visit(node.value)
216
+ self.endline()
217
+
218
+ def visit_CClassDefNode(self, node):
219
+ assert not node.module_name
220
+ if node.decorators:
221
+ for decorator in node.decorators:
222
+ self.visit(decorator)
223
+ self.startline(u"cdef class ")
224
+ self.put(node.class_name)
225
+ if node.base_class_name:
226
+ self.put(u"(")
227
+ if node.base_class_module:
228
+ self.put(node.base_class_module)
229
+ self.put(u".")
230
+ self.put(node.base_class_name)
231
+ self.put(u")")
232
+ self.endline(u":")
233
+ self._visit_indented(node.body)
234
+
235
+ def visit_CTypeDefNode(self, node):
236
+ self.startline(u"ctypedef ")
237
+ self.visit(node.base_type)
238
+ self.put(u" ")
239
+ self.visit(node.declarator)
240
+ self.endline()
241
+
242
+ def visit_FuncDefNode(self, node):
243
+ # TODO: support cdef + cpdef functions
244
+ self.startline(u"def %s(" % node.name)
245
+ self.comma_separated_list(node.args)
246
+ self.endline(u"):")
247
+ self._visit_indented(node.body)
248
+
249
+ def visit_CFuncDefNode(self, node):
250
+ self.startline(u'cpdef ' if node.overridable else u'cdef ')
251
+ if node.modifiers:
252
+ self.put(' '.join(node.modifiers))
253
+ self.put(' ')
254
+ if node.visibility != 'private':
255
+ self.put(node.visibility)
256
+ self.put(u' ')
257
+ if node.api:
258
+ self.put(u'api ')
259
+
260
+ if node.base_type:
261
+ self.visit(node.base_type)
262
+ if node.base_type.name is not None:
263
+ self.put(u' ')
264
+
265
+ # visit the CFuncDeclaratorNode, but put a `:` at the end of line
266
+ self.visit(node.declarator.base)
267
+ self.put(u'(')
268
+ self.comma_separated_list(node.declarator.args)
269
+ self.endline(u'):')
270
+
271
+ self._visit_indented(node.body)
272
+
273
+ def visit_CArgDeclNode(self, node):
274
+ # For "CSimpleBaseTypeNode", the variable type may have been parsed as type.
275
+ # For other node types, the "name" is always None.
276
+ if not isinstance(node.base_type, CSimpleBaseTypeNode) or \
277
+ node.base_type.name is not None:
278
+ self.visit(node.base_type)
279
+
280
+ # If we printed something for "node.base_type", we may need to print an extra ' '.
281
+ #
282
+ # Special case: if "node.declarator" is a "CNameDeclaratorNode",
283
+ # its "name" might be an empty string, for example, for "cdef f(x)".
284
+ if node.declarator.declared_name():
285
+ self.put(u" ")
286
+ self.visit(node.declarator)
287
+ if node.default is not None:
288
+ self.put(u" = ")
289
+ self.visit(node.default)
290
+
291
+ def visit_CImportStatNode(self, node):
292
+ self.startline(u"cimport ")
293
+ self.put(node.module_name)
294
+ if node.as_name:
295
+ self.put(u" as ")
296
+ self.put(node.as_name)
297
+ self.endline()
298
+
299
+ def visit_FromCImportStatNode(self, node):
300
+ self.startline(u"from ")
301
+ self.put(node.module_name)
302
+ self.put(u" cimport ")
303
+ first = True
304
+ for pos, name, as_name, kind in node.imported_names:
305
+ assert kind is None
306
+ if first:
307
+ first = False
308
+ else:
309
+ self.put(u", ")
310
+ self.put(name)
311
+ if as_name:
312
+ self.put(u" as ")
313
+ self.put(as_name)
314
+ self.endline()
315
+
316
+ def visit_NameNode(self, node):
317
+ self.put(node.name)
318
+
319
+ def visit_DecoratorNode(self, node):
320
+ self.startline("@")
321
+ self.visit(node.decorator)
322
+ self.endline()
323
+
324
+ def visit_PassStatNode(self, node):
325
+ self.startline(u"pass")
326
+ self.endline()
327
+
328
+
329
+ class StatementWriter(DeclarationWriter):
330
+ """
331
+ A Cython code writer for most language statement features.
332
+ """
333
+
334
+ def visit_SingleAssignmentNode(self, node):
335
+ self.startline()
336
+ self.visit(node.lhs)
337
+ self.put(u" = ")
338
+ self.visit(node.rhs)
339
+ self.endline()
340
+
341
+ def visit_CascadedAssignmentNode(self, node):
342
+ self.startline()
343
+ for lhs in node.lhs_list:
344
+ self.visit(lhs)
345
+ self.put(u" = ")
346
+ self.visit(node.rhs)
347
+ self.endline()
348
+
349
+ def visit_PrintStatNode(self, node):
350
+ self.startline(u"print ")
351
+ self.comma_separated_list(node.arg_tuple.args)
352
+ if not node.append_newline:
353
+ self.put(u",")
354
+ self.endline()
355
+
356
+ def visit_ForInStatNode(self, node):
357
+ self.startline(u"for ")
358
+ if node.target.is_sequence_constructor:
359
+ self.comma_separated_list(node.target.args)
360
+ else:
361
+ self.visit(node.target)
362
+ self.put(u" in ")
363
+ self.visit(node.iterator.sequence)
364
+ self.endline(u":")
365
+ self._visit_indented(node.body)
366
+ if node.else_clause is not None:
367
+ self.line(u"else:")
368
+ self._visit_indented(node.else_clause)
369
+
370
+ def visit_IfStatNode(self, node):
371
+ # The IfClauseNode is handled directly without a separate match
372
+ # for clarity.
373
+ self.startline(u"if ")
374
+ self.visit(node.if_clauses[0].condition)
375
+ self.endline(":")
376
+ self._visit_indented(node.if_clauses[0].body)
377
+ for clause in node.if_clauses[1:]:
378
+ self.startline("elif ")
379
+ self.visit(clause.condition)
380
+ self.endline(":")
381
+ self._visit_indented(clause.body)
382
+ if node.else_clause is not None:
383
+ self.line("else:")
384
+ self._visit_indented(node.else_clause)
385
+
386
+ def visit_WhileStatNode(self, node):
387
+ self.startline(u"while ")
388
+ self.visit(node.condition)
389
+ self.endline(u":")
390
+ self._visit_indented(node.body)
391
+ if node.else_clause is not None:
392
+ self.line("else:")
393
+ self._visit_indented(node.else_clause)
394
+
395
+ def visit_ContinueStatNode(self, node):
396
+ self.line(u"continue")
397
+
398
+ def visit_BreakStatNode(self, node):
399
+ self.line(u"break")
400
+
401
+ def visit_SequenceNode(self, node):
402
+ self.comma_separated_list(node.args) # Might need to discover whether we need () around tuples...hmm...
403
+
404
+ def visit_ExprStatNode(self, node):
405
+ self.startline()
406
+ self.visit(node.expr)
407
+ self.endline()
408
+
409
+ def visit_InPlaceAssignmentNode(self, node):
410
+ self.startline()
411
+ self.visit(node.lhs)
412
+ self.put(u" %s= " % node.operator)
413
+ self.visit(node.rhs)
414
+ self.endline()
415
+
416
+ def visit_WithStatNode(self, node):
417
+ self.startline()
418
+ self.put(u"with ")
419
+ self.visit(node.manager)
420
+ if node.target is not None:
421
+ self.put(u" as ")
422
+ self.visit(node.target)
423
+ self.endline(u":")
424
+ self._visit_indented(node.body)
425
+
426
+ def visit_TryFinallyStatNode(self, node):
427
+ self.line(u"try:")
428
+ self._visit_indented(node.body)
429
+ self.line(u"finally:")
430
+ self._visit_indented(node.finally_clause)
431
+
432
+ def visit_TryExceptStatNode(self, node):
433
+ self.line(u"try:")
434
+ self._visit_indented(node.body)
435
+ for x in node.except_clauses:
436
+ self.visit(x)
437
+ if node.else_clause is not None:
438
+ self.visit(node.else_clause)
439
+
440
+ def visit_ExceptClauseNode(self, node):
441
+ self.startline(u"except")
442
+ if node.pattern is not None:
443
+ self.put(u" ")
444
+ self.visit(node.pattern)
445
+ if node.target is not None:
446
+ self.put(u", ")
447
+ self.visit(node.target)
448
+ self.endline(":")
449
+ self._visit_indented(node.body)
450
+
451
+ def visit_ReturnStatNode(self, node):
452
+ self.startline("return")
453
+ if node.value is not None:
454
+ self.put(u" ")
455
+ self.visit(node.value)
456
+ self.endline()
457
+
458
+ def visit_ReraiseStatNode(self, node):
459
+ self.line("raise")
460
+
461
+ def visit_ImportNode(self, node):
462
+ self.put(u"(import %s)" % node.module_name.value)
463
+
464
+ def visit_TempsBlockNode(self, node):
465
+ """
466
+ Temporaries are output like '$1_1', where the first number is
467
+ an index of the TempsBlockNode and the second number is an index
468
+ of the temporary which that block allocates.
469
+ """
470
+ idx = 0
471
+ for handle in node.temps:
472
+ self.tempnames[handle] = "$%d_%d" % (self.tempblockindex, idx)
473
+ idx += 1
474
+ self.tempblockindex += 1
475
+ self.visit(node.body)
476
+
477
+ def visit_TempRefNode(self, node):
478
+ self.put(self.tempnames[node.handle])
479
+
480
+
481
+ class ExpressionWriter(TreeVisitor):
482
+ """
483
+ A Cython code writer that is intentionally limited to expressions.
484
+ """
485
+
486
+ def __init__(self, result=None):
487
+ super(ExpressionWriter, self).__init__()
488
+ if result is None:
489
+ result = u""
490
+ self.result = result
491
+ self.precedence = [0]
492
+
493
+ def write(self, tree):
494
+ self.visit(tree)
495
+ return self.result
496
+
497
+ def put(self, s):
498
+ self.result += s
499
+
500
+ def remove(self, s):
501
+ if self.result.endswith(s):
502
+ self.result = self.result[:-len(s)]
503
+
504
+ def comma_separated_list(self, items):
505
+ if len(items) > 0:
506
+ for item in items[:-1]:
507
+ self.visit(item)
508
+ self.put(u", ")
509
+ self.visit(items[-1])
510
+
511
+ def visit_Node(self, node):
512
+ raise AssertionError("Node not handled by serializer: %r" % node)
513
+
514
+ def visit_IntNode(self, node):
515
+ self.put(node.value)
516
+
517
+ def visit_FloatNode(self, node):
518
+ self.put(node.value)
519
+
520
+ def visit_NoneNode(self, node):
521
+ self.put(u"None")
522
+
523
+ def visit_NameNode(self, node):
524
+ self.put(node.name)
525
+
526
+ def visit_EllipsisNode(self, node):
527
+ self.put(u"...")
528
+
529
+ def visit_BoolNode(self, node):
530
+ self.put(str(node.value))
531
+
532
+ def visit_ConstNode(self, node):
533
+ self.put(str(node.value))
534
+
535
+ def visit_ImagNode(self, node):
536
+ self.put(node.value)
537
+ self.put(u"j")
538
+
539
+ def emit_string(self, node, prefix=u""):
540
+ repr_val = repr(node.value)
541
+ if repr_val[0] in 'ub':
542
+ repr_val = repr_val[1:]
543
+ self.put(u"%s%s" % (prefix, repr_val))
544
+
545
+ def visit_BytesNode(self, node):
546
+ self.emit_string(node, u"b")
547
+
548
+ def visit_StringNode(self, node):
549
+ self.emit_string(node)
550
+
551
+ def visit_UnicodeNode(self, node):
552
+ self.emit_string(node, u"u")
553
+
554
+ def emit_sequence(self, node, parens=(u"", u"")):
555
+ open_paren, close_paren = parens
556
+ items = node.subexpr_nodes()
557
+ self.put(open_paren)
558
+ self.comma_separated_list(items)
559
+ self.put(close_paren)
560
+
561
+ def visit_ListNode(self, node):
562
+ self.emit_sequence(node, u"[]")
563
+
564
+ def visit_TupleNode(self, node):
565
+ self.emit_sequence(node, u"()")
566
+
567
+ def visit_SetNode(self, node):
568
+ if len(node.subexpr_nodes()) > 0:
569
+ self.emit_sequence(node, u"{}")
570
+ else:
571
+ self.put(u"set()")
572
+
573
+ def visit_DictNode(self, node):
574
+ self.emit_sequence(node, u"{}")
575
+
576
+ def visit_DictItemNode(self, node):
577
+ self.visit(node.key)
578
+ self.put(u": ")
579
+ self.visit(node.value)
580
+
581
+ unop_precedence = {
582
+ 'not': 3, '!': 3,
583
+ '+': 11, '-': 11, '~': 11,
584
+ }
585
+ binop_precedence = {
586
+ 'or': 1,
587
+ 'and': 2,
588
+ # unary: 'not': 3, '!': 3,
589
+ 'in': 4, 'not_in': 4, 'is': 4, 'is_not': 4, '<': 4, '<=': 4, '>': 4, '>=': 4, '!=': 4, '==': 4,
590
+ '|': 5,
591
+ '^': 6,
592
+ '&': 7,
593
+ '<<': 8, '>>': 8,
594
+ '+': 9, '-': 9,
595
+ '*': 10, '@': 10, '/': 10, '//': 10, '%': 10,
596
+ # unary: '+': 11, '-': 11, '~': 11
597
+ '**': 12,
598
+ }
599
+
600
+ def operator_enter(self, new_prec):
601
+ old_prec = self.precedence[-1]
602
+ if old_prec > new_prec:
603
+ self.put(u"(")
604
+ self.precedence.append(new_prec)
605
+
606
+ def operator_exit(self):
607
+ old_prec, new_prec = self.precedence[-2:]
608
+ if old_prec > new_prec:
609
+ self.put(u")")
610
+ self.precedence.pop()
611
+
612
+ def visit_NotNode(self, node):
613
+ op = 'not'
614
+ prec = self.unop_precedence[op]
615
+ self.operator_enter(prec)
616
+ self.put(u"not ")
617
+ self.visit(node.operand)
618
+ self.operator_exit()
619
+
620
+ def visit_UnopNode(self, node):
621
+ op = node.operator
622
+ prec = self.unop_precedence[op]
623
+ self.operator_enter(prec)
624
+ self.put(u"%s" % node.operator)
625
+ self.visit(node.operand)
626
+ self.operator_exit()
627
+
628
+ def visit_BinopNode(self, node):
629
+ op = node.operator
630
+ prec = self.binop_precedence.get(op, 0)
631
+ self.operator_enter(prec)
632
+ self.visit(node.operand1)
633
+ self.put(u" %s " % op.replace('_', ' '))
634
+ self.visit(node.operand2)
635
+ self.operator_exit()
636
+
637
+ def visit_BoolBinopNode(self, node):
638
+ self.visit_BinopNode(node)
639
+
640
+ def visit_PrimaryCmpNode(self, node):
641
+ self.visit_BinopNode(node)
642
+
643
+ def visit_IndexNode(self, node):
644
+ self.visit(node.base)
645
+ self.put(u"[")
646
+ if isinstance(node.index, TupleNode):
647
+ if node.index.subexpr_nodes():
648
+ self.emit_sequence(node.index)
649
+ else:
650
+ self.put(u"()")
651
+ else:
652
+ self.visit(node.index)
653
+ self.put(u"]")
654
+
655
+ def visit_SliceIndexNode(self, node):
656
+ self.visit(node.base)
657
+ self.put(u"[")
658
+ if node.start:
659
+ self.visit(node.start)
660
+ self.put(u":")
661
+ if node.stop:
662
+ self.visit(node.stop)
663
+ if node.slice:
664
+ self.put(u":")
665
+ self.visit(node.slice)
666
+ self.put(u"]")
667
+
668
+ def visit_SliceNode(self, node):
669
+ if not node.start.is_none:
670
+ self.visit(node.start)
671
+ self.put(u":")
672
+ if not node.stop.is_none:
673
+ self.visit(node.stop)
674
+ if not node.step.is_none:
675
+ self.put(u":")
676
+ self.visit(node.step)
677
+
678
+ def visit_CondExprNode(self, node):
679
+ self.visit(node.true_val)
680
+ self.put(u" if ")
681
+ self.visit(node.test)
682
+ self.put(u" else ")
683
+ self.visit(node.false_val)
684
+
685
+ def visit_AttributeNode(self, node):
686
+ self.visit(node.obj)
687
+ self.put(u".%s" % node.attribute)
688
+
689
+ def visit_SimpleCallNode(self, node):
690
+ self.visit(node.function)
691
+ self.put(u"(")
692
+ self.comma_separated_list(node.args)
693
+ self.put(")")
694
+
695
+ def emit_pos_args(self, node):
696
+ if node is None:
697
+ return
698
+ if isinstance(node, AddNode):
699
+ self.emit_pos_args(node.operand1)
700
+ self.emit_pos_args(node.operand2)
701
+ elif isinstance(node, TupleNode):
702
+ for expr in node.subexpr_nodes():
703
+ self.visit(expr)
704
+ self.put(u", ")
705
+ elif isinstance(node, AsTupleNode):
706
+ self.put("*")
707
+ self.visit(node.arg)
708
+ self.put(u", ")
709
+ else:
710
+ self.visit(node)
711
+ self.put(u", ")
712
+
713
+ def emit_kwd_args(self, node):
714
+ if node is None:
715
+ return
716
+ if isinstance(node, MergedDictNode):
717
+ for expr in node.subexpr_nodes():
718
+ self.emit_kwd_args(expr)
719
+ elif isinstance(node, DictNode):
720
+ for expr in node.subexpr_nodes():
721
+ self.put(u"%s=" % expr.key.value)
722
+ self.visit(expr.value)
723
+ self.put(u", ")
724
+ else:
725
+ self.put(u"**")
726
+ self.visit(node)
727
+ self.put(u", ")
728
+
729
+ def visit_GeneralCallNode(self, node):
730
+ self.visit(node.function)
731
+ self.put(u"(")
732
+ self.emit_pos_args(node.positional_args)
733
+ self.emit_kwd_args(node.keyword_args)
734
+ self.remove(u", ")
735
+ self.put(")")
736
+
737
+ def emit_comprehension(self, body, target,
738
+ sequence, condition,
739
+ parens=(u"", u"")):
740
+ open_paren, close_paren = parens
741
+ self.put(open_paren)
742
+ self.visit(body)
743
+ self.put(u" for ")
744
+ self.visit(target)
745
+ self.put(u" in ")
746
+ self.visit(sequence)
747
+ if condition:
748
+ self.put(u" if ")
749
+ self.visit(condition)
750
+ self.put(close_paren)
751
+
752
+ def visit_ComprehensionAppendNode(self, node):
753
+ self.visit(node.expr)
754
+
755
+ def visit_DictComprehensionAppendNode(self, node):
756
+ self.visit(node.key_expr)
757
+ self.put(u": ")
758
+ self.visit(node.value_expr)
759
+
760
+ def visit_ComprehensionNode(self, node):
761
+ tpmap = {'list': u"[]", 'dict': u"{}", 'set': u"{}"}
762
+ parens = tpmap[node.type.py_type_name()]
763
+ body = node.loop.body
764
+ target = node.loop.target
765
+ sequence = node.loop.iterator.sequence
766
+ condition = None
767
+ if hasattr(body, 'if_clauses'):
768
+ # type(body) is Nodes.IfStatNode
769
+ condition = body.if_clauses[0].condition
770
+ body = body.if_clauses[0].body
771
+ self.emit_comprehension(body, target, sequence, condition, parens)
772
+
773
+ def visit_GeneratorExpressionNode(self, node):
774
+ body = node.loop.body
775
+ target = node.loop.target
776
+ sequence = node.loop.iterator.sequence
777
+ condition = None
778
+ if hasattr(body, 'if_clauses'):
779
+ # type(body) is Nodes.IfStatNode
780
+ condition = body.if_clauses[0].condition
781
+ body = body.if_clauses[0].body.expr.arg
782
+ elif hasattr(body, 'expr'):
783
+ # type(body) is Nodes.ExprStatNode
784
+ body = body.expr.arg
785
+ self.emit_comprehension(body, target, sequence, condition, u"()")
786
+
787
+
788
+ class PxdWriter(DeclarationWriter, ExpressionWriter):
789
+ """
790
+ A Cython code writer for everything supported in pxd files.
791
+ (currently unused)
792
+ """
793
+
794
+ def __call__(self, node):
795
+ print(u'\n'.join(self.write(node).lines))
796
+ return node
797
+
798
+ def visit_CFuncDefNode(self, node):
799
+ if node.overridable:
800
+ self.startline(u'cpdef ')
801
+ else:
802
+ self.startline(u'cdef ')
803
+ if node.modifiers:
804
+ self.put(' '.join(node.modifiers))
805
+ self.put(' ')
806
+ if node.visibility != 'private':
807
+ self.put(node.visibility)
808
+ self.put(u' ')
809
+ if node.api:
810
+ self.put(u'api ')
811
+ self.visit(node.declarator)
812
+
813
+ def visit_StatNode(self, node):
814
+ pass
815
+
816
+
817
+ class CodeWriter(StatementWriter, ExpressionWriter):
818
+ """
819
+ A complete Cython code writer.
820
+ """
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/cast.pxd ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Defines the standard C++ cast operators.
2
+ #
3
+ # Due to type restrictions, these are only defined for pointer parameters,
4
+ # however that is the only case where they are significantly more interesting
5
+ # than the standard C cast operator which can be written "<T>(expression)" in
6
+ # Cython.
7
+
8
+ cdef extern from * nogil:
9
+ cdef T dynamic_cast[T](void *) except + # nullptr may also indicate failure
10
+ cdef T static_cast[T](void *)
11
+ cdef T reinterpret_cast[T](void *)
12
+ cdef T const_cast[T](void *)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/deque.pxd ADDED
@@ -0,0 +1,165 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ cdef extern from "<deque>" namespace "std" nogil:
2
+ cdef cppclass deque[T,ALLOCATOR=*]:
3
+ ctypedef T value_type
4
+ ctypedef ALLOCATOR allocator_type
5
+
6
+ # these should really be allocator_type.size_type and
7
+ # allocator_type.difference_type to be true to the C++ definition
8
+ # but cython doesn't support deferred access on template arguments
9
+ ctypedef size_t size_type
10
+ ctypedef ptrdiff_t difference_type
11
+
12
+ cppclass const_iterator
13
+ cppclass iterator:
14
+ iterator() except +
15
+ iterator(iterator&) except +
16
+ value_type& operator*()
17
+ iterator operator++()
18
+ iterator operator--()
19
+ iterator operator++(int)
20
+ iterator operator--(int)
21
+ iterator operator+(size_type)
22
+ iterator operator-(size_type)
23
+ difference_type operator-(iterator)
24
+ difference_type operator-(const_iterator)
25
+ bint operator==(iterator)
26
+ bint operator==(const_iterator)
27
+ bint operator!=(iterator)
28
+ bint operator!=(const_iterator)
29
+ bint operator<(iterator)
30
+ bint operator<(const_iterator)
31
+ bint operator>(iterator)
32
+ bint operator>(const_iterator)
33
+ bint operator<=(iterator)
34
+ bint operator<=(const_iterator)
35
+ bint operator>=(iterator)
36
+ bint operator>=(const_iterator)
37
+ cppclass const_iterator:
38
+ const_iterator() except +
39
+ const_iterator(iterator&) except +
40
+ const_iterator(const_iterator&) except +
41
+ operator=(iterator&) except +
42
+ const value_type& operator*()
43
+ const_iterator operator++()
44
+ const_iterator operator--()
45
+ const_iterator operator++(int)
46
+ const_iterator operator--(int)
47
+ const_iterator operator+(size_type)
48
+ const_iterator operator-(size_type)
49
+ difference_type operator-(iterator)
50
+ difference_type operator-(const_iterator)
51
+ bint operator==(iterator)
52
+ bint operator==(const_iterator)
53
+ bint operator!=(iterator)
54
+ bint operator!=(const_iterator)
55
+ bint operator<(iterator)
56
+ bint operator<(const_iterator)
57
+ bint operator>(iterator)
58
+ bint operator>(const_iterator)
59
+ bint operator<=(iterator)
60
+ bint operator<=(const_iterator)
61
+ bint operator>=(iterator)
62
+ bint operator>=(const_iterator)
63
+
64
+ cppclass const_reverse_iterator
65
+ cppclass reverse_iterator:
66
+ reverse_iterator() except +
67
+ reverse_iterator(reverse_iterator&) except +
68
+ value_type& operator*()
69
+ reverse_iterator operator++()
70
+ reverse_iterator operator--()
71
+ reverse_iterator operator++(int)
72
+ reverse_iterator operator--(int)
73
+ reverse_iterator operator+(size_type)
74
+ reverse_iterator operator-(size_type)
75
+ difference_type operator-(iterator)
76
+ difference_type operator-(const_iterator)
77
+ bint operator==(reverse_iterator)
78
+ bint operator==(const_reverse_iterator)
79
+ bint operator!=(reverse_iterator)
80
+ bint operator!=(const_reverse_iterator)
81
+ bint operator<(reverse_iterator)
82
+ bint operator<(const_reverse_iterator)
83
+ bint operator>(reverse_iterator)
84
+ bint operator>(const_reverse_iterator)
85
+ bint operator<=(reverse_iterator)
86
+ bint operator<=(const_reverse_iterator)
87
+ bint operator>=(reverse_iterator)
88
+ bint operator>=(const_reverse_iterator)
89
+ cppclass const_reverse_iterator:
90
+ const_reverse_iterator() except +
91
+ const_reverse_iterator(reverse_iterator&) except +
92
+ operator=(reverse_iterator&) except +
93
+ const value_type& operator*()
94
+ const_reverse_iterator operator++()
95
+ const_reverse_iterator operator--()
96
+ const_reverse_iterator operator++(int)
97
+ const_reverse_iterator operator--(int)
98
+ const_reverse_iterator operator+(size_type)
99
+ const_reverse_iterator operator-(size_type)
100
+ difference_type operator-(iterator)
101
+ difference_type operator-(const_iterator)
102
+ bint operator==(reverse_iterator)
103
+ bint operator==(const_reverse_iterator)
104
+ bint operator!=(reverse_iterator)
105
+ bint operator!=(const_reverse_iterator)
106
+ bint operator<(reverse_iterator)
107
+ bint operator<(const_reverse_iterator)
108
+ bint operator>(reverse_iterator)
109
+ bint operator>(const_reverse_iterator)
110
+ bint operator<=(reverse_iterator)
111
+ bint operator<=(const_reverse_iterator)
112
+ bint operator>=(reverse_iterator)
113
+ bint operator>=(const_reverse_iterator)
114
+
115
+ deque() except +
116
+ deque(deque&) except +
117
+ deque(size_t) except +
118
+ deque(size_t, T&) except +
119
+ #deque[InputIt](InputIt, InputIt)
120
+ T& operator[](size_t)
121
+ #deque& operator=(deque&)
122
+ bint operator==(deque&, deque&)
123
+ bint operator!=(deque&, deque&)
124
+ bint operator<(deque&, deque&)
125
+ bint operator>(deque&, deque&)
126
+ bint operator<=(deque&, deque&)
127
+ bint operator>=(deque&, deque&)
128
+ void assign(size_t, T&) except +
129
+ void assign[InputIt](InputIt, InputIt) except +
130
+ T& at(size_t) except +
131
+ T& back()
132
+ iterator begin()
133
+ const_iterator const_begin "begin"()
134
+ const_iterator cbegin()
135
+ void clear()
136
+ bint empty()
137
+ iterator end()
138
+ const_iterator const_end "end"()
139
+ const_iterator cend()
140
+ iterator erase(iterator) except +
141
+ iterator erase(iterator, iterator) except +
142
+ T& front()
143
+ iterator insert(iterator, T&) except +
144
+ void insert(iterator, size_t, T&) except +
145
+ void insert[InputIt](iterator, InputIt, InputIt) except +
146
+ size_t max_size()
147
+ void pop_back()
148
+ void pop_front()
149
+ void push_back(T&) except +
150
+ void push_front(T&) except +
151
+ reverse_iterator rbegin()
152
+ #const_reverse_iterator rbegin()
153
+ const_reverse_iterator crbegin()
154
+ reverse_iterator rend()
155
+ #const_reverse_iterator rend()
156
+ const_reverse_iterator crend()
157
+ void resize(size_t) except +
158
+ void resize(size_t, T&) except +
159
+ size_t size()
160
+ void swap(deque&)
161
+
162
+ # C++11 methods
163
+ void shrink_to_fit() except +
164
+ T& emplace_front(...) except +
165
+ T& emplace_back(...) except +
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/forward_list.pxd ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ cdef extern from "<forward_list>" namespace "std" nogil:
2
+ cdef cppclass forward_list[T,ALLOCATOR=*]:
3
+ ctypedef T value_type
4
+ ctypedef ALLOCATOR allocator_type
5
+
6
+ # these should really be allocator_type.size_type and
7
+ # allocator_type.difference_type to be true to the C++ definition
8
+ # but cython doesn't support deferred access on template arguments
9
+ ctypedef size_t size_type
10
+ ctypedef ptrdiff_t difference_type
11
+
12
+ cppclass iterator:
13
+ iterator()
14
+ iterator(iterator &)
15
+ T& operator*()
16
+ iterator operator++()
17
+ iterator operator++(int)
18
+ bint operator==(iterator)
19
+ bint operator!=(iterator)
20
+ cppclass const_iterator(iterator):
21
+ pass
22
+ forward_list() except +
23
+ forward_list(forward_list&) except +
24
+ forward_list(size_t, T&) except +
25
+ #forward_list& operator=(forward_list&)
26
+ bint operator==(forward_list&, forward_list&)
27
+ bint operator!=(forward_list&, forward_list&)
28
+ bint operator<(forward_list&, forward_list&)
29
+ bint operator>(forward_list&, forward_list&)
30
+ bint operator<=(forward_list&, forward_list&)
31
+ bint operator>=(forward_list&, forward_list&)
32
+ void assign(size_t, T&)
33
+ T& front()
34
+ iterator before_begin()
35
+ const_iterator const_before_begin "before_begin"()
36
+ iterator begin()
37
+ const_iterator const_begin "begin"()
38
+ iterator end()
39
+ const_iterator const_end "end"()
40
+ bint empty()
41
+ size_t max_size()
42
+ void clear()
43
+ iterator insert_after(iterator, T&)
44
+ void insert_after(iterator, size_t, T&)
45
+ iterator erase_after(iterator)
46
+ iterator erase_after(iterator, iterator)
47
+ void push_front(T&)
48
+ void pop_front()
49
+ void resize(size_t)
50
+ void resize(size_t, T&)
51
+ void swap(forward_list&)
52
+ void merge(forward_list&)
53
+ void merge[Compare](forward_list&, Compare)
54
+ void splice_after(iterator, forward_list&)
55
+ void splice_after(iterator, forward_list&, iterator)
56
+ void splice_after(iterator, forward_list&, iterator, iterator)
57
+ void remove(const T&)
58
+ void remove_if[Predicate](Predicate)
59
+ void reverse()
60
+ void unique()
61
+ void unique[Predicate](Predicate)
62
+ void sort()
63
+ void sort[Compare](Compare)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/iterator.pxd ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #Basic reference: http://www.cplusplus.com/reference/iterator/
2
+ #Most of these classes are in fact empty structs
3
+
4
+ from libc.stddef import ptrdiff_t
5
+
6
+ cdef extern from "<iterator>" namespace "std" nogil:
7
+ cdef cppclass iterator[Category,T,Distance,Pointer,Reference]:
8
+ pass
9
+ cdef cppclass output_iterator_tag:
10
+ pass
11
+ cdef cppclass input_iterator_tag:
12
+ pass
13
+ cdef cppclass forward_iterator_tag(input_iterator_tag):
14
+ pass
15
+ cdef cppclass bidirectional_iterator_tag(forward_iterator_tag):
16
+ pass
17
+ cdef cppclass random_access_iterator_tag(bidirectional_iterator_tag):
18
+ pass
19
+
20
+ cdef cppclass back_insert_iterator[T](iterator[output_iterator_tag,void,void,void,void]):
21
+ pass
22
+ cdef cppclass front_insert_iterator[T](iterator[output_iterator_tag,void,void,void,void]):
23
+ pass
24
+ cdef cppclass insert_iterator[T](iterator[output_iterator_tag,void,void,void,void]):
25
+ pass
26
+ back_insert_iterator[CONTAINER] back_inserter[CONTAINER](CONTAINER &)
27
+ front_insert_iterator[CONTAINER] front_inserter[CONTAINER](CONTAINER &)
28
+ ##Note: this is the C++98 version of inserter.
29
+ ##The C++11 versions's prototype relies on typedef members of classes, which Cython doesn't currently support:
30
+ ##template <class Container>
31
+ ##insert_iterator<Container> inserter (Container& x, typename Container::iterator it)
32
+ insert_iterator[CONTAINER] inserter[CONTAINER,ITERATOR](CONTAINER &, ITERATOR)
33
+
34
+ ptrdiff_t distance[It](It first, It last)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/limits.pxd ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ cdef extern from "<limits>" namespace "std" nogil:
2
+ enum float_round_style:
3
+ round_indeterminate = -1
4
+ round_toward_zero = 0
5
+ round_to_nearest = 1
6
+ round_toward_infinity = 2
7
+ round_toward_neg_infinity = 3
8
+
9
+ enum float_denorm_style:
10
+ denorm_indeterminate = -1
11
+ denorm_absent = 0
12
+ denorm_present = 1
13
+
14
+ #The static methods can be called as, e.g. numeric_limits[int].round_error(), etc.
15
+ #The const data members should be declared as static. Cython currently doesn't allow that
16
+ #and/or I can't figure it out, so you must instantiate an object to access, e.g.
17
+ #cdef numeric_limits[double] lm
18
+ #print lm.round_style
19
+ cdef cppclass numeric_limits[T]:
20
+ const bint is_specialized
21
+ @staticmethod
22
+ T min()
23
+ @staticmethod
24
+ T max()
25
+ const int digits
26
+ const int digits10
27
+ const bint is_signed
28
+ const bint is_integer
29
+ const bint is_exact
30
+ const int radix
31
+ @staticmethod
32
+ T epsilon()
33
+ @staticmethod
34
+ T round_error()
35
+
36
+ const int min_exponent
37
+ const int min_exponent10
38
+ const int max_exponent
39
+ const int max_exponent10
40
+
41
+ const bint has_infinity
42
+ const bint has_quiet_NaN
43
+ const bint has_signaling_NaN
44
+ const float_denorm_style has_denorm
45
+ const bint has_denorm_loss
46
+ @staticmethod
47
+ T infinity()
48
+ @staticmethod
49
+ T quiet_NaN()
50
+ @staticmethod
51
+ T signaling_NaN()
52
+ @staticmethod
53
+ T denorm_min()
54
+
55
+ const bint is_iec559
56
+ const bint is_bounded
57
+ const bint is_modulo
58
+
59
+ const bint traps
60
+ const bint tinyness_before
61
+ const float_round_style round_style
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/memory.pxd ADDED
@@ -0,0 +1,115 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from libcpp cimport bool, nullptr_t, nullptr
2
+
3
+ cdef extern from "<memory>" namespace "std" nogil:
4
+ cdef cppclass default_delete[T]:
5
+ default_delete()
6
+
7
+ cdef cppclass allocator[T]:
8
+ allocator()
9
+ allocator(const allocator &)
10
+ #allocator(const allocator[U] &) #unique_ptr unit tests fail w/this
11
+ T * address(T &)
12
+ const T * address(const T &) const
13
+ T * allocate( size_t n ) # Not to standard. should be a second default argument
14
+ void deallocate(T * , size_t)
15
+ size_t max_size() const
16
+ void construct( T *, const T &) #C++98. The C++11 version is variadic AND perfect-forwarding
17
+ void destroy(T *) #C++98
18
+ void destroy[U](U *) #unique_ptr unit tests fail w/this
19
+
20
+
21
+ cdef cppclass unique_ptr[T,DELETER=*]:
22
+ unique_ptr()
23
+ unique_ptr(nullptr_t)
24
+ unique_ptr(T*)
25
+ unique_ptr(unique_ptr[T]&)
26
+
27
+ # Modifiers
28
+ T* release()
29
+ void reset()
30
+ void reset(nullptr_t)
31
+ void reset(T*)
32
+ void swap(unique_ptr&)
33
+
34
+ # Observers
35
+ T* get()
36
+ T& operator*()
37
+ #T* operator->() # Not Supported
38
+ bool operator bool()
39
+ bool operator!()
40
+
41
+ bool operator==(const unique_ptr&)
42
+ bool operator!=(const unique_ptr&)
43
+ bool operator<(const unique_ptr&)
44
+ bool operator>(const unique_ptr&)
45
+ bool operator<=(const unique_ptr&)
46
+ bool operator>=(const unique_ptr&)
47
+
48
+ bool operator==(nullptr_t)
49
+ bool operator!=(nullptr_t)
50
+
51
+ # Forward Declaration not working ("Compiler crash in AnalyseDeclarationsTransform")
52
+ #cdef cppclass weak_ptr[T]
53
+
54
+ cdef cppclass shared_ptr[T]:
55
+ shared_ptr()
56
+ shared_ptr(nullptr_t)
57
+ shared_ptr(T*)
58
+ shared_ptr(shared_ptr[T]&)
59
+ shared_ptr(shared_ptr[T]&, T*)
60
+ shared_ptr(unique_ptr[T]&)
61
+ #shared_ptr(weak_ptr[T]&) # Not Supported
62
+ shared_ptr[T]& operator=[Y](const shared_ptr[Y]& ptr)
63
+
64
+ # Modifiers
65
+ void reset()
66
+ void reset(T*)
67
+ void swap(shared_ptr&)
68
+
69
+ # Observers
70
+ T* get()
71
+ T& operator*()
72
+ #T* operator->() # Not Supported
73
+ long use_count()
74
+ bool unique()
75
+ bool operator bool()
76
+ bool operator!()
77
+ #bool owner_before[Y](const weak_ptr[Y]&) # Not Supported
78
+ bool owner_before[Y](const shared_ptr[Y]&)
79
+
80
+ bool operator==(const shared_ptr&)
81
+ bool operator!=(const shared_ptr&)
82
+ bool operator<(const shared_ptr&)
83
+ bool operator>(const shared_ptr&)
84
+ bool operator<=(const shared_ptr&)
85
+ bool operator>=(const shared_ptr&)
86
+
87
+ bool operator==(nullptr_t)
88
+ bool operator!=(nullptr_t)
89
+
90
+ cdef cppclass weak_ptr[T]:
91
+ weak_ptr()
92
+ weak_ptr(weak_ptr[T]&)
93
+ weak_ptr(shared_ptr[T]&)
94
+
95
+ # Modifiers
96
+ void reset()
97
+ void swap(weak_ptr&)
98
+
99
+ # Observers
100
+ long use_count()
101
+ bool expired()
102
+ shared_ptr[T] lock()
103
+ bool owner_before[Y](const weak_ptr[Y]&)
104
+ bool owner_before[Y](const shared_ptr[Y]&)
105
+
106
+ # Smart pointer non-member operations
107
+ shared_ptr[T] make_shared[T](...) except +
108
+
109
+ unique_ptr[T] make_unique[T](...) except +
110
+
111
+ # No checking on the compatibility of T and U.
112
+ cdef shared_ptr[T] static_pointer_cast[T, U](const shared_ptr[U]&)
113
+ cdef shared_ptr[T] dynamic_pointer_cast[T, U](const shared_ptr[U]&)
114
+ cdef shared_ptr[T] const_pointer_cast[T, U](const shared_ptr[U]&)
115
+ cdef shared_ptr[T] reinterpret_pointer_cast[T, U](const shared_ptr[U]&)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/numbers.pxd ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ cdef extern from "<numbers>" namespace "std::numbers" nogil:
2
+ # C++20 mathematical constants
3
+ const double e
4
+ const double log2e
5
+ const double log10e
6
+ const double pi
7
+ const double inv_pi
8
+ const double inv_sqrtpi
9
+ const double ln2
10
+ const double ln10
11
+ const double sqrt2
12
+ const double sqrt3
13
+ const double inv_sqrt3
14
+ const double egamma
15
+ const double phi
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/random.pxd ADDED
@@ -0,0 +1,166 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from libc.stdint cimport uint_fast32_t, uint_fast64_t
2
+
3
+
4
+ cdef extern from "<random>" namespace "std" nogil:
5
+ cdef cppclass random_device:
6
+ ctypedef uint_fast32_t result_type
7
+ random_device() except +
8
+ result_type operator()() except +
9
+
10
+ cdef cppclass mt19937:
11
+ ctypedef uint_fast32_t result_type
12
+ mt19937() except +
13
+ mt19937(result_type seed) except +
14
+ result_type operator()() except +
15
+ result_type min() except +
16
+ result_type max() except +
17
+ void discard(size_t z) except +
18
+ void seed(result_type seed) except +
19
+
20
+ cdef cppclass mt19937_64:
21
+ ctypedef uint_fast64_t result_type
22
+
23
+ mt19937_64() except +
24
+ mt19937_64(result_type seed) except +
25
+ result_type operator()() except +
26
+ result_type min() except +
27
+ result_type max() except +
28
+ void discard(size_t z) except +
29
+ void seed(result_type seed) except +
30
+
31
+ cdef cppclass uniform_int_distribution[T]:
32
+ ctypedef T result_type
33
+ uniform_int_distribution() except +
34
+ uniform_int_distribution(T, T) except +
35
+ result_type operator()[Generator](Generator&) except +
36
+ result_type min() except +
37
+ result_type max() except +
38
+
39
+ cdef cppclass uniform_real_distribution[T]:
40
+ ctypedef T result_type
41
+ uniform_real_distribution() except +
42
+ uniform_real_distribution(T, T) except +
43
+ result_type operator()[Generator](Generator&) except +
44
+ result_type min() except +
45
+ result_type max() except +
46
+
47
+ cdef cppclass bernoulli_distribution:
48
+ ctypedef bint result_type
49
+ bernoulli_distribution() except +
50
+ bernoulli_distribution(double) except +
51
+ result_type operator()[Generator](Generator&) except +
52
+ result_type min() except +
53
+ result_type max() except +
54
+
55
+ cdef cppclass binomial_distribution[T]:
56
+ ctypedef T result_type
57
+ binomial_distribution() except +
58
+ binomial_distribution(T, double) except +
59
+ result_type operator()[Generator](Generator&) except +
60
+ result_type min() except +
61
+ result_type max() except +
62
+
63
+ cdef cppclass geometric_distribution[T]:
64
+ ctypedef T result_type
65
+ geometric_distribution() except +
66
+ geometric_distribution(double) except +
67
+ result_type operator()[Generator](Generator&) except +
68
+ result_type min() except +
69
+ result_type max() except +
70
+
71
+
72
+ cdef cppclass negative_binomial_distribution[T]:
73
+ ctypedef T result_type
74
+ negative_binomial_distribution() except +
75
+ negative_binomial_distribution(T, double) except +
76
+ result_type operator()[Generator](Generator&) except +
77
+ result_type min() except +
78
+ result_type max() except +
79
+
80
+ cdef cppclass poisson_distribution[T]:
81
+ ctypedef T result_type
82
+ poisson_distribution() except +
83
+ poisson_distribution(double) except +
84
+ result_type operator()[Generator](Generator&) except +
85
+ result_type min() except +
86
+ result_type max() except +
87
+
88
+ cdef cppclass exponential_distribution[T]:
89
+ ctypedef T result_type
90
+ exponential_distribution() except +
91
+ exponential_distribution(result_type) except +
92
+ result_type operator()[Generator](Generator&) except +
93
+ result_type min() except +
94
+ result_type max() except +
95
+
96
+ cdef cppclass gamma_distribution[T]:
97
+ ctypedef T result_type
98
+ gamma_distribution() except +
99
+ gamma_distribution(result_type, result_type) except +
100
+ result_type operator()[Generator](Generator&) except +
101
+ result_type min() except +
102
+ result_type max() except +
103
+
104
+ cdef cppclass weibull_distribution[T]:
105
+ ctypedef T result_type
106
+ weibull_distribution() except +
107
+ weibull_distribution(result_type, result_type) except +
108
+ result_type operator()[Generator](Generator&) except +
109
+ result_type min() except +
110
+ result_type max() except +
111
+
112
+ cdef cppclass extreme_value_distribution[T]:
113
+ ctypedef T result_type
114
+ extreme_value_distribution() except +
115
+ extreme_value_distribution(result_type, result_type) except +
116
+ result_type operator()[Generator](Generator&) except +
117
+ result_type min() except +
118
+ result_type max() except +
119
+
120
+ cdef cppclass normal_distribution[T]:
121
+ ctypedef T result_type
122
+ normal_distribution() except +
123
+ normal_distribution(result_type, result_type) except +
124
+ result_type operator()[Generator](Generator&) except +
125
+ result_type min() except +
126
+ result_type max() except +
127
+
128
+ cdef cppclass lognormal_distribution[T]:
129
+ ctypedef T result_type
130
+ lognormal_distribution() except +
131
+ lognormal_distribution(result_type, result_type) except +
132
+ result_type operator()[Generator](Generator&) except +
133
+ result_type min() except +
134
+ result_type max() except +
135
+
136
+ cdef cppclass chi_squared_distribution[T]:
137
+ ctypedef T result_type
138
+ chi_squared_distribution() except +
139
+ chi_squared_distribution(result_type) except +
140
+ result_type operator()[Generator](Generator&) except +
141
+ result_type min() except +
142
+ result_type max() except +
143
+
144
+ cdef cppclass cauchy_distribution[T]:
145
+ ctypedef T result_type
146
+ cauchy_distribution() except +
147
+ cauchy_distribution(result_type, result_type) except +
148
+ result_type operator()[Generator](Generator&) except +
149
+ result_type min() except +
150
+ result_type max() except +
151
+
152
+ cdef cppclass fisher_f_distribution[T]:
153
+ ctypedef T result_type
154
+ fisher_f_distribution() except +
155
+ fisher_f_distribution(result_type, result_type) except +
156
+ result_type operator()[Generator](Generator&) except +
157
+ result_type min() except +
158
+ result_type max() except +
159
+
160
+ cdef cppclass student_t_distribution[T]:
161
+ ctypedef T result_type
162
+ student_t_distribution() except +
163
+ student_t_distribution(result_type) except +
164
+ result_type operator()[Generator](Generator&) except +
165
+ result_type min() except +
166
+ result_type max() except +
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/typeindex.pxd ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from libcpp cimport bool
2
+ from .typeinfo cimport type_info
3
+
4
+ # This class is C++11-only
5
+ cdef extern from "<typeindex>" namespace "std" nogil:
6
+ cdef cppclass type_index:
7
+ type_index(const type_info &)
8
+ const char* name()
9
+ size_t hash_code()
10
+ bool operator==(const type_index &)
11
+ bool operator!=(const type_index &)
12
+ bool operator<(const type_index &)
13
+ bool operator<=(const type_index &)
14
+ bool operator>(const type_index &)
15
+ bool operator>=(const type_index &)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/libcpp/unordered_map.pxd ADDED
@@ -0,0 +1,193 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .utility cimport pair
2
+
3
+ cdef extern from "<unordered_map>" namespace "std" nogil:
4
+ cdef cppclass unordered_map[T, U, HASH=*, PRED=*, ALLOCATOR=*]:
5
+ ctypedef T key_type
6
+ ctypedef U mapped_type
7
+ ctypedef pair[const T, U] value_type
8
+ ctypedef ALLOCATOR allocator_type
9
+
10
+ # these should really be allocator_type.size_type and
11
+ # allocator_type.difference_type to be true to the C++ definition
12
+ # but cython doesn't support deferred access on template arguments
13
+ ctypedef size_t size_type
14
+ ctypedef ptrdiff_t difference_type
15
+
16
+ cppclass iterator
17
+ cppclass iterator:
18
+ iterator() except +
19
+ iterator(iterator&) except +
20
+ # correct would be value_type& but this does not work
21
+ # well with cython's code gen
22
+ pair[T, U]& operator*()
23
+ iterator operator++()
24
+ iterator operator--()
25
+ iterator operator++(int)
26
+ iterator operator--(int)
27
+ bint operator==(iterator)
28
+ bint operator==(const_iterator)
29
+ bint operator!=(iterator)
30
+ bint operator!=(const_iterator)
31
+ cppclass const_iterator:
32
+ const_iterator() except +
33
+ const_iterator(iterator&) except +
34
+ operator=(iterator&) except +
35
+ # correct would be const value_type& but this does not work
36
+ # well with cython's code gen
37
+ const pair[T, U]& operator*()
38
+ const_iterator operator++()
39
+ const_iterator operator--()
40
+ const_iterator operator++(int)
41
+ const_iterator operator--(int)
42
+ bint operator==(iterator)
43
+ bint operator==(const_iterator)
44
+ bint operator!=(iterator)
45
+ bint operator!=(const_iterator)
46
+
47
+ unordered_map() except +
48
+ unordered_map(unordered_map&) except +
49
+ #unordered_map(key_compare&)
50
+ U& operator[](const T&)
51
+ #unordered_map& operator=(unordered_map&)
52
+ bint operator==(unordered_map&, unordered_map&)
53
+ bint operator!=(unordered_map&, unordered_map&)
54
+ bint operator<(unordered_map&, unordered_map&)
55
+ bint operator>(unordered_map&, unordered_map&)
56
+ bint operator<=(unordered_map&, unordered_map&)
57
+ bint operator>=(unordered_map&, unordered_map&)
58
+ U& at(const T&) except +
59
+ const U& const_at "at"(const T&) except +
60
+ iterator begin()
61
+ const_iterator const_begin "begin"()
62
+ const_iterator cbegin()
63
+ void clear()
64
+ size_t count(const T&)
65
+ bint empty()
66
+ iterator end()
67
+ const_iterator const_end "end"()
68
+ const_iterator cend()
69
+ pair[iterator, iterator] equal_range(const T&)
70
+ pair[const_iterator, const_iterator] const_equal_range "equal_range"(const T&)
71
+ iterator erase(iterator)
72
+ iterator const_erase "erase"(const_iterator)
73
+ iterator erase(const_iterator, const_iterator)
74
+ size_t erase(const T&)
75
+ iterator find(const T&)
76
+ const_iterator const_find "find"(const T&)
77
+ pair[iterator, bint] insert(const pair[T, U]&) except +
78
+ iterator insert(const_iterator, const pair[T, U]&) except +
79
+ void insert[InputIt](InputIt, InputIt) except +
80
+ #key_compare key_comp()
81
+ iterator lower_bound(const T&)
82
+ const_iterator const_lower_bound "lower_bound"(const T&)
83
+ size_t max_size()
84
+ size_t size()
85
+ void swap(unordered_map&)
86
+ iterator upper_bound(const T&)
87
+ const_iterator const_upper_bound "upper_bound"(const T&)
88
+ #value_compare value_comp()
89
+ void max_load_factor(float)
90
+ float max_load_factor()
91
+ float load_factor()
92
+ void rehash(size_t)
93
+ void reserve(size_t)
94
+ size_t bucket_count()
95
+ size_t max_bucket_count()
96
+ size_t bucket_size(size_t)
97
+ size_t bucket(const T&)
98
+ # C++20
99
+ bint contains(const T&)
100
+
101
+ cdef cppclass unordered_multimap[T, U, HASH=*, PRED=*, ALLOCATOR=*]:
102
+ ctypedef T key_type
103
+ ctypedef U mapped_type
104
+ ctypedef pair[const T, U] value_type
105
+ ctypedef ALLOCATOR allocator_type
106
+
107
+ # these should really be allocator_type.size_type and
108
+ # allocator_type.difference_type to be true to the C++ definition
109
+ # but cython doesn't support deferred access on template arguments
110
+ ctypedef size_t size_type
111
+ ctypedef ptrdiff_t difference_type
112
+
113
+ cppclass const_iterator
114
+ cppclass iterator:
115
+ iterator() except +
116
+ iterator(iterator&) except +
117
+ # correct would be value_type& but this does not work
118
+ # well with cython's code gen
119
+ pair[T, U]& operator*()
120
+ iterator operator++()
121
+ iterator operator++(int)
122
+ bint operator==(iterator)
123
+ bint operator==(const_iterator)
124
+ bint operator!=(iterator)
125
+ bint operator!=(const_iterator)
126
+ cppclass const_iterator:
127
+ const_iterator() except +
128
+ const_iterator(iterator&) except +
129
+ operator=(iterator&) except +
130
+ # correct would be const value_type& but this does not work
131
+ # well with cython's code gen
132
+ const pair[T, U]& operator*()
133
+ const_iterator operator++()
134
+ const_iterator operator++(int)
135
+ bint operator==(iterator)
136
+ bint operator==(const_iterator)
137
+ bint operator!=(iterator)
138
+ bint operator!=(const_iterator)
139
+
140
+ unordered_multimap() except +
141
+ unordered_multimap(const unordered_multimap&) except +
142
+ #unordered_multimap(key_compare&)
143
+ #unordered_map& operator=(unordered_multimap&)
144
+ bint operator==(const unordered_multimap&, const unordered_multimap&)
145
+ bint operator!=(const unordered_multimap&, const unordered_multimap&)
146
+ bint operator<(const unordered_multimap&, const unordered_multimap&)
147
+ bint operator>(const unordered_multimap&, const unordered_multimap&)
148
+ bint operator<=(const unordered_multimap&, const unordered_multimap&)
149
+ bint operator>=(const unordered_multimap&, const unordered_multimap&)
150
+ iterator begin()
151
+ const_iterator const_begin "begin"()
152
+ const_iterator cbegin()
153
+ #local_iterator begin(size_t)
154
+ #const_local_iterator const_begin "begin"(size_t)
155
+ void clear()
156
+ size_t count(const T&)
157
+ bint empty()
158
+ iterator end()
159
+ const_iterator const_end "end"()
160
+ const_iterator cend()
161
+ #local_iterator end(size_t)
162
+ #const_local_iterator const_end "end"(size_t)
163
+ pair[iterator, iterator] equal_range(const T&)
164
+ pair[const_iterator, const_iterator] const_equal_range "equal_range"(const T&)
165
+ iterator erase(iterator)
166
+ iterator const_erase "erase"(const_iterator)
167
+ iterator erase(const_iterator, const_iterator)
168
+ size_t erase(const T&)
169
+ iterator find(const T&)
170
+ const_iterator const_find "find"(const T&)
171
+ iterator insert(const pair[T, U]&) except +
172
+ iterator insert(const_iterator, const pair[T, U]&) except +
173
+ void insert[InputIt](InputIt, InputIt) except +
174
+ #key_compare key_comp()
175
+ iterator lower_bound(const T&)
176
+ const_iterator const_lower_bound "lower_bound"(const T&)
177
+ size_t max_size()
178
+ size_t size()
179
+ void swap(unordered_multimap&)
180
+ iterator upper_bound(const T&)
181
+ const_iterator const_upper_bound "upper_bound"(const T&)
182
+ #value_compare value_comp()
183
+ void max_load_factor(float)
184
+ float max_load_factor()
185
+ float load_factor()
186
+ void rehash(size_t)
187
+ void reserve(size_t)
188
+ size_t bucket_count()
189
+ size_t max_bucket_count()
190
+ size_t bucket_size(size_t)
191
+ size_t bucket(const T&)
192
+ # C++20
193
+ bint contains(const T&)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/chains.cpython-311.pyc ADDED
Binary file (6.48 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/matching.cpython-311.pyc ADDED
Binary file (37.6 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/reciprocity.cpython-311.pyc ADDED
Binary file (3.62 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/__init__.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Connectivity and cut algorithms
2
+ """
3
+ from .connectivity import *
4
+ from .cuts import *
5
+ from .edge_augmentation import *
6
+ from .edge_kcomponents import *
7
+ from .disjoint_paths import *
8
+ from .kcomponents import *
9
+ from .kcutsets import *
10
+ from .stoerwagner import *
11
+ from .utils import *
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/connectivity.py ADDED
@@ -0,0 +1,826 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Flow based connectivity algorithms
3
+ """
4
+
5
+ import itertools
6
+ from operator import itemgetter
7
+
8
+ import networkx as nx
9
+
10
+ # Define the default maximum flow function to use in all flow based
11
+ # connectivity algorithms.
12
+ from networkx.algorithms.flow import (
13
+ boykov_kolmogorov,
14
+ build_residual_network,
15
+ dinitz,
16
+ edmonds_karp,
17
+ shortest_augmenting_path,
18
+ )
19
+
20
+ default_flow_func = edmonds_karp
21
+
22
+ from .utils import build_auxiliary_edge_connectivity, build_auxiliary_node_connectivity
23
+
24
+ __all__ = [
25
+ "average_node_connectivity",
26
+ "local_node_connectivity",
27
+ "node_connectivity",
28
+ "local_edge_connectivity",
29
+ "edge_connectivity",
30
+ "all_pairs_node_connectivity",
31
+ ]
32
+
33
+
34
@nx._dispatch(
    graphs={"G": 0, "auxiliary?": 4, "residual?": 5},
    preserve_edge_attrs={"residual": {"capacity": float("inf")}},
    preserve_graph_attrs={"auxiliary", "residual"},
)
def local_node_connectivity(
    G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None
):
    r"""Compute the local node connectivity between nodes ``s`` and ``t``.

    The local node connectivity of two non-adjacent nodes is the minimum
    number of nodes that must be removed (together with their incident
    edges) to disconnect them.  It is obtained here as the value of a
    maximum flow on an auxiliary digraph derived from ``G`` in which each
    original node is split into an "A"/"B" pair joined by a unit-capacity
    internal arc, so that a minimum s-t cut counts nodes rather than
    edges (Ford-Fulkerson / Menger).

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph.

    s : node
        Source node.

    t : node
        Target node.

    flow_func : function, optional (default=None)
        Maximum-flow routine accepting at least a digraph, a source and a
        target, and returning a residual network following NetworkX
        conventions (see :meth:`maximum_flow`).  ``None`` selects the
        module default (:meth:`edmonds_karp`); the default may change
        between versions and should not be relied on.

    auxiliary : NetworkX DiGraph, optional (default=None)
        Auxiliary digraph for node connectivity.  Must carry a graph
        attribute ``mapping`` translating node names of ``G`` into node
        names of the auxiliary digraph.  Supplying it avoids rebuilding
        the structure on every call.

    residual : NetworkX DiGraph, optional (default=None)
        Residual network for the maximum-flow computation, reused if
        provided.

    cutoff : integer, float, or None (default=None)
        Stop the flow computation once the flow value reaches or exceeds
        this bound.  Only honored by flow functions that support it;
        silently ignored otherwise.

    Returns
    -------
    K : integer
        Local node connectivity for nodes ``s`` and ``t``.

    Raises
    ------
    NetworkXError
        If ``auxiliary`` lacks the required ``mapping`` graph attribute.

    See also
    --------
    :meth:`local_edge_connectivity`
    :meth:`node_connectivity`
    :meth:`minimum_node_cut`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    References
    ----------
    .. [1] Kammer, Frank and Hanjo Taubig. Graph Connectivity. in Brandes and
        Erlebach, 'Network Analysis: Methodological Foundations', Lecture
        Notes in Computer Science, Volume 3418, Springer-Verlag, 2005.
        http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf
    """
    if flow_func is None:
        flow_func = default_flow_func

    H = build_auxiliary_node_connectivity(G) if auxiliary is None else auxiliary

    mapping = H.graph.get("mapping", None)
    if mapping is None:
        raise nx.NetworkXError("Invalid auxiliary digraph.")

    kwargs = {"flow_func": flow_func, "residual": residual}
    # Forward ``cutoff`` only to flow routines known to accept it; the
    # shortest-augmenting-path variant additionally benefits from its
    # two-phase mode here.
    if flow_func is shortest_augmenting_path:
        kwargs["cutoff"] = cutoff
        kwargs["two_phase"] = True
    elif flow_func in (edmonds_karp, dinitz, boykov_kolmogorov):
        kwargs["cutoff"] = cutoff

    # Flow from the "B" copy of s to the "A" copy of t so every internal
    # node arc (capacity 1) on a path counts toward the cut.
    return nx.maximum_flow_value(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs)
215
+
216
+
217
@nx._dispatch
def node_connectivity(G, s=None, t=None, flow_func=None):
    r"""Return the node connectivity of a graph or digraph ``G``.

    Node connectivity is the minimum number of nodes whose removal
    disconnects ``G`` or renders it trivial.  When both ``s`` and ``t``
    are given, the local node connectivity between that pair is returned
    instead: the minimum number of nodes that must be removed to break
    all ``s``-``t`` paths.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph.

    s : node, optional (default=None)
        Source node.

    t : node, optional (default=None)
        Target node.

    flow_func : function, optional (default=None)
        Maximum-flow routine accepting at least a digraph, a source and a
        target, and returning a residual network following NetworkX
        conventions (see :meth:`maximum_flow`).  ``None`` selects the
        module default (:meth:`edmonds_karp`); the default may change
        between versions and should not be relied on.

    Returns
    -------
    K : integer
        Node connectivity of ``G``, or the local node connectivity if
        both ``s`` and ``t`` were provided.

    Raises
    ------
    NetworkXError
        If only one of ``s``/``t`` is given, or if a given node is not
        in ``G``.

    Notes
    -----
    Flow based implementation solving
    $O((n-\delta-1+\delta(\delta-1)/2))$ maximum-flow problems on an
    auxiliary digraph, where $\delta$ is the minimum degree of ``G``.
    See :meth:`local_node_connectivity` for the auxiliary-digraph
    construction.  Based on algorithm 11 in [1]_.

    See also
    --------
    :meth:`local_node_connectivity`
    :meth:`edge_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    References
    ----------
    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
    """
    # Exactly-one-of-s,t is an error (XOR of the two "is None" tests).
    if (s is None) != (t is None):
        raise nx.NetworkXError("Both source and target must be specified.")

    # Local node connectivity when a pair was supplied.
    if s is not None and t is not None:
        for node in (s, t):
            if node not in G:
                raise nx.NetworkXError(f"node {node} not in graph")
        return local_node_connectivity(G, s, t, flow_func=flow_func)

    # Global node connectivity.
    if G.is_directed():
        if not nx.is_weakly_connected(G):
            return 0
        iter_func = itertools.permutations

        def neighbors(v):
            # Directed graphs must consider predecessors and successors.
            return itertools.chain(G.predecessors(v), G.successors(v))

    else:
        if not nx.is_connected(G):
            return 0
        iter_func = itertools.combinations
        neighbors = G.neighbors

    # Build the auxiliary digraph and residual network once and reuse
    # them for every local computation below.
    H = build_auxiliary_node_connectivity(G)
    R = build_residual_network(H, "capacity")
    kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R}

    # Node connectivity is bounded above by the minimum degree, so start
    # from a minimum-degree node.
    v, kappa = min(G.degree(), key=itemgetter(1))

    # Local connectivity between v and every node that is neither v nor
    # one of its neighbors...
    for w in set(G) - set(neighbors(v)) - {v}:
        kwargs["cutoff"] = kappa
        kappa = min(kappa, local_node_connectivity(G, v, w, **kwargs))

    # ...and between every non-adjacent pair of v's neighbors.
    for x, y in iter_func(neighbors(v), 2):
        if y in G[x]:
            continue
        kwargs["cutoff"] = kappa
        kappa = min(kappa, local_node_connectivity(G, x, y, **kwargs))

    return kappa
356
+
357
+
358
@nx._dispatch
def average_node_connectivity(G, flow_func=None):
    r"""Return the average connectivity of a graph ``G``.

    The average connectivity `\bar{\kappa}` of a graph is the mean of
    the local node connectivity over all pairs of its nodes [1]_ :

    .. math::

        \bar{\kappa}(G) = \frac{\sum_{u,v} \kappa_{G}(u,v)}{{n \choose 2}}

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph.

    flow_func : function, optional (default=None)
        Maximum-flow routine accepting at least a digraph, a source and a
        target, and returning a residual network following NetworkX
        conventions (see :meth:`maximum_flow`).  ``None`` selects the
        module default (:meth:`edmonds_karp`); see
        :meth:`local_node_connectivity` for details.  The default may
        change between versions and should not be relied on.

    Returns
    -------
    K : float
        Average node connectivity of ``G`` (``0`` for the null graph).

    See also
    --------
    :meth:`local_node_connectivity`
    :meth:`node_connectivity`
    :meth:`edge_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    References
    ----------
    .. [1] Beineke, L., O. Oellermann, and R. Pippert (2002). The average
        connectivity of a graph. Discrete mathematics 252(1-3), 31-45.
        http://www.sciencedirect.com/science/article/pii/S0012365X01001807
    """
    # Ordered pairs for digraphs, unordered pairs otherwise.
    iter_func = itertools.permutations if G.is_directed() else itertools.combinations

    # Build the auxiliary digraph and residual network once; reuse them
    # for every pairwise computation.
    H = build_auxiliary_node_connectivity(G)
    R = build_residual_network(H, "capacity")
    kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R}

    total = pairs = 0
    for u, v in iter_func(G, 2):
        total += local_node_connectivity(G, u, v, **kwargs)
        pairs += 1

    # The null graph yields no pairs; avoid dividing by zero.
    return total / pairs if pairs else 0
425
+
426
+
427
@nx._dispatch
def all_pairs_node_connectivity(G, nbunch=None, flow_func=None):
    """Compute node connectivity between all pairs of nodes of G.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph.

    nbunch : container, optional (default=None)
        Container of nodes.  If provided, node connectivity is computed
        only over pairs of nodes in ``nbunch``.

    flow_func : function, optional (default=None)
        Maximum-flow routine accepting at least a digraph, a source and a
        target, and returning a residual network following NetworkX
        conventions (see :meth:`maximum_flow`).  ``None`` selects the
        module default (:meth:`edmonds_karp`); the default may change
        between versions and should not be relied on.

    Returns
    -------
    all_pairs : dict
        Dictionary of dictionaries with the node connectivity between
        all pairs of nodes of ``G`` (or of ``nbunch`` if provided).

    See also
    --------
    :meth:`local_node_connectivity`
    :meth:`edge_connectivity`
    :meth:`local_edge_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    """
    if nbunch is None:
        nbunch = G
    else:
        nbunch = set(nbunch)

    directed = G.is_directed()
    if directed:
        iter_func = itertools.permutations
    else:
        iter_func = itertools.combinations

    all_pairs = {n: {} for n in nbunch}

    # Build the auxiliary digraph and residual network once and reuse
    # them for every pairwise flow computation.
    # (Fix: dropped an unused local that read H.graph["mapping"] without
    # ever using it -- local_node_connectivity performs its own lookup.)
    H = build_auxiliary_node_connectivity(G)
    R = build_residual_network(H, "capacity")
    kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R}

    for u, v in iter_func(nbunch, 2):
        K = local_node_connectivity(G, u, v, **kwargs)
        all_pairs[u][v] = K
        if not directed:
            # Undirected connectivity is symmetric; record both orders.
            all_pairs[v][u] = K

    return all_pairs
493
+
494
+
495
@nx._dispatch(
    graphs={"G": 0, "auxiliary?": 4, "residual?": 5},
    preserve_edge_attrs={"residual": {"capacity": float("inf")}},
    preserve_graph_attrs={"residual"},
)
def local_edge_connectivity(
    G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None
):
    r"""Return the local edge connectivity of nodes ``s`` and ``t`` in ``G``.

    The local edge connectivity of two nodes is the minimum number of
    edges whose removal disconnects them.  It equals the value of a
    maximum ``s``-``t`` flow on an auxiliary digraph with unit edge
    capacities, by the Ford-Fulkerson max-flow/min-cut theorem [1]_ :
    each undirected edge becomes two reciprocal unit-capacity arcs,
    while a directed input graph simply gets the capacity attribute
    (algorithm 1 in [1]_).

    Parameters
    ----------
    G : NetworkX graph
        Undirected or directed graph.

    s : node
        Source node.

    t : node
        Target node.

    flow_func : function, optional (default=None)
        Maximum-flow routine accepting at least a digraph, a source and a
        target, and returning a residual network following NetworkX
        conventions (see :meth:`maximum_flow`).  ``None`` selects the
        module default (:meth:`edmonds_karp`); the default may change
        between versions and should not be relied on.

    auxiliary : NetworkX DiGraph, optional (default=None)
        Auxiliary digraph for edge connectivity, reused if provided
        instead of being rebuilt.

    residual : NetworkX DiGraph, optional (default=None)
        Residual network for the maximum-flow computation, reused if
        provided.

    cutoff : integer, float, or None (default=None)
        Stop the flow computation once the flow value reaches or exceeds
        this bound.  Only honored by flow functions that support it;
        silently ignored otherwise.

    Returns
    -------
    K : integer
        Local edge connectivity for nodes ``s`` and ``t``.

    See also
    --------
    :meth:`edge_connectivity`
    :meth:`local_node_connectivity`
    :meth:`node_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    References
    ----------
    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
    """
    if flow_func is None:
        flow_func = default_flow_func

    H = build_auxiliary_edge_connectivity(G) if auxiliary is None else auxiliary

    kwargs = {"flow_func": flow_func, "residual": residual}
    # Forward ``cutoff`` only to flow routines known to accept it; the
    # shortest-augmenting-path variant additionally benefits from its
    # two-phase mode here.
    if flow_func is shortest_augmenting_path:
        kwargs["cutoff"] = cutoff
        kwargs["two_phase"] = True
    elif flow_func in (edmonds_karp, dinitz, boykov_kolmogorov):
        kwargs["cutoff"] = cutoff

    return nx.maximum_flow_value(H, s, t, **kwargs)
659
+
660
+
661
@nx._dispatch
def edge_connectivity(G, s=None, t=None, flow_func=None, cutoff=None):
    r"""Return the edge connectivity of the graph or digraph ``G``.

    Edge connectivity is the minimum number of edges whose removal
    disconnects ``G`` or renders it trivial.  When both ``s`` and ``t``
    are given, the local edge connectivity between that pair is returned
    instead: the minimum number of edges that must be removed to break
    all ``s``-``t`` paths.

    Parameters
    ----------
    G : NetworkX graph
        Undirected or directed graph.

    s : node, optional (default=None)
        Source node.

    t : node, optional (default=None)
        Target node.

    flow_func : function, optional (default=None)
        Maximum-flow routine accepting at least a digraph, a source and a
        target, and returning a residual network following NetworkX
        conventions (see :meth:`maximum_flow`).  ``None`` selects the
        module default (:meth:`edmonds_karp`); the default may change
        between versions and should not be relied on.

    cutoff : integer, float, or None (default=None)
        Stop each flow computation once the flow value reaches or
        exceeds this bound.  Only honored by flow functions that support
        it; silently ignored otherwise.

    Returns
    -------
    K : integer
        Edge connectivity of ``G``, or the local edge connectivity if
        both ``s`` and ``t`` were provided.

    Raises
    ------
    NetworkXError
        If only one of ``s``/``t`` is given, or if a given node is not
        in ``G``.

    Notes
    -----
    Flow based implementation of global edge connectivity.  For
    undirected graphs it finds a 'small' dominating set (algorithm 7 in
    [1]_) and computes local maximum flows between an arbitrary member
    and the rest of the set (algorithm 6 in [1]_).  For directed graphs
    it makes ``n`` maximum-flow calls around a ring of the nodes
    (algorithm 8 in [1]_).

    See also
    --------
    :meth:`local_edge_connectivity`
    :meth:`local_node_connectivity`
    :meth:`node_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`
    :meth:`k_edge_components`
    :meth:`k_edge_subgraphs`

    References
    ----------
    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
    """
    # Exactly-one-of-s,t is an error (XOR of the two "is None" tests).
    if (s is None) != (t is None):
        raise nx.NetworkXError("Both source and target must be specified.")

    # Local edge connectivity when a pair was supplied.
    if s is not None and t is not None:
        for node in (s, t):
            if node not in G:
                raise nx.NetworkXError(f"node {node} not in graph")
        return local_edge_connectivity(G, s, t, flow_func=flow_func, cutoff=cutoff)

    # Global edge connectivity: build the auxiliary digraph and residual
    # network once and reuse them for every local computation.
    H = build_auxiliary_edge_connectivity(G)
    R = build_residual_network(H, "capacity")
    kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R}

    if G.is_directed():
        # Algorithm 8 in [1]
        if not nx.is_weakly_connected(G):
            return 0

        # The minimum degree is an upper bound for \lambda.
        lam = min(d for _, d in G.degree())
        nodes = list(G)
        n = len(nodes)

        if cutoff is not None:
            lam = min(cutoff, lam)

        # One flow computation per node, around a ring: node i to node
        # (i + 1) mod n wraps the last node back to the first.
        for i in range(n):
            kwargs["cutoff"] = lam
            lam = min(
                lam, local_edge_connectivity(G, nodes[i], nodes[(i + 1) % n], **kwargs)
            )
        return lam
    else:  # undirected
        # Algorithm 6 in [1]
        if not nx.is_connected(G):
            return 0

        # The minimum degree is an upper bound for \lambda.
        lam = min(d for _, d in G.degree())

        if cutoff is not None:
            lam = min(cutoff, lam)

        # A dominating set is \lambda-covering; we need one with at
        # least two nodes.
        for node in G:
            D = nx.dominating_set(G, start_with=node)
            v = D.pop()
            if D:
                break
        else:
            # In complete graphs every dominating set is a single node,
            # so the minimum degree bound is the answer.
            return lam

        for w in D:
            kwargs["cutoff"] = lam
            lam = min(lam, local_edge_connectivity(G, v, w, **kwargs))

        return lam
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/disjoint_paths.py ADDED
@@ -0,0 +1,412 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Flow based node and edge disjoint paths."""
2
+ import networkx as nx
3
+
4
+ # Define the default maximum flow function to use for the underlying
5
+ # maximum flow computations
6
+ from networkx.algorithms.flow import (
7
+ edmonds_karp,
8
+ preflow_push,
9
+ shortest_augmenting_path,
10
+ )
11
+ from networkx.exception import NetworkXNoPath
12
+
13
+ default_flow_func = edmonds_karp
14
+ from itertools import filterfalse as _filterfalse
15
+
16
+ # Functions to build auxiliary data structures.
17
+ from .utils import build_auxiliary_edge_connectivity, build_auxiliary_node_connectivity
18
+
19
+ __all__ = ["edge_disjoint_paths", "node_disjoint_paths"]
20
+
21
+
22
@nx._dispatch(
    graphs={"G": 0, "auxiliary?": 5, "residual?": 6},
    preserve_edge_attrs={
        "auxiliary": {"capacity": float("inf")},
        "residual": {"capacity": float("inf")},
    },
    preserve_graph_attrs={"residual"},
)
def edge_disjoint_paths(
    G, s, t, flow_func=None, cutoff=None, auxiliary=None, residual=None
):
    """Returns the edges disjoint paths between source and target.

    Edge disjoint paths are paths that do not share any edge. The
    number of edge disjoint paths between source and target is equal
    to their edge connectivity.

    Parameters
    ----------
    G : NetworkX graph

    s : node
        Source node for the flow.

    t : node
        Sink node for the flow.

    flow_func : function
        A function for computing the maximum flow among a pair of nodes.
        The function has to accept at least three parameters: a Digraph,
        a source node, and a target node. And return a residual network
        that follows NetworkX conventions (see :meth:`maximum_flow` for
        details). If flow_func is None, the default maximum flow function
        (:meth:`edmonds_karp`) is used. The choice of the default function
        may change from version to version and should not be relied on.
        Default value: None.

    cutoff : integer or None (default: None)
        Maximum number of paths to yield. If specified, the maximum flow
        algorithm will terminate when the flow value reaches or exceeds the
        cutoff. This only works for flows that support the cutoff parameter
        (most do) and is ignored otherwise.

    auxiliary : NetworkX DiGraph
        Auxiliary digraph to compute flow based edge connectivity. It has
        to have a graph attribute called mapping with a dictionary mapping
        node names in G and in the auxiliary digraph. If provided
        it will be reused instead of recreated. Default value: None.

    residual : NetworkX DiGraph
        Residual network to compute maximum flow. If provided it will be
        reused instead of recreated. Default value: None.

    Returns
    -------
    paths : generator
        A generator of edge independent paths.

    Raises
    ------
    NetworkXNoPath
        If there is no path between source and target.

    NetworkXError
        If source or target are not in the graph G.

    See also
    --------
    :meth:`node_disjoint_paths`
    :meth:`edge_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    Examples
    --------
    We use in this example the platonic icosahedral graph, which has node
    edge connectivity 5, thus there are 5 edge disjoint paths between any
    pair of nodes.

    >>> G = nx.icosahedral_graph()
    >>> len(list(nx.edge_disjoint_paths(G, 0, 6)))
    5


    If you need to compute edge disjoint paths on several pairs of
    nodes in the same graph, it is recommended that you reuse the
    data structures that NetworkX uses in the computation: the
    auxiliary digraph for edge connectivity, and the residual
    network for the underlying maximum flow computation.

    Example of how to compute edge disjoint paths among all pairs of
    nodes of the platonic icosahedral graph reusing the data
    structures.

    >>> import itertools
    >>> # You also have to explicitly import the function for
    >>> # building the auxiliary digraph from the connectivity package
    >>> from networkx.algorithms.connectivity import build_auxiliary_edge_connectivity
    >>> H = build_auxiliary_edge_connectivity(G)
    >>> # And the function for building the residual network from the
    >>> # flow package
    >>> from networkx.algorithms.flow import build_residual_network
    >>> # Note that the auxiliary digraph has an edge attribute named capacity
    >>> R = build_residual_network(H, "capacity")
    >>> result = {n: {} for n in G}
    >>> # Reuse the auxiliary digraph and the residual network by passing them
    >>> # as arguments
    >>> for u, v in itertools.combinations(G, 2):
    ...     k = len(list(nx.edge_disjoint_paths(G, u, v, auxiliary=H, residual=R)))
    ...     result[u][v] = k
    >>> all(result[u][v] == 5 for u, v in itertools.combinations(G, 2))
    True

    You can also use alternative flow algorithms for computing edge disjoint
    paths. For instance, in dense networks the algorithm
    :meth:`shortest_augmenting_path` will usually perform better than
    the default :meth:`edmonds_karp` which is faster for sparse
    networks with highly skewed degree distributions. Alternative flow
    functions have to be explicitly imported from the flow package.

    >>> from networkx.algorithms.flow import shortest_augmenting_path
    >>> len(list(nx.edge_disjoint_paths(G, 0, 6, flow_func=shortest_augmenting_path)))
    5

    Notes
    -----
    This is a flow based implementation of edge disjoint paths. We compute
    the maximum flow between source and target on an auxiliary directed
    network. The saturated edges in the residual network after running the
    maximum flow algorithm correspond to edge disjoint paths between source
    and target in the original network. This function handles both directed
    and undirected graphs, and can use all flow algorithms from NetworkX flow
    package.

    """
    if s not in G:
        raise nx.NetworkXError(f"node {s} not in graph")
    if t not in G:
        raise nx.NetworkXError(f"node {t} not in graph")

    if flow_func is None:
        flow_func = default_flow_func

    # Reuse the auxiliary digraph when the caller provides one.
    if auxiliary is None:
        H = build_auxiliary_edge_connectivity(G)
    else:
        H = auxiliary

    # Maximum possible edge disjoint paths: bounded by the out-degree of s
    # and the in-degree of t in the auxiliary digraph.
    possible = min(H.out_degree(s), H.in_degree(t))
    if not possible:
        raise NetworkXNoPath

    if cutoff is None:
        cutoff = possible
    else:
        cutoff = min(cutoff, possible)

    # Compute maximum flow between source and target. Flow functions in
    # NetworkX return a residual network.
    kwargs = {
        "capacity": "capacity",
        "residual": residual,
        "cutoff": cutoff,
        "value_only": True,
    }
    if flow_func is preflow_push:
        # preflow_push does not accept a cutoff keyword argument.
        del kwargs["cutoff"]
    if flow_func is shortest_augmenting_path:
        kwargs["two_phase"] = True
    R = flow_func(H, s, t, **kwargs)

    if R.graph["flow_value"] == 0:
        raise NetworkXNoPath

    # Saturated edges in the residual network form the edge disjoint paths
    # between source and target
    cutset = [
        (u, v)
        for u, v, d in R.edges(data=True)
        if d["capacity"] == d["flow"] and d["flow"] > 0
    ]
    # This is equivalent of what flow.utils.build_flow_dict returns, but
    # only for the nodes with saturated edges and without reporting 0 flows.
    flow_dict = {n: {} for edge in cutset for n in edge}
    for u, v in cutset:
        flow_dict[u][v] = 1

    # Rebuild the edge disjoint paths from the flow dictionary.
    paths_found = 0
    for v in list(flow_dict[s]):
        if paths_found >= cutoff:
            # preflow_push does not support cutoff: we have to
            # keep track of the paths founds and stop at cutoff.
            break
        path = [s]
        if v == t:
            # Saturated edge directly from s to t: a one-edge path.
            # NOTE(review): paths_found is not incremented on this branch,
            # so one-edge paths do not count toward the cutoff — confirm
            # this is intended.
            path.append(v)
            yield path
            continue
        u = v
        while u != t:
            path.append(u)
            try:
                # popitem consumes the saturated edge so it cannot be
                # reused by a later path (paths must be edge disjoint).
                u, _ = flow_dict[u].popitem()
            except KeyError:
                # Dead end: this walk does not reach t, discard it.
                break
        else:
            path.append(t)
            yield path
            paths_found += 1
235
+
236
+
237
@nx._dispatch(
    graphs={"G": 0, "auxiliary?": 5, "residual?": 6},
    preserve_edge_attrs={"residual": {"capacity": float("inf")}},
    preserve_node_attrs={"auxiliary": {"id": None}},
    preserve_graph_attrs={"auxiliary", "residual"},
)
def node_disjoint_paths(
    G, s, t, flow_func=None, cutoff=None, auxiliary=None, residual=None
):
    r"""Computes node disjoint paths between source and target.

    Node disjoint paths are paths that only share their first and last
    nodes. The number of node independent paths between two nodes is
    equal to their local node connectivity.

    Parameters
    ----------
    G : NetworkX graph

    s : node
        Source node.

    t : node
        Target node.

    flow_func : function
        A function for computing the maximum flow among a pair of nodes.
        The function has to accept at least three parameters: a Digraph,
        a source node, and a target node. And return a residual network
        that follows NetworkX conventions (see :meth:`maximum_flow` for
        details). If flow_func is None, the default maximum flow function
        (:meth:`edmonds_karp`) is used. See below for details. The choice
        of the default function may change from version to version and
        should not be relied on. Default value: None.

    cutoff : integer or None (default: None)
        Maximum number of paths to yield. If specified, the maximum flow
        algorithm will terminate when the flow value reaches or exceeds the
        cutoff. This only works for flows that support the cutoff parameter
        (most do) and is ignored otherwise.

    auxiliary : NetworkX DiGraph
        Auxiliary digraph to compute flow based node connectivity. It has
        to have a graph attribute called mapping with a dictionary mapping
        node names in G and in the auxiliary digraph. If provided
        it will be reused instead of recreated. Default value: None.

    residual : NetworkX DiGraph
        Residual network to compute maximum flow. If provided it will be
        reused instead of recreated. Default value: None.

    Returns
    -------
    paths : generator
        Generator of node disjoint paths.

    Raises
    ------
    NetworkXNoPath
        If there is no path between source and target.

    NetworkXError
        If source or target are not in the graph G.

    Examples
    --------
    We use in this example the platonic icosahedral graph, which has node
    connectivity 5, thus there are 5 node disjoint paths between any pair
    of non neighbor nodes.

    >>> G = nx.icosahedral_graph()
    >>> len(list(nx.node_disjoint_paths(G, 0, 6)))
    5

    If you need to compute node disjoint paths between several pairs of
    nodes in the same graph, it is recommended that you reuse the
    data structures that NetworkX uses in the computation: the
    auxiliary digraph for node connectivity and node cuts, and the
    residual network for the underlying maximum flow computation.

    Example of how to compute node disjoint paths reusing the data
    structures:

    >>> # You also have to explicitly import the function for
    >>> # building the auxiliary digraph from the connectivity package
    >>> from networkx.algorithms.connectivity import build_auxiliary_node_connectivity
    >>> H = build_auxiliary_node_connectivity(G)
    >>> # And the function for building the residual network from the
    >>> # flow package
    >>> from networkx.algorithms.flow import build_residual_network
    >>> # Note that the auxiliary digraph has an edge attribute named capacity
    >>> R = build_residual_network(H, "capacity")
    >>> # Reuse the auxiliary digraph and the residual network by passing them
    >>> # as arguments
    >>> len(list(nx.node_disjoint_paths(G, 0, 6, auxiliary=H, residual=R)))
    5

    You can also use alternative flow algorithms for computing node disjoint
    paths. For instance, in dense networks the algorithm
    :meth:`shortest_augmenting_path` will usually perform better than
    the default :meth:`edmonds_karp` which is faster for sparse
    networks with highly skewed degree distributions. Alternative flow
    functions have to be explicitly imported from the flow package.

    >>> from networkx.algorithms.flow import shortest_augmenting_path
    >>> len(list(nx.node_disjoint_paths(G, 0, 6, flow_func=shortest_augmenting_path)))
    5

    Notes
    -----
    This is a flow based implementation of node disjoint paths. We compute
    the maximum flow between source and target on an auxiliary directed
    network. The saturated edges in the residual network after running the
    maximum flow algorithm correspond to node disjoint paths between source
    and target in the original network. This function handles both directed
    and undirected graphs, and can use all flow algorithms from NetworkX flow
    package.

    See also
    --------
    :meth:`edge_disjoint_paths`
    :meth:`node_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    """
    if s not in G:
        raise nx.NetworkXError(f"node {s} not in graph")
    if t not in G:
        raise nx.NetworkXError(f"node {t} not in graph")

    # Reuse the auxiliary digraph when the caller provides one.
    if auxiliary is None:
        H = build_auxiliary_node_connectivity(G)
    else:
        H = auxiliary

    # A valid auxiliary digraph carries a "mapping" graph attribute that
    # translates node names in G to node ids in H.
    mapping = H.graph.get("mapping", None)
    if mapping is None:
        raise nx.NetworkXError("Invalid auxiliary digraph.")

    # Maximum possible edge disjoint paths
    # (In the auxiliary digraph each node n of G appears as a pair
    # f"{id}A"/f"{id}B" — see build_auxiliary_node_connectivity.)
    possible = min(H.out_degree(f"{mapping[s]}B"), H.in_degree(f"{mapping[t]}A"))
    if not possible:
        raise NetworkXNoPath

    if cutoff is None:
        cutoff = possible
    else:
        cutoff = min(cutoff, possible)

    kwargs = {
        "flow_func": flow_func,
        "residual": residual,
        "auxiliary": H,
        "cutoff": cutoff,
    }

    # The edge disjoint paths in the auxiliary digraph correspond to the node
    # disjoint paths in the original graph.
    paths_edges = edge_disjoint_paths(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs)
    for path in paths_edges:
        # Each node in the original graph maps to two nodes in auxiliary graph
        yield list(_unique_everseen(H.nodes[node]["id"] for node in path))
402
+
403
+
404
+ def _unique_everseen(iterable):
405
+ # Adapted from https://docs.python.org/3/library/itertools.html examples
406
+ "List unique elements, preserving order. Remember all elements ever seen."
407
+ # unique_everseen('AAAABBBCCDAABBB') --> A B C D
408
+ seen = set()
409
+ seen_add = seen.add
410
+ for element in _filterfalse(seen.__contains__, iterable):
411
+ seen_add(element)
412
+ yield element
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/edge_augmentation.py ADDED
@@ -0,0 +1,1269 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Algorithms for finding k-edge-augmentations
3
+
4
+ A k-edge-augmentation is a set of edges, that once added to a graph, ensures
5
+ that the graph is k-edge-connected; i.e. the graph cannot be disconnected
6
+ unless k or more edges are removed. Typically, the goal is to find the
7
+ augmentation with minimum weight. In general, it is not guaranteed that a
8
+ k-edge-augmentation exists.
9
+
10
+ See Also
11
+ --------
12
+ :mod:`edge_kcomponents` : algorithms for finding k-edge-connected components
13
+ :mod:`connectivity` : algorithms for determining edge connectivity.
14
+ """
15
+ import itertools as it
16
+ import math
17
+ from collections import defaultdict, namedtuple
18
+
19
+ import networkx as nx
20
+ from networkx.utils import not_implemented_for, py_random_state
21
+
22
+ __all__ = ["k_edge_augmentation", "is_k_edge_connected", "is_locally_k_edge_connected"]
23
+
24
+
25
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatch
def is_k_edge_connected(G, k):
    """Tests to see if a graph is k-edge-connected.

    Is it impossible to disconnect the graph by removing fewer than k edges?
    If so, then G is k-edge-connected.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    k : integer
        edge connectivity to test for

    Returns
    -------
    boolean
        True if G is k-edge-connected.

    See Also
    --------
    :func:`is_locally_k_edge_connected`

    Examples
    --------
    >>> G = nx.barbell_graph(10, 0)
    >>> nx.is_k_edge_connected(G, k=1)
    True
    >>> nx.is_k_edge_connected(G, k=2)
    False
    """
    if k < 1:
        raise ValueError(f"k must be positive, not {k}")
    # Cheap necessary conditions first: a k-edge-connected graph needs at
    # least k + 1 nodes and minimum degree at least k.
    if G.number_of_nodes() < k + 1:
        return False
    if any(deg < k for _, deg in G.degree()):
        return False
    # Full check, with specialized fast paths for small k.
    if k == 1:
        return nx.is_connected(G)
    if k == 2:
        return nx.is_connected(G) and not nx.has_bridges(G)
    return nx.edge_connectivity(G, cutoff=k) >= k
74
+
75
+
76
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatch
def is_locally_k_edge_connected(G, s, t, k):
    """Tests to see if an edge in a graph is locally k-edge-connected.

    Is it impossible to disconnect s and t by removing fewer than k edges?
    If so, then s and t are locally k-edge-connected in G.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    s : node
        Source node

    t : node
        Target node

    k : integer
        local edge connectivity for nodes s and t

    Returns
    -------
    boolean
        True if s and t are locally k-edge-connected in G.

    See Also
    --------
    :func:`is_k_edge_connected`

    Examples
    --------
    >>> from networkx.algorithms.connectivity import is_locally_k_edge_connected
    >>> G = nx.barbell_graph(10, 0)
    >>> is_locally_k_edge_connected(G, 5, 15, k=1)
    True
    >>> is_locally_k_edge_connected(G, 5, 15, k=2)
    False
    >>> is_locally_k_edge_connected(G, 1, 5, k=2)
    True
    """
    if k < 1:
        raise ValueError(f"k must be positive, not {k}")

    # The degree of either endpoint is an upper bound on the local edge
    # connectivity, so this rejects cheaply without a flow computation.
    if G.degree(s) < k or G.degree(t) < k:
        return False

    # Full check; k == 1 reduces to simple reachability.
    if k == 1:
        return nx.has_path(G, s, t)
    return nx.connectivity.local_edge_connectivity(G, s, t, cutoff=k) >= k
132
+
133
+
134
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatch
def k_edge_augmentation(G, k, avail=None, weight=None, partial=False):
    """Finds set of edges to k-edge-connect G.

    Adding edges from the augmentation to G make it impossible to disconnect G
    unless k or more edges are removed. This function uses the most efficient
    function available (depending on the value of k and if the problem is
    weighted or unweighted) to search for a minimum weight subset of available
    edges that k-edge-connects G. In general, finding a k-edge-augmentation is
    NP-hard, so solutions are not guaranteed to be minimal. Furthermore, a
    k-edge-augmentation may not exist.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    k : integer
        Desired edge connectivity

    avail : dict or a set of 2 or 3 tuples
        The available edges that can be used in the augmentation.

        If unspecified, then all edges in the complement of G are available.
        Otherwise, each item is an available edge (with an optional weight).

        In the unweighted case, each item is an edge ``(u, v)``.

        In the weighted case, each item is a 3-tuple ``(u, v, d)`` or a dict
        with items ``(u, v): d``. The third item, ``d``, can be a dictionary
        or a real number. If ``d`` is a dictionary ``d[weight]``
        correspondings to the weight.

    weight : string
        key to use to find weights if ``avail`` is a set of 3-tuples where the
        third item in each tuple is a dictionary.

    partial : boolean
        If partial is True and no feasible k-edge-augmentation exists, then all
        a partial k-edge-augmentation is generated. Adding the edges in a
        partial augmentation to G, minimizes the number of k-edge-connected
        components and maximizes the edge connectivity between those
        components. For details, see :func:`partial_k_edge_augmentation`.

    Yields
    ------
    edge : tuple
        Edges that, once added to G, would cause G to become k-edge-connected.
        If partial is False, an error is raised if this is not possible.
        Otherwise, generated edges form a partial augmentation, which
        k-edge-connects any part of G where it is possible, and maximally
        connects the remaining parts.

    Raises
    ------
    NetworkXUnfeasible
        If partial is False and no k-edge-augmentation exists.

    NetworkXNotImplemented
        If the input graph is directed or a multigraph.

    ValueError:
        If k is less than 1

    Notes
    -----
    When k=1 this returns an optimal solution.

    When k=2 and ``avail`` is None, this returns an optimal solution.
    Otherwise when k=2, this returns a 2-approximation of the optimal solution.

    For k>3, this problem is NP-hard and this uses a randomized algorithm that
    produces a feasible solution, but provides no guarantees on the
    solution weight.

    Examples
    --------
    >>> # Unweighted cases
    >>> G = nx.path_graph((1, 2, 3, 4))
    >>> G.add_node(5)
    >>> sorted(nx.k_edge_augmentation(G, k=1))
    [(1, 5)]
    >>> sorted(nx.k_edge_augmentation(G, k=2))
    [(1, 5), (5, 4)]
    >>> sorted(nx.k_edge_augmentation(G, k=3))
    [(1, 4), (1, 5), (2, 5), (3, 5), (4, 5)]
    >>> complement = list(nx.k_edge_augmentation(G, k=5, partial=True))
    >>> G.add_edges_from(complement)
    >>> nx.edge_connectivity(G)
    4

    >>> # Weighted cases
    >>> G = nx.path_graph((1, 2, 3, 4))
    >>> G.add_node(5)
    >>> # avail can be a tuple with a dict
    >>> avail = [(1, 5, {"weight": 11}), (2, 5, {"weight": 10})]
    >>> sorted(nx.k_edge_augmentation(G, k=1, avail=avail, weight="weight"))
    [(2, 5)]
    >>> # or avail can be a 3-tuple with a real number
    >>> avail = [(1, 5, 11), (2, 5, 10), (4, 3, 1), (4, 5, 51)]
    >>> sorted(nx.k_edge_augmentation(G, k=2, avail=avail))
    [(1, 5), (2, 5), (4, 5)]
    >>> # or avail can be a dict
    >>> avail = {(1, 5): 11, (2, 5): 10, (4, 3): 1, (4, 5): 51}
    >>> sorted(nx.k_edge_augmentation(G, k=2, avail=avail))
    [(1, 5), (2, 5), (4, 5)]
    >>> # If augmentation is infeasible, then a partial solution can be found
    >>> avail = {(1, 5): 11}
    >>> sorted(nx.k_edge_augmentation(G, k=2, avail=avail, partial=True))
    [(1, 5)]
    """
    try:
        # Dispatch on k (and on whether any candidate edges exist) to the
        # most specialized augmentation routine available.
        if k <= 0:
            raise ValueError(f"k must be a positive integer, not {k}")
        elif G.number_of_nodes() < k + 1:
            msg = f"impossible to {k} connect in graph with less than {k + 1} nodes"
            raise nx.NetworkXUnfeasible(msg)
        elif avail is not None and len(avail) == 0:
            # No candidate edges: feasible only if G is already k-connected.
            if not nx.is_k_edge_connected(G, k):
                raise nx.NetworkXUnfeasible("no available edges")
            aug_edges = []
        elif k == 1:
            aug_edges = one_edge_augmentation(
                G, avail=avail, weight=weight, partial=partial
            )
        elif k == 2:
            aug_edges = bridge_augmentation(G, avail=avail, weight=weight)
        else:
            # General (NP-hard) case: randomized greedy with a fixed seed
            # for reproducibility.
            aug_edges = greedy_k_edge_augmentation(
                G, k=k, avail=avail, weight=weight, seed=0
            )
        # Do eager evaluation so we can catch any exceptions
        # Before executing partial code.
        yield from list(aug_edges)
    except nx.NetworkXUnfeasible:
        if partial:
            # Return all available edges
            if avail is None:
                aug_edges = complement_edges(G)
            else:
                # If we can't k-edge-connect the entire graph, try to
                # k-edge-connect as much as possible
                aug_edges = partial_k_edge_augmentation(
                    G, k=k, avail=avail, weight=weight
                )
            yield from aug_edges
        else:
            raise
285
+
286
+
287
@nx._dispatch
def partial_k_edge_augmentation(G, k, avail, weight=None):
    """Finds augmentation that k-edge-connects as much of the graph as possible.

    When a k-edge-augmentation is not possible, we can still try to find a
    small set of edges that partially k-edge-connects as much of the graph as
    possible. All possible edges are generated between remaining parts.
    This minimizes the number of k-edge-connected subgraphs in the resulting
    graph and maximizes the edge connectivity between those subgraphs.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    k : integer
        Desired edge connectivity

    avail : dict or a set of 2 or 3 tuples
        For more details, see :func:`k_edge_augmentation`.

    weight : string
        key to use to find weights if ``avail`` is a set of 3-tuples.
        For more details, see :func:`k_edge_augmentation`.

    Yields
    ------
    edge : tuple
        Edges in the partial augmentation of G. These edges k-edge-connect any
        part of G where it is possible, and maximally connects the remaining
        parts. In other words, all edges from avail are generated except for
        those within subgraphs that have already become k-edge-connected.

    Notes
    -----
    Construct H that augments G with all edges in avail.
    Find the k-edge-subgraphs of H.
    For each k-edge-subgraph, if the number of nodes is more than k, then find
    the k-edge-augmentation of that graph and add it to the solution. Then add
    all edges in avail between k-edge subgraphs to the solution.

    See Also
    --------
    :func:`k_edge_augmentation`

    Examples
    --------
    >>> G = nx.path_graph((1, 2, 3, 4, 5, 6, 7))
    >>> G.add_node(8)
    >>> avail = [(1, 3), (1, 4), (1, 5), (2, 4), (2, 5), (3, 5), (1, 8)]
    >>> sorted(partial_k_edge_augmentation(G, k=2, avail=avail))
    [(1, 5), (1, 8)]
    """

    def _edges_between_disjoint(H, only1, only2):
        """finds edges between disjoint nodes"""
        only1_adj = {u: set(H.adj[u]) for u in only1}
        for u, neighbs in only1_adj.items():
            # Find the neighbors of u in only1 that are also in only2
            neighbs12 = neighbs.intersection(only2)
            for v in neighbs12:
                yield (u, v)

    avail_uv, avail_w = _unpack_available_edges(avail, weight=weight, G=G)

    # Find which parts of the graph can be k-edge-connected
    H = G.copy()
    # BUGFIX: zip over the normalized (u, v) pairs, not over ``avail``
    # itself.  ``_unpack_available_edges`` both reduces weighted 3-tuples to
    # 2-tuples and filters out edges already present in G, so ``avail_w`` is
    # only aligned with ``avail_uv``; zipping against the raw ``avail`` would
    # misalign edge/weight pairs (or fail to unpack weighted 3-tuples).
    H.add_edges_from(
        (
            (u, v, {"weight": w, "generator": (u, v)})
            for (u, v), w in zip(avail_uv, avail_w)
        )
    )
    k_edge_subgraphs = list(nx.k_edge_subgraphs(H, k=k))

    # Generate edges to k-edge-connect internal subgraphs
    for nodes in k_edge_subgraphs:
        if len(nodes) > 1:
            # Get the k-edge-connected subgraph
            C = H.subgraph(nodes).copy()
            # Find the internal edges that were available
            sub_avail = {
                d["generator"]: d["weight"]
                for (u, v, d) in C.edges(data=True)
                if "generator" in d
            }
            # Remove potential augmenting edges
            C.remove_edges_from(sub_avail.keys())
            # Find a subset of these edges that makes the component
            # k-edge-connected and ignore the rest
            yield from nx.k_edge_augmentation(C, k=k, avail=sub_avail)

    # Generate all edges between CCs that could not be k-edge-connected
    for cc1, cc2 in it.combinations(k_edge_subgraphs, 2):
        for u, v in _edges_between_disjoint(H, cc1, cc2):
            d = H.get_edge_data(u, v)
            # Only candidate edges (those tagged with "generator") are
            # yielded; pre-existing edges of G between the CCs are skipped.
            edge = d.get("generator", None)
            if edge is not None:
                yield edge
386
+
387
+
388
+ @not_implemented_for("multigraph")
389
+ @not_implemented_for("directed")
390
+ @nx._dispatch
391
+ def one_edge_augmentation(G, avail=None, weight=None, partial=False):
392
+ """Finds minimum weight set of edges to connect G.
393
+
394
+ Equivalent to :func:`k_edge_augmentation` when k=1. Adding the resulting
395
+ edges to G will make it 1-edge-connected. The solution is optimal for both
396
+ weighted and non-weighted variants.
397
+
398
+ Parameters
399
+ ----------
400
+ G : NetworkX graph
401
+ An undirected graph.
402
+
403
+ avail : dict or a set of 2 or 3 tuples
404
+ For more details, see :func:`k_edge_augmentation`.
405
+
406
+ weight : string
407
+ key to use to find weights if ``avail`` is a set of 3-tuples.
408
+ For more details, see :func:`k_edge_augmentation`.
409
+
410
+ partial : boolean
411
+ If partial is True and no feasible k-edge-augmentation exists, then the
412
+ augmenting edges minimize the number of connected components.
413
+
414
+ Yields
415
+ ------
416
+ edge : tuple
417
+ Edges in the one-augmentation of G
418
+
419
+ Raises
420
+ ------
421
+ NetworkXUnfeasible
422
+ If partial is False and no one-edge-augmentation exists.
423
+
424
+ Notes
425
+ -----
426
+ Uses either :func:`unconstrained_one_edge_augmentation` or
427
+ :func:`weighted_one_edge_augmentation` depending on whether ``avail`` is
428
+ specified. Both algorithms are based on finding a minimum spanning tree.
429
+ As such both algorithms find optimal solutions and run in linear time.
430
+
431
+ See Also
432
+ --------
433
+ :func:`k_edge_augmentation`
434
+ """
435
+ if avail is None:
436
+ return unconstrained_one_edge_augmentation(G)
437
+ else:
438
+ return weighted_one_edge_augmentation(
439
+ G, avail=avail, weight=weight, partial=partial
440
+ )
441
+
442
+
443
+ @not_implemented_for("multigraph")
444
+ @not_implemented_for("directed")
445
+ @nx._dispatch
446
+ def bridge_augmentation(G, avail=None, weight=None):
447
+ """Finds the a set of edges that bridge connects G.
448
+
449
+ Equivalent to :func:`k_edge_augmentation` when k=2, and partial=False.
450
+ Adding the resulting edges to G will make it 2-edge-connected. If no
451
+ constraints are specified the returned set of edges is minimum an optimal,
452
+ otherwise the solution is approximated.
453
+
454
+ Parameters
455
+ ----------
456
+ G : NetworkX graph
457
+ An undirected graph.
458
+
459
+ avail : dict or a set of 2 or 3 tuples
460
+ For more details, see :func:`k_edge_augmentation`.
461
+
462
+ weight : string
463
+ key to use to find weights if ``avail`` is a set of 3-tuples.
464
+ For more details, see :func:`k_edge_augmentation`.
465
+
466
+ Yields
467
+ ------
468
+ edge : tuple
469
+ Edges in the bridge-augmentation of G
470
+
471
+ Raises
472
+ ------
473
+ NetworkXUnfeasible
474
+ If no bridge-augmentation exists.
475
+
476
+ Notes
477
+ -----
478
+ If there are no constraints the solution can be computed in linear time
479
+ using :func:`unconstrained_bridge_augmentation`. Otherwise, the problem
480
+ becomes NP-hard and is the solution is approximated by
481
+ :func:`weighted_bridge_augmentation`.
482
+
483
+ See Also
484
+ --------
485
+ :func:`k_edge_augmentation`
486
+ """
487
+ if G.number_of_nodes() < 3:
488
+ raise nx.NetworkXUnfeasible("impossible to bridge connect less than 3 nodes")
489
+ if avail is None:
490
+ return unconstrained_bridge_augmentation(G)
491
+ else:
492
+ return weighted_bridge_augmentation(G, avail, weight=weight)
493
+
494
+
495
+ # --- Algorithms and Helpers ---
496
+
497
+
498
+ def _ordered(u, v):
499
+ """Returns the nodes in an undirected edge in lower-triangular order"""
500
+ return (u, v) if u < v else (v, u)
501
+
502
+
503
+ def _unpack_available_edges(avail, weight=None, G=None):
504
+ """Helper to separate avail into edges and corresponding weights"""
505
+ if weight is None:
506
+ weight = "weight"
507
+ if isinstance(avail, dict):
508
+ avail_uv = list(avail.keys())
509
+ avail_w = list(avail.values())
510
+ else:
511
+
512
+ def _try_getitem(d):
513
+ try:
514
+ return d[weight]
515
+ except TypeError:
516
+ return d
517
+
518
+ avail_uv = [tup[0:2] for tup in avail]
519
+ avail_w = [1 if len(tup) == 2 else _try_getitem(tup[-1]) for tup in avail]
520
+
521
+ if G is not None:
522
+ # Edges already in the graph are filtered
523
+ flags = [not G.has_edge(u, v) for u, v in avail_uv]
524
+ avail_uv = list(it.compress(avail_uv, flags))
525
+ avail_w = list(it.compress(avail_w, flags))
526
+ return avail_uv, avail_w
527
+
528
+
529
+ MetaEdge = namedtuple("MetaEdge", ("meta_uv", "uv", "w"))
530
+
531
+
532
+ def _lightest_meta_edges(mapping, avail_uv, avail_w):
533
+ """Maps available edges in the original graph to edges in the metagraph.
534
+
535
+ Parameters
536
+ ----------
537
+ mapping : dict
538
+ mapping produced by :func:`collapse`, that maps each node in the
539
+ original graph to a node in the meta graph
540
+
541
+ avail_uv : list
542
+ list of edges
543
+
544
+ avail_w : list
545
+ list of edge weights
546
+
547
+ Notes
548
+ -----
549
+ Each node in the metagraph is a k-edge-connected component in the original
550
+ graph. We don't care about any edge within the same k-edge-connected
551
+ component, so we ignore self edges. We also are only interested in the
552
+ minimum weight edge bridging each k-edge-connected component so, we group
553
+ the edges by meta-edge and take the lightest in each group.
554
+
555
+ Examples
556
+ --------
557
+ >>> # Each group represents a meta-node
558
+ >>> groups = ([1, 2, 3], [4, 5], [6])
559
+ >>> mapping = {n: meta_n for meta_n, ns in enumerate(groups) for n in ns}
560
+ >>> avail_uv = [(1, 2), (3, 6), (1, 4), (5, 2), (6, 1), (2, 6), (3, 1)]
561
+ >>> avail_w = [20, 99, 20, 15, 50, 99, 20]
562
+ >>> sorted(_lightest_meta_edges(mapping, avail_uv, avail_w))
563
+ [MetaEdge(meta_uv=(0, 1), uv=(5, 2), w=15), MetaEdge(meta_uv=(0, 2), uv=(6, 1), w=50)]
564
+ """
565
+ grouped_wuv = defaultdict(list)
566
+ for w, (u, v) in zip(avail_w, avail_uv):
567
+ # Order the meta-edge so it can be used as a dict key
568
+ meta_uv = _ordered(mapping[u], mapping[v])
569
+ # Group each available edge using the meta-edge as a key
570
+ grouped_wuv[meta_uv].append((w, u, v))
571
+
572
+ # Now that all available edges are grouped, choose one per group
573
+ for (mu, mv), choices_wuv in grouped_wuv.items():
574
+ # Ignore available edges within the same meta-node
575
+ if mu != mv:
576
+ # Choose the lightest available edge belonging to each meta-edge
577
+ w, u, v = min(choices_wuv)
578
+ yield MetaEdge((mu, mv), (u, v), w)
579
+
580
+
581
+ @nx._dispatch
582
+ def unconstrained_one_edge_augmentation(G):
583
+ """Finds the smallest set of edges to connect G.
584
+
585
+ This is a variant of the unweighted MST problem.
586
+ If G is not empty, a feasible solution always exists.
587
+
588
+ Parameters
589
+ ----------
590
+ G : NetworkX graph
591
+ An undirected graph.
592
+
593
+ Yields
594
+ ------
595
+ edge : tuple
596
+ Edges in the one-edge-augmentation of G
597
+
598
+ See Also
599
+ --------
600
+ :func:`one_edge_augmentation`
601
+ :func:`k_edge_augmentation`
602
+
603
+ Examples
604
+ --------
605
+ >>> G = nx.Graph([(1, 2), (2, 3), (4, 5)])
606
+ >>> G.add_nodes_from([6, 7, 8])
607
+ >>> sorted(unconstrained_one_edge_augmentation(G))
608
+ [(1, 4), (4, 6), (6, 7), (7, 8)]
609
+ """
610
+ ccs1 = list(nx.connected_components(G))
611
+ C = collapse(G, ccs1)
612
+ # When we are not constrained, we can just make a meta graph tree.
613
+ meta_nodes = list(C.nodes())
614
+ # build a path in the metagraph
615
+ meta_aug = list(zip(meta_nodes, meta_nodes[1:]))
616
+ # map that path to the original graph
617
+ inverse = defaultdict(list)
618
+ for k, v in C.graph["mapping"].items():
619
+ inverse[v].append(k)
620
+ for mu, mv in meta_aug:
621
+ yield (inverse[mu][0], inverse[mv][0])
622
+
623
+
624
+ @nx._dispatch
625
+ def weighted_one_edge_augmentation(G, avail, weight=None, partial=False):
626
+ """Finds the minimum weight set of edges to connect G if one exists.
627
+
628
+ This is a variant of the weighted MST problem.
629
+
630
+ Parameters
631
+ ----------
632
+ G : NetworkX graph
633
+ An undirected graph.
634
+
635
+ avail : dict or a set of 2 or 3 tuples
636
+ For more details, see :func:`k_edge_augmentation`.
637
+
638
+ weight : string
639
+ key to use to find weights if ``avail`` is a set of 3-tuples.
640
+ For more details, see :func:`k_edge_augmentation`.
641
+
642
+ partial : boolean
643
+ If partial is True and no feasible k-edge-augmentation exists, then the
644
+ augmenting edges minimize the number of connected components.
645
+
646
+ Yields
647
+ ------
648
+ edge : tuple
649
+ Edges in the subset of avail chosen to connect G.
650
+
651
+ See Also
652
+ --------
653
+ :func:`one_edge_augmentation`
654
+ :func:`k_edge_augmentation`
655
+
656
+ Examples
657
+ --------
658
+ >>> G = nx.Graph([(1, 2), (2, 3), (4, 5)])
659
+ >>> G.add_nodes_from([6, 7, 8])
660
+ >>> # any edge not in avail has an implicit weight of infinity
661
+ >>> avail = [(1, 3), (1, 5), (4, 7), (4, 8), (6, 1), (8, 1), (8, 2)]
662
+ >>> sorted(weighted_one_edge_augmentation(G, avail))
663
+ [(1, 5), (4, 7), (6, 1), (8, 1)]
664
+ >>> # find another solution by giving large weights to edges in the
665
+ >>> # previous solution (note some of the old edges must be used)
666
+ >>> avail = [(1, 3), (1, 5, 99), (4, 7, 9), (6, 1, 99), (8, 1, 99), (8, 2)]
667
+ >>> sorted(weighted_one_edge_augmentation(G, avail))
668
+ [(1, 5), (4, 7), (6, 1), (8, 2)]
669
+ """
670
+ avail_uv, avail_w = _unpack_available_edges(avail, weight=weight, G=G)
671
+ # Collapse CCs in the original graph into nodes in a metagraph
672
+ # Then find an MST of the metagraph instead of the original graph
673
+ C = collapse(G, nx.connected_components(G))
674
+ mapping = C.graph["mapping"]
675
+ # Assign each available edge to an edge in the metagraph
676
+ candidate_mapping = _lightest_meta_edges(mapping, avail_uv, avail_w)
677
+ # nx.set_edge_attributes(C, name='weight', values=0)
678
+ C.add_edges_from(
679
+ (mu, mv, {"weight": w, "generator": uv})
680
+ for (mu, mv), uv, w in candidate_mapping
681
+ )
682
+ # Find MST of the meta graph
683
+ meta_mst = nx.minimum_spanning_tree(C)
684
+ if not partial and not nx.is_connected(meta_mst):
685
+ raise nx.NetworkXUnfeasible("Not possible to connect G with available edges")
686
+ # Yield the edge that generated the meta-edge
687
+ for mu, mv, d in meta_mst.edges(data=True):
688
+ if "generator" in d:
689
+ edge = d["generator"]
690
+ yield edge
691
+
692
+
693
+ @nx._dispatch
694
+ def unconstrained_bridge_augmentation(G):
695
+ """Finds an optimal 2-edge-augmentation of G using the fewest edges.
696
+
697
+ This is an implementation of the algorithm detailed in [1]_.
698
+ The basic idea is to construct a meta-graph of bridge-ccs, connect leaf
699
+ nodes of the trees to connect the entire graph, and finally connect the
700
+ leafs of the tree in dfs-preorder to bridge connect the entire graph.
701
+
702
+ Parameters
703
+ ----------
704
+ G : NetworkX graph
705
+ An undirected graph.
706
+
707
+ Yields
708
+ ------
709
+ edge : tuple
710
+ Edges in the bridge augmentation of G
711
+
712
+ Notes
713
+ -----
714
+ Input: a graph G.
715
+ First find the bridge components of G and collapse each bridge-cc into a
716
+ node of a metagraph graph C, which is guaranteed to be a forest of trees.
717
+
718
+ C contains p "leafs" --- nodes with exactly one incident edge.
719
+ C contains q "isolated nodes" --- nodes with no incident edges.
720
+
721
+ Theorem: If p + q > 1, then at least :math:`ceil(p / 2) + q` edges are
722
+ needed to bridge connect C. This algorithm achieves this min number.
723
+
724
+ The method first adds enough edges to make G into a tree and then pairs
725
+ leafs in a simple fashion.
726
+
727
+ Let n be the number of trees in C. Let v(i) be an isolated vertex in the
728
+ i-th tree if one exists, otherwise it is a pair of distinct leafs nodes
729
+ in the i-th tree. Alternating edges from these sets (i.e. adding edges
730
+ A1 = [(v(i)[0], v(i + 1)[1]), v(i + 1)[0], v(i + 2)[1])...]) connects C
731
+ into a tree T. This tree has p' = p + 2q - 2(n -1) leafs and no isolated
732
+ vertices. A1 has n - 1 edges. The next step finds ceil(p' / 2) edges to
733
+ biconnect any tree with p' leafs.
734
+
735
+ Convert T into an arborescence T' by picking an arbitrary root node with
736
+ degree >= 2 and directing all edges away from the root. Note the
737
+ implementation implicitly constructs T'.
738
+
739
+ The leafs of T are the nodes with no existing edges in T'.
740
+ Order the leafs of T' by DFS preorder. Then break this list in half
741
+ and add the zipped pairs to A2.
742
+
743
+ The set A = A1 + A2 is the minimum augmentation in the metagraph.
744
+
745
+ To convert this to edges in the original graph
746
+
747
+ References
748
+ ----------
749
+ .. [1] Eswaran, Kapali P., and R. Endre Tarjan. (1975) Augmentation problems.
750
+ http://epubs.siam.org/doi/abs/10.1137/0205044
751
+
752
+ See Also
753
+ --------
754
+ :func:`bridge_augmentation`
755
+ :func:`k_edge_augmentation`
756
+
757
+ Examples
758
+ --------
759
+ >>> G = nx.path_graph((1, 2, 3, 4, 5, 6, 7))
760
+ >>> sorted(unconstrained_bridge_augmentation(G))
761
+ [(1, 7)]
762
+ >>> G = nx.path_graph((1, 2, 3, 2, 4, 5, 6, 7))
763
+ >>> sorted(unconstrained_bridge_augmentation(G))
764
+ [(1, 3), (3, 7)]
765
+ >>> G = nx.Graph([(0, 1), (0, 2), (1, 2)])
766
+ >>> G.add_node(4)
767
+ >>> sorted(unconstrained_bridge_augmentation(G))
768
+ [(1, 4), (4, 0)]
769
+ """
770
+ # -----
771
+ # Mapping of terms from (Eswaran and Tarjan):
772
+ # G = G_0 - the input graph
773
+ # C = G_0' - the bridge condensation of G. (This is a forest of trees)
774
+ # A1 = A_1 - the edges to connect the forest into a tree
775
+ # leaf = pendant - a node with degree of 1
776
+
777
+ # alpha(v) = maps the node v in G to its meta-node in C
778
+ # beta(x) = maps the meta-node x in C to any node in the bridge
779
+ # component of G corresponding to x.
780
+
781
+ # find the 2-edge-connected components of G
782
+ bridge_ccs = list(nx.connectivity.bridge_components(G))
783
+ # condense G into an forest C
784
+ C = collapse(G, bridge_ccs)
785
+
786
+ # Choose pairs of distinct leaf nodes in each tree. If this is not
787
+ # possible then make a pair using the single isolated node in the tree.
788
+ vset1 = [
789
+ tuple(cc) * 2 # case1: an isolated node
790
+ if len(cc) == 1
791
+ else sorted(cc, key=C.degree)[0:2] # case2: pair of leaf nodes
792
+ for cc in nx.connected_components(C)
793
+ ]
794
+ if len(vset1) > 1:
795
+ # Use this set to construct edges that connect C into a tree.
796
+ nodes1 = [vs[0] for vs in vset1]
797
+ nodes2 = [vs[1] for vs in vset1]
798
+ A1 = list(zip(nodes1[1:], nodes2))
799
+ else:
800
+ A1 = []
801
+ # Connect each tree in the forest to construct an arborescence
802
+ T = C.copy()
803
+ T.add_edges_from(A1)
804
+
805
+ # If there are only two leaf nodes, we simply connect them.
806
+ leafs = [n for n, d in T.degree() if d == 1]
807
+ if len(leafs) == 1:
808
+ A2 = []
809
+ if len(leafs) == 2:
810
+ A2 = [tuple(leafs)]
811
+ else:
812
+ # Choose an arbitrary non-leaf root
813
+ try:
814
+ root = next(n for n, d in T.degree() if d > 1)
815
+ except StopIteration: # no nodes found with degree > 1
816
+ return
817
+ # order the leaves of C by (induced directed) preorder
818
+ v2 = [n for n in nx.dfs_preorder_nodes(T, root) if T.degree(n) == 1]
819
+ # connecting first half of the leafs in pre-order to the second
820
+ # half will bridge connect the tree with the fewest edges.
821
+ half = math.ceil(len(v2) / 2)
822
+ A2 = list(zip(v2[:half], v2[-half:]))
823
+
824
+ # collect the edges used to augment the original forest
825
+ aug_tree_edges = A1 + A2
826
+
827
+ # Construct the mapping (beta) from meta-nodes to regular nodes
828
+ inverse = defaultdict(list)
829
+ for k, v in C.graph["mapping"].items():
830
+ inverse[v].append(k)
831
+ # sort so we choose minimum degree nodes first
832
+ inverse = {
833
+ mu: sorted(mapped, key=lambda u: (G.degree(u), u))
834
+ for mu, mapped in inverse.items()
835
+ }
836
+
837
+ # For each meta-edge, map back to an arbitrary pair in the original graph
838
+ G2 = G.copy()
839
+ for mu, mv in aug_tree_edges:
840
+ # Find the first available edge that doesn't exist and return it
841
+ for u, v in it.product(inverse[mu], inverse[mv]):
842
+ if not G2.has_edge(u, v):
843
+ G2.add_edge(u, v)
844
+ yield u, v
845
+ break
846
+
847
+
848
+ @nx._dispatch
849
+ def weighted_bridge_augmentation(G, avail, weight=None):
850
+ """Finds an approximate min-weight 2-edge-augmentation of G.
851
+
852
+ This is an implementation of the approximation algorithm detailed in [1]_.
853
+ It chooses a set of edges from avail to add to G that renders it
854
+ 2-edge-connected if such a subset exists. This is done by finding a
855
+ minimum spanning arborescence of a specially constructed metagraph.
856
+
857
+ Parameters
858
+ ----------
859
+ G : NetworkX graph
860
+ An undirected graph.
861
+
862
+ avail : set of 2 or 3 tuples.
863
+ candidate edges (with optional weights) to choose from
864
+
865
+ weight : string
866
+ key to use to find weights if avail is a set of 3-tuples where the
867
+ third item in each tuple is a dictionary.
868
+
869
+ Yields
870
+ ------
871
+ edge : tuple
872
+ Edges in the subset of avail chosen to bridge augment G.
873
+
874
+ Notes
875
+ -----
876
+ Finding a weighted 2-edge-augmentation is NP-hard.
877
+ Any edge not in ``avail`` is considered to have a weight of infinity.
878
+ The approximation factor is 2 if ``G`` is connected and 3 if it is not.
879
+ Runs in :math:`O(m + n log(n))` time
880
+
881
+ References
882
+ ----------
883
+ .. [1] Khuller, Samir, and Ramakrishna Thurimella. (1993) Approximation
884
+ algorithms for graph augmentation.
885
+ http://www.sciencedirect.com/science/article/pii/S0196677483710102
886
+
887
+ See Also
888
+ --------
889
+ :func:`bridge_augmentation`
890
+ :func:`k_edge_augmentation`
891
+
892
+ Examples
893
+ --------
894
+ >>> G = nx.path_graph((1, 2, 3, 4))
895
+ >>> # When the weights are equal, (1, 4) is the best
896
+ >>> avail = [(1, 4, 1), (1, 3, 1), (2, 4, 1)]
897
+ >>> sorted(weighted_bridge_augmentation(G, avail))
898
+ [(1, 4)]
899
+ >>> # Giving (1, 4) a high weight makes the two edge solution the best.
900
+ >>> avail = [(1, 4, 1000), (1, 3, 1), (2, 4, 1)]
901
+ >>> sorted(weighted_bridge_augmentation(G, avail))
902
+ [(1, 3), (2, 4)]
903
+ >>> # ------
904
+ >>> G = nx.path_graph((1, 2, 3, 4))
905
+ >>> G.add_node(5)
906
+ >>> avail = [(1, 5, 11), (2, 5, 10), (4, 3, 1), (4, 5, 1)]
907
+ >>> sorted(weighted_bridge_augmentation(G, avail=avail))
908
+ [(1, 5), (4, 5)]
909
+ >>> avail = [(1, 5, 11), (2, 5, 10), (4, 3, 1), (4, 5, 51)]
910
+ >>> sorted(weighted_bridge_augmentation(G, avail=avail))
911
+ [(1, 5), (2, 5), (4, 5)]
912
+ """
913
+
914
+ if weight is None:
915
+ weight = "weight"
916
+
917
+ # If input G is not connected the approximation factor increases to 3
918
+ if not nx.is_connected(G):
919
+ H = G.copy()
920
+ connectors = list(one_edge_augmentation(H, avail=avail, weight=weight))
921
+ H.add_edges_from(connectors)
922
+
923
+ yield from connectors
924
+ else:
925
+ connectors = []
926
+ H = G
927
+
928
+ if len(avail) == 0:
929
+ if nx.has_bridges(H):
930
+ raise nx.NetworkXUnfeasible("no augmentation possible")
931
+
932
+ avail_uv, avail_w = _unpack_available_edges(avail, weight=weight, G=H)
933
+
934
+ # Collapse input into a metagraph. Meta nodes are bridge-ccs
935
+ bridge_ccs = nx.connectivity.bridge_components(H)
936
+ C = collapse(H, bridge_ccs)
937
+
938
+ # Use the meta graph to shrink avail to a small feasible subset
939
+ mapping = C.graph["mapping"]
940
+ # Choose the minimum weight feasible edge in each group
941
+ meta_to_wuv = {
942
+ (mu, mv): (w, uv)
943
+ for (mu, mv), uv, w in _lightest_meta_edges(mapping, avail_uv, avail_w)
944
+ }
945
+
946
+ # Mapping of terms from (Khuller and Thurimella):
947
+ # C : G_0 = (V, E^0)
948
+ # This is the metagraph where each node is a 2-edge-cc in G.
949
+ # The edges in C represent bridges in the original graph.
950
+ # (mu, mv) : E - E^0 # they group both avail and given edges in E
951
+ # T : \Gamma
952
+ # D : G^D = (V, E_D)
953
+
954
+ # The paper uses ancestor because children point to parents, which is
955
+ # contrary to networkx standards. So, we actually need to run
956
+ # nx.least_common_ancestor on the reversed Tree.
957
+
958
+ # Pick an arbitrary leaf from C as the root
959
+ try:
960
+ root = next(n for n, d in C.degree() if d == 1)
961
+ except StopIteration: # no nodes found with degree == 1
962
+ return
963
+ # Root C into a tree TR by directing all edges away from the root
964
+ # Note in their paper T directs edges towards the root
965
+ TR = nx.dfs_tree(C, root)
966
+
967
+ # Add to D the directed edges of T and set their weight to zero
968
+ # This indicates that it costs nothing to use edges that were given.
969
+ D = nx.reverse(TR).copy()
970
+
971
+ nx.set_edge_attributes(D, name="weight", values=0)
972
+
973
+ # The LCA of mu and mv in T is the shared ancestor of mu and mv that is
974
+ # located farthest from the root.
975
+ lca_gen = nx.tree_all_pairs_lowest_common_ancestor(
976
+ TR, root=root, pairs=meta_to_wuv.keys()
977
+ )
978
+
979
+ for (mu, mv), lca in lca_gen:
980
+ w, uv = meta_to_wuv[(mu, mv)]
981
+ if lca == mu:
982
+ # If u is an ancestor of v in TR, then add edge u->v to D
983
+ D.add_edge(lca, mv, weight=w, generator=uv)
984
+ elif lca == mv:
985
+ # If v is an ancestor of u in TR, then add edge v->u to D
986
+ D.add_edge(lca, mu, weight=w, generator=uv)
987
+ else:
988
+ # If neither u nor v is a ancestor of the other in TR
989
+ # let t = lca(TR, u, v) and add edges t->u and t->v
990
+ # Track the original edge that GENERATED these edges.
991
+ D.add_edge(lca, mu, weight=w, generator=uv)
992
+ D.add_edge(lca, mv, weight=w, generator=uv)
993
+
994
+ # Then compute a minimum rooted branching
995
+ try:
996
+ # Note the original edges must be directed towards to root for the
997
+ # branching to give us a bridge-augmentation.
998
+ A = _minimum_rooted_branching(D, root)
999
+ except nx.NetworkXException as err:
1000
+ # If there is no branching then augmentation is not possible
1001
+ raise nx.NetworkXUnfeasible("no 2-edge-augmentation possible") from err
1002
+
1003
+ # For each edge e, in the branching that did not belong to the directed
1004
+ # tree T, add the corresponding edge that **GENERATED** it (this is not
1005
+ # necessarily e itself!)
1006
+
1007
+ # ensure the third case does not generate edges twice
1008
+ bridge_connectors = set()
1009
+ for mu, mv in A.edges():
1010
+ data = D.get_edge_data(mu, mv)
1011
+ if "generator" in data:
1012
+ # Add the avail edge that generated the branching edge.
1013
+ edge = data["generator"]
1014
+ bridge_connectors.add(edge)
1015
+
1016
+ yield from bridge_connectors
1017
+
1018
+
1019
+ def _minimum_rooted_branching(D, root):
1020
+ """Helper function to compute a minimum rooted branching (aka rooted
1021
+ arborescence)
1022
+
1023
+ Before the branching can be computed, the directed graph must be rooted by
1024
+ removing the predecessors of root.
1025
+
1026
+ A branching / arborescence of rooted graph G is a subgraph that contains a
1027
+ directed path from the root to every other vertex. It is the directed
1028
+ analog of the minimum spanning tree problem.
1029
+
1030
+ References
1031
+ ----------
1032
+ [1] Khuller, Samir (2002) Advanced Algorithms Lecture 24 Notes.
1033
+ https://web.archive.org/web/20121030033722/https://www.cs.umd.edu/class/spring2011/cmsc651/lec07.pdf
1034
+ """
1035
+ rooted = D.copy()
1036
+ # root the graph by removing all predecessors to `root`.
1037
+ rooted.remove_edges_from([(u, root) for u in D.predecessors(root)])
1038
+ # Then compute the branching / arborescence.
1039
+ A = nx.minimum_spanning_arborescence(rooted)
1040
+ return A
1041
+
1042
+
1043
+ @nx._dispatch
1044
+ def collapse(G, grouped_nodes):
1045
+ """Collapses each group of nodes into a single node.
1046
+
1047
+ This is similar to condensation, but works on undirected graphs.
1048
+
1049
+ Parameters
1050
+ ----------
1051
+ G : NetworkX Graph
1052
+
1053
+ grouped_nodes: list or generator
1054
+ Grouping of nodes to collapse. The grouping must be disjoint.
1055
+ If grouped_nodes are strongly_connected_components then this is
1056
+ equivalent to :func:`condensation`.
1057
+
1058
+ Returns
1059
+ -------
1060
+ C : NetworkX Graph
1061
+ The collapsed graph C of G with respect to the node grouping. The node
1062
+ labels are integers corresponding to the index of the component in the
1063
+ list of grouped_nodes. C has a graph attribute named 'mapping' with a
1064
+ dictionary mapping the original nodes to the nodes in C to which they
1065
+ belong. Each node in C also has a node attribute 'members' with the set
1066
+ of original nodes in G that form the group that the node in C
1067
+ represents.
1068
+
1069
+ Examples
1070
+ --------
1071
+ >>> # Collapses a graph using disjoint groups, but not necessarily connected
1072
+ >>> G = nx.Graph([(1, 0), (2, 3), (3, 1), (3, 4), (4, 5), (5, 6), (5, 7)])
1073
+ >>> G.add_node("A")
1074
+ >>> grouped_nodes = [{0, 1, 2, 3}, {5, 6, 7}]
1075
+ >>> C = collapse(G, grouped_nodes)
1076
+ >>> members = nx.get_node_attributes(C, "members")
1077
+ >>> sorted(members.keys())
1078
+ [0, 1, 2, 3]
1079
+ >>> member_values = set(map(frozenset, members.values()))
1080
+ >>> assert {0, 1, 2, 3} in member_values
1081
+ >>> assert {4} in member_values
1082
+ >>> assert {5, 6, 7} in member_values
1083
+ >>> assert {"A"} in member_values
1084
+ """
1085
+ mapping = {}
1086
+ members = {}
1087
+ C = G.__class__()
1088
+ i = 0 # required if G is empty
1089
+ remaining = set(G.nodes())
1090
+ for i, group in enumerate(grouped_nodes):
1091
+ group = set(group)
1092
+ assert remaining.issuperset(
1093
+ group
1094
+ ), "grouped nodes must exist in G and be disjoint"
1095
+ remaining.difference_update(group)
1096
+ members[i] = group
1097
+ mapping.update((n, i) for n in group)
1098
+ # remaining nodes are in their own group
1099
+ for i, node in enumerate(remaining, start=i + 1):
1100
+ group = {node}
1101
+ members[i] = group
1102
+ mapping.update((n, i) for n in group)
1103
+ number_of_groups = i + 1
1104
+ C.add_nodes_from(range(number_of_groups))
1105
+ C.add_edges_from(
1106
+ (mapping[u], mapping[v]) for u, v in G.edges() if mapping[u] != mapping[v]
1107
+ )
1108
+ # Add a list of members (ie original nodes) to each node (ie scc) in C.
1109
+ nx.set_node_attributes(C, name="members", values=members)
1110
+ # Add mapping dict as graph attribute
1111
+ C.graph["mapping"] = mapping
1112
+ return C
1113
+
1114
+
1115
+ @nx._dispatch
1116
+ def complement_edges(G):
1117
+ """Returns only the edges in the complement of G
1118
+
1119
+ Parameters
1120
+ ----------
1121
+ G : NetworkX Graph
1122
+
1123
+ Yields
1124
+ ------
1125
+ edge : tuple
1126
+ Edges in the complement of G
1127
+
1128
+ Examples
1129
+ --------
1130
+ >>> G = nx.path_graph((1, 2, 3, 4))
1131
+ >>> sorted(complement_edges(G))
1132
+ [(1, 3), (1, 4), (2, 4)]
1133
+ >>> G = nx.path_graph((1, 2, 3, 4), nx.DiGraph())
1134
+ >>> sorted(complement_edges(G))
1135
+ [(1, 3), (1, 4), (2, 1), (2, 4), (3, 1), (3, 2), (4, 1), (4, 2), (4, 3)]
1136
+ >>> G = nx.complete_graph(1000)
1137
+ >>> sorted(complement_edges(G))
1138
+ []
1139
+ """
1140
+ G_adj = G._adj # Store as a variable to eliminate attribute lookup
1141
+ if G.is_directed():
1142
+ for u, v in it.combinations(G.nodes(), 2):
1143
+ if v not in G_adj[u]:
1144
+ yield (u, v)
1145
+ if u not in G_adj[v]:
1146
+ yield (v, u)
1147
+ else:
1148
+ for u, v in it.combinations(G.nodes(), 2):
1149
+ if v not in G_adj[u]:
1150
+ yield (u, v)
1151
+
1152
+
1153
+ def _compat_shuffle(rng, input):
1154
+ """wrapper around rng.shuffle for python 2 compatibility reasons"""
1155
+ rng.shuffle(input)
1156
+
1157
+
1158
+ @not_implemented_for("multigraph")
1159
+ @not_implemented_for("directed")
1160
+ @py_random_state(4)
1161
+ @nx._dispatch
1162
+ def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None):
1163
+ """Greedy algorithm for finding a k-edge-augmentation
1164
+
1165
+ Parameters
1166
+ ----------
1167
+ G : NetworkX graph
1168
+ An undirected graph.
1169
+
1170
+ k : integer
1171
+ Desired edge connectivity
1172
+
1173
+ avail : dict or a set of 2 or 3 tuples
1174
+ For more details, see :func:`k_edge_augmentation`.
1175
+
1176
+ weight : string
1177
+ key to use to find weights if ``avail`` is a set of 3-tuples.
1178
+ For more details, see :func:`k_edge_augmentation`.
1179
+
1180
+ seed : integer, random_state, or None (default)
1181
+ Indicator of random number generation state.
1182
+ See :ref:`Randomness<randomness>`.
1183
+
1184
+ Yields
1185
+ ------
1186
+ edge : tuple
1187
+ Edges in the greedy augmentation of G
1188
+
1189
+ Notes
1190
+ -----
1191
+ The algorithm is simple. Edges are incrementally added between parts of the
1192
+ graph that are not yet locally k-edge-connected. Then edges are from the
1193
+ augmenting set are pruned as long as local-edge-connectivity is not broken.
1194
+
1195
+ This algorithm is greedy and does not provide optimality guarantees. It
1196
+ exists only to provide :func:`k_edge_augmentation` with the ability to
1197
+ generate a feasible solution for arbitrary k.
1198
+
1199
+ See Also
1200
+ --------
1201
+ :func:`k_edge_augmentation`
1202
+
1203
+ Examples
1204
+ --------
1205
+ >>> G = nx.path_graph((1, 2, 3, 4, 5, 6, 7))
1206
+ >>> sorted(greedy_k_edge_augmentation(G, k=2))
1207
+ [(1, 7)]
1208
+ >>> sorted(greedy_k_edge_augmentation(G, k=1, avail=[]))
1209
+ []
1210
+ >>> G = nx.path_graph((1, 2, 3, 4, 5, 6, 7))
1211
+ >>> avail = {(u, v): 1 for (u, v) in complement_edges(G)}
1212
+ >>> # randomized pruning process can produce different solutions
1213
+ >>> sorted(greedy_k_edge_augmentation(G, k=4, avail=avail, seed=2))
1214
+ [(1, 3), (1, 4), (1, 5), (1, 6), (1, 7), (2, 4), (2, 6), (3, 7), (5, 7)]
1215
+ >>> sorted(greedy_k_edge_augmentation(G, k=4, avail=avail, seed=3))
1216
+ [(1, 3), (1, 5), (1, 6), (2, 4), (2, 6), (3, 7), (4, 7), (5, 7)]
1217
+ """
1218
+ # Result set
1219
+ aug_edges = []
1220
+
1221
+ done = is_k_edge_connected(G, k)
1222
+ if done:
1223
+ return
1224
+ if avail is None:
1225
+ # all edges are available
1226
+ avail_uv = list(complement_edges(G))
1227
+ avail_w = [1] * len(avail_uv)
1228
+ else:
1229
+ # Get the unique set of unweighted edges
1230
+ avail_uv, avail_w = _unpack_available_edges(avail, weight=weight, G=G)
1231
+
1232
+ # Greedy: order lightest edges. Use degree sum to tie-break
1233
+ tiebreaker = [sum(map(G.degree, uv)) for uv in avail_uv]
1234
+ avail_wduv = sorted(zip(avail_w, tiebreaker, avail_uv))
1235
+ avail_uv = [uv for w, d, uv in avail_wduv]
1236
+
1237
+ # Incrementally add edges in until we are k-connected
1238
+ H = G.copy()
1239
+ for u, v in avail_uv:
1240
+ done = False
1241
+ if not is_locally_k_edge_connected(H, u, v, k=k):
1242
+ # Only add edges in parts that are not yet locally k-edge-connected
1243
+ aug_edges.append((u, v))
1244
+ H.add_edge(u, v)
1245
+ # Did adding this edge help?
1246
+ if H.degree(u) >= k and H.degree(v) >= k:
1247
+ done = is_k_edge_connected(H, k)
1248
+ if done:
1249
+ break
1250
+
1251
+ # Check for feasibility
1252
+ if not done:
1253
+ raise nx.NetworkXUnfeasible("not able to k-edge-connect with available edges")
1254
+
1255
+ # Randomized attempt to reduce the size of the solution
1256
+ _compat_shuffle(seed, aug_edges)
1257
+ for u, v in list(aug_edges):
1258
+ # Don't remove if we know it would break connectivity
1259
+ if H.degree(u) <= k or H.degree(v) <= k:
1260
+ continue
1261
+ H.remove_edge(u, v)
1262
+ aug_edges.remove((u, v))
1263
+ if not is_k_edge_connected(H, k=k):
1264
+ # If removing this edge breaks feasibility, undo
1265
+ H.add_edge(u, v)
1266
+ aug_edges.append((u, v))
1267
+
1268
+ # Generate results
1269
+ yield from aug_edges
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/edge_kcomponents.py ADDED
@@ -0,0 +1,584 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Algorithms for finding k-edge-connected components and subgraphs.
3
+
4
+ A k-edge-connected component (k-edge-cc) is a maximal set of nodes in G, such
5
+ that all pairs of node have an edge-connectivity of at least k.
6
+
7
+ A k-edge-connected subgraph (k-edge-subgraph) is a maximal set of nodes in G,
8
+ such that the subgraph of G defined by the nodes has an edge-connectivity at
9
+ least k.
10
+ """
11
+ import itertools as it
12
+ from functools import partial
13
+
14
+ import networkx as nx
15
+ from networkx.utils import arbitrary_element, not_implemented_for
16
+
17
+ __all__ = [
18
+ "k_edge_components",
19
+ "k_edge_subgraphs",
20
+ "bridge_components",
21
+ "EdgeComponentAuxGraph",
22
+ ]
23
+
24
+
25
+ @not_implemented_for("multigraph")
26
+ @nx._dispatch
27
+ def k_edge_components(G, k):
28
+ """Generates nodes in each maximal k-edge-connected component in G.
29
+
30
+ Parameters
31
+ ----------
32
+ G : NetworkX graph
33
+
34
+ k : Integer
35
+ Desired edge connectivity
36
+
37
+ Returns
38
+ -------
39
+ k_edge_components : a generator of k-edge-ccs. Each set of returned nodes
40
+ will have k-edge-connectivity in the graph G.
41
+
42
+ See Also
43
+ --------
44
+ :func:`local_edge_connectivity`
45
+ :func:`k_edge_subgraphs` : similar to this function, but the subgraph
46
+ defined by the nodes must also have k-edge-connectivity.
47
+ :func:`k_components` : similar to this function, but uses node-connectivity
48
+ instead of edge-connectivity
49
+
50
+ Raises
51
+ ------
52
+ NetworkXNotImplemented
53
+ If the input graph is a multigraph.
54
+
55
+ ValueError:
56
+ If k is less than 1
57
+
58
+ Notes
59
+ -----
60
+ Attempts to use the most efficient implementation available based on k.
61
+ If k=1, this is simply connected components for directed graphs and
62
+ connected components for undirected graphs.
63
+ If k=2 on an efficient bridge connected component algorithm from _[1] is
64
+ run based on the chain decomposition.
65
+ Otherwise, the algorithm from _[2] is used.
66
+
67
+ Examples
68
+ --------
69
+ >>> import itertools as it
70
+ >>> from networkx.utils import pairwise
71
+ >>> paths = [
72
+ ... (1, 2, 4, 3, 1, 4),
73
+ ... (5, 6, 7, 8, 5, 7, 8, 6),
74
+ ... ]
75
+ >>> G = nx.Graph()
76
+ >>> G.add_nodes_from(it.chain(*paths))
77
+ >>> G.add_edges_from(it.chain(*[pairwise(path) for path in paths]))
78
+ >>> # note this returns {1, 4} unlike k_edge_subgraphs
79
+ >>> sorted(map(sorted, nx.k_edge_components(G, k=3)))
80
+ [[1, 4], [2], [3], [5, 6, 7, 8]]
81
+
82
+ References
83
+ ----------
84
+ .. [1] https://en.wikipedia.org/wiki/Bridge_%28graph_theory%29
85
+ .. [2] Wang, Tianhao, et al. (2015) A simple algorithm for finding all
86
+ k-edge-connected components.
87
+ http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264
88
+ """
89
+ # Compute k-edge-ccs using the most efficient algorithms available.
90
+ if k < 1:
91
+ raise ValueError("k cannot be less than 1")
92
+ if G.is_directed():
93
+ if k == 1:
94
+ return nx.strongly_connected_components(G)
95
+ else:
96
+ # TODO: investigate https://arxiv.org/abs/1412.6466 for k=2
97
+ aux_graph = EdgeComponentAuxGraph.construct(G)
98
+ return aux_graph.k_edge_components(k)
99
+ else:
100
+ if k == 1:
101
+ return nx.connected_components(G)
102
+ elif k == 2:
103
+ return bridge_components(G)
104
+ else:
105
+ aux_graph = EdgeComponentAuxGraph.construct(G)
106
+ return aux_graph.k_edge_components(k)
107
+
108
+
109
+ @not_implemented_for("multigraph")
110
+ @nx._dispatch
111
+ def k_edge_subgraphs(G, k):
112
+ """Generates nodes in each maximal k-edge-connected subgraph in G.
113
+
114
+ Parameters
115
+ ----------
116
+ G : NetworkX graph
117
+
118
+ k : Integer
119
+ Desired edge connectivity
120
+
121
+ Returns
122
+ -------
123
+ k_edge_subgraphs : a generator of k-edge-subgraphs
124
+ Each k-edge-subgraph is a maximal set of nodes that defines a subgraph
125
+ of G that is k-edge-connected.
126
+
127
+ See Also
128
+ --------
129
+ :func:`edge_connectivity`
130
+ :func:`k_edge_components` : similar to this function, but nodes only
131
+ need to have k-edge-connectivity within the graph G and the subgraphs
132
+ might not be k-edge-connected.
133
+
134
+ Raises
135
+ ------
136
+ NetworkXNotImplemented
137
+ If the input graph is a multigraph.
138
+
139
+ ValueError:
140
+ If k is less than 1
141
+
142
+ Notes
143
+ -----
144
+ Attempts to use the most efficient implementation available based on k.
145
+ If k=1, or k=2 and the graph is undirected, then this simply calls
146
+ `k_edge_components`. Otherwise the algorithm from _[1] is used.
147
+
148
+ Examples
149
+ --------
150
+ >>> import itertools as it
151
+ >>> from networkx.utils import pairwise
152
+ >>> paths = [
153
+ ... (1, 2, 4, 3, 1, 4),
154
+ ... (5, 6, 7, 8, 5, 7, 8, 6),
155
+ ... ]
156
+ >>> G = nx.Graph()
157
+ >>> G.add_nodes_from(it.chain(*paths))
158
+ >>> G.add_edges_from(it.chain(*[pairwise(path) for path in paths]))
159
+ >>> # note this does not return {1, 4} unlike k_edge_components
160
+ >>> sorted(map(sorted, nx.k_edge_subgraphs(G, k=3)))
161
+ [[1], [2], [3], [4], [5, 6, 7, 8]]
162
+
163
+ References
164
+ ----------
165
+ .. [1] Zhou, Liu, et al. (2012) Finding maximal k-edge-connected subgraphs
166
+ from a large graph. ACM International Conference on Extending Database
167
+ Technology 2012 480-–491.
168
+ https://openproceedings.org/2012/conf/edbt/ZhouLYLCL12.pdf
169
+ """
170
+ if k < 1:
171
+ raise ValueError("k cannot be less than 1")
172
+ if G.is_directed():
173
+ if k <= 1:
174
+ # For directed graphs ,
175
+ # When k == 1, k-edge-ccs and k-edge-subgraphs are the same
176
+ return k_edge_components(G, k)
177
+ else:
178
+ return _k_edge_subgraphs_nodes(G, k)
179
+ else:
180
+ if k <= 2:
181
+ # For undirected graphs,
182
+ # when k <= 2, k-edge-ccs and k-edge-subgraphs are the same
183
+ return k_edge_components(G, k)
184
+ else:
185
+ return _k_edge_subgraphs_nodes(G, k)
186
+
187
+
188
+ def _k_edge_subgraphs_nodes(G, k):
189
+ """Helper to get the nodes from the subgraphs.
190
+
191
+ This allows k_edge_subgraphs to return a generator.
192
+ """
193
+ for C in general_k_edge_subgraphs(G, k):
194
+ yield set(C.nodes())
195
+
196
+
197
+ @not_implemented_for("directed")
198
+ @not_implemented_for("multigraph")
199
+ @nx._dispatch
200
+ def bridge_components(G):
201
+ """Finds all bridge-connected components G.
202
+
203
+ Parameters
204
+ ----------
205
+ G : NetworkX undirected graph
206
+
207
+ Returns
208
+ -------
209
+ bridge_components : a generator of 2-edge-connected components
210
+
211
+
212
+ See Also
213
+ --------
214
+ :func:`k_edge_subgraphs` : this function is a special case for an
215
+ undirected graph where k=2.
216
+ :func:`biconnected_components` : similar to this function, but is defined
217
+ using 2-node-connectivity instead of 2-edge-connectivity.
218
+
219
+ Raises
220
+ ------
221
+ NetworkXNotImplemented
222
+ If the input graph is directed or a multigraph.
223
+
224
+ Notes
225
+ -----
226
+ Bridge-connected components are also known as 2-edge-connected components.
227
+
228
+ Examples
229
+ --------
230
+ >>> # The barbell graph with parameter zero has a single bridge
231
+ >>> G = nx.barbell_graph(5, 0)
232
+ >>> from networkx.algorithms.connectivity.edge_kcomponents import bridge_components
233
+ >>> sorted(map(sorted, bridge_components(G)))
234
+ [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]
235
+ """
236
+ H = G.copy()
237
+ H.remove_edges_from(nx.bridges(G))
238
+ yield from nx.connected_components(H)
239
+
240
+
241
+ class EdgeComponentAuxGraph:
242
+ r"""A simple algorithm to find all k-edge-connected components in a graph.
243
+
244
+ Constructing the auxiliary graph (which may take some time) allows for the
245
+ k-edge-ccs to be found in linear time for arbitrary k.
246
+
247
+ Notes
248
+ -----
249
+ This implementation is based on [1]_. The idea is to construct an auxiliary
250
+ graph from which the k-edge-ccs can be extracted in linear time. The
251
+ auxiliary graph is constructed in $O(|V|\cdot F)$ operations, where F is the
252
+ complexity of max flow. Querying the components takes an additional $O(|V|)$
253
+ operations. This algorithm can be slow for large graphs, but it handles an
254
+ arbitrary k and works for both directed and undirected inputs.
255
+
256
+ The undirected case for k=1 is exactly connected components.
257
+ The undirected case for k=2 is exactly bridge connected components.
258
+ The directed case for k=1 is exactly strongly connected components.
259
+
260
+ References
261
+ ----------
262
+ .. [1] Wang, Tianhao, et al. (2015) A simple algorithm for finding all
263
+ k-edge-connected components.
264
+ http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264
265
+
266
+ Examples
267
+ --------
268
+ >>> import itertools as it
269
+ >>> from networkx.utils import pairwise
270
+ >>> from networkx.algorithms.connectivity import EdgeComponentAuxGraph
271
+ >>> # Build an interesting graph with multiple levels of k-edge-ccs
272
+ >>> paths = [
273
+ ... (1, 2, 3, 4, 1, 3, 4, 2), # a 3-edge-cc (a 4 clique)
274
+ ... (5, 6, 7, 5), # a 2-edge-cc (a 3 clique)
275
+ ... (1, 5), # combine first two ccs into a 1-edge-cc
276
+ ... (0,), # add an additional disconnected 1-edge-cc
277
+ ... ]
278
+ >>> G = nx.Graph()
279
+ >>> G.add_nodes_from(it.chain(*paths))
280
+ >>> G.add_edges_from(it.chain(*[pairwise(path) for path in paths]))
281
+ >>> # Constructing the AuxGraph takes about O(n ** 4)
282
+ >>> aux_graph = EdgeComponentAuxGraph.construct(G)
283
+ >>> # Once constructed, querying takes O(n)
284
+ >>> sorted(map(sorted, aux_graph.k_edge_components(k=1)))
285
+ [[0], [1, 2, 3, 4, 5, 6, 7]]
286
+ >>> sorted(map(sorted, aux_graph.k_edge_components(k=2)))
287
+ [[0], [1, 2, 3, 4], [5, 6, 7]]
288
+ >>> sorted(map(sorted, aux_graph.k_edge_components(k=3)))
289
+ [[0], [1, 2, 3, 4], [5], [6], [7]]
290
+ >>> sorted(map(sorted, aux_graph.k_edge_components(k=4)))
291
+ [[0], [1], [2], [3], [4], [5], [6], [7]]
292
+
293
+ The auxiliary graph is primarily used for k-edge-ccs but it
294
+ can also speed up the queries of k-edge-subgraphs by refining the
295
+ search space.
296
+
297
+ >>> import itertools as it
298
+ >>> from networkx.utils import pairwise
299
+ >>> from networkx.algorithms.connectivity import EdgeComponentAuxGraph
300
+ >>> paths = [
301
+ ... (1, 2, 4, 3, 1, 4),
302
+ ... ]
303
+ >>> G = nx.Graph()
304
+ >>> G.add_nodes_from(it.chain(*paths))
305
+ >>> G.add_edges_from(it.chain(*[pairwise(path) for path in paths]))
306
+ >>> aux_graph = EdgeComponentAuxGraph.construct(G)
307
+ >>> sorted(map(sorted, aux_graph.k_edge_subgraphs(k=3)))
308
+ [[1], [2], [3], [4]]
309
+ >>> sorted(map(sorted, aux_graph.k_edge_components(k=3)))
310
+ [[1, 4], [2], [3]]
311
+ """
312
+
313
+ # @not_implemented_for('multigraph') # TODO: fix decor for classmethods
314
+ @classmethod
315
+ def construct(EdgeComponentAuxGraph, G):
316
+ """Builds an auxiliary graph encoding edge-connectivity between nodes.
317
+
318
+ Notes
319
+ -----
320
+ Given G=(V, E), initialize an empty auxiliary graph A.
321
+ Choose an arbitrary source node s. Initialize a set N of available
322
+ nodes (that can be used as the sink). The algorithm picks an
323
+ arbitrary node t from N - {s}, and then computes the minimum st-cut
324
+ (S, T) with value w. If G is directed the minimum of the st-cut or
325
+ the ts-cut is used instead. Then, the edge (s, t) is added to the
326
+ auxiliary graph with weight w. The algorithm is called recursively
327
+ first using S as the available nodes and s as the source, and then
328
+ using T and t. Recursion stops when the source is the only available
329
+ node.
330
+
331
+ Parameters
332
+ ----------
333
+ G : NetworkX graph
334
+ """
335
+ # workaround for classmethod decorator
336
+ not_implemented_for("multigraph")(lambda G: G)(G)
337
+
338
+ def _recursive_build(H, A, source, avail):
339
+ # Terminate once the flow has been compute to every node.
340
+ if {source} == avail:
341
+ return
342
+ # pick an arbitrary node as the sink
343
+ sink = arbitrary_element(avail - {source})
344
+ # find the minimum cut and its weight
345
+ value, (S, T) = nx.minimum_cut(H, source, sink)
346
+ if H.is_directed():
347
+ # check if the reverse direction has a smaller cut
348
+ value_, (T_, S_) = nx.minimum_cut(H, sink, source)
349
+ if value_ < value:
350
+ value, S, T = value_, S_, T_
351
+ # add edge with weight of cut to the aux graph
352
+ A.add_edge(source, sink, weight=value)
353
+ # recursively call until all but one node is used
354
+ _recursive_build(H, A, source, avail.intersection(S))
355
+ _recursive_build(H, A, sink, avail.intersection(T))
356
+
357
+ # Copy input to ensure all edges have unit capacity
358
+ H = G.__class__()
359
+ H.add_nodes_from(G.nodes())
360
+ H.add_edges_from(G.edges(), capacity=1)
361
+
362
+ # A is the auxiliary graph to be constructed
363
+ # It is a weighted undirected tree
364
+ A = nx.Graph()
365
+
366
+ # Pick an arbitrary node as the source
367
+ if H.number_of_nodes() > 0:
368
+ source = arbitrary_element(H.nodes())
369
+ # Initialize a set of elements that can be chosen as the sink
370
+ avail = set(H.nodes())
371
+
372
+ # This constructs A
373
+ _recursive_build(H, A, source, avail)
374
+
375
+ # This class is a container the holds the auxiliary graph A and
376
+ # provides access the k_edge_components function.
377
+ self = EdgeComponentAuxGraph()
378
+ self.A = A
379
+ self.H = H
380
+ return self
381
+
382
+ def k_edge_components(self, k):
383
+ """Queries the auxiliary graph for k-edge-connected components.
384
+
385
+ Parameters
386
+ ----------
387
+ k : Integer
388
+ Desired edge connectivity
389
+
390
+ Returns
391
+ -------
392
+ k_edge_components : a generator of k-edge-ccs
393
+
394
+ Notes
395
+ -----
396
+ Given the auxiliary graph, the k-edge-connected components can be
397
+ determined in linear time by removing all edges with weights less than
398
+ k from the auxiliary graph. The resulting connected components are the
399
+ k-edge-ccs in the original graph.
400
+ """
401
+ if k < 1:
402
+ raise ValueError("k cannot be less than 1")
403
+ A = self.A
404
+ # "traverse the auxiliary graph A and delete all edges with weights less
405
+ # than k"
406
+ aux_weights = nx.get_edge_attributes(A, "weight")
407
+ # Create a relevant graph with the auxiliary edges with weights >= k
408
+ R = nx.Graph()
409
+ R.add_nodes_from(A.nodes())
410
+ R.add_edges_from(e for e, w in aux_weights.items() if w >= k)
411
+
412
+ # Return the nodes that are k-edge-connected in the original graph
413
+ yield from nx.connected_components(R)
414
+
415
+ def k_edge_subgraphs(self, k):
416
+ """Queries the auxiliary graph for k-edge-connected subgraphs.
417
+
418
+ Parameters
419
+ ----------
420
+ k : Integer
421
+ Desired edge connectivity
422
+
423
+ Returns
424
+ -------
425
+ k_edge_subgraphs : a generator of k-edge-subgraphs
426
+
427
+ Notes
428
+ -----
429
+ Refines the k-edge-ccs into k-edge-subgraphs. The running time is more
430
+ than $O(|V|)$.
431
+
432
+ For single values of k it is faster to use `nx.k_edge_subgraphs`.
433
+ But for multiple values of k, it can be faster to build AuxGraph and
434
+ then use this method.
435
+ """
436
+ if k < 1:
437
+ raise ValueError("k cannot be less than 1")
438
+ H = self.H
439
+ A = self.A
440
+ # "traverse the auxiliary graph A and delete all edges with weights less
441
+ # than k"
442
+ aux_weights = nx.get_edge_attributes(A, "weight")
443
+ # Create a relevant graph with the auxiliary edges with weights >= k
444
+ R = nx.Graph()
445
+ R.add_nodes_from(A.nodes())
446
+ R.add_edges_from(e for e, w in aux_weights.items() if w >= k)
447
+
448
+ # Return the components whose subgraphs are k-edge-connected
449
+ for cc in nx.connected_components(R):
450
+ if len(cc) < k:
451
+ # Early return optimization
452
+ for node in cc:
453
+ yield {node}
454
+ else:
455
+ # Call subgraph solution to refine the results
456
+ C = H.subgraph(cc)
457
+ yield from k_edge_subgraphs(C, k)
458
+
459
+
460
+ def _low_degree_nodes(G, k, nbunch=None):
461
+ """Helper for finding nodes with degree less than k."""
462
+ # Nodes with degree less than k cannot be k-edge-connected.
463
+ if G.is_directed():
464
+ # Consider both in and out degree in the directed case
465
+ seen = set()
466
+ for node, degree in G.out_degree(nbunch):
467
+ if degree < k:
468
+ seen.add(node)
469
+ yield node
470
+ for node, degree in G.in_degree(nbunch):
471
+ if node not in seen and degree < k:
472
+ seen.add(node)
473
+ yield node
474
+ else:
475
+ # Only the degree matters in the undirected case
476
+ for node, degree in G.degree(nbunch):
477
+ if degree < k:
478
+ yield node
479
+
480
+
481
+ def _high_degree_components(G, k):
482
+ """Helper for filtering components that can't be k-edge-connected.
483
+
484
+ Removes and generates each node with degree less than k. Then generates
485
+ remaining components where all nodes have degree at least k.
486
+ """
487
+ # Iteratively remove parts of the graph that are not k-edge-connected
488
+ H = G.copy()
489
+ singletons = set(_low_degree_nodes(H, k))
490
+ while singletons:
491
+ # Only search neighbors of removed nodes
492
+ nbunch = set(it.chain.from_iterable(map(H.neighbors, singletons)))
493
+ nbunch.difference_update(singletons)
494
+ H.remove_nodes_from(singletons)
495
+ for node in singletons:
496
+ yield {node}
497
+ singletons = set(_low_degree_nodes(H, k, nbunch))
498
+
499
+ # Note: remaining connected components may not be k-edge-connected
500
+ if G.is_directed():
501
+ yield from nx.strongly_connected_components(H)
502
+ else:
503
+ yield from nx.connected_components(H)
504
+
505
+
506
+ @nx._dispatch
507
+ def general_k_edge_subgraphs(G, k):
508
+ """General algorithm to find all maximal k-edge-connected subgraphs in G.
509
+
510
+ Returns
511
+ -------
512
+ k_edge_subgraphs : a generator of nx.Graphs that are k-edge-subgraphs
513
+ Each k-edge-subgraph is a maximal set of nodes that defines a subgraph
514
+ of G that is k-edge-connected.
515
+
516
+ Notes
517
+ -----
518
+ Implementation of the basic algorithm from _[1]. The basic idea is to find
519
+ a global minimum cut of the graph. If the cut value is at least k, then the
520
+ graph is a k-edge-connected subgraph and can be added to the results.
521
+ Otherwise, the cut is used to split the graph in two and the procedure is
522
+ applied recursively. If the graph is just a single node, then it is also
523
+ added to the results. At the end, each result is either guaranteed to be
524
+ a single node or a subgraph of G that is k-edge-connected.
525
+
526
+ This implementation contains optimizations for reducing the number of calls
527
+ to max-flow, but there are other optimizations in _[1] that could be
528
+ implemented.
529
+
530
+ References
531
+ ----------
532
+ .. [1] Zhou, Liu, et al. (2012) Finding maximal k-edge-connected subgraphs
533
+ from a large graph. ACM International Conference on Extending Database
534
+ Technology 2012 480-–491.
535
+ https://openproceedings.org/2012/conf/edbt/ZhouLYLCL12.pdf
536
+
537
+ Examples
538
+ --------
539
+ >>> from networkx.utils import pairwise
540
+ >>> paths = [
541
+ ... (11, 12, 13, 14, 11, 13, 14, 12), # a 4-clique
542
+ ... (21, 22, 23, 24, 21, 23, 24, 22), # another 4-clique
543
+ ... # connect the cliques with high degree but low connectivity
544
+ ... (50, 13),
545
+ ... (12, 50, 22),
546
+ ... (13, 102, 23),
547
+ ... (14, 101, 24),
548
+ ... ]
549
+ >>> G = nx.Graph(it.chain(*[pairwise(path) for path in paths]))
550
+ >>> sorted(map(len, k_edge_subgraphs(G, k=3)))
551
+ [1, 1, 1, 4, 4]
552
+ """
553
+ if k < 1:
554
+ raise ValueError("k cannot be less than 1")
555
+
556
+ # Node pruning optimization (incorporates early return)
557
+ # find_ccs is either connected_components/strongly_connected_components
558
+ find_ccs = partial(_high_degree_components, k=k)
559
+
560
+ # Quick return optimization
561
+ if G.number_of_nodes() < k:
562
+ for node in G.nodes():
563
+ yield G.subgraph([node]).copy()
564
+ return
565
+
566
+ # Intermediate results
567
+ R0 = {G.subgraph(cc).copy() for cc in find_ccs(G)}
568
+ # Subdivide CCs in the intermediate results until they are k-conn
569
+ while R0:
570
+ G1 = R0.pop()
571
+ if G1.number_of_nodes() == 1:
572
+ yield G1
573
+ else:
574
+ # Find a global minimum cut
575
+ cut_edges = nx.minimum_edge_cut(G1)
576
+ cut_value = len(cut_edges)
577
+ if cut_value < k:
578
+ # G1 is not k-edge-connected, so subdivide it
579
+ G1.remove_edges_from(cut_edges)
580
+ for cc in find_ccs(G1):
581
+ R0.add(G1.subgraph(cc).copy())
582
+ else:
583
+ # Otherwise we found a k-edge-connected subgraph
584
+ yield G1
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/kcomponents.py ADDED
@@ -0,0 +1,222 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Moody and White algorithm for k-components
3
+ """
4
+ from collections import defaultdict
5
+ from itertools import combinations
6
+ from operator import itemgetter
7
+
8
+ import networkx as nx
9
+
10
+ # Define the default maximum flow function.
11
+ from networkx.algorithms.flow import edmonds_karp
12
+ from networkx.utils import not_implemented_for
13
+
14
+ default_flow_func = edmonds_karp
15
+
16
+ __all__ = ["k_components"]
17
+
18
+
19
+ @not_implemented_for("directed")
20
+ @nx._dispatch
21
+ def k_components(G, flow_func=None):
22
+ r"""Returns the k-component structure of a graph G.
23
+
24
+ A `k`-component is a maximal subgraph of a graph G that has, at least,
25
+ node connectivity `k`: we need to remove at least `k` nodes to break it
26
+ into more components. `k`-components have an inherent hierarchical
27
+ structure because they are nested in terms of connectivity: a connected
28
+ graph can contain several 2-components, each of which can contain
29
+ one or more 3-components, and so forth.
30
+
31
+ Parameters
32
+ ----------
33
+ G : NetworkX graph
34
+
35
+ flow_func : function
36
+ Function to perform the underlying flow computations. Default value
37
+ :meth:`edmonds_karp`. This function performs better in sparse graphs with
38
+ right tailed degree distributions. :meth:`shortest_augmenting_path` will
39
+ perform better in denser graphs.
40
+
41
+ Returns
42
+ -------
43
+ k_components : dict
44
+ Dictionary with all connectivity levels `k` in the input Graph as keys
45
+ and a list of sets of nodes that form a k-component of level `k` as
46
+ values.
47
+
48
+ Raises
49
+ ------
50
+ NetworkXNotImplemented
51
+ If the input graph is directed.
52
+
53
+ Examples
54
+ --------
55
+ >>> # Petersen graph has 10 nodes and it is triconnected, thus all
56
+ >>> # nodes are in a single component on all three connectivity levels
57
+ >>> G = nx.petersen_graph()
58
+ >>> k_components = nx.k_components(G)
59
+
60
+ Notes
61
+ -----
62
+ Moody and White [1]_ (appendix A) provide an algorithm for identifying
63
+ k-components in a graph, which is based on Kanevsky's algorithm [2]_
64
+ for finding all minimum-size node cut-sets of a graph (implemented in
65
+ :meth:`all_node_cuts` function):
66
+
67
+ 1. Compute node connectivity, k, of the input graph G.
68
+
69
+ 2. Identify all k-cutsets at the current level of connectivity using
70
+ Kanevsky's algorithm.
71
+
72
+ 3. Generate new graph components based on the removal of
73
+ these cutsets. Nodes in a cutset belong to both sides
74
+ of the induced cut.
75
+
76
+ 4. If the graph is neither complete nor trivial, return to 1;
77
+ else end.
78
+
79
+ This implementation also uses some heuristics (see [3]_ for details)
80
+ to speed up the computation.
81
+
82
+ See also
83
+ --------
84
+ node_connectivity
85
+ all_node_cuts
86
+ biconnected_components : special case of this function when k=2
87
+ k_edge_components : similar to this function, but uses edge-connectivity
88
+ instead of node-connectivity
89
+
90
+ References
91
+ ----------
92
+ .. [1] Moody, J. and D. White (2003). Social cohesion and embeddedness:
93
+ A hierarchical conception of social groups.
94
+ American Sociological Review 68(1), 103--28.
95
+ http://www2.asanet.org/journals/ASRFeb03MoodyWhite.pdf
96
+
97
+ .. [2] Kanevsky, A. (1993). Finding all minimum-size separating vertex
98
+ sets in a graph. Networks 23(6), 533--541.
99
+ http://onlinelibrary.wiley.com/doi/10.1002/net.3230230604/abstract
100
+
101
+ .. [3] Torrents, J. and F. Ferraro (2015). Structural Cohesion:
102
+ Visualization and Heuristics for Fast Computation.
103
+ https://arxiv.org/pdf/1503.04476v1
104
+
105
+ """
106
+ # Dictionary with connectivity level (k) as keys and a list of
107
+ # sets of nodes that form a k-component as values. Note that
108
+ # k-components can overlap (but only k - 1 nodes).
109
+ k_components = defaultdict(list)
110
+ # Define default flow function
111
+ if flow_func is None:
112
+ flow_func = default_flow_func
113
+ # Bicomponents as a base to check for higher order k-components
114
+ for component in nx.connected_components(G):
115
+ # isolated nodes have connectivity 0
116
+ comp = set(component)
117
+ if len(comp) > 1:
118
+ k_components[1].append(comp)
119
+ bicomponents = [G.subgraph(c) for c in nx.biconnected_components(G)]
120
+ for bicomponent in bicomponents:
121
+ bicomp = set(bicomponent)
122
+ # avoid considering dyads as bicomponents
123
+ if len(bicomp) > 2:
124
+ k_components[2].append(bicomp)
125
+ for B in bicomponents:
126
+ if len(B) <= 2:
127
+ continue
128
+ k = nx.node_connectivity(B, flow_func=flow_func)
129
+ if k > 2:
130
+ k_components[k].append(set(B))
131
+ # Perform cuts in a DFS like order.
132
+ cuts = list(nx.all_node_cuts(B, k=k, flow_func=flow_func))
133
+ stack = [(k, _generate_partition(B, cuts, k))]
134
+ while stack:
135
+ (parent_k, partition) = stack[-1]
136
+ try:
137
+ nodes = next(partition)
138
+ C = B.subgraph(nodes)
139
+ this_k = nx.node_connectivity(C, flow_func=flow_func)
140
+ if this_k > parent_k and this_k > 2:
141
+ k_components[this_k].append(set(C))
142
+ cuts = list(nx.all_node_cuts(C, k=this_k, flow_func=flow_func))
143
+ if cuts:
144
+ stack.append((this_k, _generate_partition(C, cuts, this_k)))
145
+ except StopIteration:
146
+ stack.pop()
147
+
148
+ # This is necessary because k-components may only be reported at their
149
+ # maximum k level. But we want to return a dictionary in which keys are
150
+ # connectivity levels and values list of sets of components, without
151
+ # skipping any connectivity level. Also, it's possible that subsets of
152
+ # an already detected k-component appear at a level k. Checking for this
153
+ # in the while loop above penalizes the common case. Thus we also have to
154
+ # _consolidate all connectivity levels in _reconstruct_k_components.
155
+ return _reconstruct_k_components(k_components)
156
+
157
+
158
+ def _consolidate(sets, k):
159
+ """Merge sets that share k or more elements.
160
+
161
+ See: http://rosettacode.org/wiki/Set_consolidation
162
+
163
+ The iterative python implementation posted there is
164
+ faster than this because of the overhead of building a
165
+ Graph and calling nx.connected_components, but it's not
166
+ clear for us if we can use it in NetworkX because there
167
+ is no licence for the code.
168
+
169
+ """
170
+ G = nx.Graph()
171
+ nodes = dict(enumerate(sets))
172
+ G.add_nodes_from(nodes)
173
+ G.add_edges_from(
174
+ (u, v) for u, v in combinations(nodes, 2) if len(nodes[u] & nodes[v]) >= k
175
+ )
176
+ for component in nx.connected_components(G):
177
+ yield set.union(*[nodes[n] for n in component])
178
+
179
+
180
+ def _generate_partition(G, cuts, k):
181
+ def has_nbrs_in_partition(G, node, partition):
182
+ return any(n in partition for n in G[node])
183
+
184
+ components = []
185
+ nodes = {n for n, d in G.degree() if d > k} - {n for cut in cuts for n in cut}
186
+ H = G.subgraph(nodes)
187
+ for cc in nx.connected_components(H):
188
+ component = set(cc)
189
+ for cut in cuts:
190
+ for node in cut:
191
+ if has_nbrs_in_partition(G, node, cc):
192
+ component.add(node)
193
+ if len(component) < G.order():
194
+ components.append(component)
195
+ yield from _consolidate(components, k + 1)
196
+
197
+
198
+ def _reconstruct_k_components(k_comps):
199
+ result = {}
200
+ max_k = max(k_comps)
201
+ for k in reversed(range(1, max_k + 1)):
202
+ if k == max_k:
203
+ result[k] = list(_consolidate(k_comps[k], k))
204
+ elif k not in k_comps:
205
+ result[k] = list(_consolidate(result[k + 1], k))
206
+ else:
207
+ nodes_at_k = set.union(*k_comps[k])
208
+ to_add = [c for c in result[k + 1] if any(n not in nodes_at_k for n in c)]
209
+ if to_add:
210
+ result[k] = list(_consolidate(k_comps[k] + to_add, k))
211
+ else:
212
+ result[k] = list(_consolidate(k_comps[k], k))
213
+ return result
214
+
215
+
216
+ def build_k_number_dict(kcomps):
217
+ result = {}
218
+ for k, comps in sorted(kcomps.items(), key=itemgetter(0)):
219
+ for comp in comps:
220
+ for node in comp:
221
+ result[node] = k
222
+ return result
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/kcutsets.py ADDED
@@ -0,0 +1,233 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Kanevsky all minimum node k cutsets algorithm.
3
+ """
4
+ import copy
5
+ from collections import defaultdict
6
+ from itertools import combinations
7
+ from operator import itemgetter
8
+
9
+ import networkx as nx
10
+ from networkx.algorithms.flow import (
11
+ build_residual_network,
12
+ edmonds_karp,
13
+ shortest_augmenting_path,
14
+ )
15
+
16
+ from .utils import build_auxiliary_node_connectivity
17
+
18
# Default max-flow routine for the local connectivity computations in
# all_node_cuts; per the docstring below, edmonds_karp performs better on
# sparse graphs with right-tailed degree distributions.
default_flow_func = edmonds_karp


__all__ = ["all_node_cuts"]
22
+
23
+
24
@nx._dispatch
def all_node_cuts(G, k=None, flow_func=None):
    r"""Returns all minimum k cutsets of an undirected graph G.

    This implementation is based on Kanevsky's algorithm [1]_ for finding all
    minimum-size node cut-sets of an undirected graph G; ie the set (or sets)
    of nodes of cardinality equal to the node connectivity of G. Thus if
    removed, would break G into two or more connected components.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    k : Integer
        Node connectivity of the input graph. If k is None, then it is
        computed. Default value: None.

    flow_func : function
        Function to perform the underlying flow computations. Default value is
        :func:`~networkx.algorithms.flow.edmonds_karp`. This function performs
        better in sparse graphs with right tailed degree distributions.
        :func:`~networkx.algorithms.flow.shortest_augmenting_path` will
        perform better in denser graphs.


    Returns
    -------
    cuts : a generator of node cutsets
        Each node cutset has cardinality equal to the node connectivity of
        the input graph.

    Examples
    --------
    >>> # A two-dimensional grid graph has 4 cutsets of cardinality 2
    >>> G = nx.grid_2d_graph(5, 5)
    >>> cutsets = list(nx.all_node_cuts(G))
    >>> len(cutsets)
    4
    >>> all(2 == len(cutset) for cutset in cutsets)
    True
    >>> nx.node_connectivity(G)
    2

    Notes
    -----
    This implementation is based on the sequential algorithm for finding all
    minimum-size separating vertex sets in a graph [1]_. The main idea is to
    compute minimum cuts using local maximum flow computations among a set
    of nodes of highest degree and all other non-adjacent nodes in the Graph.
    Once we find a minimum cut, we add an edge between the high degree
    node and the target node of the local maximum flow computation to make
    sure that we will not find that minimum cut again.

    See also
    --------
    node_connectivity
    edmonds_karp
    shortest_augmenting_path

    References
    ----------
    .. [1] Kanevsky, A. (1993). Finding all minimum-size separating vertex
        sets in a graph. Networks 23(6), 533--541.
        http://onlinelibrary.wiley.com/doi/10.1002/net.3230230604/abstract

    """
    if not nx.is_connected(G):
        raise nx.NetworkXError("Input graph is disconnected.")

    # Address some corner cases first.
    # For complete Graphs: every subset of n - 1 nodes is a minimum cutset.
    if nx.density(G) == 1:
        for cut_set in combinations(G, len(G) - 1):
            yield set(cut_set)
        return
    # Initialize data structures.
    # Keep track of the cuts already computed so we do not repeat them.
    seen = []
    # Even-Tarjan reduction is what we call auxiliary digraph
    # for node connectivity.
    H = build_auxiliary_node_connectivity(G)
    H_nodes = H.nodes  # for speed
    mapping = H.graph["mapping"]
    # Keep a copy of original predecessors, H will be modified later.
    # Shallow copy is enough.
    original_H_pred = copy.copy(H._pred)
    R = build_residual_network(H, "capacity")
    kwargs = {"capacity": "capacity", "residual": R}
    # Define default flow function
    if flow_func is None:
        flow_func = default_flow_func
    if flow_func is shortest_augmenting_path:
        kwargs["two_phase"] = True
    # Begin the actual algorithm
    # step 1: Find node connectivity k of G
    if k is None:
        k = nx.node_connectivity(G, flow_func=flow_func)
    # step 2:
    # Find k nodes with top degree, call it X:
    X = {n for n, d in sorted(G.degree(), key=itemgetter(1), reverse=True)[:k]}
    # Check if X is a k-node-cutset
    if _is_separating_set(G, X):
        seen.append(X)
        yield X

    for x in X:
        # step 3: Compute local connectivity flow of x with all other
        # non adjacent nodes in G
        non_adjacent = set(G) - X - set(G[x])
        for v in non_adjacent:
            # step 4: compute maximum flow in an Even-Tarjan reduction H of G
            # and step 5: build the associated residual network R
            R = flow_func(H, f"{mapping[x]}B", f"{mapping[v]}A", **kwargs)
            flow_value = R.graph["flow_value"]

            # Only a flow equal to k can correspond to a minimum cutset.
            if flow_value == k:
                # Find the nodes incident to the flow.
                E1 = flowed_edges = [
                    (u, w) for (u, w, d) in R.edges(data=True) if d["flow"] != 0
                ]
                VE1 = incident_nodes = {n for edge in E1 for n in edge}
                # Remove saturated edges from the residual network.
                # Note that reversed edges are introduced with capacity 0
                # in the residual graph and they need to be removed too.
                saturated_edges = [
                    (u, w, d)
                    for (u, w, d) in R.edges(data=True)
                    if d["capacity"] == d["flow"] or d["capacity"] == 0
                ]
                R.remove_edges_from(saturated_edges)
                R_closure = nx.transitive_closure(R)
                # step 6: shrink the strongly connected components of
                # residual flow network R and call it L.
                L = nx.condensation(R)
                cmap = L.graph["mapping"]
                inv_cmap = defaultdict(list)
                for n, scc in cmap.items():
                    inv_cmap[scc].append(n)
                # Find the incident nodes in the condensed graph.
                VE1 = {cmap[n] for n in VE1}
                # step 7: Compute all antichains of L;
                # they map to closed sets in H.
                # Any edge in H that links a closed set is part of a cutset.
                for antichain in nx.antichains(L):
                    # Only antichains that are subsets of incident nodes counts.
                    # Lemma 8 in reference.
                    if not set(antichain).issubset(VE1):
                        continue
                    # Nodes in an antichain of the condensation graph of
                    # the residual network map to a closed set of nodes that
                    # define a node partition of the auxiliary digraph H
                    # through taking all of antichain's predecessors in the
                    # transitive closure.
                    S = set()
                    for scc in antichain:
                        S.update(inv_cmap[scc])
                    S_ancestors = set()
                    for n in S:
                        S_ancestors.update(R_closure._pred[n])
                    S.update(S_ancestors)
                    # The source side of the cut must contain x's "B" copy
                    # and must not contain v's "A" copy.
                    if f"{mapping[x]}B" not in S or f"{mapping[v]}A" in S:
                        continue
                    # Find the cutset that links the node partition (S,~S) in H
                    cutset = set()
                    for u in S:
                        cutset.update((u, w) for w in original_H_pred[u] if w not in S)
                    # The edges in H that form the cutset are internal edges
                    # (ie edges that represent a node of the original graph G)
                    if any(H_nodes[u]["id"] != H_nodes[w]["id"] for u, w in cutset):
                        continue
                    node_cut = {H_nodes[u]["id"] for u, _ in cutset}

                    if len(node_cut) == k:
                        # The cut is invalid if it includes internal edges of
                        # end nodes. The other half of Lemma 8 in ref.
                        if x in node_cut or v in node_cut:
                            continue
                        if node_cut not in seen:
                            yield node_cut
                            seen.append(node_cut)

                # Add an edge (x, v) to make sure that we do not
                # find this cutset again. This is equivalent
                # of adding the edge in the input graph
                # G.add_edge(x, v) and then regenerate H and R:
                # Add edges to the auxiliary digraph.
                # See build_residual_network for convention we used
                # in residual graphs.
                H.add_edge(f"{mapping[x]}B", f"{mapping[v]}A", capacity=1)
                H.add_edge(f"{mapping[v]}B", f"{mapping[x]}A", capacity=1)
                # Add edges to the residual network.
                R.add_edge(f"{mapping[x]}B", f"{mapping[v]}A", capacity=1)
                R.add_edge(f"{mapping[v]}A", f"{mapping[x]}B", capacity=0)
                R.add_edge(f"{mapping[v]}B", f"{mapping[x]}A", capacity=1)
                R.add_edge(f"{mapping[x]}A", f"{mapping[v]}B", capacity=0)

                # Add again the saturated edges to reuse the residual network
                R.add_edges_from(saturated_edges)
223
+
224
+
225
def _is_separating_set(G, cut):
    """Return True if removing `cut` disconnects G.

    Assumes that the input graph is connected.
    """
    # Removing all but one node trivially separates the remaining node.
    if len(cut) == len(G) - 1:
        return True
    return not nx.is_connected(nx.restricted_view(G, cut, []))
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/stoerwagner.py ADDED
@@ -0,0 +1,150 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Stoer-Wagner minimum cut algorithm.
3
+ """
4
+ from itertools import islice
5
+
6
+ import networkx as nx
7
+
8
+ from ...utils import BinaryHeap, arbitrary_element, not_implemented_for
9
+
10
+ __all__ = ["stoer_wagner"]
11
+
12
+
13
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatch(edge_attrs="weight")
def stoer_wagner(G, weight="weight", heap=BinaryHeap):
    r"""Returns the weighted minimum edge cut using the Stoer-Wagner algorithm.

    Determine the minimum edge cut of a connected graph using the
    Stoer-Wagner algorithm. In weighted cases, all weights must be
    nonnegative.

    The running time of the algorithm depends on the type of heaps used:

    ============== =============================================
    Type of heap   Running time
    ============== =============================================
    Binary heap    $O(n (m + n) \log n)$
    Fibonacci heap $O(nm + n^2 \log n)$
    Pairing heap   $O(2^{2 \sqrt{\log \log n}} nm + n^2 \log n)$
    ============== =============================================

    Parameters
    ----------
    G : NetworkX graph
        Edges of the graph are expected to have an attribute named by the
        weight parameter below. If this attribute is not present, the edge is
        considered to have unit weight.

    weight : string
        Name of the weight attribute of the edges. If the attribute is not
        present, unit weight is assumed. Default value: 'weight'.

    heap : class
        Type of heap to be used in the algorithm. It should be a subclass of
        :class:`MinHeap` or implement a compatible interface.

        If a stock heap implementation is to be used, :class:`BinaryHeap` is
        recommended over :class:`PairingHeap` for Python implementations without
        optimized attribute accesses (e.g., CPython) despite a slower
        asymptotic running time. For Python implementations with optimized
        attribute accesses (e.g., PyPy), :class:`PairingHeap` provides better
        performance. Default value: :class:`BinaryHeap`.

    Returns
    -------
    cut_value : integer or float
        The sum of weights of edges in a minimum cut.

    partition : pair of node lists
        A partitioning of the nodes that defines a minimum cut.

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or a multigraph.

    NetworkXError
        If the graph has less than two nodes, is not connected or has a
        negative-weighted edge.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_edge("x", "a", weight=3)
    >>> G.add_edge("x", "b", weight=1)
    >>> G.add_edge("a", "c", weight=3)
    >>> G.add_edge("b", "c", weight=5)
    >>> G.add_edge("b", "d", weight=4)
    >>> G.add_edge("d", "e", weight=2)
    >>> G.add_edge("c", "y", weight=2)
    >>> G.add_edge("e", "y", weight=3)
    >>> cut_value, partition = nx.stoer_wagner(G)
    >>> cut_value
    4
    """
    n = len(G)
    if n < 2:
        raise nx.NetworkXError("graph has less than two nodes.")
    if not nx.is_connected(G):
        raise nx.NetworkXError("graph is not connected.")

    # Make a copy of the graph for internal use; self-loops are dropped and
    # missing weights default to 1.
    G = nx.Graph(
        (u, v, {"weight": e.get(weight, 1)}) for u, v, e in G.edges(data=True) if u != v
    )

    for u, v, e in G.edges(data=True):
        if e["weight"] < 0:
            raise nx.NetworkXError("graph has a negative-weighted edge.")

    cut_value = float("inf")
    nodes = set(G)
    contractions = []  # contracted node pairs

    # Repeatedly pick a pair of nodes to contract until only one node is left.
    for i in range(n - 1):
        # Pick an arbitrary node u and create a set A = {u}.
        u = arbitrary_element(G)
        A = {u}
        # Repeatedly pick the node "most tightly connected" to A and add it to
        # A. The tightness of connectivity of a node not in A is defined by the
        # sum of the weights of edges connecting it to nodes in A.
        h = heap()  # min-heap emulating a max-heap
        for v, e in G[u].items():
            h.insert(v, -e["weight"])
        # Repeat until all but one node has been added to A.
        for j in range(n - i - 2):
            u = h.pop()[0]
            A.add(u)
            for v, e in G[u].items():
                if v not in A:
                    h.insert(v, h.get(v, 0) - e["weight"])
        # A and the remaining node v define a "cut of the phase". There is a
        # minimum cut of the original graph that is also a cut of the phase.
        # Due to contractions in earlier phases, v may in fact represent
        # multiple nodes in the original graph.
        v, w = h.min()
        w = -w  # heap stored negated weights; restore the true cut weight
        if w < cut_value:
            cut_value = w
            best_phase = i
        # Contract v and the last node added to A.
        contractions.append((u, v))
        for w, e in G[v].items():
            if w != u:
                if w not in G[u]:
                    G.add_edge(u, w, weight=e["weight"])
                else:
                    G[u][w]["weight"] += e["weight"]
        G.remove_node(v)

    # Recover the optimal partitioning from the contractions: replay the
    # contractions that happened before the best phase and take everything
    # merged into the best phase's second node as one side of the cut.
    G = nx.Graph(islice(contractions, best_phase))
    v = contractions[best_phase][1]
    G.add_node(v)
    reachable = set(nx.single_source_shortest_path_length(G, v))
    partition = (list(reachable), list(nodes - reachable))

    return cut_value, partition
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (226 Bytes). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_boundary.cpython-311.pyc ADDED
Binary file (13.8 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_graph_hashing.cpython-311.pyc ADDED
Binary file (36.9 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_lowest_common_ancestors.cpython-311.pyc ADDED
Binary file (29.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_simple_paths.cpython-311.pyc ADDED
Binary file (57.6 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_swap.cpython-311.pyc ADDED
Binary file (17.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_threshold.cpython-311.pyc ADDED
Binary file (22.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_time_dependent.cpython-311.pyc ADDED
Binary file (24.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/__pycache__/test_tournament.cpython-311.pyc ADDED
Binary file (10.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_max_weight_clique.py ADDED
@@ -0,0 +1,181 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Maximum weight clique test suite.
2
+
3
+ """
4
+
5
+ import pytest
6
+
7
+ import networkx as nx
8
+
9
+
10
class TestMaximumWeightClique:
    # Exercises nx.algorithms.max_weight_clique on small fixtures.

    def test_basic_cases(self):
        # Run every fixture twice: once using the "weight" node attribute and
        # once with unit weights (weight_accessor=None, i.e. max clique size).
        def check_basic_case(graph_func, expected_weight, weight_accessor):
            graph = graph_func()
            clique, weight = nx.algorithms.max_weight_clique(graph, weight_accessor)
            assert verify_clique(
                graph, clique, weight, expected_weight, weight_accessor
            )

        for graph_func, (expected_weight, expected_size) in TEST_CASES.items():
            check_basic_case(graph_func, expected_weight, "weight")
            check_basic_case(graph_func, expected_size, None)

    def test_key_error(self):
        # A weight attribute missing from a node raises KeyError.
        graph = two_node_graph()
        with pytest.raises(KeyError):
            nx.algorithms.max_weight_clique(graph, "nonexistent-key")

    def test_error_on_non_integer_weight(self):
        # Non-integer weights are rejected with ValueError.
        graph = two_node_graph()
        graph.nodes[2]["weight"] = 1.5
        with pytest.raises(ValueError):
            nx.algorithms.max_weight_clique(graph)

    def test_unaffected_by_self_loops(self):
        # Self-loops must not change the result.
        graph = two_node_graph()
        graph.add_edge(1, 1)
        graph.add_edge(2, 2)
        clique, weight = nx.algorithms.max_weight_clique(graph, "weight")
        assert verify_clique(graph, clique, weight, 30, "weight")
        graph = three_node_independent_set()
        graph.add_edge(1, 1)
        clique, weight = nx.algorithms.max_weight_clique(graph, "weight")
        assert verify_clique(graph, clique, weight, 20, "weight")

    def test_30_node_prob(self):
        # A fixed 30-node instance with node i weighted i + 1; the expected
        # optimum has total weight 111.
        G = nx.Graph()
        G.add_nodes_from(range(1, 31))
        for i in range(1, 31):
            G.nodes[i]["weight"] = i + 1
        # fmt: off
        G.add_edges_from(
            [
                (1, 12), (1, 13), (1, 15), (1, 16), (1, 18), (1, 19), (1, 20),
                (1, 23), (1, 26), (1, 28), (1, 29), (1, 30), (2, 3), (2, 4),
                (2, 5), (2, 8), (2, 9), (2, 10), (2, 14), (2, 17), (2, 18),
                (2, 21), (2, 22), (2, 23), (2, 27), (3, 9), (3, 15), (3, 21),
                (3, 22), (3, 23), (3, 24), (3, 27), (3, 28), (3, 29), (4, 5),
                (4, 6), (4, 8), (4, 21), (4, 22), (4, 23), (4, 26), (4, 28),
                (4, 30), (5, 6), (5, 8), (5, 9), (5, 13), (5, 14), (5, 15),
                (5, 16), (5, 20), (5, 21), (5, 22), (5, 25), (5, 28), (5, 29),
                (6, 7), (6, 8), (6, 13), (6, 17), (6, 18), (6, 19), (6, 24),
                (6, 26), (6, 27), (6, 28), (6, 29), (7, 12), (7, 14), (7, 15),
                (7, 16), (7, 17), (7, 20), (7, 25), (7, 27), (7, 29), (7, 30),
                (8, 10), (8, 15), (8, 16), (8, 18), (8, 20), (8, 22), (8, 24),
                (8, 26), (8, 27), (8, 28), (8, 30), (9, 11), (9, 12), (9, 13),
                (9, 14), (9, 15), (9, 16), (9, 19), (9, 20), (9, 21), (9, 24),
                (9, 30), (10, 12), (10, 15), (10, 18), (10, 19), (10, 20),
                (10, 22), (10, 23), (10, 24), (10, 26), (10, 27), (10, 29),
                (10, 30), (11, 13), (11, 15), (11, 16), (11, 17), (11, 18),
                (11, 19), (11, 20), (11, 22), (11, 29), (11, 30), (12, 14),
                (12, 17), (12, 18), (12, 19), (12, 20), (12, 21), (12, 23),
                (12, 25), (12, 26), (12, 30), (13, 20), (13, 22), (13, 23),
                (13, 24), (13, 30), (14, 16), (14, 20), (14, 21), (14, 22),
                (14, 23), (14, 25), (14, 26), (14, 27), (14, 29), (14, 30),
                (15, 17), (15, 18), (15, 20), (15, 21), (15, 26), (15, 27),
                (15, 28), (16, 17), (16, 18), (16, 19), (16, 20), (16, 21),
                (16, 29), (16, 30), (17, 18), (17, 21), (17, 22), (17, 25),
                (17, 27), (17, 28), (17, 30), (18, 19), (18, 20), (18, 21),
                (18, 22), (18, 23), (18, 24), (19, 20), (19, 22), (19, 23),
                (19, 24), (19, 25), (19, 27), (19, 30), (20, 21), (20, 23),
                (20, 24), (20, 26), (20, 28), (20, 29), (21, 23), (21, 26),
                (21, 27), (21, 29), (22, 24), (22, 25), (22, 26), (22, 29),
                (23, 25), (23, 30), (24, 25), (24, 26), (25, 27), (25, 29),
                (26, 27), (26, 28), (26, 30), (28, 29), (29, 30),
            ]
        )
        # fmt: on
        clique, weight = nx.algorithms.max_weight_clique(G)
        assert verify_clique(G, clique, weight, 111, "weight")
90
+
91
+
92
+ # ############################ Utility functions ############################
93
def verify_clique(
    graph, clique, reported_clique_weight, expected_clique_weight, weight_accessor
):
    """Check that `clique` is a clique of `graph` with the expected weight.

    The weight is the number of nodes when `weight_accessor` is None and the
    sum of the nodes' "weight" attributes otherwise.  Returns True only when
    every pair of distinct clique nodes is adjacent and the computed weight
    equals both the reported and the expected values.
    """
    if not all(
        graph.has_edge(u, v) for u in clique for v in clique if u != v
    ):
        return False

    if weight_accessor is None:
        total = len(clique)
    else:
        total = sum(graph.nodes[v]["weight"] for v in clique)

    return total == expected_clique_weight and total == reported_clique_weight
114
+
115
+
116
+ # ############################ Graph Generation ############################
117
+
118
+
119
def empty_graph():
    """Return a graph with no nodes and no edges."""
    return nx.Graph()
121
+
122
+
123
def one_node_graph():
    """Return a single-node graph where node 1 has weight 10."""
    g = nx.Graph()
    g.add_node(1, weight=10)
    return g
128
+
129
+
130
def two_node_graph():
    """Return two nodes (weights 10 and 20) joined by one edge."""
    g = nx.Graph()
    g.add_node(1, weight=10)
    g.add_node(2, weight=20)
    g.add_edge(1, 2)
    return g
137
+
138
+
139
def three_node_clique():
    """Return a triangle with node weights 10, 20 and 5."""
    g = nx.Graph()
    for node, w in [(1, 10), (2, 20), (3, 5)]:
        g.add_node(node, weight=w)
    g.add_edges_from([(1, 2), (1, 3), (2, 3)])
    return g
147
+
148
+
149
def three_node_independent_set():
    """Return three isolated nodes with weights 10, 20 and 5."""
    g = nx.Graph()
    for node, w in [(1, 10), (2, 20), (3, 5)]:
        g.add_node(node, weight=w)
    return g
156
+
157
+
158
def disconnected():
    """Return two disjoint three-node paths with weighted nodes."""
    g = nx.Graph()
    g.add_edges_from([(1, 2), (2, 3), (4, 5), (5, 6)])
    for node, w in {1: 10, 2: 20, 3: 5, 4: 100, 5: 200, 6: 50}.items():
        g.nodes[node]["weight"] = w
    return g
168
+
169
+
170
# --------------------------------------------------------------------------
# Basic tests for all strategies
# For each basic graph function, specify expected weight of max weight clique
# and expected size of maximum clique
# (i.e. maps graph factory -> (max clique weight using "weight", max clique
# size under unit weights)).
TEST_CASES = {
    empty_graph: (0, 0),
    one_node_graph: (10, 1),
    two_node_graph: (30, 2),
    three_node_clique: (35, 3),
    three_node_independent_set: (20, 1),
    disconnected: (300, 2),
}
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_moral.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import networkx as nx
2
+ from networkx.algorithms.moral import moral_graph
3
+
4
+
5
def test_get_moral_graph():
    """Moralization yields an undirected graph with co-parents married."""
    dag = nx.DiGraph()
    dag.add_nodes_from(range(1, 8))
    dag.add_edges_from([(1, 2), (3, 2), (4, 1), (4, 5), (6, 5), (7, 5)])
    moral = moral_graph(dag)
    assert not moral.is_directed()
    # Parents sharing a child must become adjacent.
    for u, v in [(1, 3), (4, 6), (6, 7), (4, 7)]:
        assert moral.has_edge(u, v)
    assert not moral.has_edge(1, 5)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_non_randomness.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+
5
# Skip this whole test module when numpy is unavailable.
np = pytest.importorskip("numpy")
6
+
7
+
8
@pytest.mark.parametrize(
    "k, weight, expected",
    [
        (None, None, 7.21),  # infers 3 communities
        (2, None, 11.7),
        (None, "weight", 25.45),
        (2, "weight", 38.8),
    ],
)
def test_non_randomness(k, weight, expected):
    """Non-randomness of the karate club graph matches known values."""
    graph = nx.karate_club_graph()
    observed = nx.non_randomness(graph, k, weight)[0]
    np.testing.assert_almost_equal(observed, expected, decimal=2)
22
+
23
+
24
def test_non_connected():
    """A disconnected input graph is rejected."""
    graph = nx.Graph([(1, 2)])
    graph.add_node(3)
    with pytest.raises(nx.NetworkXException):
        nx.non_randomness(graph)
30
+
31
+
32
def test_self_loops():
    """A graph containing a self-loop is rejected."""
    graph = nx.Graph([(1, 2), (1, 1)])
    with pytest.raises(nx.NetworkXError):
        nx.non_randomness(graph)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_regular.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx
4
+ import networkx as nx
5
+ import networkx.algorithms.regular as reg
6
+ import networkx.generators as gen
7
+
8
+
9
class TestKFactor:
    """Tests for networkx.algorithms.regular.k_factor."""

    def test_k_factor_trivial(self):
        # A cycle is already 2-regular, so its 2-factor is itself.
        cycle = gen.cycle_graph(4)
        factor = reg.k_factor(cycle, 2)
        assert cycle.edges == factor.edges

    def test_k_factor1(self):
        grid = gen.grid_2d_graph(4, 4)
        factor = reg.k_factor(grid, 2)
        # The factor is a 2-regular subgraph of the original graph.
        for u, v in factor.edges():
            assert grid.has_edge(u, v)
        for _, degree in factor.degree():
            assert degree == 2

    def test_k_factor2(self):
        complete = gen.complete_graph(6)
        factor = reg.k_factor(complete, 3)
        for u, v in factor.edges():
            assert complete.has_edge(u, v)
        for _, degree in factor.degree():
            assert degree == 3

    def test_k_factor3(self):
        # No 3-factor exists in the 4x4 grid graph.
        with pytest.raises(nx.NetworkXUnfeasible):
            reg.k_factor(gen.grid_2d_graph(4, 4), 3)

    def test_k_factor4(self):
        # Perfect matching doesn't exist for 4,4 hexagonal lattice graph
        with pytest.raises(nx.NetworkXUnfeasible):
            reg.k_factor(gen.lattice.hexagonal_lattice_graph(4, 4), 2)

    def test_k_factor5(self):
        # small k to exercise SmallKGadget
        complete = gen.complete_graph(6)
        factor = reg.k_factor(complete, 2)
        for u, v in factor.edges():
            assert complete.has_edge(u, v)
        for _, degree in factor.degree():
            assert degree == 2
+
51
+
52
class TestIsRegular:
    """Tests for networkx.algorithms.regular.is_regular."""

    def test_is_regular1(self):
        assert reg.is_regular(gen.cycle_graph(4))

    def test_is_regular2(self):
        assert reg.is_regular(gen.complete_graph(5))

    def test_is_regular3(self):
        assert not reg.is_regular(gen.lollipop_graph(5, 5))

    def test_is_regular4(self):
        # Directed 3-cycle: every node has in- and out-degree 1.
        triangle = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
        assert reg.is_regular(triangle)
+
70
+
71
class TestIsKRegular:
    """Tests for networkx.algorithms.regular.is_k_regular."""

    def test_is_k_regular1(self):
        cycle = gen.cycle_graph(4)
        assert reg.is_k_regular(cycle, 2)
        assert not reg.is_k_regular(cycle, 3)

    def test_is_k_regular2(self):
        complete = gen.complete_graph(5)
        assert reg.is_k_regular(complete, 4)
        assert not reg.is_k_regular(complete, 3)
        assert not reg.is_k_regular(complete, 6)

    def test_is_k_regular3(self):
        lollipop = gen.lollipop_graph(5, 5)
        assert not reg.is_k_regular(lollipop, 5)
        assert not reg.is_k_regular(lollipop, 6)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_similarity.py ADDED
@@ -0,0 +1,923 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms.similarity import (
5
+ graph_edit_distance,
6
+ optimal_edit_paths,
7
+ optimize_graph_edit_distance,
8
+ )
9
+ from networkx.generators.classic import (
10
+ circular_ladder_graph,
11
+ cycle_graph,
12
+ path_graph,
13
+ wheel_graph,
14
+ )
15
+
16
+
17
def nmatch(n1, n2):
    """Node match function: node attribute dicts must compare equal."""
    return n1 == n2
19
+
20
+
21
def ematch(e1, e2):
    """Edge match function: edge attribute dicts must compare equal."""
    return e1 == e2
23
+
24
+
25
def getCanonical():
    """Build the small labeled star-ish graph used as a shared fixture."""
    G = nx.Graph()
    for name in "ABCD":
        G.add_node(name, label=name)
    for u, v in [("A", "B"), ("B", "C"), ("B", "D")]:
        G.add_edge(u, v, label=f"{u.lower()}-{v.lower()}")
    return G
35
+
36
+
37
+ class TestSimilarity:
38
+ @classmethod
39
+ def setup_class(cls):
40
+ global np
41
+ np = pytest.importorskip("numpy")
42
+ pytest.importorskip("scipy")
43
+
44
+ def test_graph_edit_distance_roots_and_timeout(self):
45
+ G0 = nx.star_graph(5)
46
+ G1 = G0.copy()
47
+ pytest.raises(ValueError, graph_edit_distance, G0, G1, roots=[2])
48
+ pytest.raises(ValueError, graph_edit_distance, G0, G1, roots=[2, 3, 4])
49
+ pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(9, 3))
50
+ pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(3, 9))
51
+ pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(9, 9))
52
+ assert graph_edit_distance(G0, G1, roots=(1, 2)) == 0
53
+ assert graph_edit_distance(G0, G1, roots=(0, 1)) == 8
54
+ assert graph_edit_distance(G0, G1, roots=(1, 2), timeout=5) == 0
55
+ assert graph_edit_distance(G0, G1, roots=(0, 1), timeout=5) == 8
56
+ assert graph_edit_distance(G0, G1, roots=(0, 1), timeout=0.0001) is None
57
+ # test raise on 0 timeout
58
+ pytest.raises(nx.NetworkXError, graph_edit_distance, G0, G1, timeout=0)
59
+
60
+ def test_graph_edit_distance(self):
61
+ G0 = nx.Graph()
62
+ G1 = path_graph(6)
63
+ G2 = cycle_graph(6)
64
+ G3 = wheel_graph(7)
65
+
66
+ assert graph_edit_distance(G0, G0) == 0
67
+ assert graph_edit_distance(G0, G1) == 11
68
+ assert graph_edit_distance(G1, G0) == 11
69
+ assert graph_edit_distance(G0, G2) == 12
70
+ assert graph_edit_distance(G2, G0) == 12
71
+ assert graph_edit_distance(G0, G3) == 19
72
+ assert graph_edit_distance(G3, G0) == 19
73
+
74
+ assert graph_edit_distance(G1, G1) == 0
75
+ assert graph_edit_distance(G1, G2) == 1
76
+ assert graph_edit_distance(G2, G1) == 1
77
+ assert graph_edit_distance(G1, G3) == 8
78
+ assert graph_edit_distance(G3, G1) == 8
79
+
80
+ assert graph_edit_distance(G2, G2) == 0
81
+ assert graph_edit_distance(G2, G3) == 7
82
+ assert graph_edit_distance(G3, G2) == 7
83
+
84
+ assert graph_edit_distance(G3, G3) == 0
85
+
86
+ def test_graph_edit_distance_node_match(self):
87
+ G1 = cycle_graph(5)
88
+ G2 = cycle_graph(5)
89
+ for n, attr in G1.nodes.items():
90
+ attr["color"] = "red" if n % 2 == 0 else "blue"
91
+ for n, attr in G2.nodes.items():
92
+ attr["color"] = "red" if n % 2 == 1 else "blue"
93
+ assert graph_edit_distance(G1, G2) == 0
94
+ assert (
95
+ graph_edit_distance(
96
+ G1, G2, node_match=lambda n1, n2: n1["color"] == n2["color"]
97
+ )
98
+ == 1
99
+ )
100
+
101
+ def test_graph_edit_distance_edge_match(self):
102
+ G1 = path_graph(6)
103
+ G2 = path_graph(6)
104
+ for e, attr in G1.edges.items():
105
+ attr["color"] = "red" if min(e) % 2 == 0 else "blue"
106
+ for e, attr in G2.edges.items():
107
+ attr["color"] = "red" if min(e) // 3 == 0 else "blue"
108
+ assert graph_edit_distance(G1, G2) == 0
109
+ assert (
110
+ graph_edit_distance(
111
+ G1, G2, edge_match=lambda e1, e2: e1["color"] == e2["color"]
112
+ )
113
+ == 2
114
+ )
115
+
116
+ def test_graph_edit_distance_node_cost(self):
117
+ G1 = path_graph(6)
118
+ G2 = path_graph(6)
119
+ for n, attr in G1.nodes.items():
120
+ attr["color"] = "red" if n % 2 == 0 else "blue"
121
+ for n, attr in G2.nodes.items():
122
+ attr["color"] = "red" if n % 2 == 1 else "blue"
123
+
124
+ def node_subst_cost(uattr, vattr):
125
+ if uattr["color"] == vattr["color"]:
126
+ return 1
127
+ else:
128
+ return 10
129
+
130
+ def node_del_cost(attr):
131
+ if attr["color"] == "blue":
132
+ return 20
133
+ else:
134
+ return 50
135
+
136
+ def node_ins_cost(attr):
137
+ if attr["color"] == "blue":
138
+ return 40
139
+ else:
140
+ return 100
141
+
142
+ assert (
143
+ graph_edit_distance(
144
+ G1,
145
+ G2,
146
+ node_subst_cost=node_subst_cost,
147
+ node_del_cost=node_del_cost,
148
+ node_ins_cost=node_ins_cost,
149
+ )
150
+ == 6
151
+ )
152
+
153
+ def test_graph_edit_distance_edge_cost(self):
154
+ G1 = path_graph(6)
155
+ G2 = path_graph(6)
156
+ for e, attr in G1.edges.items():
157
+ attr["color"] = "red" if min(e) % 2 == 0 else "blue"
158
+ for e, attr in G2.edges.items():
159
+ attr["color"] = "red" if min(e) // 3 == 0 else "blue"
160
+
161
+ def edge_subst_cost(gattr, hattr):
162
+ if gattr["color"] == hattr["color"]:
163
+ return 0.01
164
+ else:
165
+ return 0.1
166
+
167
+ def edge_del_cost(attr):
168
+ if attr["color"] == "blue":
169
+ return 0.2
170
+ else:
171
+ return 0.5
172
+
173
+ def edge_ins_cost(attr):
174
+ if attr["color"] == "blue":
175
+ return 0.4
176
+ else:
177
+ return 1.0
178
+
179
+ assert (
180
+ graph_edit_distance(
181
+ G1,
182
+ G2,
183
+ edge_subst_cost=edge_subst_cost,
184
+ edge_del_cost=edge_del_cost,
185
+ edge_ins_cost=edge_ins_cost,
186
+ )
187
+ == 0.23
188
+ )
189
+
190
+ def test_graph_edit_distance_upper_bound(self):
191
+ G1 = circular_ladder_graph(2)
192
+ G2 = circular_ladder_graph(6)
193
+ assert graph_edit_distance(G1, G2, upper_bound=5) is None
194
+ assert graph_edit_distance(G1, G2, upper_bound=24) == 22
195
+ assert graph_edit_distance(G1, G2) == 22
196
+
197
+ def test_optimal_edit_paths(self):
198
+ G1 = path_graph(3)
199
+ G2 = cycle_graph(3)
200
+ paths, cost = optimal_edit_paths(G1, G2)
201
+ assert cost == 1
202
+ assert len(paths) == 6
203
+
204
+ def canonical(vertex_path, edge_path):
205
+ return (
206
+ tuple(sorted(vertex_path)),
207
+ tuple(sorted(edge_path, key=lambda x: (None in x, x))),
208
+ )
209
+
210
+ expected_paths = [
211
+ (
212
+ [(0, 0), (1, 1), (2, 2)],
213
+ [((0, 1), (0, 1)), ((1, 2), (1, 2)), (None, (0, 2))],
214
+ ),
215
+ (
216
+ [(0, 0), (1, 2), (2, 1)],
217
+ [((0, 1), (0, 2)), ((1, 2), (1, 2)), (None, (0, 1))],
218
+ ),
219
+ (
220
+ [(0, 1), (1, 0), (2, 2)],
221
+ [((0, 1), (0, 1)), ((1, 2), (0, 2)), (None, (1, 2))],
222
+ ),
223
+ (
224
+ [(0, 1), (1, 2), (2, 0)],
225
+ [((0, 1), (1, 2)), ((1, 2), (0, 2)), (None, (0, 1))],
226
+ ),
227
+ (
228
+ [(0, 2), (1, 0), (2, 1)],
229
+ [((0, 1), (0, 2)), ((1, 2), (0, 1)), (None, (1, 2))],
230
+ ),
231
+ (
232
+ [(0, 2), (1, 1), (2, 0)],
233
+ [((0, 1), (1, 2)), ((1, 2), (0, 1)), (None, (0, 2))],
234
+ ),
235
+ ]
236
+ assert {canonical(*p) for p in paths} == {canonical(*p) for p in expected_paths}
237
+
238
+ def test_optimize_graph_edit_distance(self):
239
+ G1 = circular_ladder_graph(2)
240
+ G2 = circular_ladder_graph(6)
241
+ bestcost = 1000
242
+ for cost in optimize_graph_edit_distance(G1, G2):
243
+ assert cost < bestcost
244
+ bestcost = cost
245
+ assert bestcost == 22
246
+
247
+ # def test_graph_edit_distance_bigger(self):
248
+ # G1 = circular_ladder_graph(12)
249
+ # G2 = circular_ladder_graph(16)
250
+ # assert_equal(graph_edit_distance(G1, G2), 22)
251
+
252
+ def test_selfloops(self):
253
+ G0 = nx.Graph()
254
+ G1 = nx.Graph()
255
+ G1.add_edges_from((("A", "A"), ("A", "B")))
256
+ G2 = nx.Graph()
257
+ G2.add_edges_from((("A", "B"), ("B", "B")))
258
+ G3 = nx.Graph()
259
+ G3.add_edges_from((("A", "A"), ("A", "B"), ("B", "B")))
260
+
261
+ assert graph_edit_distance(G0, G0) == 0
262
+ assert graph_edit_distance(G0, G1) == 4
263
+ assert graph_edit_distance(G1, G0) == 4
264
+ assert graph_edit_distance(G0, G2) == 4
265
+ assert graph_edit_distance(G2, G0) == 4
266
+ assert graph_edit_distance(G0, G3) == 5
267
+ assert graph_edit_distance(G3, G0) == 5
268
+
269
+ assert graph_edit_distance(G1, G1) == 0
270
+ assert graph_edit_distance(G1, G2) == 0
271
+ assert graph_edit_distance(G2, G1) == 0
272
+ assert graph_edit_distance(G1, G3) == 1
273
+ assert graph_edit_distance(G3, G1) == 1
274
+
275
+ assert graph_edit_distance(G2, G2) == 0
276
+ assert graph_edit_distance(G2, G3) == 1
277
+ assert graph_edit_distance(G3, G2) == 1
278
+
279
+ assert graph_edit_distance(G3, G3) == 0
280
+
281
+ def test_digraph(self):
282
+ G0 = nx.DiGraph()
283
+ G1 = nx.DiGraph()
284
+ G1.add_edges_from((("A", "B"), ("B", "C"), ("C", "D"), ("D", "A")))
285
+ G2 = nx.DiGraph()
286
+ G2.add_edges_from((("A", "B"), ("B", "C"), ("C", "D"), ("A", "D")))
287
+ G3 = nx.DiGraph()
288
+ G3.add_edges_from((("A", "B"), ("A", "C"), ("B", "D"), ("C", "D")))
289
+
290
+ assert graph_edit_distance(G0, G0) == 0
291
+ assert graph_edit_distance(G0, G1) == 8
292
+ assert graph_edit_distance(G1, G0) == 8
293
+ assert graph_edit_distance(G0, G2) == 8
294
+ assert graph_edit_distance(G2, G0) == 8
295
+ assert graph_edit_distance(G0, G3) == 8
296
+ assert graph_edit_distance(G3, G0) == 8
297
+
298
+ assert graph_edit_distance(G1, G1) == 0
299
+ assert graph_edit_distance(G1, G2) == 2
300
+ assert graph_edit_distance(G2, G1) == 2
301
+ assert graph_edit_distance(G1, G3) == 4
302
+ assert graph_edit_distance(G3, G1) == 4
303
+
304
+ assert graph_edit_distance(G2, G2) == 0
305
+ assert graph_edit_distance(G2, G3) == 2
306
+ assert graph_edit_distance(G3, G2) == 2
307
+
308
+ assert graph_edit_distance(G3, G3) == 0
309
+
310
+ def test_multigraph(self):
311
+ G0 = nx.MultiGraph()
312
+ G1 = nx.MultiGraph()
313
+ G1.add_edges_from((("A", "B"), ("B", "C"), ("A", "C")))
314
+ G2 = nx.MultiGraph()
315
+ G2.add_edges_from((("A", "B"), ("B", "C"), ("B", "C"), ("A", "C")))
316
+ G3 = nx.MultiGraph()
317
+ G3.add_edges_from((("A", "B"), ("B", "C"), ("A", "C"), ("A", "C"), ("A", "C")))
318
+
319
+ assert graph_edit_distance(G0, G0) == 0
320
+ assert graph_edit_distance(G0, G1) == 6
321
+ assert graph_edit_distance(G1, G0) == 6
322
+ assert graph_edit_distance(G0, G2) == 7
323
+ assert graph_edit_distance(G2, G0) == 7
324
+ assert graph_edit_distance(G0, G3) == 8
325
+ assert graph_edit_distance(G3, G0) == 8
326
+
327
+ assert graph_edit_distance(G1, G1) == 0
328
+ assert graph_edit_distance(G1, G2) == 1
329
+ assert graph_edit_distance(G2, G1) == 1
330
+ assert graph_edit_distance(G1, G3) == 2
331
+ assert graph_edit_distance(G3, G1) == 2
332
+
333
+ assert graph_edit_distance(G2, G2) == 0
334
+ assert graph_edit_distance(G2, G3) == 1
335
+ assert graph_edit_distance(G3, G2) == 1
336
+
337
+ assert graph_edit_distance(G3, G3) == 0
338
+
339
+ def test_multidigraph(self):
340
+ G1 = nx.MultiDiGraph()
341
+ G1.add_edges_from(
342
+ (
343
+ ("hardware", "kernel"),
344
+ ("kernel", "hardware"),
345
+ ("kernel", "userspace"),
346
+ ("userspace", "kernel"),
347
+ )
348
+ )
349
+ G2 = nx.MultiDiGraph()
350
+ G2.add_edges_from(
351
+ (
352
+ ("winter", "spring"),
353
+ ("spring", "summer"),
354
+ ("summer", "autumn"),
355
+ ("autumn", "winter"),
356
+ )
357
+ )
358
+
359
+ assert graph_edit_distance(G1, G2) == 5
360
+ assert graph_edit_distance(G2, G1) == 5
361
+
362
+ # by https://github.com/jfbeaumont
363
+ def testCopy(self):
364
+ G = nx.Graph()
365
+ G.add_node("A", label="A")
366
+ G.add_node("B", label="B")
367
+ G.add_edge("A", "B", label="a-b")
368
+ assert (
369
+ graph_edit_distance(G, G.copy(), node_match=nmatch, edge_match=ematch) == 0
370
+ )
371
+
372
+ def testSame(self):
373
+ G1 = nx.Graph()
374
+ G1.add_node("A", label="A")
375
+ G1.add_node("B", label="B")
376
+ G1.add_edge("A", "B", label="a-b")
377
+ G2 = nx.Graph()
378
+ G2.add_node("A", label="A")
379
+ G2.add_node("B", label="B")
380
+ G2.add_edge("A", "B", label="a-b")
381
+ assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 0
382
+
383
+ def testOneEdgeLabelDiff(self):
384
+ G1 = nx.Graph()
385
+ G1.add_node("A", label="A")
386
+ G1.add_node("B", label="B")
387
+ G1.add_edge("A", "B", label="a-b")
388
+ G2 = nx.Graph()
389
+ G2.add_node("A", label="A")
390
+ G2.add_node("B", label="B")
391
+ G2.add_edge("A", "B", label="bad")
392
+ assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
393
+
394
+ def testOneNodeLabelDiff(self):
395
+ G1 = nx.Graph()
396
+ G1.add_node("A", label="A")
397
+ G1.add_node("B", label="B")
398
+ G1.add_edge("A", "B", label="a-b")
399
+ G2 = nx.Graph()
400
+ G2.add_node("A", label="Z")
401
+ G2.add_node("B", label="B")
402
+ G2.add_edge("A", "B", label="a-b")
403
+ assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
404
+
405
+ def testOneExtraNode(self):
406
+ G1 = nx.Graph()
407
+ G1.add_node("A", label="A")
408
+ G1.add_node("B", label="B")
409
+ G1.add_edge("A", "B", label="a-b")
410
+ G2 = nx.Graph()
411
+ G2.add_node("A", label="A")
412
+ G2.add_node("B", label="B")
413
+ G2.add_edge("A", "B", label="a-b")
414
+ G2.add_node("C", label="C")
415
+ assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
416
+
417
+ def testOneExtraEdge(self):
418
+ G1 = nx.Graph()
419
+ G1.add_node("A", label="A")
420
+ G1.add_node("B", label="B")
421
+ G1.add_node("C", label="C")
422
+ G1.add_node("C", label="C")
423
+ G1.add_edge("A", "B", label="a-b")
424
+ G2 = nx.Graph()
425
+ G2.add_node("A", label="A")
426
+ G2.add_node("B", label="B")
427
+ G2.add_node("C", label="C")
428
+ G2.add_edge("A", "B", label="a-b")
429
+ G2.add_edge("A", "C", label="a-c")
430
+ assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
431
+
432
+ def testOneExtraNodeAndEdge(self):
433
+ G1 = nx.Graph()
434
+ G1.add_node("A", label="A")
435
+ G1.add_node("B", label="B")
436
+ G1.add_edge("A", "B", label="a-b")
437
+ G2 = nx.Graph()
438
+ G2.add_node("A", label="A")
439
+ G2.add_node("B", label="B")
440
+ G2.add_node("C", label="C")
441
+ G2.add_edge("A", "B", label="a-b")
442
+ G2.add_edge("A", "C", label="a-c")
443
+ assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2
444
+
445
+ def testGraph1(self):
446
+ G1 = getCanonical()
447
+ G2 = nx.Graph()
448
+ G2.add_node("A", label="A")
449
+ G2.add_node("B", label="B")
450
+ G2.add_node("D", label="D")
451
+ G2.add_node("E", label="E")
452
+ G2.add_edge("A", "B", label="a-b")
453
+ G2.add_edge("B", "D", label="b-d")
454
+ G2.add_edge("D", "E", label="d-e")
455
+ assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 3
456
+
457
+ def testGraph2(self):
458
+ G1 = getCanonical()
459
+ G2 = nx.Graph()
460
+ G2.add_node("A", label="A")
461
+ G2.add_node("B", label="B")
462
+ G2.add_node("C", label="C")
463
+ G2.add_node("D", label="D")
464
+ G2.add_node("E", label="E")
465
+ G2.add_edge("A", "B", label="a-b")
466
+ G2.add_edge("B", "C", label="b-c")
467
+ G2.add_edge("C", "D", label="c-d")
468
+ G2.add_edge("C", "E", label="c-e")
469
+ assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 4
470
+
471
+ def testGraph3(self):
472
+ G1 = getCanonical()
473
+ G2 = nx.Graph()
474
+ G2.add_node("A", label="A")
475
+ G2.add_node("B", label="B")
476
+ G2.add_node("C", label="C")
477
+ G2.add_node("D", label="D")
478
+ G2.add_node("E", label="E")
479
+ G2.add_node("F", label="F")
480
+ G2.add_node("G", label="G")
481
+ G2.add_edge("A", "C", label="a-c")
482
+ G2.add_edge("A", "D", label="a-d")
483
+ G2.add_edge("D", "E", label="d-e")
484
+ G2.add_edge("D", "F", label="d-f")
485
+ G2.add_edge("D", "G", label="d-g")
486
+ G2.add_edge("E", "B", label="e-b")
487
+ assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 12
488
+
489
+ def testGraph4(self):
490
+ G1 = getCanonical()
491
+ G2 = nx.Graph()
492
+ G2.add_node("A", label="A")
493
+ G2.add_node("B", label="B")
494
+ G2.add_node("C", label="C")
495
+ G2.add_node("D", label="D")
496
+ G2.add_edge("A", "B", label="a-b")
497
+ G2.add_edge("B", "C", label="b-c")
498
+ G2.add_edge("C", "D", label="c-d")
499
+ assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2
500
+
501
+ def testGraph4_a(self):
502
+ G1 = getCanonical()
503
+ G2 = nx.Graph()
504
+ G2.add_node("A", label="A")
505
+ G2.add_node("B", label="B")
506
+ G2.add_node("C", label="C")
507
+ G2.add_node("D", label="D")
508
+ G2.add_edge("A", "B", label="a-b")
509
+ G2.add_edge("B", "C", label="b-c")
510
+ G2.add_edge("A", "D", label="a-d")
511
+ assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2
512
+
513
+ def testGraph4_b(self):
514
+ G1 = getCanonical()
515
+ G2 = nx.Graph()
516
+ G2.add_node("A", label="A")
517
+ G2.add_node("B", label="B")
518
+ G2.add_node("C", label="C")
519
+ G2.add_node("D", label="D")
520
+ G2.add_edge("A", "B", label="a-b")
521
+ G2.add_edge("B", "C", label="b-c")
522
+ G2.add_edge("B", "D", label="bad")
523
+ assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
524
+
525
+ # note: nx.simrank_similarity_numpy not included because returns np.array
526
+ simrank_algs = [
527
+ nx.simrank_similarity,
528
+ nx.algorithms.similarity._simrank_similarity_python,
529
+ ]
530
+
531
+ @pytest.mark.parametrize("simrank_similarity", simrank_algs)
532
+ def test_simrank_no_source_no_target(self, simrank_similarity):
533
+ G = nx.cycle_graph(5)
534
+ expected = {
535
+ 0: {
536
+ 0: 1,
537
+ 1: 0.3951219505902448,
538
+ 2: 0.5707317069281646,
539
+ 3: 0.5707317069281646,
540
+ 4: 0.3951219505902449,
541
+ },
542
+ 1: {
543
+ 0: 0.3951219505902448,
544
+ 1: 1,
545
+ 2: 0.3951219505902449,
546
+ 3: 0.5707317069281646,
547
+ 4: 0.5707317069281646,
548
+ },
549
+ 2: {
550
+ 0: 0.5707317069281646,
551
+ 1: 0.3951219505902449,
552
+ 2: 1,
553
+ 3: 0.3951219505902449,
554
+ 4: 0.5707317069281646,
555
+ },
556
+ 3: {
557
+ 0: 0.5707317069281646,
558
+ 1: 0.5707317069281646,
559
+ 2: 0.3951219505902449,
560
+ 3: 1,
561
+ 4: 0.3951219505902449,
562
+ },
563
+ 4: {
564
+ 0: 0.3951219505902449,
565
+ 1: 0.5707317069281646,
566
+ 2: 0.5707317069281646,
567
+ 3: 0.3951219505902449,
568
+ 4: 1,
569
+ },
570
+ }
571
+ actual = simrank_similarity(G)
572
+ for k, v in expected.items():
573
+ assert v == pytest.approx(actual[k], abs=1e-2)
574
+
575
+ # For a DiGraph test, use the first graph from the paper cited in
576
+ # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126
577
+ G = nx.DiGraph()
578
+ G.add_node(0, label="Univ")
579
+ G.add_node(1, label="ProfA")
580
+ G.add_node(2, label="ProfB")
581
+ G.add_node(3, label="StudentA")
582
+ G.add_node(4, label="StudentB")
583
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)])
584
+
585
+ expected = {
586
+ 0: {0: 1, 1: 0.0, 2: 0.1323363991265798, 3: 0.0, 4: 0.03387811817640443},
587
+ 1: {0: 0.0, 1: 1, 2: 0.4135512472705618, 3: 0.0, 4: 0.10586911930126384},
588
+ 2: {
589
+ 0: 0.1323363991265798,
590
+ 1: 0.4135512472705618,
591
+ 2: 1,
592
+ 3: 0.04234764772050554,
593
+ 4: 0.08822426608438655,
594
+ },
595
+ 3: {0: 0.0, 1: 0.0, 2: 0.04234764772050554, 3: 1, 4: 0.3308409978164495},
596
+ 4: {
597
+ 0: 0.03387811817640443,
598
+ 1: 0.10586911930126384,
599
+ 2: 0.08822426608438655,
600
+ 3: 0.3308409978164495,
601
+ 4: 1,
602
+ },
603
+ }
604
+ # Use the importance_factor from the paper to get the same numbers.
605
+ actual = simrank_similarity(G, importance_factor=0.8)
606
+ for k, v in expected.items():
607
+ assert v == pytest.approx(actual[k], abs=1e-2)
608
+
609
+ @pytest.mark.parametrize("simrank_similarity", simrank_algs)
610
+ def test_simrank_source_no_target(self, simrank_similarity):
611
+ G = nx.cycle_graph(5)
612
+ expected = {
613
+ 0: 1,
614
+ 1: 0.3951219505902448,
615
+ 2: 0.5707317069281646,
616
+ 3: 0.5707317069281646,
617
+ 4: 0.3951219505902449,
618
+ }
619
+ actual = simrank_similarity(G, source=0)
620
+ assert expected == pytest.approx(actual, abs=1e-2)
621
+
622
+ # For a DiGraph test, use the first graph from the paper cited in
623
+ # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126
624
+ G = nx.DiGraph()
625
+ G.add_node(0, label="Univ")
626
+ G.add_node(1, label="ProfA")
627
+ G.add_node(2, label="ProfB")
628
+ G.add_node(3, label="StudentA")
629
+ G.add_node(4, label="StudentB")
630
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)])
631
+
632
+ expected = {0: 1, 1: 0.0, 2: 0.1323363991265798, 3: 0.0, 4: 0.03387811817640443}
633
+ # Use the importance_factor from the paper to get the same numbers.
634
+ actual = simrank_similarity(G, importance_factor=0.8, source=0)
635
+ assert expected == pytest.approx(actual, abs=1e-2)
636
+
637
+ @pytest.mark.parametrize("simrank_similarity", simrank_algs)
638
+ def test_simrank_noninteger_nodes(self, simrank_similarity):
639
+ G = nx.cycle_graph(5)
640
+ G = nx.relabel_nodes(G, dict(enumerate("abcde")))
641
+ expected = {
642
+ "a": 1,
643
+ "b": 0.3951219505902448,
644
+ "c": 0.5707317069281646,
645
+ "d": 0.5707317069281646,
646
+ "e": 0.3951219505902449,
647
+ }
648
+ actual = simrank_similarity(G, source="a")
649
+ assert expected == pytest.approx(actual, abs=1e-2)
650
+
651
+ # For a DiGraph test, use the first graph from the paper cited in
652
+ # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126
653
+ G = nx.DiGraph()
654
+ G.add_node(0, label="Univ")
655
+ G.add_node(1, label="ProfA")
656
+ G.add_node(2, label="ProfB")
657
+ G.add_node(3, label="StudentA")
658
+ G.add_node(4, label="StudentB")
659
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)])
660
+ node_labels = dict(enumerate(nx.get_node_attributes(G, "label").values()))
661
+ G = nx.relabel_nodes(G, node_labels)
662
+
663
+ expected = {
664
+ "Univ": 1,
665
+ "ProfA": 0.0,
666
+ "ProfB": 0.1323363991265798,
667
+ "StudentA": 0.0,
668
+ "StudentB": 0.03387811817640443,
669
+ }
670
+ # Use the importance_factor from the paper to get the same numbers.
671
+ actual = simrank_similarity(G, importance_factor=0.8, source="Univ")
672
+ assert expected == pytest.approx(actual, abs=1e-2)
673
+
674
+ @pytest.mark.parametrize("simrank_similarity", simrank_algs)
675
+ def test_simrank_source_and_target(self, simrank_similarity):
676
+ G = nx.cycle_graph(5)
677
+ expected = 1
678
+ actual = simrank_similarity(G, source=0, target=0)
679
+ assert expected == pytest.approx(actual, abs=1e-2)
680
+
681
+ # For a DiGraph test, use the first graph from the paper cited in
682
+ # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126
683
+ G = nx.DiGraph()
684
+ G.add_node(0, label="Univ")
685
+ G.add_node(1, label="ProfA")
686
+ G.add_node(2, label="ProfB")
687
+ G.add_node(3, label="StudentA")
688
+ G.add_node(4, label="StudentB")
689
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)])
690
+
691
+ expected = 0.1323363991265798
692
+ # Use the importance_factor from the paper to get the same numbers.
693
+ # Use the pair (0,2) because (0,0) and (0,1) have trivial results.
694
+ actual = simrank_similarity(G, importance_factor=0.8, source=0, target=2)
695
+ assert expected == pytest.approx(actual, abs=1e-5)
696
+
697
+ @pytest.mark.parametrize("alg", simrank_algs)
698
+ def test_simrank_max_iterations(self, alg):
699
+ G = nx.cycle_graph(5)
700
+ pytest.raises(nx.ExceededMaxIterations, alg, G, max_iterations=10)
701
+
702
+ def test_simrank_between_versions(self):
703
+ G = nx.cycle_graph(5)
704
+ # _python tolerance 1e-4
705
+ expected_python_tol4 = {
706
+ 0: 1,
707
+ 1: 0.394512499239852,
708
+ 2: 0.5703550452791322,
709
+ 3: 0.5703550452791323,
710
+ 4: 0.394512499239852,
711
+ }
712
+ # _numpy tolerance 1e-4
713
+ expected_numpy_tol4 = {
714
+ 0: 1.0,
715
+ 1: 0.3947180735764555,
716
+ 2: 0.570482097206368,
717
+ 3: 0.570482097206368,
718
+ 4: 0.3947180735764555,
719
+ }
720
+ actual = nx.simrank_similarity(G, source=0)
721
+ assert expected_numpy_tol4 == pytest.approx(actual, abs=1e-7)
722
+ # versions differ at 1e-4 level but equal at 1e-3
723
+ assert expected_python_tol4 != pytest.approx(actual, abs=1e-4)
724
+ assert expected_python_tol4 == pytest.approx(actual, abs=1e-3)
725
+
726
+ actual = nx.similarity._simrank_similarity_python(G, source=0)
727
+ assert expected_python_tol4 == pytest.approx(actual, abs=1e-7)
728
+ # versions differ at 1e-4 level but equal at 1e-3
729
+ assert expected_numpy_tol4 != pytest.approx(actual, abs=1e-4)
730
+ assert expected_numpy_tol4 == pytest.approx(actual, abs=1e-3)
731
+
732
+ def test_simrank_numpy_no_source_no_target(self):
733
+ G = nx.cycle_graph(5)
734
+ expected = np.array(
735
+ [
736
+ [
737
+ 1.0,
738
+ 0.3947180735764555,
739
+ 0.570482097206368,
740
+ 0.570482097206368,
741
+ 0.3947180735764555,
742
+ ],
743
+ [
744
+ 0.3947180735764555,
745
+ 1.0,
746
+ 0.3947180735764555,
747
+ 0.570482097206368,
748
+ 0.570482097206368,
749
+ ],
750
+ [
751
+ 0.570482097206368,
752
+ 0.3947180735764555,
753
+ 1.0,
754
+ 0.3947180735764555,
755
+ 0.570482097206368,
756
+ ],
757
+ [
758
+ 0.570482097206368,
759
+ 0.570482097206368,
760
+ 0.3947180735764555,
761
+ 1.0,
762
+ 0.3947180735764555,
763
+ ],
764
+ [
765
+ 0.3947180735764555,
766
+ 0.570482097206368,
767
+ 0.570482097206368,
768
+ 0.3947180735764555,
769
+ 1.0,
770
+ ],
771
+ ]
772
+ )
773
+ actual = nx.similarity._simrank_similarity_numpy(G)
774
+ np.testing.assert_allclose(expected, actual, atol=1e-7)
775
+
776
+ def test_simrank_numpy_source_no_target(self):
777
+ G = nx.cycle_graph(5)
778
+ expected = np.array(
779
+ [
780
+ 1.0,
781
+ 0.3947180735764555,
782
+ 0.570482097206368,
783
+ 0.570482097206368,
784
+ 0.3947180735764555,
785
+ ]
786
+ )
787
+ actual = nx.similarity._simrank_similarity_numpy(G, source=0)
788
+ np.testing.assert_allclose(expected, actual, atol=1e-7)
789
+
790
+ def test_simrank_numpy_source_and_target(self):
791
+ G = nx.cycle_graph(5)
792
+ expected = 1.0
793
+ actual = nx.similarity._simrank_similarity_numpy(G, source=0, target=0)
794
+ np.testing.assert_allclose(expected, actual, atol=1e-7)
795
+
796
+ def test_panther_similarity_unweighted(self):
797
+ np.random.seed(42)
798
+
799
+ G = nx.Graph()
800
+ G.add_edge(0, 1)
801
+ G.add_edge(0, 2)
802
+ G.add_edge(0, 3)
803
+ G.add_edge(1, 2)
804
+ G.add_edge(2, 4)
805
+ expected = {3: 0.5, 2: 0.5, 1: 0.5, 4: 0.125}
806
+ sim = nx.panther_similarity(G, 0, path_length=2)
807
+ assert sim == expected
808
+
809
+ def test_panther_similarity_weighted(self):
810
+ np.random.seed(42)
811
+
812
+ G = nx.Graph()
813
+ G.add_edge("v1", "v2", w=5)
814
+ G.add_edge("v1", "v3", w=1)
815
+ G.add_edge("v1", "v4", w=2)
816
+ G.add_edge("v2", "v3", w=0.1)
817
+ G.add_edge("v3", "v5", w=1)
818
+ expected = {"v3": 0.75, "v4": 0.5, "v2": 0.5, "v5": 0.25}
819
+ sim = nx.panther_similarity(G, "v1", path_length=2, weight="w")
820
+ assert sim == expected
821
+
822
+ def test_generate_random_paths_unweighted(self):
823
+ np.random.seed(42)
824
+
825
+ index_map = {}
826
+ num_paths = 10
827
+ path_length = 2
828
+ G = nx.Graph()
829
+ G.add_edge(0, 1)
830
+ G.add_edge(0, 2)
831
+ G.add_edge(0, 3)
832
+ G.add_edge(1, 2)
833
+ G.add_edge(2, 4)
834
+ paths = nx.generate_random_paths(
835
+ G, num_paths, path_length=path_length, index_map=index_map
836
+ )
837
+ expected_paths = [
838
+ [3, 0, 3],
839
+ [4, 2, 1],
840
+ [2, 1, 0],
841
+ [2, 0, 3],
842
+ [3, 0, 1],
843
+ [3, 0, 1],
844
+ [4, 2, 0],
845
+ [2, 1, 0],
846
+ [3, 0, 2],
847
+ [2, 1, 2],
848
+ ]
849
+ expected_map = {
850
+ 0: {0, 2, 3, 4, 5, 6, 7, 8},
851
+ 1: {1, 2, 4, 5, 7, 9},
852
+ 2: {1, 2, 3, 6, 7, 8, 9},
853
+ 3: {0, 3, 4, 5, 8},
854
+ 4: {1, 6},
855
+ }
856
+
857
+ assert expected_paths == list(paths)
858
+ assert expected_map == index_map
859
+
860
+ def test_generate_random_paths_weighted(self):
861
+ np.random.seed(42)
862
+
863
+ index_map = {}
864
+ num_paths = 10
865
+ path_length = 6
866
+ G = nx.Graph()
867
+ G.add_edge("a", "b", weight=0.6)
868
+ G.add_edge("a", "c", weight=0.2)
869
+ G.add_edge("c", "d", weight=0.1)
870
+ G.add_edge("c", "e", weight=0.7)
871
+ G.add_edge("c", "f", weight=0.9)
872
+ G.add_edge("a", "d", weight=0.3)
873
+ paths = nx.generate_random_paths(
874
+ G, num_paths, path_length=path_length, index_map=index_map
875
+ )
876
+
877
+ expected_paths = [
878
+ ["d", "c", "f", "c", "d", "a", "b"],
879
+ ["e", "c", "f", "c", "f", "c", "e"],
880
+ ["d", "a", "b", "a", "b", "a", "c"],
881
+ ["b", "a", "d", "a", "b", "a", "b"],
882
+ ["d", "a", "b", "a", "b", "a", "d"],
883
+ ["d", "a", "b", "a", "b", "a", "c"],
884
+ ["d", "a", "b", "a", "b", "a", "b"],
885
+ ["f", "c", "f", "c", "f", "c", "e"],
886
+ ["d", "a", "d", "a", "b", "a", "b"],
887
+ ["e", "c", "f", "c", "e", "c", "d"],
888
+ ]
889
+ expected_map = {
890
+ "d": {0, 2, 3, 4, 5, 6, 8, 9},
891
+ "c": {0, 1, 2, 5, 7, 9},
892
+ "f": {0, 1, 9, 7},
893
+ "a": {0, 2, 3, 4, 5, 6, 8},
894
+ "b": {0, 2, 3, 4, 5, 6, 8},
895
+ "e": {1, 9, 7},
896
+ }
897
+
898
+ assert expected_paths == list(paths)
899
+ assert expected_map == index_map
900
+
901
+ def test_symmetry_with_custom_matching(self):
902
+ print("G2 is edge (a,b) and G3 is edge (a,a)")
903
+ print("but node order for G2 is (a,b) while for G3 it is (b,a)")
904
+
905
+ a, b = "A", "B"
906
+ G2 = nx.Graph()
907
+ G2.add_nodes_from((a, b))
908
+ G2.add_edges_from([(a, b)])
909
+ G3 = nx.Graph()
910
+ G3.add_nodes_from((b, a))
911
+ G3.add_edges_from([(a, a)])
912
+ for G in (G2, G3):
913
+ for n in G:
914
+ G.nodes[n]["attr"] = n
915
+ for e in G.edges:
916
+ G.edges[e]["attr"] = e
917
+ match = lambda x, y: x == y
918
+
919
+ print("Starting G2 to G3 GED calculation")
920
+ assert nx.graph_edit_distance(G2, G3, node_match=match, edge_match=match) == 1
921
+
922
+ print("Starting G3 to G2 GED calculation")
923
+ assert nx.graph_edit_distance(G3, G2, node_match=match, edge_match=match) == 1
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_tournament.py ADDED
@@ -0,0 +1,162 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for the :mod:`networkx.algorithms.tournament` module."""
2
+ from itertools import combinations
3
+
4
+ import pytest
5
+
6
+ from networkx import DiGraph
7
+ from networkx.algorithms.tournament import (
8
+ hamiltonian_path,
9
+ index_satisfying,
10
+ is_reachable,
11
+ is_strongly_connected,
12
+ is_tournament,
13
+ random_tournament,
14
+ score_sequence,
15
+ tournament_matrix,
16
+ )
17
+
18
+
19
+ def test_condition_not_satisfied():
20
+ condition = lambda x: x > 0
21
+ iter_in = [0]
22
+ assert index_satisfying(iter_in, condition) == 1
23
+
24
+
25
+ def test_empty_iterable():
26
+ condition = lambda x: x > 0
27
+ with pytest.raises(ValueError):
28
+ index_satisfying([], condition)
29
+
30
+
31
+ def test_is_tournament():
32
+ G = DiGraph()
33
+ G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
34
+ assert is_tournament(G)
35
+
36
+
37
+ def test_self_loops():
38
+ """A tournament must have no self-loops."""
39
+ G = DiGraph()
40
+ G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
41
+ G.add_edge(0, 0)
42
+ assert not is_tournament(G)
43
+
44
+
45
+ def test_missing_edges():
46
+ """A tournament must not have any pair of nodes without at least
47
+ one edge joining the pair.
48
+
49
+ """
50
+ G = DiGraph()
51
+ G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3)])
52
+ assert not is_tournament(G)
53
+
54
+
55
+ def test_bidirectional_edges():
56
+ """A tournament must not have any pair of nodes with greater
57
+ than one edge joining the pair.
58
+
59
+ """
60
+ G = DiGraph()
61
+ G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
62
+ G.add_edge(1, 0)
63
+ assert not is_tournament(G)
64
+
65
+
66
+ def test_graph_is_tournament():
67
+ for _ in range(10):
68
+ G = random_tournament(5)
69
+ assert is_tournament(G)
70
+
71
+
72
+ def test_graph_is_tournament_seed():
73
+ for _ in range(10):
74
+ G = random_tournament(5, seed=1)
75
+ assert is_tournament(G)
76
+
77
+
78
+ def test_graph_is_tournament_one_node():
79
+ G = random_tournament(1)
80
+ assert is_tournament(G)
81
+
82
+
83
+ def test_graph_is_tournament_zero_node():
84
+ G = random_tournament(0)
85
+ assert is_tournament(G)
86
+
87
+
88
+ def test_hamiltonian_empty_graph():
89
+ path = hamiltonian_path(DiGraph())
90
+ assert len(path) == 0
91
+
92
+
93
+ def test_path_is_hamiltonian():
94
+ G = DiGraph()
95
+ G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
96
+ path = hamiltonian_path(G)
97
+ assert len(path) == 4
98
+ assert all(v in G[u] for u, v in zip(path, path[1:]))
99
+
100
+
101
+ def test_hamiltonian_cycle():
102
+ """Tests that :func:`networkx.tournament.hamiltonian_path`
103
+ returns a Hamiltonian cycle when provided a strongly connected
104
+ tournament.
105
+
106
+ """
107
+ G = DiGraph()
108
+ G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
109
+ path = hamiltonian_path(G)
110
+ assert len(path) == 4
111
+ assert all(v in G[u] for u, v in zip(path, path[1:]))
112
+ assert path[0] in G[path[-1]]
113
+
114
+
115
+ def test_score_sequence_edge():
116
+ G = DiGraph([(0, 1)])
117
+ assert score_sequence(G) == [0, 1]
118
+
119
+
120
+ def test_score_sequence_triangle():
121
+ G = DiGraph([(0, 1), (1, 2), (2, 0)])
122
+ assert score_sequence(G) == [1, 1, 1]
123
+
124
+
125
+ def test_tournament_matrix():
126
+ np = pytest.importorskip("numpy")
127
+ pytest.importorskip("scipy")
128
+ npt = np.testing
129
+ G = DiGraph([(0, 1)])
130
+ m = tournament_matrix(G)
131
+ npt.assert_array_equal(m.todense(), np.array([[0, 1], [-1, 0]]))
132
+
133
+
134
+ def test_reachable_pair():
135
+ """Tests for a reachable pair of nodes."""
136
+ G = DiGraph([(0, 1), (1, 2), (2, 0)])
137
+ assert is_reachable(G, 0, 2)
138
+
139
+
140
+ def test_same_node_is_reachable():
141
+ """Tests that a node is always reachable from it."""
142
+ # G is an arbitrary tournament on ten nodes.
143
+ G = DiGraph(sorted(p) for p in combinations(range(10), 2))
144
+ assert all(is_reachable(G, v, v) for v in G)
145
+
146
+
147
+ def test_unreachable_pair():
148
+ """Tests for an unreachable pair of nodes."""
149
+ G = DiGraph([(0, 1), (0, 2), (1, 2)])
150
+ assert not is_reachable(G, 1, 0)
151
+
152
+
153
+ def test_is_strongly_connected():
154
+ """Tests for a strongly connected tournament."""
155
+ G = DiGraph([(0, 1), (1, 2), (2, 0)])
156
+ assert is_strongly_connected(G)
157
+
158
+
159
+ def test_not_strongly_connected():
160
+ """Tests for a tournament that is not strongly connected."""
161
+ G = DiGraph([(0, 1), (0, 2), (1, 2)])
162
+ assert not is_strongly_connected(G)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_walks.py ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for the :mod:`networkx.algorithms.walks` module."""
2
+
3
+ import pytest
4
+
5
+ import networkx as nx
6
+
7
+ pytest.importorskip("numpy")
8
+ pytest.importorskip("scipy")
9
+
10
+
11
def test_directed():
    """Length-3 walks in a directed 3-cycle return each node to itself."""
    graph = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
    expected = {0: {0: 1, 1: 0, 2: 0}, 1: {0: 0, 1: 1, 2: 0}, 2: {0: 0, 1: 0, 2: 1}}
    assert nx.number_of_walks(graph, 3) == expected
16
+
17
+
18
def test_undirected():
    """Length-3 walk counts in an undirected triangle."""
    graph = nx.cycle_graph(3)
    expected = {0: {0: 2, 1: 3, 2: 3}, 1: {0: 3, 1: 2, 2: 3}, 2: {0: 3, 1: 3, 2: 2}}
    assert nx.number_of_walks(graph, 3) == expected
23
+
24
+
25
def test_non_integer_nodes():
    """Walk counting works with string node labels."""
    graph = nx.DiGraph([("A", "B"), ("B", "C"), ("C", "A")])
    expected = {
        "A": {"A": 0, "B": 0, "C": 1},
        "B": {"A": 1, "B": 0, "C": 0},
        "C": {"A": 0, "B": 1, "C": 0},
    }
    assert nx.number_of_walks(graph, 2) == expected
34
+
35
+
36
def test_zero_length():
    """Length-0 walks give the identity matrix as a dict of dicts."""
    graph = nx.cycle_graph(3)
    expected = {0: {0: 1, 1: 0, 2: 0}, 1: {0: 0, 1: 1, 2: 0}, 2: {0: 0, 1: 0, 2: 1}}
    assert nx.number_of_walks(graph, 0) == expected
41
+
42
+
43
def test_negative_length_exception():
    """A negative walk length raises ``ValueError``."""
    graph = nx.cycle_graph(3)
    with pytest.raises(ValueError):
        nx.number_of_walks(graph, -1)
47
+
48
+
49
def test_hidden_weight_attr():
    """Edge ``weight`` attributes do not affect walk counts."""
    graph = nx.cycle_graph(3)
    graph.add_edge(1, 2, weight=5)
    expected = {0: {0: 2, 1: 3, 2: 3}, 1: {0: 3, 1: 2, 2: 3}, 2: {0: 3, 1: 3, 2: 2}}
    assert nx.number_of_walks(graph, 3) == expected
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/__init__.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ # graph drawing and interface to graphviz
2
+
3
+ from .layout import *
4
+ from .nx_latex import *
5
+ from .nx_pylab import *
6
+ from . import nx_agraph
7
+ from . import nx_pydot
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/__pycache__/layout.cpython-311.pyc ADDED
Binary file (49.9 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/__pycache__/nx_agraph.cpython-311.pyc ADDED
Binary file (19.3 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/nx_pylab.py ADDED
@@ -0,0 +1,1594 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ **********
3
+ Matplotlib
4
+ **********
5
+
6
+ Draw networks with matplotlib.
7
+
8
+ Examples
9
+ --------
10
+ >>> G = nx.complete_graph(5)
11
+ >>> nx.draw(G)
12
+
13
+ See Also
14
+ --------
15
+ - :doc:`matplotlib <matplotlib:index>`
16
+ - :func:`matplotlib.pyplot.scatter`
17
+ - :obj:`matplotlib.patches.FancyArrowPatch`
18
+ """
19
+ from numbers import Number
20
+
21
+ import networkx as nx
22
+ from networkx.drawing.layout import (
23
+ circular_layout,
24
+ kamada_kawai_layout,
25
+ planar_layout,
26
+ random_layout,
27
+ shell_layout,
28
+ spectral_layout,
29
+ spring_layout,
30
+ )
31
+
32
+ __all__ = [
33
+ "draw",
34
+ "draw_networkx",
35
+ "draw_networkx_nodes",
36
+ "draw_networkx_edges",
37
+ "draw_networkx_labels",
38
+ "draw_networkx_edge_labels",
39
+ "draw_circular",
40
+ "draw_kamada_kawai",
41
+ "draw_random",
42
+ "draw_spectral",
43
+ "draw_spring",
44
+ "draw_planar",
45
+ "draw_shell",
46
+ ]
47
+
48
+
49
def draw(G, pos=None, ax=None, **kwds):
    """Draw the graph G with Matplotlib using a minimal default style.

    The graph is drawn with no node or edge labels (unless requested via
    keyword arguments), no axis labels, and using the full figure area.
    For the fully featured interface see :func:`draw_networkx`.

    Parameters
    ----------
    G : graph
        A networkx graph.

    pos : dictionary, optional
        A dictionary with nodes as keys and positions as values.  When
        omitted, a spring layout positioning is computed.  See
        :py:mod:`networkx.drawing.layout` for layout functions.

    ax : Matplotlib Axes object, optional
        Draw the graph in the specified Matplotlib axes.

    kwds : optional keywords
        Forwarded to :func:`draw_networkx`; see its documentation for the
        accepted options.

    Examples
    --------
    >>> G = nx.dodecahedral_graph()
    >>> nx.draw(G)
    >>> nx.draw(G, pos=nx.spring_layout(G))  # use spring layout

    See Also
    --------
    draw_networkx
    draw_networkx_nodes
    draw_networkx_edges
    draw_networkx_labels
    draw_networkx_edge_labels

    Notes
    -----
    This function has the same name as ``pylab.draw`` and ``pyplot.draw``,
    so beware when using ``from networkx import *`` — you might overwrite
    the Matplotlib function.  With pyplot use

    >>> import matplotlib.pyplot as plt
    >>> G = nx.dodecahedral_graph()
    >>> nx.draw(G)  # networkx draw()
    >>> plt.draw()  # pyplot draw()

    Also see the NetworkX drawing examples at
    https://networkx.org/documentation/latest/auto_examples/index.html
    """
    import matplotlib.pyplot as plt

    # Resolve the target figure: the axes' parent figure, or the current one.
    cf = plt.gcf() if ax is None else ax.get_figure()
    cf.set_facecolor("w")
    if ax is None:
        # Reuse existing axes when present; otherwise fill the whole figure.
        ax = cf.gca() if cf.axes else cf.add_axes((0, 0, 1, 1))

    # By default only show labels when an explicit `labels` dict was given.
    if "with_labels" not in kwds:
        kwds["with_labels"] = "labels" in kwds

    draw_networkx(G, pos=pos, ax=ax, **kwds)
    ax.set_axis_off()
    plt.draw_if_interactive()
    return
125
+
126
+
127
def draw_networkx(G, pos=None, arrows=None, with_labels=True, **kwds):
    r"""Draw the graph G using Matplotlib.

    Draw the graph with options for node positions, labeling, titles, and
    many other drawing features.  See :func:`draw` for simple drawing
    without labels or axes.

    Parameters
    ----------
    G : graph
        A networkx graph.

    pos : dictionary, optional
        A dictionary with nodes as keys and positions as values.  When
        omitted, a spring layout positioning is computed.  See
        :py:mod:`networkx.drawing.layout` for layout functions.

    arrows : bool or None, optional (default=None)
        If `None`, directed graphs draw arrowheads with
        `~matplotlib.patches.FancyArrowPatch`, while undirected graphs draw
        edges via `~matplotlib.collections.LineCollection` for speed.
        If `True`, draw arrowheads with FancyArrowPatches (bendable and
        stylish).  If `False`, draw edges using LineCollection (linear and
        fast).  Note: arrows will be the same color as edges.

    with_labels : bool (default=True)
        Set to True to draw labels on the nodes.

    kwds : optional keywords
        Any keyword accepted by :func:`draw_networkx_nodes`,
        :func:`draw_networkx_edges`, or :func:`draw_networkx_labels` —
        e.g. ``ax``, ``nodelist``, ``edgelist``, ``node_size``,
        ``node_color``, ``node_shape``, ``alpha``, ``cmap``, ``vmin``,
        ``vmax``, ``linewidths``, ``width``, ``edge_color``, ``edge_cmap``,
        ``edge_vmin``, ``edge_vmax``, ``style``, ``arrowstyle``,
        ``arrowsize``, ``labels``, ``font_size``, ``font_color``,
        ``font_weight``, ``font_family``, ``label``.  See those functions
        for full descriptions.

    Raises
    ------
    ValueError
        If a keyword argument is not accepted by any of the three
        underlying drawing functions.

    Notes
    -----
    For directed graphs, arrows are drawn at the head end.  Arrows can be
    turned off with keyword ``arrows=False``.

    Examples
    --------
    >>> G = nx.dodecahedral_graph()
    >>> nx.draw(G)
    >>> nx.draw(G, pos=nx.spring_layout(G))  # use spring layout

    >>> import matplotlib.pyplot as plt
    >>> limits = plt.axis("off")  # turn off axis

    Also see the NetworkX drawing examples at
    https://networkx.org/documentation/latest/auto_examples/index.html

    See Also
    --------
    draw
    draw_networkx_nodes
    draw_networkx_edges
    draw_networkx_labels
    draw_networkx_edge_labels
    """
    from inspect import signature

    import matplotlib.pyplot as plt

    # Collect valid keywords by inspecting the three drawing helpers.
    node_params = signature(draw_networkx_nodes).parameters.keys()
    edge_params = signature(draw_networkx_edges).parameters.keys()
    label_params = signature(draw_networkx_labels).parameters.keys()

    # The union of all accepted keywords, minus this function's own args.
    own_args = {"G", "pos", "arrows", "with_labels"}
    valid_kwds = (node_params | edge_params | label_params) - own_args

    unknown = [k for k in kwds if k not in valid_kwds]
    if unknown:
        invalid_args = ", ".join(unknown)
        raise ValueError(f"Received invalid argument(s): {invalid_args}")

    # Route each keyword to whichever helper(s) accept it.
    node_kwds = {k: v for k, v in kwds.items() if k in node_params}
    edge_kwds = {k: v for k, v in kwds.items() if k in edge_params}
    label_kwds = {k: v for k, v in kwds.items() if k in label_params}

    if pos is None:
        pos = nx.drawing.spring_layout(G)  # default to spring layout

    draw_networkx_nodes(G, pos, **node_kwds)
    draw_networkx_edges(G, pos, arrows=arrows, **edge_kwds)
    if with_labels:
        draw_networkx_labels(G, pos, **label_kwds)
    plt.draw_if_interactive()
309
+
310
+
311
def draw_networkx_nodes(
    G,
    pos,
    nodelist=None,
    node_size=300,
    node_color="#1f78b4",
    node_shape="o",
    alpha=None,
    cmap=None,
    vmin=None,
    vmax=None,
    ax=None,
    linewidths=None,
    edgecolors=None,
    label=None,
    margins=None,
):
    """Draw only the nodes of the graph G.

    Parameters
    ----------
    G : graph
        A networkx graph.

    pos : dictionary
        A dictionary with nodes as keys and positions as values.
        Positions should be sequences of length 2.

    nodelist : list (default list(G))
        Draw only the specified nodes.

    node_size : scalar or array (default=300)
        Size of nodes.  If an array it must be the same length as nodelist.

    node_color : color or array of colors (default='#1f78b4')
        Node color.  A single color or a sequence of colors the same length
        as nodelist.  Colors may be strings or rgb(a) tuples of floats in
        0-1.  Numeric values are mapped to colors using ``cmap`` with
        ``vmin``/``vmax``; see `~matplotlib.pyplot.scatter`.

    node_shape : string (default='o')
        Node shape, specified as a `matplotlib.scatter` marker, one of
        'so^>v<dph8'.

    alpha : float or array of floats (default=None)
        Node transparency.  A single value is applied to all nodes; an
        array is applied to the colors in order (cycling if necessary).

    cmap : Matplotlib colormap (default=None)
        Colormap for mapping intensities of nodes.

    vmin,vmax : floats or None (default=None)
        Minimum and maximum for node colormap scaling.

    ax : Matplotlib Axes object, optional
        Draw the graph in the specified Matplotlib axes.

    linewidths : [None | scalar | sequence] (default=1.0)
        Line width of symbol border.

    edgecolors : [None | scalar | sequence] (default = node_color)
        Colors of node borders, same conventions as ``node_color``.

    label : [None | string]
        Label for legend.

    margins : float or 2-tuple, optional
        Padding for axis autoscaling, in ``[0, 1]``.  Increase to prevent
        clipping of nodes near image edges.  See
        :meth:`matplotlib.axes.Axes.margins`.  ``None`` keeps the
        Matplotlib default.

    Returns
    -------
    matplotlib.collections.PathCollection
        `PathCollection` of the nodes.

    Raises
    ------
    networkx.NetworkXError
        If some node in ``nodelist`` is missing from ``pos``.

    Examples
    --------
    >>> G = nx.dodecahedral_graph()
    >>> nodes = nx.draw_networkx_nodes(G, pos=nx.spring_layout(G))

    Also see the NetworkX drawing examples at
    https://networkx.org/documentation/latest/auto_examples/index.html

    See Also
    --------
    draw
    draw_networkx
    draw_networkx_edges
    draw_networkx_labels
    draw_networkx_edge_labels
    """
    from collections.abc import Iterable

    import matplotlib as mpl
    import matplotlib.collections  # call as mpl.collections
    import matplotlib.pyplot as plt
    import numpy as np

    if ax is None:
        ax = plt.gca()
    if nodelist is None:
        nodelist = list(G)

    if not nodelist:
        # Nothing to draw: return an empty collection.
        return mpl.collections.PathCollection(None)

    try:
        coords = np.asarray([pos[node] for node in nodelist])
    except KeyError as err:
        raise nx.NetworkXError(f"Node {err} has no position.") from err

    if isinstance(alpha, Iterable):
        # Per-node alpha: fold it into the color array, then drop the
        # scalar alpha so scatter does not apply it a second time.
        node_color = apply_alpha(node_color, alpha, nodelist, cmap, vmin, vmax)
        alpha = None

    node_collection = ax.scatter(
        coords[:, 0],
        coords[:, 1],
        s=node_size,
        c=node_color,
        marker=node_shape,
        cmap=cmap,
        vmin=vmin,
        vmax=vmax,
        alpha=alpha,
        linewidths=linewidths,
        edgecolors=edgecolors,
        label=label,
    )
    # Hide tick marks and tick labels on both axes.
    ax.tick_params(
        axis="both",
        which="both",
        bottom=False,
        left=False,
        labelbottom=False,
        labelleft=False,
    )

    if margins is not None:
        if isinstance(margins, Iterable):
            ax.margins(*margins)
        else:
            ax.margins(margins)

    node_collection.set_zorder(2)  # nodes sit on top of edges
    return node_collection
468
+
469
+
470
+ def draw_networkx_edges(
471
+ G,
472
+ pos,
473
+ edgelist=None,
474
+ width=1.0,
475
+ edge_color="k",
476
+ style="solid",
477
+ alpha=None,
478
+ arrowstyle=None,
479
+ arrowsize=10,
480
+ edge_cmap=None,
481
+ edge_vmin=None,
482
+ edge_vmax=None,
483
+ ax=None,
484
+ arrows=None,
485
+ label=None,
486
+ node_size=300,
487
+ nodelist=None,
488
+ node_shape="o",
489
+ connectionstyle="arc3",
490
+ min_source_margin=0,
491
+ min_target_margin=0,
492
+ ):
493
+ r"""Draw the edges of the graph G.
494
+
495
+ This draws only the edges of the graph G.
496
+
497
+ Parameters
498
+ ----------
499
+ G : graph
500
+ A networkx graph
501
+
502
+ pos : dictionary
503
+ A dictionary with nodes as keys and positions as values.
504
+ Positions should be sequences of length 2.
505
+
506
+ edgelist : collection of edge tuples (default=G.edges())
507
+ Draw only specified edges
508
+
509
+ width : float or array of floats (default=1.0)
510
+ Line width of edges
511
+
512
+ edge_color : color or array of colors (default='k')
513
+ Edge color. Can be a single color or a sequence of colors with the same
514
+ length as edgelist. Color can be string or rgb (or rgba) tuple of
515
+ floats from 0-1. If numeric values are specified they will be
516
+ mapped to colors using the edge_cmap and edge_vmin,edge_vmax parameters.
517
+
518
+ style : string or array of strings (default='solid')
519
+ Edge line style e.g.: '-', '--', '-.', ':'
520
+ or words like 'solid' or 'dashed'.
521
+ Can be a single style or a sequence of styles with the same
522
+ length as the edge list.
523
+ If less styles than edges are given the styles will cycle.
524
+ If more styles than edges are given the styles will be used sequentially
525
+ and not be exhausted.
526
+ Also, `(offset, onoffseq)` tuples can be used as style instead of a strings.
527
+ (See `matplotlib.patches.FancyArrowPatch`: `linestyle`)
528
+
529
+ alpha : float or array of floats (default=None)
530
+ The edge transparency. This can be a single alpha value,
531
+ in which case it will be applied to all specified edges. Otherwise,
532
+ if it is an array, the elements of alpha will be applied to the colors
533
+ in order (cycling through alpha multiple times if necessary).
534
+
535
+ edge_cmap : Matplotlib colormap, optional
536
+ Colormap for mapping intensities of edges
537
+
538
+ edge_vmin,edge_vmax : floats, optional
539
+ Minimum and maximum for edge colormap scaling
540
+
541
+ ax : Matplotlib Axes object, optional
542
+ Draw the graph in the specified Matplotlib axes.
543
+
544
+ arrows : bool or None, optional (default=None)
545
+ If `None`, directed graphs draw arrowheads with
546
+ `~matplotlib.patches.FancyArrowPatch`, while undirected graphs draw edges
547
+ via `~matplotlib.collections.LineCollection` for speed.
548
+ If `True`, draw arrowheads with FancyArrowPatches (bendable and stylish).
549
+ If `False`, draw edges using LineCollection (linear and fast).
550
+
551
+ Note: Arrowheads will be the same color as edges.
552
+
553
+ arrowstyle : str (default='-\|>' for directed graphs)
554
+ For directed graphs and `arrows==True` defaults to '-\|>',
555
+ For undirected graphs default to '-'.
556
+
557
+ See `matplotlib.patches.ArrowStyle` for more options.
558
+
559
+ arrowsize : int (default=10)
560
+ For directed graphs, choose the size of the arrow head's length and
561
+ width. See `matplotlib.patches.FancyArrowPatch` for attribute
562
+ `mutation_scale` for more info.
563
+
564
+ connectionstyle : string (default="arc3")
565
+ Pass the connectionstyle parameter to create curved arc of rounding
566
+ radius rad. For example, connectionstyle='arc3,rad=0.2'.
567
+ See `matplotlib.patches.ConnectionStyle` and
568
+ `matplotlib.patches.FancyArrowPatch` for more info.
569
+
570
+ node_size : scalar or array (default=300)
571
+ Size of nodes. Though the nodes are not drawn with this function, the
572
+ node size is used in determining edge positioning.
573
+
574
+ nodelist : list, optional (default=G.nodes())
575
+ This provides the node order for the `node_size` array (if it is an array).
576
+
577
+ node_shape : string (default='o')
578
+ The marker used for nodes, used in determining edge positioning.
579
+ Specification is as a `matplotlib.markers` marker, e.g. one of 'so^>v<dph8'.
580
+
581
+ label : None or string
582
+ Label for legend
583
+
584
+ min_source_margin : int (default=0)
585
+ The minimum margin (gap) at the beginning of the edge at the source.
586
+
587
+ min_target_margin : int (default=0)
588
+ The minimum margin (gap) at the end of the edge at the target.
589
+
590
+ Returns
591
+ -------
592
+ matplotlib.collections.LineCollection or a list of matplotlib.patches.FancyArrowPatch
593
+ If ``arrows=True``, a list of FancyArrowPatches is returned.
594
+ If ``arrows=False``, a LineCollection is returned.
595
+ If ``arrows=None`` (the default), then a LineCollection is returned if
596
+ `G` is undirected, otherwise returns a list of FancyArrowPatches.
597
+
598
+ Notes
599
+ -----
600
+ For directed graphs, arrows are drawn at the head end. Arrows can be
601
+ turned off with keyword arrows=False or by passing an arrowstyle without
602
+ an arrow on the end.
603
+
604
+ Be sure to include `node_size` as a keyword argument; arrows are
605
+ drawn considering the size of nodes.
606
+
607
+ Self-loops are always drawn with `~matplotlib.patches.FancyArrowPatch`
608
+ regardless of the value of `arrows` or whether `G` is directed.
609
+ When ``arrows=False`` or ``arrows=None`` and `G` is undirected, the
610
+ FancyArrowPatches corresponding to the self-loops are not explicitly
611
+ returned. They should instead be accessed via the ``Axes.patches``
612
+ attribute (see examples).
613
+
614
+ Examples
615
+ --------
616
+ >>> G = nx.dodecahedral_graph()
617
+ >>> edges = nx.draw_networkx_edges(G, pos=nx.spring_layout(G))
618
+
619
+ >>> G = nx.DiGraph()
620
+ >>> G.add_edges_from([(1, 2), (1, 3), (2, 3)])
621
+ >>> arcs = nx.draw_networkx_edges(G, pos=nx.spring_layout(G))
622
+ >>> alphas = [0.3, 0.4, 0.5]
623
+ >>> for i, arc in enumerate(arcs): # change alpha values of arcs
624
+ ... arc.set_alpha(alphas[i])
625
+
626
+ The FancyArrowPatches corresponding to self-loops are not always
627
+ returned, but can always be accessed via the ``patches`` attribute of the
628
+ `matplotlib.Axes` object.
629
+
630
+ >>> import matplotlib.pyplot as plt
631
+ >>> fig, ax = plt.subplots()
632
+ >>> G = nx.Graph([(0, 1), (0, 0)]) # Self-loop at node 0
633
+ >>> edge_collection = nx.draw_networkx_edges(G, pos=nx.circular_layout(G), ax=ax)
634
+ >>> self_loop_fap = ax.patches[0]
635
+
636
+ Also see the NetworkX drawing examples at
637
+ https://networkx.org/documentation/latest/auto_examples/index.html
638
+
639
+ See Also
640
+ --------
641
+ draw
642
+ draw_networkx
643
+ draw_networkx_nodes
644
+ draw_networkx_labels
645
+ draw_networkx_edge_labels
646
+
647
+ """
648
+ import matplotlib as mpl
649
+ import matplotlib.collections # call as mpl.collections
650
+ import matplotlib.colors # call as mpl.colors
651
+ import matplotlib.patches # call as mpl.patches
652
+ import matplotlib.path # call as mpl.path
653
+ import matplotlib.pyplot as plt
654
+ import numpy as np
655
+
656
+ # The default behavior is to use LineCollection to draw edges for
657
+ # undirected graphs (for performance reasons) and use FancyArrowPatches
658
+ # for directed graphs.
659
+ # The `arrows` keyword can be used to override the default behavior
660
+ use_linecollection = not G.is_directed()
661
+ if arrows in (True, False):
662
+ use_linecollection = not arrows
663
+
664
+ # Some kwargs only apply to FancyArrowPatches. Warn users when they use
665
+ # non-default values for these kwargs when LineCollection is being used
666
+ # instead of silently ignoring the specified option
667
+ if use_linecollection and any(
668
+ [
669
+ arrowstyle is not None,
670
+ arrowsize != 10,
671
+ connectionstyle != "arc3",
672
+ min_source_margin != 0,
673
+ min_target_margin != 0,
674
+ ]
675
+ ):
676
+ import warnings
677
+
678
+ msg = (
679
+ "\n\nThe {0} keyword argument is not applicable when drawing edges\n"
680
+ "with LineCollection.\n\n"
681
+ "To make this warning go away, either specify `arrows=True` to\n"
682
+ "force FancyArrowPatches or use the default value for {0}.\n"
683
+ "Note that using FancyArrowPatches may be slow for large graphs.\n"
684
+ )
685
+ if arrowstyle is not None:
686
+ msg = msg.format("arrowstyle")
687
+ if arrowsize != 10:
688
+ msg = msg.format("arrowsize")
689
+ if connectionstyle != "arc3":
690
+ msg = msg.format("connectionstyle")
691
+ if min_source_margin != 0:
692
+ msg = msg.format("min_source_margin")
693
+ if min_target_margin != 0:
694
+ msg = msg.format("min_target_margin")
695
+ warnings.warn(msg, category=UserWarning, stacklevel=2)
696
+
697
+ if arrowstyle == None:
698
+ if G.is_directed():
699
+ arrowstyle = "-|>"
700
+ else:
701
+ arrowstyle = "-"
702
+
703
+ if ax is None:
704
+ ax = plt.gca()
705
+
706
+ if edgelist is None:
707
+ edgelist = list(G.edges())
708
+
709
+ if len(edgelist) == 0: # no edges!
710
+ return []
711
+
712
+ if nodelist is None:
713
+ nodelist = list(G.nodes())
714
+
715
+ # FancyArrowPatch handles color=None different from LineCollection
716
+ if edge_color is None:
717
+ edge_color = "k"
718
+ edgelist_tuple = list(map(tuple, edgelist))
719
+
720
+ # set edge positions
721
+ edge_pos = np.asarray([(pos[e[0]], pos[e[1]]) for e in edgelist])
722
+
723
+ # Check if edge_color is an array of floats and map to edge_cmap.
724
+ # This is the only case handled differently from matplotlib
725
+ if (
726
+ np.iterable(edge_color)
727
+ and (len(edge_color) == len(edge_pos))
728
+ and np.all([isinstance(c, Number) for c in edge_color])
729
+ ):
730
+ if edge_cmap is not None:
731
+ assert isinstance(edge_cmap, mpl.colors.Colormap)
732
+ else:
733
+ edge_cmap = plt.get_cmap()
734
+ if edge_vmin is None:
735
+ edge_vmin = min(edge_color)
736
+ if edge_vmax is None:
737
+ edge_vmax = max(edge_color)
738
+ color_normal = mpl.colors.Normalize(vmin=edge_vmin, vmax=edge_vmax)
739
+ edge_color = [edge_cmap(color_normal(e)) for e in edge_color]
740
+
741
+ def _draw_networkx_edges_line_collection():
742
+ edge_collection = mpl.collections.LineCollection(
743
+ edge_pos,
744
+ colors=edge_color,
745
+ linewidths=width,
746
+ antialiaseds=(1,),
747
+ linestyle=style,
748
+ alpha=alpha,
749
+ )
750
+ edge_collection.set_cmap(edge_cmap)
751
+ edge_collection.set_clim(edge_vmin, edge_vmax)
752
+ edge_collection.set_zorder(1) # edges go behind nodes
753
+ edge_collection.set_label(label)
754
+ ax.add_collection(edge_collection)
755
+
756
+ return edge_collection
757
+
758
+ def _draw_networkx_edges_fancy_arrow_patch():
759
+ # Note: Waiting for someone to implement arrow to intersection with
760
+ # marker. Meanwhile, this works well for polygons with more than 4
761
+ # sides and circle.
762
+
763
+ def to_marker_edge(marker_size, marker):
764
+ if marker in "s^>v<d": # `large` markers need extra space
765
+ return np.sqrt(2 * marker_size) / 2
766
+ else:
767
+ return np.sqrt(marker_size) / 2
768
+
769
+ # Draw arrows with `matplotlib.patches.FancyarrowPatch`
770
+ arrow_collection = []
771
+
772
+ if isinstance(arrowsize, list):
773
+ if len(arrowsize) != len(edge_pos):
774
+ raise ValueError("arrowsize should have the same length as edgelist")
775
+ else:
776
+ mutation_scale = arrowsize # scale factor of arrow head
777
+
778
+ base_connection_style = mpl.patches.ConnectionStyle(connectionstyle)
779
+
780
+ # Fallback for self-loop scale. Left outside of _connectionstyle so it is
781
+ # only computed once
782
+ max_nodesize = np.array(node_size).max()
783
+
784
+ def _connectionstyle(posA, posB, *args, **kwargs):
785
+ # check if we need to do a self-loop
786
+ if np.all(posA == posB):
787
+ # Self-loops are scaled by view extent, except in cases the extent
788
+ # is 0, e.g. for a single node. In this case, fall back to scaling
789
+ # by the maximum node size
790
+ selfloop_ht = 0.005 * max_nodesize if h == 0 else h
791
+ # this is called with _screen space_ values so convert back
792
+ # to data space
793
+ data_loc = ax.transData.inverted().transform(posA)
794
+ v_shift = 0.1 * selfloop_ht
795
+ h_shift = v_shift * 0.5
796
+ # put the top of the loop first so arrow is not hidden by node
797
+ path = [
798
+ # 1
799
+ data_loc + np.asarray([0, v_shift]),
800
+ # 4 4 4
801
+ data_loc + np.asarray([h_shift, v_shift]),
802
+ data_loc + np.asarray([h_shift, 0]),
803
+ data_loc,
804
+ # 4 4 4
805
+ data_loc + np.asarray([-h_shift, 0]),
806
+ data_loc + np.asarray([-h_shift, v_shift]),
807
+ data_loc + np.asarray([0, v_shift]),
808
+ ]
809
+
810
+ ret = mpl.path.Path(ax.transData.transform(path), [1, 4, 4, 4, 4, 4, 4])
811
+ # if not, fall back to the user specified behavior
812
+ else:
813
+ ret = base_connection_style(posA, posB, *args, **kwargs)
814
+
815
+ return ret
816
+
817
+ # FancyArrowPatch doesn't handle color strings
818
+ arrow_colors = mpl.colors.colorConverter.to_rgba_array(edge_color, alpha)
819
+ for i, (src, dst) in zip(fancy_edges_indices, edge_pos):
820
+ x1, y1 = src
821
+ x2, y2 = dst
822
+ shrink_source = 0 # space from source to tail
823
+ shrink_target = 0 # space from head to target
824
+
825
+ if isinstance(arrowsize, list):
826
+ # Scale each factor of each arrow based on arrowsize list
827
+ mutation_scale = arrowsize[i]
828
+
829
+ if np.iterable(node_size): # many node sizes
830
+ source, target = edgelist[i][:2]
831
+ source_node_size = node_size[nodelist.index(source)]
832
+ target_node_size = node_size[nodelist.index(target)]
833
+ shrink_source = to_marker_edge(source_node_size, node_shape)
834
+ shrink_target = to_marker_edge(target_node_size, node_shape)
835
+ else:
836
+ shrink_source = shrink_target = to_marker_edge(node_size, node_shape)
837
+
838
+ if shrink_source < min_source_margin:
839
+ shrink_source = min_source_margin
840
+
841
+ if shrink_target < min_target_margin:
842
+ shrink_target = min_target_margin
843
+
844
+ if len(arrow_colors) > i:
845
+ arrow_color = arrow_colors[i]
846
+ elif len(arrow_colors) == 1:
847
+ arrow_color = arrow_colors[0]
848
+ else: # Cycle through colors
849
+ arrow_color = arrow_colors[i % len(arrow_colors)]
850
+
851
+ if np.iterable(width):
852
+ if len(width) > i:
853
+ line_width = width[i]
854
+ else:
855
+ line_width = width[i % len(width)]
856
+ else:
857
+ line_width = width
858
+
859
+ if (
860
+ np.iterable(style)
861
+ and not isinstance(style, str)
862
+ and not isinstance(style, tuple)
863
+ ):
864
+ if len(style) > i:
865
+ linestyle = style[i]
866
+ else: # Cycle through styles
867
+ linestyle = style[i % len(style)]
868
+ else:
869
+ linestyle = style
870
+
871
+ arrow = mpl.patches.FancyArrowPatch(
872
+ (x1, y1),
873
+ (x2, y2),
874
+ arrowstyle=arrowstyle,
875
+ shrinkA=shrink_source,
876
+ shrinkB=shrink_target,
877
+ mutation_scale=mutation_scale,
878
+ color=arrow_color,
879
+ linewidth=line_width,
880
+ connectionstyle=_connectionstyle,
881
+ linestyle=linestyle,
882
+ zorder=1,
883
+ ) # arrows go behind nodes
884
+
885
+ arrow_collection.append(arrow)
886
+ ax.add_patch(arrow)
887
+
888
+ return arrow_collection
889
+
890
+ # compute initial view
891
+ minx = np.amin(np.ravel(edge_pos[:, :, 0]))
892
+ maxx = np.amax(np.ravel(edge_pos[:, :, 0]))
893
+ miny = np.amin(np.ravel(edge_pos[:, :, 1]))
894
+ maxy = np.amax(np.ravel(edge_pos[:, :, 1]))
895
+ w = maxx - minx
896
+ h = maxy - miny
897
+
898
+ # Draw the edges
899
+ if use_linecollection:
900
+ edge_viz_obj = _draw_networkx_edges_line_collection()
901
+ # Make sure selfloop edges are also drawn
902
+ selfloops_to_draw = [loop for loop in nx.selfloop_edges(G) if loop in edgelist]
903
+ if selfloops_to_draw:
904
+ fancy_edges_indices = [
905
+ edgelist_tuple.index(loop) for loop in selfloops_to_draw
906
+ ]
907
+ edge_pos = np.asarray([(pos[e[0]], pos[e[1]]) for e in selfloops_to_draw])
908
+ arrowstyle = "-"
909
+ _draw_networkx_edges_fancy_arrow_patch()
910
+ else:
911
+ fancy_edges_indices = range(len(edgelist))
912
+ edge_viz_obj = _draw_networkx_edges_fancy_arrow_patch()
913
+
914
+ # update view after drawing
915
+ padx, pady = 0.05 * w, 0.05 * h
916
+ corners = (minx - padx, miny - pady), (maxx + padx, maxy + pady)
917
+ ax.update_datalim(corners)
918
+ ax.autoscale_view()
919
+
920
+ ax.tick_params(
921
+ axis="both",
922
+ which="both",
923
+ bottom=False,
924
+ left=False,
925
+ labelbottom=False,
926
+ labelleft=False,
927
+ )
928
+
929
+ return edge_viz_obj
930
+
931
+
932
def draw_networkx_labels(
    G,
    pos,
    labels=None,
    font_size=12,
    font_color="k",
    font_family="sans-serif",
    font_weight="normal",
    alpha=None,
    bbox=None,
    horizontalalignment="center",
    verticalalignment="center",
    ax=None,
    clip_on=True,
):
    """Draw node labels on the graph G.

    Parameters
    ----------
    G : graph
        A networkx graph

    pos : dictionary
        A dictionary with nodes as keys and positions as values.
        Positions should be sequences of length 2.

    labels : dictionary (default={n: n for n in G})
        Node labels in a dictionary of text labels keyed by node.
        Node-keys in labels should appear as keys in `pos`.
        If needed use: `{n:lab for n,lab in labels.items() if n in pos}`

    font_size : int (default=12)
        Font size for text labels

    font_color : color (default='k' black)
        Font color string. Color can be string or rgb (or rgba) tuple of
        floats from 0-1.

    font_weight : string (default='normal')
        Font weight

    font_family : string (default='sans-serif')
        Font family

    alpha : float or None (default=None)
        The text transparency

    bbox : Matplotlib bbox, (default is Matplotlib's ax.text default)
        Specify text box properties (e.g. shape, color etc.) for node labels.

    horizontalalignment : string (default='center')
        Horizontal alignment {'center', 'right', 'left'}

    verticalalignment : string (default='center')
        Vertical alignment {'center', 'top', 'bottom', 'baseline', 'center_baseline'}

    ax : Matplotlib Axes object, optional
        Draw the graph in the specified Matplotlib axes.

    clip_on : bool (default=True)
        Turn on clipping of node labels at axis boundaries

    Returns
    -------
    dict
        `dict` of labels keyed on the nodes

    Examples
    --------
    >>> G = nx.dodecahedral_graph()
    >>> labels = nx.draw_networkx_labels(G, pos=nx.spring_layout(G))

    Also see the NetworkX drawing examples at
    https://networkx.org/documentation/latest/auto_examples/index.html

    See Also
    --------
    draw
    draw_networkx
    draw_networkx_nodes
    draw_networkx_edges
    draw_networkx_edge_labels
    """
    import matplotlib.pyplot as plt

    if ax is None:
        ax = plt.gca()

    # Default: every node is labeled with itself.
    if labels is None:
        labels = {n: n for n in G.nodes()}

    # Matplotlib has no "text collection", so collect the Text artists
    # ourselves, keyed by node.
    text_items = {}
    for node, node_label in labels.items():
        x, y = pos[node]
        # Stringify non-string labels so "1" and 1 render identically.
        text = node_label if isinstance(node_label, str) else str(node_label)
        text_items[node] = ax.text(
            x,
            y,
            text,
            size=font_size,
            color=font_color,
            family=font_family,
            weight=font_weight,
            alpha=alpha,
            horizontalalignment=horizontalalignment,
            verticalalignment=verticalalignment,
            transform=ax.transData,
            bbox=bbox,
            clip_on=clip_on,
        )

    # Hide ticks and tick labels; node labels are drawn in data space.
    ax.tick_params(
        axis="both",
        which="both",
        bottom=False,
        left=False,
        labelbottom=False,
        labelleft=False,
    )

    return text_items
1055
+
1056
+
1057
def draw_networkx_edge_labels(
    G,
    pos,
    edge_labels=None,
    label_pos=0.5,
    font_size=10,
    font_color="k",
    font_family="sans-serif",
    font_weight="normal",
    alpha=None,
    bbox=None,
    horizontalalignment="center",
    verticalalignment="center",
    ax=None,
    rotate=True,
    clip_on=True,
):
    """Draw edge labels.

    Parameters
    ----------
    G : graph
        A networkx graph

    pos : dictionary
        A dictionary with nodes as keys and positions as values.
        Positions should be sequences of length 2.

    edge_labels : dictionary (default=None)
        Edge labels in a dictionary of labels keyed by edge two-tuple.
        Only labels for the keys in the dictionary are drawn.

    label_pos : float (default=0.5)
        Position of edge label along edge (0=head, 0.5=center, 1=tail)

    font_size : int (default=10)
        Font size for text labels

    font_color : color (default='k' black)
        Font color string. Color can be string or rgb (or rgba) tuple of
        floats from 0-1.

    font_weight : string (default='normal')
        Font weight

    font_family : string (default='sans-serif')
        Font family

    alpha : float or None (default=None)
        The text transparency

    bbox : Matplotlib bbox, optional
        Specify text box properties (e.g. shape, color etc.) for edge labels.
        Default is {boxstyle='round', ec=(1.0, 1.0, 1.0), fc=(1.0, 1.0, 1.0)}.

    horizontalalignment : string (default='center')
        Horizontal alignment {'center', 'right', 'left'}

    verticalalignment : string (default='center')
        Vertical alignment {'center', 'top', 'bottom', 'baseline', 'center_baseline'}

    ax : Matplotlib Axes object, optional
        Draw the graph in the specified Matplotlib axes.

    rotate : bool (default=True)
        Rotate edge labels to lie parallel to edges

    clip_on : bool (default=True)
        Turn on clipping of edge labels at axis boundaries

    Returns
    -------
    dict
        `dict` of labels keyed by edge

    Examples
    --------
    >>> G = nx.dodecahedral_graph()
    >>> edge_labels = nx.draw_networkx_edge_labels(G, pos=nx.spring_layout(G))

    Also see the NetworkX drawing examples at
    https://networkx.org/documentation/latest/auto_examples/index.html

    See Also
    --------
    draw
    draw_networkx
    draw_networkx_nodes
    draw_networkx_edges
    draw_networkx_labels
    """
    import matplotlib.pyplot as plt
    import numpy as np

    if ax is None:
        ax = plt.gca()
    if edge_labels is None:
        # Default: use the full edge data dict of each edge as its label.
        labels = {(u, v): d for u, v, d in G.edges(data=True)}
    else:
        labels = edge_labels
    # Informative exception for multiedges: a 3-tuple (u, v, key) cannot be
    # unpacked into two names.
    try:
        (u, v) = next(iter(labels))  # ensures no edge key provided
    except ValueError as err:
        raise nx.NetworkXError(
            "draw_networkx_edge_labels does not support multiedges."
        ) from err
    except StopIteration:
        pass

    # FIX: the default bbox (white box with white border) is loop-invariant;
    # build it once here instead of re-checking and re-creating the dict on
    # every loop iteration as before.
    if bbox is None:
        bbox = {"boxstyle": "round", "ec": (1.0, 1.0, 1.0), "fc": (1.0, 1.0, 1.0)}

    text_items = {}
    for (n1, n2), label in labels.items():
        (x1, y1) = pos[n1]
        (x2, y2) = pos[n2]
        # Interpolate the label position along the edge.
        (x, y) = (
            x1 * label_pos + x2 * (1.0 - label_pos),
            y1 * label_pos + y2 * (1.0 - label_pos),
        )

        if rotate:
            # in degrees
            angle = np.arctan2(y2 - y1, x2 - x1) / (2.0 * np.pi) * 360
            # make label orientation "right-side-up"
            if angle > 90:
                angle -= 180
            if angle < -90:
                angle += 180
            # transform data coordinate angle to screen coordinate angle
            xy = np.array((x, y))
            trans_angle = ax.transData.transform_angles(
                np.array((angle,)), xy.reshape((1, 2))
            )[0]
        else:
            trans_angle = 0.0
        if not isinstance(label, str):
            label = str(label)  # this makes "1" and 1 labeled the same

        t = ax.text(
            x,
            y,
            label,
            size=font_size,
            color=font_color,
            family=font_family,
            weight=font_weight,
            alpha=alpha,
            horizontalalignment=horizontalalignment,
            verticalalignment=verticalalignment,
            rotation=trans_angle,
            transform=ax.transData,
            bbox=bbox,
            zorder=1,
            clip_on=clip_on,
        )
        text_items[(n1, n2)] = t

    # Hide ticks and tick labels; edge labels are drawn in data space.
    ax.tick_params(
        axis="both",
        which="both",
        bottom=False,
        left=False,
        labelbottom=False,
        labelleft=False,
    )

    return text_items
1226
+
1227
+
1228
def draw_circular(G, **kwargs):
    """Draw the graph `G` with a circular layout.

    Convenience wrapper equivalent to::

        nx.draw(G, pos=nx.circular_layout(G), **kwargs)

    Parameters
    ----------
    G : graph
        A networkx graph

    kwargs : optional keywords
        See `draw_networkx` for a description of optional keywords.

    Notes
    -----
    The layout is recomputed on every call. For repeated drawing it is much
    more efficient to call :func:`~networkx.drawing.layout.circular_layout`
    once and reuse the result::

        >>> G = nx.complete_graph(5)
        >>> pos = nx.circular_layout(G)
        >>> nx.draw(G, pos=pos)  # Draw the original graph
        >>> # Draw a subgraph, reusing the same node positions
        >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.draw_circular(G)

    See Also
    --------
    :func:`~networkx.drawing.layout.circular_layout`
    """
    positions = circular_layout(G)
    draw(G, positions, **kwargs)
1265
+
1266
+
1267
def draw_kamada_kawai(G, **kwargs):
    """Draw the graph `G` with a Kamada-Kawai force-directed layout.

    Convenience wrapper equivalent to::

        nx.draw(G, pos=nx.kamada_kawai_layout(G), **kwargs)

    Parameters
    ----------
    G : graph
        A networkx graph

    kwargs : optional keywords
        See `draw_networkx` for a description of optional keywords.

    Notes
    -----
    The layout is recomputed on every call. For repeated drawing it is much
    more efficient to call
    :func:`~networkx.drawing.layout.kamada_kawai_layout` once and reuse the
    result::

        >>> G = nx.complete_graph(5)
        >>> pos = nx.kamada_kawai_layout(G)
        >>> nx.draw(G, pos=pos)  # Draw the original graph
        >>> # Draw a subgraph, reusing the same node positions
        >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.draw_kamada_kawai(G)

    See Also
    --------
    :func:`~networkx.drawing.layout.kamada_kawai_layout`
    """
    positions = kamada_kawai_layout(G)
    draw(G, positions, **kwargs)
1305
+
1306
+
1307
def draw_random(G, **kwargs):
    """Draw the graph `G` with a random layout.

    Convenience wrapper equivalent to::

        nx.draw(G, pos=nx.random_layout(G), **kwargs)

    Parameters
    ----------
    G : graph
        A networkx graph

    kwargs : optional keywords
        See `draw_networkx` for a description of optional keywords.

    Notes
    -----
    The layout is recomputed on every call. For repeated drawing it is much
    more efficient to call :func:`~networkx.drawing.layout.random_layout`
    once and reuse the result::

        >>> G = nx.complete_graph(5)
        >>> pos = nx.random_layout(G)
        >>> nx.draw(G, pos=pos)  # Draw the original graph
        >>> # Draw a subgraph, reusing the same node positions
        >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")

    Examples
    --------
    >>> G = nx.lollipop_graph(4, 3)
    >>> nx.draw_random(G)

    See Also
    --------
    :func:`~networkx.drawing.layout.random_layout`
    """
    positions = random_layout(G)
    draw(G, positions, **kwargs)
1344
+
1345
+
1346
def draw_spectral(G, **kwargs):
    """Draw the graph `G` with a spectral 2D layout.

    Convenience wrapper equivalent to::

        nx.draw(G, pos=nx.spectral_layout(G), **kwargs)

    For more information about how node positions are determined, see
    `~networkx.drawing.layout.spectral_layout`.

    Parameters
    ----------
    G : graph
        A networkx graph

    kwargs : optional keywords
        See `draw_networkx` for a description of optional keywords.

    Notes
    -----
    The layout is recomputed on every call. For repeated drawing it is much
    more efficient to call :func:`~networkx.drawing.layout.spectral_layout`
    once and reuse the result::

        >>> G = nx.complete_graph(5)
        >>> pos = nx.spectral_layout(G)
        >>> nx.draw(G, pos=pos)  # Draw the original graph
        >>> # Draw a subgraph, reusing the same node positions
        >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.draw_spectral(G)

    See Also
    --------
    :func:`~networkx.drawing.layout.spectral_layout`
    """
    positions = spectral_layout(G)
    draw(G, positions, **kwargs)
1386
+
1387
+
1388
def draw_spring(G, **kwargs):
    """Draw the graph `G` with a spring layout.

    Convenience wrapper equivalent to::

        nx.draw(G, pos=nx.spring_layout(G), **kwargs)

    Parameters
    ----------
    G : graph
        A networkx graph

    kwargs : optional keywords
        See `draw_networkx` for a description of optional keywords.

    Notes
    -----
    `~networkx.drawing.layout.spring_layout` is also the default layout for
    `draw`, so this function is equivalent to `draw`.

    The layout is recomputed on every call. For repeated drawing it is much
    more efficient to call :func:`~networkx.drawing.layout.spring_layout`
    once and reuse the result::

        >>> G = nx.complete_graph(5)
        >>> pos = nx.spring_layout(G)
        >>> nx.draw(G, pos=pos)  # Draw the original graph
        >>> # Draw a subgraph, reusing the same node positions
        >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")

    Examples
    --------
    >>> G = nx.path_graph(20)
    >>> nx.draw_spring(G)

    See Also
    --------
    draw
    :func:`~networkx.drawing.layout.spring_layout`
    """
    positions = spring_layout(G)
    draw(G, positions, **kwargs)
1429
+
1430
+
1431
def draw_shell(G, nlist=None, **kwargs):
    """Draw networkx graph `G` with shell layout.

    Convenience wrapper equivalent to::

        nx.draw(G, pos=nx.shell_layout(G, nlist=nlist), **kwargs)

    Parameters
    ----------
    G : graph
        A networkx graph

    nlist : list of list of nodes, optional
        A list containing lists of nodes representing the shells.
        Default is `None`, meaning all nodes are in a single shell.
        See `~networkx.drawing.layout.shell_layout` for details.

    kwargs : optional keywords
        See `draw_networkx` for a description of optional keywords.

    Notes
    -----
    The layout is recomputed on every call. For repeated drawing it is much
    more efficient to call :func:`~networkx.drawing.layout.shell_layout`
    once and reuse the result::

        >>> G = nx.complete_graph(5)
        >>> pos = nx.shell_layout(G)
        >>> nx.draw(G, pos=pos)  # Draw the original graph
        >>> # Draw a subgraph, reusing the same node positions
        >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> shells = [[0], [1, 2, 3]]
    >>> nx.draw_shell(G, nlist=shells)

    See Also
    --------
    :func:`~networkx.drawing.layout.shell_layout`
    """
    positions = shell_layout(G, nlist=nlist)
    draw(G, positions, **kwargs)
1474
+
1475
+
1476
def draw_planar(G, **kwargs):
    """Draw a planar networkx graph `G` with planar layout.

    Convenience wrapper equivalent to::

        nx.draw(G, pos=nx.planar_layout(G), **kwargs)

    Parameters
    ----------
    G : graph
        A planar networkx graph

    kwargs : optional keywords
        See `draw_networkx` for a description of optional keywords.

    Raises
    ------
    NetworkXException
        When `G` is not planar

    Notes
    -----
    The layout is recomputed on every call. For repeated drawing it is much
    more efficient to call :func:`~networkx.drawing.layout.planar_layout`
    once and reuse the result::

        >>> G = nx.path_graph(5)
        >>> pos = nx.planar_layout(G)
        >>> nx.draw(G, pos=pos)  # Draw the original graph
        >>> # Draw a subgraph, reusing the same node positions
        >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red")

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> nx.draw_planar(G)

    See Also
    --------
    :func:`~networkx.drawing.layout.planar_layout`
    """
    positions = planar_layout(G)
    draw(G, positions, **kwargs)
1518
+
1519
+
1520
def apply_alpha(colors, alpha, elem_list, cmap=None, vmin=None, vmax=None):
    """Apply an alpha (or list of alphas) to the colors provided.

    Parameters
    ----------

    colors : color string or array of floats (default='r')
        Color of element. Can be a single color format string,
        or a sequence of colors with the same length as nodelist.
        If numeric values are specified they will be mapped to
        colors using the cmap and vmin,vmax parameters. See
        matplotlib.scatter for more details.

    alpha : float or array of floats
        Alpha values for elements. This can be a single alpha value, in
        which case it will be applied to all the elements of color. Otherwise,
        if it is an array, the elements of alpha will be applied to the colors
        in order (cycling through alpha multiple times if necessary).

    elem_list : array of networkx objects
        The list of elements which are being colored. These could be nodes,
        edges or labels.

    cmap : matplotlib colormap
        Color map for use if colors is a list of floats corresponding to points
        on a color mapping.

    vmin, vmax : float
        Minimum and maximum values for normalizing colors if a colormap is used

    Returns
    -------

    rgba_colors : numpy ndarray
        Array containing RGBA format values for each of the node colours.

    """
    from itertools import cycle, islice

    import matplotlib as mpl
    import matplotlib.cm  # call as mpl.cm
    import matplotlib.colors  # call as mpl.colors
    import numpy as np

    # If we have been provided with a list of numbers as long as elem_list,
    # apply the color mapping.
    if len(colors) == len(elem_list) and isinstance(colors[0], Number):
        mapper = mpl.cm.ScalarMappable(cmap=cmap)
        mapper.set_clim(vmin, vmax)
        rgba_colors = mapper.to_rgba(colors)
    # Otherwise, convert colors to matplotlib's RGB using the colorConverter
    # object. These are converted to numpy ndarrays to be consistent with the
    # to_rgba method of ScalarMappable.
    else:
        try:
            # A single color spec (e.g. "r" or an rgb tuple) -> a 1x4 array.
            rgba_colors = np.array([mpl.colors.colorConverter.to_rgba(colors)])
        except ValueError:
            # A sequence of color specs -> one rgba row per color.
            rgba_colors = np.array(
                [mpl.colors.colorConverter.to_rgba(color) for color in colors]
            )
    # Set the final column of the rgba_colors to have the relevant alpha values
    try:
        # If alpha is longer than the number of colors, resize to the number of
        # elements. Also, if rgba_colors.size (the number of elements of
        # rgba_colors) is the same as the number of elements, resize the array,
        # to avoid it being interpreted as a colormap by scatter()
        if len(alpha) > len(rgba_colors) or rgba_colors.size == len(elem_list):
            rgba_colors = np.resize(rgba_colors, (len(elem_list), 4))
            # np.resize repeats data; overwrite the RGB channels of every row
            # with the first row's so only the alpha channel varies per element.
            rgba_colors[1:, 0] = rgba_colors[0, 0]
            rgba_colors[1:, 1] = rgba_colors[0, 1]
            rgba_colors[1:, 2] = rgba_colors[0, 2]
        # Cycle through the alpha values so a short alpha list wraps around.
        rgba_colors[:, 3] = list(islice(cycle(alpha), len(rgba_colors)))
    except TypeError:
        # len(alpha) raised TypeError: alpha is a scalar, broadcast it to all.
        rgba_colors[:, -1] = alpha
    return rgba_colors
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (223 Bytes). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__pycache__/test_pylab.cpython-311.pyc ADDED
Binary file (45.7 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/baseline/test_house_with_colors.png ADDED
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/test_agraph.py ADDED
@@ -0,0 +1,254 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for PyGraphviz interface."""
2
+ import os
3
+ import tempfile
4
+
5
+ import pytest
6
+
7
+ pygraphviz = pytest.importorskip("pygraphviz")
8
+
9
+
10
+ import networkx as nx
11
+ from networkx.utils import edges_equal, graphs_equal, nodes_equal
12
+
13
+
14
+ class TestAGraph:
15
+ def build_graph(self, G):
16
+ edges = [("A", "B"), ("A", "C"), ("A", "C"), ("B", "C"), ("A", "D")]
17
+ G.add_edges_from(edges)
18
+ G.add_node("E")
19
+ G.graph["metal"] = "bronze"
20
+ return G
21
+
22
+ def assert_equal(self, G1, G2):
23
+ assert nodes_equal(G1.nodes(), G2.nodes())
24
+ assert edges_equal(G1.edges(), G2.edges())
25
+ assert G1.graph["metal"] == G2.graph["metal"]
26
+
27
+ def agraph_checks(self, G):
28
+ G = self.build_graph(G)
29
+ A = nx.nx_agraph.to_agraph(G)
30
+ H = nx.nx_agraph.from_agraph(A)
31
+ self.assert_equal(G, H)
32
+
33
+ fd, fname = tempfile.mkstemp()
34
+ nx.drawing.nx_agraph.write_dot(H, fname)
35
+ Hin = nx.nx_agraph.read_dot(fname)
36
+ self.assert_equal(H, Hin)
37
+ os.close(fd)
38
+ os.unlink(fname)
39
+
40
+ (fd, fname) = tempfile.mkstemp()
41
+ with open(fname, "w") as fh:
42
+ nx.drawing.nx_agraph.write_dot(H, fh)
43
+
44
+ with open(fname) as fh:
45
+ Hin = nx.nx_agraph.read_dot(fh)
46
+ os.close(fd)
47
+ os.unlink(fname)
48
+ self.assert_equal(H, Hin)
49
+
50
+ def test_from_agraph_name(self):
51
+ G = nx.Graph(name="test")
52
+ A = nx.nx_agraph.to_agraph(G)
53
+ H = nx.nx_agraph.from_agraph(A)
54
+ assert G.name == "test"
55
+
56
+ @pytest.mark.parametrize(
57
+ "graph_class", (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)
58
+ )
59
+ def test_from_agraph_create_using(self, graph_class):
60
+ G = nx.path_graph(3)
61
+ A = nx.nx_agraph.to_agraph(G)
62
+ H = nx.nx_agraph.from_agraph(A, create_using=graph_class)
63
+ assert isinstance(H, graph_class)
64
+
65
+ def test_from_agraph_named_edges(self):
66
+ # Create an AGraph from an existing (non-multi) Graph
67
+ G = nx.Graph()
68
+ G.add_nodes_from([0, 1])
69
+ A = nx.nx_agraph.to_agraph(G)
70
+ # Add edge (+ name, given by key) to the AGraph
71
+ A.add_edge(0, 1, key="foo")
72
+ # Verify a.name roundtrips out to 'key' in from_agraph
73
+ H = nx.nx_agraph.from_agraph(A)
74
+ assert isinstance(H, nx.Graph)
75
+ assert ("0", "1", {"key": "foo"}) in H.edges(data=True)
76
+
77
+ def test_undirected(self):
78
+ self.agraph_checks(nx.Graph())
79
+
80
+ def test_directed(self):
81
+ self.agraph_checks(nx.DiGraph())
82
+
83
+ def test_multi_undirected(self):
84
+ self.agraph_checks(nx.MultiGraph())
85
+
86
+ def test_multi_directed(self):
87
+ self.agraph_checks(nx.MultiDiGraph())
88
+
89
+ def test_to_agraph_with_nodedata(self):
90
+ G = nx.Graph()
91
+ G.add_node(1, color="red")
92
+ A = nx.nx_agraph.to_agraph(G)
93
+ assert dict(A.nodes()[0].attr) == {"color": "red"}
94
+
95
+ @pytest.mark.parametrize("graph_class", (nx.Graph, nx.MultiGraph))
96
+ def test_to_agraph_with_edgedata(self, graph_class):
97
+ G = graph_class()
98
+ G.add_nodes_from([0, 1])
99
+ G.add_edge(0, 1, color="yellow")
100
+ A = nx.nx_agraph.to_agraph(G)
101
+ assert dict(A.edges()[0].attr) == {"color": "yellow"}
102
+
103
+ def test_view_pygraphviz_path(self, tmp_path):
104
+ G = nx.complete_graph(3)
105
+ input_path = str(tmp_path / "graph.png")
106
+ out_path, A = nx.nx_agraph.view_pygraphviz(G, path=input_path, show=False)
107
+ assert out_path == input_path
108
+ # Ensure file is not empty
109
+ with open(input_path, "rb") as fh:
110
+ data = fh.read()
111
+ assert len(data) > 0
112
+
113
+ def test_view_pygraphviz_file_suffix(self, tmp_path):
114
+ G = nx.complete_graph(3)
115
+ path, A = nx.nx_agraph.view_pygraphviz(G, suffix=1, show=False)
116
+ assert path[-6:] == "_1.png"
117
+
118
+ def test_view_pygraphviz(self):
119
+ G = nx.Graph() # "An empty graph cannot be drawn."
120
+ pytest.raises(nx.NetworkXException, nx.nx_agraph.view_pygraphviz, G)
121
+ G = nx.barbell_graph(4, 6)
122
+ nx.nx_agraph.view_pygraphviz(G, show=False)
123
+
124
+ def test_view_pygraphviz_edgelabel(self):
125
+ G = nx.Graph()
126
+ G.add_edge(1, 2, weight=7)
127
+ G.add_edge(2, 3, weight=8)
128
+ path, A = nx.nx_agraph.view_pygraphviz(G, edgelabel="weight", show=False)
129
+ for edge in A.edges():
130
+ assert edge.attr["weight"] in ("7", "8")
131
+
132
+ def test_view_pygraphviz_callable_edgelabel(self):
133
+ G = nx.complete_graph(3)
134
+
135
+ def foo_label(data):
136
+ return "foo"
137
+
138
+ path, A = nx.nx_agraph.view_pygraphviz(G, edgelabel=foo_label, show=False)
139
+ for edge in A.edges():
140
+ assert edge.attr["label"] == "foo"
141
+
142
def test_view_pygraphviz_multigraph_edgelabels(self):
    """Parallel edges of a multigraph each keep their own label."""
    graph = nx.MultiGraph()
    graph.add_edge(0, 1, key=0, name="left_fork")
    graph.add_edge(0, 1, key=1, name="right_fork")
    _, agraph = nx.nx_agraph.view_pygraphviz(graph, edgelabel="name", show=False)
    drawn_edges = agraph.edges()
    assert len(drawn_edges) == 2
    for drawn_edge in drawn_edges:
        assert drawn_edge.attr["label"].strip() in ("left_fork", "right_fork")
151
+
152
def test_graph_with_reserved_keywords(self):
    """Attribute names that clash with pygraphviz keywords (#1582).

    Node attribute ``n`` and edge attributes ``u``/``v`` must not break
    conversion to an AGraph; this is a smoke test only.
    """
    graph = self.build_graph(nx.Graph())
    graph.nodes["E"]["n"] = "keyword"
    edge_data = graph.edges[("A", "B")]
    edge_data["u"] = "keyword"
    edge_data["v"] = "keyword"
    nx.nx_agraph.to_agraph(graph)
162
+
163
def test_view_pygraphviz_no_added_attrs_to_input(self):
    """Drawing must not inject attributes into the caller's graph."""
    graph = nx.complete_graph(2)
    nx.nx_agraph.view_pygraphviz(graph, show=False)
    assert graph.graph == {}
167
+
168
@pytest.mark.xfail(reason="known bug in clean_attrs")
def test_view_pygraphviz_leaves_input_graph_unmodified(self):
    """The special 'node'/'edge' graph entries should survive drawing."""
    graph = nx.complete_graph(2)
    # These keys receive special handling in to_agraph.
    graph.graph["node"] = {"width": "0.80"}
    graph.graph["edge"] = {"fontsize": "14"}
    nx.nx_agraph.view_pygraphviz(graph, show=False)
    assert graph.graph == {"node": {"width": "0.80"}, "edge": {"fontsize": "14"}}
176
+
177
def test_graph_with_AGraph_attrs(self):
    """Default node/edge attrs set on the graph dict reach the AGraph."""
    graph = nx.complete_graph(2)
    # Keys that to_agraph maps onto AGraph-wide defaults.
    graph.graph["node"] = {"width": "0.80"}
    graph.graph["edge"] = {"fontsize": "14"}
    _, agraph = nx.nx_agraph.view_pygraphviz(graph, show=False)
    # User-specified defaults must not be lost in conversion.
    assert dict(agraph.node_attr)["width"] == "0.80"
    assert dict(agraph.edge_attr)["fontsize"] == "14"
186
+
187
def test_round_trip_empty_graph(self):
    """An empty graph is stable once it has passed through AGraph once."""
    graph = nx.Graph()
    once = nx.nx_agraph.from_agraph(nx.nx_agraph.to_agraph(graph))
    # The first pass adds empty graph/node/edge dicts, so compare the
    # second round trip against the first rather than the original.
    twice = nx.nx_agraph.from_agraph(nx.nx_agraph.to_agraph(once))
    assert graphs_equal(once, twice)
    for key in ("graph", "node", "edge"):
        graph.graph[key] = {}
    assert graphs_equal(graph, twice)
199
+
200
@pytest.mark.xfail(reason="integer->string node conversion in round trip")
def test_round_trip_integer_nodes(self):
    """Integer node labels should survive a round trip (known bug)."""
    graph = nx.complete_graph(3)
    restored = nx.nx_agraph.from_agraph(nx.nx_agraph.to_agraph(graph))
    assert graphs_equal(graph, restored)
206
+
207
def test_graphviz_alias(self):
    """graphviz_layout is an alias for pygraphviz_layout."""
    graph = self.build_graph(nx.Graph())
    via_graphviz = nx.nx_agraph.graphviz_layout(graph)
    via_pygraphviz = nx.nx_agraph.pygraphviz_layout(graph)
    assert via_graphviz == via_pygraphviz
212
+
213
@pytest.mark.parametrize("root", range(5))
def test_pygraphviz_layout_root(self, root):
    """``root=`` is forwarded to graphviz as ``-Groot``.

    NOTE: relies on the circo layout program being deterministic.
    """
    graph = nx.complete_graph(5)
    agraph = nx.nx_agraph.to_agraph(graph)
    # Layout through the networkx wrapper with an explicit root ...
    wrapper_layout = nx.nx_agraph.pygraphviz_layout(graph, prog="circo", root=root)
    # ... and the equivalent layout computed directly on the AGraph.
    agraph.layout(args=f"-Groot={root}", prog="circo")
    raw_pos = dict(agraph.get_node("1").attr)["pos"]
    direct_pos = tuple(float(coord) for coord in raw_pos.split(","))
    assert wrapper_layout[1] == direct_pos
225
+
226
def test_2d_layout(self):
    """neato layout with dimen=2 yields one 2-D point per node."""
    graph = self.build_graph(nx.Graph())
    graph.graph["dimen"] = 2
    positions = list(nx.nx_agraph.pygraphviz_layout(graph, prog="neato").values())
    assert len(positions) == 5
    assert len(positions[0]) == 2
234
+
235
def test_3d_layout(self):
    """neato layout with dimen=3 yields one 3-D point per node."""
    graph = self.build_graph(nx.Graph())
    graph.graph["dimen"] = 3
    positions = list(nx.nx_agraph.pygraphviz_layout(graph, prog="neato").values())
    assert len(positions) == 5
    assert len(positions[0]) == 3
243
+
244
def test_no_warnings_raised(self):
    """Converting a graph with 'pos' node data and laying it out must
    not emit any warnings.

    ``pytest.warns(None)`` was deprecated in pytest 6.2 and removed in
    pytest 7 (it now raises), so collect warnings with the stdlib
    ``warnings.catch_warnings`` instead.
    """
    import warnings

    G = nx.Graph()
    G.add_node(0, pos=(0, 0))
    G.add_node(1, pos=(1, 1))
    A = nx.nx_agraph.to_agraph(G)
    with warnings.catch_warnings(record=True) as record:
        warnings.simplefilter("always")  # record every warning, filtered or not
        A.layout()
    assert len(record) == 0
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/test_layout.py ADDED
@@ -0,0 +1,469 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for layout functions."""
2
+ import pytest
3
+
4
+ import networkx as nx
5
+
6
+ np = pytest.importorskip("numpy")
7
+ pytest.importorskip("scipy")
8
+
9
+
10
class TestLayout:
    """Tests for the pure-Python layout algorithms in ``nx.drawing.layout``."""

    @classmethod
    def setup_class(cls):
        # Shared fixtures: integer-tuple nodes, string nodes, and a graph
        # large enough to exercise the sparse (scipy) code paths.
        cls.Gi = nx.grid_2d_graph(5, 5)
        cls.Gs = nx.Graph()
        nx.add_path(cls.Gs, "abcdef")
        cls.bigG = nx.grid_2d_graph(25, 25)  # > 500 nodes for sparse

    def test_spring_fixed_without_pos(self):
        # fixed= requires pos=, and every fixed node must appear in pos.
        G = nx.path_graph(4)
        pytest.raises(ValueError, nx.spring_layout, G, fixed=[0])
        pos = {0: (1, 1), 2: (0, 0)}
        pytest.raises(ValueError, nx.spring_layout, G, fixed=[0, 1], pos=pos)
        nx.spring_layout(G, fixed=[0, 2], pos=pos)  # No ValueError

    def test_spring_init_pos(self):
        # Tests GH #2448: partial initial positions must not produce NaNs.
        import math

        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (2, 0), (2, 3)])

        init_pos = {0: (0.0, 0.0)}
        fixed_pos = [0]
        pos = nx.fruchterman_reingold_layout(G, pos=init_pos, fixed=fixed_pos)
        has_nan = any(math.isnan(c) for coords in pos.values() for c in coords)
        assert not has_nan, "values should not be nan"

    def test_smoke_empty_graph(self):
        # Every layout must accept an empty node container without raising.
        G = []
        nx.random_layout(G)
        nx.circular_layout(G)
        nx.planar_layout(G)
        nx.spring_layout(G)
        nx.fruchterman_reingold_layout(G)
        nx.spectral_layout(G)
        nx.shell_layout(G)
        nx.bipartite_layout(G, G)
        nx.spiral_layout(G)
        nx.multipartite_layout(G)
        nx.kamada_kawai_layout(G)

    def test_smoke_int(self):
        # Layouts on tuple-of-int nodes; bigG exercises the sparse paths.
        G = self.Gi
        nx.random_layout(G)
        nx.circular_layout(G)
        nx.planar_layout(G)
        nx.spring_layout(G)
        nx.fruchterman_reingold_layout(G)
        nx.fruchterman_reingold_layout(self.bigG)
        nx.spectral_layout(G)
        nx.spectral_layout(G.to_directed())
        nx.spectral_layout(self.bigG)
        nx.spectral_layout(self.bigG.to_directed())
        nx.shell_layout(G)
        nx.spiral_layout(G)
        nx.kamada_kawai_layout(G)
        nx.kamada_kawai_layout(G, dim=1)
        nx.kamada_kawai_layout(G, dim=3)
        nx.arf_layout(G)

    def test_smoke_string(self):
        # Layouts must also work with string node labels.
        G = self.Gs
        nx.random_layout(G)
        nx.circular_layout(G)
        nx.planar_layout(G)
        nx.spring_layout(G)
        nx.fruchterman_reingold_layout(G)
        nx.spectral_layout(G)
        nx.shell_layout(G)
        nx.spiral_layout(G)
        nx.kamada_kawai_layout(G)
        nx.kamada_kawai_layout(G, dim=1)
        nx.kamada_kawai_layout(G, dim=3)
        nx.arf_layout(G)

    def check_scale_and_center(self, pos, scale, center):
        # Helper: all positions lie in the box center +/- scale per axis.
        center = np.array(center)
        low = center - scale
        hi = center + scale
        vpos = np.array(list(pos.values()))
        length = vpos.max(0) - vpos.min(0)
        assert (length <= 2 * scale).all()
        assert (vpos >= low).all()
        assert (vpos <= hi).all()

    def test_scale_and_center_arg(self):
        sc = self.check_scale_and_center
        c = (4, 5)
        G = nx.complete_graph(9)
        G.add_node(9)
        # random_layout fills [0, 1) per axis, so its box is 0.5 around
        # the shifted center.
        sc(nx.random_layout(G, center=c), scale=0.5, center=(4.5, 5.5))
        # rest can have 2*scale length: [-scale, scale]
        sc(nx.spring_layout(G, scale=2, center=c), scale=2, center=c)
        sc(nx.spectral_layout(G, scale=2, center=c), scale=2, center=c)
        sc(nx.circular_layout(G, scale=2, center=c), scale=2, center=c)
        sc(nx.shell_layout(G, scale=2, center=c), scale=2, center=c)
        sc(nx.spiral_layout(G, scale=2, center=c), scale=2, center=c)
        sc(nx.kamada_kawai_layout(G, scale=2, center=c), scale=2, center=c)

        c = (2, 3, 5)
        sc(nx.kamada_kawai_layout(G, dim=3, scale=2, center=c), scale=2, center=c)

    def test_planar_layout_non_planar_input(self):
        G = nx.complete_graph(9)
        pytest.raises(nx.NetworkXException, nx.planar_layout, G)

    def test_smoke_planar_layout_embedding_input(self):
        # A PlanarEmbedding is accepted directly as input.
        embedding = nx.PlanarEmbedding()
        embedding.set_data({0: [1, 2], 1: [0, 2], 2: [0, 1]})
        nx.planar_layout(embedding)

    def test_default_scale_and_center(self):
        # Defaults: scale=1 centered at the origin (random_layout in [0, 1)).
        sc = self.check_scale_and_center
        c = (0, 0)
        G = nx.complete_graph(9)
        G.add_node(9)
        sc(nx.random_layout(G), scale=0.5, center=(0.5, 0.5))
        sc(nx.spring_layout(G), scale=1, center=c)
        sc(nx.spectral_layout(G), scale=1, center=c)
        sc(nx.circular_layout(G), scale=1, center=c)
        sc(nx.shell_layout(G), scale=1, center=c)
        sc(nx.spiral_layout(G), scale=1, center=c)
        sc(nx.kamada_kawai_layout(G), scale=1, center=c)

        c = (0, 0, 0)
        sc(nx.kamada_kawai_layout(G, dim=3), scale=1, center=c)

    def test_circular_planar_and_shell_dim_error(self):
        # These layouts only support 2-D output.
        G = nx.path_graph(4)
        pytest.raises(ValueError, nx.circular_layout, G, dim=1)
        pytest.raises(ValueError, nx.shell_layout, G, dim=1)
        pytest.raises(ValueError, nx.shell_layout, G, dim=3)
        pytest.raises(ValueError, nx.planar_layout, G, dim=1)
        pytest.raises(ValueError, nx.planar_layout, G, dim=3)

    def test_adjacency_interface_numpy(self):
        # The private helpers accept a dense adjacency array directly.
        A = nx.to_numpy_array(self.Gs)
        pos = nx.drawing.layout._fruchterman_reingold(A)
        assert pos.shape == (6, 2)
        pos = nx.drawing.layout._fruchterman_reingold(A, dim=3)
        assert pos.shape == (6, 3)
        pos = nx.drawing.layout._sparse_fruchterman_reingold(A)
        assert pos.shape == (6, 2)

    def test_adjacency_interface_scipy(self):
        # The private helpers also accept a scipy sparse adjacency array.
        A = nx.to_scipy_sparse_array(self.Gs, dtype="d")
        pos = nx.drawing.layout._sparse_fruchterman_reingold(A)
        assert pos.shape == (6, 2)
        pos = nx.drawing.layout._sparse_spectral(A)
        assert pos.shape == (6, 2)
        pos = nx.drawing.layout._sparse_fruchterman_reingold(A, dim=3)
        assert pos.shape == (6, 3)

    def test_single_nodes(self):
        G = nx.path_graph(1)
        vpos = nx.shell_layout(G)
        assert not vpos[0].any()
        G = nx.path_graph(4)
        vpos = nx.shell_layout(G, [[0], [1, 2], [3]])
        assert not vpos[0].any()
        assert vpos[3].any()  # ensure node 3 not at origin (#3188)
        assert np.linalg.norm(vpos[3]) <= 1  # ensure node 3 fits (#3753)
        vpos = nx.shell_layout(G, [[0], [1, 2], [3]], rotate=0)
        assert np.linalg.norm(vpos[3]) <= 1  # ensure node 3 fits (#3753)

    def test_smoke_initial_pos_fruchterman_reingold(self):
        pos = nx.circular_layout(self.Gi)
        npos = nx.fruchterman_reingold_layout(self.Gi, pos=pos)

    def test_smoke_initial_pos_arf(self):
        pos = nx.circular_layout(self.Gi)
        npos = nx.arf_layout(self.Gi, pos=pos)

    def test_fixed_node_fruchterman_reingold(self):
        # Dense version (numpy based)
        pos = nx.circular_layout(self.Gi)
        npos = nx.spring_layout(self.Gi, pos=pos, fixed=[(0, 0)])
        assert tuple(pos[(0, 0)]) == tuple(npos[(0, 0)])
        # Sparse version (scipy based)
        pos = nx.circular_layout(self.bigG)
        npos = nx.spring_layout(self.bigG, pos=pos, fixed=[(0, 0)])
        for axis in range(2):
            assert pos[(0, 0)][axis] == pytest.approx(npos[(0, 0)][axis], abs=1e-7)

    def test_center_parameter(self):
        # A single node lands exactly on the requested center.
        G = nx.path_graph(1)
        nx.random_layout(G, center=(1, 1))
        vpos = nx.circular_layout(G, center=(1, 1))
        assert tuple(vpos[0]) == (1, 1)
        vpos = nx.planar_layout(G, center=(1, 1))
        assert tuple(vpos[0]) == (1, 1)
        vpos = nx.spring_layout(G, center=(1, 1))
        assert tuple(vpos[0]) == (1, 1)
        vpos = nx.fruchterman_reingold_layout(G, center=(1, 1))
        assert tuple(vpos[0]) == (1, 1)
        vpos = nx.spectral_layout(G, center=(1, 1))
        assert tuple(vpos[0]) == (1, 1)
        vpos = nx.shell_layout(G, center=(1, 1))
        assert tuple(vpos[0]) == (1, 1)
        vpos = nx.spiral_layout(G, center=(1, 1))
        assert tuple(vpos[0]) == (1, 1)

    def test_center_wrong_dimensions(self):
        # A center whose length disagrees with dim raises ValueError.
        G = nx.path_graph(1)
        assert id(nx.spring_layout) == id(nx.fruchterman_reingold_layout)
        pytest.raises(ValueError, nx.random_layout, G, center=(1, 1, 1))
        pytest.raises(ValueError, nx.circular_layout, G, center=(1, 1, 1))
        pytest.raises(ValueError, nx.planar_layout, G, center=(1, 1, 1))
        pytest.raises(ValueError, nx.spring_layout, G, center=(1, 1, 1))
        pytest.raises(ValueError, nx.spring_layout, G, dim=3, center=(1, 1))
        pytest.raises(ValueError, nx.spectral_layout, G, center=(1, 1, 1))
        pytest.raises(ValueError, nx.spectral_layout, G, dim=3, center=(1, 1))
        pytest.raises(ValueError, nx.shell_layout, G, center=(1, 1, 1))
        pytest.raises(ValueError, nx.spiral_layout, G, center=(1, 1, 1))
        pytest.raises(ValueError, nx.kamada_kawai_layout, G, center=(1, 1, 1))

    def test_empty_graph(self):
        # Every layout returns an empty dict for an empty graph.
        G = nx.empty_graph()
        vpos = nx.random_layout(G, center=(1, 1))
        assert vpos == {}
        vpos = nx.circular_layout(G, center=(1, 1))
        assert vpos == {}
        vpos = nx.planar_layout(G, center=(1, 1))
        assert vpos == {}
        vpos = nx.bipartite_layout(G, G)
        assert vpos == {}
        vpos = nx.spring_layout(G, center=(1, 1))
        assert vpos == {}
        vpos = nx.fruchterman_reingold_layout(G, center=(1, 1))
        assert vpos == {}
        vpos = nx.spectral_layout(G, center=(1, 1))
        assert vpos == {}
        vpos = nx.shell_layout(G, center=(1, 1))
        assert vpos == {}
        vpos = nx.spiral_layout(G, center=(1, 1))
        assert vpos == {}
        vpos = nx.multipartite_layout(G, center=(1, 1))
        assert vpos == {}
        vpos = nx.kamada_kawai_layout(G, center=(1, 1))
        assert vpos == {}
        vpos = nx.arf_layout(G)
        assert vpos == {}

    def test_bipartite_layout(self):
        G = nx.complete_bipartite_graph(3, 5)
        top, bottom = nx.bipartite.sets(G)

        vpos = nx.bipartite_layout(G, top)
        assert len(vpos) == len(G)

        # Vertical alignment (default): each set shares one x-coordinate.
        top_x = vpos[list(top)[0]][0]
        bottom_x = vpos[list(bottom)[0]][0]
        for node in top:
            assert vpos[node][0] == top_x
        for node in bottom:
            assert vpos[node][0] == bottom_x

        vpos = nx.bipartite_layout(
            G, top, align="horizontal", center=(2, 2), scale=2, aspect_ratio=1
        )
        assert len(vpos) == len(G)

        # Horizontal alignment: each set shares one y-coordinate.
        top_y = vpos[list(top)[0]][1]
        bottom_y = vpos[list(bottom)[0]][1]
        for node in top:
            assert vpos[node][1] == top_y
        for node in bottom:
            assert vpos[node][1] == bottom_y

        pytest.raises(ValueError, nx.bipartite_layout, G, top, align="foo")

    def test_multipartite_layout(self):
        sizes = (0, 5, 7, 2, 8)
        G = nx.complete_multipartite_graph(*sizes)

        vpos = nx.multipartite_layout(G)
        assert len(vpos) == len(G)

        # Nodes within each partition share an x-coordinate (vertical align).
        start = 0
        for n in sizes:
            end = start + n
            assert all(vpos[start][0] == vpos[i][0] for i in range(start + 1, end))
            start += n

        vpos = nx.multipartite_layout(G, align="horizontal", scale=2, center=(2, 2))
        assert len(vpos) == len(G)

        # Nodes within each partition share a y-coordinate (horizontal align).
        start = 0
        for n in sizes:
            end = start + n
            assert all(vpos[start][1] == vpos[i][1] for i in range(start + 1, end))
            start += n

        pytest.raises(ValueError, nx.multipartite_layout, G, align="foo")

    def test_kamada_kawai_costfn_1d(self):
        # Hand-computed cost/gradient for a 2-node 1-D problem.
        costfn = nx.drawing.layout._kamada_kawai_costfn

        pos = np.array([4.0, 7.0])
        invdist = 1 / np.array([[0.1, 2.0], [2.0, 0.3]])

        cost, grad = costfn(pos, np, invdist, meanweight=0, dim=1)

        assert cost == pytest.approx(((3 / 2.0 - 1) ** 2), abs=1e-7)
        assert grad[0] == pytest.approx((-0.5), abs=1e-7)
        assert grad[1] == pytest.approx(0.5, abs=1e-7)

    def check_kamada_kawai_costfn(self, pos, invdist, meanwt, dim):
        # Helper: verify cost against a direct computation and the gradient
        # against central finite differences.
        costfn = nx.drawing.layout._kamada_kawai_costfn

        cost, grad = costfn(pos.ravel(), np, invdist, meanweight=meanwt, dim=dim)

        expected_cost = 0.5 * meanwt * np.sum(np.sum(pos, axis=0) ** 2)
        for i in range(pos.shape[0]):
            for j in range(i + 1, pos.shape[0]):
                diff = np.linalg.norm(pos[i] - pos[j])
                expected_cost += (diff * invdist[i][j] - 1.0) ** 2

        assert cost == pytest.approx(expected_cost, abs=1e-7)

        dx = 1e-4
        for nd in range(pos.shape[0]):
            for dm in range(pos.shape[1]):
                idx = nd * pos.shape[1] + dm
                ps = pos.flatten()

                ps[idx] += dx
                cplus = costfn(ps, np, invdist, meanweight=meanwt, dim=pos.shape[1])[0]

                ps[idx] -= 2 * dx
                cminus = costfn(ps, np, invdist, meanweight=meanwt, dim=pos.shape[1])[0]

                assert grad[idx] == pytest.approx((cplus - cminus) / (2 * dx), abs=1e-5)

    def test_kamada_kawai_costfn(self):
        invdist = 1 / np.array([[0.1, 2.1, 1.7], [2.1, 0.2, 0.6], [1.7, 0.6, 0.3]])
        meanwt = 0.3

        # 2d
        pos = np.array([[1.3, -3.2], [2.7, -0.3], [5.1, 2.5]])

        self.check_kamada_kawai_costfn(pos, invdist, meanwt, 2)

        # 3d
        pos = np.array([[0.9, 8.6, -8.7], [-10, -0.5, -7.1], [9.1, -8.1, 1.6]])

        self.check_kamada_kawai_costfn(pos, invdist, meanwt, 3)

    def test_spiral_layout(self):
        G = self.Gs

        # a lower value of resolution should result in a more compact layout
        # intuitively, the total distance from the start and end nodes
        # via each node in between (transiting through each) will be less,
        # assuming rescaling does not occur on the computed node positions
        pos_standard = np.array(list(nx.spiral_layout(G, resolution=0.35).values()))
        pos_tighter = np.array(list(nx.spiral_layout(G, resolution=0.34).values()))
        distances = np.linalg.norm(pos_standard[:-1] - pos_standard[1:], axis=1)
        distances_tighter = np.linalg.norm(pos_tighter[:-1] - pos_tighter[1:], axis=1)
        assert sum(distances) > sum(distances_tighter)

        # return near-equidistant points after the first value if set to true
        pos_equidistant = np.array(list(nx.spiral_layout(G, equidistant=True).values()))
        distances_equidistant = np.linalg.norm(
            pos_equidistant[:-1] - pos_equidistant[1:], axis=1
        )
        assert np.allclose(
            distances_equidistant[1:], distances_equidistant[-1], atol=0.01
        )

    def test_spiral_layout_equidistant(self):
        G = nx.path_graph(10)
        pos = nx.spiral_layout(G, equidistant=True)
        # Extract individual node positions as an array
        p = np.array(list(pos.values()))
        # Elementwise-distance between node positions
        dist = np.linalg.norm(p[1:] - p[:-1], axis=1)
        assert np.allclose(np.diff(dist), 0, atol=1e-3)

    def test_rescale_layout_dict(self):
        G = nx.empty_graph()
        vpos = nx.random_layout(G, center=(1, 1))
        assert nx.rescale_layout_dict(vpos) == {}

        # Rescaled positions are centered on the origin.
        G = nx.empty_graph(2)
        vpos = {0: (0.0, 0.0), 1: (1.0, 1.0)}
        s_vpos = nx.rescale_layout_dict(vpos)
        assert np.linalg.norm([sum(x) for x in zip(*s_vpos.values())]) < 1e-6

        G = nx.empty_graph(3)
        vpos = {0: (0, 0), 1: (1, 1), 2: (0.5, 0.5)}
        s_vpos = nx.rescale_layout_dict(vpos)

        expectation = {
            0: np.array((-1, -1)),
            1: np.array((1, 1)),
            2: np.array((0, 0)),
        }
        for k, v in expectation.items():
            assert (s_vpos[k] == v).all()
        s_vpos = nx.rescale_layout_dict(vpos, scale=2)
        expectation = {
            0: np.array((-2, -2)),
            1: np.array((2, 2)),
            2: np.array((0, 0)),
        }
        for k, v in expectation.items():
            assert (s_vpos[k] == v).all()

    def test_arf_layout_partial_input_test(self):
        """
        Checks whether partial pos input still returns a proper position.
        """
        G = self.Gs
        node = nx.utils.arbitrary_element(G)
        pos = nx.circular_layout(G)
        del pos[node]
        pos = nx.arf_layout(G, pos=pos)
        assert len(pos) == len(G)

    def test_arf_layout_negative_a_check(self):
        """
        Checks input parameters correctly raises errors. For example, `a` should be larger than 1
        """
        G = self.Gs
        pytest.raises(ValueError, nx.arf_layout, G=G, a=-1)
437
+
438
+
439
def test_multipartite_layout_nonnumeric_partition_labels():
    """See gh-5123."""
    graph = nx.Graph()
    for node, subset in enumerate(("s0", "s0", "s1", "s1")):
        graph.add_node(node, subset=subset)
    graph.add_edges_from([(0, 2), (0, 3), (1, 2)])
    layout = nx.multipartite_layout(graph)
    assert len(layout) == len(graph)
449
+
450
+
451
def test_multipartite_layout_layer_order():
    """Return the layers in sorted order if the layers of the multipartite
    graph are sortable. See gh-5691"""
    graph = nx.Graph()
    for name, layer in zip(("a", "b", "c", "d", "e"), (2, 3, 1, 2, 4)):
        graph.add_node(name, subset=layer)

    # With horizontal alignment, the y-coordinate encodes the layer.
    pos = nx.multipartite_layout(graph, align="horizontal")

    # "a" and "d" belong to the same layer ...
    assert pos["a"][-1] == pos["d"][-1]
    # ... and layers appear in sorted order of their labels.
    assert pos["c"][-1] < pos["a"][-1] < pos["b"][-1] < pos["e"][-1]

    # Unsortable (mixed str/int) layer labels must still be handled.
    graph.nodes["a"]["subset"] = "layer_0"  # Can't sort mixed strs/ints
    pos_nosort = nx.multipartite_layout(graph)  # smoke test: this should not raise
    assert pos_nosort.keys() == pos.keys()
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/generators/tests/__init__.py ADDED
File without changes
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/generators/tests/__pycache__/test_atlas.cpython-311.pyc ADDED
Binary file (6.17 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/generators/tests/__pycache__/test_nonisomorphic_trees.cpython-311.pyc ADDED
Binary file (5.54 kB). View file